Compare commits


2 Commits

Author SHA1 Message Date
b12fe66871 Merge branch 'main' into docs/update-net-sdk 2025-05-29 08:07:43 -07:00
28582d9134 Update .NET docs with new links 2025-05-29 08:07:07 -07:00
1421 changed files with 7670 additions and 66361 deletions

View File

@@ -107,14 +107,6 @@ INF_APP_CONNECTION_GITHUB_APP_PRIVATE_KEY=
 INF_APP_CONNECTION_GITHUB_APP_SLUG=
 INF_APP_CONNECTION_GITHUB_APP_ID=
-#github radar app connection
-INF_APP_CONNECTION_GITHUB_RADAR_APP_CLIENT_ID=
-INF_APP_CONNECTION_GITHUB_RADAR_APP_CLIENT_SECRET=
-INF_APP_CONNECTION_GITHUB_RADAR_APP_PRIVATE_KEY=
-INF_APP_CONNECTION_GITHUB_RADAR_APP_SLUG=
-INF_APP_CONNECTION_GITHUB_RADAR_APP_ID=
-INF_APP_CONNECTION_GITHUB_RADAR_APP_WEBHOOK_SECRET=
 #gcp app connection
 INF_APP_CONNECTION_GCP_SERVICE_ACCOUNT_CREDENTIAL=

View File

@@ -1,53 +0,0 @@
name: Detect Non-RE2 Regex
on:
pull_request:
types: [opened, synchronize]
jobs:
check-non-re2-regex:
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Get diff of backend/*
run: |
git diff --unified=0 "origin/${{ github.base_ref }}"...HEAD -- backend/ > diff.txt
- name: Scan backend diff for non-RE2 regex
run: |
# Extract only added lines (excluding file headers)
grep '^+' diff.txt | grep -v '^+++' | sed 's/^\+//' > added_lines.txt
if [ ! -s added_lines.txt ]; then
echo "✅ No added lines in backend/ to check for regex usage."
exit 0
fi
regex_usage_pattern='(^|[^A-Za-z0-9_"'"'"'`\.\/\\])(\/(?:\\.|[^\/\n\\])+\/[gimsuyv]*(?=\s*[\.\(;,)\]}:]|$)|new RegExp\()'
# Find all added lines that contain regex patterns
if grep -E "$regex_usage_pattern" added_lines.txt > potential_violations.txt 2>/dev/null; then
# Filter out lines that contain 'new RE2' (allowing for whitespace variations)
if grep -v -E 'new\s+RE2\s*\(' potential_violations.txt > actual_violations.txt 2>/dev/null && [ -s actual_violations.txt ]; then
echo "🚨 ERROR: Found forbidden regex pattern in added/modified backend code."
echo ""
echo "The following lines use raw regex literals (/.../) or new RegExp(...):"
echo "Please replace with 'new RE2(...)' for RE2 compatibility."
echo ""
echo "Offending lines:"
cat actual_violations.txt
exit 1
else
echo "✅ All identified regex usages are correctly using 'new RE2(...)'."
fi
else
echo "✅ No regex patterns found in added/modified backend lines."
fi
- name: Cleanup temporary files
if: always()
run: |
rm -f diff.txt added_lines.txt potential_violations.txt actual_violations.txt
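
For reference, the replacement this check asks for might look like the following TypeScript sketch. It assumes the "re2" npm package, whose instances mirror the built-in RegExp API; the example pattern is illustrative and not taken from this diff.

import RE2 from "re2";

// Before (would be flagged by the workflow): const emailPattern = /^[^@\s]+@[^@\s]+$/;
const emailPattern = new RE2("^[^@\\s]+@[^@\\s]+$");

console.log(emailPattern.test("user@example.com")); // true
console.log(emailPattern.test("not an email")); // false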

View File

@@ -3,62 +3,7 @@ name: Release Infisical Core Helm chart
 on: [workflow_dispatch]
 jobs:
-  test-helm:
-    name: Test Helm Chart
-    runs-on: ubuntu-latest
-    steps:
-      - name: Checkout
-        uses: actions/checkout@v3
-        with:
-          fetch-depth: 0
-      - name: Set up Helm
-        uses: azure/setup-helm@v4.2.0
-        with:
-          version: v3.17.0
-      - uses: actions/setup-python@v5.3.0
-        with:
-          python-version: "3.x"
-          check-latest: true
-      - name: Add Helm repositories
-        run: |
-          helm repo add ingress-nginx https://kubernetes.github.io/ingress-nginx
-          helm repo add bitnami https://charts.bitnami.com/bitnami
-          helm repo update
-      - name: Set up chart-testing
-        uses: helm/chart-testing-action@v2.7.0
-      - name: Run chart-testing (lint)
-        run: ct lint --config ct.yaml --charts helm-charts/infisical-standalone-postgres
-      - name: Create kind cluster
-        uses: helm/kind-action@v1.12.0
-      - name: Create namespace
-        run: kubectl create namespace infisical-standalone-postgres
-      - name: Create Infisical secrets
-        run: |
-          kubectl create secret generic infisical-secrets \
-            --namespace infisical-standalone-postgres \
-            --from-literal=AUTH_SECRET=6c1fe4e407b8911c104518103505b218 \
-            --from-literal=ENCRYPTION_KEY=6c1fe4e407b8911c104518103505b218 \
-            --from-literal=SITE_URL=http://localhost:8080
-      - name: Run chart-testing (install)
-        run: |
-          ct install \
-            --config ct.yaml \
-            --charts helm-charts/infisical-standalone-postgres \
-            --helm-extra-args="--timeout=300s" \
-            --helm-extra-set-args="--set ingress.nginx.enabled=false --set infisical.autoDatabaseSchemaMigration=false --set infisical.replicaCount=1 --set infisical.image.tag=v0.132.2-postgres" \
-            --namespace infisical-standalone-postgres
   release:
-    needs: test-helm
     runs-on: ubuntu-latest
     steps:
       - name: Checkout
@@ -74,4 +19,4 @@ jobs:
       - name: Build and push helm package to Cloudsmith
         run: cd helm-charts && sh upload-infisical-core-helm-cloudsmith.sh
         env:
           CLOUDSMITH_API_KEY: ${{ secrets.CLOUDSMITH_API_KEY }}

View File

@@ -1,59 +1,27 @@
 name: Release K8 Operator Helm Chart
 on:
   workflow_dispatch:
 jobs:
-  test-helm:
-    name: Test Helm Chart
+  release-helm:
+    name: Release Helm Chart
     runs-on: ubuntu-latest
     steps:
       - name: Checkout
-        uses: actions/checkout@v3
-        with:
-          fetch-depth: 0
-      - name: Set up Helm
-        uses: azure/setup-helm@v4.2.0
+        uses: actions/checkout@v2
+      - name: Install Helm
+        uses: azure/setup-helm@v3
         with:
-          version: v3.17.0
-      - uses: actions/setup-python@v5.3.0
-        with:
-          python-version: "3.x"
-          check-latest: true
-      - name: Set up chart-testing
-        uses: helm/chart-testing-action@v2.7.0
-      - name: Run chart-testing (lint)
-        run: ct lint --config ct.yaml --charts helm-charts/secrets-operator
-      - name: Create kind cluster
-        uses: helm/kind-action@v1.12.0
-      - name: Run chart-testing (install)
-        run: ct install --config ct.yaml --charts helm-charts/secrets-operator
-  release-helm:
-    name: Release Helm Chart
-    needs: test-helm
-    runs-on: ubuntu-latest
-    steps:
-      - name: Checkout
-        uses: actions/checkout@v2
-      - name: Install Helm
-        uses: azure/setup-helm@v3
-        with:
-          version: v3.10.0
-      - name: Install python
-        uses: actions/setup-python@v4
-      - name: Install Cloudsmith CLI
-        run: pip install --upgrade cloudsmith-cli
-      - name: Build and push helm package to CloudSmith
-        run: cd helm-charts && sh upload-k8s-operator-cloudsmith.sh
-        env:
-          CLOUDSMITH_API_KEY: ${{ secrets.CLOUDSMITH_API_KEY }}
+          version: v3.10.0
+      - name: Install python
+        uses: actions/setup-python@v4
+      - name: Install Cloudsmith CLI
+        run: pip install --upgrade cloudsmith-cli
+      - name: Build and push helm package to CloudSmith
+        run: cd helm-charts && sh upload-k8s-operator-cloudsmith.sh
+        env:
+          CLOUDSMITH_API_KEY: ${{ secrets.CLOUDSMITH_API_KEY }}

View File

@@ -1,70 +1,27 @@
 name: Release Gateway Helm Chart
 on:
   workflow_dispatch:
 jobs:
-  test-helm:
-    name: Test Helm Chart
+  release-helm:
+    name: Release Helm Chart
     runs-on: ubuntu-latest
     steps:
       - name: Checkout
-        uses: actions/checkout@v3
-        with:
-          fetch-depth: 0
-      - name: Set up Helm
-        uses: azure/setup-helm@v4.2.0
+        uses: actions/checkout@v4
+      - name: Install Helm
+        uses: azure/setup-helm@v3
         with:
-          version: v3.17.0
-      - uses: actions/setup-python@v5.3.0
-        with:
-          python-version: "3.x"
-          check-latest: true
-      - name: Set up chart-testing
-        uses: helm/chart-testing-action@v2.7.0
-      - name: Run chart-testing (lint)
-        run: ct lint --config ct.yaml --charts helm-charts/infisical-gateway
-      - name: Create kind cluster
-        uses: helm/kind-action@v1.12.0
-      - name: Create namespace
-        run: kubectl create namespace infisical-gateway
-      - name: Create gateway secret
-        run: kubectl create secret generic infisical-gateway-environment --from-literal=TOKEN=my-test-token -n infisical-gateway
-      - name: Run chart-testing (install)
-        run: |
-          ct install \
-            --config ct.yaml \
-            --charts helm-charts/infisical-gateway \
-            --helm-extra-args="--timeout=300s" \
-            --namespace infisical-gateway
-  release-helm:
-    name: Release Helm Chart
-    needs: test-helm
-    runs-on: ubuntu-latest
-    steps:
-      - name: Checkout
-        uses: actions/checkout@v4
-      - name: Install Helm
-        uses: azure/setup-helm@v3
-        with:
-          version: v3.10.0
-      - name: Install python
-        uses: actions/setup-python@v4
-      - name: Install Cloudsmith CLI
-        run: pip install --upgrade cloudsmith-cli
-      - name: Build and push helm package to CloudSmith
-        run: cd helm-charts && sh upload-gateway-cloudsmith.sh
-        env:
-          CLOUDSMITH_API_KEY: ${{ secrets.CLOUDSMITH_API_KEY }}
+          version: v3.10.0
+      - name: Install python
+        uses: actions/setup-python@v4
+      - name: Install Cloudsmith CLI
+        run: pip install --upgrade cloudsmith-cli
+      - name: Build and push helm package to CloudSmith
+        run: cd helm-charts && sh upload-gateway-cloudsmith.sh
+        env:
+          CLOUDSMITH_API_KEY: ${{ secrets.CLOUDSMITH_API_KEY }}

View File

@@ -1,49 +0,0 @@
name: Run Helm Chart Tests for Gateway
on:
pull_request:
paths:
- "helm-charts/infisical-gateway/**"
- ".github/workflows/run-helm-chart-tests-infisical-gateway.yml"
jobs:
test-helm:
name: Test Helm Chart
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v3
with:
fetch-depth: 0
- name: Set up Helm
uses: azure/setup-helm@v4.2.0
with:
version: v3.17.0
- uses: actions/setup-python@v5.3.0
with:
python-version: "3.x"
check-latest: true
- name: Set up chart-testing
uses: helm/chart-testing-action@v2.7.0
- name: Run chart-testing (lint)
run: ct lint --config ct.yaml --charts helm-charts/infisical-gateway
- name: Create kind cluster
uses: helm/kind-action@v1.12.0
- name: Create namespace
run: kubectl create namespace infisical-gateway
- name: Create gateway secret
run: kubectl create secret generic infisical-gateway-environment --from-literal=TOKEN=my-test-token -n infisical-gateway
- name: Run chart-testing (install)
run: |
ct install \
--config ct.yaml \
--charts helm-charts/infisical-gateway \
--helm-extra-args="--timeout=300s" \
--namespace infisical-gateway

View File

@@ -1,61 +0,0 @@
name: Run Helm Chart Tests for Infisical Standalone Postgres
on:
pull_request:
paths:
- "helm-charts/infisical-standalone-postgres/**"
- ".github/workflows/run-helm-chart-tests-infisical-standalone-postgres.yml"
jobs:
test-helm:
name: Test Helm Chart
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v3
with:
fetch-depth: 0
- name: Set up Helm
uses: azure/setup-helm@v4.2.0
with:
version: v3.17.0
- uses: actions/setup-python@v5.3.0
with:
python-version: "3.x"
check-latest: true
- name: Add Helm repositories
run: |
helm repo add ingress-nginx https://kubernetes.github.io/ingress-nginx
helm repo add bitnami https://charts.bitnami.com/bitnami
helm repo update
- name: Set up chart-testing
uses: helm/chart-testing-action@v2.7.0
- name: Run chart-testing (lint)
run: ct lint --config ct.yaml --charts helm-charts/infisical-standalone-postgres
- name: Create kind cluster
uses: helm/kind-action@v1.12.0
- name: Create namespace
run: kubectl create namespace infisical-standalone-postgres
- name: Create Infisical secrets
run: |
kubectl create secret generic infisical-secrets \
--namespace infisical-standalone-postgres \
--from-literal=AUTH_SECRET=6c1fe4e407b8911c104518103505b218 \
--from-literal=ENCRYPTION_KEY=6c1fe4e407b8911c104518103505b218 \
--from-literal=SITE_URL=http://localhost:8080
- name: Run chart-testing (install)
run: |
ct install \
--config ct.yaml \
--charts helm-charts/infisical-standalone-postgres \
--helm-extra-args="--timeout=300s" \
--helm-extra-set-args="--set ingress.nginx.enabled=false --set infisical.autoDatabaseSchemaMigration=false --set infisical.replicaCount=1 --set infisical.image.tag=v0.132.2-postgres" \
--namespace infisical-standalone-postgres

View File

@@ -1,38 +0,0 @@
name: Run Helm Chart Tests for Secret Operator
on:
pull_request:
paths:
- "helm-charts/secrets-operator/**"
- ".github/workflows/run-helm-chart-tests-secret-operator.yml"
jobs:
test-helm:
name: Test Helm Chart
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v3
with:
fetch-depth: 0
- name: Set up Helm
uses: azure/setup-helm@v4.2.0
with:
version: v3.17.0
- uses: actions/setup-python@v5.3.0
with:
python-version: "3.x"
check-latest: true
- name: Set up chart-testing
uses: helm/chart-testing-action@v2.7.0
- name: Run chart-testing (lint)
run: ct lint --config ct.yaml --charts helm-charts/secrets-operator
- name: Create kind cluster
uses: helm/kind-action@v1.12.0
- name: Run chart-testing (install)
run: ct install --config ct.yaml --charts helm-charts/secrets-operator

View File

@@ -40,8 +40,3 @@ cli/detect/config/gitleaks.toml:gcp-api-key:578
 cli/detect/config/gitleaks.toml:gcp-api-key:579
 cli/detect/config/gitleaks.toml:gcp-api-key:581
 cli/detect/config/gitleaks.toml:gcp-api-key:582
-.github/workflows/run-helm-chart-tests-infisical-standalone-postgres.yml:generic-api-key:51
-.github/workflows/run-helm-chart-tests-infisical-standalone-postgres.yml:generic-api-key:50
-.github/workflows/helm-release-infisical-core.yml:generic-api-key:48
-.github/workflows/helm-release-infisical-core.yml:generic-api-key:47
-backend/src/services/smtp/smtp-service.ts:generic-api-key:79

View File

@@ -84,11 +84,6 @@ const getZodDefaultValue = (type: unknown, value: string | number | boolean | Ob
 }
 };
-const bigIntegerColumns: Record<string, string[]> = {
-"folder_commits": ["commitId"]
-};
 const main = async () => {
 const tables = (
 await db("information_schema.tables")
@@ -113,9 +108,6 @@ const main = async () => {
 const columnName = columnNames[colNum];
 const colInfo = columns[columnName];
 let ztype = getZodPrimitiveType(colInfo.type);
-if (bigIntegerColumns[tableName]?.includes(columnName)) {
-ztype = "z.coerce.bigint()";
-}
 if (["zodBuffer"].includes(ztype)) {
 zodImportSet.add(ztype);
 }

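The removed mapping above forced bigint columns such as folder_commits.commitId to generate z.coerce.bigint() schemas. As a rough sketch of what that schema does at runtime (assuming zod v3; not part of this diff):

import { z } from "zod";

const commitId = z.coerce.bigint();

console.log(commitId.parse("42")); // 42n - a string value from the driver is coerced to bigint
console.log(commitId.parse(7)); // 7n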
View File

@@ -3,12 +3,13 @@ import "fastify";
 import { Redis } from "ioredis";
 import { TUsers } from "@app/db/schemas";
-import { TAccessApprovalPolicyServiceFactory } from "@app/ee/services/access-approval-policy/access-approval-policy-types";
-import { TAccessApprovalRequestServiceFactory } from "@app/ee/services/access-approval-request/access-approval-request-types";
-import { TAssumePrivilegeServiceFactory } from "@app/ee/services/assume-privilege/assume-privilege-types";
-import { TAuditLogServiceFactory, TCreateAuditLogDTO } from "@app/ee/services/audit-log/audit-log-types";
-import { TAuditLogStreamServiceFactory } from "@app/ee/services/audit-log-stream/audit-log-stream-types";
-import { TCertificateAuthorityCrlServiceFactory } from "@app/ee/services/certificate-authority-crl/certificate-authority-crl-types";
+import { TAccessApprovalPolicyServiceFactory } from "@app/ee/services/access-approval-policy/access-approval-policy-service";
+import { TAccessApprovalRequestServiceFactory } from "@app/ee/services/access-approval-request/access-approval-request-service";
+import { TAssumePrivilegeServiceFactory } from "@app/ee/services/assume-privilege/assume-privilege-service";
+import { TAuditLogServiceFactory } from "@app/ee/services/audit-log/audit-log-service";
+import { TCreateAuditLogDTO } from "@app/ee/services/audit-log/audit-log-types";
+import { TAuditLogStreamServiceFactory } from "@app/ee/services/audit-log-stream/audit-log-stream-service";
+import { TCertificateAuthorityCrlServiceFactory } from "@app/ee/services/certificate-authority-crl/certificate-authority-crl-service";
 import { TCertificateEstServiceFactory } from "@app/ee/services/certificate-est/certificate-est-service";
 import { TDynamicSecretServiceFactory } from "@app/ee/services/dynamic-secret/dynamic-secret-service";
 import { TDynamicSecretLeaseServiceFactory } from "@app/ee/services/dynamic-secret-lease/dynamic-secret-lease-service";
@@ -24,25 +25,24 @@ import { TKmipServiceFactory } from "@app/ee/services/kmip/kmip-service";
 import { TLdapConfigServiceFactory } from "@app/ee/services/ldap-config/ldap-config-service";
 import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
 import { TOidcConfigServiceFactory } from "@app/ee/services/oidc/oidc-config-service";
-import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service-types";
-import { TPitServiceFactory } from "@app/ee/services/pit/pit-service";
-import { TProjectTemplateServiceFactory } from "@app/ee/services/project-template/project-template-types";
-import { TProjectUserAdditionalPrivilegeServiceFactory } from "@app/ee/services/project-user-additional-privilege/project-user-additional-privilege-types";
-import { RateLimitConfiguration, TRateLimitServiceFactory } from "@app/ee/services/rate-limit/rate-limit-types";
-import { TSamlConfigServiceFactory } from "@app/ee/services/saml-config/saml-config-types";
-import { TScimServiceFactory } from "@app/ee/services/scim/scim-types";
+import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
+import { TProjectTemplateServiceFactory } from "@app/ee/services/project-template/project-template-service";
+import { TProjectUserAdditionalPrivilegeServiceFactory } from "@app/ee/services/project-user-additional-privilege/project-user-additional-privilege-service";
+import { TRateLimitServiceFactory } from "@app/ee/services/rate-limit/rate-limit-service";
+import { RateLimitConfiguration } from "@app/ee/services/rate-limit/rate-limit-types";
+import { TSamlConfigServiceFactory } from "@app/ee/services/saml-config/saml-config-service";
+import { TScimServiceFactory } from "@app/ee/services/scim/scim-service";
 import { TSecretApprovalPolicyServiceFactory } from "@app/ee/services/secret-approval-policy/secret-approval-policy-service";
 import { TSecretApprovalRequestServiceFactory } from "@app/ee/services/secret-approval-request/secret-approval-request-service";
 import { TSecretRotationServiceFactory } from "@app/ee/services/secret-rotation/secret-rotation-service";
 import { TSecretRotationV2ServiceFactory } from "@app/ee/services/secret-rotation-v2/secret-rotation-v2-service";
 import { TSecretScanningServiceFactory } from "@app/ee/services/secret-scanning/secret-scanning-service";
-import { TSecretScanningV2ServiceFactory } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-service";
 import { TSecretSnapshotServiceFactory } from "@app/ee/services/secret-snapshot/secret-snapshot-service";
 import { TSshCertificateAuthorityServiceFactory } from "@app/ee/services/ssh/ssh-certificate-authority-service";
 import { TSshCertificateTemplateServiceFactory } from "@app/ee/services/ssh-certificate-template/ssh-certificate-template-service";
 import { TSshHostServiceFactory } from "@app/ee/services/ssh-host/ssh-host-service";
 import { TSshHostGroupServiceFactory } from "@app/ee/services/ssh-host-group/ssh-host-group-service";
-import { TTrustedIpServiceFactory } from "@app/ee/services/trusted-ip/trusted-ip-types";
+import { TTrustedIpServiceFactory } from "@app/ee/services/trusted-ip/trusted-ip-service";
 import { TAuthMode } from "@app/server/plugins/auth/inject-identity";
 import { TApiKeyServiceFactory } from "@app/services/api-key/api-key-service";
 import { TAppConnectionServiceFactory } from "@app/services/app-connection/app-connection-service";
@@ -58,12 +58,10 @@ import { TCertificateTemplateServiceFactory } from "@app/services/certificate-te
 import { TCmekServiceFactory } from "@app/services/cmek/cmek-service";
 import { TExternalGroupOrgRoleMappingServiceFactory } from "@app/services/external-group-org-role-mapping/external-group-org-role-mapping-service";
 import { TExternalMigrationServiceFactory } from "@app/services/external-migration/external-migration-service";
-import { TFolderCommitServiceFactory } from "@app/services/folder-commit/folder-commit-service";
 import { TGroupProjectServiceFactory } from "@app/services/group-project/group-project-service";
 import { THsmServiceFactory } from "@app/services/hsm/hsm-service";
 import { TIdentityServiceFactory } from "@app/services/identity/identity-service";
 import { TIdentityAccessTokenServiceFactory } from "@app/services/identity-access-token/identity-access-token-service";
-import { TIdentityAliCloudAuthServiceFactory } from "@app/services/identity-alicloud-auth/identity-alicloud-auth-service";
 import { TIdentityAwsAuthServiceFactory } from "@app/services/identity-aws-auth/identity-aws-auth-service";
 import { TIdentityAzureAuthServiceFactory } from "@app/services/identity-azure-auth/identity-azure-auth-service";
 import { TIdentityGcpAuthServiceFactory } from "@app/services/identity-gcp-auth/identity-gcp-auth-service";
@@ -85,7 +83,6 @@ import { TOrgAdminServiceFactory } from "@app/services/org-admin/org-admin-servi
 import { TPkiAlertServiceFactory } from "@app/services/pki-alert/pki-alert-service";
 import { TPkiCollectionServiceFactory } from "@app/services/pki-collection/pki-collection-service";
 import { TPkiSubscriberServiceFactory } from "@app/services/pki-subscriber/pki-subscriber-service";
-import { TPkiTemplatesServiceFactory } from "@app/services/pki-templates/pki-templates-service";
 import { TProjectServiceFactory } from "@app/services/project/project-service";
 import { TProjectBotServiceFactory } from "@app/services/project-bot/project-bot-service";
 import { TProjectEnvServiceFactory } from "@app/services/project-env/project-env-service";
@@ -120,10 +117,6 @@ declare module "@fastify/request-context" {
 oidc?: {
 claims: Record<string, string>;
 };
-kubernetes?: {
-namespace: string;
-name: string;
-};
 };
 identityPermissionMetadata?: Record<string, unknown>; // filled by permission service
 assumedPrivilegeDetails?: { requesterId: string; actorId: string; actorType: ActorType; projectId: string };
@@ -217,7 +210,6 @@ declare module "fastify" {
 identityUa: TIdentityUaServiceFactory;
 identityKubernetesAuth: TIdentityKubernetesAuthServiceFactory;
 identityGcpAuth: TIdentityGcpAuthServiceFactory;
-identityAliCloudAuth: TIdentityAliCloudAuthServiceFactory;
 identityAwsAuth: TIdentityAwsAuthServiceFactory;
 identityAzureAuth: TIdentityAzureAuthServiceFactory;
 identityOciAuth: TIdentityOciAuthServiceFactory;
@@ -278,11 +270,7 @@ declare module "fastify" {
 microsoftTeams: TMicrosoftTeamsServiceFactory;
 assumePrivileges: TAssumePrivilegeServiceFactory;
 githubOrgSync: TGithubOrgSyncServiceFactory;
-folderCommit: TFolderCommitServiceFactory;
-pit: TPitServiceFactory;
-secretScanningV2: TSecretScanningV2ServiceFactory;
 internalCertificateAuthority: TInternalCertificateAuthorityServiceFactory;
-pkiTemplate: TPkiTemplatesServiceFactory;
 };
 // this is exclusive use for middlewares in which we need to inject data
 // everywhere else access using service layer

View File

@@ -6,9 +6,6 @@ import {
 TAccessApprovalPoliciesApprovers,
 TAccessApprovalPoliciesApproversInsert,
 TAccessApprovalPoliciesApproversUpdate,
-TAccessApprovalPoliciesBypassers,
-TAccessApprovalPoliciesBypassersInsert,
-TAccessApprovalPoliciesBypassersUpdate,
 TAccessApprovalPoliciesInsert,
 TAccessApprovalPoliciesUpdate,
 TAccessApprovalRequests,
@@ -80,24 +77,6 @@ import {
 TExternalKms,
 TExternalKmsInsert,
 TExternalKmsUpdate,
-TFolderCheckpointResources,
-TFolderCheckpointResourcesInsert,
-TFolderCheckpointResourcesUpdate,
-TFolderCheckpoints,
-TFolderCheckpointsInsert,
-TFolderCheckpointsUpdate,
-TFolderCommitChanges,
-TFolderCommitChangesInsert,
-TFolderCommitChangesUpdate,
-TFolderCommits,
-TFolderCommitsInsert,
-TFolderCommitsUpdate,
-TFolderTreeCheckpointResources,
-TFolderTreeCheckpointResourcesInsert,
-TFolderTreeCheckpointResourcesUpdate,
-TFolderTreeCheckpoints,
-TFolderTreeCheckpointsInsert,
-TFolderTreeCheckpointsUpdate,
 TGateways,
 TGatewaysInsert,
 TGatewaysUpdate,
@@ -125,9 +104,6 @@ import {
 TIdentityAccessTokens,
 TIdentityAccessTokensInsert,
 TIdentityAccessTokensUpdate,
-TIdentityAlicloudAuths,
-TIdentityAlicloudAuthsInsert,
-TIdentityAlicloudAuthsUpdate,
 TIdentityAwsAuths,
 TIdentityAwsAuthsInsert,
 TIdentityAwsAuthsUpdate,
@@ -300,9 +276,6 @@ import {
 TSecretApprovalPoliciesApprovers,
 TSecretApprovalPoliciesApproversInsert,
 TSecretApprovalPoliciesApproversUpdate,
-TSecretApprovalPoliciesBypassers,
-TSecretApprovalPoliciesBypassersInsert,
-TSecretApprovalPoliciesBypassersUpdate,
 TSecretApprovalPoliciesInsert,
 TSecretApprovalPoliciesUpdate,
 TSecretApprovalRequests,
@@ -357,24 +330,9 @@ import {
 TSecretRotationV2SecretMappingsInsert,
 TSecretRotationV2SecretMappingsUpdate,
 TSecrets,
-TSecretScanningConfigs,
-TSecretScanningConfigsInsert,
-TSecretScanningConfigsUpdate,
-TSecretScanningDataSources,
-TSecretScanningDataSourcesInsert,
-TSecretScanningDataSourcesUpdate,
-TSecretScanningFindings,
-TSecretScanningFindingsInsert,
-TSecretScanningFindingsUpdate,
 TSecretScanningGitRisks,
 TSecretScanningGitRisksInsert,
 TSecretScanningGitRisksUpdate,
-TSecretScanningResources,
-TSecretScanningResourcesInsert,
-TSecretScanningResourcesUpdate,
-TSecretScanningScans,
-TSecretScanningScansInsert,
-TSecretScanningScansUpdate,
 TSecretSharing,
 TSecretSharingInsert,
 TSecretSharingUpdate,
@@ -789,11 +747,6 @@ declare module "knex/types/tables" {
 TIdentityGcpAuthsInsert,
 TIdentityGcpAuthsUpdate
 >;
-[TableName.IdentityAliCloudAuth]: KnexOriginal.CompositeTableType<
-TIdentityAlicloudAuths,
-TIdentityAlicloudAuthsInsert,
-TIdentityAlicloudAuthsUpdate
->;
 [TableName.IdentityAwsAuth]: KnexOriginal.CompositeTableType<
 TIdentityAwsAuths,
 TIdentityAwsAuthsInsert,
@@ -867,12 +820,6 @@ declare module "knex/types/tables" {
 TAccessApprovalPoliciesApproversUpdate
 >;
-[TableName.AccessApprovalPolicyBypasser]: KnexOriginal.CompositeTableType<
-TAccessApprovalPoliciesBypassers,
-TAccessApprovalPoliciesBypassersInsert,
-TAccessApprovalPoliciesBypassersUpdate
->;
 [TableName.AccessApprovalRequest]: KnexOriginal.CompositeTableType<
 TAccessApprovalRequests,
 TAccessApprovalRequestsInsert,
@@ -896,11 +843,6 @@ declare module "knex/types/tables" {
 TSecretApprovalPoliciesApproversInsert,
 TSecretApprovalPoliciesApproversUpdate
 >;
-[TableName.SecretApprovalPolicyBypasser]: KnexOriginal.CompositeTableType<
-TSecretApprovalPoliciesBypassers,
-TSecretApprovalPoliciesBypassersInsert,
-TSecretApprovalPoliciesBypassersUpdate
->;
 [TableName.SecretApprovalRequest]: KnexOriginal.CompositeTableType<
 TSecretApprovalRequests,
 TSecretApprovalRequestsInsert,
@@ -1148,60 +1090,5 @@ declare module "knex/types/tables" {
 TGithubOrgSyncConfigsInsert,
 TGithubOrgSyncConfigsUpdate
 >;
-[TableName.FolderCommit]: KnexOriginal.CompositeTableType<
-TFolderCommits,
-TFolderCommitsInsert,
-TFolderCommitsUpdate
->;
-[TableName.FolderCommitChanges]: KnexOriginal.CompositeTableType<
-TFolderCommitChanges,
-TFolderCommitChangesInsert,
-TFolderCommitChangesUpdate
->;
-[TableName.FolderCheckpoint]: KnexOriginal.CompositeTableType<
-TFolderCheckpoints,
-TFolderCheckpointsInsert,
-TFolderCheckpointsUpdate
->;
-[TableName.FolderCheckpointResources]: KnexOriginal.CompositeTableType<
-TFolderCheckpointResources,
-TFolderCheckpointResourcesInsert,
-TFolderCheckpointResourcesUpdate
->;
-[TableName.FolderTreeCheckpoint]: KnexOriginal.CompositeTableType<
-TFolderTreeCheckpoints,
-TFolderTreeCheckpointsInsert,
-TFolderTreeCheckpointsUpdate
->;
-[TableName.FolderTreeCheckpointResources]: KnexOriginal.CompositeTableType<
-TFolderTreeCheckpointResources,
-TFolderTreeCheckpointResourcesInsert,
-TFolderTreeCheckpointResourcesUpdate
->;
-[TableName.SecretScanningDataSource]: KnexOriginal.CompositeTableType<
-TSecretScanningDataSources,
-TSecretScanningDataSourcesInsert,
-TSecretScanningDataSourcesUpdate
->;
-[TableName.SecretScanningResource]: KnexOriginal.CompositeTableType<
-TSecretScanningResources,
-TSecretScanningResourcesInsert,
-TSecretScanningResourcesUpdate
->;
-[TableName.SecretScanningScan]: KnexOriginal.CompositeTableType<
-TSecretScanningScans,
-TSecretScanningScansInsert,
-TSecretScanningScansUpdate
->;
-[TableName.SecretScanningFinding]: KnexOriginal.CompositeTableType<
-TSecretScanningFindings,
-TSecretScanningFindingsInsert,
-TSecretScanningFindingsUpdate
->;
-[TableName.SecretScanningConfig]: KnexOriginal.CompositeTableType<
-TSecretScanningConfigs,
-TSecretScanningConfigsInsert,
-TSecretScanningConfigsUpdate
->;
 }
 }

View File

@@ -1,6 +1,6 @@
 import knex, { Knex } from "knex";
-export type TDbClient = Knex;
+export type TDbClient = ReturnType<typeof initDbConnection>;
 export const initDbConnection = ({
 dbConnectionUri,
 dbRootCert,
@@ -50,8 +50,6 @@ export const initDbConnection = ({
 }
 : false
 },
-// https://knexjs.org/guide/#pool
-pool: { min: 0, max: 10 },
 migrations: {
 tableName: "infisical_migrations"
 }
@@ -72,8 +70,7 @@
 },
 migrations: {
 tableName: "infisical_migrations"
-},
-pool: { min: 0, max: 10 }
+}
 });
 });

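The removed pool option above pinned Knex's connection pool to between 0 and 10 connections. A minimal standalone sketch of that configuration, assuming the knex and pg packages (the connection string variable is illustrative):

import knex from "knex";

const db = knex({
  client: "pg",
  connection: process.env.DB_CONNECTION_URI,
  // https://knexjs.org/guide/#pool - sizing for the underlying tarn.js pool
  pool: { min: 0, max: 10 },
  migrations: { tableName: "infisical_migrations" }
});

export default db;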
View File

@@ -1,166 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";
export async function up(knex: Knex): Promise<void> {
const hasFolderCommitTable = await knex.schema.hasTable(TableName.FolderCommit);
if (!hasFolderCommitTable) {
await knex.schema.createTable(TableName.FolderCommit, (t) => {
t.uuid("id").primary().defaultTo(knex.fn.uuid());
t.bigIncrements("commitId");
t.jsonb("actorMetadata").notNullable();
t.string("actorType").notNullable();
t.string("message");
t.uuid("folderId").notNullable();
t.uuid("envId").notNullable();
t.foreign("envId").references("id").inTable(TableName.Environment).onDelete("CASCADE");
t.timestamps(true, true, true);
t.index("folderId");
t.index("envId");
});
}
const hasFolderCommitChangesTable = await knex.schema.hasTable(TableName.FolderCommitChanges);
if (!hasFolderCommitChangesTable) {
await knex.schema.createTable(TableName.FolderCommitChanges, (t) => {
t.uuid("id").primary().defaultTo(knex.fn.uuid());
t.uuid("folderCommitId").notNullable();
t.foreign("folderCommitId").references("id").inTable(TableName.FolderCommit).onDelete("CASCADE");
t.string("changeType").notNullable();
t.boolean("isUpdate").notNullable().defaultTo(false);
t.uuid("secretVersionId");
t.foreign("secretVersionId").references("id").inTable(TableName.SecretVersionV2).onDelete("CASCADE");
t.uuid("folderVersionId");
t.foreign("folderVersionId").references("id").inTable(TableName.SecretFolderVersion).onDelete("CASCADE");
t.timestamps(true, true, true);
t.index("folderCommitId");
t.index("secretVersionId");
t.index("folderVersionId");
});
}
const hasFolderCheckpointTable = await knex.schema.hasTable(TableName.FolderCheckpoint);
if (!hasFolderCheckpointTable) {
await knex.schema.createTable(TableName.FolderCheckpoint, (t) => {
t.uuid("id").primary().defaultTo(knex.fn.uuid());
t.uuid("folderCommitId").notNullable();
t.foreign("folderCommitId").references("id").inTable(TableName.FolderCommit).onDelete("CASCADE");
t.timestamps(true, true, true);
t.index("folderCommitId");
});
}
const hasFolderCheckpointResourcesTable = await knex.schema.hasTable(TableName.FolderCheckpointResources);
if (!hasFolderCheckpointResourcesTable) {
await knex.schema.createTable(TableName.FolderCheckpointResources, (t) => {
t.uuid("id").primary().defaultTo(knex.fn.uuid());
t.uuid("folderCheckpointId").notNullable();
t.foreign("folderCheckpointId").references("id").inTable(TableName.FolderCheckpoint).onDelete("CASCADE");
t.uuid("secretVersionId");
t.foreign("secretVersionId").references("id").inTable(TableName.SecretVersionV2).onDelete("CASCADE");
t.uuid("folderVersionId");
t.foreign("folderVersionId").references("id").inTable(TableName.SecretFolderVersion).onDelete("CASCADE");
t.timestamps(true, true, true);
t.index("folderCheckpointId");
t.index("secretVersionId");
t.index("folderVersionId");
});
}
const hasFolderTreeCheckpointTable = await knex.schema.hasTable(TableName.FolderTreeCheckpoint);
if (!hasFolderTreeCheckpointTable) {
await knex.schema.createTable(TableName.FolderTreeCheckpoint, (t) => {
t.uuid("id").primary().defaultTo(knex.fn.uuid());
t.uuid("folderCommitId").notNullable();
t.foreign("folderCommitId").references("id").inTable(TableName.FolderCommit).onDelete("CASCADE");
t.timestamps(true, true, true);
t.index("folderCommitId");
});
}
const hasFolderTreeCheckpointResourcesTable = await knex.schema.hasTable(TableName.FolderTreeCheckpointResources);
if (!hasFolderTreeCheckpointResourcesTable) {
await knex.schema.createTable(TableName.FolderTreeCheckpointResources, (t) => {
t.uuid("id").primary().defaultTo(knex.fn.uuid());
t.uuid("folderTreeCheckpointId").notNullable();
t.foreign("folderTreeCheckpointId").references("id").inTable(TableName.FolderTreeCheckpoint).onDelete("CASCADE");
t.uuid("folderId").notNullable();
t.uuid("folderCommitId").notNullable();
t.foreign("folderCommitId").references("id").inTable(TableName.FolderCommit).onDelete("CASCADE");
t.timestamps(true, true, true);
t.index("folderTreeCheckpointId");
t.index("folderId");
t.index("folderCommitId");
});
}
if (!hasFolderCommitTable) {
await createOnUpdateTrigger(knex, TableName.FolderCommit);
}
if (!hasFolderCommitChangesTable) {
await createOnUpdateTrigger(knex, TableName.FolderCommitChanges);
}
if (!hasFolderCheckpointTable) {
await createOnUpdateTrigger(knex, TableName.FolderCheckpoint);
}
if (!hasFolderCheckpointResourcesTable) {
await createOnUpdateTrigger(knex, TableName.FolderCheckpointResources);
}
if (!hasFolderTreeCheckpointTable) {
await createOnUpdateTrigger(knex, TableName.FolderTreeCheckpoint);
}
if (!hasFolderTreeCheckpointResourcesTable) {
await createOnUpdateTrigger(knex, TableName.FolderTreeCheckpointResources);
}
}
export async function down(knex: Knex): Promise<void> {
const hasFolderCheckpointResourcesTable = await knex.schema.hasTable(TableName.FolderCheckpointResources);
const hasFolderTreeCheckpointResourcesTable = await knex.schema.hasTable(TableName.FolderTreeCheckpointResources);
const hasFolderCommitTable = await knex.schema.hasTable(TableName.FolderCommit);
const hasFolderCommitChangesTable = await knex.schema.hasTable(TableName.FolderCommitChanges);
const hasFolderTreeCheckpointTable = await knex.schema.hasTable(TableName.FolderTreeCheckpoint);
const hasFolderCheckpointTable = await knex.schema.hasTable(TableName.FolderCheckpoint);
if (hasFolderTreeCheckpointResourcesTable) {
await dropOnUpdateTrigger(knex, TableName.FolderTreeCheckpointResources);
await knex.schema.dropTableIfExists(TableName.FolderTreeCheckpointResources);
}
if (hasFolderCheckpointResourcesTable) {
await dropOnUpdateTrigger(knex, TableName.FolderCheckpointResources);
await knex.schema.dropTableIfExists(TableName.FolderCheckpointResources);
}
if (hasFolderTreeCheckpointTable) {
await dropOnUpdateTrigger(knex, TableName.FolderTreeCheckpoint);
await knex.schema.dropTableIfExists(TableName.FolderTreeCheckpoint);
}
if (hasFolderCheckpointTable) {
await dropOnUpdateTrigger(knex, TableName.FolderCheckpoint);
await knex.schema.dropTableIfExists(TableName.FolderCheckpoint);
}
if (hasFolderCommitChangesTable) {
await dropOnUpdateTrigger(knex, TableName.FolderCommitChanges);
await knex.schema.dropTableIfExists(TableName.FolderCommitChanges);
}
if (hasFolderCommitTable) {
await dropOnUpdateTrigger(knex, TableName.FolderCommit);
await knex.schema.dropTableIfExists(TableName.FolderCommit);
}
}
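
Migrations like the one above can be applied with Knex's programmatic migration API; a minimal sketch, assuming the standard knex package and a hypothetical migrations directory:

import knex from "knex";

const db = knex({
  client: "pg",
  connection: process.env.DB_CONNECTION_URI,
  migrations: { tableName: "infisical_migrations", directory: "./src/db/migrations" }
});

async function migrate() {
  const [batchNo, applied] = await db.migrate.latest(); // runs each pending up()
  console.log(`Applied batch ${batchNo}:`, applied);
  // db.migrate.rollback() would call down() for the most recent batch
  await db.destroy();
}

migrate().catch((err) => {
  console.error(err);
  process.exit(1);
});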

View File

@@ -1,107 +0,0 @@
import { Knex } from "knex";
import { TableName } from "@app/db/schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "@app/db/utils";
import {
SecretScanningFindingStatus,
SecretScanningScanStatus
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasTable(TableName.SecretScanningDataSource))) {
await knex.schema.createTable(TableName.SecretScanningDataSource, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.string("externalId").index(); // if we need a unique way of identifying this data source from an external resource
t.string("name", 48).notNullable();
t.string("description");
t.string("type").notNullable();
t.jsonb("config").notNullable();
t.binary("encryptedCredentials"); // webhook credentials, etc.
t.uuid("connectionId");
t.boolean("isAutoScanEnabled").defaultTo(true);
t.foreign("connectionId").references("id").inTable(TableName.AppConnection);
t.string("projectId").notNullable();
t.foreign("projectId").references("id").inTable(TableName.Project).onDelete("CASCADE");
t.timestamps(true, true, true);
t.boolean("isDisconnected").notNullable().defaultTo(false);
t.unique(["projectId", "name"]);
});
await createOnUpdateTrigger(knex, TableName.SecretScanningDataSource);
}
if (!(await knex.schema.hasTable(TableName.SecretScanningResource))) {
await knex.schema.createTable(TableName.SecretScanningResource, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.string("externalId").notNullable();
t.string("name").notNullable();
t.string("type").notNullable();
t.uuid("dataSourceId").notNullable();
t.foreign("dataSourceId").references("id").inTable(TableName.SecretScanningDataSource).onDelete("CASCADE");
t.timestamps(true, true, true);
t.unique(["dataSourceId", "externalId"]);
});
await createOnUpdateTrigger(knex, TableName.SecretScanningResource);
}
if (!(await knex.schema.hasTable(TableName.SecretScanningScan))) {
await knex.schema.createTable(TableName.SecretScanningScan, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.string("status").notNullable().defaultTo(SecretScanningScanStatus.Queued);
t.string("statusMessage", 1024);
t.string("type").notNullable();
t.uuid("resourceId").notNullable();
t.foreign("resourceId").references("id").inTable(TableName.SecretScanningResource).onDelete("CASCADE");
t.timestamp("createdAt").defaultTo(knex.fn.now());
});
}
if (!(await knex.schema.hasTable(TableName.SecretScanningFinding))) {
await knex.schema.createTable(TableName.SecretScanningFinding, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.string("dataSourceName").notNullable();
t.string("dataSourceType").notNullable();
t.string("resourceName").notNullable();
t.string("resourceType").notNullable();
t.string("rule").notNullable();
t.string("severity").notNullable();
t.string("status").notNullable().defaultTo(SecretScanningFindingStatus.Unresolved);
t.string("remarks");
t.string("fingerprint").notNullable();
t.jsonb("details").notNullable();
t.string("projectId").notNullable();
t.foreign("projectId").references("id").inTable(TableName.Project).onDelete("CASCADE");
t.uuid("scanId");
t.foreign("scanId").references("id").inTable(TableName.SecretScanningScan).onDelete("SET NULL");
t.timestamps(true, true, true);
t.unique(["projectId", "fingerprint"]);
});
await createOnUpdateTrigger(knex, TableName.SecretScanningFinding);
}
if (!(await knex.schema.hasTable(TableName.SecretScanningConfig))) {
await knex.schema.createTable(TableName.SecretScanningConfig, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.string("projectId").notNullable().unique();
t.foreign("projectId").references("id").inTable(TableName.Project).onDelete("CASCADE");
t.string("content", 5000);
t.timestamps(true, true, true);
});
await createOnUpdateTrigger(knex, TableName.SecretScanningConfig);
}
}
export async function down(knex: Knex): Promise<void> {
await knex.schema.dropTableIfExists(TableName.SecretScanningFinding);
await dropOnUpdateTrigger(knex, TableName.SecretScanningFinding);
await knex.schema.dropTableIfExists(TableName.SecretScanningScan);
await knex.schema.dropTableIfExists(TableName.SecretScanningResource);
await dropOnUpdateTrigger(knex, TableName.SecretScanningResource);
await knex.schema.dropTableIfExists(TableName.SecretScanningDataSource);
await dropOnUpdateTrigger(knex, TableName.SecretScanningDataSource);
await knex.schema.dropTableIfExists(TableName.SecretScanningConfig);
await dropOnUpdateTrigger(knex, TableName.SecretScanningConfig);
}

View File

@@ -1,48 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasTable(TableName.AccessApprovalPolicyBypasser))) {
await knex.schema.createTable(TableName.AccessApprovalPolicyBypasser, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.uuid("bypasserGroupId").nullable();
t.foreign("bypasserGroupId").references("id").inTable(TableName.Groups).onDelete("CASCADE");
t.uuid("bypasserUserId").nullable();
t.foreign("bypasserUserId").references("id").inTable(TableName.Users).onDelete("CASCADE");
t.uuid("policyId").notNullable();
t.foreign("policyId").references("id").inTable(TableName.AccessApprovalPolicy).onDelete("CASCADE");
t.timestamps(true, true, true);
});
await createOnUpdateTrigger(knex, TableName.AccessApprovalPolicyBypasser);
}
if (!(await knex.schema.hasTable(TableName.SecretApprovalPolicyBypasser))) {
await knex.schema.createTable(TableName.SecretApprovalPolicyBypasser, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.uuid("bypasserGroupId").nullable();
t.foreign("bypasserGroupId").references("id").inTable(TableName.Groups).onDelete("CASCADE");
t.uuid("bypasserUserId").nullable();
t.foreign("bypasserUserId").references("id").inTable(TableName.Users).onDelete("CASCADE");
t.uuid("policyId").notNullable();
t.foreign("policyId").references("id").inTable(TableName.SecretApprovalPolicy).onDelete("CASCADE");
t.timestamps(true, true, true);
});
await createOnUpdateTrigger(knex, TableName.SecretApprovalPolicyBypasser);
}
}
export async function down(knex: Knex): Promise<void> {
await knex.schema.dropTableIfExists(TableName.SecretApprovalPolicyBypasser);
await knex.schema.dropTableIfExists(TableName.AccessApprovalPolicyBypasser);
await dropOnUpdateTrigger(knex, TableName.SecretApprovalPolicyBypasser);
await dropOnUpdateTrigger(knex, TableName.AccessApprovalPolicyBypasser);
}

View File

@@ -1,21 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const hasColumn = await knex.schema.hasColumn(TableName.DynamicSecret, "usernameTemplate");
if (!hasColumn) {
await knex.schema.alterTable(TableName.DynamicSecret, (t) => {
t.string("usernameTemplate").nullable();
});
}
}
export async function down(knex: Knex): Promise<void> {
const hasColumn = await knex.schema.hasColumn(TableName.DynamicSecret, "usernameTemplate");
if (hasColumn) {
await knex.schema.alterTable(TableName.DynamicSecret, (t) => {
t.dropColumn("usernameTemplate");
});
}
}

View File

@@ -1,19 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasColumn(TableName.SecretFolderVersion, "description"))) {
await knex.schema.alterTable(TableName.SecretFolderVersion, (t) => {
t.string("description").nullable();
});
}
}
export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasColumn(TableName.SecretFolderVersion, "description")) {
await knex.schema.alterTable(TableName.SecretFolderVersion, (t) => {
t.dropColumn("description");
});
}
}

View File

@@ -1,24 +0,0 @@
import slugify from "@sindresorhus/slugify";
import { Knex } from "knex";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const hasNameCol = await knex.schema.hasColumn(TableName.CertificateTemplate, "name");
if (hasNameCol) {
const templates = await knex(TableName.CertificateTemplate).select("id", "name");
await Promise.all(
templates.map((el) => {
const slugifiedName = el.name
? slugify(`${el.name.slice(0, 16)}-${alphaNumericNanoId(8)}`)
: slugify(alphaNumericNanoId(12));
return knex(TableName.CertificateTemplate).where({ id: el.id }).update({ name: slugifiedName });
})
);
}
}
export async function down(): Promise<void> {}

View File

@@ -1,63 +0,0 @@
import { Knex } from "knex";
import { ApprovalStatus } from "@app/ee/services/secret-approval-request/secret-approval-request-types";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const hasPrivilegeDeletedAtColumn = await knex.schema.hasColumn(
TableName.AccessApprovalRequest,
"privilegeDeletedAt"
);
const hasStatusColumn = await knex.schema.hasColumn(TableName.AccessApprovalRequest, "status");
if (!hasPrivilegeDeletedAtColumn) {
await knex.schema.alterTable(TableName.AccessApprovalRequest, (t) => {
t.timestamp("privilegeDeletedAt").nullable();
});
}
if (!hasStatusColumn) {
await knex.schema.alterTable(TableName.AccessApprovalRequest, (t) => {
t.string("status").defaultTo(ApprovalStatus.PENDING).notNullable();
});
// Update existing rows based on business logic
// If privilegeId is not null, set status to "approved"
await knex(TableName.AccessApprovalRequest).whereNotNull("privilegeId").update({ status: ApprovalStatus.APPROVED });
// If privilegeId is null and there's a rejected reviewer, set to "rejected"
const rejectedRequestIds = await knex(TableName.AccessApprovalRequestReviewer)
.select("requestId")
.where("status", "rejected")
.distinct()
.pluck("requestId");
if (rejectedRequestIds.length > 0) {
await knex(TableName.AccessApprovalRequest)
.whereNull("privilegeId")
.whereIn("id", rejectedRequestIds)
.update({ status: ApprovalStatus.REJECTED });
}
}
}
export async function down(knex: Knex): Promise<void> {
const hasPrivilegeDeletedAtColumn = await knex.schema.hasColumn(
TableName.AccessApprovalRequest,
"privilegeDeletedAt"
);
const hasStatusColumn = await knex.schema.hasColumn(TableName.AccessApprovalRequest, "status");
if (hasPrivilegeDeletedAtColumn) {
await knex.schema.alterTable(TableName.AccessApprovalRequest, (t) => {
t.dropColumn("privilegeDeletedAt");
});
}
if (hasStatusColumn) {
await knex.schema.alterTable(TableName.AccessApprovalRequest, (t) => {
t.dropColumn("status");
});
}
}

View File

@@ -1,139 +0,0 @@
/* eslint-disable no-await-in-loop */
import { Knex } from "knex";
import { chunkArray } from "@app/lib/fn";
import { selectAllTableCols } from "@app/lib/knex";
import { logger } from "@app/lib/logger";
import { SecretType, TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
logger.info("Starting secret version fix migration");
// Get all shared secret IDs first to optimize versions query
const secretIds = await knex(TableName.SecretV2)
.where("type", SecretType.Shared)
.select("id")
.then((rows) => rows.map((row) => row.id));
logger.info(`Found ${secretIds.length} shared secrets to process`);
if (secretIds.length === 0) {
logger.info("No shared secrets found");
return;
}
const secretIdChunks = chunkArray(secretIds, 5000);
for (let chunkIndex = 0; chunkIndex < secretIdChunks.length; chunkIndex += 1) {
const currentSecretIds = secretIdChunks[chunkIndex];
logger.info(`Processing chunk ${chunkIndex + 1} of ${secretIdChunks.length}`);
// Get secrets and versions for current chunk
const [sharedSecrets, allVersions] = await Promise.all([
knex(TableName.SecretV2).whereIn("id", currentSecretIds).select(selectAllTableCols(TableName.SecretV2)),
knex(TableName.SecretVersionV2).whereIn("secretId", currentSecretIds).select("secretId", "version")
]);
const versionsBySecretId = new Map<string, number[]>();
allVersions.forEach((v) => {
const versions = versionsBySecretId.get(v.secretId);
if (versions) {
versions.push(v.version);
} else {
versionsBySecretId.set(v.secretId, [v.version]);
}
});
const versionsToAdd = [];
const secretsToUpdate = [];
// Process each shared secret
for (const secret of sharedSecrets) {
const existingVersions = versionsBySecretId.get(secret.id) || [];
if (existingVersions.length === 0) {
// No versions exist - add current version
versionsToAdd.push({
secretId: secret.id,
version: secret.version,
key: secret.key,
encryptedValue: secret.encryptedValue,
encryptedComment: secret.encryptedComment,
reminderNote: secret.reminderNote,
reminderRepeatDays: secret.reminderRepeatDays,
skipMultilineEncoding: secret.skipMultilineEncoding,
metadata: secret.metadata,
folderId: secret.folderId,
actorType: "platform"
});
} else {
const latestVersion = Math.max(...existingVersions);
if (latestVersion !== secret.version) {
// Latest version doesn't match - create new version and update secret
const nextVersion = latestVersion + 1;
versionsToAdd.push({
secretId: secret.id,
version: nextVersion,
key: secret.key,
encryptedValue: secret.encryptedValue,
encryptedComment: secret.encryptedComment,
reminderNote: secret.reminderNote,
reminderRepeatDays: secret.reminderRepeatDays,
skipMultilineEncoding: secret.skipMultilineEncoding,
metadata: secret.metadata,
folderId: secret.folderId,
actorType: "platform"
});
secretsToUpdate.push({
id: secret.id,
newVersion: nextVersion
});
}
}
}
logger.info(
`Chunk ${chunkIndex + 1}: Adding ${versionsToAdd.length} versions, updating ${secretsToUpdate.length} secrets`
);
// Batch insert new versions
if (versionsToAdd.length > 0) {
const insertBatches = chunkArray(versionsToAdd, 9000);
for (let i = 0; i < insertBatches.length; i += 1) {
await knex.batchInsert(TableName.SecretVersionV2, insertBatches[i]);
}
}
if (secretsToUpdate.length > 0) {
const updateBatches = chunkArray(secretsToUpdate, 1000);
for (const updateBatch of updateBatches) {
const ids = updateBatch.map((u) => u.id);
const versionCases = updateBatch.map((u) => `WHEN '${u.id}' THEN ${u.newVersion}`).join(" ");
await knex.raw(
`
UPDATE ${TableName.SecretV2}
SET version = CASE id ${versionCases} END,
"updatedAt" = NOW()
WHERE id IN (${ids.map(() => "?").join(",")})
`,
ids
);
}
}
}
logger.info("Secret version fix migration completed");
}
export async function down(): Promise<void> {
logger.info("Rollback not implemented for secret version fix migration");
// Note: Rolling back this migration would be complex and potentially destructive
// as it would require tracking which version entries were added
}

View File

@@ -1,345 +0,0 @@
import { Knex } from "knex";
import { chunkArray } from "@app/lib/fn";
import { selectAllTableCols } from "@app/lib/knex";
import { logger } from "@app/lib/logger";
import { ActorType } from "@app/services/auth/auth-type";
import { ChangeType } from "@app/services/folder-commit/folder-commit-service";
import {
ProjectType,
SecretType,
TableName,
TFolderCheckpoints,
TFolderCommits,
TFolderTreeCheckpoints,
TSecretFolders
} from "../schemas";
const sortFoldersByHierarchy = (folders: TSecretFolders[]) => {
// Create a map for quick lookup of children by parent ID
const childrenMap = new Map<string, TSecretFolders[]>();
// Set of all folder IDs
const allFolderIds = new Set<string>();
// Build the set of all folder IDs
folders.forEach((folder) => {
if (folder.id) {
allFolderIds.add(folder.id);
}
});
// Group folders by their parentId
folders.forEach((folder) => {
if (folder.parentId) {
const children = childrenMap.get(folder.parentId) || [];
children.push(folder);
childrenMap.set(folder.parentId, children);
}
});
// Find root folders - those with no parentId or with a parentId that doesn't exist
const rootFolders = folders.filter((folder) => !folder.parentId || !allFolderIds.has(folder.parentId));
// Process each level of the hierarchy
const result = [];
let currentLevel = rootFolders;
while (currentLevel.length > 0) {
result.push(...currentLevel);
const nextLevel = [];
for (const folder of currentLevel) {
if (folder.id) {
const children = childrenMap.get(folder.id) || [];
nextLevel.push(...children);
}
}
currentLevel = nextLevel;
}
return result.reverse();
};
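// Illustrative sketch (not part of the original migration): the breadth-first walk above, followed
// by reverse(), yields the deepest folders first. A trimmed-down folder shape is used here instead
// of the real TSecretFolders type.
type FolderNode = { id: string; parentId?: string | null };
function deepestFirst(folders: FolderNode[]): FolderNode[] {
  const childrenByParent = new Map<string, FolderNode[]>();
  const ids = new Set(folders.map((f) => f.id));
  folders.forEach((f) => {
    if (f.parentId) childrenByParent.set(f.parentId, [...(childrenByParent.get(f.parentId) ?? []), f]);
  });
  const result: FolderNode[] = [];
  // Roots are folders with no parent, or whose parent is missing from the input set
  let level = folders.filter((f) => !f.parentId || !ids.has(f.parentId));
  while (level.length > 0) {
    result.push(...level);
    level = level.flatMap((f) => childrenByParent.get(f.id) ?? []);
  }
  return result.reverse();
}
// deepestFirst([{ id: "root", parentId: null }, { id: "a", parentId: "root" }, { id: "a1", parentId: "a" }])
//   -> order of ids: ["a1", "a", "root"]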
const getSecretsByFolderIds = async (knex: Knex, folderIds: string[]): Promise<Record<string, string[]>> => {
const secrets = await knex(TableName.SecretV2)
.whereIn(`${TableName.SecretV2}.folderId`, folderIds)
.where(`${TableName.SecretV2}.type`, SecretType.Shared)
.join<TableName.SecretVersionV2>(TableName.SecretVersionV2, (queryBuilder) => {
void queryBuilder
.on(`${TableName.SecretVersionV2}.secretId`, `${TableName.SecretV2}.id`)
.andOn(`${TableName.SecretVersionV2}.version`, `${TableName.SecretV2}.version`);
})
.select(selectAllTableCols(TableName.SecretV2))
.select(knex.ref("id").withSchema(TableName.SecretVersionV2).as("secretVersionId"));
const secretsMap: Record<string, string[]> = {};
secrets.forEach((secret) => {
if (!secretsMap[secret.folderId]) {
secretsMap[secret.folderId] = [];
}
secretsMap[secret.folderId].push(secret.secretVersionId);
});
return secretsMap;
};
const getFoldersByParentIds = async (knex: Knex, parentIds: string[]): Promise<Record<string, string[]>> => {
const folders = await knex(TableName.SecretFolder)
.whereIn(`${TableName.SecretFolder}.parentId`, parentIds)
.where(`${TableName.SecretFolder}.isReserved`, false)
.join<TableName.SecretFolderVersion>(TableName.SecretFolderVersion, (queryBuilder) => {
void queryBuilder
.on(`${TableName.SecretFolderVersion}.folderId`, `${TableName.SecretFolder}.id`)
.andOn(`${TableName.SecretFolderVersion}.version`, `${TableName.SecretFolder}.version`);
})
.select(selectAllTableCols(TableName.SecretFolder))
.select(knex.ref("id").withSchema(TableName.SecretFolderVersion).as("folderVersionId"));
const foldersMap: Record<string, string[]> = {};
folders.forEach((folder) => {
if (!folder.parentId) {
return;
}
if (!foldersMap[folder.parentId]) {
foldersMap[folder.parentId] = [];
}
foldersMap[folder.parentId].push(folder.folderVersionId);
});
return foldersMap;
};
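// Illustrative sketch (not part of the original migration): both helpers above join the versions
// table on the foreign key AND the version column, so each row is paired only with the version row
// that matches its current version. Placeholder table names are used here instead of the TableName enum.
const currentSecretVersionIds = async (db: Knex): Promise<Array<{ id: string; versionId: string }>> =>
  db("secrets_v2 as s")
    .join("secret_versions_v2 as v", (queryBuilder) => {
      void queryBuilder.on("v.secretId", "s.id").andOn("v.version", "s.version");
    })
    .select("s.id as id", "v.id as versionId");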
export async function up(knex: Knex): Promise<void> {
logger.info("Initializing folder commits");
const hasFolderCommitTable = await knex.schema.hasTable(TableName.FolderCommit);
if (hasFolderCommitTable) {
// Get Projects to Initialize
const projects = await knex(TableName.Project)
.where(`${TableName.Project}.version`, 3)
.where(`${TableName.Project}.type`, ProjectType.SecretManager)
.select(selectAllTableCols(TableName.Project));
logger.info(`Found ${projects.length} projects to initialize`);
// Process Projects in batches of 100
const batches = chunkArray(projects, 100);
let i = 0;
for (const batch of batches) {
i += 1;
logger.info(`Processing project batch ${i} of ${batches.length}`);
let foldersCommitsList = [];
const rootFoldersMap: Record<string, string> = {};
const envRootFoldersMap: Record<string, string> = {};
// Get all folders for the projects in this batch
// eslint-disable-next-line no-await-in-loop
const folders = await knex(TableName.SecretFolder)
.join(TableName.Environment, `${TableName.SecretFolder}.envId`, `${TableName.Environment}.id`)
.whereIn(
`${TableName.Environment}.projectId`,
batch.map((project) => project.id)
)
.where(`${TableName.SecretFolder}.isReserved`, false)
.select(selectAllTableCols(TableName.SecretFolder));
logger.info(`Found ${folders.length} folders to initialize in project batch ${i} of ${batches.length}`);
// Sort folders by hierarchy (deepest folders first, since sortFoldersByHierarchy reverses the level order)
const sortedFolders = sortFoldersByHierarchy(folders);
// eslint-disable-next-line no-await-in-loop
const folderSecretsMap = await getSecretsByFolderIds(
knex,
sortedFolders.map((folder) => folder.id)
);
// eslint-disable-next-line no-await-in-loop
const folderFoldersMap = await getFoldersByParentIds(
knex,
sortedFolders.map((folder) => folder.id)
);
// Get folder commit changes
for (const folder of sortedFolders) {
const subFolderVersionIds = folderFoldersMap[folder.id];
const secretVersionIds = folderSecretsMap[folder.id];
const changes = [];
if (subFolderVersionIds) {
changes.push(
...subFolderVersionIds.map((folderVersionId) => ({
folderId: folder.id,
changeType: ChangeType.ADD,
secretVersionId: undefined,
folderVersionId,
isUpdate: false
}))
);
}
if (secretVersionIds) {
changes.push(
...secretVersionIds.map((secretVersionId) => ({
folderId: folder.id,
changeType: ChangeType.ADD,
secretVersionId,
folderVersionId: undefined,
isUpdate: false
}))
);
}
if (changes.length > 0) {
const folderCommit = {
commit: {
actorMetadata: {},
actorType: ActorType.PLATFORM,
message: "Initialized folder",
folderId: folder.id,
envId: folder.envId
},
changes
};
foldersCommitsList.push(folderCommit);
if (!folder.parentId) {
rootFoldersMap[folder.id] = folder.envId;
envRootFoldersMap[folder.envId] = folder.id;
}
}
}
logger.info(`Retrieved folder changes for project batch ${i} of ${batches.length}`);
const filteredBrokenProjectFolders: string[] = [];
foldersCommitsList = foldersCommitsList.filter((folderCommit) => {
if (!envRootFoldersMap[folderCommit.commit.envId]) {
filteredBrokenProjectFolders.push(folderCommit.commit.folderId);
return false;
}
return true;
});
logger.info(
`Filtered ${filteredBrokenProjectFolders.length} broken project folders: ${JSON.stringify(filteredBrokenProjectFolders)}`
);
// Insert New Commits in batches of 9000
const newCommits = foldersCommitsList.map((folderCommit) => folderCommit.commit);
const commitBatches = chunkArray(newCommits, 9000);
let j = 0;
for (const commitBatch of commitBatches) {
j += 1;
logger.info(`Inserting folder commits - batch ${j} of ${commitBatches.length}`);
// Insert the folder commits for this batch
// eslint-disable-next-line no-await-in-loop
const newCommitsInserted = (await knex
.batchInsert(TableName.FolderCommit, commitBatch)
.returning("*")) as TFolderCommits[];
logger.info(`Finished inserting folder commits - batch ${j} of ${commitBatches.length}`);
const newCommitsMap: Record<string, string> = {};
const newCommitsMapInverted: Record<string, string> = {};
const newCheckpointsMap: Record<string, string> = {};
newCommitsInserted.forEach((commit) => {
newCommitsMap[commit.folderId] = commit.id;
newCommitsMapInverted[commit.id] = commit.folderId;
});
// Create folder checkpoints
// eslint-disable-next-line no-await-in-loop
const newCheckpoints = (await knex
.batchInsert(
TableName.FolderCheckpoint,
Object.values(newCommitsMap).map((commitId) => ({
folderCommitId: commitId
}))
)
.returning("*")) as TFolderCheckpoints[];
logger.info(`Finished inserting folder checkpoints - batch ${j} of ${commitBatches.length}`);
newCheckpoints.forEach((checkpoint) => {
newCheckpointsMap[newCommitsMapInverted[checkpoint.folderCommitId]] = checkpoint.id;
});
// Create folder commit changes
// eslint-disable-next-line no-await-in-loop
await knex.batchInsert(
TableName.FolderCommitChanges,
foldersCommitsList
.map((folderCommit) => folderCommit.changes)
.flat()
.map((change) => ({
folderCommitId: newCommitsMap[change.folderId],
changeType: change.changeType,
secretVersionId: change.secretVersionId,
folderVersionId: change.folderVersionId,
isUpdate: false
}))
);
logger.info(`Finished inserting folder commit changes - batch ${j} of ${commitBatches.length}`);
// Create folder checkpoint resources
// eslint-disable-next-line no-await-in-loop
await knex.batchInsert(
TableName.FolderCheckpointResources,
foldersCommitsList
.map((folderCommit) => folderCommit.changes)
.flat()
.map((change) => ({
folderCheckpointId: newCheckpointsMap[change.folderId],
folderVersionId: change.folderVersionId,
secretVersionId: change.secretVersionId
}))
);
logger.info(`Finished inserting folder checkpoint resources - batch ${j} of ${commitBatches.length}`);
// Create Folder Tree Checkpoint
// eslint-disable-next-line no-await-in-loop
const newTreeCheckpoints = (await knex
.batchInsert(
TableName.FolderTreeCheckpoint,
Object.keys(rootFoldersMap).map((folderId) => ({
folderCommitId: newCommitsMap[folderId]
}))
)
.returning("*")) as TFolderTreeCheckpoints[];
logger.info(`Finished inserting folder tree checkpoints - batch ${j} of ${commitBatches.length}`);
const newTreeCheckpointsMap: Record<string, string> = {};
newTreeCheckpoints.forEach((checkpoint) => {
newTreeCheckpointsMap[rootFoldersMap[newCommitsMapInverted[checkpoint.folderCommitId]]] = checkpoint.id;
});
// Create Folder Tree Checkpoint Resources
// eslint-disable-next-line no-await-in-loop
await knex
.batchInsert(
TableName.FolderTreeCheckpointResources,
newCommitsInserted.map((folderCommit) => ({
folderTreeCheckpointId: newTreeCheckpointsMap[folderCommit.envId],
folderId: folderCommit.folderId,
folderCommitId: folderCommit.id
}))
)
.returning("*");
logger.info(`Finished inserting folder tree checkpoint resources - batch ${j} of ${commitBatches.length}`);
}
}
}
logger.info("Folder commits initialized");
}
export async function down(knex: Knex): Promise<void> {
const hasFolderCommitTable = await knex.schema.hasTable(TableName.FolderCommit);
if (hasFolderCommitTable) {
// delete all existing entries
await knex(TableName.FolderCommit).del();
}
}
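// Illustrative sketch (not part of the original migration): chunkArray from @app/lib/fn is not shown
// in this diff; the batching used above only needs fixed-size slicing, roughly like this.
const chunkArrayExample = <T>(items: T[], size: number): T[][] => {
  const chunks: T[][] = [];
  for (let i = 0; i < items.length; i += size) {
    chunks.push(items.slice(i, i + size));
  }
  return chunks;
};
// chunkArrayExample([1, 2, 3, 4, 5], 2) -> [[1, 2], [3, 4], [5]]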

View File

@ -1,44 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const hasStepColumn = await knex.schema.hasColumn(TableName.AccessApprovalPolicyApprover, "sequence");
const hasApprovalRequiredColumn = await knex.schema.hasColumn(
TableName.AccessApprovalPolicyApprover,
"approvalsRequired"
);
if (!hasStepColumn || !hasApprovalRequiredColumn) {
await knex.schema.alterTable(TableName.AccessApprovalPolicyApprover, (t) => {
if (!hasStepColumn) t.integer("sequence").defaultTo(1);
if (!hasApprovalRequiredColumn) t.integer("approvalsRequired").nullable();
});
}
// set rejected status for all access requests that were rejected by a reviewer but still have status pending
const subquery = knex(TableName.AccessApprovalRequest)
.leftJoin(
TableName.AccessApprovalRequestReviewer,
`${TableName.AccessApprovalRequestReviewer}.requestId`,
`${TableName.AccessApprovalRequest}.id`
)
.where(`${TableName.AccessApprovalRequest}.status` as "status", "pending")
.where(`${TableName.AccessApprovalRequestReviewer}.status` as "status", "rejected")
.select(`${TableName.AccessApprovalRequest}.id`);
await knex(TableName.AccessApprovalRequest).where("id", "in", subquery).update("status", "rejected");
}
export async function down(knex: Knex): Promise<void> {
const hasStepColumn = await knex.schema.hasColumn(TableName.AccessApprovalPolicyApprover, "sequence");
const hasApprovalRequiredColumn = await knex.schema.hasColumn(
TableName.AccessApprovalPolicyApprover,
"approvalsRequired"
);
if (hasStepColumn || hasApprovalRequiredColumn) {
await knex.schema.alterTable(TableName.AccessApprovalPolicyApprover, (t) => {
if (hasStepColumn) t.dropColumn("sequence");
if (hasApprovalRequiredColumn) t.dropColumn("approvalsRequired");
});
}
}
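// Illustrative sketch (not part of the original migration): the subquery above selects every request
// that is still "pending" but has at least one "rejected" review, then flips those requests to
// "rejected". The generated SQL is roughly the following (column quoting is approximate; the real
// query is built through knex and the TableName enum).
const approximateBackfillSql = `
  UPDATE access_approval_requests
  SET status = 'rejected'
  WHERE id IN (
    SELECT access_approval_requests.id
    FROM access_approval_requests
    LEFT JOIN access_approval_requests_reviewers
      ON access_approval_requests_reviewers."requestId" = access_approval_requests.id
    WHERE access_approval_requests.status = 'pending'
      AND access_approval_requests_reviewers.status = 'rejected'
  )
`;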

View File

@ -1,23 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const hasTokenReviewModeColumn = await knex.schema.hasColumn(TableName.IdentityKubernetesAuth, "tokenReviewMode");
if (!hasTokenReviewModeColumn) {
await knex.schema.alterTable(TableName.IdentityKubernetesAuth, (table) => {
table.string("tokenReviewMode").notNullable().defaultTo("api");
});
}
}
export async function down(knex: Knex): Promise<void> {
const hasTokenReviewModeColumn = await knex.schema.hasColumn(TableName.IdentityKubernetesAuth, "tokenReviewMode");
if (hasTokenReviewModeColumn) {
await knex.schema.alterTable(TableName.IdentityKubernetesAuth, (table) => {
table.dropColumn("tokenReviewMode");
});
}
}

View File

@ -1,21 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const hasShowSnapshotsLegacyColumn = await knex.schema.hasColumn(TableName.Project, "showSnapshotsLegacy");
if (!hasShowSnapshotsLegacyColumn) {
await knex.schema.table(TableName.Project, (table) => {
table.boolean("showSnapshotsLegacy").notNullable().defaultTo(false);
});
}
}
export async function down(knex: Knex): Promise<void> {
const hasShowSnapshotsLegacyColumn = await knex.schema.hasColumn(TableName.Project, "showSnapshotsLegacy");
if (hasShowSnapshotsLegacyColumn) {
await knex.schema.table(TableName.Project, (table) => {
table.dropColumn("showSnapshotsLegacy");
});
}
}

View File

@ -1,21 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const hasConfigColumn = await knex.schema.hasColumn(TableName.DynamicSecretLease, "config");
if (!hasConfigColumn) {
await knex.schema.alterTable(TableName.DynamicSecretLease, (table) => {
table.jsonb("config");
});
}
}
export async function down(knex: Knex): Promise<void> {
const hasConfigColumn = await knex.schema.hasColumn(TableName.DynamicSecretLease, "config");
if (hasConfigColumn) {
await knex.schema.alterTable(TableName.DynamicSecretLease, (table) => {
table.dropColumn("config");
});
}
}

View File

@ -1,45 +0,0 @@
import { Knex } from "knex";
import { selectAllTableCols } from "@app/lib/knex";
import { TableName } from "../schemas";
const BATCH_SIZE = 1000;
export async function up(knex: Knex): Promise<void> {
const hasKubernetesHostColumn = await knex.schema.hasColumn(TableName.IdentityKubernetesAuth, "kubernetesHost");
if (hasKubernetesHostColumn) {
await knex.schema.alterTable(TableName.IdentityKubernetesAuth, (table) => {
table.string("kubernetesHost").nullable().alter();
});
}
}
export async function down(knex: Knex): Promise<void> {
const hasKubernetesHostColumn = await knex.schema.hasColumn(TableName.IdentityKubernetesAuth, "kubernetesHost");
// find all rows where kubernetesHost is null
const rows = await knex(TableName.IdentityKubernetesAuth)
.whereNull("kubernetesHost")
.select(selectAllTableCols(TableName.IdentityKubernetesAuth));
if (rows.length > 0) {
for (let i = 0; i < rows.length; i += BATCH_SIZE) {
const batch = rows.slice(i, i + BATCH_SIZE);
// eslint-disable-next-line no-await-in-loop
await knex(TableName.IdentityKubernetesAuth)
.whereIn(
"id",
batch.map((row) => row.id)
)
.update({ kubernetesHost: "" });
}
}
if (hasKubernetesHostColumn) {
await knex.schema.alterTable(TableName.IdentityKubernetesAuth, (table) => {
table.string("kubernetesHost").notNullable().alter();
});
}
}

View File

@ -1,29 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasTable(TableName.IdentityAliCloudAuth))) {
await knex.schema.createTable(TableName.IdentityAliCloudAuth, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.bigInteger("accessTokenTTL").defaultTo(7200).notNullable();
t.bigInteger("accessTokenMaxTTL").defaultTo(7200).notNullable();
t.bigInteger("accessTokenNumUsesLimit").defaultTo(0).notNullable();
t.jsonb("accessTokenTrustedIps").notNullable();
t.timestamps(true, true, true);
t.uuid("identityId").notNullable().unique();
t.foreign("identityId").references("id").inTable(TableName.Identity).onDelete("CASCADE");
t.string("type").notNullable();
t.string("allowedArns").notNullable();
});
}
await createOnUpdateTrigger(knex, TableName.IdentityAliCloudAuth);
}
export async function down(knex: Knex): Promise<void> {
await knex.schema.dropTableIfExists(TableName.IdentityAliCloudAuth);
await dropOnUpdateTrigger(knex, TableName.IdentityAliCloudAuth);
}
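// Illustrative sketch (not part of the original migration): createOnUpdateTrigger and
// dropOnUpdateTrigger are not shown in this diff. In Postgres, a trigger that keeps an "updatedAt"
// column current usually looks roughly like this; the real helper may differ in naming and SQL details.
async function createUpdatedAtTriggerExample(db: Knex, table: string): Promise<void> {
  await db.raw(`
    CREATE OR REPLACE FUNCTION on_update_timestamp() RETURNS trigger AS $$
    BEGIN
      NEW."updatedAt" = NOW();
      RETURN NEW;
    END;
    $$ LANGUAGE plpgsql;
  `);
  await db.raw(`
    CREATE TRIGGER "${table}_updatedAt"
    BEFORE UPDATE ON "${table}"
    FOR EACH ROW EXECUTE PROCEDURE on_update_timestamp();
  `);
}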

View File

@ -1,21 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const hasCol = await knex.schema.hasColumn(TableName.Identity, "hasDeleteProtection");
if (!hasCol) {
await knex.schema.alterTable(TableName.Identity, (t) => {
t.boolean("hasDeleteProtection").notNullable().defaultTo(false);
});
}
}
export async function down(knex: Knex): Promise<void> {
const hasCol = await knex.schema.hasColumn(TableName.Identity, "hasDeleteProtection");
if (hasCol) {
await knex.schema.alterTable(TableName.Identity, (t) => {
t.dropColumn("hasDeleteProtection");
});
}
}

View File

@ -1,21 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const hasColumn = await knex.schema.hasColumn(TableName.IdentityAwsAuth, "allowedPrincipalArns");
if (hasColumn) {
await knex.schema.alterTable(TableName.IdentityAwsAuth, (t) => {
t.string("allowedPrincipalArns", 2048).notNullable().alter();
});
}
}
export async function down(knex: Knex): Promise<void> {
const hasColumn = await knex.schema.hasColumn(TableName.IdentityAwsAuth, "allowedPrincipalArns");
if (hasColumn) {
await knex.schema.alterTable(TableName.IdentityAwsAuth, (t) => {
t.string("allowedPrincipalArns", 255).notNullable().alter();
});
}
}

View File

@ -1,91 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const hasEncryptedGithubAppConnectionClientIdColumn = await knex.schema.hasColumn(
TableName.SuperAdmin,
"encryptedGitHubAppConnectionClientId"
);
const hasEncryptedGithubAppConnectionClientSecretColumn = await knex.schema.hasColumn(
TableName.SuperAdmin,
"encryptedGitHubAppConnectionClientSecret"
);
const hasEncryptedGithubAppConnectionSlugColumn = await knex.schema.hasColumn(
TableName.SuperAdmin,
"encryptedGitHubAppConnectionSlug"
);
const hasEncryptedGithubAppConnectionAppIdColumn = await knex.schema.hasColumn(
TableName.SuperAdmin,
"encryptedGitHubAppConnectionId"
);
const hasEncryptedGithubAppConnectionAppPrivateKeyColumn = await knex.schema.hasColumn(
TableName.SuperAdmin,
"encryptedGitHubAppConnectionPrivateKey"
);
await knex.schema.alterTable(TableName.SuperAdmin, (t) => {
if (!hasEncryptedGithubAppConnectionClientIdColumn) {
t.binary("encryptedGitHubAppConnectionClientId").nullable();
}
if (!hasEncryptedGithubAppConnectionClientSecretColumn) {
t.binary("encryptedGitHubAppConnectionClientSecret").nullable();
}
if (!hasEncryptedGithubAppConnectionSlugColumn) {
t.binary("encryptedGitHubAppConnectionSlug").nullable();
}
if (!hasEncryptedGithubAppConnectionAppIdColumn) {
t.binary("encryptedGitHubAppConnectionId").nullable();
}
if (!hasEncryptedGithubAppConnectionAppPrivateKeyColumn) {
t.binary("encryptedGitHubAppConnectionPrivateKey").nullable();
}
});
}
export async function down(knex: Knex): Promise<void> {
const hasEncryptedGithubAppConnectionClientIdColumn = await knex.schema.hasColumn(
TableName.SuperAdmin,
"encryptedGitHubAppConnectionClientId"
);
const hasEncryptedGithubAppConnectionClientSecretColumn = await knex.schema.hasColumn(
TableName.SuperAdmin,
"encryptedGitHubAppConnectionClientSecret"
);
const hasEncryptedGithubAppConnectionSlugColumn = await knex.schema.hasColumn(
TableName.SuperAdmin,
"encryptedGitHubAppConnectionSlug"
);
const hasEncryptedGithubAppConnectionAppIdColumn = await knex.schema.hasColumn(
TableName.SuperAdmin,
"encryptedGitHubAppConnectionId"
);
const hasEncryptedGithubAppConnectionAppPrivateKeyColumn = await knex.schema.hasColumn(
TableName.SuperAdmin,
"encryptedGitHubAppConnectionPrivateKey"
);
await knex.schema.alterTable(TableName.SuperAdmin, (t) => {
if (hasEncryptedGithubAppConnectionClientIdColumn) {
t.dropColumn("encryptedGitHubAppConnectionClientId");
}
if (hasEncryptedGithubAppConnectionClientSecretColumn) {
t.dropColumn("encryptedGitHubAppConnectionClientSecret");
}
if (hasEncryptedGithubAppConnectionSlugColumn) {
t.dropColumn("encryptedGitHubAppConnectionSlug");
}
if (hasEncryptedGithubAppConnectionAppIdColumn) {
t.dropColumn("encryptedGitHubAppConnectionId");
}
if (hasEncryptedGithubAppConnectionAppPrivateKeyColumn) {
t.dropColumn("encryptedGitHubAppConnectionPrivateKey");
}
});
}

View File

@ -3,27 +3,12 @@ import { Knex } from "knex";
import { initializeHsmModule } from "@app/ee/services/hsm/hsm-fns"; import { initializeHsmModule } from "@app/ee/services/hsm/hsm-fns";
import { hsmServiceFactory } from "@app/ee/services/hsm/hsm-service"; import { hsmServiceFactory } from "@app/ee/services/hsm/hsm-service";
import { TKeyStoreFactory } from "@app/keystore/keystore"; import { TKeyStoreFactory } from "@app/keystore/keystore";
import { folderCheckpointDALFactory } from "@app/services/folder-checkpoint/folder-checkpoint-dal";
import { folderCheckpointResourcesDALFactory } from "@app/services/folder-checkpoint-resources/folder-checkpoint-resources-dal";
import { folderCommitDALFactory } from "@app/services/folder-commit/folder-commit-dal";
import { folderCommitServiceFactory } from "@app/services/folder-commit/folder-commit-service";
import { folderCommitChangesDALFactory } from "@app/services/folder-commit-changes/folder-commit-changes-dal";
import { folderTreeCheckpointDALFactory } from "@app/services/folder-tree-checkpoint/folder-tree-checkpoint-dal";
import { folderTreeCheckpointResourcesDALFactory } from "@app/services/folder-tree-checkpoint-resources/folder-tree-checkpoint-resources-dal";
import { identityDALFactory } from "@app/services/identity/identity-dal";
import { internalKmsDALFactory } from "@app/services/kms/internal-kms-dal"; import { internalKmsDALFactory } from "@app/services/kms/internal-kms-dal";
import { kmskeyDALFactory } from "@app/services/kms/kms-key-dal"; import { kmskeyDALFactory } from "@app/services/kms/kms-key-dal";
import { kmsRootConfigDALFactory } from "@app/services/kms/kms-root-config-dal"; import { kmsRootConfigDALFactory } from "@app/services/kms/kms-root-config-dal";
import { kmsServiceFactory } from "@app/services/kms/kms-service"; import { kmsServiceFactory } from "@app/services/kms/kms-service";
import { orgDALFactory } from "@app/services/org/org-dal"; import { orgDALFactory } from "@app/services/org/org-dal";
import { projectDALFactory } from "@app/services/project/project-dal"; import { projectDALFactory } from "@app/services/project/project-dal";
import { resourceMetadataDALFactory } from "@app/services/resource-metadata/resource-metadata-dal";
import { secretFolderDALFactory } from "@app/services/secret-folder/secret-folder-dal";
import { secretFolderVersionDALFactory } from "@app/services/secret-folder/secret-folder-version-dal";
import { secretTagDALFactory } from "@app/services/secret-tag/secret-tag-dal";
import { secretV2BridgeDALFactory } from "@app/services/secret-v2-bridge/secret-v2-bridge-dal";
import { secretVersionV2BridgeDALFactory } from "@app/services/secret-v2-bridge/secret-version-dal";
import { userDALFactory } from "@app/services/user/user-dal";
import { TMigrationEnvConfig } from "./env-config"; import { TMigrationEnvConfig } from "./env-config";
@ -65,77 +50,3 @@ export const getMigrationEncryptionServices = async ({ envConfig, db, keyStore }
return { kmsService }; return { kmsService };
}; };
export const getMigrationPITServices = async ({
db,
keyStore,
envConfig
}: {
db: Knex;
keyStore: TKeyStoreFactory;
envConfig: TMigrationEnvConfig;
}) => {
const projectDAL = projectDALFactory(db);
const folderCommitDAL = folderCommitDALFactory(db);
const folderCommitChangesDAL = folderCommitChangesDALFactory(db);
const folderCheckpointDAL = folderCheckpointDALFactory(db);
const folderTreeCheckpointDAL = folderTreeCheckpointDALFactory(db);
const userDAL = userDALFactory(db);
const identityDAL = identityDALFactory(db);
const folderDAL = secretFolderDALFactory(db);
const folderVersionDAL = secretFolderVersionDALFactory(db);
const secretVersionV2BridgeDAL = secretVersionV2BridgeDALFactory(db);
const folderCheckpointResourcesDAL = folderCheckpointResourcesDALFactory(db);
const secretV2BridgeDAL = secretV2BridgeDALFactory({ db, keyStore });
const folderTreeCheckpointResourcesDAL = folderTreeCheckpointResourcesDALFactory(db);
const secretTagDAL = secretTagDALFactory(db);
const orgDAL = orgDALFactory(db);
const kmsRootConfigDAL = kmsRootConfigDALFactory(db);
const kmsDAL = kmskeyDALFactory(db);
const internalKmsDAL = internalKmsDALFactory(db);
const resourceMetadataDAL = resourceMetadataDALFactory(db);
const hsmModule = initializeHsmModule(envConfig);
hsmModule.initialize();
const hsmService = hsmServiceFactory({
hsmModule: hsmModule.getModule(),
envConfig
});
const kmsService = kmsServiceFactory({
kmsRootConfigDAL,
keyStore,
kmsDAL,
internalKmsDAL,
orgDAL,
projectDAL,
hsmService,
envConfig
});
await hsmService.startService();
await kmsService.startService();
const folderCommitService = folderCommitServiceFactory({
folderCommitDAL,
folderCommitChangesDAL,
folderCheckpointDAL,
folderTreeCheckpointDAL,
userDAL,
identityDAL,
folderDAL,
folderVersionDAL,
secretVersionV2BridgeDAL,
projectDAL,
folderCheckpointResourcesDAL,
secretV2BridgeDAL,
folderTreeCheckpointResourcesDAL,
kmsService,
secretTagDAL,
resourceMetadataDAL
});
return { folderCommitService };
};
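// Illustrative sketch (not part of the removed code): getMigrationPITServices follows a plain
// factory-wiring pattern, where each DAL factory takes the shared Knex handle and the service
// factory takes the DALs, so migrations get a working service without the app container.
// The names below are generic stand-ins, not the real DALs or services.
type ExampleUserDAL = { findById: (id: string) => Promise<unknown> };
const exampleUserDALFactory = (db: Knex): ExampleUserDAL => ({
  findById: (id: string) => db("users").where({ id }).first()
});
const exampleAuditServiceFactory = ({ userDAL }: { userDAL: ExampleUserDAL }) => ({
  recordAction: async (userId: string, action: string) => ({
    user: await userDAL.findById(userId),
    action,
    at: new Date()
  })
});
export const getExampleAuditService = (db: Knex) => {
  const userDAL = exampleUserDALFactory(db);
  return { auditService: exampleAuditServiceFactory({ userDAL }) };
};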

View File

@ -13,9 +13,7 @@ export const AccessApprovalPoliciesApproversSchema = z.object({
createdAt: z.date(), createdAt: z.date(),
updatedAt: z.date(), updatedAt: z.date(),
approverUserId: z.string().uuid().nullable().optional(), approverUserId: z.string().uuid().nullable().optional(),
approverGroupId: z.string().uuid().nullable().optional(), approverGroupId: z.string().uuid().nullable().optional()
sequence: z.number().default(0).nullable().optional(),
approvalsRequired: z.number().default(1).nullable().optional()
}); });
export type TAccessApprovalPoliciesApprovers = z.infer<typeof AccessApprovalPoliciesApproversSchema>; export type TAccessApprovalPoliciesApprovers = z.infer<typeof AccessApprovalPoliciesApproversSchema>;

View File

@ -1,26 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const AccessApprovalPoliciesBypassersSchema = z.object({
id: z.string().uuid(),
bypasserGroupId: z.string().uuid().nullable().optional(),
bypasserUserId: z.string().uuid().nullable().optional(),
policyId: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date()
});
export type TAccessApprovalPoliciesBypassers = z.infer<typeof AccessApprovalPoliciesBypassersSchema>;
export type TAccessApprovalPoliciesBypassersInsert = Omit<
z.input<typeof AccessApprovalPoliciesBypassersSchema>,
TImmutableDBKeys
>;
export type TAccessApprovalPoliciesBypassersUpdate = Partial<
Omit<z.input<typeof AccessApprovalPoliciesBypassersSchema>, TImmutableDBKeys>
>;
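// Illustrative sketch (not part of the generated file): every schema file in this directory derives
// its Insert/Update types the same way, stripping the database-owned columns named in
// TImmutableDBKeys (id, createdAt, updatedAt and, in the pre-change models file, commitId).
// The stand-in schema below shows what that buys at compile time.
const ExampleRowSchema = z.object({
  id: z.string().uuid(),
  name: z.string(),
  createdAt: z.date(),
  updatedAt: z.date()
});
type TExampleRowInsert = Omit<z.input<typeof ExampleRowSchema>, TImmutableDBKeys>;
// Compiles: callers supply only the columns they own.
const exampleInsert: TExampleRowInsert = { name: "my-policy" };
// Would not compile: "id" has been removed from the insert type.
// const badInsert: TExampleRowInsert = { id: "some-uuid", name: "my-policy" };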

View File

@ -18,9 +18,7 @@ export const AccessApprovalRequestsSchema = z.object({
createdAt: z.date(), createdAt: z.date(),
updatedAt: z.date(), updatedAt: z.date(),
requestedByUserId: z.string().uuid(), requestedByUserId: z.string().uuid(),
note: z.string().nullable().optional(), note: z.string().nullable().optional()
privilegeDeletedAt: z.date().nullable().optional(),
status: z.string().default("pending")
}); });
export type TAccessApprovalRequests = z.infer<typeof AccessApprovalRequestsSchema>; export type TAccessApprovalRequests = z.infer<typeof AccessApprovalRequestsSchema>;

View File

@ -16,8 +16,7 @@ export const DynamicSecretLeasesSchema = z.object({
statusDetails: z.string().nullable().optional(), statusDetails: z.string().nullable().optional(),
dynamicSecretId: z.string().uuid(), dynamicSecretId: z.string().uuid(),
createdAt: z.date(), createdAt: z.date(),
updatedAt: z.date(), updatedAt: z.date()
config: z.unknown().nullable().optional()
}); });
export type TDynamicSecretLeases = z.infer<typeof DynamicSecretLeasesSchema>; export type TDynamicSecretLeases = z.infer<typeof DynamicSecretLeasesSchema>;

View File

@ -28,8 +28,7 @@ export const DynamicSecretsSchema = z.object({
updatedAt: z.date(), updatedAt: z.date(),
encryptedInput: zodBuffer, encryptedInput: zodBuffer,
projectGatewayId: z.string().uuid().nullable().optional(), projectGatewayId: z.string().uuid().nullable().optional(),
gatewayId: z.string().uuid().nullable().optional(), gatewayId: z.string().uuid().nullable().optional()
usernameTemplate: z.string().nullable().optional()
}); });
export type TDynamicSecrets = z.infer<typeof DynamicSecretsSchema>; export type TDynamicSecrets = z.infer<typeof DynamicSecretsSchema>;

View File

@ -1,23 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const FolderCheckpointResourcesSchema = z.object({
id: z.string().uuid(),
folderCheckpointId: z.string().uuid(),
secretVersionId: z.string().uuid().nullable().optional(),
folderVersionId: z.string().uuid().nullable().optional(),
createdAt: z.date(),
updatedAt: z.date()
});
export type TFolderCheckpointResources = z.infer<typeof FolderCheckpointResourcesSchema>;
export type TFolderCheckpointResourcesInsert = Omit<z.input<typeof FolderCheckpointResourcesSchema>, TImmutableDBKeys>;
export type TFolderCheckpointResourcesUpdate = Partial<
Omit<z.input<typeof FolderCheckpointResourcesSchema>, TImmutableDBKeys>
>;

View File

@ -1,19 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const FolderCheckpointsSchema = z.object({
id: z.string().uuid(),
folderCommitId: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date()
});
export type TFolderCheckpoints = z.infer<typeof FolderCheckpointsSchema>;
export type TFolderCheckpointsInsert = Omit<z.input<typeof FolderCheckpointsSchema>, TImmutableDBKeys>;
export type TFolderCheckpointsUpdate = Partial<Omit<z.input<typeof FolderCheckpointsSchema>, TImmutableDBKeys>>;

View File

@ -1,23 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const FolderCommitChangesSchema = z.object({
id: z.string().uuid(),
folderCommitId: z.string().uuid(),
changeType: z.string(),
isUpdate: z.boolean().default(false),
secretVersionId: z.string().uuid().nullable().optional(),
folderVersionId: z.string().uuid().nullable().optional(),
createdAt: z.date(),
updatedAt: z.date()
});
export type TFolderCommitChanges = z.infer<typeof FolderCommitChangesSchema>;
export type TFolderCommitChangesInsert = Omit<z.input<typeof FolderCommitChangesSchema>, TImmutableDBKeys>;
export type TFolderCommitChangesUpdate = Partial<Omit<z.input<typeof FolderCommitChangesSchema>, TImmutableDBKeys>>;

View File

@ -1,24 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const FolderCommitsSchema = z.object({
id: z.string().uuid(),
commitId: z.coerce.bigint(),
actorMetadata: z.unknown(),
actorType: z.string(),
message: z.string().nullable().optional(),
folderId: z.string().uuid(),
envId: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date()
});
export type TFolderCommits = z.infer<typeof FolderCommitsSchema>;
export type TFolderCommitsInsert = Omit<z.input<typeof FolderCommitsSchema>, TImmutableDBKeys>;
export type TFolderCommitsUpdate = Partial<Omit<z.input<typeof FolderCommitsSchema>, TImmutableDBKeys>>;

View File

@ -1,26 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const FolderTreeCheckpointResourcesSchema = z.object({
id: z.string().uuid(),
folderTreeCheckpointId: z.string().uuid(),
folderId: z.string().uuid(),
folderCommitId: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date()
});
export type TFolderTreeCheckpointResources = z.infer<typeof FolderTreeCheckpointResourcesSchema>;
export type TFolderTreeCheckpointResourcesInsert = Omit<
z.input<typeof FolderTreeCheckpointResourcesSchema>,
TImmutableDBKeys
>;
export type TFolderTreeCheckpointResourcesUpdate = Partial<
Omit<z.input<typeof FolderTreeCheckpointResourcesSchema>, TImmutableDBKeys>
>;

View File

@ -1,19 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const FolderTreeCheckpointsSchema = z.object({
id: z.string().uuid(),
folderCommitId: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date()
});
export type TFolderTreeCheckpoints = z.infer<typeof FolderTreeCheckpointsSchema>;
export type TFolderTreeCheckpointsInsert = Omit<z.input<typeof FolderTreeCheckpointsSchema>, TImmutableDBKeys>;
export type TFolderTreeCheckpointsUpdate = Partial<Omit<z.input<typeof FolderTreeCheckpointsSchema>, TImmutableDBKeys>>;

View File

@ -12,8 +12,7 @@ export const IdentitiesSchema = z.object({
name: z.string(), name: z.string(),
authMethod: z.string().nullable().optional(), authMethod: z.string().nullable().optional(),
createdAt: z.date(), createdAt: z.date(),
updatedAt: z.date(), updatedAt: z.date()
hasDeleteProtection: z.boolean().default(false)
}); });
export type TIdentities = z.infer<typeof IdentitiesSchema>; export type TIdentities = z.infer<typeof IdentitiesSchema>;

View File

@ -1,25 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const IdentityAlicloudAuthsSchema = z.object({
id: z.string().uuid(),
accessTokenTTL: z.coerce.number().default(7200),
accessTokenMaxTTL: z.coerce.number().default(7200),
accessTokenNumUsesLimit: z.coerce.number().default(0),
accessTokenTrustedIps: z.unknown(),
createdAt: z.date(),
updatedAt: z.date(),
identityId: z.string().uuid(),
type: z.string(),
allowedArns: z.string()
});
export type TIdentityAlicloudAuths = z.infer<typeof IdentityAlicloudAuthsSchema>;
export type TIdentityAlicloudAuthsInsert = Omit<z.input<typeof IdentityAlicloudAuthsSchema>, TImmutableDBKeys>;
export type TIdentityAlicloudAuthsUpdate = Partial<Omit<z.input<typeof IdentityAlicloudAuthsSchema>, TImmutableDBKeys>>;

View File

@ -18,7 +18,7 @@ export const IdentityKubernetesAuthsSchema = z.object({
createdAt: z.date(), createdAt: z.date(),
updatedAt: z.date(), updatedAt: z.date(),
identityId: z.string().uuid(), identityId: z.string().uuid(),
kubernetesHost: z.string().nullable().optional(), kubernetesHost: z.string(),
encryptedCaCert: z.string().nullable().optional(), encryptedCaCert: z.string().nullable().optional(),
caCertIV: z.string().nullable().optional(), caCertIV: z.string().nullable().optional(),
caCertTag: z.string().nullable().optional(), caCertTag: z.string().nullable().optional(),
@ -31,8 +31,7 @@ export const IdentityKubernetesAuthsSchema = z.object({
encryptedKubernetesTokenReviewerJwt: zodBuffer.nullable().optional(), encryptedKubernetesTokenReviewerJwt: zodBuffer.nullable().optional(),
encryptedKubernetesCaCertificate: zodBuffer.nullable().optional(), encryptedKubernetesCaCertificate: zodBuffer.nullable().optional(),
gatewayId: z.string().uuid().nullable().optional(), gatewayId: z.string().uuid().nullable().optional(),
accessTokenPeriod: z.coerce.number().default(0), accessTokenPeriod: z.coerce.number().default(0)
tokenReviewMode: z.string().default("api")
}); });
export type TIdentityKubernetesAuths = z.infer<typeof IdentityKubernetesAuthsSchema>; export type TIdentityKubernetesAuths = z.infer<typeof IdentityKubernetesAuthsSchema>;

View File

@ -1,6 +1,5 @@
export * from "./access-approval-policies"; export * from "./access-approval-policies";
export * from "./access-approval-policies-approvers"; export * from "./access-approval-policies-approvers";
export * from "./access-approval-policies-bypassers";
export * from "./access-approval-requests"; export * from "./access-approval-requests";
export * from "./access-approval-requests-reviewers"; export * from "./access-approval-requests-reviewers";
export * from "./api-keys"; export * from "./api-keys";
@ -24,12 +23,6 @@ export * from "./dynamic-secrets";
export * from "./external-certificate-authorities"; export * from "./external-certificate-authorities";
export * from "./external-group-org-role-mappings"; export * from "./external-group-org-role-mappings";
export * from "./external-kms"; export * from "./external-kms";
export * from "./folder-checkpoint-resources";
export * from "./folder-checkpoints";
export * from "./folder-commit-changes";
export * from "./folder-commits";
export * from "./folder-tree-checkpoint-resources";
export * from "./folder-tree-checkpoints";
export * from "./gateways"; export * from "./gateways";
export * from "./git-app-install-sessions"; export * from "./git-app-install-sessions";
export * from "./git-app-org"; export * from "./git-app-org";
@ -39,7 +32,6 @@ export * from "./group-project-memberships";
export * from "./groups"; export * from "./groups";
export * from "./identities"; export * from "./identities";
export * from "./identity-access-tokens"; export * from "./identity-access-tokens";
export * from "./identity-alicloud-auths";
export * from "./identity-aws-auths"; export * from "./identity-aws-auths";
export * from "./identity-azure-auths"; export * from "./identity-azure-auths";
export * from "./identity-gcp-auths"; export * from "./identity-gcp-auths";
@ -100,7 +92,6 @@ export * from "./saml-configs";
export * from "./scim-tokens"; export * from "./scim-tokens";
export * from "./secret-approval-policies"; export * from "./secret-approval-policies";
export * from "./secret-approval-policies-approvers"; export * from "./secret-approval-policies-approvers";
export * from "./secret-approval-policies-bypassers";
export * from "./secret-approval-request-secret-tags"; export * from "./secret-approval-request-secret-tags";
export * from "./secret-approval-request-secret-tags-v2"; export * from "./secret-approval-request-secret-tags-v2";
export * from "./secret-approval-requests"; export * from "./secret-approval-requests";
@ -118,12 +109,7 @@ export * from "./secret-rotation-outputs";
export * from "./secret-rotation-v2-secret-mappings"; export * from "./secret-rotation-v2-secret-mappings";
export * from "./secret-rotations"; export * from "./secret-rotations";
export * from "./secret-rotations-v2"; export * from "./secret-rotations-v2";
export * from "./secret-scanning-configs";
export * from "./secret-scanning-data-sources";
export * from "./secret-scanning-findings";
export * from "./secret-scanning-git-risks"; export * from "./secret-scanning-git-risks";
export * from "./secret-scanning-resources";
export * from "./secret-scanning-scans";
export * from "./secret-sharing"; export * from "./secret-sharing";
export * from "./secret-snapshot-folders"; export * from "./secret-snapshot-folders";
export * from "./secret-snapshot-secrets"; export * from "./secret-snapshot-secrets";

View File

@ -80,7 +80,6 @@ export enum TableName {
IdentityGcpAuth = "identity_gcp_auths", IdentityGcpAuth = "identity_gcp_auths",
IdentityAzureAuth = "identity_azure_auths", IdentityAzureAuth = "identity_azure_auths",
IdentityUaClientSecret = "identity_ua_client_secrets", IdentityUaClientSecret = "identity_ua_client_secrets",
IdentityAliCloudAuth = "identity_alicloud_auths",
IdentityAwsAuth = "identity_aws_auths", IdentityAwsAuth = "identity_aws_auths",
IdentityOciAuth = "identity_oci_auths", IdentityOciAuth = "identity_oci_auths",
IdentityOidcAuth = "identity_oidc_auths", IdentityOidcAuth = "identity_oidc_auths",
@ -96,12 +95,10 @@ export enum TableName {
ScimToken = "scim_tokens", ScimToken = "scim_tokens",
AccessApprovalPolicy = "access_approval_policies", AccessApprovalPolicy = "access_approval_policies",
AccessApprovalPolicyApprover = "access_approval_policies_approvers", AccessApprovalPolicyApprover = "access_approval_policies_approvers",
AccessApprovalPolicyBypasser = "access_approval_policies_bypassers",
AccessApprovalRequest = "access_approval_requests", AccessApprovalRequest = "access_approval_requests",
AccessApprovalRequestReviewer = "access_approval_requests_reviewers", AccessApprovalRequestReviewer = "access_approval_requests_reviewers",
SecretApprovalPolicy = "secret_approval_policies", SecretApprovalPolicy = "secret_approval_policies",
SecretApprovalPolicyApprover = "secret_approval_policies_approvers", SecretApprovalPolicyApprover = "secret_approval_policies_approvers",
SecretApprovalPolicyBypasser = "secret_approval_policies_bypassers",
SecretApprovalRequest = "secret_approval_requests", SecretApprovalRequest = "secret_approval_requests",
SecretApprovalRequestReviewer = "secret_approval_requests_reviewers", SecretApprovalRequestReviewer = "secret_approval_requests_reviewers",
SecretApprovalRequestSecret = "secret_approval_requests_secrets", SecretApprovalRequestSecret = "secret_approval_requests_secrets",
@ -160,21 +157,10 @@ export enum TableName {
MicrosoftTeamsIntegrations = "microsoft_teams_integrations", MicrosoftTeamsIntegrations = "microsoft_teams_integrations",
ProjectMicrosoftTeamsConfigs = "project_microsoft_teams_configs", ProjectMicrosoftTeamsConfigs = "project_microsoft_teams_configs",
SecretReminderRecipients = "secret_reminder_recipients", SecretReminderRecipients = "secret_reminder_recipients",
GithubOrgSyncConfig = "github_org_sync_configs", GithubOrgSyncConfig = "github_org_sync_configs"
FolderCommit = "folder_commits",
FolderCommitChanges = "folder_commit_changes",
FolderCheckpoint = "folder_checkpoints",
FolderCheckpointResources = "folder_checkpoint_resources",
FolderTreeCheckpoint = "folder_tree_checkpoints",
FolderTreeCheckpointResources = "folder_tree_checkpoint_resources",
SecretScanningDataSource = "secret_scanning_data_sources",
SecretScanningResource = "secret_scanning_resources",
SecretScanningScan = "secret_scanning_scans",
SecretScanningFinding = "secret_scanning_findings",
SecretScanningConfig = "secret_scanning_configs"
} }
export type TImmutableDBKeys = "id" | "createdAt" | "updatedAt" | "commitId"; export type TImmutableDBKeys = "id" | "createdAt" | "updatedAt";
export const UserDeviceSchema = z export const UserDeviceSchema = z
.object({ .object({
@ -248,7 +234,6 @@ export enum IdentityAuthMethod {
UNIVERSAL_AUTH = "universal-auth", UNIVERSAL_AUTH = "universal-auth",
KUBERNETES_AUTH = "kubernetes-auth", KUBERNETES_AUTH = "kubernetes-auth",
GCP_AUTH = "gcp-auth", GCP_AUTH = "gcp-auth",
ALICLOUD_AUTH = "alicloud-auth",
AWS_AUTH = "aws-auth", AWS_AUTH = "aws-auth",
AZURE_AUTH = "azure-auth", AZURE_AUTH = "azure-auth",
OCI_AUTH = "oci-auth", OCI_AUTH = "oci-auth",
@ -261,8 +246,7 @@ export enum ProjectType {
SecretManager = "secret-manager", SecretManager = "secret-manager",
CertificateManager = "cert-manager", CertificateManager = "cert-manager",
KMS = "kms", KMS = "kms",
SSH = "ssh", SSH = "ssh"
SecretScanning = "secret-scanning"
} }
export enum ActionProjectType { export enum ActionProjectType {
@ -270,7 +254,6 @@ export enum ActionProjectType {
CertificateManager = ProjectType.CertificateManager, CertificateManager = ProjectType.CertificateManager,
KMS = ProjectType.KMS, KMS = ProjectType.KMS,
SSH = ProjectType.SSH, SSH = ProjectType.SSH,
SecretScanning = ProjectType.SecretScanning,
// project operations that happen on all types // project operations that happen on all types
Any = "any" Any = "any"
} }

View File

@ -28,8 +28,7 @@ export const ProjectsSchema = z.object({
type: z.string(), type: z.string(),
enforceCapitalization: z.boolean().default(false), enforceCapitalization: z.boolean().default(false),
hasDeleteProtection: z.boolean().default(false).nullable().optional(), hasDeleteProtection: z.boolean().default(false).nullable().optional(),
secretSharing: z.boolean().default(true), secretSharing: z.boolean().default(true)
showSnapshotsLegacy: z.boolean().default(false)
}); });
export type TProjects = z.infer<typeof ProjectsSchema>; export type TProjects = z.infer<typeof ProjectsSchema>;

View File

@ -1,26 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const SecretApprovalPoliciesBypassersSchema = z.object({
id: z.string().uuid(),
bypasserGroupId: z.string().uuid().nullable().optional(),
bypasserUserId: z.string().uuid().nullable().optional(),
policyId: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date()
});
export type TSecretApprovalPoliciesBypassers = z.infer<typeof SecretApprovalPoliciesBypassersSchema>;
export type TSecretApprovalPoliciesBypassersInsert = Omit<
z.input<typeof SecretApprovalPoliciesBypassersSchema>,
TImmutableDBKeys
>;
export type TSecretApprovalPoliciesBypassersUpdate = Partial<
Omit<z.input<typeof SecretApprovalPoliciesBypassersSchema>, TImmutableDBKeys>
>;

View File

@ -14,8 +14,7 @@ export const SecretFolderVersionsSchema = z.object({
createdAt: z.date(), createdAt: z.date(),
updatedAt: z.date(), updatedAt: z.date(),
envId: z.string().uuid(), envId: z.string().uuid(),
folderId: z.string().uuid(), folderId: z.string().uuid()
description: z.string().nullable().optional()
}); });
export type TSecretFolderVersions = z.infer<typeof SecretFolderVersionsSchema>; export type TSecretFolderVersions = z.infer<typeof SecretFolderVersionsSchema>;

View File

@ -1,20 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const SecretScanningConfigsSchema = z.object({
id: z.string().uuid(),
projectId: z.string(),
content: z.string().nullable().optional(),
createdAt: z.date(),
updatedAt: z.date()
});
export type TSecretScanningConfigs = z.infer<typeof SecretScanningConfigsSchema>;
export type TSecretScanningConfigsInsert = Omit<z.input<typeof SecretScanningConfigsSchema>, TImmutableDBKeys>;
export type TSecretScanningConfigsUpdate = Partial<Omit<z.input<typeof SecretScanningConfigsSchema>, TImmutableDBKeys>>;

View File

@ -1,32 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { zodBuffer } from "@app/lib/zod";
import { TImmutableDBKeys } from "./models";
export const SecretScanningDataSourcesSchema = z.object({
id: z.string().uuid(),
externalId: z.string().nullable().optional(),
name: z.string(),
description: z.string().nullable().optional(),
type: z.string(),
config: z.unknown(),
encryptedCredentials: zodBuffer.nullable().optional(),
connectionId: z.string().uuid().nullable().optional(),
isAutoScanEnabled: z.boolean().default(true).nullable().optional(),
projectId: z.string(),
createdAt: z.date(),
updatedAt: z.date(),
isDisconnected: z.boolean().default(false)
});
export type TSecretScanningDataSources = z.infer<typeof SecretScanningDataSourcesSchema>;
export type TSecretScanningDataSourcesInsert = Omit<z.input<typeof SecretScanningDataSourcesSchema>, TImmutableDBKeys>;
export type TSecretScanningDataSourcesUpdate = Partial<
Omit<z.input<typeof SecretScanningDataSourcesSchema>, TImmutableDBKeys>
>;

View File

@ -1,32 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const SecretScanningFindingsSchema = z.object({
id: z.string().uuid(),
dataSourceName: z.string(),
dataSourceType: z.string(),
resourceName: z.string(),
resourceType: z.string(),
rule: z.string(),
severity: z.string(),
status: z.string().default("unresolved"),
remarks: z.string().nullable().optional(),
fingerprint: z.string(),
details: z.unknown(),
projectId: z.string(),
scanId: z.string().uuid().nullable().optional(),
createdAt: z.date(),
updatedAt: z.date()
});
export type TSecretScanningFindings = z.infer<typeof SecretScanningFindingsSchema>;
export type TSecretScanningFindingsInsert = Omit<z.input<typeof SecretScanningFindingsSchema>, TImmutableDBKeys>;
export type TSecretScanningFindingsUpdate = Partial<
Omit<z.input<typeof SecretScanningFindingsSchema>, TImmutableDBKeys>
>;

View File

@ -1,24 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const SecretScanningResourcesSchema = z.object({
id: z.string().uuid(),
externalId: z.string(),
name: z.string(),
type: z.string(),
dataSourceId: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date()
});
export type TSecretScanningResources = z.infer<typeof SecretScanningResourcesSchema>;
export type TSecretScanningResourcesInsert = Omit<z.input<typeof SecretScanningResourcesSchema>, TImmutableDBKeys>;
export type TSecretScanningResourcesUpdate = Partial<
Omit<z.input<typeof SecretScanningResourcesSchema>, TImmutableDBKeys>
>;

View File

@ -1,21 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const SecretScanningScansSchema = z.object({
id: z.string().uuid(),
status: z.string().default("queued"),
statusMessage: z.string().nullable().optional(),
type: z.string(),
resourceId: z.string().uuid(),
createdAt: z.date().nullable().optional()
});
export type TSecretScanningScans = z.infer<typeof SecretScanningScansSchema>;
export type TSecretScanningScansInsert = Omit<z.input<typeof SecretScanningScansSchema>, TImmutableDBKeys>;
export type TSecretScanningScansUpdate = Partial<Omit<z.input<typeof SecretScanningScansSchema>, TImmutableDBKeys>>;

View File

@ -29,12 +29,7 @@ export const SuperAdminSchema = z.object({
adminIdentityIds: z.string().array().nullable().optional(), adminIdentityIds: z.string().array().nullable().optional(),
encryptedMicrosoftTeamsAppId: zodBuffer.nullable().optional(), encryptedMicrosoftTeamsAppId: zodBuffer.nullable().optional(),
encryptedMicrosoftTeamsClientSecret: zodBuffer.nullable().optional(), encryptedMicrosoftTeamsClientSecret: zodBuffer.nullable().optional(),
encryptedMicrosoftTeamsBotId: zodBuffer.nullable().optional(), encryptedMicrosoftTeamsBotId: zodBuffer.nullable().optional()
encryptedGitHubAppConnectionClientId: zodBuffer.nullable().optional(),
encryptedGitHubAppConnectionClientSecret: zodBuffer.nullable().optional(),
encryptedGitHubAppConnectionSlug: zodBuffer.nullable().optional(),
encryptedGitHubAppConnectionId: zodBuffer.nullable().optional(),
encryptedGitHubAppConnectionPrivateKey: zodBuffer.nullable().optional()
}); });
export type TSuperAdmin = z.infer<typeof SuperAdminSchema>; export type TSuperAdmin = z.infer<typeof SuperAdminSchema>;

View File

@ -1,7 +1,7 @@
import { nanoid } from "nanoid"; import { nanoid } from "nanoid";
import { z } from "zod"; import { z } from "zod";
import { ApproverType, BypasserType } from "@app/ee/services/access-approval-policy/access-approval-policy-types"; import { ApproverType } from "@app/ee/services/access-approval-policy/access-approval-policy-types";
import { EnforcementLevel } from "@app/lib/types"; import { EnforcementLevel } from "@app/lib/types";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter"; import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth"; import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
@ -23,41 +23,11 @@ export const registerAccessApprovalPolicyRouter = async (server: FastifyZodProvi
environment: z.string(), environment: z.string(),
approvers: z approvers: z
.discriminatedUnion("type", [ .discriminatedUnion("type", [
z.object({ z.object({ type: z.literal(ApproverType.Group), id: z.string() }),
type: z.literal(ApproverType.Group), z.object({ type: z.literal(ApproverType.User), id: z.string().optional(), name: z.string().optional() })
id: z.string(),
sequence: z.number().int().default(1)
}),
z.object({
type: z.literal(ApproverType.User),
id: z.string().optional(),
username: z.string().optional(),
sequence: z.number().int().default(1)
})
]) ])
.array() .array()
.max(100, "Cannot have more than 100 approvers") .min(1, { message: "At least one approver should be provided" }),
.min(1, { message: "At least one approver should be provided" })
.refine(
// @ts-expect-error this is ok
(el) => el.every((i) => Boolean(i?.id) || Boolean(i?.username)),
"Must provide either username or id"
),
bypassers: z
.discriminatedUnion("type", [
z.object({ type: z.literal(BypasserType.Group), id: z.string() }),
z.object({ type: z.literal(BypasserType.User), id: z.string().optional(), username: z.string().optional() })
])
.array()
.max(100, "Cannot have more than 100 bypassers")
.optional(),
approvalsRequired: z
.object({
numberOfApprovals: z.number().int(),
stepNumber: z.number().int()
})
.array()
.optional(),
approvals: z.number().min(1).default(1), approvals: z.number().min(1).default(1),
enforcementLevel: z.nativeEnum(EnforcementLevel).default(EnforcementLevel.Hard), enforcementLevel: z.nativeEnum(EnforcementLevel).default(EnforcementLevel.Hard),
allowedSelfApprovals: z.boolean().default(true) allowedSelfApprovals: z.boolean().default(true)
@ -99,16 +69,10 @@ export const registerAccessApprovalPolicyRouter = async (server: FastifyZodProvi
approvals: sapPubSchema approvals: sapPubSchema
.extend({ .extend({
approvers: z approvers: z
.object({ .object({ type: z.nativeEnum(ApproverType), id: z.string().nullable().optional() })
type: z.nativeEnum(ApproverType),
id: z.string().nullable().optional(),
sequence: z.number().nullable().optional(),
approvalsRequired: z.number().nullable().optional()
})
.array() .array()
.nullable() .nullable()
.optional(), .optional()
bypassers: z.object({ type: z.nativeEnum(BypasserType), id: z.string().nullable().optional() }).array()
}) })
.array() .array()
.nullable() .nullable()
@ -178,44 +142,14 @@ export const registerAccessApprovalPolicyRouter = async (server: FastifyZodProvi
.transform((val) => (val === "" ? "/" : val)), .transform((val) => (val === "" ? "/" : val)),
approvers: z approvers: z
.discriminatedUnion("type", [ .discriminatedUnion("type", [
z.object({ z.object({ type: z.literal(ApproverType.Group), id: z.string() }),
type: z.literal(ApproverType.Group), z.object({ type: z.literal(ApproverType.User), id: z.string().optional(), name: z.string().optional() })
id: z.string(),
sequence: z.number().int().default(1)
}),
z.object({
type: z.literal(ApproverType.User),
id: z.string().optional(),
username: z.string().optional(),
sequence: z.number().int().default(1)
})
]) ])
.array() .array()
.min(1, { message: "At least one approver should be provided" }) .min(1, { message: "At least one approver should be provided" }),
.max(100, "Cannot have more than 100 approvers")
.refine(
// @ts-expect-error this is ok
(el) => el.every((i) => Boolean(i?.id) || Boolean(i?.username)),
"Must provide either username or id"
),
bypassers: z
.discriminatedUnion("type", [
z.object({ type: z.literal(BypasserType.Group), id: z.string() }),
z.object({ type: z.literal(BypasserType.User), id: z.string().optional(), username: z.string().optional() })
])
.array()
.max(100, "Cannot have more than 100 bypassers")
.optional(),
approvals: z.number().min(1).optional(), approvals: z.number().min(1).optional(),
enforcementLevel: z.nativeEnum(EnforcementLevel).default(EnforcementLevel.Hard), enforcementLevel: z.nativeEnum(EnforcementLevel).default(EnforcementLevel.Hard),
allowedSelfApprovals: z.boolean().default(true), allowedSelfApprovals: z.boolean().default(true)
approvalsRequired: z
.object({
numberOfApprovals: z.number().int(),
stepNumber: z.number().int()
})
.array()
.optional()
}),
response: {
200: z.object({
@ -282,16 +216,6 @@ export const registerAccessApprovalPolicyRouter = async (server: FastifyZodProvi
.object({
type: z.nativeEnum(ApproverType),
id: z.string().nullable().optional(),
name: z.string().nullable().optional(),
approvalsRequired: z.number().nullable().optional()
})
.array()
.nullable()
.optional(),
bypassers: z
.object({
type: z.nativeEnum(BypasserType),
id: z.string().nullable().optional(),
name: z.string().nullable().optional()
})
.array() .array()

View File

@ -112,16 +112,7 @@ export const registerAccessApprovalRequestRouter = async (server: FastifyZodProv
id: z.string(),
name: z.string(),
approvals: z.number(),
approvers: z
approvers: z.string().array(),
.object({
userId: z.string().nullable().optional(),
sequence: z.number().nullable().optional(),
approvalsRequired: z.number().nullable().optional(),
email: z.string().nullable().optional(),
username: z.string().nullable().optional()
})
.array(),
bypassers: z.string().array(),
secretPath: z.string().nullish(),
envId: z.string(),
enforcementLevel: z.string(),

View File

@ -1,17 +0,0 @@
import {
CreateOracleDBConnectionSchema,
SanitizedOracleDBConnectionSchema,
UpdateOracleDBConnectionSchema
} from "@app/ee/services/app-connections/oracledb";
import { registerAppConnectionEndpoints } from "@app/server/routes/v1/app-connection-routers/app-connection-endpoints";
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
export const registerOracleDBConnectionRouter = async (server: FastifyZodProvider) => {
registerAppConnectionEndpoints({
app: AppConnection.OracleDB,
server,
sanitizedResponseSchema: SanitizedOracleDBConnectionSchema,
createSchema: CreateOracleDBConnectionSchema,
updateSchema: UpdateOracleDBConnectionSchema
});
};

View File

@ -36,8 +36,7 @@ export const registerDynamicSecretLeaseRouter = async (server: FastifyZodProvide
ctx.addIssue({ code: z.ZodIssueCode.custom, message: "TTL must be less than a day" });
}),
path: z.string().trim().default("/").transform(removeTrailingSlash).describe(DYNAMIC_SECRET_LEASES.CREATE.path),
environmentSlug: z.string().min(1).describe(DYNAMIC_SECRET_LEASES.CREATE.environmentSlug),
environmentSlug: z.string().min(1).describe(DYNAMIC_SECRET_LEASES.CREATE.path)
config: z.any().optional()
}),
response: {
200: z.object({

View File

@ -1,67 +0,0 @@
import { z } from "zod";
import { DynamicSecretLeasesSchema } from "@app/db/schemas";
import { ApiDocsTags, DYNAMIC_SECRET_LEASES } from "@app/lib/api-docs";
import { daysToMillisecond } from "@app/lib/dates";
import { removeTrailingSlash } from "@app/lib/fn";
import { ms } from "@app/lib/ms";
import { writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { SanitizedDynamicSecretSchema } from "@app/server/routes/sanitizedSchemas";
import { AuthMode } from "@app/services/auth/auth-type";
export const registerKubernetesDynamicSecretLeaseRouter = async (server: FastifyZodProvider) => {
server.route({
method: "POST",
url: "/",
config: {
rateLimit: writeLimit
},
schema: {
hide: false,
tags: [ApiDocsTags.DynamicSecrets],
body: z.object({
dynamicSecretName: z.string().min(1).describe(DYNAMIC_SECRET_LEASES.CREATE.dynamicSecretName).toLowerCase(),
projectSlug: z.string().min(1).describe(DYNAMIC_SECRET_LEASES.CREATE.projectSlug),
ttl: z
.string()
.optional()
.describe(DYNAMIC_SECRET_LEASES.CREATE.ttl)
.superRefine((val, ctx) => {
if (!val) return;
const valMs = ms(val);
if (valMs < 60 * 1000)
ctx.addIssue({ code: z.ZodIssueCode.custom, message: "TTL must be greater than 1min" });
if (valMs > daysToMillisecond(1))
ctx.addIssue({ code: z.ZodIssueCode.custom, message: "TTL must be less than a day" });
}),
path: z.string().trim().default("/").transform(removeTrailingSlash).describe(DYNAMIC_SECRET_LEASES.CREATE.path),
environmentSlug: z.string().min(1).describe(DYNAMIC_SECRET_LEASES.CREATE.environmentSlug),
config: z
.object({
namespace: z.string().min(1).optional().describe(DYNAMIC_SECRET_LEASES.KUBERNETES.CREATE.config.namespace)
})
.optional()
}),
response: {
200: z.object({
lease: DynamicSecretLeasesSchema,
dynamicSecret: SanitizedDynamicSecretSchema,
data: z.unknown()
})
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const { data, lease, dynamicSecret } = await server.services.dynamicSecretLease.create({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
name: req.body.dynamicSecretName,
...req.body
});
return { lease, data, dynamicSecret };
}
});
};

View File

@ -6,8 +6,6 @@ import { ApiDocsTags, DYNAMIC_SECRETS } from "@app/lib/api-docs";
import { daysToMillisecond } from "@app/lib/dates";
import { removeTrailingSlash } from "@app/lib/fn";
import { ms } from "@app/lib/ms";
import { isValidHandleBarTemplate } from "@app/lib/template/validate-handlebars";
import { CharacterType, characterValidator } from "@app/lib/validator/validate-string";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { slugSchema } from "@app/server/lib/schemas";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
@ -15,31 +13,6 @@ import { SanitizedDynamicSecretSchema } from "@app/server/routes/sanitizedSchema
import { AuthMode } from "@app/services/auth/auth-type";
import { ResourceMetadataSchema } from "@app/services/resource-metadata/resource-metadata-schema";
const validateUsernameTemplateCharacters = characterValidator([
CharacterType.AlphaNumeric,
CharacterType.Underscore,
CharacterType.Hyphen,
CharacterType.OpenBrace,
CharacterType.CloseBrace,
CharacterType.CloseBracket,
CharacterType.OpenBracket,
CharacterType.Fullstop,
CharacterType.SingleQuote,
CharacterType.Spaces,
CharacterType.Pipe
]);
const userTemplateSchema = z
.string()
.trim()
.max(255)
.refine((el) => validateUsernameTemplateCharacters(el))
.refine((el) =>
isValidHandleBarTemplate(el, {
allowedExpressions: (val) => ["randomUsername", "unixTimestamp", "identity.name"].includes(val)
})
);
export const registerDynamicSecretRouter = async (server: FastifyZodProvider) => {
server.route({
method: "POST",
@ -79,8 +52,7 @@ export const registerDynamicSecretRouter = async (server: FastifyZodProvider) =>
path: z.string().describe(DYNAMIC_SECRETS.CREATE.path).trim().default("/").transform(removeTrailingSlash),
environmentSlug: z.string().describe(DYNAMIC_SECRETS.CREATE.environmentSlug).min(1),
name: slugSchema({ min: 1, max: 64, field: "Name" }).describe(DYNAMIC_SECRETS.CREATE.name),
metadata: ResourceMetadataSchema.optional(),
metadata: ResourceMetadataSchema.optional()
usernameTemplate: userTemplateSchema.optional()
}),
response: {
200: z.object({
@ -101,6 +73,39 @@ export const registerDynamicSecretRouter = async (server: FastifyZodProvider) =>
}
});
server.route({
method: "POST",
url: "/entra-id/users",
config: {
rateLimit: readLimit
},
schema: {
body: z.object({
tenantId: z.string().min(1).describe("The tenant ID of the Azure Entra ID"),
applicationId: z.string().min(1).describe("The application ID of the Azure Entra ID App Registration"),
clientSecret: z.string().min(1).describe("The client secret of the Azure Entra ID App Registration")
}),
response: {
200: z
.object({
name: z.string().min(1).describe("The name of the user"),
id: z.string().min(1).describe("The ID of the user"),
email: z.string().min(1).describe("The email of the user")
})
.array()
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const data = await server.services.dynamicSecret.fetchAzureEntraIdUsers({
tenantId: req.body.tenantId,
applicationId: req.body.applicationId,
clientSecret: req.body.clientSecret
});
return data;
}
});
server.route({
method: "PATCH",
url: "/:name",
@ -145,8 +150,7 @@ export const registerDynamicSecretRouter = async (server: FastifyZodProvider) =>
})
.nullable(),
newName: z.string().describe(DYNAMIC_SECRETS.UPDATE.newName).optional(),
metadata: ResourceMetadataSchema.optional(),
metadata: ResourceMetadataSchema.optional()
usernameTemplate: userTemplateSchema.nullable().optional()
})
}),
response: {
@ -324,37 +328,4 @@ export const registerDynamicSecretRouter = async (server: FastifyZodProvider) =>
return { leases };
}
});
server.route({
method: "POST",
url: "/entra-id/users",
config: {
rateLimit: readLimit
},
schema: {
body: z.object({
tenantId: z.string().min(1).describe("The tenant ID of the Azure Entra ID"),
applicationId: z.string().min(1).describe("The application ID of the Azure Entra ID App Registration"),
clientSecret: z.string().min(1).describe("The client secret of the Azure Entra ID App Registration")
}),
response: {
200: z
.object({
name: z.string().min(1).describe("The name of the user"),
id: z.string().min(1).describe("The ID of the user"),
email: z.string().min(1).describe("The email of the user")
})
.array()
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const data = await server.services.dynamicSecret.fetchAzureEntraIdUsers({
tenantId: req.body.tenantId,
applicationId: req.body.applicationId,
clientSecret: req.body.clientSecret
});
return data;
}
});
};

View File

@ -48,9 +48,7 @@ export const registerGroupRouter = async (server: FastifyZodProvider) => {
id: z.string().trim().describe(GROUPS.GET_BY_ID.id)
}),
response: {
200: GroupsSchema.extend({
200: GroupsSchema
customRoleSlug: z.string().nullable()
})
}
},
handler: async (req) => {

View File

@ -6,7 +6,6 @@ import { registerAssumePrivilegeRouter } from "./assume-privilege-router";
import { registerAuditLogStreamRouter } from "./audit-log-stream-router";
import { registerCaCrlRouter } from "./certificate-authority-crl-router";
import { registerDynamicSecretLeaseRouter } from "./dynamic-secret-lease-router";
import { registerKubernetesDynamicSecretLeaseRouter } from "./dynamic-secret-lease-routers/kubernetes-lease-router";
import { registerDynamicSecretRouter } from "./dynamic-secret-router";
import { registerExternalKmsRouter } from "./external-kms-router";
import { registerGatewayRouter } from "./gateway-router";
@ -19,7 +18,6 @@ import { registerLdapRouter } from "./ldap-router";
import { registerLicenseRouter } from "./license-router";
import { registerOidcRouter } from "./oidc-router";
import { registerOrgRoleRouter } from "./org-role-router";
import { registerPITRouter } from "./pit-router";
import { registerProjectRoleRouter } from "./project-role-router";
import { registerProjectRouter } from "./project-router";
import { registerRateLimitRouter } from "./rate-limit-router";
@ -55,7 +53,6 @@ export const registerV1EERoutes = async (server: FastifyZodProvider) => {
{ prefix: "/workspace" }
);
await server.register(registerSnapshotRouter, { prefix: "/secret-snapshot" });
await server.register(registerPITRouter, { prefix: "/pit" });
await server.register(registerSecretApprovalPolicyRouter, { prefix: "/secret-approvals" });
await server.register(registerSecretApprovalRequestRouter, {
prefix: "/secret-approval-requests"
@ -72,7 +69,6 @@ export const registerV1EERoutes = async (server: FastifyZodProvider) => {
async (dynamicSecretRouter) => {
await dynamicSecretRouter.register(registerDynamicSecretRouter);
await dynamicSecretRouter.register(registerDynamicSecretLeaseRouter, { prefix: "/leases" });
await dynamicSecretRouter.register(registerKubernetesDynamicSecretLeaseRouter, { prefix: "/leases/kubernetes" });
},
{ prefix: "/dynamic-secrets" }
);

View File

@ -1,416 +0,0 @@
/* eslint-disable @typescript-eslint/no-base-to-string */
import { z } from "zod";
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import { removeTrailingSlash } from "@app/lib/fn";
import { readLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { booleanSchema } from "@app/server/routes/sanitizedSchemas";
import { AuthMode } from "@app/services/auth/auth-type";
import { commitChangesResponseSchema, resourceChangeSchema } from "@app/services/folder-commit/folder-commit-schemas";
const commitHistoryItemSchema = z.object({
id: z.string(),
folderId: z.string(),
actorType: z.string(),
actorMetadata: z.unknown().optional(),
message: z.string().optional().nullable(),
commitId: z.string(),
createdAt: z.string().or(z.date()),
envId: z.string()
});
const folderStateSchema = z.array(
z.object({
type: z.string(),
id: z.string(),
versionId: z.string(),
secretKey: z.string().optional(),
secretVersion: z.number().optional(),
folderName: z.string().optional(),
folderVersion: z.number().optional()
})
);
export const registerPITRouter = async (server: FastifyZodProvider) => {
// Get commits count for a folder
server.route({
method: "GET",
url: "/commits/count",
config: {
rateLimit: readLimit
},
schema: {
querystring: z.object({
environment: z.string().trim(),
path: z.string().trim().default("/").transform(removeTrailingSlash),
projectId: z.string().trim()
}),
response: {
200: z.object({
count: z.number(),
folderId: z.string()
})
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const result = await server.services.pit.getCommitsCount({
actor: req.permission?.type,
actorId: req.permission?.id,
actorOrgId: req.permission?.orgId,
actorAuthMethod: req.permission?.authMethod,
projectId: req.query.projectId,
environment: req.query.environment,
path: req.query.path
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId: req.query.projectId,
event: {
type: EventType.GET_PROJECT_PIT_COMMIT_COUNT,
metadata: {
environment: req.query.environment,
path: req.query.path,
commitCount: result.count.toString()
}
}
});
return result;
}
});
// Get all commits for a folder
server.route({
method: "GET",
url: "/commits",
config: {
rateLimit: readLimit
},
schema: {
querystring: z.object({
environment: z.string().trim(),
path: z.string().trim().default("/").transform(removeTrailingSlash),
projectId: z.string().trim(),
offset: z.coerce.number().min(0).default(0),
limit: z.coerce.number().min(1).max(100).default(20),
search: z.string().trim().optional(),
sort: z.enum(["asc", "desc"]).default("desc")
}),
response: {
200: z.object({
commits: commitHistoryItemSchema.array(),
total: z.number(),
hasMore: z.boolean()
})
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const result = await server.services.pit.getCommitsForFolder({
actor: req.permission?.type,
actorId: req.permission?.id,
actorOrgId: req.permission?.orgId,
actorAuthMethod: req.permission?.authMethod,
projectId: req.query.projectId,
environment: req.query.environment,
path: req.query.path,
offset: req.query.offset,
limit: req.query.limit,
search: req.query.search,
sort: req.query.sort
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId: req.query.projectId,
event: {
type: EventType.GET_PROJECT_PIT_COMMITS,
metadata: {
environment: req.query.environment,
path: req.query.path,
commitCount: result.commits.length.toString(),
offset: req.query.offset.toString(),
limit: req.query.limit.toString(),
search: req.query.search,
sort: req.query.sort
}
}
});
return result;
}
});
// Get commit changes for a specific commit
server.route({
method: "GET",
url: "/commits/:commitId/changes",
config: {
rateLimit: readLimit
},
schema: {
params: z.object({
commitId: z.string().trim()
}),
querystring: z.object({
projectId: z.string().trim()
}),
response: {
200: commitChangesResponseSchema
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const result = await server.services.pit.getCommitChanges({
actor: req.permission?.type,
actorId: req.permission?.id,
actorOrgId: req.permission?.orgId,
actorAuthMethod: req.permission?.authMethod,
projectId: req.query.projectId,
commitId: req.params.commitId
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId: req.query.projectId,
event: {
type: EventType.GET_PROJECT_PIT_COMMIT_CHANGES,
metadata: {
commitId: req.params.commitId,
changesCount: (result.changes.changes?.length || 0).toString()
}
}
});
return result;
}
});
// Retrieve rollback changes for a commit
server.route({
method: "GET",
url: "/commits/:commitId/compare",
config: {
rateLimit: readLimit
},
schema: {
params: z.object({
commitId: z.string().trim()
}),
querystring: z.object({
folderId: z.string().trim(),
environment: z.string().trim(),
deepRollback: booleanSchema.default(false),
secretPath: z.string().trim().default("/").transform(removeTrailingSlash),
projectId: z.string().trim()
}),
response: {
200: z.array(
z.object({
folderId: z.string(),
folderName: z.string(),
folderPath: z.string().optional(),
changes: z.array(resourceChangeSchema)
})
)
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const result = await server.services.pit.compareCommitChanges({
actor: req.permission?.type,
actorId: req.permission?.id,
actorOrgId: req.permission?.orgId,
actorAuthMethod: req.permission?.authMethod,
projectId: req.query.projectId,
commitId: req.params.commitId,
folderId: req.query.folderId,
environment: req.query.environment,
deepRollback: req.query.deepRollback,
secretPath: req.query.secretPath
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId: req.query.projectId,
event: {
type: EventType.PIT_COMPARE_FOLDER_STATES,
metadata: {
targetCommitId: req.params.commitId,
folderId: req.query.folderId,
deepRollback: req.query.deepRollback,
diffsCount: result.length.toString(),
environment: req.query.environment,
folderPath: req.query.secretPath
}
}
});
return result;
}
});
// Rollback to a previous commit
server.route({
method: "POST",
url: "/commits/:commitId/rollback",
config: {
rateLimit: readLimit
},
schema: {
params: z.object({
commitId: z.string().trim()
}),
body: z.object({
folderId: z.string().trim(),
deepRollback: z.boolean().default(false),
message: z.string().max(256).trim().optional(),
environment: z.string().trim(),
projectId: z.string().trim()
}),
response: {
200: z.object({
success: z.boolean(),
secretChangesCount: z.number().optional(),
folderChangesCount: z.number().optional(),
totalChanges: z.number().optional()
})
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const result = await server.services.pit.rollbackToCommit({
actor: req.permission?.type,
actorId: req.permission?.id,
actorOrgId: req.permission?.orgId,
actorAuthMethod: req.permission?.authMethod,
projectId: req.body.projectId,
commitId: req.params.commitId,
folderId: req.body.folderId,
deepRollback: req.body.deepRollback,
message: req.body.message,
environment: req.body.environment
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId: req.body.projectId,
event: {
type: EventType.PIT_ROLLBACK_COMMIT,
metadata: {
targetCommitId: req.params.commitId,
environment: req.body.environment,
folderId: req.body.folderId,
deepRollback: req.body.deepRollback,
message: req.body.message || "Rollback to previous commit",
totalChanges: result.totalChanges?.toString() || "0"
}
}
});
return result;
}
});
// Revert commit
server.route({
method: "POST",
url: "/commits/:commitId/revert",
config: {
rateLimit: readLimit
},
schema: {
params: z.object({
commitId: z.string().trim()
}),
body: z.object({
projectId: z.string().trim()
}),
response: {
200: z.object({
success: z.boolean(),
message: z.string(),
originalCommitId: z.string(),
revertCommitId: z.string().optional(),
changesReverted: z.number().optional()
})
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const result = await server.services.pit.revertCommit({
actor: req.permission?.type,
actorId: req.permission?.id,
actorOrgId: req.permission?.orgId,
actorAuthMethod: req.permission?.authMethod,
projectId: req.body.projectId,
commitId: req.params.commitId
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId: req.body.projectId,
event: {
type: EventType.PIT_REVERT_COMMIT,
metadata: {
commitId: req.params.commitId,
revertCommitId: result.revertCommitId,
changesReverted: result.changesReverted?.toString()
}
}
});
return result;
}
});
// Folder state at commit
server.route({
method: "GET",
url: "/commits/:commitId",
config: {
rateLimit: readLimit
},
schema: {
params: z.object({
commitId: z.string().trim()
}),
querystring: z.object({
folderId: z.string().trim(),
projectId: z.string().trim()
}),
response: {
200: folderStateSchema
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const result = await server.services.pit.getFolderStateAtCommit({
actor: req.permission?.type,
actorId: req.permission?.id,
actorOrgId: req.permission?.orgId,
actorAuthMethod: req.permission?.authMethod,
projectId: req.query.projectId,
commitId: req.params.commitId
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId: req.query.projectId,
event: {
type: EventType.PIT_GET_FOLDER_STATE,
metadata: {
commitId: req.params.commitId,
folderId: req.query.folderId,
resourceCount: result.length.toString()
}
}
});
return result;
}
});
};

View File

@ -270,6 +270,7 @@ export const registerScimRouter = async (server: FastifyZodProvider) => {
}),
body: z.object({
schemas: z.array(z.string()),
id: z.string().trim(),
userName: z.string().trim(),
name: z
.object({
@ -277,6 +278,7 @@ export const registerScimRouter = async (server: FastifyZodProvider) => {
givenName: z.string().trim().optional()
})
.optional(),
displayName: z.string().trim(),
emails: z
.array(
z.object({

View File

@ -1,7 +1,7 @@
import { nanoid } from "nanoid";
import { z } from "zod";
import { ApproverType, BypasserType } from "@app/ee/services/access-approval-policy/access-approval-policy-types";
import { ApproverType } from "@app/ee/services/access-approval-policy/access-approval-policy-types";
import { removeTrailingSlash } from "@app/lib/fn";
import { EnforcementLevel } from "@app/lib/types";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
@ -30,19 +30,10 @@ export const registerSecretApprovalPolicyRouter = async (server: FastifyZodProvi
approvers: z
.discriminatedUnion("type", [
z.object({ type: z.literal(ApproverType.Group), id: z.string() }),
z.object({ type: z.literal(ApproverType.User), id: z.string().optional(), username: z.string().optional() })
z.object({ type: z.literal(ApproverType.User), id: z.string().optional(), name: z.string().optional() })
])
.array()
.min(1, { message: "At least one approver should be provided" })
.min(1, { message: "At least one approver should be provided" }),
.max(100, "Cannot have more than 100 approvers"),
bypassers: z
.discriminatedUnion("type", [
z.object({ type: z.literal(BypasserType.Group), id: z.string() }),
z.object({ type: z.literal(BypasserType.User), id: z.string().optional(), username: z.string().optional() })
])
.array()
.max(100, "Cannot have more than 100 bypassers")
.optional(),
approvals: z.number().min(1).default(1),
enforcementLevel: z.nativeEnum(EnforcementLevel).default(EnforcementLevel.Hard),
allowedSelfApprovals: z.boolean().default(true)
@ -84,19 +75,10 @@ export const registerSecretApprovalPolicyRouter = async (server: FastifyZodProvi
approvers: z
.discriminatedUnion("type", [
z.object({ type: z.literal(ApproverType.Group), id: z.string() }),
z.object({ type: z.literal(ApproverType.User), id: z.string().optional(), username: z.string().optional() })
z.object({ type: z.literal(ApproverType.User), id: z.string().optional(), name: z.string().optional() })
])
.array()
.min(1, { message: "At least one approver should be provided" })
.min(1, { message: "At least one approver should be provided" }),
.max(100, "Cannot have more than 100 approvers"),
bypassers: z
.discriminatedUnion("type", [
z.object({ type: z.literal(BypasserType.Group), id: z.string() }),
z.object({ type: z.literal(BypasserType.User), id: z.string().optional(), username: z.string().optional() })
])
.array()
.max(100, "Cannot have more than 100 bypassers")
.optional(),
approvals: z.number().min(1).default(1),
secretPath: z
.string()
@ -175,12 +157,6 @@ export const registerSecretApprovalPolicyRouter = async (server: FastifyZodProvi
id: z.string().nullable().optional(),
type: z.nativeEnum(ApproverType)
})
.array(),
bypassers: z
.object({
id: z.string().nullable().optional(),
type: z.nativeEnum(BypasserType)
})
.array()
})
.array()
@ -217,14 +193,7 @@ export const registerSecretApprovalPolicyRouter = async (server: FastifyZodProvi
.object({
id: z.string().nullable().optional(),
type: z.nativeEnum(ApproverType),
username: z.string().nullable().optional()
name: z.string().nullable().optional()
})
.array(),
bypassers: z
.object({
id: z.string().nullable().optional(),
type: z.nativeEnum(BypasserType),
username: z.string().nullable().optional()
})
.array()
})

View File

@ -47,11 +47,6 @@ export const registerSecretApprovalRequestRouter = async (server: FastifyZodProv
userId: z.string().nullable().optional()
})
.array(),
bypassers: z
.object({
userId: z.string().nullable().optional()
})
.array(),
secretPath: z.string().optional().nullable(),
enforcementLevel: z.string(),
deletedAt: z.date().nullish(),
@ -271,7 +266,6 @@ export const registerSecretApprovalRequestRouter = async (server: FastifyZodProv
name: z.string(),
approvals: z.number(),
approvers: approvalRequestUser.array(),
bypassers: approvalRequestUser.array(),
secretPath: z.string().optional().nullable(),
enforcementLevel: z.string(),
deletedAt: z.date().nullish(),
@ -285,7 +279,6 @@ export const registerSecretApprovalRequestRouter = async (server: FastifyZodProv
commits: secretRawSchema
.omit({ _id: true, environment: true, workspace: true, type: true, version: true, secretValue: true })
.extend({
secretValueHidden: z.boolean(),
secretValue: z.string().optional(),
isRotatedSecret: z.boolean().optional(),
op: z.string(),
@ -297,7 +290,6 @@ export const registerSecretApprovalRequestRouter = async (server: FastifyZodProv
version: z.number(),
secretKey: z.string(),
secretValue: z.string().optional(),
secretValueHidden: z.boolean(),
secretComment: z.string().optional()
})
.optional()
@ -308,7 +300,6 @@ export const registerSecretApprovalRequestRouter = async (server: FastifyZodProv
version: z.number(),
secretKey: z.string(),
secretValue: z.string().optional(),
secretValueHidden: z.boolean(),
secretComment: z.string().optional(),
tags: SanitizedTagSchema.array().optional(),
secretMetadata: ResourceMetadataSchema.nullish()

View File

@ -65,10 +65,9 @@ export const registerSnapshotRouter = async (server: FastifyZodProvider) => {
rateLimit: writeLimit
},
schema: {
hide: true,
hide: false,
deprecated: true,
tags: [ApiDocsTags.Projects],
description: "(Deprecated) Roll back project secrets to those captured in a secret snapshot version.",
description: "Roll back project secrets to those captured in a secret snapshot version.",
security: [
{
bearerAuth: []
@ -85,10 +84,6 @@ export const registerSnapshotRouter = async (server: FastifyZodProvider) => {
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.API_KEY, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
throw new Error(
"This endpoint is deprecated. Please use the new PIT recovery system. More information is available at: https://infisical.com/docs/documentation/platform/pit-recovery."
);
const secretSnapshot = await server.services.snapshot.rollbackSnapshot({
actor: req.permission.type,
actorId: req.permission.id,

View File

@ -2,10 +2,6 @@ import {
registerSecretRotationV2Router,
SECRET_ROTATION_REGISTER_ROUTER_MAP
} from "@app/ee/routes/v2/secret-rotation-v2-routers";
import {
registerSecretScanningV2Router,
SECRET_SCANNING_REGISTER_ROUTER_MAP
} from "@app/ee/routes/v2/secret-scanning-v2-routers";
import { registerIdentityProjectAdditionalPrivilegeRouter } from "./identity-project-additional-privilege-router";
import { registerProjectRoleRouter } from "./project-role-router";
@ -35,17 +31,4 @@ export const registerV2EERoutes = async (server: FastifyZodProvider) => {
},
{ prefix: "/secret-rotations" }
);
await server.register(
async (secretScanningV2Router) => {
// register generic secret scanning endpoints
await secretScanningV2Router.register(registerSecretScanningV2Router);
// register service-specific secret scanning endpoints (gitlab/github, etc.)
for await (const [type, router] of Object.entries(SECRET_SCANNING_REGISTER_ROUTER_MAP)) {
await secretScanningV2Router.register(router, { prefix: `data-sources/${type}` });
}
},
{ prefix: "/secret-scanning" }
);
};

View File

@ -5,8 +5,6 @@ import { registerAwsIamUserSecretRotationRouter } from "./aws-iam-user-secret-ro
import { registerAzureClientSecretRotationRouter } from "./azure-client-secret-rotation-router";
import { registerLdapPasswordRotationRouter } from "./ldap-password-rotation-router";
import { registerMsSqlCredentialsRotationRouter } from "./mssql-credentials-rotation-router";
import { registerMySqlCredentialsRotationRouter } from "./mysql-credentials-rotation-router";
import { registerOracleDBCredentialsRotationRouter } from "./oracledb-credentials-rotation-router";
import { registerPostgresCredentialsRotationRouter } from "./postgres-credentials-rotation-router";
export * from "./secret-rotation-v2-router";
@ -17,8 +15,6 @@ export const SECRET_ROTATION_REGISTER_ROUTER_MAP: Record<
> = {
[SecretRotation.PostgresCredentials]: registerPostgresCredentialsRotationRouter,
[SecretRotation.MsSqlCredentials]: registerMsSqlCredentialsRotationRouter,
[SecretRotation.MySqlCredentials]: registerMySqlCredentialsRotationRouter,
[SecretRotation.OracleDBCredentials]: registerOracleDBCredentialsRotationRouter,
[SecretRotation.Auth0ClientSecret]: registerAuth0ClientSecretRotationRouter,
[SecretRotation.AzureClientSecret]: registerAzureClientSecretRotationRouter,
[SecretRotation.AwsIamUserSecret]: registerAwsIamUserSecretRotationRouter,

View File

@ -1,19 +0,0 @@
import {
CreateMySqlCredentialsRotationSchema,
MySqlCredentialsRotationSchema,
UpdateMySqlCredentialsRotationSchema
} from "@app/ee/services/secret-rotation-v2/mysql-credentials";
import { SecretRotation } from "@app/ee/services/secret-rotation-v2/secret-rotation-v2-enums";
import { SqlCredentialsRotationGeneratedCredentialsSchema } from "@app/ee/services/secret-rotation-v2/shared/sql-credentials";
import { registerSecretRotationEndpoints } from "./secret-rotation-v2-endpoints";
export const registerMySqlCredentialsRotationRouter = async (server: FastifyZodProvider) =>
registerSecretRotationEndpoints({
type: SecretRotation.MySqlCredentials,
server,
responseSchema: MySqlCredentialsRotationSchema,
createSchema: CreateMySqlCredentialsRotationSchema,
updateSchema: UpdateMySqlCredentialsRotationSchema,
generatedCredentialsSchema: SqlCredentialsRotationGeneratedCredentialsSchema
});

View File

@ -1,19 +0,0 @@
import {
CreateOracleDBCredentialsRotationSchema,
OracleDBCredentialsRotationSchema,
UpdateOracleDBCredentialsRotationSchema
} from "@app/ee/services/secret-rotation-v2/oracledb-credentials";
import { SecretRotation } from "@app/ee/services/secret-rotation-v2/secret-rotation-v2-enums";
import { SqlCredentialsRotationGeneratedCredentialsSchema } from "@app/ee/services/secret-rotation-v2/shared/sql-credentials";
import { registerSecretRotationEndpoints } from "./secret-rotation-v2-endpoints";
export const registerOracleDBCredentialsRotationRouter = async (server: FastifyZodProvider) =>
registerSecretRotationEndpoints({
type: SecretRotation.OracleDBCredentials,
server,
responseSchema: OracleDBCredentialsRotationSchema,
createSchema: CreateOracleDBCredentialsRotationSchema,
updateSchema: UpdateOracleDBCredentialsRotationSchema,
generatedCredentialsSchema: SqlCredentialsRotationGeneratedCredentialsSchema
});

View File

@ -6,8 +6,6 @@ import { AwsIamUserSecretRotationListItemSchema } from "@app/ee/services/secret-
import { AzureClientSecretRotationListItemSchema } from "@app/ee/services/secret-rotation-v2/azure-client-secret";
import { LdapPasswordRotationListItemSchema } from "@app/ee/services/secret-rotation-v2/ldap-password";
import { MsSqlCredentialsRotationListItemSchema } from "@app/ee/services/secret-rotation-v2/mssql-credentials";
import { MySqlCredentialsRotationListItemSchema } from "@app/ee/services/secret-rotation-v2/mysql-credentials";
import { OracleDBCredentialsRotationListItemSchema } from "@app/ee/services/secret-rotation-v2/oracledb-credentials";
import { PostgresCredentialsRotationListItemSchema } from "@app/ee/services/secret-rotation-v2/postgres-credentials";
import { SecretRotationV2Schema } from "@app/ee/services/secret-rotation-v2/secret-rotation-v2-union-schema";
import { ApiDocsTags, SecretRotations } from "@app/lib/api-docs";
@ -18,8 +16,6 @@ import { AuthMode } from "@app/services/auth/auth-type";
const SecretRotationV2OptionsSchema = z.discriminatedUnion("type", [
PostgresCredentialsRotationListItemSchema,
MsSqlCredentialsRotationListItemSchema,
MySqlCredentialsRotationListItemSchema,
OracleDBCredentialsRotationListItemSchema,
Auth0ClientSecretRotationListItemSchema,
AzureClientSecretRotationListItemSchema,
AwsIamUserSecretRotationListItemSchema,

View File

@ -1,16 +0,0 @@
import { registerSecretScanningEndpoints } from "@app/ee/routes/v2/secret-scanning-v2-routers/secret-scanning-v2-endpoints";
import {
CreateGitHubDataSourceSchema,
GitHubDataSourceSchema,
UpdateGitHubDataSourceSchema
} from "@app/ee/services/secret-scanning-v2/github";
import { SecretScanningDataSource } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
export const registerGitHubSecretScanningRouter = async (server: FastifyZodProvider) =>
registerSecretScanningEndpoints({
type: SecretScanningDataSource.GitHub,
server,
responseSchema: GitHubDataSourceSchema,
createSchema: CreateGitHubDataSourceSchema,
updateSchema: UpdateGitHubDataSourceSchema
});

View File

@ -1,12 +0,0 @@
import { SecretScanningDataSource } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
import { registerGitHubSecretScanningRouter } from "./github-secret-scanning-router";
export * from "./secret-scanning-v2-router";
export const SECRET_SCANNING_REGISTER_ROUTER_MAP: Record<
SecretScanningDataSource,
(server: FastifyZodProvider) => Promise<void>
> = {
[SecretScanningDataSource.GitHub]: registerGitHubSecretScanningRouter
};

View File

@ -1,593 +0,0 @@
import { z } from "zod";
import { SecretScanningResourcesSchema, SecretScanningScansSchema } from "@app/db/schemas";
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import {
SecretScanningDataSource,
SecretScanningScanStatus
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
import { SECRET_SCANNING_DATA_SOURCE_NAME_MAP } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-maps";
import {
TSecretScanningDataSource,
TSecretScanningDataSourceInput
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-types";
import { ApiDocsTags, SecretScanningDataSources } from "@app/lib/api-docs";
import { startsWithVowel } from "@app/lib/fn";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
export const registerSecretScanningEndpoints = <
T extends TSecretScanningDataSource,
I extends TSecretScanningDataSourceInput
>({
server,
type,
createSchema,
updateSchema,
responseSchema
}: {
type: SecretScanningDataSource;
server: FastifyZodProvider;
createSchema: z.ZodType<{
name: string;
projectId: string;
connectionId?: string;
config: Partial<I["config"]>;
description?: string | null;
isAutoScanEnabled?: boolean;
}>;
updateSchema: z.ZodType<{
name?: string;
config?: Partial<I["config"]>;
description?: string | null;
isAutoScanEnabled?: boolean;
}>;
responseSchema: z.ZodTypeAny;
}) => {
const sourceType = SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type];
server.route({
method: "GET",
url: `/`,
config: {
rateLimit: readLimit
},
schema: {
hide: false,
tags: [ApiDocsTags.SecretScanning],
description: `List the ${sourceType} Data Sources for the specified project.`,
querystring: z.object({
projectId: z
.string()
.trim()
.min(1, "Project ID required")
.describe(SecretScanningDataSources.LIST(type).projectId)
}),
response: {
200: z.object({ dataSources: responseSchema.array() })
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const {
query: { projectId }
} = req;
const dataSources = (await server.services.secretScanningV2.listSecretScanningDataSourcesByProjectId(
{ projectId, type },
req.permission
)) as T[];
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId,
event: {
type: EventType.SECRET_SCANNING_DATA_SOURCE_LIST,
metadata: {
type,
count: dataSources.length,
dataSourceIds: dataSources.map((source) => source.id)
}
}
});
return { dataSources };
}
});
server.route({
method: "GET",
url: "/:dataSourceId",
config: {
rateLimit: readLimit
},
schema: {
hide: false,
tags: [ApiDocsTags.SecretScanning],
description: `Get the specified ${sourceType} Data Source by ID.`,
params: z.object({
dataSourceId: z.string().uuid().describe(SecretScanningDataSources.GET_BY_ID(type).dataSourceId)
}),
response: {
200: z.object({ dataSource: responseSchema })
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const { dataSourceId } = req.params;
const dataSource = (await server.services.secretScanningV2.findSecretScanningDataSourceById(
{ dataSourceId, type },
req.permission
)) as T;
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId: dataSource.projectId,
event: {
type: EventType.SECRET_SCANNING_DATA_SOURCE_GET,
metadata: {
dataSourceId,
type
}
}
});
return { dataSource };
}
});
server.route({
method: "GET",
url: `/data-source-name/:dataSourceName`,
config: {
rateLimit: readLimit
},
schema: {
hide: false,
tags: [ApiDocsTags.SecretScanning],
description: `Get the specified ${sourceType} Data Source by name and project ID.`,
params: z.object({
sourceName: z
.string()
.trim()
.min(1, "Data Source name required")
.describe(SecretScanningDataSources.GET_BY_NAME(type).sourceName)
}),
querystring: z.object({
projectId: z
.string()
.trim()
.min(1, "Project ID required")
.describe(SecretScanningDataSources.GET_BY_NAME(type).projectId)
}),
response: {
200: z.object({ dataSource: responseSchema })
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const { sourceName } = req.params;
const { projectId } = req.query;
const dataSource = (await server.services.secretScanningV2.findSecretScanningDataSourceByName(
{ sourceName, projectId, type },
req.permission
)) as T;
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId,
event: {
type: EventType.SECRET_SCANNING_DATA_SOURCE_GET,
metadata: {
dataSourceId: dataSource.id,
type
}
}
});
return { dataSource };
}
});
server.route({
method: "POST",
url: "/",
config: {
rateLimit: writeLimit
},
schema: {
hide: false,
tags: [ApiDocsTags.SecretScanning],
description: `Create ${
startsWithVowel(sourceType) ? "an" : "a"
} ${sourceType} Data Source for the specified project.`,
body: createSchema,
response: {
200: z.object({ dataSource: responseSchema })
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const dataSource = (await server.services.secretScanningV2.createSecretScanningDataSource(
{ ...req.body, type },
req.permission
)) as T;
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId: dataSource.projectId,
event: {
type: EventType.SECRET_SCANNING_DATA_SOURCE_CREATE,
metadata: {
dataSourceId: dataSource.id,
type,
...req.body
}
}
});
return { dataSource };
}
});
server.route({
method: "PATCH",
url: "/:dataSourceId",
config: {
rateLimit: writeLimit
},
schema: {
hide: false,
tags: [ApiDocsTags.SecretScanning],
description: `Update the specified ${sourceType} Data Source.`,
params: z.object({
dataSourceId: z.string().uuid().describe(SecretScanningDataSources.UPDATE(type).dataSourceId)
}),
body: updateSchema,
response: {
200: z.object({ dataSource: responseSchema })
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const { dataSourceId } = req.params;
const dataSource = (await server.services.secretScanningV2.updateSecretScanningDataSource(
{ ...req.body, dataSourceId, type },
req.permission
)) as T;
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId: dataSource.projectId,
event: {
type: EventType.SECRET_SCANNING_DATA_SOURCE_UPDATE,
metadata: {
dataSourceId,
type,
...req.body
}
}
});
return { dataSource };
}
});
server.route({
method: "DELETE",
url: `/:dataSourceId`,
config: {
rateLimit: writeLimit
},
schema: {
hide: false,
tags: [ApiDocsTags.SecretScanning],
description: `Delete the specified ${sourceType} Data Source.`,
params: z.object({
dataSourceId: z.string().uuid().describe(SecretScanningDataSources.DELETE(type).dataSourceId)
}),
response: {
200: z.object({ dataSource: responseSchema })
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const { dataSourceId } = req.params;
const dataSource = (await server.services.secretScanningV2.deleteSecretScanningDataSource(
{ type, dataSourceId },
req.permission
)) as T;
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId: dataSource.projectId,
event: {
type: EventType.SECRET_SCANNING_DATA_SOURCE_DELETE,
metadata: {
type,
dataSourceId
}
}
});
return { dataSource };
}
});
server.route({
method: "POST",
url: `/:dataSourceId/scan`,
config: {
rateLimit: writeLimit
},
schema: {
hide: false,
tags: [ApiDocsTags.SecretScanning],
description: `Trigger a scan for the specified ${sourceType} Data Source.`,
params: z.object({
dataSourceId: z.string().uuid().describe(SecretScanningDataSources.SCAN(type).dataSourceId)
}),
response: {
200: z.object({ dataSource: responseSchema })
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const { dataSourceId } = req.params;
const dataSource = (await server.services.secretScanningV2.triggerSecretScanningDataSourceScan(
{ type, dataSourceId },
req.permission
)) as T;
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId: dataSource.projectId,
event: {
type: EventType.SECRET_SCANNING_DATA_SOURCE_TRIGGER_SCAN,
metadata: {
type,
dataSourceId
}
}
});
return { dataSource };
}
});
server.route({
method: "POST",
url: `/:dataSourceId/resources/:resourceId/scan`,
config: {
rateLimit: writeLimit
},
schema: {
hide: false,
tags: [ApiDocsTags.SecretScanning],
description: `Trigger a scan for the specified ${sourceType} Data Source resource.`,
params: z.object({
dataSourceId: z.string().uuid().describe(SecretScanningDataSources.SCAN(type).dataSourceId),
resourceId: z.string().uuid().describe(SecretScanningDataSources.SCAN(type).resourceId)
}),
response: {
200: z.object({ dataSource: responseSchema })
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const { dataSourceId, resourceId } = req.params;
const dataSource = (await server.services.secretScanningV2.triggerSecretScanningDataSourceScan(
{ type, dataSourceId, resourceId },
req.permission
)) as T;
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId: dataSource.projectId,
event: {
type: EventType.SECRET_SCANNING_DATA_SOURCE_TRIGGER_SCAN,
metadata: {
type,
dataSourceId,
resourceId
}
}
});
return { dataSource };
}
});
server.route({
method: "GET",
url: "/:dataSourceId/resources",
config: {
rateLimit: readLimit
},
schema: {
hide: false,
tags: [ApiDocsTags.SecretScanning],
description: `Get the resources associated with the specified ${sourceType} Data Source by ID.`,
params: z.object({
dataSourceId: z.string().uuid().describe(SecretScanningDataSources.LIST_RESOURCES(type).dataSourceId)
}),
response: {
200: z.object({ resources: SecretScanningResourcesSchema.array() })
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const { dataSourceId } = req.params;
const { resources, projectId } = await server.services.secretScanningV2.listSecretScanningResourcesByDataSourceId(
{ dataSourceId, type },
req.permission
);
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId,
event: {
type: EventType.SECRET_SCANNING_RESOURCE_LIST,
metadata: {
dataSourceId,
type,
resourceIds: resources.map((resource) => resource.id),
count: resources.length
}
}
});
return { resources };
}
});
server.route({
method: "GET",
url: "/:dataSourceId/scans",
config: {
rateLimit: readLimit
},
schema: {
hide: false,
tags: [ApiDocsTags.SecretScanning],
description: `Get the scans associated with the specified ${sourceType} Data Source by ID.`,
params: z.object({
dataSourceId: z.string().uuid().describe(SecretScanningDataSources.LIST_SCANS(type).dataSourceId)
}),
response: {
200: z.object({ scans: SecretScanningScansSchema.array() })
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const { dataSourceId } = req.params;
const { scans, projectId } = await server.services.secretScanningV2.listSecretScanningScansByDataSourceId(
{ dataSourceId, type },
req.permission
);
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId,
event: {
type: EventType.SECRET_SCANNING_SCAN_LIST,
metadata: {
dataSourceId,
type,
count: scans.length
}
}
});
return { scans };
}
});
// not exposed, for UI only
server.route({
method: "GET",
url: "/:dataSourceId/resources-dashboard",
config: {
rateLimit: readLimit
},
schema: {
tags: [ApiDocsTags.SecretScanning],
params: z.object({
dataSourceId: z.string().uuid()
}),
response: {
200: z.object({
resources: SecretScanningResourcesSchema.extend({
lastScannedAt: z.date().nullish(),
lastScanStatus: z.nativeEnum(SecretScanningScanStatus).nullish(),
lastScanStatusMessage: z.string().nullish(),
unresolvedFindings: z.number()
}).array()
})
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const { dataSourceId } = req.params;
const { resources, projectId } =
await server.services.secretScanningV2.listSecretScanningResourcesWithDetailsByDataSourceId(
{ dataSourceId, type },
req.permission
);
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId,
event: {
type: EventType.SECRET_SCANNING_RESOURCE_LIST,
metadata: {
dataSourceId,
type,
resourceIds: resources.map((resource) => resource.id),
count: resources.length
}
}
});
return { resources };
}
});
server.route({
method: "GET",
url: "/:dataSourceId/scans-dashboard",
config: {
rateLimit: readLimit
},
schema: {
tags: [ApiDocsTags.SecretScanning],
params: z.object({
dataSourceId: z.string().uuid()
}),
response: {
200: z.object({
scans: SecretScanningScansSchema.extend({
unresolvedFindings: z.number(),
resolvedFindings: z.number(),
resourceName: z.string()
}).array()
})
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const { dataSourceId } = req.params;
const { scans, projectId } =
await server.services.secretScanningV2.listSecretScanningScansWithDetailsByDataSourceId(
{ dataSourceId, type },
req.permission
);
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId,
event: {
type: EventType.SECRET_SCANNING_SCAN_LIST,
metadata: {
dataSourceId,
type,
count: scans.length
}
}
});
return { scans };
}
});
};

View File

@ -1,416 +0,0 @@
import { z } from "zod";
import { SecretScanningConfigsSchema } from "@app/db/schemas";
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import { GitHubDataSourceListItemSchema } from "@app/ee/services/secret-scanning-v2/github";
import {
SecretScanningFindingStatus,
SecretScanningScanStatus
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
import {
SecretScanningDataSourceSchema,
SecretScanningFindingSchema
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-union-schemas";
import {
ApiDocsTags,
SecretScanningConfigs,
SecretScanningDataSources,
SecretScanningFindings
} from "@app/lib/api-docs";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
const SecretScanningDataSourceOptionsSchema = z.discriminatedUnion("type", [GitHubDataSourceListItemSchema]);
export const registerSecretScanningV2Router = async (server: FastifyZodProvider) => {
server.route({
method: "GET",
url: "/data-sources/options",
config: {
rateLimit: readLimit
},
schema: {
hide: false,
tags: [ApiDocsTags.SecretScanning],
description: "List the available Secret Scanning Data Source Options.",
response: {
200: z.object({
dataSourceOptions: SecretScanningDataSourceOptionsSchema.array()
})
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: () => {
const dataSourceOptions = server.services.secretScanningV2.listSecretScanningDataSourceOptions();
return { dataSourceOptions };
}
});
server.route({
method: "GET",
url: "/data-sources",
config: {
rateLimit: readLimit
},
schema: {
hide: false,
tags: [ApiDocsTags.SecretScanning],
description: "List all the Secret Scanning Data Sources for the specified project.",
querystring: z.object({
projectId: z.string().trim().min(1, "Project ID required").describe(SecretScanningDataSources.LIST().projectId)
}),
response: {
200: z.object({ dataSources: SecretScanningDataSourceSchema.array() })
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const {
query: { projectId },
permission
} = req;
const dataSources = await server.services.secretScanningV2.listSecretScanningDataSourcesByProjectId(
{ projectId },
permission
);
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId,
event: {
type: EventType.SECRET_SCANNING_DATA_SOURCE_LIST,
metadata: {
dataSourceIds: dataSources.map((dataSource) => dataSource.id),
count: dataSources.length
}
}
});
return { dataSources };
}
});
server.route({
method: "GET",
url: "/findings",
config: {
rateLimit: readLimit
},
schema: {
hide: false,
tags: [ApiDocsTags.SecretScanning],
description: "List all the Secret Scanning Findings for the specified project.",
querystring: z.object({
projectId: z.string().trim().min(1, "Project ID required").describe(SecretScanningFindings.LIST.projectId)
}),
response: {
200: z.object({ findings: SecretScanningFindingSchema.array() })
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const {
query: { projectId },
permission
} = req;
const findings = await server.services.secretScanningV2.listSecretScanningFindingsByProjectId(
projectId,
permission
);
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId,
event: {
type: EventType.SECRET_SCANNING_FINDING_LIST,
metadata: {
findingIds: findings.map((finding) => finding.id),
count: findings.length
}
}
});
return { findings };
}
});
server.route({
method: "PATCH",
url: "/findings/:findingId",
config: {
rateLimit: writeLimit
},
schema: {
hide: false,
tags: [ApiDocsTags.SecretScanning],
description: "Update the specified Secret Scanning Finding.",
params: z.object({
findingId: z.string().trim().min(1, "Finding ID required").describe(SecretScanningFindings.UPDATE.findingId)
}),
body: z.object({
status: z.nativeEnum(SecretScanningFindingStatus).optional().describe(SecretScanningFindings.UPDATE.status),
remarks: z.string().nullish().describe(SecretScanningFindings.UPDATE.remarks)
}),
response: {
200: z.object({ finding: SecretScanningFindingSchema })
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const {
params: { findingId },
body,
permission
} = req;
const { finding, projectId } = await server.services.secretScanningV2.updateSecretScanningFindingById(
{ findingId, ...body },
permission
);
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId,
event: {
type: EventType.SECRET_SCANNING_FINDING_UPDATE,
metadata: {
findingId,
...body
}
}
});
return { finding };
}
});
server.route({
method: "PATCH",
url: "/findings",
config: {
rateLimit: writeLimit
},
schema: {
hide: false,
tags: [ApiDocsTags.SecretScanning],
description: "Update one or more Secret Scanning Findings in a batch.",
body: z
.object({
findingId: z.string().trim().min(1, "Finding ID required").describe(SecretScanningFindings.UPDATE.findingId),
status: z.nativeEnum(SecretScanningFindingStatus).optional().describe(SecretScanningFindings.UPDATE.status),
remarks: z.string().nullish().describe(SecretScanningFindings.UPDATE.remarks)
})
.array()
.max(500),
response: {
200: z.object({ findings: SecretScanningFindingSchema.array() })
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const { body, permission } = req;
const updatedFindingPromises = body.map(async (findingUpdatePayload) => {
const { finding, projectId } = await server.services.secretScanningV2.updateSecretScanningFindingById(
findingUpdatePayload,
permission
);
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId,
event: {
type: EventType.SECRET_SCANNING_FINDING_UPDATE,
metadata: findingUpdatePayload
}
});
return finding;
});
const findings = await Promise.all(updatedFindingPromises);
return { findings };
}
});
server.route({
method: "GET",
url: "/configs",
config: {
rateLimit: readLimit
},
schema: {
hide: false,
tags: [ApiDocsTags.SecretScanning],
description: "Get the Secret Scanning Config for the specified project.",
querystring: z.object({
projectId: z
.string()
.trim()
.min(1, "Project ID required")
.describe(SecretScanningConfigs.GET_BY_PROJECT_ID.projectId)
}),
response: {
200: z.object({
config: z.object({ content: z.string().nullish(), projectId: z.string(), updatedAt: z.date().nullish() })
})
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const {
query: { projectId },
permission
} = req;
const config = await server.services.secretScanningV2.findSecretScanningConfigByProjectId(projectId, permission);
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId,
event: {
type: EventType.SECRET_SCANNING_CONFIG_GET
}
});
return { config };
}
});
server.route({
method: "PATCH",
url: "/configs",
config: {
rateLimit: writeLimit
},
schema: {
hide: false,
tags: [ApiDocsTags.SecretScanning],
description: "Update the specified Secret Scanning Configuration.",
querystring: z.object({
projectId: z.string().trim().min(1, "Project ID required").describe(SecretScanningConfigs.UPDATE.projectId)
}),
body: z.object({
content: z.string().nullable().describe(SecretScanningConfigs.UPDATE.content)
}),
response: {
200: z.object({ config: SecretScanningConfigsSchema })
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const {
query: { projectId },
body,
permission
} = req;
const config = await server.services.secretScanningV2.upsertSecretScanningConfig(
{ projectId, ...body },
permission
);
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId,
event: {
type: EventType.SECRET_SCANNING_CONFIG_UPDATE,
metadata: body
}
});
return { config };
}
});
// not exposed, for UI only
server.route({
method: "GET",
url: "/data-sources-dashboard",
config: {
rateLimit: readLimit
},
schema: {
querystring: z.object({
projectId: z.string().trim().min(1, "Project ID required")
}),
response: {
200: z.object({
dataSources: z
.intersection(
SecretScanningDataSourceSchema,
z.object({
lastScannedAt: z.date().nullish(),
lastScanStatus: z.nativeEnum(SecretScanningScanStatus).nullish(),
lastScanStatusMessage: z.string().nullish(),
unresolvedFindings: z.number().nullish()
})
)
.array()
})
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const {
query: { projectId },
permission
} = req;
const dataSources = await server.services.secretScanningV2.listSecretScanningDataSourcesWithDetailsByProjectId(
{ projectId },
permission
);
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId,
event: {
type: EventType.SECRET_SCANNING_DATA_SOURCE_LIST,
metadata: {
dataSourceIds: dataSources.map((dataSource) => dataSource.id),
count: dataSources.length
}
}
});
return { dataSources };
}
});
server.route({
method: "GET",
url: "/unresolved-findings-count",
config: {
rateLimit: readLimit
},
schema: {
tags: [ApiDocsTags.SecretScanning],
querystring: z.object({
projectId: z.string().trim().min(1, "Project ID required").describe(SecretScanningFindings.LIST.projectId)
}),
response: {
200: z.object({ unresolvedFindings: z.number() })
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const {
query: { projectId },
permission
} = req;
const unresolvedFindings =
await server.services.secretScanningV2.getSecretScanningUnresolvedFindingsCountByProjectId(
projectId,
permission
);
return { unresolvedFindings };
}
});
};
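
Reviewer note: the PATCH /findings route above accepts an array body capped at 500 updates and writes one audit log entry per finding. A hedged client sketch (host, prefix, and token are placeholders; valid status strings come from the SecretScanningFindingStatus enum, whose members are not shown here):

// Illustrative batch update against the PATCH /findings route; prefix and auth are assumptions.
type FindingUpdate = {
  findingId: string;
  status?: string; // one of the SecretScanningFindingStatus values
  remarks?: string | null;
};

export async function batchUpdateFindings(
  baseUrl: string,
  token: string,
  updates: FindingUpdate[]
): Promise<void> {
  if (updates.length > 500) {
    throw new Error("The route validates a maximum of 500 finding updates per request");
  }
  const res = await fetch(`${baseUrl}/api/v2/secret-scanning/findings`, {
    method: "PATCH",
    headers: { Authorization: `Bearer ${token}`, "Content-Type": "application/json" },
    body: JSON.stringify(updates)
  });
  if (!res.ok) throw new Error(`Batch finding update failed with status ${res.status}`);
}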

View File

@ -1,17 +1,10 @@
import { TDbClient } from "@app/db"; import { TDbClient } from "@app/db";
import { TableName } from "@app/db/schemas"; import { TableName } from "@app/db/schemas";
import { ormify, TOrmify } from "@app/lib/knex"; import { ormify } from "@app/lib/knex";
export type TAccessApprovalPolicyApproverDALFactory = TOrmify<TableName.AccessApprovalPolicyApprover>; export type TAccessApprovalPolicyApproverDALFactory = ReturnType<typeof accessApprovalPolicyApproverDALFactory>;
export const accessApprovalPolicyApproverDALFactory = (db: TDbClient) => { export const accessApprovalPolicyApproverDALFactory = (db: TDbClient) => {
const accessApprovalPolicyApproverOrm = ormify(db, TableName.AccessApprovalPolicyApprover); const accessApprovalPolicyApproverOrm = ormify(db, TableName.AccessApprovalPolicyApprover);
return { ...accessApprovalPolicyApproverOrm }; return { ...accessApprovalPolicyApproverOrm };
}; };
export type TAccessApprovalPolicyBypasserDALFactory = TOrmify<TableName.AccessApprovalPolicyBypasser>;
export const accessApprovalPolicyBypasserDALFactory = (db: TDbClient) => {
const accessApprovalPolicyBypasserOrm = ormify(db, TableName.AccessApprovalPolicyBypasser);
return { ...accessApprovalPolicyBypasserOrm };
};
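
Reviewer note: in the base branch's version of this file, both factories are plain ormify wrappers, so they inherit the generic TOrmify surface rather than defining custom queries. A rough sketch of how such a DAL is consumed (the seeding helper and row shape below are illustrative, not actual call sites from this diff):

// Illustrative consumer of the approver DAL; mirrors the insertMany usage seen in the
// access-approval-policy service later in this diff, but the helper itself is hypothetical.
import { TDbClient } from "@app/db";

import { accessApprovalPolicyApproverDALFactory } from "./access-approval-policy-approver-dal";

export const seedPolicyApprovers = async (db: TDbClient, policyId: string, approverUserIds: string[]) => {
  const approverDAL = accessApprovalPolicyApproverDALFactory(db);

  // ormify-generated helpers (insertMany, delete, ...) also accept an optional transaction handle.
  await approverDAL.insertMany(approverUserIds.map((approverUserId) => ({ approverUserId, policyId })));
};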

View File

@ -1,365 +1,15 @@
import { Knex } from "knex"; import { Knex } from "knex";
import { TDbClient } from "@app/db"; import { TDbClient } from "@app/db";
import { AccessApprovalPoliciesSchema, TableName, TAccessApprovalPolicies, TUsers } from "@app/db/schemas"; import { AccessApprovalPoliciesSchema, TableName, TAccessApprovalPolicies } from "@app/db/schemas";
import { DatabaseError } from "@app/lib/errors"; import { DatabaseError } from "@app/lib/errors";
import { buildFindFilter, ormify, selectAllTableCols, sqlNestRelationships, TFindFilter, TOrmify } from "@app/lib/knex"; import { buildFindFilter, ormify, selectAllTableCols, sqlNestRelationships, TFindFilter } from "@app/lib/knex";
import { import { ApproverType } from "./access-approval-policy-types";
ApproverType,
BypasserType,
TCreateAccessApprovalPolicy,
TDeleteAccessApprovalPolicy,
TGetAccessApprovalPolicyByIdDTO,
TGetAccessPolicyCountByEnvironmentDTO,
TListAccessApprovalPoliciesDTO,
TUpdateAccessApprovalPolicy
} from "./access-approval-policy-types";
export interface TAccessApprovalPolicyDALFactory export type TAccessApprovalPolicyDALFactory = ReturnType<typeof accessApprovalPolicyDALFactory>;
extends Omit<TOrmify<TableName.AccessApprovalPolicy>, "findById" | "find"> {
find: (
filter: TFindFilter<
TAccessApprovalPolicies & {
projectId: string;
}
>,
customFilter?: {
policyId?: string;
},
tx?: Knex
) => Promise<
{
approvers: (
| {
id: string | null | undefined;
type: ApproverType.User;
name: string;
sequence: number | null | undefined;
approvalsRequired: number | null | undefined;
}
| {
id: string | null | undefined;
type: ApproverType.Group;
sequence: number | null | undefined;
approvalsRequired: number | null | undefined;
}
)[];
name: string;
id: string;
createdAt: Date;
updatedAt: Date;
approvals: number;
envId: string;
enforcementLevel: string;
allowedSelfApprovals: boolean;
secretPath?: string | null | undefined;
deletedAt?: Date | null | undefined;
environment: {
id: string;
name: string;
slug: string;
};
projectId: string;
bypassers: (
| {
id: string | null | undefined;
type: BypasserType.User;
name: string;
}
| {
id: string | null | undefined;
type: BypasserType.Group;
}
)[];
}[]
>;
findById: (
policyId: string,
tx?: Knex
) => Promise<
| {
approvers: {
id: string | null | undefined;
type: string;
sequence: number | null | undefined;
approvalsRequired: number | null | undefined;
}[];
name: string;
id: string;
createdAt: Date;
updatedAt: Date;
approvals: number;
envId: string;
enforcementLevel: string;
allowedSelfApprovals: boolean;
secretPath?: string | null | undefined;
deletedAt?: Date | null | undefined;
environment: {
id: string;
name: string;
slug: string;
};
projectId: string;
}
| undefined
>;
softDeleteById: (
policyId: string,
tx?: Knex
) => Promise<{
name: string;
id: string;
createdAt: Date;
updatedAt: Date;
approvals: number;
envId: string;
enforcementLevel: string;
allowedSelfApprovals: boolean;
secretPath?: string | null | undefined;
deletedAt?: Date | null | undefined;
}>;
findLastValidPolicy: (
{
envId,
secretPath
}: {
envId: string;
secretPath: string;
},
tx?: Knex
) => Promise<
| {
name: string;
id: string;
createdAt: Date;
updatedAt: Date;
approvals: number;
envId: string;
enforcementLevel: string;
allowedSelfApprovals: boolean;
secretPath?: string | null | undefined;
deletedAt?: Date | null | undefined;
}
| undefined
>;
}
export interface TAccessApprovalPolicyServiceFactory { export const accessApprovalPolicyDALFactory = (db: TDbClient) => {
getAccessPolicyCountByEnvSlug: ({
actor,
actorOrgId,
actorAuthMethod,
projectSlug,
actorId,
envSlug
}: TGetAccessPolicyCountByEnvironmentDTO) => Promise<{
count: number;
}>;
createAccessApprovalPolicy: ({
name,
actor,
actorId,
actorOrgId,
secretPath,
actorAuthMethod,
approvals,
approvers,
bypassers,
projectSlug,
environment,
enforcementLevel,
allowedSelfApprovals,
approvalsRequired
}: TCreateAccessApprovalPolicy) => Promise<{
environment: {
name: string;
id: string;
createdAt: Date;
updatedAt: Date;
projectId: string;
slug: string;
position: number;
};
projectId: string;
name: string;
id: string;
createdAt: Date;
updatedAt: Date;
approvals: number;
envId: string;
enforcementLevel: string;
allowedSelfApprovals: boolean;
secretPath?: string | null | undefined;
deletedAt?: Date | null | undefined;
}>;
deleteAccessApprovalPolicy: ({
policyId,
actor,
actorId,
actorAuthMethod,
actorOrgId
}: TDeleteAccessApprovalPolicy) => Promise<{
approvers: {
id: string | null | undefined;
type: string;
sequence: number | null | undefined;
approvalsRequired: number | null | undefined;
}[];
name: string;
id: string;
createdAt: Date;
updatedAt: Date;
approvals: number;
envId: string;
enforcementLevel: string;
allowedSelfApprovals: boolean;
secretPath?: string | null | undefined;
deletedAt?: Date | null | undefined;
environment: {
id: string;
name: string;
slug: string;
};
projectId: string;
}>;
updateAccessApprovalPolicy: ({
policyId,
approvers,
bypassers,
secretPath,
name,
actorId,
actor,
actorOrgId,
actorAuthMethod,
approvals,
enforcementLevel,
allowedSelfApprovals,
approvalsRequired
}: TUpdateAccessApprovalPolicy) => Promise<{
environment: {
id: string;
name: string;
slug: string;
};
projectId: string;
name: string;
id: string;
createdAt: Date;
updatedAt: Date;
approvals: number;
envId: string;
enforcementLevel: string;
allowedSelfApprovals: boolean;
secretPath?: string | null | undefined;
deletedAt?: Date | null | undefined;
}>;
getAccessApprovalPolicyByProjectSlug: ({
actorId,
actor,
actorOrgId,
actorAuthMethod,
projectSlug
}: TListAccessApprovalPoliciesDTO) => Promise<
{
approvers: (
| {
id: string | null | undefined;
type: ApproverType;
name: string;
sequence: number | null | undefined;
approvalsRequired: number | null | undefined;
}
| {
id: string | null | undefined;
type: ApproverType;
sequence: number | null | undefined;
approvalsRequired: number | null | undefined;
}
)[];
name: string;
id: string;
createdAt: Date;
updatedAt: Date;
approvals: number;
envId: string;
enforcementLevel: string;
allowedSelfApprovals: boolean;
secretPath?: string | null | undefined;
deletedAt?: Date | null | undefined;
environment: {
id: string;
name: string;
slug: string;
};
projectId: string;
bypassers: (
| {
id: string | null | undefined;
type: BypasserType;
name: string;
}
| {
id: string | null | undefined;
type: BypasserType;
}
)[];
}[]
>;
getAccessApprovalPolicyById: ({
actorId,
actor,
actorOrgId,
actorAuthMethod,
policyId
}: TGetAccessApprovalPolicyByIdDTO) => Promise<{
approvers: (
| {
id: string | null | undefined;
type: ApproverType.User;
name: string;
sequence: number | null | undefined;
approvalsRequired: number | null | undefined;
}
| {
id: string | null | undefined;
type: ApproverType.Group;
sequence: number | null | undefined;
approvalsRequired: number | null | undefined;
}
)[];
name: string;
id: string;
createdAt: Date;
updatedAt: Date;
approvals: number;
envId: string;
enforcementLevel: string;
allowedSelfApprovals: boolean;
secretPath?: string | null | undefined;
deletedAt?: Date | null | undefined;
environment: {
id: string;
name: string;
slug: string;
};
projectId: string;
bypassers: (
| {
id: string | null | undefined;
type: BypasserType.User;
name: string;
}
| {
id: string | null | undefined;
type: BypasserType.Group;
}
)[];
}>;
}
export const accessApprovalPolicyDALFactory = (db: TDbClient): TAccessApprovalPolicyDALFactory => {
const accessApprovalPolicyOrm = ormify(db, TableName.AccessApprovalPolicy); const accessApprovalPolicyOrm = ormify(db, TableName.AccessApprovalPolicy);
const accessApprovalPolicyFindQuery = async ( const accessApprovalPolicyFindQuery = async (
@ -384,24 +34,9 @@ export const accessApprovalPolicyDALFactory = (db: TDbClient): TAccessApprovalPo
`${TableName.AccessApprovalPolicyApprover}.policyId` `${TableName.AccessApprovalPolicyApprover}.policyId`
) )
.leftJoin(TableName.Users, `${TableName.AccessApprovalPolicyApprover}.approverUserId`, `${TableName.Users}.id`) .leftJoin(TableName.Users, `${TableName.AccessApprovalPolicyApprover}.approverUserId`, `${TableName.Users}.id`)
.leftJoin(
TableName.AccessApprovalPolicyBypasser,
`${TableName.AccessApprovalPolicy}.id`,
`${TableName.AccessApprovalPolicyBypasser}.policyId`
)
.leftJoin<TUsers>(
db(TableName.Users).as("bypasserUsers"),
`${TableName.AccessApprovalPolicyBypasser}.bypasserUserId`,
`bypasserUsers.id`
)
.select(tx.ref("username").withSchema(TableName.Users).as("approverUsername")) .select(tx.ref("username").withSchema(TableName.Users).as("approverUsername"))
.select(tx.ref("username").withSchema("bypasserUsers").as("bypasserUsername"))
.select(tx.ref("approverUserId").withSchema(TableName.AccessApprovalPolicyApprover)) .select(tx.ref("approverUserId").withSchema(TableName.AccessApprovalPolicyApprover))
.select(tx.ref("approverGroupId").withSchema(TableName.AccessApprovalPolicyApprover)) .select(tx.ref("approverGroupId").withSchema(TableName.AccessApprovalPolicyApprover))
.select(tx.ref("sequence").withSchema(TableName.AccessApprovalPolicyApprover).as("approverSequence"))
.select(tx.ref("approvalsRequired").withSchema(TableName.AccessApprovalPolicyApprover))
.select(tx.ref("bypasserUserId").withSchema(TableName.AccessApprovalPolicyBypasser))
.select(tx.ref("bypasserGroupId").withSchema(TableName.AccessApprovalPolicyBypasser))
.select(tx.ref("name").withSchema(TableName.Environment).as("envName")) .select(tx.ref("name").withSchema(TableName.Environment).as("envName"))
.select(tx.ref("slug").withSchema(TableName.Environment).as("envSlug")) .select(tx.ref("slug").withSchema(TableName.Environment).as("envSlug"))
.select(tx.ref("id").withSchema(TableName.Environment).as("envId")) .select(tx.ref("id").withSchema(TableName.Environment).as("envId"))
@ -411,7 +46,7 @@ export const accessApprovalPolicyDALFactory = (db: TDbClient): TAccessApprovalPo
return result; return result;
}; };
const findById: TAccessApprovalPolicyDALFactory["findById"] = async (policyId, tx) => { const findById = async (policyId: string, tx?: Knex) => {
try { try {
const doc = await accessApprovalPolicyFindQuery(tx || db.replicaNode(), { const doc = await accessApprovalPolicyFindQuery(tx || db.replicaNode(), {
[`${TableName.AccessApprovalPolicy}.id` as "id"]: policyId [`${TableName.AccessApprovalPolicy}.id` as "id"]: policyId
@ -432,37 +67,35 @@ export const accessApprovalPolicyDALFactory = (db: TDbClient): TAccessApprovalPo
{ {
key: "approverUserId", key: "approverUserId",
label: "approvers" as const, label: "approvers" as const,
mapper: ({ approverUserId: id, approverSequence, approvalsRequired }) => ({ mapper: ({ approverUserId: id }) => ({
id, id,
type: "user", type: "user"
sequence: approverSequence,
approvalsRequired
}) })
}, },
{ {
key: "approverGroupId", key: "approverGroupId",
label: "approvers" as const, label: "approvers" as const,
mapper: ({ approverGroupId: id, approverSequence, approvalsRequired }) => ({ mapper: ({ approverGroupId: id }) => ({
id, id,
type: "group", type: "group"
sequence: approverSequence,
approvalsRequired
}) })
} }
] ]
}); });
if (!formattedDoc?.[0]) return;
return { return formattedDoc?.[0];
...formattedDoc?.[0],
approvers: formattedDoc?.[0]?.approvers.sort((a, b) => (a.sequence || 1) - (b.sequence || 1))
};
} catch (error) { } catch (error) {
throw new DatabaseError({ error, name: "FindById" }); throw new DatabaseError({ error, name: "FindById" });
} }
}; };
const find: TAccessApprovalPolicyDALFactory["find"] = async (filter, customFilter, tx) => { const find = async (
filter: TFindFilter<TAccessApprovalPolicies & { projectId: string }>,
customFilter?: {
policyId?: string;
},
tx?: Knex
) => {
try { try {
const docs = await accessApprovalPolicyFindQuery(tx || db.replicaNode(), filter, customFilter); const docs = await accessApprovalPolicyFindQuery(tx || db.replicaNode(), filter, customFilter);
@ -483,83 +116,33 @@ export const accessApprovalPolicyDALFactory = (db: TDbClient): TAccessApprovalPo
{ {
key: "approverUserId", key: "approverUserId",
label: "approvers" as const, label: "approvers" as const,
mapper: ({ approverUserId: id, approverUsername, approverSequence, approvalsRequired }) => ({ mapper: ({ approverUserId: id, approverUsername }) => ({
id, id,
type: ApproverType.User as const, type: ApproverType.User,
name: approverUsername, name: approverUsername
sequence: approverSequence,
approvalsRequired
}) })
}, },
{ {
key: "approverGroupId", key: "approverGroupId",
label: "approvers" as const, label: "approvers" as const,
mapper: ({ approverGroupId: id, approverSequence, approvalsRequired }) => ({ mapper: ({ approverGroupId: id }) => ({
id, id,
type: ApproverType.Group as const, type: ApproverType.Group
sequence: approverSequence,
approvalsRequired
})
},
{
key: "bypasserUserId",
label: "bypassers" as const,
mapper: ({ bypasserUserId: id, bypasserUsername }) => ({
id,
type: BypasserType.User as const,
name: bypasserUsername
})
},
{
key: "bypasserGroupId",
label: "bypassers" as const,
mapper: ({ bypasserGroupId: id }) => ({
id,
type: BypasserType.Group as const
}) })
} }
] ]
}); });
return formattedDocs.map((el) => ({ return formattedDocs;
...el,
approvers: el?.approvers.sort((a, b) => (a.sequence || 1) - (b.sequence || 1))
}));
} catch (error) { } catch (error) {
throw new DatabaseError({ error, name: "Find" }); throw new DatabaseError({ error, name: "Find" });
} }
}; };
const softDeleteById: TAccessApprovalPolicyDALFactory["softDeleteById"] = async (policyId, tx) => { const softDeleteById = async (policyId: string, tx?: Knex) => {
const softDeletedPolicy = await accessApprovalPolicyOrm.updateById(policyId, { deletedAt: new Date() }, tx); const softDeletedPolicy = await accessApprovalPolicyOrm.updateById(policyId, { deletedAt: new Date() }, tx);
return softDeletedPolicy; return softDeletedPolicy;
}; };
const findLastValidPolicy: TAccessApprovalPolicyDALFactory["findLastValidPolicy"] = async ( return { ...accessApprovalPolicyOrm, find, findById, softDeleteById };
{ envId, secretPath },
tx
) => {
try {
const result = await (tx || db.replicaNode())(TableName.AccessApprovalPolicy)
.where(
// eslint-disable-next-line @typescript-eslint/no-misused-promises
buildFindFilter(
{
envId,
secretPath
},
TableName.AccessApprovalPolicy
)
)
.orderBy("deletedAt", "desc")
.orderByRaw(`"deletedAt" IS NULL`)
.first();
return result;
} catch (error) {
throw new DatabaseError({ error, name: "FindLastValidPolicy" });
}
};
return { ...accessApprovalPolicyOrm, find, findById, softDeleteById, findLastValidPolicy };
}; };
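
Reviewer note: in the left-hand (base) version of the DAL above, findById nests approvers and orders them by sequence, defaulting a missing sequence to step 1. A small hedged sketch of reading that shape (helper name and logging are illustrative):

// Illustrative read of the base-branch findById result; targets the left-hand column above,
// where approvers carry sequence/approvalsRequired fields.
import { TDbClient } from "@app/db";

import { accessApprovalPolicyDALFactory } from "./access-approval-policy-dal";

export const printApprovalSteps = async (db: TDbClient, policyId: string) => {
  const accessApprovalPolicyDAL = accessApprovalPolicyDALFactory(db);
  const policy = await accessApprovalPolicyDAL.findById(policyId);
  if (!policy) return;

  for (const approver of policy.approvers) {
    // sequence is nullable; the DAL sorts with (a.sequence || 1), so treat null as step 1
    console.log(`step ${approver.sequence ?? 1}: ${approver.type} ${approver.id ?? "unknown"}`);
  }
};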

View File

@ -1,11 +1,9 @@
import { ForbiddenError } from "@casl/ability"; import { ForbiddenError } from "@casl/ability";
import { ActionProjectType } from "@app/db/schemas"; import { ActionProjectType } from "@app/db/schemas";
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service-types"; import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
import { ProjectPermissionActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission"; import { ProjectPermissionApprovalActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission";
import { BadRequestError, ForbiddenRequestError, NotFoundError } from "@app/lib/errors"; import { BadRequestError, ForbiddenRequestError, NotFoundError } from "@app/lib/errors";
import { groupBy } from "@app/lib/fn";
import { TOrgMembershipDALFactory } from "@app/services/org-membership/org-membership-dal";
import { TProjectDALFactory } from "@app/services/project/project-dal"; import { TProjectDALFactory } from "@app/services/project/project-dal";
import { TProjectEnvDALFactory } from "@app/services/project-env/project-env-dal"; import { TProjectEnvDALFactory } from "@app/services/project-env/project-env-dal";
import { TProjectMembershipDALFactory } from "@app/services/project-membership/project-membership-dal"; import { TProjectMembershipDALFactory } from "@app/services/project-membership/project-membership-dal";
@ -16,16 +14,13 @@ import { TAccessApprovalRequestReviewerDALFactory } from "../access-approval-req
import { ApprovalStatus } from "../access-approval-request/access-approval-request-types"; import { ApprovalStatus } from "../access-approval-request/access-approval-request-types";
import { TGroupDALFactory } from "../group/group-dal"; import { TGroupDALFactory } from "../group/group-dal";
import { TProjectUserAdditionalPrivilegeDALFactory } from "../project-user-additional-privilege/project-user-additional-privilege-dal"; import { TProjectUserAdditionalPrivilegeDALFactory } from "../project-user-additional-privilege/project-user-additional-privilege-dal";
import { import { TAccessApprovalPolicyApproverDALFactory } from "./access-approval-policy-approver-dal";
TAccessApprovalPolicyApproverDALFactory,
TAccessApprovalPolicyBypasserDALFactory
} from "./access-approval-policy-approver-dal";
import { TAccessApprovalPolicyDALFactory } from "./access-approval-policy-dal"; import { TAccessApprovalPolicyDALFactory } from "./access-approval-policy-dal";
import { import {
ApproverType, ApproverType,
BypasserType, TCreateAccessApprovalPolicy,
TAccessApprovalPolicyServiceFactory,
TDeleteAccessApprovalPolicy, TDeleteAccessApprovalPolicy,
TGetAccessApprovalPolicyByIdDTO,
TGetAccessPolicyCountByEnvironmentDTO, TGetAccessPolicyCountByEnvironmentDTO,
TListAccessApprovalPoliciesDTO, TListAccessApprovalPoliciesDTO,
TUpdateAccessApprovalPolicy TUpdateAccessApprovalPolicy
@ -37,20 +32,19 @@ type TAccessApprovalPolicyServiceFactoryDep = {
accessApprovalPolicyDAL: TAccessApprovalPolicyDALFactory; accessApprovalPolicyDAL: TAccessApprovalPolicyDALFactory;
projectEnvDAL: Pick<TProjectEnvDALFactory, "find" | "findOne">; projectEnvDAL: Pick<TProjectEnvDALFactory, "find" | "findOne">;
accessApprovalPolicyApproverDAL: TAccessApprovalPolicyApproverDALFactory; accessApprovalPolicyApproverDAL: TAccessApprovalPolicyApproverDALFactory;
accessApprovalPolicyBypasserDAL: TAccessApprovalPolicyBypasserDALFactory;
projectMembershipDAL: Pick<TProjectMembershipDALFactory, "find">; projectMembershipDAL: Pick<TProjectMembershipDALFactory, "find">;
groupDAL: TGroupDALFactory; groupDAL: TGroupDALFactory;
userDAL: Pick<TUserDALFactory, "find">; userDAL: Pick<TUserDALFactory, "find">;
accessApprovalRequestDAL: Pick<TAccessApprovalRequestDALFactory, "update" | "find" | "resetReviewByPolicyId">; accessApprovalRequestDAL: Pick<TAccessApprovalRequestDALFactory, "update" | "find">;
additionalPrivilegeDAL: Pick<TProjectUserAdditionalPrivilegeDALFactory, "delete">; additionalPrivilegeDAL: Pick<TProjectUserAdditionalPrivilegeDALFactory, "delete">;
accessApprovalRequestReviewerDAL: Pick<TAccessApprovalRequestReviewerDALFactory, "update" | "delete">; accessApprovalRequestReviewerDAL: Pick<TAccessApprovalRequestReviewerDALFactory, "update">;
orgMembershipDAL: Pick<TOrgMembershipDALFactory, "find">;
}; };
export type TAccessApprovalPolicyServiceFactory = ReturnType<typeof accessApprovalPolicyServiceFactory>;
export const accessApprovalPolicyServiceFactory = ({ export const accessApprovalPolicyServiceFactory = ({
accessApprovalPolicyDAL, accessApprovalPolicyDAL,
accessApprovalPolicyApproverDAL, accessApprovalPolicyApproverDAL,
accessApprovalPolicyBypasserDAL,
groupDAL, groupDAL,
permissionService, permissionService,
projectEnvDAL, projectEnvDAL,
@ -58,10 +52,9 @@ export const accessApprovalPolicyServiceFactory = ({
userDAL, userDAL,
accessApprovalRequestDAL, accessApprovalRequestDAL,
additionalPrivilegeDAL, additionalPrivilegeDAL,
accessApprovalRequestReviewerDAL, accessApprovalRequestReviewerDAL
orgMembershipDAL }: TAccessApprovalPolicyServiceFactoryDep) => {
}: TAccessApprovalPolicyServiceFactoryDep): TAccessApprovalPolicyServiceFactory => { const createAccessApprovalPolicy = async ({
const createAccessApprovalPolicy: TAccessApprovalPolicyServiceFactory["createAccessApprovalPolicy"] = async ({
name, name,
actor, actor,
actorId, actorId,
@ -70,27 +63,30 @@ export const accessApprovalPolicyServiceFactory = ({
actorAuthMethod, actorAuthMethod,
approvals, approvals,
approvers, approvers,
bypassers,
projectSlug, projectSlug,
environment, environment,
enforcementLevel, enforcementLevel,
allowedSelfApprovals, allowedSelfApprovals
approvalsRequired }: TCreateAccessApprovalPolicy) => {
}) => {
const project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId); const project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId);
if (!project) throw new NotFoundError({ message: `Project with slug '${projectSlug}' not found` }); if (!project) throw new NotFoundError({ message: `Project with slug '${projectSlug}' not found` });
// If there is a group approver people might be added to the group later to meet the approvers quota // If there is a group approver people might be added to the group later to meet the approvers quota
const groupApprovers = approvers.filter((approver) => approver.type === ApproverType.Group); const groupApprovers = approvers
.filter((approver) => approver.type === ApproverType.Group)
.map((approver) => approver.id) as string[];
const userApprovers = approvers.filter((approver) => approver.type === ApproverType.User && approver.id) as { const userApprovers = approvers
id: string; .filter((approver) => approver.type === ApproverType.User)
sequence?: number; .map((approver) => approver.id)
}[]; .filter(Boolean) as string[];
const userApproverNames = approvers.filter( const userApproverNames = approvers
(approver) => approver.type === ApproverType.User && approver.username .map((approver) => (approver.type === ApproverType.User ? approver.name : undefined))
) as { username: string; sequence?: number }[]; .filter(Boolean) as string[];
if (!groupApprovers && approvals > userApprovers.length + userApproverNames.length)
throw new BadRequestError({ message: "Approvals cannot be greater than approvers" });
const { permission } = await permissionService.getProjectPermission({ const { permission } = await permissionService.getProjectPermission({
actor, actor,
@ -102,7 +98,7 @@ export const accessApprovalPolicyServiceFactory = ({
}); });
ForbiddenError.from(permission).throwUnlessCan( ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionActions.Create, ProjectPermissionApprovalActions.Create,
ProjectPermissionSub.SecretApproval ProjectPermissionSub.SecretApproval
); );
const env = await projectEnvDAL.findOne({ slug: environment, projectId: project.id }); const env = await projectEnvDAL.findOne({ slug: environment, projectId: project.id });
@ -110,13 +106,14 @@ export const accessApprovalPolicyServiceFactory = ({
let approverUserIds = userApprovers; let approverUserIds = userApprovers;
if (userApproverNames.length) { if (userApproverNames.length) {
const approverUsersInDB = await userDAL.find({ const approverUsers = await userDAL.find({
$in: { $in: {
username: userApproverNames.map((el) => el.username) username: userApproverNames
} }
}); });
const approverUsersInDBGroupByUsername = groupBy(approverUsersInDB, (i) => i.username);
const invalidUsernames = userApproverNames.filter((el) => !approverUsersInDBGroupByUsername?.[el.username]?.[0]); const approverNamesFromDb = approverUsers.map((user) => user.username);
const invalidUsernames = userApproverNames.filter((username) => !approverNamesFromDb.includes(username));
if (invalidUsernames.length) { if (invalidUsernames.length) {
throw new BadRequestError({ throw new BadRequestError({
@ -124,52 +121,32 @@ export const accessApprovalPolicyServiceFactory = ({
}); });
} }
approverUserIds = approverUserIds.concat( approverUserIds = approverUserIds.concat(approverUsers.map((user) => user.id));
userApproverNames.map((el) => ({ }
id: approverUsersInDBGroupByUsername[el.username]?.[0].id,
sequence: el.sequence const usersPromises: Promise<
})) {
id: string;
email: string | null | undefined;
username: string;
firstName: string | null | undefined;
lastName: string | null | undefined;
isPartOfGroup: boolean;
}[]
>[] = [];
const verifyAllApprovers = [...approverUserIds];
for (const groupId of groupApprovers) {
usersPromises.push(
groupDAL.findAllGroupPossibleMembers({ orgId: actorOrgId, groupId, offset: 0 }).then((group) => group.members)
); );
} }
let groupBypassers: string[] = []; const verifyGroupApprovers = (await Promise.all(usersPromises))
let bypasserUserIds: string[] = []; .flat()
.filter((user) => user.isPartOfGroup)
.map((user) => user.id);
verifyAllApprovers.push(...verifyGroupApprovers);
if (bypassers && bypassers.length) {
groupBypassers = bypassers
.filter((bypasser) => bypasser.type === BypasserType.Group)
.map((bypasser) => bypasser.id) as string[];
const userBypassers = bypassers
.filter((bypasser) => bypasser.type === BypasserType.User)
.map((bypasser) => bypasser.id)
.filter(Boolean) as string[];
const userBypasserNames = bypassers
.map((bypasser) => (bypasser.type === BypasserType.User ? bypasser.username : undefined))
.filter(Boolean) as string[];
bypasserUserIds = userBypassers;
if (userBypasserNames.length) {
const bypasserUsers = await userDAL.find({
$in: {
username: userBypasserNames
}
});
const bypasserNamesFromDb = bypasserUsers.map((user) => user.username);
const invalidUsernames = userBypasserNames.filter((username) => !bypasserNamesFromDb.includes(username));
if (invalidUsernames.length) {
throw new BadRequestError({
message: `Invalid bypasser user: ${invalidUsernames.join(", ")}`
});
}
bypasserUserIds = bypasserUserIds.concat(bypasserUsers.map((user) => user.id));
}
}
const approvalsRequiredGroupByStepNumber = groupBy(approvalsRequired || [], (i) => i.stepNumber);
const accessApproval = await accessApprovalPolicyDAL.transaction(async (tx) => { const accessApproval = await accessApprovalPolicyDAL.transaction(async (tx) => {
const doc = await accessApprovalPolicyDAL.create( const doc = await accessApprovalPolicyDAL.create(
{ {
@ -182,16 +159,11 @@ export const accessApprovalPolicyServiceFactory = ({
}, },
tx tx
); );
if (approverUserIds.length) { if (approverUserIds.length) {
await accessApprovalPolicyApproverDAL.insertMany( await accessApprovalPolicyApproverDAL.insertMany(
approverUserIds.map((el) => ({ approverUserIds.map((userId) => ({
approverUserId: el.id, approverUserId: userId,
policyId: doc.id, policyId: doc.id
sequence: el.sequence,
approvalsRequired: el.sequence
? approvalsRequiredGroupByStepNumber?.[el.sequence]?.[0]?.numberOfApprovals
: approvals
})), })),
tx tx
); );
@ -199,32 +171,8 @@ export const accessApprovalPolicyServiceFactory = ({
if (groupApprovers) { if (groupApprovers) {
await accessApprovalPolicyApproverDAL.insertMany( await accessApprovalPolicyApproverDAL.insertMany(
groupApprovers.map((el) => ({ groupApprovers.map((groupId) => ({
approverGroupId: el.id, approverGroupId: groupId,
policyId: doc.id,
sequence: el.sequence,
approvalsRequired: el.sequence
? approvalsRequiredGroupByStepNumber?.[el.sequence]?.[0]?.numberOfApprovals
: approvals
})),
tx
);
}
if (bypasserUserIds.length) {
await accessApprovalPolicyBypasserDAL.insertMany(
bypasserUserIds.map((userId) => ({
bypasserUserId: userId,
policyId: doc.id
})),
tx
);
}
if (groupBypassers.length) {
await accessApprovalPolicyBypasserDAL.insertMany(
groupBypassers.map((groupId) => ({
bypasserGroupId: groupId,
policyId: doc.id policyId: doc.id
})), })),
tx tx
@ -233,33 +181,36 @@ export const accessApprovalPolicyServiceFactory = ({
return doc; return doc;
}); });
return { ...accessApproval, environment: env, projectId: project.id }; return { ...accessApproval, environment: env, projectId: project.id };
}; };
const getAccessApprovalPolicyByProjectSlug: TAccessApprovalPolicyServiceFactory["getAccessApprovalPolicyByProjectSlug"] = const getAccessApprovalPolicyByProjectSlug = async ({
async ({ actorId, actor, actorOrgId, actorAuthMethod, projectSlug }: TListAccessApprovalPoliciesDTO) => { actorId,
const project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId); actor,
if (!project) throw new NotFoundError({ message: `Project with slug '${projectSlug}' not found` }); actorOrgId,
actorAuthMethod,
projectSlug
}: TListAccessApprovalPoliciesDTO) => {
const project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId);
if (!project) throw new NotFoundError({ message: `Project with slug '${projectSlug}' not found` });
// Anyone in the project should be able to get the policies. // Anyone in the project should be able to get the policies.
await permissionService.getProjectPermission({ await permissionService.getProjectPermission({
actor, actor,
actorId, actorId,
projectId: project.id, projectId: project.id,
actorAuthMethod, actorAuthMethod,
actorOrgId, actorOrgId,
actionProjectType: ActionProjectType.SecretManager actionProjectType: ActionProjectType.SecretManager
}); });
const accessApprovalPolicies = await accessApprovalPolicyDAL.find({ projectId: project.id, deletedAt: null }); const accessApprovalPolicies = await accessApprovalPolicyDAL.find({ projectId: project.id, deletedAt: null });
return accessApprovalPolicies; return accessApprovalPolicies;
}; };
const updateAccessApprovalPolicy: TAccessApprovalPolicyServiceFactory["updateAccessApprovalPolicy"] = async ({ const updateAccessApprovalPolicy = async ({
policyId, policyId,
approvers, approvers,
bypassers,
secretPath, secretPath,
name, name,
actorId, actorId,
@ -268,27 +219,27 @@ export const accessApprovalPolicyServiceFactory = ({
actorAuthMethod, actorAuthMethod,
approvals, approvals,
enforcementLevel, enforcementLevel,
allowedSelfApprovals, allowedSelfApprovals
approvalsRequired
}: TUpdateAccessApprovalPolicy) => { }: TUpdateAccessApprovalPolicy) => {
const groupApprovers = approvers.filter((approver) => approver.type === ApproverType.Group); const groupApprovers = approvers
.filter((approver) => approver.type === ApproverType.Group)
.map((approver) => approver.id) as string[];
const userApprovers = approvers.filter((approver) => approver.type === ApproverType.User && approver.id) as { const userApprovers = approvers
id: string; .filter((approver) => approver.type === ApproverType.User)
sequence?: number; .map((approver) => approver.id)
}[]; .filter(Boolean) as string[];
const userApproverNames = approvers.filter(
(approver) => approver.type === ApproverType.User && approver.username const userApproverNames = approvers
) as { username: string; sequence?: number }[]; .map((approver) => (approver.type === ApproverType.User ? approver.name : undefined))
.filter(Boolean) as string[];
const accessApprovalPolicy = await accessApprovalPolicyDAL.findById(policyId); const accessApprovalPolicy = await accessApprovalPolicyDAL.findById(policyId);
if (!accessApprovalPolicy) throw new BadRequestError({ message: "Approval policy not found" }); const currentAppovals = approvals || accessApprovalPolicy.approvals;
const currentApprovals = approvals || accessApprovalPolicy.approvals;
if ( if (
groupApprovers?.length === 0 && groupApprovers?.length === 0 &&
userApprovers && userApprovers &&
currentApprovals > userApprovers.length + userApproverNames.length currentAppovals > userApprovers.length + userApproverNames.length
) { ) {
throw new BadRequestError({ message: "Approvals cannot be greater than approvers" }); throw new BadRequestError({ message: "Approvals cannot be greater than approvers" });
} }
@ -305,81 +256,11 @@ export const accessApprovalPolicyServiceFactory = ({
actionProjectType: ActionProjectType.SecretManager actionProjectType: ActionProjectType.SecretManager
}); });
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Edit, ProjectPermissionSub.SecretApproval); ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionApprovalActions.Edit,
ProjectPermissionSub.SecretApproval
);
let groupBypassers: string[] = [];
let bypasserUserIds: string[] = [];
if (bypassers && bypassers.length) {
groupBypassers = bypassers
.filter((bypasser) => bypasser.type === BypasserType.Group)
.map((bypasser) => bypasser.id) as string[];
groupBypassers = [...new Set(groupBypassers)];
const userBypassers = bypassers
.filter((bypasser) => bypasser.type === BypasserType.User)
.map((bypasser) => bypasser.id)
.filter(Boolean) as string[];
const userBypasserNames = bypassers
.map((bypasser) => (bypasser.type === BypasserType.User ? bypasser.username : undefined))
.filter(Boolean) as string[];
bypasserUserIds = userBypassers;
if (userBypasserNames.length) {
const bypasserUsers = await userDAL.find({
$in: {
username: userBypasserNames
}
});
const bypasserNamesFromDb = bypasserUsers.map((user) => user.username);
const invalidUsernames = userBypasserNames.filter((username) => !bypasserNamesFromDb.includes(username));
if (invalidUsernames.length) {
throw new BadRequestError({
message: `Invalid bypasser user: ${invalidUsernames.join(", ")}`
});
}
bypasserUserIds = [...new Set(bypasserUserIds.concat(bypasserUsers.map((user) => user.id)))];
}
// Validate user bypassers
if (bypasserUserIds.length > 0) {
const orgMemberships = await orgMembershipDAL.find({
$in: { userId: bypasserUserIds },
orgId: actorOrgId
});
if (orgMemberships.length !== bypasserUserIds.length) {
const foundUserIdsInOrg = new Set(orgMemberships.map((mem) => mem.userId));
const missingUserIds = bypasserUserIds.filter((id) => !foundUserIdsInOrg.has(id));
throw new BadRequestError({
message: `One or more specified bypasser users are not part of the organization or do not exist. Invalid or non-member user IDs: ${missingUserIds.join(", ")}`
});
}
}
// Validate group bypassers
if (groupBypassers.length > 0) {
const orgGroups = await groupDAL.find({
$in: { id: groupBypassers },
orgId: actorOrgId
});
if (orgGroups.length !== groupBypassers.length) {
const foundGroupIdsInOrg = new Set(orgGroups.map((group) => group.id));
const missingGroupIds = groupBypassers.filter((id) => !foundGroupIdsInOrg.has(id));
throw new BadRequestError({
message: `One or more specified bypasser groups are not part of the organization or do not exist. Invalid or non-member group IDs: ${missingGroupIds.join(", ")}`
});
}
}
}
const approvalsRequiredGroupByStepNumber = groupBy(approvalsRequired || [], (i) => i.stepNumber);
const updatedPolicy = await accessApprovalPolicyDAL.transaction(async (tx) => { const updatedPolicy = await accessApprovalPolicyDAL.transaction(async (tx) => {
const doc = await accessApprovalPolicyDAL.updateById( const doc = await accessApprovalPolicyDAL.updateById(
accessApprovalPolicy.id, accessApprovalPolicy.id,
@ -396,18 +277,16 @@ export const accessApprovalPolicyServiceFactory = ({
await accessApprovalPolicyApproverDAL.delete({ policyId: doc.id }, tx); await accessApprovalPolicyApproverDAL.delete({ policyId: doc.id }, tx);
if (userApprovers.length || userApproverNames.length) { if (userApprovers.length || userApproverNames.length) {
let approverUserIds = userApprovers; let userApproverIds = userApprovers;
if (userApproverNames.length) { if (userApproverNames.length) {
const approverUsersInDB = await userDAL.find({ const approverUsers = await userDAL.find({
$in: { $in: {
username: userApproverNames.map((el) => el.username) username: userApproverNames
} }
}); });
const approverUsersInDBGroupByUsername = groupBy(approverUsersInDB, (i) => i.username);
const invalidUsernames = userApproverNames.filter( const approverNamesFromDb = approverUsers.map((user) => user.username);
(el) => !approverUsersInDBGroupByUsername?.[el.username]?.[0] const invalidUsernames = userApproverNames.filter((username) => !approverNamesFromDb.includes(username));
);
if (invalidUsernames.length) { if (invalidUsernames.length) {
throw new BadRequestError({ throw new BadRequestError({
@ -415,21 +294,13 @@ export const accessApprovalPolicyServiceFactory = ({
}); });
} }
approverUserIds = approverUserIds.concat( userApproverIds = userApproverIds.concat(approverUsers.map((user) => user.id));
userApproverNames.map((el) => ({
id: approverUsersInDBGroupByUsername[el.username]?.[0].id,
sequence: el.sequence
}))
);
} }
await accessApprovalPolicyApproverDAL.insertMany( await accessApprovalPolicyApproverDAL.insertMany(
approverUserIds.map((el) => ({ userApproverIds.map((userId) => ({
approverUserId: el.id, approverUserId: userId,
policyId: doc.id, policyId: doc.id
sequence: el.sequence,
approvalsRequired: el.sequence
? approvalsRequiredGroupByStepNumber?.[el.sequence]?.[0]?.numberOfApprovals
: approvals
})), })),
tx tx
); );
@ -437,45 +308,16 @@ export const accessApprovalPolicyServiceFactory = ({
if (groupApprovers) { if (groupApprovers) {
await accessApprovalPolicyApproverDAL.insertMany( await accessApprovalPolicyApproverDAL.insertMany(
groupApprovers.map((el) => ({ groupApprovers.map((groupId) => ({
approverGroupId: el.id, approverGroupId: groupId,
policyId: doc.id,
sequence: el.sequence,
approvalsRequired: el.sequence
? approvalsRequiredGroupByStepNumber?.[el.sequence]?.[0]?.numberOfApprovals
: approvals
})),
tx
);
}
await accessApprovalPolicyBypasserDAL.delete({ policyId: doc.id }, tx);
if (bypasserUserIds.length) {
await accessApprovalPolicyBypasserDAL.insertMany(
bypasserUserIds.map((userId) => ({
bypasserUserId: userId,
policyId: doc.id policyId: doc.id
})), })),
tx tx
); );
} }
if (groupBypassers.length) {
await accessApprovalPolicyBypasserDAL.insertMany(
groupBypassers.map((groupId) => ({
bypasserGroupId: groupId,
policyId: doc.id
})),
tx
);
}
await accessApprovalRequestDAL.resetReviewByPolicyId(doc.id, tx);
return doc; return doc;
}); });
return { return {
...updatedPolicy, ...updatedPolicy,
environment: accessApprovalPolicy.environment, environment: accessApprovalPolicy.environment,
@ -483,7 +325,7 @@ export const accessApprovalPolicyServiceFactory = ({
}; };
}; };
const deleteAccessApprovalPolicy: TAccessApprovalPolicyServiceFactory["deleteAccessApprovalPolicy"] = async ({ const deleteAccessApprovalPolicy = async ({
policyId, policyId,
actor, actor,
actorId, actorId,
@ -502,7 +344,7 @@ export const accessApprovalPolicyServiceFactory = ({
actionProjectType: ActionProjectType.SecretManager actionProjectType: ActionProjectType.SecretManager
}); });
ForbiddenError.from(permission).throwUnlessCan( ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionActions.Delete, ProjectPermissionApprovalActions.Delete,
ProjectPermissionSub.SecretApproval ProjectPermissionSub.SecretApproval
); );
@ -532,7 +374,7 @@ export const accessApprovalPolicyServiceFactory = ({
return policy; return policy;
}; };
const getAccessPolicyCountByEnvSlug: TAccessApprovalPolicyServiceFactory["getAccessPolicyCountByEnvSlug"] = async ({ const getAccessPolicyCountByEnvSlug = async ({
actor, actor,
actorOrgId, actorOrgId,
actorAuthMethod, actorAuthMethod,
@ -569,13 +411,13 @@ export const accessApprovalPolicyServiceFactory = ({
return { count: policies.length }; return { count: policies.length };
}; };
const getAccessApprovalPolicyById: TAccessApprovalPolicyServiceFactory["getAccessApprovalPolicyById"] = async ({ const getAccessApprovalPolicyById = async ({
actorId, actorId,
actor, actor,
actorOrgId, actorOrgId,
actorAuthMethod, actorAuthMethod,
policyId policyId
}) => { }: TGetAccessApprovalPolicyByIdDTO) => {
const [policy] = await accessApprovalPolicyDAL.find({}, { policyId }); const [policy] = await accessApprovalPolicyDAL.find({}, { policyId });
if (!policy) { if (!policy) {
@ -593,7 +435,10 @@ export const accessApprovalPolicyServiceFactory = ({
actionProjectType: ActionProjectType.SecretManager actionProjectType: ActionProjectType.SecretManager
}); });
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.SecretApproval); ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionApprovalActions.Read,
ProjectPermissionSub.SecretApproval
);
return policy; return policy;
}; };
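
Reviewer note: every mutation in the service above funnels through the same CASL guard before touching the DAL. A compact sketch of that guard in isolation (the ability below is a stand-in; the real one comes from permissionService.getProjectPermission, and the action/subject values are the project's ProjectPermission* constants):

// Minimal CASL guard sketch with a stand-in ability; the action/subject strings are placeholders
// for the ProjectPermissionActions / ProjectPermissionSub values used in the service.
import { createMongoAbility, ForbiddenError } from "@casl/ability";

const permission = createMongoAbility([{ action: "edit", subject: "secret-approval" }]);

// Throws a ForbiddenError if the ability does not grant the action on the subject.
ForbiddenError.from(permission).throwUnlessCan("edit", "secret-approval");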

View File

@ -1,7 +1,7 @@
 import { EnforcementLevel, TProjectPermission } from "@app/lib/types";
 import { ActorAuthMethod } from "@app/services/auth/auth-type";
-import { TPermissionServiceFactory } from "../permission/permission-service-types";
+import { TPermissionServiceFactory } from "../permission/permission-service";

 export type TIsApproversValid = {
   userIds: string[];
@ -18,46 +18,25 @@ export enum ApproverType {
   User = "user"
 }

-export enum BypasserType {
-  Group = "group",
-  User = "user"
-}

 export type TCreateAccessApprovalPolicy = {
   approvals: number;
   secretPath: string;
   environment: string;
-  approvers: (
-    | { type: ApproverType.Group; id: string; sequence?: number }
-    | { type: ApproverType.User; id?: string; username?: string; sequence?: number }
-  )[];
-  bypassers?: (
-    | { type: BypasserType.Group; id: string }
-    | { type: BypasserType.User; id?: string; username?: string }
-  )[];
+  approvers: ({ type: ApproverType.Group; id: string } | { type: ApproverType.User; id?: string; name?: string })[];
   projectSlug: string;
   name: string;
   enforcementLevel: EnforcementLevel;
   allowedSelfApprovals: boolean;
-  approvalsRequired?: { numberOfApprovals: number; stepNumber: number }[];
 } & Omit<TProjectPermission, "projectId">;

 export type TUpdateAccessApprovalPolicy = {
   policyId: string;
   approvals?: number;
-  approvers: (
-    | { type: ApproverType.Group; id: string; sequence?: number }
-    | { type: ApproverType.User; id?: string; username?: string; sequence?: number }
-  )[];
-  bypassers?: (
-    | { type: BypasserType.Group; id: string }
-    | { type: BypasserType.User; id?: string; username?: string }
-  )[];
+  approvers: ({ type: ApproverType.Group; id: string } | { type: ApproverType.User; id?: string; name?: string })[];
   secretPath?: string;
   name?: string;
   enforcementLevel?: EnforcementLevel;
   allowedSelfApprovals: boolean;
-  approvalsRequired?: { numberOfApprovals: number; stepNumber: number }[];
 } & Omit<TProjectPermission, "projectId">;

 export type TDeleteAccessApprovalPolicy = {
@ -76,217 +55,3 @@ export type TGetAccessApprovalPolicyByIdDTO = {

 export type TListAccessApprovalPoliciesDTO = {
   projectSlug: string;
 } & Omit<TProjectPermission, "projectId">;
export interface TAccessApprovalPolicyServiceFactory {
getAccessPolicyCountByEnvSlug: ({
actor,
actorOrgId,
actorAuthMethod,
projectSlug,
actorId,
envSlug
}: TGetAccessPolicyCountByEnvironmentDTO) => Promise<{
count: number;
}>;
createAccessApprovalPolicy: ({
name,
actor,
actorId,
actorOrgId,
secretPath,
actorAuthMethod,
approvals,
approvers,
bypassers,
projectSlug,
environment,
enforcementLevel,
allowedSelfApprovals,
approvalsRequired
}: TCreateAccessApprovalPolicy) => Promise<{
environment: {
name: string;
id: string;
createdAt: Date;
updatedAt: Date;
projectId: string;
slug: string;
position: number;
};
projectId: string;
name: string;
id: string;
createdAt: Date;
updatedAt: Date;
approvals: number;
envId: string;
enforcementLevel: string;
allowedSelfApprovals: boolean;
secretPath?: string | null | undefined;
deletedAt?: Date | null | undefined;
}>;
deleteAccessApprovalPolicy: ({
policyId,
actor,
actorId,
actorAuthMethod,
actorOrgId
}: TDeleteAccessApprovalPolicy) => Promise<{
approvers: {
id: string | null | undefined;
type: string;
sequence: number | null | undefined;
approvalsRequired: number | null | undefined;
}[];
name: string;
id: string;
createdAt: Date;
updatedAt: Date;
approvals: number;
envId: string;
enforcementLevel: string;
allowedSelfApprovals: boolean;
secretPath?: string | null | undefined;
deletedAt?: Date | null | undefined;
environment: {
id: string;
name: string;
slug: string;
};
projectId: string;
}>;
updateAccessApprovalPolicy: ({
policyId,
approvers,
bypassers,
secretPath,
name,
actorId,
actor,
actorOrgId,
actorAuthMethod,
approvals,
enforcementLevel,
allowedSelfApprovals,
approvalsRequired
}: TUpdateAccessApprovalPolicy) => Promise<{
environment: {
id: string;
name: string;
slug: string;
};
projectId: string;
name: string;
id: string;
createdAt: Date;
updatedAt: Date;
approvals: number;
envId: string;
enforcementLevel: string;
allowedSelfApprovals: boolean;
secretPath?: string | null | undefined;
deletedAt?: Date | null | undefined;
}>;
getAccessApprovalPolicyByProjectSlug: ({
actorId,
actor,
actorOrgId,
actorAuthMethod,
projectSlug
}: TListAccessApprovalPoliciesDTO) => Promise<
{
approvers: (
| {
id: string | null | undefined;
type: ApproverType;
name: string;
sequence: number | null | undefined;
approvalsRequired: number | null | undefined;
}
| {
id: string | null | undefined;
type: ApproverType;
sequence: number | null | undefined;
approvalsRequired: number | null | undefined;
}
)[];
name: string;
id: string;
createdAt: Date;
updatedAt: Date;
approvals: number;
envId: string;
enforcementLevel: string;
allowedSelfApprovals: boolean;
secretPath?: string | null | undefined;
deletedAt?: Date | null | undefined;
environment: {
id: string;
name: string;
slug: string;
};
projectId: string;
bypassers: (
| {
id: string | null | undefined;
type: BypasserType;
name: string;
}
| {
id: string | null | undefined;
type: BypasserType;
}
)[];
}[]
>;
getAccessApprovalPolicyById: ({
actorId,
actor,
actorOrgId,
actorAuthMethod,
policyId
}: TGetAccessApprovalPolicyByIdDTO) => Promise<{
approvers: (
| {
id: string | null | undefined;
type: ApproverType.User;
name: string;
sequence: number | null | undefined;
approvalsRequired: number | null | undefined;
}
| {
id: string | null | undefined;
type: ApproverType.Group;
sequence: number | null | undefined;
approvalsRequired: number | null | undefined;
}
)[];
name: string;
id: string;
createdAt: Date;
updatedAt: Date;
approvals: number;
envId: string;
enforcementLevel: string;
allowedSelfApprovals: boolean;
secretPath?: string | null | undefined;
deletedAt?: Date | null | undefined;
environment: {
id: string;
name: string;
slug: string;
};
projectId: string;
bypassers: (
| {
id: string | null | undefined;
type: BypasserType.User;
name: string;
}
| {
id: string | null | undefined;
type: BypasserType.Group;
}
)[];
}>;
}
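
Reviewer note: to make the base-branch create DTO above concrete, here is a hedged example of the policy-specific fields passed to createAccessApprovalPolicy; the actor/org fields inherited from TProjectPermission are omitted, and every id, slug, and the enforcementLevel value are placeholders.

// Example payload shape only; ids, slugs and "hard" are placeholders, and the actor fields
// required by TProjectPermission are intentionally left out.
import { ApproverType, BypasserType } from "./access-approval-policy-types";

const examplePolicyInput = {
  projectSlug: "example-project",
  name: "prod-access-policy",
  environment: "prod",
  secretPath: "/",
  approvals: 2,
  enforcementLevel: "hard",
  allowedSelfApprovals: false,
  approvers: [
    { type: ApproverType.User, username: "alice@example.com", sequence: 1 },
    { type: ApproverType.Group, id: "00000000-0000-0000-0000-000000000000", sequence: 2 }
  ],
  bypassers: [{ type: BypasserType.User, username: "breakglass@example.com" }],
  approvalsRequired: [{ stepNumber: 1, numberOfApprovals: 1 }]
};

export default examplePolicyInput;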

View File

@ -1,450 +1,177 @@
import { Knex } from "knex"; import { Knex } from "knex";
import { TDbClient } from "@app/db"; import { TDbClient } from "@app/db";
import { import { AccessApprovalRequestsSchema, TableName, TAccessApprovalRequests, TUsers } from "@app/db/schemas";
AccessApprovalRequestsSchema,
TableName,
TAccessApprovalRequests,
TUserGroupMembership,
TUsers
} from "@app/db/schemas";
import { DatabaseError } from "@app/lib/errors"; import { DatabaseError } from "@app/lib/errors";
import { ormify, selectAllTableCols, sqlNestRelationships, TFindFilter, TOrmify } from "@app/lib/knex"; import { ormify, selectAllTableCols, sqlNestRelationships, TFindFilter } from "@app/lib/knex";
import { ApprovalStatus } from "./access-approval-request-types"; import { ApprovalStatus } from "./access-approval-request-types";
export interface TAccessApprovalRequestDALFactory extends Omit<TOrmify<TableName.AccessApprovalRequest>, "findById"> { export type TAccessApprovalRequestDALFactory = ReturnType<typeof accessApprovalRequestDALFactory>;
findById: (
id: string,
tx?: Knex
) => Promise<
| {
policy: {
approvers: (
| {
userId: string | null | undefined;
email: string | null | undefined;
firstName: string | null | undefined;
lastName: string | null | undefined;
username: string;
sequence: number | null | undefined;
approvalsRequired: number | null | undefined;
}
| {
userId: string;
email: string | null | undefined;
firstName: string | null | undefined;
lastName: string | null | undefined;
username: string;
sequence: number | null | undefined;
approvalsRequired: number | null | undefined;
}
)[];
bypassers: (
| {
userId: string | null | undefined;
email: string | null | undefined;
firstName: string | null | undefined;
lastName: string | null | undefined;
username: string;
}
| {
userId: string;
email: string | null | undefined;
firstName: string | null | undefined;
lastName: string | null | undefined;
username: string;
}
)[];
id: string;
name: string;
approvals: number;
secretPath: string | null | undefined;
enforcementLevel: string;
allowedSelfApprovals: boolean;
deletedAt: Date | null | undefined;
};
projectId: string;
environment: string;
requestedByUser: {
userId: string;
email: string | null | undefined;
firstName: string | null | undefined;
lastName: string | null | undefined;
username: string;
};
status: string;
id: string;
createdAt: Date;
updatedAt: Date;
policyId: string;
isTemporary: boolean;
requestedByUserId: string;
privilegeId?: string | null | undefined;
requestedBy?: string | null | undefined;
temporaryRange?: string | null | undefined;
permissions?: unknown;
note?: string | null | undefined;
privilegeDeletedAt?: Date | null | undefined;
reviewers: {
userId: string;
status: string;
email: string | null | undefined;
firstName: string | null | undefined;
lastName: string | null | undefined;
username: string;
}[];
approvers: (
| {
userId: string | null | undefined;
email: string | null | undefined;
firstName: string | null | undefined;
lastName: string | null | undefined;
username: string;
sequence: number | null | undefined;
approvalsRequired: number | null | undefined;
}
| {
userId: string;
email: string | null | undefined;
firstName: string | null | undefined;
lastName: string | null | undefined;
username: string;
sequence: number | null | undefined;
approvalsRequired: number | null | undefined;
}
)[];
bypassers: (
| {
userId: string | null | undefined;
email: string | null | undefined;
firstName: string | null | undefined;
lastName: string | null | undefined;
username: string;
}
| {
userId: string;
email: string | null | undefined;
firstName: string | null | undefined;
lastName: string | null | undefined;
username: string;
}
)[];
}
| undefined
>;
findRequestsWithPrivilegeByPolicyIds: (policyIds: string[]) => Promise<
{
policy: {
approvers: (
| {
userId: string | null | undefined;
sequence: number | null | undefined;
approvalsRequired: number | null | undefined;
email: string | null | undefined;
username: string;
}
| {
userId: string;
sequence: number | null | undefined;
approvalsRequired: number | null | undefined;
email: string | null | undefined;
username: string;
}
)[];
bypassers: string[];
id: string;
name: string;
approvals: number;
secretPath: string | null | undefined;
enforcementLevel: string;
allowedSelfApprovals: boolean;
envId: string;
deletedAt: Date | null | undefined;
};
projectId: string;
environment: string;
environmentName: string;
requestedByUser: {
userId: string;
email: string | null | undefined;
firstName: string | null | undefined;
lastName: string | null | undefined;
username: string;
};
privilege: {
membershipId: string;
userId: string;
projectId: string;
isTemporary: boolean;
temporaryMode: string | null | undefined;
temporaryRange: string | null | undefined;
temporaryAccessStartTime: Date | null | undefined;
temporaryAccessEndTime: Date | null | undefined;
permissions: unknown;
} | null;
isApproved: boolean;
status: string;
id: string;
createdAt: Date;
updatedAt: Date;
policyId: string;
isTemporary: boolean;
requestedByUserId: string;
privilegeId?: string | null | undefined;
requestedBy?: string | null | undefined;
temporaryRange?: string | null | undefined;
permissions?: unknown;
note?: string | null | undefined;
privilegeDeletedAt?: Date | null | undefined;
reviewers: {
userId: string;
status: string;
}[];
approvers: (
| {
userId: string | null | undefined;
sequence: number | null | undefined;
approvalsRequired: number | null | undefined;
email: string | null | undefined;
username: string;
}
| {
userId: string;
sequence: number | null | undefined;
approvalsRequired: number | null | undefined;
email: string | null | undefined;
username: string;
}
)[];
bypassers: string[];
}[]
>;
getCount: ({ projectId }: { projectId: string }) => Promise<{
pendingCount: number;
finalizedCount: number;
}>;
resetReviewByPolicyId: (policyId: string, tx?: Knex) => Promise<void>;
}
export const accessApprovalRequestDALFactory = (db: TDbClient): TAccessApprovalRequestDALFactory => { export const accessApprovalRequestDALFactory = (db: TDbClient) => {
const accessApprovalRequestOrm = ormify(db, TableName.AccessApprovalRequest); const accessApprovalRequestOrm = ormify(db, TableName.AccessApprovalRequest);
const findRequestsWithPrivilegeByPolicyIds: TAccessApprovalRequestDALFactory["findRequestsWithPrivilegeByPolicyIds"] = const findRequestsWithPrivilegeByPolicyIds = async (policyIds: string[]) => {
async (policyIds) => { try {
try { const docs = await db
const docs = await db .replicaNode()(TableName.AccessApprovalRequest)
.replicaNode()(TableName.AccessApprovalRequest) .whereIn(`${TableName.AccessApprovalRequest}.policyId`, policyIds)
.whereIn(`${TableName.AccessApprovalRequest}.policyId`, policyIds)
.leftJoin( .leftJoin(
TableName.ProjectUserAdditionalPrivilege, TableName.ProjectUserAdditionalPrivilege,
`${TableName.AccessApprovalRequest}.privilegeId`, `${TableName.AccessApprovalRequest}.privilegeId`,
`${TableName.ProjectUserAdditionalPrivilege}.id` `${TableName.ProjectUserAdditionalPrivilege}.id`
) )
.leftJoin( .leftJoin(
TableName.AccessApprovalPolicy, TableName.AccessApprovalPolicy,
`${TableName.AccessApprovalRequest}.policyId`, `${TableName.AccessApprovalRequest}.policyId`,
`${TableName.AccessApprovalPolicy}.id` `${TableName.AccessApprovalPolicy}.id`
) )
.leftJoin(
TableName.AccessApprovalRequestReviewer,
`${TableName.AccessApprovalRequest}.id`,
`${TableName.AccessApprovalRequestReviewer}.requestId`
)
.leftJoin(
TableName.AccessApprovalPolicyApprover,
`${TableName.AccessApprovalPolicy}.id`,
`${TableName.AccessApprovalPolicyApprover}.policyId`
)
.leftJoin<TUsers>(
db(TableName.Users).as("accessApprovalPolicyApproverUser"),
`${TableName.AccessApprovalPolicyApprover}.approverUserId`,
"accessApprovalPolicyApproverUser.id"
)
.leftJoin(
TableName.UserGroupMembership,
`${TableName.AccessApprovalPolicyApprover}.approverGroupId`,
`${TableName.UserGroupMembership}.groupId`
)
.leftJoin(TableName.Users, `${TableName.UserGroupMembership}.userId`, `${TableName.Users}.id`)
.leftJoin( .leftJoin(
TableName.AccessApprovalPolicyBypasser, TableName.AccessApprovalRequestReviewer,
`${TableName.AccessApprovalPolicy}.id`, `${TableName.AccessApprovalRequest}.id`,
`${TableName.AccessApprovalPolicyBypasser}.policyId` `${TableName.AccessApprovalRequestReviewer}.requestId`
) )
.leftJoin<TUserGroupMembership>( .leftJoin(
db(TableName.UserGroupMembership).as("bypasserUserGroupMembership"), TableName.AccessApprovalPolicyApprover,
`${TableName.AccessApprovalPolicyBypasser}.bypasserGroupId`, `${TableName.AccessApprovalPolicy}.id`,
`bypasserUserGroupMembership.groupId` `${TableName.AccessApprovalPolicyApprover}.policyId`
) )
.leftJoin(
TableName.UserGroupMembership,
`${TableName.AccessApprovalPolicyApprover}.approverGroupId`,
`${TableName.UserGroupMembership}.groupId`
)
.leftJoin(TableName.Users, `${TableName.UserGroupMembership}.userId`, `${TableName.Users}.id`)
.join<TUsers>( .join<TUsers>(
db(TableName.Users).as("requestedByUser"), db(TableName.Users).as("requestedByUser"),
`${TableName.AccessApprovalRequest}.requestedByUserId`, `${TableName.AccessApprovalRequest}.requestedByUserId`,
`requestedByUser.id` `requestedByUser.id`
) )
.leftJoin(TableName.Environment, `${TableName.AccessApprovalPolicy}.envId`, `${TableName.Environment}.id`) .leftJoin(TableName.Environment, `${TableName.AccessApprovalPolicy}.envId`, `${TableName.Environment}.id`)
.select(selectAllTableCols(TableName.AccessApprovalRequest)) .select(selectAllTableCols(TableName.AccessApprovalRequest))
.select( .select(
db.ref("id").withSchema(TableName.AccessApprovalPolicy).as("policyId"), db.ref("id").withSchema(TableName.AccessApprovalPolicy).as("policyId"),
db.ref("name").withSchema(TableName.AccessApprovalPolicy).as("policyName"), db.ref("name").withSchema(TableName.AccessApprovalPolicy).as("policyName"),
db.ref("approvals").withSchema(TableName.AccessApprovalPolicy).as("policyApprovals"), db.ref("approvals").withSchema(TableName.AccessApprovalPolicy).as("policyApprovals"),
db.ref("secretPath").withSchema(TableName.AccessApprovalPolicy).as("policySecretPath"), db.ref("secretPath").withSchema(TableName.AccessApprovalPolicy).as("policySecretPath"),
db.ref("enforcementLevel").withSchema(TableName.AccessApprovalPolicy).as("policyEnforcementLevel"), db.ref("enforcementLevel").withSchema(TableName.AccessApprovalPolicy).as("policyEnforcementLevel"),
db.ref("allowedSelfApprovals").withSchema(TableName.AccessApprovalPolicy).as("policyAllowedSelfApprovals"), db.ref("allowedSelfApprovals").withSchema(TableName.AccessApprovalPolicy).as("policyAllowedSelfApprovals"),
db.ref("envId").withSchema(TableName.AccessApprovalPolicy).as("policyEnvId"), db.ref("envId").withSchema(TableName.AccessApprovalPolicy).as("policyEnvId"),
db.ref("deletedAt").withSchema(TableName.AccessApprovalPolicy).as("policyDeletedAt") db.ref("deletedAt").withSchema(TableName.AccessApprovalPolicy).as("policyDeletedAt")
) )
.select(db.ref("approverUserId").withSchema(TableName.AccessApprovalPolicyApprover))
.select(db.ref("sequence").withSchema(TableName.AccessApprovalPolicyApprover).as("approverSequence"))
.select(db.ref("approvalsRequired").withSchema(TableName.AccessApprovalPolicyApprover))
.select(db.ref("userId").withSchema(TableName.UserGroupMembership).as("approverGroupUserId"))
.select(db.ref("bypasserUserId").withSchema(TableName.AccessApprovalPolicyBypasser))
.select(db.ref("userId").withSchema("bypasserUserGroupMembership").as("bypasserGroupUserId"))
.select(
db.ref("email").withSchema("accessApprovalPolicyApproverUser").as("approverEmail"),
db.ref("email").withSchema(TableName.Users).as("approverGroupEmail"),
db.ref("username").withSchema("accessApprovalPolicyApproverUser").as("approverUsername"),
db.ref("username").withSchema(TableName.Users).as("approverGroupUsername")
)
.select(
db.ref("projectId").withSchema(TableName.Environment),
db.ref("slug").withSchema(TableName.Environment).as("envSlug"),
db.ref("name").withSchema(TableName.Environment).as("envName")
)
.select( .select(db.ref("approverUserId").withSchema(TableName.AccessApprovalPolicyApprover))
db.ref("reviewerUserId").withSchema(TableName.AccessApprovalRequestReviewer).as("reviewerUserId"), .select(db.ref("userId").withSchema(TableName.UserGroupMembership).as("approverGroupUserId"))
db.ref("status").withSchema(TableName.AccessApprovalRequestReviewer).as("reviewerStatus")
)
// TODO: ADD SUPPORT FOR GROUPS!!!! .select(
.select( db.ref("projectId").withSchema(TableName.Environment),
db.ref("email").withSchema("requestedByUser").as("requestedByUserEmail"), db.ref("slug").withSchema(TableName.Environment).as("envSlug"),
db.ref("username").withSchema("requestedByUser").as("requestedByUserUsername"), db.ref("name").withSchema(TableName.Environment).as("envName")
db.ref("firstName").withSchema("requestedByUser").as("requestedByUserFirstName"), )
db.ref("lastName").withSchema("requestedByUser").as("requestedByUserLastName"),
db.ref("userId").withSchema(TableName.ProjectUserAdditionalPrivilege).as("privilegeUserId"), .select(
db.ref("projectId").withSchema(TableName.ProjectUserAdditionalPrivilege).as("privilegeMembershipId"), db.ref("reviewerUserId").withSchema(TableName.AccessApprovalRequestReviewer).as("reviewerUserId"),
db.ref("status").withSchema(TableName.AccessApprovalRequestReviewer).as("reviewerStatus")
)
db.ref("isTemporary").withSchema(TableName.ProjectUserAdditionalPrivilege).as("privilegeIsTemporary"), // TODO: ADD SUPPORT FOR GROUPS!!!!
db.ref("temporaryMode").withSchema(TableName.ProjectUserAdditionalPrivilege).as("privilegeTemporaryMode"), .select(
db.ref("temporaryRange").withSchema(TableName.ProjectUserAdditionalPrivilege).as("privilegeTemporaryRange"), db.ref("email").withSchema("requestedByUser").as("requestedByUserEmail"),
db db.ref("username").withSchema("requestedByUser").as("requestedByUserUsername"),
.ref("temporaryAccessStartTime") db.ref("firstName").withSchema("requestedByUser").as("requestedByUserFirstName"),
.withSchema(TableName.ProjectUserAdditionalPrivilege) db.ref("lastName").withSchema("requestedByUser").as("requestedByUserLastName"),
.as("privilegeTemporaryAccessStartTime"),
db
.ref("temporaryAccessEndTime")
.withSchema(TableName.ProjectUserAdditionalPrivilege)
.as("privilegeTemporaryAccessEndTime"),
db.ref("permissions").withSchema(TableName.ProjectUserAdditionalPrivilege).as("privilegePermissions") db.ref("userId").withSchema(TableName.ProjectUserAdditionalPrivilege).as("privilegeUserId"),
) db.ref("projectId").withSchema(TableName.ProjectUserAdditionalPrivilege).as("privilegeMembershipId"),
.orderBy(`${TableName.AccessApprovalRequest}.createdAt`, "desc");
const formattedDocs = sqlNestRelationships({ db.ref("isTemporary").withSchema(TableName.ProjectUserAdditionalPrivilege).as("privilegeIsTemporary"),
data: docs, db.ref("temporaryMode").withSchema(TableName.ProjectUserAdditionalPrivilege).as("privilegeTemporaryMode"),
key: "id", db.ref("temporaryRange").withSchema(TableName.ProjectUserAdditionalPrivilege).as("privilegeTemporaryRange"),
parentMapper: (doc) => ({ db
...AccessApprovalRequestsSchema.parse(doc), .ref("temporaryAccessStartTime")
projectId: doc.projectId, .withSchema(TableName.ProjectUserAdditionalPrivilege)
environment: doc.envSlug, .as("privilegeTemporaryAccessStartTime"),
environmentName: doc.envName, db
policy: { .ref("temporaryAccessEndTime")
id: doc.policyId, .withSchema(TableName.ProjectUserAdditionalPrivilege)
name: doc.policyName, .as("privilegeTemporaryAccessEndTime"),
approvals: doc.policyApprovals,
secretPath: doc.policySecretPath,
enforcementLevel: doc.policyEnforcementLevel,
allowedSelfApprovals: doc.policyAllowedSelfApprovals,
envId: doc.policyEnvId,
deletedAt: doc.policyDeletedAt
},
requestedByUser: {
userId: doc.requestedByUserId,
email: doc.requestedByUserEmail,
firstName: doc.requestedByUserFirstName,
lastName: doc.requestedByUserLastName,
username: doc.requestedByUserUsername
},
privilege: doc.privilegeId
? {
membershipId: doc.privilegeMembershipId,
userId: doc.privilegeUserId,
projectId: doc.projectId,
isTemporary: doc.privilegeIsTemporary,
temporaryMode: doc.privilegeTemporaryMode,
temporaryRange: doc.privilegeTemporaryRange,
temporaryAccessStartTime: doc.privilegeTemporaryAccessStartTime,
temporaryAccessEndTime: doc.privilegeTemporaryAccessEndTime,
permissions: doc.privilegePermissions
}
: null,
isApproved: doc.status === ApprovalStatus.APPROVED
}),
childrenMapper: [
{
key: "reviewerUserId",
label: "reviewers" as const,
mapper: ({ reviewerUserId: userId, reviewerStatus: status }) => (userId ? { userId, status } : undefined)
},
{
key: "approverUserId",
label: "approvers" as const,
mapper: ({ approverUserId, approverSequence, approvalsRequired, approverUsername, approverEmail }) => ({
userId: approverUserId,
sequence: approverSequence,
approvalsRequired,
email: approverEmail,
username: approverUsername
})
},
{
key: "approverGroupUserId",
label: "approvers" as const,
mapper: ({
approverGroupUserId,
approverSequence,
approvalsRequired,
approverGroupEmail,
approverGroupUsername
}) => ({
userId: approverGroupUserId,
sequence: approverSequence,
approvalsRequired,
email: approverGroupEmail,
username: approverGroupUsername
})
},
{ key: "bypasserUserId", label: "bypassers" as const, mapper: ({ bypasserUserId }) => bypasserUserId },
{
key: "bypasserGroupUserId",
label: "bypassers" as const,
mapper: ({ bypasserGroupUserId }) => bypasserGroupUserId
}
]
});
if (!formattedDocs) return []; db.ref("permissions").withSchema(TableName.ProjectUserAdditionalPrivilege).as("privilegePermissions")
)
.orderBy(`${TableName.AccessApprovalRequest}.createdAt`, "desc");
return formattedDocs.map((doc) => ({ const formattedDocs = sqlNestRelationships({
...doc, data: docs,
key: "id",
parentMapper: (doc) => ({
...AccessApprovalRequestsSchema.parse(doc),
projectId: doc.projectId,
environment: doc.envSlug,
environmentName: doc.envName,
policy: { policy: {
...doc.policy, id: doc.policyId,
approvers: doc.approvers.filter((el) => el.userId).sort((a, b) => (a.sequence || 0) - (b.sequence || 0)), name: doc.policyName,
bypassers: doc.bypassers approvals: doc.policyApprovals,
secretPath: doc.policySecretPath,
enforcementLevel: doc.policyEnforcementLevel,
allowedSelfApprovals: doc.policyAllowedSelfApprovals,
envId: doc.policyEnvId,
deletedAt: doc.policyDeletedAt
},
requestedByUser: {
userId: doc.requestedByUserId,
email: doc.requestedByUserEmail,
firstName: doc.requestedByUserFirstName,
lastName: doc.requestedByUserLastName,
username: doc.requestedByUserUsername
},
privilege: doc.privilegeId
? {
membershipId: doc.privilegeMembershipId,
userId: doc.privilegeUserId,
projectId: doc.projectId,
isTemporary: doc.privilegeIsTemporary,
temporaryMode: doc.privilegeTemporaryMode,
temporaryRange: doc.privilegeTemporaryRange,
temporaryAccessStartTime: doc.privilegeTemporaryAccessStartTime,
temporaryAccessEndTime: doc.privilegeTemporaryAccessEndTime,
permissions: doc.privilegePermissions
}
: null,
isApproved: !!doc.policyDeletedAt || !!doc.privilegeId
}),
childrenMapper: [
{
key: "reviewerUserId",
label: "reviewers" as const,
mapper: ({ reviewerUserId: userId, reviewerStatus: status }) => (userId ? { userId, status } : undefined)
},
{ key: "approverUserId", label: "approvers" as const, mapper: ({ approverUserId }) => approverUserId },
{
key: "approverGroupUserId",
label: "approvers" as const,
mapper: ({ approverGroupUserId }) => approverGroupUserId
} }
})); ]
} catch (error) { });
throw new DatabaseError({ error, name: "FindRequestsWithPrivilege" });
} if (!formattedDocs) return [];
};
return formattedDocs.map((doc) => ({
...doc,
policy: { ...doc.policy, approvers: doc.approvers }
}));
} catch (error) {
throw new DatabaseError({ error, name: "FindRequestsWithPrivilege" });
}
};
const findQuery = (filter: TFindFilter<TAccessApprovalRequests>, tx: Knex) => const findQuery = (filter: TFindFilter<TAccessApprovalRequests>, tx: Knex) =>
tx(TableName.AccessApprovalRequest) tx(TableName.AccessApprovalRequest)
@ -466,6 +193,7 @@ export const accessApprovalRequestDALFactory = (db: TDbClient): TAccessApprovalR
`${TableName.AccessApprovalPolicy}.id`, `${TableName.AccessApprovalPolicy}.id`,
`${TableName.AccessApprovalPolicyApprover}.policyId` `${TableName.AccessApprovalPolicyApprover}.policyId`
) )
.leftJoin<TUsers>( .leftJoin<TUsers>(
db(TableName.Users).as("accessApprovalPolicyApproverUser"), db(TableName.Users).as("accessApprovalPolicyApproverUser"),
`${TableName.AccessApprovalPolicyApprover}.approverUserId`, `${TableName.AccessApprovalPolicyApprover}.approverUserId`,
@ -476,33 +204,13 @@ export const accessApprovalRequestDALFactory = (db: TDbClient): TAccessApprovalR
`${TableName.AccessApprovalPolicyApprover}.approverGroupId`, `${TableName.AccessApprovalPolicyApprover}.approverGroupId`,
`${TableName.UserGroupMembership}.groupId` `${TableName.UserGroupMembership}.groupId`
) )
.leftJoin<TUsers>( .leftJoin<TUsers>(
db(TableName.Users).as("accessApprovalPolicyGroupApproverUser"), db(TableName.Users).as("accessApprovalPolicyGroupApproverUser"),
`${TableName.UserGroupMembership}.userId`, `${TableName.UserGroupMembership}.userId`,
"accessApprovalPolicyGroupApproverUser.id" "accessApprovalPolicyGroupApproverUser.id"
) )
.leftJoin(
TableName.AccessApprovalPolicyBypasser,
`${TableName.AccessApprovalPolicy}.id`,
`${TableName.AccessApprovalPolicyBypasser}.policyId`
)
.leftJoin<TUsers>(
db(TableName.Users).as("accessApprovalPolicyBypasserUser"),
`${TableName.AccessApprovalPolicyBypasser}.bypasserUserId`,
"accessApprovalPolicyBypasserUser.id"
)
.leftJoin<TUserGroupMembership>(
db(TableName.UserGroupMembership).as("bypasserUserGroupMembership"),
`${TableName.AccessApprovalPolicyBypasser}.bypasserGroupId`,
`bypasserUserGroupMembership.groupId`
)
.leftJoin<TUsers>(
db(TableName.Users).as("accessApprovalPolicyGroupBypasserUser"),
`bypasserUserGroupMembership.userId`,
"accessApprovalPolicyGroupBypasserUser.id"
)
.leftJoin( .leftJoin(
TableName.AccessApprovalRequestReviewer, TableName.AccessApprovalRequestReviewer,
`${TableName.AccessApprovalRequest}.id`, `${TableName.AccessApprovalRequest}.id`,
@ -519,8 +227,6 @@ export const accessApprovalRequestDALFactory = (db: TDbClient): TAccessApprovalR
.select(selectAllTableCols(TableName.AccessApprovalRequest)) .select(selectAllTableCols(TableName.AccessApprovalRequest))
.select( .select(
tx.ref("approverUserId").withSchema(TableName.AccessApprovalPolicyApprover), tx.ref("approverUserId").withSchema(TableName.AccessApprovalPolicyApprover),
tx.ref("sequence").withSchema(TableName.AccessApprovalPolicyApprover).as("approverSequence"),
tx.ref("approvalsRequired").withSchema(TableName.AccessApprovalPolicyApprover),
tx.ref("userId").withSchema(TableName.UserGroupMembership), tx.ref("userId").withSchema(TableName.UserGroupMembership),
tx.ref("email").withSchema("accessApprovalPolicyApproverUser").as("approverEmail"), tx.ref("email").withSchema("accessApprovalPolicyApproverUser").as("approverEmail"),
tx.ref("email").withSchema("accessApprovalPolicyGroupApproverUser").as("approverGroupEmail"), tx.ref("email").withSchema("accessApprovalPolicyGroupApproverUser").as("approverGroupEmail"),
@ -535,18 +241,6 @@ export const accessApprovalRequestDALFactory = (db: TDbClient): TAccessApprovalR
tx.ref("firstName").withSchema("requestedByUser").as("requestedByUserFirstName"), tx.ref("firstName").withSchema("requestedByUser").as("requestedByUserFirstName"),
tx.ref("lastName").withSchema("requestedByUser").as("requestedByUserLastName"), tx.ref("lastName").withSchema("requestedByUser").as("requestedByUserLastName"),
// Bypassers
tx.ref("bypasserUserId").withSchema(TableName.AccessApprovalPolicyBypasser),
tx.ref("userId").withSchema("bypasserUserGroupMembership").as("bypasserGroupUserId"),
tx.ref("email").withSchema("accessApprovalPolicyBypasserUser").as("bypasserEmail"),
tx.ref("email").withSchema("accessApprovalPolicyGroupBypasserUser").as("bypasserGroupEmail"),
tx.ref("username").withSchema("accessApprovalPolicyBypasserUser").as("bypasserUsername"),
tx.ref("username").withSchema("accessApprovalPolicyGroupBypasserUser").as("bypasserGroupUsername"),
tx.ref("firstName").withSchema("accessApprovalPolicyBypasserUser").as("bypasserFirstName"),
tx.ref("firstName").withSchema("accessApprovalPolicyGroupBypasserUser").as("bypasserGroupFirstName"),
tx.ref("lastName").withSchema("accessApprovalPolicyBypasserUser").as("bypasserLastName"),
tx.ref("lastName").withSchema("accessApprovalPolicyGroupBypasserUser").as("bypasserGroupLastName"),
tx.ref("reviewerUserId").withSchema(TableName.AccessApprovalRequestReviewer), tx.ref("reviewerUserId").withSchema(TableName.AccessApprovalRequestReviewer),
tx.ref("status").withSchema(TableName.AccessApprovalRequestReviewer).as("reviewerStatus"), tx.ref("status").withSchema(TableName.AccessApprovalRequestReviewer).as("reviewerStatus"),
@ -567,11 +261,11 @@ export const accessApprovalRequestDALFactory = (db: TDbClient): TAccessApprovalR
tx.ref("deletedAt").withSchema(TableName.AccessApprovalPolicy).as("policyDeletedAt") tx.ref("deletedAt").withSchema(TableName.AccessApprovalPolicy).as("policyDeletedAt")
); );
const findById: TAccessApprovalRequestDALFactory["findById"] = async (id, tx) => { const findById = async (id: string, tx?: Knex) => {
try { try {
const sql = findQuery({ [`${TableName.AccessApprovalRequest}.id` as "id"]: id }, tx || db.replicaNode()); const sql = findQuery({ [`${TableName.AccessApprovalRequest}.id` as "id"]: id }, tx || db.replicaNode());
const docs = await sql; const docs = await sql;
const formattedDoc = sqlNestRelationships({ const formatedDoc = sqlNestRelationships({
data: docs, data: docs,
key: "id", key: "id",
parentMapper: (el) => ({ parentMapper: (el) => ({
@ -616,17 +310,13 @@ export const accessApprovalRequestDALFactory = (db: TDbClient): TAccessApprovalR
approverEmail: email, approverEmail: email,
approverUsername: username, approverUsername: username,
approverLastName: lastName, approverLastName: lastName,
approverFirstName: firstName, approverFirstName: firstName
approverSequence,
approvalsRequired
}) => ({ }) => ({
userId: approverUserId, userId: approverUserId,
email, email,
firstName, firstName,
lastName, lastName,
username, username
sequence: approverSequence,
approvalsRequired
}) })
}, },
{ {
@ -637,45 +327,7 @@ export const accessApprovalRequestDALFactory = (db: TDbClient): TAccessApprovalR
approverGroupEmail: email, approverGroupEmail: email,
approverGroupUsername: username, approverGroupUsername: username,
approverGroupLastName: lastName, approverGroupLastName: lastName,
approverFirstName: firstName, approverFirstName: firstName
approverSequence,
approvalsRequired
}) => ({
userId,
email,
firstName,
lastName,
username,
sequence: approverSequence,
approvalsRequired
})
},
{
key: "bypasserUserId",
label: "bypassers" as const,
mapper: ({
bypasserUserId,
bypasserEmail: email,
bypasserUsername: username,
bypasserLastName: lastName,
bypasserFirstName: firstName
}) => ({
userId: bypasserUserId,
email,
firstName,
lastName,
username
})
},
{
key: "bypasserGroupUserId",
label: "bypassers" as const,
mapper: ({
userId,
bypasserGroupEmail: email,
bypasserGroupUsername: username,
bypasserGroupLastName: lastName,
bypasserFirstName: firstName
}) => ({ }) => ({
userId, userId,
email, email,
@ -686,23 +338,17 @@ export const accessApprovalRequestDALFactory = (db: TDbClient): TAccessApprovalR
} }
] ]
}); });
if (!formattedDoc?.[0]) return; if (!formatedDoc?.[0]) return;
return { return {
...formattedDoc[0], ...formatedDoc[0],
policy: { policy: { ...formatedDoc[0].policy, approvers: formatedDoc[0].approvers }
...formattedDoc[0].policy,
approvers: formattedDoc[0].approvers
.filter((el) => el.userId)
.sort((a, b) => (a.sequence || 0) - (b.sequence || 0)),
bypassers: formattedDoc[0].bypassers
}
}; };
} catch (error) { } catch (error) {
throw new DatabaseError({ error, name: "FindByIdAccessApprovalRequest" }); throw new DatabaseError({ error, name: "FindByIdAccessApprovalRequest" });
} }
}; };
const getCount: TAccessApprovalRequestDALFactory["getCount"] = async ({ projectId }) => { const getCount = async ({ projectId }: { projectId: string }) => {
try { try {
const accessRequests = await db const accessRequests = await db
.replicaNode()(TableName.AccessApprovalRequest) .replicaNode()(TableName.AccessApprovalRequest)
@ -746,20 +392,14 @@ export const accessApprovalRequestDALFactory = (db: TDbClient): TAccessApprovalR
] ]
}); });
// an approval is pending if there is no reviewer rejections, no privilege ID is set and the status is pending // an approval is pending if there is no reviewer rejections and no privilege ID is set
const pendingApprovals = formattedRequests.filter( const pendingApprovals = formattedRequests.filter(
(req) => (req) => !req.privilegeId && !req.reviewers.some((r) => r.status === ApprovalStatus.REJECTED)
!req.privilegeId &&
!req.reviewers.some((r) => r.status === ApprovalStatus.REJECTED) &&
req.status === ApprovalStatus.PENDING
); );
// an approval is finalized if there are any rejections, a privilege ID is set or the number of approvals is equal to the number of approvals required. // an approval is finalized if there are any rejections or a privilege ID is set
const finalizedApprovals = formattedRequests.filter( const finalizedApprovals = formattedRequests.filter(
(req) => (req) => req.privilegeId || req.reviewers.some((r) => r.status === ApprovalStatus.REJECTED)
req.privilegeId ||
req.reviewers.some((r) => r.status === ApprovalStatus.REJECTED) ||
req.status !== ApprovalStatus.PENDING
); );
return { pendingCount: pendingApprovals.length, finalizedCount: finalizedApprovals.length }; return { pendingCount: pendingApprovals.length, finalizedCount: finalizedApprovals.length };
@ -768,27 +408,5 @@ export const accessApprovalRequestDALFactory = (db: TDbClient): TAccessApprovalR
} }
}; };
const resetReviewByPolicyId: TAccessApprovalRequestDALFactory["resetReviewByPolicyId"] = async (policyId, tx) => { return { ...accessApprovalRequestOrm, findById, findRequestsWithPrivilegeByPolicyIds, getCount };
try {
await (tx || db)(TableName.AccessApprovalRequestReviewer)
.leftJoin(
TableName.AccessApprovalRequest,
`${TableName.AccessApprovalRequest}.id`,
`${TableName.AccessApprovalRequestReviewer}.requestId`
)
.where(`${TableName.AccessApprovalRequest}.status` as "status", ApprovalStatus.PENDING)
.where(`${TableName.AccessApprovalRequest}.policyId` as "policyId", policyId)
.del();
} catch (error) {
throw new DatabaseError({ error, name: "ResetReviewByPolicyId" });
}
};
return {
...accessApprovalRequestOrm,
findById,
findRequestsWithPrivilegeByPolicyIds,
getCount,
resetReviewByPolicyId
};
}; };
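
Both versions of this DAL feed their flat join rows through sqlNestRelationships with a parentMapper and a list of childrenMapper entries. A simplified, self-contained stand-in for that nesting step (not the real helper from @app/lib/knex, only the idea of collapsing one row per request/reviewer pair into a parent with a reviewers array; all names below are invented) might look like this:

// Simplified stand-in for the row-nesting idea used above; invented types and names.
type FlatRow = {
  id: string;
  status: string;
  reviewerUserId: string | null;
  reviewerStatus: string | null;
};

const nestReviewers = (rows: FlatRow[]) => {
  const byId = new Map<string, { id: string; status: string; reviewers: { userId: string; status: string }[] }>();
  for (const row of rows) {
    const parent = byId.get(row.id) ?? { id: row.id, status: row.status, reviewers: [] };
    if (row.reviewerUserId && row.reviewerStatus) {
      parent.reviewers.push({ userId: row.reviewerUserId, status: row.reviewerStatus });
    }
    byId.set(row.id, parent);
  }
  return [...byId.values()];
};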

View File

@ -1,10 +1,10 @@
import { TDbClient } from "@app/db"; import { TDbClient } from "@app/db";
import { TableName } from "@app/db/schemas"; import { TableName } from "@app/db/schemas";
import { ormify, TOrmify } from "@app/lib/knex"; import { ormify } from "@app/lib/knex";
export type TAccessApprovalRequestReviewerDALFactory = TOrmify<TableName.AccessApprovalRequestReviewer>; export type TAccessApprovalRequestReviewerDALFactory = ReturnType<typeof accessApprovalRequestReviewerDALFactory>;
export const accessApprovalRequestReviewerDALFactory = (db: TDbClient): TAccessApprovalRequestReviewerDALFactory => { export const accessApprovalRequestReviewerDALFactory = (db: TDbClient) => {
const secretApprovalRequestReviewerOrm = ormify(db, TableName.AccessApprovalRequestReviewer); const secretApprovalRequestReviewerOrm = ormify(db, TableName.AccessApprovalRequestReviewer);
return secretApprovalRequestReviewerOrm; return secretApprovalRequestReviewerOrm;
}; };

View File

@ -4,7 +4,6 @@ import msFn from "ms";
import { ActionProjectType, ProjectMembershipRole } from "@app/db/schemas"; import { ActionProjectType, ProjectMembershipRole } from "@app/db/schemas";
import { getConfig } from "@app/lib/config/env"; import { getConfig } from "@app/lib/config/env";
import { BadRequestError, ForbiddenRequestError, NotFoundError } from "@app/lib/errors"; import { BadRequestError, ForbiddenRequestError, NotFoundError } from "@app/lib/errors";
import { groupBy } from "@app/lib/fn";
import { ms } from "@app/lib/ms"; import { ms } from "@app/lib/ms";
import { alphaNumericNanoId } from "@app/lib/nanoid"; import { alphaNumericNanoId } from "@app/lib/nanoid";
import { EnforcementLevel } from "@app/lib/types"; import { EnforcementLevel } from "@app/lib/types";
@ -23,13 +22,20 @@ import { TUserDALFactory } from "@app/services/user/user-dal";
import { TAccessApprovalPolicyApproverDALFactory } from "../access-approval-policy/access-approval-policy-approver-dal"; import { TAccessApprovalPolicyApproverDALFactory } from "../access-approval-policy/access-approval-policy-approver-dal";
import { TAccessApprovalPolicyDALFactory } from "../access-approval-policy/access-approval-policy-dal"; import { TAccessApprovalPolicyDALFactory } from "../access-approval-policy/access-approval-policy-dal";
import { TGroupDALFactory } from "../group/group-dal"; import { TGroupDALFactory } from "../group/group-dal";
import { TPermissionServiceFactory } from "../permission/permission-service-types"; import { TPermissionServiceFactory } from "../permission/permission-service";
import { ProjectPermissionApprovalActions, ProjectPermissionSub } from "../permission/project-permission";
import { TProjectUserAdditionalPrivilegeDALFactory } from "../project-user-additional-privilege/project-user-additional-privilege-dal"; import { TProjectUserAdditionalPrivilegeDALFactory } from "../project-user-additional-privilege/project-user-additional-privilege-dal";
import { ProjectUserAdditionalPrivilegeTemporaryMode } from "../project-user-additional-privilege/project-user-additional-privilege-types"; import { ProjectUserAdditionalPrivilegeTemporaryMode } from "../project-user-additional-privilege/project-user-additional-privilege-types";
import { TAccessApprovalRequestDALFactory } from "./access-approval-request-dal"; import { TAccessApprovalRequestDALFactory } from "./access-approval-request-dal";
import { verifyRequestedPermissions } from "./access-approval-request-fns"; import { verifyRequestedPermissions } from "./access-approval-request-fns";
import { TAccessApprovalRequestReviewerDALFactory } from "./access-approval-request-reviewer-dal"; import { TAccessApprovalRequestReviewerDALFactory } from "./access-approval-request-reviewer-dal";
import { ApprovalStatus, TAccessApprovalRequestServiceFactory } from "./access-approval-request-types"; import {
ApprovalStatus,
TCreateAccessApprovalRequestDTO,
TGetAccessRequestCountDTO,
TListApprovalRequestsDTO,
TReviewAccessRequestDTO
} from "./access-approval-request-types";
type TSecretApprovalRequestServiceFactoryDep = { type TSecretApprovalRequestServiceFactoryDep = {
additionalPrivilegeDAL: Pick<TProjectUserAdditionalPrivilegeDALFactory, "create" | "findById">; additionalPrivilegeDAL: Pick<TProjectUserAdditionalPrivilegeDALFactory, "create" | "findById">;
@ -51,7 +57,7 @@ type TSecretApprovalRequestServiceFactoryDep = {
| "findOne" | "findOne"
| "getCount" | "getCount"
>; >;
accessApprovalPolicyDAL: Pick<TAccessApprovalPolicyDALFactory, "findOne" | "find" | "findLastValidPolicy">; accessApprovalPolicyDAL: Pick<TAccessApprovalPolicyDALFactory, "findOne" | "find">;
accessApprovalRequestReviewerDAL: Pick< accessApprovalRequestReviewerDAL: Pick<
TAccessApprovalRequestReviewerDALFactory, TAccessApprovalRequestReviewerDALFactory,
"create" | "find" | "findOne" | "transaction" "create" | "find" | "findOne" | "transaction"
@ -69,6 +75,8 @@ type TSecretApprovalRequestServiceFactoryDep = {
projectMicrosoftTeamsConfigDAL: Pick<TProjectMicrosoftTeamsConfigDALFactory, "getIntegrationDetailsByProject">; projectMicrosoftTeamsConfigDAL: Pick<TProjectMicrosoftTeamsConfigDALFactory, "getIntegrationDetailsByProject">;
}; };
export type TAccessApprovalRequestServiceFactory = ReturnType<typeof accessApprovalRequestServiceFactory>;
export const accessApprovalRequestServiceFactory = ({ export const accessApprovalRequestServiceFactory = ({
groupDAL, groupDAL,
projectDAL, projectDAL,
@ -85,8 +93,8 @@ export const accessApprovalRequestServiceFactory = ({
microsoftTeamsService, microsoftTeamsService,
projectMicrosoftTeamsConfigDAL, projectMicrosoftTeamsConfigDAL,
projectSlackConfigDAL projectSlackConfigDAL
}: TSecretApprovalRequestServiceFactoryDep): TAccessApprovalRequestServiceFactory => { }: TSecretApprovalRequestServiceFactoryDep) => {
const createAccessApprovalRequest: TAccessApprovalRequestServiceFactory["createAccessApprovalRequest"] = async ({ const createAccessApprovalRequest = async ({
isTemporary, isTemporary,
temporaryRange, temporaryRange,
actorId, actorId,
@ -96,7 +104,7 @@ export const accessApprovalRequestServiceFactory = ({
actorAuthMethod, actorAuthMethod,
projectSlug, projectSlug,
note note
}) => { }: TCreateAccessApprovalRequestDTO) => {
const cfg = getConfig(); const cfg = getConfig();
const project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId); const project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId);
if (!project) throw new NotFoundError({ message: `Project with slug '${projectSlug}' not found` }); if (!project) throw new NotFoundError({ message: `Project with slug '${projectSlug}' not found` });
@ -124,7 +132,7 @@ export const accessApprovalRequestServiceFactory = ({
if (!environment) throw new NotFoundError({ message: `Environment with slug '${envSlug}' not found` }); if (!environment) throw new NotFoundError({ message: `Environment with slug '${envSlug}' not found` });
const policy = await accessApprovalPolicyDAL.findLastValidPolicy({ const policy = await accessApprovalPolicyDAL.findOne({
envId: environment.id, envId: environment.id,
secretPath secretPath
}); });
@ -196,7 +204,7 @@ export const accessApprovalRequestServiceFactory = ({
const isRejected = reviewers.some((reviewer) => reviewer.status === ApprovalStatus.REJECTED); const isRejected = reviewers.some((reviewer) => reviewer.status === ApprovalStatus.REJECTED);
if (!isRejected && duplicateRequest.status === ApprovalStatus.PENDING) { if (!isRejected) {
throw new BadRequestError({ message: "You already have a pending access request with the same criteria" }); throw new BadRequestError({ message: "You already have a pending access request with the same criteria" });
} }
} }
@ -273,7 +281,7 @@ export const accessApprovalRequestServiceFactory = ({
return { request: approval }; return { request: approval };
}; };
const listApprovalRequests: TAccessApprovalRequestServiceFactory["listApprovalRequests"] = async ({ const listApprovalRequests = async ({
projectSlug, projectSlug,
authorProjectMembershipId, authorProjectMembershipId,
envSlug, envSlug,
@ -281,7 +289,7 @@ export const accessApprovalRequestServiceFactory = ({
actorOrgId, actorOrgId,
actorId, actorId,
actorAuthMethod actorAuthMethod
}) => { }: TListApprovalRequestsDTO) => {
const project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId); const project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId);
if (!project) throw new NotFoundError({ message: `Project with slug '${projectSlug}' not found` }); if (!project) throw new NotFoundError({ message: `Project with slug '${projectSlug}' not found` });
@ -311,7 +319,7 @@ export const accessApprovalRequestServiceFactory = ({
return { requests }; return { requests };
}; };
const reviewAccessRequest: TAccessApprovalRequestServiceFactory["reviewAccessRequest"] = async ({ const reviewAccessRequest = async ({
requestId, requestId,
actor, actor,
status, status,
@ -319,7 +327,7 @@ export const accessApprovalRequestServiceFactory = ({
actorAuthMethod, actorAuthMethod,
actorOrgId, actorOrgId,
bypassReason bypassReason
}) => { }: TReviewAccessRequestDTO) => {
const accessApprovalRequest = await accessApprovalRequestDAL.findById(requestId); const accessApprovalRequest = await accessApprovalRequestDAL.findById(requestId);
if (!accessApprovalRequest) { if (!accessApprovalRequest) {
throw new NotFoundError({ message: `Secret approval request with ID '${requestId}' not found` }); throw new NotFoundError({ message: `Secret approval request with ID '${requestId}' not found` });
@ -332,7 +340,7 @@ export const accessApprovalRequestServiceFactory = ({
}); });
} }
const { membership, hasRole } = await permissionService.getProjectPermission({ const { membership, hasRole, permission } = await permissionService.getProjectPermission({
actor, actor,
actorId, actorId,
projectId: accessApprovalRequest.projectId, projectId: accessApprovalRequest.projectId,
@ -347,12 +355,13 @@ export const accessApprovalRequestServiceFactory = ({
const isSelfApproval = actorId === accessApprovalRequest.requestedByUserId; const isSelfApproval = actorId === accessApprovalRequest.requestedByUserId;
const isSoftEnforcement = policy.enforcementLevel === EnforcementLevel.Soft; const isSoftEnforcement = policy.enforcementLevel === EnforcementLevel.Soft;
const canBypass = !policy.bypassers.length || policy.bypassers.some((bypasser) => bypasser.userId === actorId); const canBypassApproval = permission.can(
const cannotBypassUnderSoftEnforcement = !(isSoftEnforcement && canBypass); ProjectPermissionApprovalActions.AllowAccessBypass,
ProjectPermissionSub.SecretApproval
);
const cannotBypassUnderSoftEnforcement = !(isSoftEnforcement && canBypassApproval);
const isApprover = policy.approvers.find((approver) => approver.userId === actorId); if (!policy.allowedSelfApprovals && isSelfApproval && cannotBypassUnderSoftEnforcement) {
// If user is (not an approver OR cant self approve) AND can't bypass policy
if ((!isApprover || (!policy.allowedSelfApprovals && isSelfApproval)) && cannotBypassUnderSoftEnforcement) {
throw new BadRequestError({ throw new BadRequestError({
message: "Failed to review access approval request. Users are not authorized to review their own request." message: "Failed to review access approval request. Users are not authorized to review their own request."
}); });
@ -361,7 +370,7 @@ export const accessApprovalRequestServiceFactory = ({
if ( if (
!hasRole(ProjectMembershipRole.Admin) && !hasRole(ProjectMembershipRole.Admin) &&
accessApprovalRequest.requestedByUserId !== actorId && // The request wasn't made by the current user accessApprovalRequest.requestedByUserId !== actorId && // The request wasn't made by the current user
!isApprover // The request isn't performed by an assigned approver !policy.approvers.find((approver) => approver.userId === actorId) // The request isn't performed by an assigned approver
) { ) {
throw new ForbiddenRequestError({ message: "You are not authorized to approve this request" }); throw new ForbiddenRequestError({ message: "You are not authorized to approve this request" });
} }
@ -372,44 +381,8 @@ export const accessApprovalRequestServiceFactory = ({
} }
const existingReviews = await accessApprovalRequestReviewerDAL.find({ requestId: accessApprovalRequest.id }); const existingReviews = await accessApprovalRequestReviewerDAL.find({ requestId: accessApprovalRequest.id });
if (accessApprovalRequest.status !== ApprovalStatus.PENDING) { if (existingReviews.some((review) => review.status === ApprovalStatus.REJECTED)) {
throw new BadRequestError({ message: "The request has been closed" }); throw new BadRequestError({ message: "The request has already been rejected by another reviewer" });
}
const reviewsGroupById = groupBy(
existingReviews.filter((review) => review.status === ApprovalStatus.APPROVED),
(i) => i.reviewerUserId
);
const approvedSequences = policy.approvers.reduce(
(acc, curr) => {
const hasApproved = reviewsGroupById?.[curr.userId as string]?.[0];
if (acc?.[acc.length - 1]?.step === curr.sequence) {
if (hasApproved) {
acc[acc.length - 1].approvals += 1;
}
return acc;
}
acc.push({
step: curr.sequence || 1,
approvals: hasApproved ? 1 : 0,
requiredApprovals: curr.approvalsRequired || 1
});
return acc;
},
[] as { step: number; approvals: number; requiredApprovals: number }[]
);
const presentSequence = approvedSequences.find((el) => el.approvals < el.requiredApprovals) || {
step: 1,
approvals: 0,
requiredApprovals: 1
};
if (presentSequence) {
const isApproverOfTheSequence = policy.approvers.find(
(el) => el.sequence === presentSequence.step && el.userId === actorId
);
if (!isApproverOfTheSequence) throw new BadRequestError({ message: "You are not reviewer in this step" });
} }
const reviewStatus = await accessApprovalRequestReviewerDAL.transaction(async (tx) => { const reviewStatus = await accessApprovalRequestReviewerDAL.transaction(async (tx) => {
@ -454,14 +427,11 @@ export const accessApprovalRequestServiceFactory = ({
); );
} }
if (status === ApprovalStatus.REJECTED) { const otherReviews = existingReviews.filter((er) => er.reviewerUserId !== actorId);
await accessApprovalRequestDAL.updateById(accessApprovalRequest.id, { status: ApprovalStatus.REJECTED }, tx); const allUniqueReviews = [...otherReviews, reviewForThisActorProcessing];
return reviewForThisActorProcessing;
}
const meetsStandardApprovalThreshold = const approvedReviews = allUniqueReviews.filter((r) => r.status === ApprovalStatus.APPROVED);
(presentSequence?.approvals || 0) + 1 >= presentSequence.requiredApprovals && const meetsStandardApprovalThreshold = approvedReviews.length >= policy.approvals;
approvedSequences.at(-1)?.step === presentSequence?.step;
if ( if (
reviewForThisActorProcessing.status === ApprovalStatus.APPROVED && reviewForThisActorProcessing.status === ApprovalStatus.APPROVED &&
@ -508,11 +478,7 @@ export const accessApprovalRequestServiceFactory = ({
); );
privilegeIdToSet = privilege.id; privilegeIdToSet = privilege.id;
} }
await accessApprovalRequestDAL.updateById( await accessApprovalRequestDAL.updateById(accessApprovalRequest.id, { privilegeId: privilegeIdToSet }, tx);
accessApprovalRequest.id,
{ privilegeId: privilegeIdToSet, status: ApprovalStatus.APPROVED },
tx
);
} }
} }
@ -558,13 +524,7 @@ export const accessApprovalRequestServiceFactory = ({
return reviewStatus; return reviewStatus;
}; };
const getCount: TAccessApprovalRequestServiceFactory["getCount"] = async ({ const getCount = async ({ projectSlug, actor, actorAuthMethod, actorId, actorOrgId }: TGetAccessRequestCountDTO) => {
projectSlug,
actor,
actorAuthMethod,
actorId,
actorOrgId
}) => {
const project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId); const project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId);
if (!project) throw new NotFoundError({ message: `Project with slug '${projectSlug}' not found` }); if (!project) throw new NotFoundError({ message: `Project with slug '${projectSlug}' not found` });
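
The review flow above walks the policy approvers grouped by sequence, counts approvals per step, and only finalizes the request when the last remaining step has enough approvals. A standalone restatement of that threshold check (invented names; a sketch of the logic, not the service code itself):

// Illustrative restatement of the stepwise approval threshold used in reviewAccessRequest.
type ApprovalStep = { step: number; approvals: number; requiredApprovals: number };

// `steps` is assumed to be ordered by step number; `approvals` counts prior approvals only,
// so the review currently being processed is added as +1 here.
const meetsThresholdAfterThisApproval = (steps: ApprovalStep[]): boolean => {
  const current = steps.find((s) => s.approvals < s.requiredApprovals);
  if (!current) return true; // every step already has enough approvals
  const isLastStep = steps[steps.length - 1]?.step === current.step;
  return isLastStep && current.approvals + 1 >= current.requiredApprovals;
};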

View File

@ -34,124 +34,3 @@ export type TListApprovalRequestsDTO = {
authorProjectMembershipId?: string; authorProjectMembershipId?: string;
envSlug?: string; envSlug?: string;
} & Omit<TProjectPermission, "projectId">; } & Omit<TProjectPermission, "projectId">;
export interface TAccessApprovalRequestServiceFactory {
createAccessApprovalRequest: (arg: TCreateAccessApprovalRequestDTO) => Promise<{
request: {
status: string;
id: string;
createdAt: Date;
updatedAt: Date;
policyId: string;
isTemporary: boolean;
requestedByUserId: string;
privilegeId?: string | null | undefined;
requestedBy?: string | null | undefined;
temporaryRange?: string | null | undefined;
permissions?: unknown;
note?: string | null | undefined;
privilegeDeletedAt?: Date | null | undefined;
};
}>;
listApprovalRequests: (arg: TListApprovalRequestsDTO) => Promise<{
requests: {
policy: {
approvers: (
| {
userId: string | null | undefined;
sequence: number | null | undefined;
approvalsRequired: number | null | undefined;
email: string | null | undefined;
username: string;
}
| {
userId: string;
sequence: number | null | undefined;
approvalsRequired: number | null | undefined;
email: string | null | undefined;
username: string;
}
)[];
bypassers: string[];
id: string;
name: string;
approvals: number;
secretPath: string | null | undefined;
enforcementLevel: string;
allowedSelfApprovals: boolean;
envId: string;
deletedAt: Date | null | undefined;
};
projectId: string;
environment: string;
environmentName: string;
requestedByUser: {
userId: string;
email: string | null | undefined;
firstName: string | null | undefined;
lastName: string | null | undefined;
username: string;
};
privilege: {
membershipId: string;
userId: string;
projectId: string;
isTemporary: boolean;
temporaryMode: string | null | undefined;
temporaryRange: string | null | undefined;
temporaryAccessStartTime: Date | null | undefined;
temporaryAccessEndTime: Date | null | undefined;
permissions: unknown;
} | null;
isApproved: boolean;
status: string;
id: string;
createdAt: Date;
updatedAt: Date;
policyId: string;
isTemporary: boolean;
requestedByUserId: string;
privilegeId?: string | null | undefined;
requestedBy?: string | null | undefined;
temporaryRange?: string | null | undefined;
permissions?: unknown;
note?: string | null | undefined;
privilegeDeletedAt?: Date | null | undefined;
reviewers: {
userId: string;
status: string;
}[];
approvers: (
| {
userId: string | null | undefined;
sequence: number | null | undefined;
approvalsRequired: number | null | undefined;
email: string | null | undefined;
username: string;
}
| {
userId: string;
sequence: number | null | undefined;
approvalsRequired: number | null | undefined;
email: string | null | undefined;
username: string;
}
)[];
bypassers: string[];
}[];
}>;
reviewAccessRequest: (arg: TReviewAccessRequestDTO) => Promise<{
id: string;
requestId: string;
reviewerUserId: string;
status: string;
createdAt: Date;
updatedAt: Date;
}>;
getCount: (arg: TGetAccessRequestCountDTO) => Promise<{
count: {
pendingCount: number;
finalizedCount: number;
};
}>;
}
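
The getCount contract above reports pendingCount and finalizedCount, and the DAL comments earlier spell out the rule: a request stays pending while it has no rejection, no granted privilege, and a pending status; anything else is finalized. A rough illustration of that classification (names and enum string values below are assumed for the sketch, not taken from the real ApprovalStatus enum):

// Illustrative classification only; the real ApprovalStatus string values are assumed here.
enum ApprovalStatusSketch {
  PENDING = "pending",
  APPROVED = "approved",
  REJECTED = "rejected"
}

type RequestSummary = {
  status: string;
  privilegeId: string | null;
  reviewers: { status: string }[];
};

const classifyRequest = (req: RequestSummary): "pending" | "finalized" => {
  const hasRejection = req.reviewers.some((r) => r.status === ApprovalStatusSketch.REJECTED);
  if (!req.privilegeId && !hasRejection && req.status === ApprovalStatusSketch.PENDING) return "pending";
  return "finalized";
};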

View File

@ -1,4 +0,0 @@
export * from "./oracledb-connection-enums";
export * from "./oracledb-connection-fns";
export * from "./oracledb-connection-schemas";
export * from "./oracledb-connection-types";

View File

@ -1,3 +0,0 @@
export enum OracleDBConnectionMethod {
UsernameAndPassword = "username-and-password"
}

View File

@ -1,12 +0,0 @@
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
import { OracleDBConnectionMethod } from "./oracledb-connection-enums";
export const getOracleDBConnectionListItem = () => {
return {
name: "OracleDB" as const,
app: AppConnection.OracleDB as const,
methods: Object.values(OracleDBConnectionMethod) as [OracleDBConnectionMethod.UsernameAndPassword],
supportsPlatformManagement: true as const
};
};

View File

@ -1,64 +0,0 @@
import z from "zod";
import { AppConnections } from "@app/lib/api-docs";
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
import {
BaseAppConnectionSchema,
GenericCreateAppConnectionFieldsSchema,
GenericUpdateAppConnectionFieldsSchema
} from "@app/services/app-connection/app-connection-schemas";
import { BaseSqlUsernameAndPasswordConnectionSchema } from "@app/services/app-connection/shared/sql";
import { OracleDBConnectionMethod } from "./oracledb-connection-enums";
export const OracleDBConnectionCredentialsSchema = BaseSqlUsernameAndPasswordConnectionSchema;
const BaseOracleDBConnectionSchema = BaseAppConnectionSchema.extend({ app: z.literal(AppConnection.OracleDB) });
export const OracleDBConnectionSchema = BaseOracleDBConnectionSchema.extend({
method: z.literal(OracleDBConnectionMethod.UsernameAndPassword),
credentials: OracleDBConnectionCredentialsSchema
});
export const SanitizedOracleDBConnectionSchema = z.discriminatedUnion("method", [
BaseOracleDBConnectionSchema.extend({
method: z.literal(OracleDBConnectionMethod.UsernameAndPassword),
credentials: OracleDBConnectionCredentialsSchema.pick({
host: true,
database: true,
port: true,
username: true,
sslEnabled: true,
sslRejectUnauthorized: true,
sslCertificate: true
})
})
]);
export const ValidateOracleDBConnectionCredentialsSchema = z.discriminatedUnion("method", [
z.object({
method: z
.literal(OracleDBConnectionMethod.UsernameAndPassword)
.describe(AppConnections.CREATE(AppConnection.OracleDB).method),
credentials: OracleDBConnectionCredentialsSchema.describe(AppConnections.CREATE(AppConnection.OracleDB).credentials)
})
]);
export const CreateOracleDBConnectionSchema = ValidateOracleDBConnectionCredentialsSchema.and(
GenericCreateAppConnectionFieldsSchema(AppConnection.OracleDB, { supportsPlatformManagedCredentials: true })
);
export const UpdateOracleDBConnectionSchema = z
.object({
credentials: OracleDBConnectionCredentialsSchema.optional().describe(
AppConnections.UPDATE(AppConnection.OracleDB).credentials
)
})
.and(GenericUpdateAppConnectionFieldsSchema(AppConnection.OracleDB, { supportsPlatformManagedCredentials: true }));
export const OracleDBConnectionListItemSchema = z.object({
name: z.literal("OracleDB"),
app: z.literal(AppConnection.OracleDB),
methods: z.nativeEnum(OracleDBConnectionMethod).array(),
supportsPlatformManagement: z.literal(true)
});
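
Since ValidateOracleDBConnectionCredentialsSchema in the removed file above is a Zod discriminated union on method, validating an incoming payload is a plain safeParse call. A hypothetical usage sketch follows; only the method literal is confirmed by this diff, while the credential field names come from BaseSqlUsernameAndPasswordConnectionSchema (not shown here) and are assumptions:

// Hypothetical usage of the removed schema; credential field names are assumptions.
import { ValidateOracleDBConnectionCredentialsSchema } from "./oracledb-connection-schemas";

const result = ValidateOracleDBConnectionCredentialsSchema.safeParse({
  method: "username-and-password",
  credentials: {
    host: "db.example.com", // assumed field
    port: 1521, // assumed field
    database: "ORCLPDB1", // assumed field
    username: "app_user", // assumed field
    password: "example-password" // assumed field
  }
});

if (!result.success) {
  console.error(result.error.flatten());
}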

View File

@ -1,17 +0,0 @@
import z from "zod";
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
import {
CreateOracleDBConnectionSchema,
OracleDBConnectionSchema,
ValidateOracleDBConnectionCredentialsSchema
} from "./oracledb-connection-schemas";
export type TOracleDBConnection = z.infer<typeof OracleDBConnectionSchema>;
export type TOracleDBConnectionInput = z.infer<typeof CreateOracleDBConnectionSchema> & {
app: AppConnection.OracleDB;
};
export type TValidateOracleDBConnectionCredentialsSchema = typeof ValidateOracleDBConnectionCredentialsSchema;

View File

@ -7,30 +7,29 @@ import { ForbiddenRequestError, NotFoundError } from "@app/lib/errors";
import { ActorType } from "@app/services/auth/auth-type"; import { ActorType } from "@app/services/auth/auth-type";
import { TProjectDALFactory } from "@app/services/project/project-dal"; import { TProjectDALFactory } from "@app/services/project/project-dal";
import { TPermissionServiceFactory } from "../permission/permission-service-types"; import { TPermissionServiceFactory } from "../permission/permission-service";
import { import {
ProjectPermissionIdentityActions, ProjectPermissionIdentityActions,
ProjectPermissionMemberActions, ProjectPermissionMemberActions,
ProjectPermissionSub ProjectPermissionSub
} from "../permission/project-permission"; } from "../permission/project-permission";
import { TAssumePrivilegeServiceFactory } from "./assume-privilege-types"; import { TAssumeProjectPrivilegeDTO } from "./assume-privilege-types";
type TAssumePrivilegeServiceFactoryDep = { type TAssumePrivilegeServiceFactoryDep = {
projectDAL: Pick<TProjectDALFactory, "findById">; projectDAL: Pick<TProjectDALFactory, "findById">;
permissionService: Pick<TPermissionServiceFactory, "getProjectPermission">; permissionService: Pick<TPermissionServiceFactory, "getProjectPermission">;
}; };
export const assumePrivilegeServiceFactory = ({ export type TAssumePrivilegeServiceFactory = ReturnType<typeof assumePrivilegeServiceFactory>;
projectDAL,
permissionService export const assumePrivilegeServiceFactory = ({ projectDAL, permissionService }: TAssumePrivilegeServiceFactoryDep) => {
}: TAssumePrivilegeServiceFactoryDep): TAssumePrivilegeServiceFactory => { const assumeProjectPrivileges = async ({
const assumeProjectPrivileges: TAssumePrivilegeServiceFactory["assumeProjectPrivileges"] = async ({
targetActorType, targetActorType,
targetActorId, targetActorId,
projectId, projectId,
actorPermissionDetails, actorPermissionDetails,
tokenVersionId tokenVersionId
}) => { }: TAssumeProjectPrivilegeDTO) => {
const project = await projectDAL.findById(projectId); const project = await projectDAL.findById(projectId);
if (!project) throw new NotFoundError({ message: `Project with ID '${projectId}' not found` }); if (!project) throw new NotFoundError({ message: `Project with ID '${projectId}' not found` });
const { permission } = await permissionService.getProjectPermission({ const { permission } = await permissionService.getProjectPermission({
@ -80,10 +79,7 @@ export const assumePrivilegeServiceFactory = ({
return { actorType: targetActorType, actorId: targetActorId, projectId, assumePrivilegesToken }; return { actorType: targetActorType, actorId: targetActorId, projectId, assumePrivilegesToken };
}; };
const verifyAssumePrivilegeToken: TAssumePrivilegeServiceFactory["verifyAssumePrivilegeToken"] = ( const verifyAssumePrivilegeToken = (token: string, tokenVersionId: string) => {
token,
tokenVersionId
) => {
const appCfg = getConfig(); const appCfg = getConfig();
const decodedToken = jwt.verify(token, appCfg.AUTH_SECRET) as { const decodedToken = jwt.verify(token, appCfg.AUTH_SECRET) as {
tokenVersionId: string; tokenVersionId: string;

View File

@ -8,28 +8,3 @@ export type TAssumeProjectPrivilegeDTO = {
tokenVersionId: string; tokenVersionId: string;
actorPermissionDetails: OrgServiceActor; actorPermissionDetails: OrgServiceActor;
}; };
export interface TAssumePrivilegeServiceFactory {
assumeProjectPrivileges: ({
targetActorType,
targetActorId,
projectId,
actorPermissionDetails,
tokenVersionId
}: TAssumeProjectPrivilegeDTO) => Promise<{
actorType: ActorType.USER | ActorType.IDENTITY;
actorId: string;
projectId: string;
assumePrivilegesToken: string;
}>;
verifyAssumePrivilegeToken: (
token: string,
tokenVersionId: string
) => {
tokenVersionId: string;
projectId: string;
requesterId: string;
actorType: ActorType;
actorId: string;
};
}
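
The removed interface above pins down the decoded payload of the assume-privileges token, and the service verifies it with jwt.verify against AUTH_SECRET. A sketch of that verification step under those assumptions (the tokenVersionId comparison and the error thrown here are illustrative, not necessarily how the service reacts):

// Illustrative verification sketch; payload shape taken from the removed interface above.
import jwt from "jsonwebtoken";

type TAssumePrivilegePayload = {
  tokenVersionId: string;
  projectId: string;
  requesterId: string;
  actorType: string; // ActorType enum in the real code; widened to string for this sketch
  actorId: string;
};

const verifyAssumePrivilegeTokenSketch = (token: string, tokenVersionId: string, authSecret: string) => {
  const decoded = jwt.verify(token, authSecret) as TAssumePrivilegePayload;
  if (decoded.tokenVersionId !== tokenVersionId) {
    throw new Error("Assume-privileges token does not match the current session");
  }
  return decoded;
};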

View File

@ -1,10 +1,10 @@
import { TDbClient } from "@app/db"; import { TDbClient } from "@app/db";
import { TableName } from "@app/db/schemas"; import { TableName } from "@app/db/schemas";
import { ormify, TOrmify } from "@app/lib/knex"; import { ormify } from "@app/lib/knex";
export type TAuditLogStreamDALFactory = TOrmify<TableName.AuditLogStream>; export type TAuditLogStreamDALFactory = ReturnType<typeof auditLogStreamDALFactory>;
export const auditLogStreamDALFactory = (db: TDbClient): TAuditLogStreamDALFactory => { export const auditLogStreamDALFactory = (db: TDbClient) => {
const orm = ormify(db, TableName.AuditLogStream); const orm = ormify(db, TableName.AuditLogStream);
return orm; return orm;

View File

@@ -11,9 +11,16 @@ import { blockLocalAndPrivateIpAddresses } from "@app/lib/validator";
 import { AUDIT_LOG_STREAM_TIMEOUT } from "../audit-log/audit-log-queue";
 import { TLicenseServiceFactory } from "../license/license-service";
 import { OrgPermissionActions, OrgPermissionSubjects } from "../permission/org-permission";
-import { TPermissionServiceFactory } from "../permission/permission-service-types";
+import { TPermissionServiceFactory } from "../permission/permission-service";
 import { TAuditLogStreamDALFactory } from "./audit-log-stream-dal";
-import { LogStreamHeaders, TAuditLogStreamServiceFactory } from "./audit-log-stream-types";
+import {
+  LogStreamHeaders,
+  TCreateAuditLogStreamDTO,
+  TDeleteAuditLogStreamDTO,
+  TGetDetailsAuditLogStreamDTO,
+  TListAuditLogStreamDTO,
+  TUpdateAuditLogStreamDTO
+} from "./audit-log-stream-types";
 type TAuditLogStreamServiceFactoryDep = {
   auditLogStreamDAL: TAuditLogStreamDALFactory;
@@ -21,19 +28,21 @@ type TAuditLogStreamServiceFactoryDep = {
   licenseService: Pick<TLicenseServiceFactory, "getPlan">;
 };
+export type TAuditLogStreamServiceFactory = ReturnType<typeof auditLogStreamServiceFactory>;
 export const auditLogStreamServiceFactory = ({
   auditLogStreamDAL,
   permissionService,
   licenseService
-}: TAuditLogStreamServiceFactoryDep): TAuditLogStreamServiceFactory => {
-  const create: TAuditLogStreamServiceFactory["create"] = async ({
+}: TAuditLogStreamServiceFactoryDep) => {
+  const create = async ({
     url,
     actor,
     headers = [],
     actorId,
     actorOrgId,
     actorAuthMethod
-  }) => {
+  }: TCreateAuditLogStreamDTO) => {
     if (!actorOrgId) throw new UnauthorizedError({ message: "No organization ID attached to authentication token" });
     const plan = await licenseService.getPlan(actorOrgId);
@@ -101,7 +110,7 @@ export const auditLogStreamServiceFactory = ({
     return logStream;
   };
-  const updateById: TAuditLogStreamServiceFactory["updateById"] = async ({
+  const updateById = async ({
     id,
     url,
     actor,
@@ -109,7 +118,7 @@ export const auditLogStreamServiceFactory = ({
     actorId,
     actorOrgId,
     actorAuthMethod
-  }) => {
+  }: TUpdateAuditLogStreamDTO) => {
     if (!actorOrgId) throw new UnauthorizedError({ message: "No organization ID attached to authentication token" });
     const plan = await licenseService.getPlan(actorOrgId);
@@ -166,13 +175,7 @@ export const auditLogStreamServiceFactory = ({
     return updatedLogStream;
   };
-  const deleteById: TAuditLogStreamServiceFactory["deleteById"] = async ({
-    id,
-    actor,
-    actorId,
-    actorOrgId,
-    actorAuthMethod
-  }) => {
+  const deleteById = async ({ id, actor, actorId, actorOrgId, actorAuthMethod }: TDeleteAuditLogStreamDTO) => {
     if (!actorOrgId) throw new UnauthorizedError({ message: "No organization ID attached to authentication token" });
     const logStream = await auditLogStreamDAL.findById(id);
@@ -186,13 +189,7 @@ export const auditLogStreamServiceFactory = ({
     return deletedLogStream;
   };
-  const getById: TAuditLogStreamServiceFactory["getById"] = async ({
-    id,
-    actor,
-    actorId,
-    actorOrgId,
-    actorAuthMethod
-  }) => {
+  const getById = async ({ id, actor, actorId, actorOrgId, actorAuthMethod }: TGetDetailsAuditLogStreamDTO) => {
     const logStream = await auditLogStreamDAL.findById(id);
     if (!logStream) throw new NotFoundError({ message: `Audit log stream with ID '${id}' not found` });
@@ -215,7 +212,7 @@ export const auditLogStreamServiceFactory = ({
     return { ...logStream, headers };
   };
-  const list: TAuditLogStreamServiceFactory["list"] = async ({ actor, actorId, actorOrgId, actorAuthMethod }) => {
+  const list = async ({ actor, actorId, actorOrgId, actorAuthMethod }: TListAuditLogStreamDTO) => {
     const { permission } = await permissionService.getOrgPermission(
       actor,
       actorId,


@@ -1,4 +1,3 @@
-import { TAuditLogStreams } from "@app/db/schemas";
 import { TOrgPermission } from "@app/lib/types";
 export type LogStreamHeaders = {
@@ -26,23 +25,3 @@ export type TListAuditLogStreamDTO = Omit<TOrgPermission, "orgId">;
 export type TGetDetailsAuditLogStreamDTO = Omit<TOrgPermission, "orgId"> & {
   id: string;
 };
-export type TAuditLogStreamServiceFactory = {
-  create: (arg: TCreateAuditLogStreamDTO) => Promise<TAuditLogStreams>;
-  updateById: (arg: TUpdateAuditLogStreamDTO) => Promise<TAuditLogStreams>;
-  deleteById: (arg: TDeleteAuditLogStreamDTO) => Promise<TAuditLogStreams>;
-  getById: (arg: TGetDetailsAuditLogStreamDTO) => Promise<{
-    headers: LogStreamHeaders[] | undefined;
-    orgId: string;
-    url: string;
-    id: string;
-    createdAt: Date;
-    updatedAt: Date;
-    encryptedHeadersCiphertext?: string | null | undefined;
-    encryptedHeadersIV?: string | null | undefined;
-    encryptedHeadersTag?: string | null | undefined;
-    encryptedHeadersAlgorithm?: string | null | undefined;
-    encryptedHeadersKeyEncoding?: string | null | undefined;
-  }>;
-  list: (arg: TListAuditLogStreamDTO) => Promise<TAuditLogStreams[]>;
-};
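
The DTOs that survive in this file all follow the composition pattern visible in the context lines: start from a shared org-permission payload, drop the field the service layer fills in itself (orgId comes from the authenticated actor's actorOrgId), and intersect in whatever identifiers the operation needs. A hedged illustration with a stand-in TOrgPermissionLike shape (the real TOrgPermission in @app/lib/types may carry different fields):

// Stand-in for the shared permission payload that org-scoped calls carry.
type TOrgPermissionLike = {
  actor: string;
  actorId: string;
  actorOrgId: string;
  actorAuthMethod: string;
  orgId: string;
};

// A "list" style DTO: the permission payload minus orgId.
type TListLikeDTO = Omit<TOrgPermissionLike, "orgId">;

// A "get by id" style DTO: the list shape plus the target resource id.
type TGetByIdLikeDTO = Omit<TOrgPermissionLike, "orgId"> & { id: string };

const exampleDto: TGetByIdLikeDTO = {
  actor: "user",
  actorId: "user_123",
  actorOrgId: "org_123",
  actorAuthMethod: "jwt",
  id: "stream_123"
};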


@@ -2,29 +2,16 @@
 import knex from "knex";
 import { TDbClient } from "@app/db";
-import { TableName, TAuditLogs } from "@app/db/schemas";
+import { TableName } from "@app/db/schemas";
 import { DatabaseError, GatewayTimeoutError } from "@app/lib/errors";
-import { ormify, selectAllTableCols, TOrmify } from "@app/lib/knex";
+import { ormify, selectAllTableCols } from "@app/lib/knex";
 import { logger } from "@app/lib/logger";
 import { QueueName } from "@app/queue";
 import { ActorType } from "@app/services/auth/auth-type";
 import { EventType, filterableSecretEvents } from "./audit-log-types";
-export interface TAuditLogDALFactory extends Omit<TOrmify<TableName.AuditLog>, "find"> {
-  pruneAuditLog: (tx?: knex.Knex) => Promise<void>;
-  find: (
-    arg: Omit<TFindQuery, "actor" | "eventType"> & {
-      actorId?: string | undefined;
-      actorType?: ActorType | undefined;
-      secretPath?: string | undefined;
-      secretKey?: string | undefined;
-      eventType?: EventType[] | undefined;
-      eventMetadata?: Record<string, string> | undefined;
-    },
-    tx?: knex.Knex
-  ) => Promise<TAuditLogs[]>;
-}
+export type TAuditLogDALFactory = ReturnType<typeof auditLogDALFactory>;
 type TFindQuery = {
   actor?: string;
@@ -42,7 +29,7 @@ type TFindQuery = {
 export const auditLogDALFactory = (db: TDbClient) => {
   const auditLogOrm = ormify(db, TableName.AuditLog);
-  const find: TAuditLogDALFactory["find"] = async (
+  const find = async (
     {
       orgId,
       projectId,
@@ -58,8 +45,15 @@ export const auditLogDALFactory = (db: TDbClient) => {
       secretKey,
       eventType,
       eventMetadata
+    }: Omit<TFindQuery, "actor" | "eventType"> & {
+      actorId?: string;
+      actorType?: ActorType;
+      secretPath?: string;
+      secretKey?: string;
+      eventType?: EventType[];
+      eventMetadata?: Record<string, string>;
     },
-    tx
+    tx?: knex.Knex
   ) => {
     if (!orgId && !projectId) {
       throw new Error("Either orgId or projectId must be provided");
@@ -160,7 +154,7 @@ export const auditLogDALFactory = (db: TDbClient) => {
   };
   // delete all audit log that have expired
-  const pruneAuditLog: TAuditLogDALFactory["pruneAuditLog"] = async (tx) => {
+  const pruneAuditLog = async (tx?: knex.Knex) => {
     const AUDIT_LOG_PRUNE_BATCH_SIZE = 10000;
     const MAX_RETRY_ON_FAILURE = 3;
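
Only the constants at the top of pruneAuditLog are visible here (AUDIT_LOG_PRUNE_BATCH_SIZE, MAX_RETRY_ON_FAILURE); the loop that uses them falls outside the hunk. As a rough, assumption-based sketch of what a batched prune with bounded retries generally looks like (not Infisical's actual query logic):

// Generic batched cleanup: delete expired rows in fixed-size chunks and retry a
// failed chunk a few times before giving up. deleteBatch is a hypothetical
// callback standing in for the real knex delete query.
type DeleteBatch = (limit: number) => Promise<number>; // resolves to rows deleted

const pruneExpiredLike = async (deleteBatch: DeleteBatch) => {
  const BATCH_SIZE = 10000;
  const MAX_RETRY_ON_FAILURE = 3;

  // Keep deleting until a batch comes back smaller than the limit,
  // i.e. nothing left matches the expiry condition.
  for (;;) {
    let deleted = 0;
    let attempt = 0;
    for (;;) {
      try {
        deleted = await deleteBatch(BATCH_SIZE);
        break;
      } catch (err) {
        attempt += 1;
        if (attempt >= MAX_RETRY_ON_FAILURE) throw err;
      }
    }
    if (deleted < BATCH_SIZE) return;
  }
};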


@@ -21,9 +21,7 @@ type TAuditLogQueueServiceFactoryDep = {
   licenseService: Pick<TLicenseServiceFactory, "getPlan">;
 };
-export type TAuditLogQueueServiceFactory = {
-  pushToLog: (data: TCreateAuditLogDTO) => Promise<void>;
-};
+export type TAuditLogQueueServiceFactory = Awaited<ReturnType<typeof auditLogQueueServiceFactory>>;
 // keep this timeout 5s it must be fast because else the queue will take time to finish
 // audit log is a crowded queue thus needs to be fast
@@ -35,7 +33,7 @@ export const auditLogQueueServiceFactory = async ({
   projectDAL,
   licenseService,
   auditLogStreamDAL
-}: TAuditLogQueueServiceFactoryDep): Promise<TAuditLogQueueServiceFactory> => {
+}: TAuditLogQueueServiceFactoryDep) => {
   const appCfg = getConfig();
   const pushToLog = async (data: TCreateAuditLogDTO) => {
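
Because this factory is async (presumably doing queue setup before returning its methods), inferring its service type takes one extra step compared with the synchronous factories above: the promise has to be unwrapped with Awaited. A tiny sketch with a made-up factory:

const queueServiceFactoryLike = async () => {
  // Imagine connection or worker setup happening here before the methods are returned.
  const pushToLog = async (data: { event: string }) => {
    void data; // placeholder for actually enqueueing the payload
  };
  return { pushToLog };
};

// ReturnType alone yields Promise<{ pushToLog: ... }>; Awaited unwraps the promise.
type TQueueServiceLike = Awaited<ReturnType<typeof queueServiceFactoryLike>>;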


@@ -7,11 +7,11 @@ import { BadRequestError } from "@app/lib/errors";
 import { ActorType } from "@app/services/auth/auth-type";
 import { OrgPermissionActions, OrgPermissionSubjects } from "../permission/org-permission";
-import { TPermissionServiceFactory } from "../permission/permission-service-types";
+import { TPermissionServiceFactory } from "../permission/permission-service";
 import { ProjectPermissionActions, ProjectPermissionSub } from "../permission/project-permission";
 import { TAuditLogDALFactory } from "./audit-log-dal";
 import { TAuditLogQueueServiceFactory } from "./audit-log-queue";
-import { EventType, TAuditLogServiceFactory } from "./audit-log-types";
+import { EventType, TCreateAuditLogDTO, TListProjectAuditLogDTO } from "./audit-log-types";
 type TAuditLogServiceFactoryDep = {
   auditLogDAL: TAuditLogDALFactory;
@@ -19,18 +19,14 @@ type TAuditLogServiceFactoryDep = {
   auditLogQueue: TAuditLogQueueServiceFactory;
 };
+export type TAuditLogServiceFactory = ReturnType<typeof auditLogServiceFactory>;
 export const auditLogServiceFactory = ({
   auditLogDAL,
   auditLogQueue,
   permissionService
-}: TAuditLogServiceFactoryDep): TAuditLogServiceFactory => {
-  const listAuditLogs: TAuditLogServiceFactory["listAuditLogs"] = async ({
-    actorAuthMethod,
-    actorId,
-    actorOrgId,
-    actor,
-    filter
-  }) => {
+}: TAuditLogServiceFactoryDep) => {
+  const listAuditLogs = async ({ actorAuthMethod, actorId, actorOrgId, actor, filter }: TListProjectAuditLogDTO) => {
     // Filter logs for specific project
     if (filter.projectId) {
       const { permission } = await permissionService.getProjectPermission({
@@ -79,7 +75,7 @@ export const auditLogServiceFactory = ({
       }));
   };
-  const createAuditLog: TAuditLogServiceFactory["createAuditLog"] = async (data) => {
+  const createAuditLog = async (data: TCreateAuditLogDTO) => {
     const appCfg = getConfig();
     if (appCfg.DISABLE_AUDIT_LOG_GENERATION) {
       return;
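
One pattern that is identical on both sides of these hunks is narrowing injected dependencies with Pick, so a service only demands the one or two methods it actually calls (for example licenseService: Pick<TLicenseServiceFactory, "getPlan"> in the dependency types above). A small illustration with invented license-service names:

type TLicenseServiceLike = {
  getPlan: (orgId: string) => Promise<{ auditLogsEnabled: boolean }>;
  refreshPlan: (orgId: string) => Promise<void>;
};

type TDepsLike = {
  // Only getPlan is required, so tests can inject a one-method stub.
  licenseService: Pick<TLicenseServiceLike, "getPlan">;
};

const someServiceFactoryLike = ({ licenseService }: TDepsLike) => {
  const isAllowed = async (orgId: string) => {
    const plan = await licenseService.getPlan(orgId);
    return plan.auditLogsEnabled;
  };
  return { isAllowed };
};

// Usage with a minimal stub:
const svc = someServiceFactoryLike({
  licenseService: { getPlan: async () => ({ auditLogsEnabled: true }) }
});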

Some files were not shown because too many files have changed in this diff.