Compare commits

..

7 Commits

Author SHA1 Message Date
bcf86aea90 wip 2025-03-18 20:21:05 -07:00
06024065cd wip 2025-03-18 18:04:16 -07:00
2d207147a6 wip 2025-03-17 19:49:34 -07:00
4c04d0f871 wip 2025-03-17 19:39:23 -07:00
484a9b28ef wip 2025-03-17 19:34:43 -07:00
94f79ade4a wip 2025-03-17 19:17:39 -07:00
5a63a6dbe4 wip 2025-03-17 16:15:18 -07:00
708 changed files with 7372 additions and 27177 deletions

View File

@ -1,102 +0,0 @@
# For most projects, this workflow file will not need changing; you simply need
# to commit it to your repository.
#
# You may wish to alter this file to override the set of languages analyzed,
# or to provide custom queries or build logic.
#
# ******** NOTE ********
# We have attempted to detect the languages in your repository. Please check
# the `language` matrix defined below to confirm you have the correct set of
# supported CodeQL languages.
#
name: "CodeQL Advanced"
on:
push:
branches: [ "main", "development" ]
pull_request:
branches: [ "main", "development" ]
schedule:
- cron: '33 7 * * 3'
jobs:
analyze:
name: Analyze (${{ matrix.language }})
# Runner size impacts CodeQL analysis time. To learn more, please see:
# - https://gh.io/recommended-hardware-resources-for-running-codeql
# - https://gh.io/supported-runners-and-hardware-resources
# - https://gh.io/using-larger-runners (GitHub.com only)
# Consider using larger runners or machines with greater resources for possible analysis time improvements.
runs-on: ${{ (matrix.language == 'swift' && 'macos-latest') || 'ubuntu-latest' }}
permissions:
# required for all workflows
security-events: write
# required to fetch internal or private CodeQL packs
packages: read
# only required for workflows in private repositories
actions: read
contents: read
strategy:
fail-fast: false
matrix:
include:
- language: actions
build-mode: none
- language: go
build-mode: autobuild
- language: javascript-typescript
build-mode: none
# CodeQL supports the following values for 'language': 'actions', 'c-cpp', 'csharp', 'go', 'java-kotlin', 'javascript-typescript', 'python', 'ruby', 'swift'
# Use 'c-cpp' to analyze code written in C, C++ or both
# Use 'java-kotlin' to analyze code written in Java, Kotlin or both
# Use 'javascript-typescript' to analyze code written in JavaScript, TypeScript or both
# To learn more about changing the languages that are analyzed or customizing the build mode for your analysis,
# see https://docs.github.com/en/code-security/code-scanning/creating-an-advanced-setup-for-code-scanning/customizing-your-advanced-setup-for-code-scanning.
# If you are analyzing a compiled language, you can modify the 'build-mode' for that language to customize how
# your codebase is analyzed, see https://docs.github.com/en/code-security/code-scanning/creating-an-advanced-setup-for-code-scanning/codeql-code-scanning-for-compiled-languages
steps:
- name: Checkout repository
uses: actions/checkout@v4
# Add any setup steps before running the `github/codeql-action/init` action.
# This includes steps like installing compilers or runtimes (`actions/setup-node`
# or others). This is typically only required for manual builds.
# - name: Setup runtime (example)
# uses: actions/setup-example@v1
# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
uses: github/codeql-action/init@v3
with:
languages: ${{ matrix.language }}
build-mode: ${{ matrix.build-mode }}
# If you wish to specify custom queries, you can do so here or in a config file.
# By default, queries listed here will override any specified in a config file.
# Prefix the list here with "+" to use these queries and those in the config file.
# For more details on CodeQL's query packs, refer to: https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs
# queries: security-extended,security-and-quality
# If the analyze step fails for one of the languages you are analyzing with
# "We were unable to automatically build your code", modify the matrix above
# to set the build mode to "manual" for that language. Then modify this step
# to build your code.
# Command-line programs to run using the OS shell.
# 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun
- if: matrix.build-mode == 'manual'
shell: bash
run: |
echo 'If you are using a "manual" build mode for one or more of the' \
'languages you are analyzing, replace this with the commands to build' \
'your code, for example:'
echo ' make bootstrap'
echo ' make release'
exit 1
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v3
with:
category: "/language:${{matrix.language}}"

View File

@ -1,27 +0,0 @@
name: Release K8 Operator Helm Chart
on:
workflow_dispatch:
jobs:
release-helm:
name: Release Helm Chart
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v2
- name: Install Helm
uses: azure/setup-helm@v3
with:
version: v3.10.0
- name: Install python
uses: actions/setup-python@v4
- name: Install Cloudsmith CLI
run: pip install --upgrade cloudsmith-cli
- name: Build and push helm package to CloudSmith
run: cd helm-charts && sh upload-k8s-operator-cloudsmith.sh
env:
CLOUDSMITH_API_KEY: ${{ secrets.CLOUDSMITH_API_KEY }}

View File

@ -1,146 +1,132 @@
name: Build and release CLI
on:
workflow_dispatch:
workflow_dispatch:
push:
# run only against tags
tags:
- "infisical-cli/v*.*.*"
push:
# run only against tags
tags:
- "infisical-cli/v*.*.*"
permissions:
contents: write
contents: write
jobs:
cli-integration-tests:
name: Run tests before deployment
uses: ./.github/workflows/run-cli-tests.yml
secrets:
CLI_TESTS_UA_CLIENT_ID: ${{ secrets.CLI_TESTS_UA_CLIENT_ID }}
CLI_TESTS_UA_CLIENT_SECRET: ${{ secrets.CLI_TESTS_UA_CLIENT_SECRET }}
CLI_TESTS_SERVICE_TOKEN: ${{ secrets.CLI_TESTS_SERVICE_TOKEN }}
CLI_TESTS_PROJECT_ID: ${{ secrets.CLI_TESTS_PROJECT_ID }}
CLI_TESTS_ENV_SLUG: ${{ secrets.CLI_TESTS_ENV_SLUG }}
CLI_TESTS_USER_EMAIL: ${{ secrets.CLI_TESTS_USER_EMAIL }}
CLI_TESTS_USER_PASSWORD: ${{ secrets.CLI_TESTS_USER_PASSWORD }}
CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE: ${{ secrets.CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE }}
cli-integration-tests:
name: Run tests before deployment
uses: ./.github/workflows/run-cli-tests.yml
secrets:
CLI_TESTS_UA_CLIENT_ID: ${{ secrets.CLI_TESTS_UA_CLIENT_ID }}
CLI_TESTS_UA_CLIENT_SECRET: ${{ secrets.CLI_TESTS_UA_CLIENT_SECRET }}
CLI_TESTS_SERVICE_TOKEN: ${{ secrets.CLI_TESTS_SERVICE_TOKEN }}
CLI_TESTS_PROJECT_ID: ${{ secrets.CLI_TESTS_PROJECT_ID }}
CLI_TESTS_ENV_SLUG: ${{ secrets.CLI_TESTS_ENV_SLUG }}
CLI_TESTS_USER_EMAIL: ${{ secrets.CLI_TESTS_USER_EMAIL }}
CLI_TESTS_USER_PASSWORD: ${{ secrets.CLI_TESTS_USER_PASSWORD }}
CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE: ${{ secrets.CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE }}
npm-release:
runs-on: ubuntu-latest
env:
working-directory: ./npm
needs:
- cli-integration-tests
- goreleaser
steps:
- uses: actions/checkout@v3
with:
fetch-depth: 0
- name: Extract version
run: |
VERSION=$(echo ${{ github.ref_name }} | sed 's/infisical-cli\/v//')
echo "Version extracted: $VERSION"
echo "CLI_VERSION=$VERSION" >> $GITHUB_ENV
- name: Print version
run: echo ${{ env.CLI_VERSION }}
- name: Setup Node
uses: actions/setup-node@8f152de45cc393bb48ce5d89d36b731f54556e65 # v4.0.0
with:
node-version: 20
cache: "npm"
cache-dependency-path: ./npm/package-lock.json
- name: Install dependencies
working-directory: ${{ env.working-directory }}
run: npm install --ignore-scripts
- name: Set NPM version
working-directory: ${{ env.working-directory }}
run: npm version ${{ env.CLI_VERSION }} --allow-same-version --no-git-tag-version
- name: Setup NPM
working-directory: ${{ env.working-directory }}
run: |
echo 'registry="https://registry.npmjs.org/"' > ./.npmrc
echo "//registry.npmjs.org/:_authToken=$NPM_TOKEN" >> ./.npmrc
echo 'registry="https://registry.npmjs.org/"' > ~/.npmrc
echo "//registry.npmjs.org/:_authToken=$NPM_TOKEN" >> ~/.npmrc
npm-release:
runs-on: ubuntu-latest
env:
NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
working-directory: ./npm
needs:
- cli-integration-tests
- goreleaser
steps:
- uses: actions/checkout@v3
with:
fetch-depth: 0
- name: Pack NPM
working-directory: ${{ env.working-directory }}
run: npm pack
- name: Extract version
run: |
VERSION=$(echo ${{ github.ref_name }} | sed 's/infisical-cli\/v//')
echo "Version extracted: $VERSION"
echo "CLI_VERSION=$VERSION" >> $GITHUB_ENV
- name: Publish NPM
working-directory: ${{ env.working-directory }}
run: npm publish --tarball=./infisical-sdk-${{github.ref_name}} --access public --registry=https://registry.npmjs.org/
env:
NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
- name: Print version
run: echo ${{ env.CLI_VERSION }}
goreleaser:
runs-on: ubuntu-latest
needs: [cli-integration-tests]
steps:
- uses: actions/checkout@v3
with:
fetch-depth: 0
- name: 🐋 Login to Docker Hub
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: 🔧 Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- run: git fetch --force --tags
- run: echo "Ref name ${{github.ref_name}}"
- uses: actions/setup-go@v3
with:
go-version: ">=1.19.3"
cache: true
cache-dependency-path: cli/go.sum
- name: Setup for libssl1.0-dev
run: |
echo 'deb http://security.ubuntu.com/ubuntu bionic-security main' | sudo tee -a /etc/apt/sources.list
sudo apt-key adv --keyserver keyserver.ubuntu.com --recv-keys 3B4FE6ACC0B21F32
sudo apt update
sudo apt-get install -y libssl1.0-dev
- name: OSXCross for CGO Support
run: |
mkdir ../../osxcross
git clone https://github.com/plentico/osxcross-target.git ../../osxcross/target
- uses: goreleaser/goreleaser-action@v4
with:
distribution: goreleaser-pro
version: v1.26.2-pro
args: release --clean
env:
GITHUB_TOKEN: ${{ secrets.GO_RELEASER_GITHUB_TOKEN }}
POSTHOG_API_KEY_FOR_CLI: ${{ secrets.POSTHOG_API_KEY_FOR_CLI }}
FURY_TOKEN: ${{ secrets.FURYPUSHTOKEN }}
AUR_KEY: ${{ secrets.AUR_KEY }}
GORELEASER_KEY: ${{ secrets.GORELEASER_KEY }}
- uses: actions/setup-python@v4
- run: pip install --upgrade cloudsmith-cli
- uses: ruby/setup-ruby@ec02537da5712d66d4d50a0f33b7eb52773b5ed1
with:
ruby-version: "3.1" # Not needed with a .ruby-version file
- name: Install deb-s3
run: gem install deb-s3
- name: Configure GPG Key
run: echo -n "$GPG_SIGNING_KEY" | base64 --decode | gpg --batch --passphrase "$GPG_SIGNING_KEY_PASSPHRASE" --import
env:
GPG_SIGNING_KEY: ${{ secrets.GPG_SIGNING_KEY }}
GPG_SIGNING_KEY_PASSPHRASE: ${{ secrets.GPG_SIGNING_KEY_PASSPHRASE }}
- name: Publish to CloudSmith
run: sh cli/upload_to_cloudsmith.sh
env:
CLOUDSMITH_API_KEY: ${{ secrets.CLOUDSMITH_API_KEY }}
INFISICAL_CLI_S3_BUCKET: ${{ secrets.INFISICAL_CLI_S3_BUCKET }}
INFISICAL_CLI_REPO_SIGNING_KEY_ID: ${{ secrets.INFISICAL_CLI_REPO_SIGNING_KEY_ID }}
AWS_ACCESS_KEY_ID: ${{ secrets.INFISICAL_CLI_REPO_AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.INFISICAL_CLI_REPO_AWS_SECRET_ACCESS_KEY }}
- name: Setup Node
uses: actions/setup-node@8f152de45cc393bb48ce5d89d36b731f54556e65 # v4.0.0
with:
node-version: 20
cache: "npm"
cache-dependency-path: ./npm/package-lock.json
- name: Install dependencies
working-directory: ${{ env.working-directory }}
run: npm install --ignore-scripts
- name: Set NPM version
working-directory: ${{ env.working-directory }}
run: npm version ${{ env.CLI_VERSION }} --allow-same-version --no-git-tag-version
- name: Setup NPM
working-directory: ${{ env.working-directory }}
run: |
echo 'registry="https://registry.npmjs.org/"' > ./.npmrc
echo "//registry.npmjs.org/:_authToken=$NPM_TOKEN" >> ./.npmrc
echo 'registry="https://registry.npmjs.org/"' > ~/.npmrc
echo "//registry.npmjs.org/:_authToken=$NPM_TOKEN" >> ~/.npmrc
env:
NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
- name: Pack NPM
working-directory: ${{ env.working-directory }}
run: npm pack
- name: Publish NPM
working-directory: ${{ env.working-directory }}
run: npm publish --tarball=./infisical-sdk-${{github.ref_name}} --access public --registry=https://registry.npmjs.org/
env:
NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
goreleaser:
runs-on: ubuntu-latest
needs: [cli-integration-tests]
steps:
- uses: actions/checkout@v3
with:
fetch-depth: 0
- name: 🐋 Login to Docker Hub
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: 🔧 Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- run: git fetch --force --tags
- run: echo "Ref name ${{github.ref_name}}"
- uses: actions/setup-go@v3
with:
go-version: ">=1.19.3"
cache: true
cache-dependency-path: cli/go.sum
- name: Setup for libssl1.0-dev
run: |
echo 'deb http://security.ubuntu.com/ubuntu bionic-security main' | sudo tee -a /etc/apt/sources.list
sudo apt-key adv --keyserver keyserver.ubuntu.com --recv-keys 3B4FE6ACC0B21F32
sudo apt update
sudo apt-get install -y libssl1.0-dev
- name: OSXCross for CGO Support
run: |
mkdir ../../osxcross
git clone https://github.com/plentico/osxcross-target.git ../../osxcross/target
- uses: goreleaser/goreleaser-action@v4
with:
distribution: goreleaser-pro
version: v1.26.2-pro
args: release --clean
env:
GITHUB_TOKEN: ${{ secrets.GO_RELEASER_GITHUB_TOKEN }}
POSTHOG_API_KEY_FOR_CLI: ${{ secrets.POSTHOG_API_KEY_FOR_CLI }}
FURY_TOKEN: ${{ secrets.FURYPUSHTOKEN }}
AUR_KEY: ${{ secrets.AUR_KEY }}
GORELEASER_KEY: ${{ secrets.GORELEASER_KEY }}
- uses: actions/setup-python@v4
- run: pip install --upgrade cloudsmith-cli
- name: Publish to CloudSmith
run: sh cli/upload_to_cloudsmith.sh
env:
CLOUDSMITH_API_KEY: ${{ secrets.CLOUDSMITH_API_KEY }}

View File

@ -1,107 +1,52 @@
name: Release K8 Operator Docker Image
name: Release image + Helm chart K8s Operator
on:
push:
tags:
- "infisical-k8-operator/v*.*.*"
permissions:
contents: write
pull-requests: write
push:
tags:
- "infisical-k8-operator/v*.*.*"
jobs:
release-image:
name: Generate Helm Chart PR
runs-on: ubuntu-latest
outputs:
pr_number: ${{ steps.create-pr.outputs.pull-request-number }}
steps:
- name: Extract version from tag
id: extract_version
run: echo "::set-output name=version::${GITHUB_REF_NAME#infisical-k8-operator/}"
release:
runs-on: ubuntu-latest
steps:
- name: Extract version from tag
id: extract_version
run: echo "::set-output name=version::${GITHUB_REF_NAME#infisical-k8-operator/}"
- uses: actions/checkout@v2
- name: Checkout code
uses: actions/checkout@v2
- name: 🔧 Set up QEMU
uses: docker/setup-qemu-action@v1
# Dependency for helm generation
- name: Install Helm
uses: azure/setup-helm@v3
with:
version: v3.10.0
- name: 🔧 Set up Docker Buildx
uses: docker/setup-buildx-action@v1
# Dependency for helm generation
- name: Install Go
uses: actions/setup-go@v4
with:
go-version: 1.21
- name: 🐋 Login to Docker Hub
uses: docker/login-action@v1
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
# Install binaries for helm generation
- name: Install dependencies
working-directory: k8-operator
run: |
make helmify
make kustomize
make controller-gen
- name: Build and push
id: docker_build
uses: docker/build-push-action@v2
with:
context: k8-operator
push: true
platforms: linux/amd64,linux/arm64
tags: |
infisical/kubernetes-operator:latest
infisical/kubernetes-operator:${{ steps.extract_version.outputs.version }}
- name: Generate Helm Chart
working-directory: k8-operator
run: make helm
- name: Update Helm Chart Version
run: ./k8-operator/scripts/update-version.sh ${{ steps.extract_version.outputs.version }}
- name: Debug - Check file changes
run: |
echo "Current git status:"
git status
echo ""
echo "Modified files:"
git diff --name-only
# If there is no diff, exit with an error: the version should always change, so an empty diff means something is wrong.
if [ -z "$(git diff --name-only)" ]; then
echo "No helm changes or version changes. Invalid release detected, Exiting."
exit 1
fi
- name: Create Helm Chart PR
id: create-pr
uses: peter-evans/create-pull-request@v5
with:
token: ${{ secrets.GITHUB_TOKEN }}
commit-message: "Update Helm chart to version ${{ steps.extract_version.outputs.version }}"
committer: GitHub <noreply@github.com>
author: ${{ github.actor }} <${{ github.actor }}@users.noreply.github.com>
branch: helm-update-${{ steps.extract_version.outputs.version }}
delete-branch: true
title: "Update Helm chart to version ${{ steps.extract_version.outputs.version }}"
body: |
This PR updates the Helm chart to version `${{ steps.extract_version.outputs.version }}`.
Additionally, the Helm chart has been updated to match the latest operator code changes.
Associated Release Workflow: https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}
Once you have approved this PR, you can trigger the helm release workflow manually.
base: main
- name: 🔧 Set up QEMU
uses: docker/setup-qemu-action@v1
- name: 🔧 Set up Docker Buildx
uses: docker/setup-buildx-action@v1
- name: 🐋 Login to Docker Hub
uses: docker/login-action@v1
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Build and push
id: docker_build
uses: docker/build-push-action@v2
with:
context: k8-operator
push: true
platforms: linux/amd64,linux/arm64
tags: |
infisical/kubernetes-operator:latest
infisical/kubernetes-operator:${{ steps.extract_version.outputs.version }}
- name: Checkout
uses: actions/checkout@v2
- name: Install Helm
uses: azure/setup-helm@v3
with:
version: v3.10.0
- name: Install python
uses: actions/setup-python@v4
- name: Install Cloudsmith CLI
run: pip install --upgrade cloudsmith-cli
- name: Build and push helm package to Cloudsmith
run: cd helm-charts && sh upload-k8s-operator-cloudsmith.sh
env:
CLOUDSMITH_API_KEY: ${{ secrets.CLOUDSMITH_API_KEY }}

View File

@ -8,9 +8,3 @@ frontend/src/views/Project/MembersPage/components/MemberListTab/MemberRoleForm/S
docs/mint.json:generic-api-key:651
backend/src/ee/services/hsm/hsm-service.ts:generic-api-key:134
docs/documentation/platform/audit-log-streams/audit-log-streams.mdx:generic-api-key:104
docs/cli/commands/bootstrap.mdx:jwt:86
docs/documentation/platform/audit-log-streams/audit-log-streams.mdx:generic-api-key:102
docs/self-hosting/guides/automated-bootstrapping.mdx:jwt:74
frontend/src/pages/secret-manager/SecretDashboardPage/components/SecretListView/SecretDetailSidebar.tsx:generic-api-key:72
k8-operator/config/samples/crd/pushsecret/source-secret-with-templating.yaml:private-key:11
k8-operator/config/samples/crd/pushsecret/push-secret-with-template.yaml:private-key:52

View File

@ -1,85 +0,0 @@
FROM node:20-slim
# ? Set up a test SoftHSM module. In production, a real HSM is used.
ARG SOFTHSM2_VERSION=2.5.0
ENV SOFTHSM2_VERSION=${SOFTHSM2_VERSION} \
SOFTHSM2_SOURCES=/tmp/softhsm2
# Install build dependencies, including python3 (required for pkcs11js and partly for the TDS driver)
RUN apt-get update && apt-get install -y \
build-essential \
autoconf \
automake \
git \
libtool \
libssl-dev \
python3 \
make \
g++ \
openssh-client \
curl \
pkg-config \
perl \
wget
# Install dependencies for TDS driver (required for SAP ASE dynamic secrets)
RUN apt-get install -y \
unixodbc \
unixodbc-dev \
freetds-dev \
freetds-bin \
tdsodbc
RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nSetup = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nFileUsage = 1\n" > /etc/odbcinst.ini
# Build and install SoftHSM2
RUN git clone https://github.com/opendnssec/SoftHSMv2.git ${SOFTHSM2_SOURCES}
WORKDIR ${SOFTHSM2_SOURCES}
RUN git checkout ${SOFTHSM2_VERSION} -b ${SOFTHSM2_VERSION} \
&& sh autogen.sh \
&& ./configure --prefix=/usr/local --disable-gost \
&& make \
&& make install
WORKDIR /root
RUN rm -fr ${SOFTHSM2_SOURCES}
# Install pkcs11-tool
RUN apt-get install -y opensc
RUN mkdir -p /etc/softhsm2/tokens && \
softhsm2-util --init-token --slot 0 --label "auth-app" --pin 1234 --so-pin 0000
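# Build OpenSSL 3.1.2 with the FIPS provider module; it is activated at runtime via the
# OPENSSL_CONF (nodejs.cnf) and OPENSSL_MODULES environment variables set further below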
WORKDIR /openssl-build
RUN wget https://www.openssl.org/source/openssl-3.1.2.tar.gz \
&& tar -xf openssl-3.1.2.tar.gz \
&& cd openssl-3.1.2 \
&& ./Configure enable-fips \
&& make \
&& make install_fips
# ? App setup
# Install Infisical CLI
RUN curl -1sLf 'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.deb.sh' | bash && \
apt-get update && \
apt-get install -y infisical=0.8.1
WORKDIR /app
COPY package.json package.json
COPY package-lock.json package-lock.json
RUN npm install
COPY . .
ENV HOST=0.0.0.0
ENV OPENSSL_CONF=/app/nodejs.cnf
ENV OPENSSL_MODULES=/usr/local/lib/ossl-modules
ENV NODE_OPTIONS=--force-fips
CMD ["npm", "run", "dev:docker"]

View File

@ -11,7 +11,6 @@ export const mockQueue = (): TQueueServiceFactory => {
job[name] = jobData;
},
queuePg: async () => {},
schedulePg: async () => {},
initialize: async () => {},
shutdown: async () => undefined,
stopRepeatableJob: async () => true,

View File

@ -1,16 +0,0 @@
nodejs_conf = nodejs_init
.include /usr/local/ssl/fipsmodule.cnf
[nodejs_init]
providers = provider_sect
[provider_sect]
default = default_sect
fips = fips_sect
[default_sect]
activate = 1
[algorithm_sect]
default_properties = fips=yes
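A quick way to sanity-check this configuration at runtime, assuming the OPENSSL_CONF, OPENSSL_MODULES and NODE_OPTIONS=--force-fips settings from the Dockerfile above are in effect (an illustrative check, not part of this change):

import crypto from "crypto";

// Returns 1 when the OpenSSL FIPS provider is active, 0 otherwise
console.log(crypto.getFips());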

View File

@ -0,0 +1,7 @@
import "@fastify/request-context";
declare module "@fastify/request-context" {
interface RequestContextData {
reqId: string;
}
}
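For context, a minimal sketch of how this declaration-merging augmentation is typically consumed with the standard @fastify/request-context API; the hook and route below are illustrative, not part of this change:

import Fastify from "fastify";
import { fastifyRequestContext, requestContext } from "@fastify/request-context";
import { randomUUID } from "crypto";

const server = Fastify();
server.register(fastifyRequestContext);

server.addHook("onRequest", async () => {
  // `reqId` is a known, typed key thanks to the RequestContextData augmentation above
  requestContext.set("reqId", randomUUID());
});

server.get("/ping", async () => ({ reqId: requestContext.get("reqId") }));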

View File

@ -101,13 +101,6 @@ import { TWorkflowIntegrationServiceFactory } from "@app/services/workflow-integ
declare module "@fastify/request-context" {
interface RequestContextData {
reqId: string;
identityAuthInfo?: {
identityId: string;
oidc?: {
claims: Record<string, string>;
};
};
identityPermissionMetadata?: Record<string, unknown>; // filled by permission service
}
}

View File

@ -17,9 +17,6 @@ import {
TApiKeys,
TApiKeysInsert,
TApiKeysUpdate,
TAppConnections,
TAppConnectionsInsert,
TAppConnectionsUpdate,
TAuditLogs,
TAuditLogsInsert,
TAuditLogStreams,
@ -68,9 +65,6 @@ import {
TDynamicSecrets,
TDynamicSecretsInsert,
TDynamicSecretsUpdate,
TExternalGroupOrgRoleMappings,
TExternalGroupOrgRoleMappingsInsert,
TExternalGroupOrgRoleMappingsUpdate,
TExternalKms,
TExternalKmsInsert,
TExternalKmsUpdate,
@ -305,12 +299,6 @@ import {
TSecretRotations,
TSecretRotationsInsert,
TSecretRotationsUpdate,
TSecretRotationsV2,
TSecretRotationsV2Insert,
TSecretRotationsV2Update,
TSecretRotationV2SecretMappings,
TSecretRotationV2SecretMappingsInsert,
TSecretRotationV2SecretMappingsUpdate,
TSecrets,
TSecretScanningGitRisks,
TSecretScanningGitRisksInsert,
@ -332,27 +320,15 @@ import {
TSecretSnapshotsInsert,
TSecretSnapshotsUpdate,
TSecretsUpdate,
TSecretsV2,
TSecretsV2Insert,
TSecretsV2Update,
TSecretSyncs,
TSecretSyncsInsert,
TSecretSyncsUpdate,
TSecretTagJunction,
TSecretTagJunctionInsert,
TSecretTagJunctionUpdate,
TSecretTags,
TSecretTagsInsert,
TSecretTagsUpdate,
TSecretV2TagJunction,
TSecretV2TagJunctionInsert,
TSecretV2TagJunctionUpdate,
TSecretVersions,
TSecretVersionsInsert,
TSecretVersionsUpdate,
TSecretVersionsV2,
TSecretVersionsV2Insert,
TSecretVersionsV2Update,
TSecretVersionTagJunction,
TSecretVersionTagJunctionInsert,
TSecretVersionTagJunctionUpdate,
@ -411,6 +387,29 @@ import {
TWorkflowIntegrationsInsert,
TWorkflowIntegrationsUpdate
} from "@app/db/schemas";
import { TAppConnections, TAppConnectionsInsert, TAppConnectionsUpdate } from "@app/db/schemas/app-connections";
import {
TExternalGroupOrgRoleMappings,
TExternalGroupOrgRoleMappingsInsert,
TExternalGroupOrgRoleMappingsUpdate
} from "@app/db/schemas/external-group-org-role-mappings";
import {
TSecretRotationsV2,
TSecretRotationsV2Insert,
TSecretRotationsV2Update
} from "@app/db/schemas/secret-rotations-v2";
import { TSecretSyncs, TSecretSyncsInsert, TSecretSyncsUpdate } from "@app/db/schemas/secret-syncs";
import {
TSecretV2TagJunction,
TSecretV2TagJunctionInsert,
TSecretV2TagJunctionUpdate
} from "@app/db/schemas/secret-v2-tag-junction";
import {
TSecretVersionsV2,
TSecretVersionsV2Insert,
TSecretVersionsV2Update
} from "@app/db/schemas/secret-versions-v2";
import { TSecretsV2, TSecretsV2Insert, TSecretsV2Update } from "@app/db/schemas/secrets-v2";
declare module "knex" {
namespace Knex {
@ -961,10 +960,5 @@ declare module "knex/types/tables" {
TSecretRotationsV2Insert,
TSecretRotationsV2Update
>;
[TableName.SecretRotationV2SecretMapping]: KnexOriginal.CompositeTableType<
TSecretRotationV2SecretMappings,
TSecretRotationV2SecretMappingsInsert,
TSecretRotationV2SecretMappingsUpdate
>;
}
}

View File

@ -16,7 +16,7 @@ const createAuditLogPartition = async (knex: Knex, startDate: Date, endDate: Dat
const startDateStr = formatPartitionDate(startDate);
const endDateStr = formatPartitionDate(endDate);
const partitionName = `${TableName.AuditLog}_${startDateStr.replaceAll("-", "")}_${endDateStr.replaceAll("-", "")}`;
const partitionName = `${TableName.AuditLog}_${startDateStr.replace(/-/g, "")}_${endDateStr.replace(/-/g, "")}`;
await knex.schema.raw(
`CREATE TABLE ${partitionName} PARTITION OF ${TableName.AuditLog} FOR VALUES FROM ('${startDateStr}') TO ('${endDateStr}')`
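Side note on the hunk above: for these date strings, String.prototype.replaceAll and replace with a global regex produce identical partition names. A quick illustration (the table prefix and dates are made-up values):

const startDateStr = "2025-03-01"; // illustrative only
const endDateStr = "2025-04-01";

const withReplaceAll = `audit_logs_${startDateStr.replaceAll("-", "")}_${endDateStr.replaceAll("-", "")}`;
const withRegex = `audit_logs_${startDateStr.replace(/-/g, "")}_${endDateStr.replace(/-/g, "")}`;

console.log(withReplaceAll === withRegex); // true -> "audit_logs_20250301_20250401"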

View File

@ -1,31 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasColumn(TableName.Organization, "shouldUseNewPrivilegeSystem"))) {
await knex.schema.alterTable(TableName.Organization, (t) => {
t.boolean("shouldUseNewPrivilegeSystem");
t.string("privilegeUpgradeInitiatedByUsername");
t.dateTime("privilegeUpgradeInitiatedAt");
});
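// backfill existing organizations with false before enforcing NOT NULL below; new organizations will default to true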
await knex(TableName.Organization).update({
shouldUseNewPrivilegeSystem: false
});
await knex.schema.alterTable(TableName.Organization, (t) => {
t.boolean("shouldUseNewPrivilegeSystem").defaultTo(true).notNullable().alter();
});
}
}
export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasColumn(TableName.Organization, "shouldUseNewPrivilegeSystem")) {
await knex.schema.alterTable(TableName.Organization, (t) => {
t.dropColumn("shouldUseNewPrivilegeSystem");
t.dropColumn("privilegeUpgradeInitiatedByUsername");
t.dropColumn("privilegeUpgradeInitiatedAt");
});
}
}

View File

@ -1,21 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const hasMappingField = await knex.schema.hasColumn(TableName.IdentityOidcAuth, "claimMetadataMapping");
if (!hasMappingField) {
await knex.schema.alterTable(TableName.IdentityOidcAuth, (t) => {
t.jsonb("claimMetadataMapping");
});
}
}
export async function down(knex: Knex): Promise<void> {
const hasMappingField = await knex.schema.hasColumn(TableName.IdentityOidcAuth, "claimMetadataMapping");
if (hasMappingField) {
await knex.schema.alterTable(TableName.IdentityOidcAuth, (t) => {
t.dropColumn("claimMetadataMapping");
});
}
}

View File

@ -1,19 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas/models";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasColumn(TableName.SuperAdmin, "adminIdentityIds"))) {
await knex.schema.alterTable(TableName.SuperAdmin, (t) => {
t.specificType("adminIdentityIds", "text[]");
});
}
}
export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasColumn(TableName.SuperAdmin, "adminIdentityIds")) {
await knex.schema.alterTable(TableName.SuperAdmin, (t) => {
t.dropColumn("adminIdentityIds");
});
}
}

View File

@ -3,17 +3,17 @@ import { Knex } from "knex";
import { TableName } from "@app/db/schemas";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasColumn(TableName.AppConnection, "isPlatformManagedCredentials"))) {
if (!(await knex.schema.hasColumn(TableName.AppConnection, "isPlatformManaged"))) {
await knex.schema.alterTable(TableName.AppConnection, (t) => {
t.boolean("isPlatformManagedCredentials").defaultTo(false);
t.boolean("isPlatformManaged").defaultTo(false);
});
}
}
export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasColumn(TableName.AppConnection, "isPlatformManagedCredentials")) {
if (await knex.schema.hasColumn(TableName.AppConnection, "isPlatformManaged")) {
await knex.schema.alterTable(TableName.AppConnection, (t) => {
t.dropColumn("isPlatformManagedCredentials");
t.dropColumn("isPlatformManaged");
});
}
}

View File

@ -0,0 +1,44 @@
import { Knex } from "knex";
import { TableName } from "@app/db/schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "@app/db/utils";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasTable(TableName.SecretRotationV2))) {
await knex.schema.createTable(TableName.SecretRotationV2, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.string("name", 32).notNullable();
t.string("description");
t.string("type").notNullable();
t.integer("interval").notNullable();
t.jsonb("parameters").notNullable();
t.binary("encryptedGeneratedCredentials").notNullable();
t.boolean("isAutoRotationEnabled").notNullable().defaultTo(true);
t.integer("activeIndex").notNullable().defaultTo(0);
// we include projectId in addition to folderId because we allow folderId to be null (if the folder
// is deleted), so the rotation's configuration is preserved
t.string("projectId").notNullable();
t.foreign("projectId").references("id").inTable(TableName.Project).onDelete("CASCADE");
t.uuid("folderId");
t.foreign("folderId").references("id").inTable(TableName.SecretFolder).onDelete("SET NULL");
t.uuid("connectionId").notNullable();
t.foreign("connectionId").references("id").inTable(TableName.AppConnection);
t.timestamps(true, true, true);
t.string("rotationStatus");
t.string("lastRotationJobId");
t.string("lastRotationMessage", 1024);
t.datetime("lastRotatedAt");
});
await createOnUpdateTrigger(knex, TableName.SecretRotationV2);
await knex.schema.alterTable(TableName.SecretRotationV2, (t) => {
t.unique(["projectId", "name"]);
});
}
}
export async function down(knex: Knex): Promise<void> {
await knex.schema.dropTableIfExists(TableName.SecretRotationV2);
await dropOnUpdateTrigger(knex, TableName.SecretRotationV2);
}

View File

@ -1,23 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const doesParentColumExist = await knex.schema.hasColumn(TableName.SecretFolder, "parentId");
const doesNameColumnExist = await knex.schema.hasColumn(TableName.SecretFolder, "name");
if (doesParentColumExist && doesNameColumnExist) {
await knex.schema.alterTable(TableName.SecretFolder, (t) => {
t.index(["parentId", "name"]);
});
}
}
export async function down(knex: Knex): Promise<void> {
const doesParentColumExist = await knex.schema.hasColumn(TableName.SecretFolder, "parentId");
const doesNameColumnExist = await knex.schema.hasColumn(TableName.SecretFolder, "name");
if (doesParentColumExist && doesNameColumnExist) {
await knex.schema.alterTable(TableName.SecretFolder, (t) => {
t.dropIndex(["parentId", "name"]);
});
}
}

View File

@ -1,19 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const hasReviewerJwtCol = await knex.schema.hasColumn(
TableName.IdentityKubernetesAuth,
"encryptedKubernetesTokenReviewerJwt"
);
if (hasReviewerJwtCol) {
await knex.schema.alterTable(TableName.IdentityKubernetesAuth, (t) => {
t.binary("encryptedKubernetesTokenReviewerJwt").nullable().alter();
});
}
}
export async function down(): Promise<void> {
// we can't revert the column to non-nullable; the migration would fail
}

View File

@ -1,29 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas/models";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasColumn(TableName.SecretApprovalPolicy, "allowedSelfApprovals"))) {
await knex.schema.alterTable(TableName.SecretApprovalPolicy, (t) => {
t.boolean("allowedSelfApprovals").notNullable().defaultTo(true);
});
}
if (!(await knex.schema.hasColumn(TableName.AccessApprovalPolicy, "allowedSelfApprovals"))) {
await knex.schema.alterTable(TableName.AccessApprovalPolicy, (t) => {
t.boolean("allowedSelfApprovals").notNullable().defaultTo(true);
});
}
}
export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasColumn(TableName.SecretApprovalPolicy, "allowedSelfApprovals")) {
await knex.schema.alterTable(TableName.SecretApprovalPolicy, (t) => {
t.dropColumn("allowedSelfApprovals");
});
}
if (await knex.schema.hasColumn(TableName.AccessApprovalPolicy, "allowedSelfApprovals")) {
await knex.schema.alterTable(TableName.AccessApprovalPolicy, (t) => {
t.dropColumn("allowedSelfApprovals");
});
}
}

View File

@ -1,58 +0,0 @@
import { Knex } from "knex";
import { TableName } from "@app/db/schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "@app/db/utils";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasTable(TableName.SecretRotationV2))) {
await knex.schema.createTable(TableName.SecretRotationV2, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.string("name", 32).notNullable();
t.string("description");
t.string("type").notNullable();
t.jsonb("parameters").notNullable();
t.jsonb("secretsMapping").notNullable();
t.binary("encryptedGeneratedCredentials").notNullable();
t.boolean("isAutoRotationEnabled").notNullable().defaultTo(true);
t.integer("activeIndex").notNullable().defaultTo(0);
t.uuid("folderId").notNullable();
t.foreign("folderId").references("id").inTable(TableName.SecretFolder).onDelete("CASCADE");
t.uuid("connectionId").notNullable();
t.foreign("connectionId").references("id").inTable(TableName.AppConnection);
t.timestamps(true, true, true);
t.integer("rotationInterval").notNullable();
t.jsonb("rotateAtUtc").notNullable(); // { hours: number; minutes: number }
t.string("rotationStatus").notNullable();
t.datetime("lastRotationAttemptedAt").notNullable();
t.datetime("lastRotatedAt").notNullable();
t.binary("encryptedLastRotationMessage"); // we encrypt this because it may contain sensitive info (SQL errors showing credentials)
t.string("lastRotationJobId");
t.datetime("nextRotationAt");
t.boolean("isLastRotationManual").notNullable().defaultTo(true); // creation is considered a "manual" rotation
});
await createOnUpdateTrigger(knex, TableName.SecretRotationV2);
await knex.schema.alterTable(TableName.SecretRotationV2, (t) => {
t.unique(["folderId", "name"]);
});
}
if (!(await knex.schema.hasTable(TableName.SecretRotationV2SecretMapping))) {
await knex.schema.createTable(TableName.SecretRotationV2SecretMapping, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.uuid("secretId").notNullable();
// scott: this FK is deferred so it blocks direct secret deletion but does not prevent folder/environment/project deletion
// i.e. if the rotation is being deleted as well we permit it, otherwise we throw
t.foreign("secretId").references("id").inTable(TableName.SecretV2).deferrable("deferred");
t.uuid("rotationId").notNullable();
t.foreign("rotationId").references("id").inTable(TableName.SecretRotationV2).onDelete("CASCADE");
});
}
}
export async function down(knex: Knex): Promise<void> {
await knex.schema.dropTableIfExists(TableName.SecretRotationV2SecretMapping);
await knex.schema.dropTableIfExists(TableName.SecretRotationV2);
await dropOnUpdateTrigger(knex, TableName.SecretRotationV2);
}
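To make the deferred foreign key above concrete, here is a minimal sketch of the deletion flow it enables; the connection string, ids, and the "secrets_v2" table name are illustrative assumptions, not values taken from this migration:

import knexFactory from "knex";

const knex = knexFactory({ client: "pg", connection: process.env.DATABASE_URL });

async function deleteRotationAndMappedSecrets(rotationId: string, mappedSecretIds: string[]) {
  await knex.transaction(async (trx) => {
    // Deleting the rotation cascades to its secret-mapping rows...
    await trx("secret_rotations_v2").where({ id: rotationId }).delete();
    // ...so the mapped secrets can be removed in the same transaction: the deferred
    // FK is only checked at commit, by which point no mapping rows reference them.
    await trx("secrets_v2").whereIn("id", mappedSecretIds).delete();
  });
}

// Deleting a mapped secret on its own still fails at commit time, which is the
// "block secret deletion" behavior described in the comment above.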

View File

@ -1,21 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const hasCol = await knex.schema.hasColumn(TableName.SecretFolder, "lastSecretModified");
if (!hasCol) {
await knex.schema.alterTable(TableName.SecretFolder, (t) => {
t.datetime("lastSecretModified");
});
}
}
export async function down(knex: Knex): Promise<void> {
const hasCol = await knex.schema.hasColumn(TableName.SecretFolder, "lastSecretModified");
if (hasCol) {
await knex.schema.alterTable(TableName.SecretFolder, (t) => {
t.dropColumn("lastSecretModified");
});
}
}

View File

@ -16,8 +16,7 @@ export const AccessApprovalPoliciesSchema = z.object({
createdAt: z.date(),
updatedAt: z.date(),
enforcementLevel: z.string().default("hard"),
deletedAt: z.date().nullable().optional(),
allowedSelfApprovals: z.boolean().default(true)
deletedAt: z.date().nullable().optional()
});
export type TAccessApprovalPolicies = z.infer<typeof AccessApprovalPoliciesSchema>;

View File

@ -20,7 +20,7 @@ export const AppConnectionsSchema = z.object({
orgId: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date(),
isPlatformManagedCredentials: z.boolean().default(false).nullable().optional()
isPlatformManaged: z.boolean().default(false).nullable().optional()
});
export type TAppConnections = z.infer<typeof AppConnectionsSchema>;

View File

@ -28,7 +28,7 @@ export const IdentityKubernetesAuthsSchema = z.object({
allowedNamespaces: z.string(),
allowedNames: z.string(),
allowedAudience: z.string(),
encryptedKubernetesTokenReviewerJwt: zodBuffer.nullable().optional(),
encryptedKubernetesTokenReviewerJwt: zodBuffer,
encryptedKubernetesCaCertificate: zodBuffer.nullable().optional()
});

View File

@ -26,8 +26,7 @@ export const IdentityOidcAuthsSchema = z.object({
boundSubject: z.string().nullable().optional(),
createdAt: z.date(),
updatedAt: z.date(),
encryptedCaCertificate: zodBuffer.nullable().optional(),
claimMetadataMapping: z.unknown().nullable().optional()
encryptedCaCertificate: zodBuffer.nullable().optional()
});
export type TIdentityOidcAuths = z.infer<typeof IdentityOidcAuthsSchema>;

View File

@ -3,7 +3,6 @@ export * from "./access-approval-policies-approvers";
export * from "./access-approval-requests";
export * from "./access-approval-requests-reviewers";
export * from "./api-keys";
export * from "./app-connections";
export * from "./audit-log-streams";
export * from "./audit-logs";
export * from "./auth-token-sessions";
@ -20,7 +19,6 @@ export * from "./certificate-templates";
export * from "./certificates";
export * from "./dynamic-secret-leases";
export * from "./dynamic-secrets";
export * from "./external-group-org-role-mappings";
export * from "./external-kms";
export * from "./gateways";
export * from "./git-app-install-sessions";
@ -99,16 +97,13 @@ export * from "./secret-references";
export * from "./secret-references-v2";
export * from "./secret-rotation-output-v2";
export * from "./secret-rotation-outputs";
export * from "./secret-rotation-v2-secret-mappings";
export * from "./secret-rotations";
export * from "./secret-rotations-v2";
export * from "./secret-scanning-git-risks";
export * from "./secret-sharing";
export * from "./secret-snapshot-folders";
export * from "./secret-snapshot-secrets";
export * from "./secret-snapshot-secrets-v2";
export * from "./secret-snapshots";
export * from "./secret-syncs";
export * from "./secret-tag-junction";
export * from "./secret-tags";
export * from "./secret-v2-tag-junction";

View File

@ -141,8 +141,7 @@ export enum TableName {
KmipOrgConfig = "kmip_org_configs",
KmipOrgServerCertificates = "kmip_org_server_certificates",
KmipClientCertificates = "kmip_client_certificates",
SecretRotationV2 = "secret_rotations_v2",
SecretRotationV2SecretMapping = "secret_rotation_v2_secret_mappings"
SecretRotationV2 = "secret_rotations_v2"
}
export type TImmutableDBKeys = "id" | "createdAt" | "updatedAt";
@ -235,8 +234,3 @@ export enum ActionProjectType {
// project operations that happen on all types
Any = "any"
}
export enum SortDirection {
ASC = "asc",
DESC = "desc"
}

View File

@ -23,10 +23,7 @@ export const OrganizationsSchema = z.object({
defaultMembershipRole: z.string().default("member"),
enforceMfa: z.boolean().default(false),
selectedMfaMethod: z.string().nullable().optional(),
allowSecretSharingOutsideOrganization: z.boolean().default(true).nullable().optional(),
shouldUseNewPrivilegeSystem: z.boolean().default(true),
privilegeUpgradeInitiatedByUsername: z.string().nullable().optional(),
privilegeUpgradeInitiatedAt: z.date().nullable().optional()
allowSecretSharingOutsideOrganization: z.boolean().default(true).nullable().optional()
});
export type TOrganizations = z.infer<typeof OrganizationsSchema>;

View File

@ -16,8 +16,7 @@ export const SecretApprovalPoliciesSchema = z.object({
createdAt: z.date(),
updatedAt: z.date(),
enforcementLevel: z.string().default("hard"),
deletedAt: z.date().nullable().optional(),
allowedSelfApprovals: z.boolean().default(true)
deletedAt: z.date().nullable().optional()
});
export type TSecretApprovalPolicies = z.infer<typeof SecretApprovalPoliciesSchema>;

View File

@ -16,8 +16,7 @@ export const SecretFoldersSchema = z.object({
envId: z.string().uuid(),
parentId: z.string().uuid().nullable().optional(),
isReserved: z.boolean().default(false).nullable().optional(),
description: z.string().nullable().optional(),
lastSecretModified: z.date().nullable().optional()
description: z.string().nullable().optional()
});
export type TSecretFolders = z.infer<typeof SecretFoldersSchema>;

View File

@ -1,23 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const SecretRotationV2SecretMappingsSchema = z.object({
id: z.string().uuid(),
secretId: z.string().uuid(),
rotationId: z.string().uuid()
});
export type TSecretRotationV2SecretMappings = z.infer<typeof SecretRotationV2SecretMappingsSchema>;
export type TSecretRotationV2SecretMappingsInsert = Omit<
z.input<typeof SecretRotationV2SecretMappingsSchema>,
TImmutableDBKeys
>;
export type TSecretRotationV2SecretMappingsUpdate = Partial<
Omit<z.input<typeof SecretRotationV2SecretMappingsSchema>, TImmutableDBKeys>
>;

View File

@ -14,24 +14,20 @@ export const SecretRotationsV2Schema = z.object({
name: z.string(),
description: z.string().nullable().optional(),
type: z.string(),
interval: z.number(),
parameters: z.unknown(),
secretsMapping: z.unknown(),
encryptedGeneratedCredentials: zodBuffer,
isAutoRotationEnabled: z.boolean().default(true),
activeIndex: z.number().default(0),
folderId: z.string().uuid(),
projectId: z.string(),
folderId: z.string().uuid().nullable().optional(),
connectionId: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date(),
rotationInterval: z.number(),
rotateAtUtc: z.unknown(),
rotationStatus: z.string(),
lastRotationAttemptedAt: z.date(),
lastRotatedAt: z.date(),
encryptedLastRotationMessage: zodBuffer.nullable().optional(),
rotationStatus: z.string().nullable().optional(),
lastRotationJobId: z.string().nullable().optional(),
nextRotationAt: z.date().nullable().optional(),
isLastRotationManual: z.boolean().default(true)
lastRotationMessage: z.string().nullable().optional(),
lastRotatedAt: z.date().nullable().optional()
});
export type TSecretRotationsV2 = z.infer<typeof SecretRotationsV2Schema>;

View File

@ -25,8 +25,7 @@ export const SuperAdminSchema = z.object({
encryptedSlackClientId: zodBuffer.nullable().optional(),
encryptedSlackClientSecret: zodBuffer.nullable().optional(),
authConsentContent: z.string().nullable().optional(),
pageFrameContent: z.string().nullable().optional(),
adminIdentityIds: z.string().array().nullable().optional()
pageFrameContent: z.string().nullable().optional()
});
export type TSuperAdmin = z.infer<typeof SuperAdminSchema>;

View File

@ -16,7 +16,7 @@ export const registerCertificateEstRouter = async (server: FastifyZodProvider) =
// for CSRs sent in PEM, we leave them as-is
// for CSRs sent in base64, we preprocess them to remove newlines and spaces
if (!csrBody.includes("BEGIN CERTIFICATE REQUEST")) {
csrBody = csrBody.replaceAll("\n", "").replaceAll(" ", "");
csrBody = csrBody.replace(/\n/g, "").replace(/ /g, "");
}
done(null, csrBody);

View File

@ -29,8 +29,7 @@ export const registerAccessApprovalPolicyRouter = async (server: FastifyZodProvi
.array()
.min(1, { message: "At least one approver should be provided" }),
approvals: z.number().min(1).default(1),
enforcementLevel: z.nativeEnum(EnforcementLevel).default(EnforcementLevel.Hard),
allowedSelfApprovals: z.boolean().default(true)
enforcementLevel: z.nativeEnum(EnforcementLevel).default(EnforcementLevel.Hard)
}),
response: {
200: z.object({
@ -148,8 +147,7 @@ export const registerAccessApprovalPolicyRouter = async (server: FastifyZodProvi
.array()
.min(1, { message: "At least one approver should be provided" }),
approvals: z.number().min(1).optional(),
enforcementLevel: z.nativeEnum(EnforcementLevel).default(EnforcementLevel.Hard),
allowedSelfApprovals: z.boolean().default(true)
enforcementLevel: z.nativeEnum(EnforcementLevel).default(EnforcementLevel.Hard)
}),
response: {
200: z.object({

View File

@ -110,8 +110,7 @@ export const registerAccessApprovalRequestRouter = async (server: FastifyZodProv
secretPath: z.string().nullish(),
envId: z.string(),
enforcementLevel: z.string(),
deletedAt: z.date().nullish(),
allowedSelfApprovals: z.boolean()
deletedAt: z.date().nullish()
}),
reviewers: z
.object({

View File

@ -61,8 +61,8 @@ export const registerLdapRouter = async (server: FastifyZodProvider) => {
if (ldapConfig.groupSearchBase) {
const groupFilter = "(|(memberUid={{.Username}})(member={{.UserDN}})(uniqueMember={{.UserDN}}))";
const groupSearchFilter = (ldapConfig.groupSearchFilter || groupFilter)
.replaceAll("{{.Username}}", user.uid)
.replaceAll("{{.UserDN}}", user.dn);
.replace(/{{\.Username}}/g, user.uid)
.replace(/{{\.UserDN}}/g, user.dn);
if (!isValidLdapFilter(groupSearchFilter)) {
throw new Error("Generated LDAP search filter is invalid.");

View File

@ -35,8 +35,7 @@ export const registerSecretApprovalPolicyRouter = async (server: FastifyZodProvi
.array()
.min(1, { message: "At least one approver should be provided" }),
approvals: z.number().min(1).default(1),
enforcementLevel: z.nativeEnum(EnforcementLevel).default(EnforcementLevel.Hard),
allowedSelfApprovals: z.boolean().default(true)
enforcementLevel: z.nativeEnum(EnforcementLevel).default(EnforcementLevel.Hard)
}),
response: {
200: z.object({
@ -86,8 +85,7 @@ export const registerSecretApprovalPolicyRouter = async (server: FastifyZodProvi
.nullable()
.transform((val) => (val ? removeTrailingSlash(val) : val))
.transform((val) => (val === "" ? "/" : val)),
enforcementLevel: z.nativeEnum(EnforcementLevel).optional(),
allowedSelfApprovals: z.boolean().default(true)
enforcementLevel: z.nativeEnum(EnforcementLevel).optional()
}),
response: {
200: z.object({

View File

@ -49,8 +49,7 @@ export const registerSecretApprovalRequestRouter = async (server: FastifyZodProv
.array(),
secretPath: z.string().optional().nullable(),
enforcementLevel: z.string(),
deletedAt: z.date().nullish(),
allowedSelfApprovals: z.boolean()
deletedAt: z.date().nullish()
}),
committerUser: approvalRequestUser,
commits: z.object({ op: z.string(), secretId: z.string().nullable().optional() }).array(),
@ -268,8 +267,7 @@ export const registerSecretApprovalRequestRouter = async (server: FastifyZodProv
approvers: approvalRequestUser.array(),
secretPath: z.string().optional().nullable(),
enforcementLevel: z.string(),
deletedAt: z.date().nullish(),
allowedSelfApprovals: z.boolean()
deletedAt: z.date().nullish()
}),
environment: z.string(),
statusChangedByUser: approvalRequestUser.optional(),
@ -277,10 +275,8 @@ export const registerSecretApprovalRequestRouter = async (server: FastifyZodProv
reviewers: approvalRequestUser.extend({ status: z.string(), comment: z.string().optional() }).array(),
secretPath: z.string(),
commits: secretRawSchema
.omit({ _id: true, environment: true, workspace: true, type: true, version: true, secretValue: true })
.omit({ _id: true, environment: true, workspace: true, type: true, version: true })
.extend({
secretValue: z.string().optional(),
isRotatedSecret: z.boolean().optional(),
op: z.string(),
tags: SanitizedTagSchema.array().optional(),
secretMetadata: ResourceMetadataSchema.nullish(),

View File

@ -1,7 +1,6 @@
import { z } from "zod";
import { SecretRotationOutputsSchema, SecretRotationsSchema } from "@app/db/schemas";
import { BadRequestError } from "@app/lib/errors";
import { removeTrailingSlash } from "@app/lib/fn";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
@ -41,10 +40,16 @@ export const registerSecretRotationRouter = async (server: FastifyZodProvider) =
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async () => {
throw new BadRequestError({
message: `This version of Secret Rotations has been deprecated. Please see the docs for the new version.`
handler: async (req) => {
const secretRotation = await server.services.secretRotation.createRotation({
actor: req.permission.type,
actorAuthMethod: req.permission.authMethod,
actorId: req.permission.id,
actorOrgId: req.permission.orgId,
...req.body,
projectId: req.body.workspaceId
});
return { secretRotation };
}
});

View File

@ -33,8 +33,7 @@ export const registerSnapshotRouter = async (server: FastifyZodProvider) => {
.extend({
secretValueHidden: z.boolean(),
secretId: z.string(),
tags: SanitizedTagSchema.array(),
isRotatedSecret: z.boolean().optional()
tags: SanitizedTagSchema.array()
})
.array(),
folderVersion: z.object({ id: z.string(), name: z.string() }).array(),

View File

@ -5,11 +5,9 @@ import { SshCertType } from "@app/ee/services/ssh/ssh-certificate-authority-type
import { SSH_CERTIFICATE_AUTHORITIES } from "@app/lib/api-docs";
import { ms } from "@app/lib/ms";
import { writeLimit } from "@app/server/config/rateLimiter";
import { getTelemetryDistinctId } from "@app/server/lib/telemetry";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
import { CertKeyAlgorithm } from "@app/services/certificate/certificate-types";
import { PostHogEventTypes } from "@app/services/telemetry/telemetry-types";
export const registerSshCertRouter = async (server: FastifyZodProvider) => {
server.route({
@ -75,16 +73,6 @@ export const registerSshCertRouter = async (server: FastifyZodProvider) => {
}
});
await server.services.telemetry.sendPostHogEvents({
event: PostHogEventTypes.SignSshKey,
distinctId: getTelemetryDistinctId(req),
properties: {
certificateTemplateId: req.body.certificateTemplateId,
principals: req.body.principals,
...req.auditLogInfo
}
});
return {
serialNumber,
signedKey: signedPublicKey
@ -164,16 +152,6 @@ export const registerSshCertRouter = async (server: FastifyZodProvider) => {
}
});
await server.services.telemetry.sendPostHogEvents({
event: PostHogEventTypes.IssueSshCreds,
distinctId: getTelemetryDistinctId(req),
properties: {
certificateTemplateId: req.body.certificateTemplateId,
principals: req.body.principals,
...req.auditLogInfo
}
});
return {
serialNumber,
signedKey: signedPublicKey,

View File

@ -21,7 +21,7 @@ export const registerV2EERoutes = async (server: FastifyZodProvider) => {
await server.register(
async (secretRotationV2Router) => {
// register generic secret rotation endpoints
// register generic secret sync endpoints
await secretRotationV2Router.register(registerSecretRotationV2Router);
// register service specific secret rotation endpoints (secret-rotations/postgres-credentials, etc.)

View File

@ -1,6 +1,5 @@
import { SecretRotation } from "@app/ee/services/secret-rotation-v2/secret-rotation-v2-enums";
import { registerMsSqlCredentialsRotationRouter } from "./mssql-credentials-rotation-router";
import { registerPostgresCredentialsRotationRouter } from "./postgres-credentials-rotation-router";
export * from "./secret-rotation-v2-router";
@ -9,6 +8,5 @@ export const SECRET_ROTATION_REGISTER_ROUTER_MAP: Record<
SecretRotation,
(server: FastifyZodProvider) => Promise<void>
> = {
[SecretRotation.PostgresCredentials]: registerPostgresCredentialsRotationRouter,
[SecretRotation.MsSqlCredentials]: registerMsSqlCredentialsRotationRouter
[SecretRotation.PostgresCredentials]: registerPostgresCredentialsRotationRouter
};

View File

@ -1,19 +0,0 @@
import {
CreateMsSqlCredentialsRotationSchema,
MsSqlCredentialsRotationSchema,
UpdateMsSqlCredentialsRotationSchema
} from "@app/ee/services/secret-rotation-v2/mssql-credentials";
import { SecretRotation } from "@app/ee/services/secret-rotation-v2/secret-rotation-v2-enums";
import { SqlCredentialsRotationGeneratedCredentialsSchema } from "@app/ee/services/secret-rotation-v2/shared/sql-credentials";
import { registerSecretRotationEndpoints } from "./secret-rotation-v2-endpoints";
export const registerMsSqlCredentialsRotationRouter = async (server: FastifyZodProvider) =>
registerSecretRotationEndpoints({
type: SecretRotation.MsSqlCredentials,
server,
responseSchema: MsSqlCredentialsRotationSchema,
createSchema: CreateMsSqlCredentialsRotationSchema,
updateSchema: UpdateMsSqlCredentialsRotationSchema,
generatedCredentialsSchema: SqlCredentialsRotationGeneratedCredentialsSchema
});

View File

@ -4,7 +4,6 @@ import {
UpdatePostgresCredentialsRotationSchema
} from "@app/ee/services/secret-rotation-v2/postgres-credentials";
import { SecretRotation } from "@app/ee/services/secret-rotation-v2/secret-rotation-v2-enums";
import { SqlCredentialsRotationGeneratedCredentialsSchema } from "@app/ee/services/secret-rotation-v2/shared/sql-credentials";
import { registerSecretRotationEndpoints } from "./secret-rotation-v2-endpoints";
@ -14,6 +13,5 @@ export const registerPostgresCredentialsRotationRouter = async (server: FastifyZ
server,
responseSchema: PostgresCredentialsRotationSchema,
createSchema: CreatePostgresCredentialsRotationSchema,
updateSchema: UpdatePostgresCredentialsRotationSchema,
generatedCredentialsSchema: SqlCredentialsRotationGeneratedCredentialsSchema
updateSchema: UpdatePostgresCredentialsRotationSchema
});

View File

@ -4,9 +4,7 @@ import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import { SecretRotation } from "@app/ee/services/secret-rotation-v2/secret-rotation-v2-enums";
import { SECRET_ROTATION_NAME_MAP } from "@app/ee/services/secret-rotation-v2/secret-rotation-v2-maps";
import {
TRotateAtUtc,
TSecretRotationV2,
TSecretRotationV2GeneratedCredentials,
TSecretRotationV2Input
} from "@app/ee/services/secret-rotation-v2/secret-rotation-v2-types";
import { SecretRotations } from "@app/lib/api-docs";
@ -15,17 +13,12 @@ import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
export const registerSecretRotationEndpoints = <
T extends TSecretRotationV2,
I extends TSecretRotationV2Input,
C extends TSecretRotationV2GeneratedCredentials
>({
export const registerSecretRotationEndpoints = <T extends TSecretRotationV2, I extends TSecretRotationV2Input>({
server,
type,
createSchema,
updateSchema,
responseSchema,
generatedCredentialsSchema
responseSchema
}: {
type: SecretRotation;
server: FastifyZodProvider;
@ -36,11 +29,9 @@ export const registerSecretRotationEndpoints = <
projectId: string;
connectionId: string;
parameters: I["parameters"];
secretsMapping: I["secretsMapping"];
description?: string | null;
isAutoRotationEnabled?: boolean;
rotationInterval: number;
rotateAtUtc?: TRotateAtUtc;
interval: number;
}>;
updateSchema: z.ZodType<{
connectionId?: string;
@ -48,14 +39,11 @@ export const registerSecretRotationEndpoints = <
environment?: string;
secretPath?: string;
parameters?: I["parameters"];
secretsMapping?: I["secretsMapping"];
description?: string | null;
isAutoRotationEnabled?: boolean;
rotationInterval?: number;
rotateAtUtc?: TRotateAtUtc;
interval?: number;
}>;
responseSchema: z.ZodTypeAny;
generatedCredentialsSchema: z.ZodTypeAny;
}) => {
const rotationType = SECRET_ROTATION_NAME_MAP[type];
@ -133,9 +121,7 @@ export const registerSecretRotationEndpoints = <
type: EventType.GET_SECRET_ROTATION,
metadata: {
rotationId,
type,
secretPath: secretRotation.folder.path,
environment: secretRotation.environment.slug
type
}
}
});
@ -151,7 +137,7 @@ export const registerSecretRotationEndpoints = <
rateLimit: readLimit
},
schema: {
description: `Get the specified ${rotationType} Rotation by name, secret path, environment and project ID.`,
description: `Get the specified ${rotationType} Rotation by name and project ID.`,
params: z.object({
rotationName: z
.string()
@ -160,21 +146,7 @@ export const registerSecretRotationEndpoints = <
.describe(SecretRotations.GET_BY_NAME(type).rotationName)
}),
querystring: z.object({
projectId: z
.string()
.trim()
.min(1, "Project ID required")
.describe(SecretRotations.GET_BY_NAME(type).projectId),
secretPath: z
.string()
.trim()
.min(1, "Secret path required")
.describe(SecretRotations.GET_BY_NAME(type).secretPath),
environment: z
.string()
.trim()
.min(1, "Environment required")
.describe(SecretRotations.GET_BY_NAME(type).environment)
projectId: z.string().trim().min(1, "Project ID required").describe(SecretRotations.GET_BY_NAME(type).projectId)
}),
response: {
200: z.object({ secretRotation: responseSchema })
@ -183,10 +155,10 @@ export const registerSecretRotationEndpoints = <
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const { rotationName } = req.params;
const { projectId, secretPath, environment } = req.query;
const { projectId } = req.query;
const secretRotation = (await server.services.secretRotationV2.findSecretRotationByName(
{ rotationName, projectId, type, secretPath, environment },
{ rotationName, projectId, type },
req.permission
)) as T;
@ -197,9 +169,7 @@ export const registerSecretRotationEndpoints = <
type: EventType.GET_SECRET_ROTATION,
metadata: {
rotationId: secretRotation.id,
type,
secretPath,
environment
type
}
}
});
@ -301,14 +271,11 @@ export const registerSecretRotationEndpoints = <
rotationId: z.string().uuid().describe(SecretRotations.DELETE(type).rotationId)
}),
querystring: z.object({
deleteSecrets: z
removeSecrets: z
.enum(["true", "false"])
.default("false")
.transform((value) => value === "true")
.describe(SecretRotations.DELETE(type).deleteSecrets),
revokeGeneratedCredentials: z
.enum(["true", "false"])
.transform((value) => value === "true")
.describe(SecretRotations.DELETE(type).revokeGeneratedCredentials)
.describe(SecretRotations.DELETE(type).removeSecrets)
}),
response: {
200: z.object({ secretRotation: responseSchema })
@ -317,23 +284,22 @@ export const registerSecretRotationEndpoints = <
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const { rotationId } = req.params;
const { deleteSecrets, revokeGeneratedCredentials } = req.query;
const { removeSecrets } = req.query;
const secretRotation = (await server.services.secretRotationV2.deleteSecretRotation(
{ type, rotationId, deleteSecrets, revokeGeneratedCredentials },
{ type, rotationId, removeSecrets },
req.permission
)) as T;
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId: secretRotation.projectId,
orgId: req.permission.orgId,
event: {
type: EventType.DELETE_SECRET_ROTATION,
metadata: {
type,
rotationId,
deleteSecrets,
revokeGeneratedCredentials
removeSecrets
}
}
});
@ -344,65 +310,58 @@ export const registerSecretRotationEndpoints = <
server.route({
method: "GET",
url: "/:rotationId/generated-credentials",
url: "/:rotationId/credentials",
config: {
rateLimit: readLimit
},
schema: {
description: `Get the generated credentials for the specified ${rotationType} Rotation.`,
description: `Get the active and inactive credentials for the specified ${rotationType} Rotation.`,
params: z.object({
rotationId: z.string().uuid().describe(SecretRotations.GET_GENERATED_CREDENTIALS_BY_ID(type).rotationId)
rotationId: z.string().uuid().describe(SecretRotations.GET_CREDENTIALS_BY_ID(type).rotationId)
}),
response: {
200: z.object({
generatedCredentials: generatedCredentialsSchema,
activeIndex: z.number(),
rotationId: z.string().uuid(),
type: z.literal(type)
})
200: z.object({ secretRotation: responseSchema })
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const { rotationId } = req.params;
const {
generatedCredentials,
secretRotation: { activeIndex, projectId, folder, environment }
} = await server.services.secretRotationV2.findSecretRotationGeneratedCredentialsById(
{
rotationId,
type
},
req.permission
);
// TODO!
// const secretRotation = (await server.services.secretRotation.triggerSecretRotationRotationSecretsById(
// {
// rotationId,
// type,
// auditLogInfo: req.auditLogInfo
// },
// req.permission
// )) as T;
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId,
event: {
type: EventType.GET_SECRET_ROTATION_GENERATED_CREDENTIALS,
metadata: {
type,
rotationId,
secretPath: folder.path,
environment: environment.slug
}
}
});
// await server.services.auditLog.createAuditLog({
// ...req.auditLogInfo,
// orgId: req.permission.orgId,
// event: {
// type: EventType.DELETE_SECRET_ROTATION,
// metadata: {
// type,
// rotationId,
// removeSecrets
// }
// }
// });
return { generatedCredentials: generatedCredentials as C, activeIndex, rotationId, type };
return { secretRotation: null };
}
});
server.route({
method: "POST",
url: "/:rotationId/rotate-secrets",
url: "/:rotationId/rotate",
config: {
rateLimit: writeLimit
},
schema: {
description: `Rotate the generated credentials for the specified ${rotationType} Rotation.`,
description: `Rotate the credentials for the specified ${rotationType} Rotation.`,
params: z.object({
rotationId: z.string().uuid().describe(SecretRotations.ROTATE(type).rotationId)
}),
@ -414,16 +373,30 @@ export const registerSecretRotationEndpoints = <
handler: async (req) => {
const { rotationId } = req.params;
const secretRotation = (await server.services.secretRotationV2.rotateSecretRotation(
{
rotationId,
type,
auditLogInfo: req.auditLogInfo
},
req.permission
)) as T;
// TODO!
// const secretRotation = (await server.services.secretRotation.triggerSecretRotationRotationSecretsById(
// {
// rotationId,
// type,
// auditLogInfo: req.auditLogInfo
// },
// req.permission
// )) as T;
return { secretRotation };
// await server.services.auditLog.createAuditLog({
// ...req.auditLogInfo,
// orgId: req.permission.orgId,
// event: {
// type: EventType.DELETE_SECRET_ROTATION,
// metadata: {
// type,
// rotationId,
// removeSecrets
// }
// }
// });
return { secretRotation: null };
}
});
};
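
The hunk above collapses a generic, Zod-driven route factory. The general shape of that pattern, reduced to a single route that keeps the same string-to-boolean querystring transform, might look like the sketch below (schema and route names are hypothetical, not this repository's code):

    import Fastify from "fastify";
    import { z } from "zod";

    // Query-string values always arrive as strings, so the schema narrows to the two
    // accepted literals and then transforms them into a real boolean, mirroring the
    // deleteSecrets/removeSecrets flag in the delete route above.
    const DeleteQuerySchema = z.object({
      removeSecrets: z
        .enum(["true", "false"])
        .default("false")
        .transform((value) => value === "true")
    });

    const server = Fastify();

    server.delete("/:rotationId", async (req) => {
      const { removeSecrets } = DeleteQuerySchema.parse(req.query);
      // ...hand the coerced flag to the service layer...
      return { removeSecrets };
    });
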

View File

@ -1,18 +1,18 @@
import { z } from "zod";
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import { MsSqlCredentialsRotationListItemSchema } from "@app/ee/services/secret-rotation-v2/mssql-credentials";
import { PostgresCredentialsRotationListItemSchema } from "@app/ee/services/secret-rotation-v2/postgres-credentials";
import { SecretRotationV2Schema } from "@app/ee/services/secret-rotation-v2/secret-rotation-v2-union-schema";
import {
PostgresCredentialsRotationListItemSchema,
PostgresCredentialsRotationSchema
} from "@app/ee/services/secret-rotation-v2/postgres-credentials";
import { SecretRotations } from "@app/lib/api-docs";
import { readLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
const SecretRotationV2OptionsSchema = z.discriminatedUnion("type", [
PostgresCredentialsRotationListItemSchema,
MsSqlCredentialsRotationListItemSchema
]);
const SecretRotationV2Schema = z.discriminatedUnion("type", [PostgresCredentialsRotationSchema]);
const SecretRotationV2OptionsSchema = z.discriminatedUnion("type", [PostgresCredentialsRotationListItemSchema]);
export const registerSecretRotationV2Router = async (server: FastifyZodProvider) => {
server.route({

View File

@ -65,8 +65,7 @@ export const accessApprovalPolicyServiceFactory = ({
approvers,
projectSlug,
environment,
enforcementLevel,
allowedSelfApprovals
enforcementLevel
}: TCreateAccessApprovalPolicy) => {
const project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId);
if (!project) throw new NotFoundError({ message: `Project with slug '${projectSlug}' not found` });
@ -154,8 +153,7 @@ export const accessApprovalPolicyServiceFactory = ({
approvals,
secretPath,
name,
enforcementLevel,
allowedSelfApprovals
enforcementLevel
},
tx
);
@ -218,8 +216,7 @@ export const accessApprovalPolicyServiceFactory = ({
actorOrgId,
actorAuthMethod,
approvals,
enforcementLevel,
allowedSelfApprovals
enforcementLevel
}: TUpdateAccessApprovalPolicy) => {
const groupApprovers = approvers
.filter((approver) => approver.type === ApproverType.Group)
@ -265,8 +262,7 @@ export const accessApprovalPolicyServiceFactory = ({
approvals,
secretPath,
name,
enforcementLevel,
allowedSelfApprovals
enforcementLevel
},
tx
);

View File

@ -26,7 +26,6 @@ export type TCreateAccessApprovalPolicy = {
projectSlug: string;
name: string;
enforcementLevel: EnforcementLevel;
allowedSelfApprovals: boolean;
} & Omit<TProjectPermission, "projectId">;
export type TUpdateAccessApprovalPolicy = {
@ -36,7 +35,6 @@ export type TUpdateAccessApprovalPolicy = {
secretPath?: string;
name?: string;
enforcementLevel?: EnforcementLevel;
allowedSelfApprovals: boolean;
} & Omit<TProjectPermission, "projectId">;
export type TDeleteAccessApprovalPolicy = {

View File

@ -61,7 +61,6 @@ export const accessApprovalRequestDALFactory = (db: TDbClient) => {
db.ref("approvals").withSchema(TableName.AccessApprovalPolicy).as("policyApprovals"),
db.ref("secretPath").withSchema(TableName.AccessApprovalPolicy).as("policySecretPath"),
db.ref("enforcementLevel").withSchema(TableName.AccessApprovalPolicy).as("policyEnforcementLevel"),
db.ref("allowedSelfApprovals").withSchema(TableName.AccessApprovalPolicy).as("policyAllowedSelfApprovals"),
db.ref("envId").withSchema(TableName.AccessApprovalPolicy).as("policyEnvId"),
db.ref("deletedAt").withSchema(TableName.AccessApprovalPolicy).as("policyDeletedAt")
)
@ -120,7 +119,6 @@ export const accessApprovalRequestDALFactory = (db: TDbClient) => {
approvals: doc.policyApprovals,
secretPath: doc.policySecretPath,
enforcementLevel: doc.policyEnforcementLevel,
allowedSelfApprovals: doc.policyAllowedSelfApprovals,
envId: doc.policyEnvId,
deletedAt: doc.policyDeletedAt
},
@ -256,7 +254,6 @@ export const accessApprovalRequestDALFactory = (db: TDbClient) => {
tx.ref("slug").withSchema(TableName.Environment).as("environment"),
tx.ref("secretPath").withSchema(TableName.AccessApprovalPolicy).as("policySecretPath"),
tx.ref("enforcementLevel").withSchema(TableName.AccessApprovalPolicy).as("policyEnforcementLevel"),
tx.ref("allowedSelfApprovals").withSchema(TableName.AccessApprovalPolicy).as("policyAllowedSelfApprovals"),
tx.ref("approvals").withSchema(TableName.AccessApprovalPolicy).as("policyApprovals"),
tx.ref("deletedAt").withSchema(TableName.AccessApprovalPolicy).as("policyDeletedAt")
);
@ -278,7 +275,6 @@ export const accessApprovalRequestDALFactory = (db: TDbClient) => {
approvals: el.policyApprovals,
secretPath: el.policySecretPath,
enforcementLevel: el.policyEnforcementLevel,
allowedSelfApprovals: el.policyAllowedSelfApprovals,
deletedAt: el.policyDeletedAt
},
requestedByUser: {

View File

@ -320,11 +320,6 @@ export const accessApprovalRequestServiceFactory = ({
message: "The policy associated with this access request has been deleted."
});
}
if (!policy.allowedSelfApprovals && actorId === accessApprovalRequest.requestedByUserId) {
throw new BadRequestError({
message: "Failed to review access approval request. Users are not authorized to review their own request."
});
}
const { membership, hasRole } = await permissionService.getProjectPermission({
actor,

View File

@ -45,6 +45,7 @@ export const auditLogStreamServiceFactory = ({
}: TCreateAuditLogStreamDTO) => {
if (!actorOrgId) throw new UnauthorizedError({ message: "No organization ID attached to authentication token" });
const appCfg = getConfig();
const plan = await licenseService.getPlan(actorOrgId);
if (!plan.auditLogStreams) {
throw new BadRequestError({
@ -61,8 +62,9 @@ export const auditLogStreamServiceFactory = ({
);
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Create, OrgPermissionSubjects.Settings);
const appCfg = getConfig();
if (appCfg.isCloud) await blockLocalAndPrivateIpAddresses(url);
if (appCfg.isCloud) {
blockLocalAndPrivateIpAddresses(url);
}
const totalStreams = await auditLogStreamDAL.find({ orgId: actorOrgId });
if (totalStreams.length >= plan.auditLogStreamLimit) {
@ -133,8 +135,9 @@ export const auditLogStreamServiceFactory = ({
const { orgId } = logStream;
const { permission } = await permissionService.getOrgPermission(actor, actorId, orgId, actorAuthMethod, actorOrgId);
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Edit, OrgPermissionSubjects.Settings);
const appCfg = getConfig();
if (url && appCfg.isCloud) await blockLocalAndPrivateIpAddresses(url);
if (url && appCfg.isCloud) blockLocalAndPrivateIpAddresses(url);
// testing connection first
const streamHeaders: RawAxiosRequestHeaders = { "Content-Type": "application/json" };
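
Both sides of this hunk call blockLocalAndPrivateIpAddresses in cloud mode, but only one of them awaits it; an async validator can only reject the request when its promise is awaited. A minimal sketch of that difference, using a hypothetical assertPublicUrl helper rather than this codebase's function:

    // Hypothetical validator: rejects URLs that point at obviously private hosts.
    const assertPublicUrl = async (url: string): Promise<void> => {
      const { hostname } = new URL(url);
      if (hostname === "localhost" || hostname === "127.0.0.1") {
        throw new Error("URL resolves to a private address");
      }
    };

    export const createStream = async (url: string) => {
      // Without `await`, a rejection here becomes an unhandled promise instead of
      // aborting the request.
      await assertPublicUrl(url);
      // ...persist the stream...
    };
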

View File

@ -9,14 +9,13 @@ import { logger } from "@app/lib/logger";
import { QueueName } from "@app/queue";
import { ActorType } from "@app/services/auth/auth-type";
import { EventType, filterableSecretEvents } from "./audit-log-types";
import { EventType } from "./audit-log-types";
export type TAuditLogDALFactory = ReturnType<typeof auditLogDALFactory>;
type TFindQuery = {
actor?: string;
projectId?: string;
environment?: string;
orgId?: string;
eventType?: string;
startDate?: string;
@ -33,7 +32,6 @@ export const auditLogDALFactory = (db: TDbClient) => {
{
orgId,
projectId,
environment,
userAgentType,
startDate,
endDate,
@ -42,14 +40,12 @@ export const auditLogDALFactory = (db: TDbClient) => {
actorId,
actorType,
secretPath,
secretKey,
eventType,
eventMetadata
}: Omit<TFindQuery, "actor" | "eventType"> & {
actorId?: string;
actorType?: ActorType;
secretPath?: string;
secretKey?: string;
eventType?: EventType[];
eventMetadata?: Record<string, string>;
},
@ -94,29 +90,8 @@ export const auditLogDALFactory = (db: TDbClient) => {
});
}
const eventIsSecretType = !eventType?.length || eventType.some((event) => filterableSecretEvents.includes(event));
// We only want to filter for environment/secretPath/secretKey if the user is either checking for all event types
// ? Note(daniel): use the `eventMetadata" @> ?::jsonb` approach to properly use our GIN index
if (projectId && eventIsSecretType) {
if (environment || secretPath) {
// Handle both environment and secret path together to only use the GIN index once
void sqlQuery.whereRaw(`"eventMetadata" @> ?::jsonb`, [
JSON.stringify({
...(environment && { environment }),
...(secretPath && { secretPath })
})
]);
}
// Handle secret key separately to include the OR condition
if (secretKey) {
void sqlQuery.whereRaw(
`("eventMetadata" @> ?::jsonb
OR "eventMetadata"->'secrets' @> ?::jsonb)`,
[JSON.stringify({ secretKey }), JSON.stringify([{ secretKey }])]
);
}
if (projectId && secretPath) {
void sqlQuery.whereRaw(`"eventMetadata" @> jsonb_build_object('secretPath', ?::text)`, [secretPath]);
}
// Filter by actor type
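
The longer side of this hunk writes the environment/secretPath criteria as a single JSONB containment predicate so Postgres can answer it from a GIN index on eventMetadata, as the inline note explains. A reduced sketch of that query shape (table and column names are assumptions for illustration):

    import knex from "knex";

    const db = knex({ client: "pg", connection: process.env.DATABASE_URL });

    const findAuditLogs = async (projectId: string, secretPath?: string, environment?: string) => {
      const query = db("audit_logs").where({ projectId });
      if (secretPath || environment) {
        // `@>` asks whether eventMetadata contains the given JSON object, which a GIN
        // index over the whole column can satisfy without a sequential scan.
        void query.whereRaw(`"eventMetadata" @> ?::jsonb`, [
          JSON.stringify({ ...(environment && { environment }), ...(secretPath && { secretPath }) })
        ]);
      }
      return query;
    };
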

View File

@ -1,10 +1,8 @@
import { ForbiddenError } from "@casl/ability";
import { requestContext } from "@fastify/request-context";
import { ActionProjectType } from "@app/db/schemas";
import { getConfig } from "@app/lib/config/env";
import { BadRequestError } from "@app/lib/errors";
import { ActorType } from "@app/services/auth/auth-type";
import { OrgPermissionActions, OrgPermissionSubjects } from "../permission/org-permission";
import { TPermissionServiceFactory } from "../permission/permission-service";
@ -63,8 +61,6 @@ export const auditLogServiceFactory = ({
actorType: filter.actorType,
eventMetadata: filter.eventMetadata,
secretPath: filter.secretPath,
secretKey: filter.secretKey,
environment: filter.environment,
...(filter.projectId ? { projectId: filter.projectId } : { orgId: actorOrgId })
});
@ -85,12 +81,8 @@ export const auditLogServiceFactory = ({
if (!data.projectId && !data.orgId)
throw new BadRequestError({ message: "Must specify either project id or org id" });
}
const el = { ...data };
if (el.actor.type === ActorType.USER || el.actor.type === ActorType.IDENTITY) {
const permissionMetadata = requestContext.get("identityPermissionMetadata");
el.actor.metadata.permission = permissionMetadata;
}
return auditLogQueue.pushToLog(el);
return auditLogQueue.pushToLog(data);
};
return {

View File

@ -2,11 +2,11 @@ import {
TCreateProjectTemplateDTO,
TUpdateProjectTemplateDTO
} from "@app/ee/services/project-template/project-template-types";
import { SecretRotation, SecretRotationStatus } from "@app/ee/services/secret-rotation-v2/secret-rotation-v2-enums";
import { SecretRotation } from "@app/ee/services/secret-rotation-v2/secret-rotation-v2-enums";
import {
TCreateSecretRotationV2DTO,
TDeleteSecretRotationV2DTO,
TSecretRotationV2Raw,
TSecretRotationV2,
TUpdateSecretRotationV2DTO
} from "@app/ee/services/secret-rotation-v2/secret-rotation-v2-types";
import { SshCaStatus, SshCertType } from "@app/ee/services/ssh/ssh-certificate-authority-types";
@ -40,11 +40,9 @@ export type TListProjectAuditLogDTO = {
endDate?: string;
startDate?: string;
projectId?: string;
environment?: string;
auditLogActorId?: string;
actorType?: ActorType;
secretPath?: string;
secretKey?: string;
eventMetadata?: Record<string, string>;
};
} & Omit<TProjectPermission, "projectId">;
@ -63,8 +61,6 @@ export type TCreateAuditLogDTO = {
projectId?: string;
} & BaseAuthData;
export type AuditLogInfo = Pick<TCreateAuditLogDTO, "userAgent" | "userAgentType" | "ipAddress" | "actor">;
interface BaseAuthData {
ipAddress?: string;
userAgent?: string;
@ -298,30 +294,17 @@ export enum EventType {
GET_SECRET_ROTATIONS = "get-secret-rotations",
GET_SECRET_ROTATION = "get-secret-rotation",
GET_SECRET_ROTATION_GENERATED_CREDENTIALS = "get-secret-rotation-generated-credentials",
GET_SECRET_ROTATION_CREDENTIALS = "get-secret-rotation-credentials",
CREATE_SECRET_ROTATION = "create-secret-rotation",
UPDATE_SECRET_ROTATION = "update-secret-rotation",
DELETE_SECRET_ROTATION = "delete-secret-rotation",
SECRET_ROTATION_ROTATE_SECRETS = "secret-rotation-rotate-secrets",
PROJECT_ACCESS_REQUEST = "project-access-request"
ROTATE_SECRET_ROTATION = "rotate-secret-rotation"
}
export const filterableSecretEvents: EventType[] = [
EventType.GET_SECRET,
EventType.DELETE_SECRETS,
EventType.CREATE_SECRETS,
EventType.UPDATE_SECRETS,
EventType.CREATE_SECRET,
EventType.UPDATE_SECRET,
EventType.DELETE_SECRET
];
interface UserActorMetadata {
userId: string;
email?: string | null;
username: string;
permission?: Record<string, unknown>;
}
interface ServiceActorMetadata {
@ -332,7 +315,6 @@ interface ServiceActorMetadata {
interface IdentityActorMetadata {
identityId: string;
name: string;
permission?: Record<string, unknown>;
}
interface ScimClientActorMetadata {}
@ -999,7 +981,6 @@ interface LoginIdentityOidcAuthEvent {
identityId: string;
identityOidcAuthId: string;
identityAccessTokenId: string;
oidcClaimsReceived: Record<string, unknown>;
};
}
@ -1012,7 +993,6 @@ interface AddIdentityOidcAuthEvent {
boundIssuer: string;
boundAudiences: string;
boundClaims: Record<string, string>;
claimMetadataMapping: Record<string, string>;
boundSubject: string;
accessTokenTTL: number;
accessTokenMaxTTL: number;
@ -1037,7 +1017,6 @@ interface UpdateIdentityOidcAuthEvent {
boundIssuer?: string;
boundAudiences?: string;
boundClaims?: Record<string, string>;
claimMetadataMapping?: Record<string, string>;
boundSubject?: string;
accessTokenTTL?: number;
accessTokenMaxTTL?: number;
@ -2296,15 +2275,6 @@ interface KmipOperationRegisterEvent {
};
}
interface ProjectAccessRequestEvent {
type: EventType.PROJECT_ACCESS_REQUEST;
metadata: {
projectId: string;
requesterId: string;
requesterEmail: string;
};
}
interface SetupKmipEvent {
type: EventType.SETUP_KMIP;
metadata: {
@ -2336,8 +2306,6 @@ interface GetSecretRotationsEvent {
type?: SecretRotation;
count: number;
rotationIds: string[];
secretPath?: string;
environment?: string;
};
}
@ -2346,18 +2314,14 @@ interface GetSecretRotationEvent {
metadata: {
type: SecretRotation;
rotationId: string;
secretPath: string;
environment: string;
};
}
interface GetSecretRotationCredentialsEvent {
type: EventType.GET_SECRET_ROTATION_GENERATED_CREDENTIALS;
type: EventType.GET_SECRET_ROTATION_CREDENTIALS;
metadata: {
type: SecretRotation;
rotationId: string;
secretPath: string;
environment: string;
};
}
@ -2377,13 +2341,12 @@ interface DeleteSecretRotationEvent {
}
interface RotateSecretRotationEvent {
type: EventType.SECRET_ROTATION_ROTATE_SECRETS;
metadata: Pick<TSecretRotationV2Raw, "parameters" | "secretsMapping" | "type" | "connectionId" | "folderId"> & {
status: SecretRotationStatus;
type: EventType.ROTATE_SECRET_ROTATION;
metadata: Pick<TSecretRotationV2, "parameters" | "type" | "rotationStatus" | "connectionId" | "folderId"> & {
rotationId: string;
jobId?: string | undefined;
occurredAt: Date;
message?: string | null | undefined;
rotationMessage: string | null;
jobId?: string;
rotatedAt: Date;
};
}
@ -2596,7 +2559,6 @@ export type Event =
| KmipOperationRevokeEvent
| KmipOperationLocateEvent
| KmipOperationRegisterEvent
| ProjectAccessRequestEvent
| CreateSecretRequestEvent
| SecretApprovalRequestReview
| GetSecretRotationsEvent

View File

@ -1,6 +1,5 @@
import * as x509 from "@peculiar/x509";
import { extractX509CertFromChain } from "@app/lib/certificates/extract-certificate";
import { BadRequestError, NotFoundError, UnauthorizedError } from "@app/lib/errors";
import { isCertChainValid } from "@app/services/certificate/certificate-fns";
import { TCertificateAuthorityCertDALFactory } from "@app/services/certificate-authority/certificate-authority-cert-dal";
@ -68,7 +67,9 @@ export const certificateEstServiceFactory = ({
const certTemplate = await certificateTemplateDAL.findById(certificateTemplateId);
const leafCertificate = extractX509CertFromChain(decodeURIComponent(sslClientCert))?.[0];
const leafCertificate = decodeURIComponent(sslClientCert).match(
/-----BEGIN CERTIFICATE-----[\s\S]+?-----END CERTIFICATE-----/g
)?.[0];
if (!leafCertificate) {
throw new UnauthorizedError({ message: "Missing client certificate" });
@ -87,7 +88,10 @@ export const certificateEstServiceFactory = ({
const verifiedChains = await Promise.all(
caCertChains.map((chain) => {
const caCert = new x509.X509Certificate(chain.certificate);
const caChain = extractX509CertFromChain(chain.certificateChain)?.map((c) => new x509.X509Certificate(c)) || [];
const caChain =
chain.certificateChain
.match(/-----BEGIN CERTIFICATE-----[\s\S]+?-----END CERTIFICATE-----/g)
?.map((c) => new x509.X509Certificate(c)) || [];
return isCertChainValid([cert, caCert, ...caChain]);
})
@ -168,15 +172,19 @@ export const certificateEstServiceFactory = ({
}
if (!estConfig.disableBootstrapCertValidation) {
const caCerts = extractX509CertFromChain(estConfig.caChain)?.map((cert) => {
return new x509.X509Certificate(cert);
});
const caCerts = estConfig.caChain
.match(/-----BEGIN CERTIFICATE-----[\s\S]+?-----END CERTIFICATE-----/g)
?.map((cert) => {
return new x509.X509Certificate(cert);
});
if (!caCerts) {
throw new BadRequestError({ message: "Failed to parse certificate chain" });
}
const leafCertificate = extractX509CertFromChain(decodeURIComponent(sslClientCert))?.[0];
const leafCertificate = decodeURIComponent(sslClientCert).match(
/-----BEGIN CERTIFICATE-----[\s\S]+?-----END CERTIFICATE-----/g
)?.[0];
if (!leafCertificate) {
throw new BadRequestError({ message: "Missing client certificate" });
@ -242,7 +250,13 @@ export const certificateEstServiceFactory = ({
kmsService
});
const certificates = extractX509CertFromChain(caCertChain).map((cert) => new x509.X509Certificate(cert));
const certificates = caCertChain
.match(/-----BEGIN CERTIFICATE-----[\s\S]+?-----END CERTIFICATE-----/g)
?.map((cert) => new x509.X509Certificate(cert));
if (!certificates) {
throw new BadRequestError({ message: "Failed to parse certificate chain" });
}
const caCertificate = new x509.X509Certificate(caCert);
return convertRawCertsToPkcs7([caCertificate.rawData, ...certificates.map((cert) => cert.rawData)]);
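
Both variants in this hunk split a PEM bundle into individual certificates, either through the extractX509CertFromChain helper or inline with the same non-greedy regex. A standalone sketch of that extraction step (the helper name below is illustrative, not the repository's API):

    import * as x509 from "@peculiar/x509";

    // Split a PEM chain into its certificate blocks and parse each one.
    const extractPemCertificates = (chain: string): x509.X509Certificate[] => {
      const pemBlocks = chain.match(/-----BEGIN CERTIFICATE-----[\s\S]+?-----END CERTIFICATE-----/g) ?? [];
      return pemBlocks.map((pem) => new x509.X509Certificate(pem));
    };

    // Usage: the first entry of a client-supplied chain is treated as the leaf certificate.
    // const [leaf] = extractPemCertificates(decodeURIComponent(sslClientCert));
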

View File

@ -183,7 +183,7 @@ export const dynamicSecretLeaseServiceFactory = ({
});
const dynamicSecretLease = await dynamicSecretLeaseDAL.findById(leaseId);
if (!dynamicSecretLease || dynamicSecretLease.dynamicSecret.folderId !== folder.id) {
if (!dynamicSecretLease) {
throw new NotFoundError({ message: `Dynamic secret lease with ID '${leaseId}' not found` });
}
@ -256,7 +256,7 @@ export const dynamicSecretLeaseServiceFactory = ({
});
const dynamicSecretLease = await dynamicSecretLeaseDAL.findById(leaseId);
if (!dynamicSecretLease || dynamicSecretLease.dynamicSecret.folderId !== folder.id)
if (!dynamicSecretLease)
throw new NotFoundError({ message: `Dynamic secret lease with ID '${leaseId}' not found` });
const dynamicSecretCfg = dynamicSecretLease.dynamicSecret;

View File

@ -1,53 +1,31 @@
import dns from "node:dns/promises";
import net from "node:net";
import crypto from "node:crypto";
import { getConfig } from "@app/lib/config/env";
import { BadRequestError } from "@app/lib/errors";
import { isPrivateIp } from "@app/lib/ip/ipRange";
import { getDbConnectionHost } from "@app/lib/knex";
export const verifyHostInputValidity = async (host: string, isGateway = false) => {
export const verifyHostInputValidity = (host: string, isGateway = false) => {
const appCfg = getConfig();
const dbHost = appCfg.DB_HOST || getDbConnectionHost(appCfg.DB_CONNECTION_URI);
// no need for validation when it's dev
if (appCfg.NODE_ENV === "development") return;
if (appCfg.isDevelopmentMode) return [host];
if (host === "host.docker.internal") throw new BadRequestError({ message: "Invalid db host" });
const reservedHosts = [appCfg.DB_HOST || getDbConnectionHost(appCfg.DB_CONNECTION_URI)].concat(
(appCfg.DB_READ_REPLICAS || []).map((el) => getDbConnectionHost(el.DB_CONNECTION_URI)),
getDbConnectionHost(appCfg.REDIS_URL),
getDbConnectionHost(appCfg.AUDIT_LOGS_DB_CONNECTION_URI)
);
if (
appCfg.isCloud &&
!isGateway &&
// localhost
// internal ips
(host.match(/^10\.\d+\.\d+\.\d+/) || host.match(/^192\.168\.\d+\.\d+/))
)
throw new BadRequestError({ message: "Invalid db host" });
// get host db ip
const exclusiveIps: string[] = [];
for await (const el of reservedHosts) {
if (el) {
if (net.isIPv4(el)) {
exclusiveIps.push(el);
} else {
const resolvedIps = await dns.resolve4(el);
exclusiveIps.push(...resolvedIps);
}
}
if (
host === "localhost" ||
host === "127.0.0.1" ||
(dbHost?.length === host.length && crypto.timingSafeEqual(Buffer.from(dbHost || ""), Buffer.from(host)))
) {
throw new BadRequestError({ message: "Invalid db host" });
}
const normalizedHost = host.split(":")[0];
const inputHostIps: string[] = [];
if (net.isIPv4(host)) {
inputHostIps.push(host);
} else {
if (normalizedHost === "localhost" || normalizedHost === "host.docker.internal") {
throw new BadRequestError({ message: "Invalid db host" });
}
const resolvedIps = await dns.resolve4(host);
inputHostIps.push(...resolvedIps);
}
if (!isGateway && !appCfg.DYNAMIC_SECRET_ALLOW_INTERNAL_IP) {
const isInternalIp = inputHostIps.some((el) => isPrivateIp(el));
if (isInternalIp) throw new BadRequestError({ message: "Invalid db host" });
}
const isAppUsedIps = inputHostIps.some((el) => exclusiveIps.includes(el));
if (isAppUsedIps) throw new BadRequestError({ message: "Invalid db host" });
return inputHostIps;
};
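
Both versions of verifyHostInputValidity above try to reject database hosts that point at internal infrastructure; the longer variant does so by resolving the host and inspecting the resulting IPs. A compressed sketch of that resolve-then-check flow (isPrivateIp here is a simplified stand-in for the repository's own IP-range helper):

    import dns from "node:dns/promises";
    import net from "node:net";

    // Simplified private-range check for illustration only.
    const isPrivateIp = (ip: string): boolean =>
      /^10\./.test(ip) || /^192\.168\./.test(ip) || /^127\./.test(ip) || /^172\.(1[6-9]|2\d|3[01])\./.test(ip);

    export const resolveAndCheckHost = async (host: string): Promise<string[]> => {
      // Accept literal IPv4 addresses as-is; resolve everything else through DNS so the
      // check applies to the addresses the connection would actually reach.
      const ips = net.isIPv4(host) ? [host] : await dns.resolve4(host);
      if (ips.some((ip) => isPrivateIp(ip))) {
        throw new Error("Host resolves to a private address");
      }
      return ips;
    };
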

View File

@ -13,7 +13,6 @@ import { customAlphabet } from "nanoid";
import { z } from "zod";
import { BadRequestError } from "@app/lib/errors";
import { validateHandlebarTemplate } from "@app/lib/template/validate-handlebars";
import { DynamicSecretAwsElastiCacheSchema, TDynamicProviderFns } from "./models";
@ -145,14 +144,6 @@ export const AwsElastiCacheDatabaseProvider = (): TDynamicProviderFns => {
// We can't return the parsed statements here because we need to use the handlebars template to generate the username and password, before we can use the parsed statements.
CreateElastiCacheUserSchema.parse(JSON.parse(providerInputs.creationStatement));
DeleteElasticCacheUserSchema.parse(JSON.parse(providerInputs.revocationStatement));
validateHandlebarTemplate("AWS ElastiCache creation", providerInputs.creationStatement, {
allowedExpressions: (val) => ["username", "password", "expiration"].includes(val)
});
if (providerInputs.revocationStatement) {
validateHandlebarTemplate("AWS ElastiCache revoke", providerInputs.revocationStatement, {
allowedExpressions: (val) => ["username"].includes(val)
});
}
return providerInputs;
};

View File

@ -3,10 +3,9 @@ import handlebars from "handlebars";
import { customAlphabet } from "nanoid";
import { z } from "zod";
import { BadRequestError } from "@app/lib/errors";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { validateHandlebarTemplate } from "@app/lib/template/validate-handlebars";
import { verifyHostInputValidity } from "../dynamic-secret-fns";
import { DynamicSecretCassandraSchema, TDynamicProviderFns } from "./models";
const generatePassword = (size = 48) => {
@ -21,28 +20,14 @@ const generateUsername = () => {
export const CassandraProvider = (): TDynamicProviderFns => {
const validateProviderInputs = async (inputs: unknown) => {
const providerInputs = await DynamicSecretCassandraSchema.parseAsync(inputs);
const hostIps = await Promise.all(
providerInputs.host
.split(",")
.filter(Boolean)
.map((el) => verifyHostInputValidity(el).then((ip) => ip[0]))
);
validateHandlebarTemplate("Cassandra creation", providerInputs.creationStatement, {
allowedExpressions: (val) => ["username", "password", "expiration", "keyspace"].includes(val)
});
if (providerInputs.renewStatement) {
validateHandlebarTemplate("Cassandra renew", providerInputs.renewStatement, {
allowedExpressions: (val) => ["username", "expiration", "keyspace"].includes(val)
});
if (providerInputs.host === "localhost" || providerInputs.host === "127.0.0.1") {
throw new BadRequestError({ message: "Invalid db host" });
}
validateHandlebarTemplate("Cassandra revoke", providerInputs.revocationStatement, {
allowedExpressions: (val) => ["username"].includes(val)
});
return { ...providerInputs, hostIps };
return providerInputs;
};
const $getClient = async (providerInputs: z.infer<typeof DynamicSecretCassandraSchema> & { hostIps: string[] }) => {
const $getClient = async (providerInputs: z.infer<typeof DynamicSecretCassandraSchema>) => {
const sslOptions = providerInputs.ca ? { rejectUnauthorized: false, ca: providerInputs.ca } : undefined;
const client = new cassandra.Client({
sslOptions,
@ -55,7 +40,7 @@ export const CassandraProvider = (): TDynamicProviderFns => {
},
keyspace: providerInputs.keyspace,
localDataCenter: providerInputs?.localDataCenter,
contactPoints: providerInputs.hostIps
contactPoints: providerInputs.host.split(",").filter(Boolean)
});
return client;
};

View File

@ -19,14 +19,15 @@ const generateUsername = () => {
export const ElasticSearchProvider = (): TDynamicProviderFns => {
const validateProviderInputs = async (inputs: unknown) => {
const providerInputs = await DynamicSecretElasticSearchSchema.parseAsync(inputs);
const [hostIp] = await verifyHostInputValidity(providerInputs.host);
return { ...providerInputs, hostIp };
verifyHostInputValidity(providerInputs.host);
return providerInputs;
};
const $getClient = async (providerInputs: z.infer<typeof DynamicSecretElasticSearchSchema> & { hostIp: string }) => {
const $getClient = async (providerInputs: z.infer<typeof DynamicSecretElasticSearchSchema>) => {
const connection = new ElasticSearchClient({
node: {
url: new URL(`${providerInputs.hostIp}:${providerInputs.port}`),
url: new URL(`${providerInputs.host}:${providerInputs.port}`),
...(providerInputs.ca && {
ssl: {
rejectUnauthorized: false,

View File

@ -19,15 +19,15 @@ const generateUsername = () => {
export const MongoDBProvider = (): TDynamicProviderFns => {
const validateProviderInputs = async (inputs: unknown) => {
const providerInputs = await DynamicSecretMongoDBSchema.parseAsync(inputs);
const [hostIp] = await verifyHostInputValidity(providerInputs.host);
return { ...providerInputs, hostIp };
verifyHostInputValidity(providerInputs.host);
return providerInputs;
};
const $getClient = async (providerInputs: z.infer<typeof DynamicSecretMongoDBSchema> & { hostIp: string }) => {
const $getClient = async (providerInputs: z.infer<typeof DynamicSecretMongoDBSchema>) => {
const isSrv = !providerInputs.port;
const uri = isSrv
? `mongodb+srv://${providerInputs.hostIp}`
: `mongodb://${providerInputs.hostIp}:${providerInputs.port}`;
? `mongodb+srv://${providerInputs.host}`
: `mongodb://${providerInputs.host}:${providerInputs.port}`;
const client = new MongoClient(uri, {
auth: {

View File

@ -3,6 +3,7 @@ import https from "https";
import { customAlphabet } from "nanoid";
import { z } from "zod";
import { removeTrailingSlash } from "@app/lib/fn";
import { logger } from "@app/lib/logger";
import { alphaNumericNanoId } from "@app/lib/nanoid";
@ -78,13 +79,14 @@ async function deleteRabbitMqUser({ axiosInstance, usernameToDelete }: TDeleteRa
export const RabbitMqProvider = (): TDynamicProviderFns => {
const validateProviderInputs = async (inputs: unknown) => {
const providerInputs = await DynamicSecretRabbitMqSchema.parseAsync(inputs);
const [hostIp] = await verifyHostInputValidity(providerInputs.host);
return { ...providerInputs, hostIp };
verifyHostInputValidity(providerInputs.host);
return providerInputs;
};
const $getClient = async (providerInputs: z.infer<typeof DynamicSecretRabbitMqSchema> & { hostIp: string }) => {
const $getClient = async (providerInputs: z.infer<typeof DynamicSecretRabbitMqSchema>) => {
const axiosInstance = axios.create({
baseURL: `${providerInputs.hostIp}:${providerInputs.port}/api`,
baseURL: `${removeTrailingSlash(providerInputs.host)}:${providerInputs.port}/api`,
auth: {
username: providerInputs.username,
password: providerInputs.password

View File

@ -5,7 +5,6 @@ import { z } from "zod";
import { BadRequestError } from "@app/lib/errors";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { validateHandlebarTemplate } from "@app/lib/template/validate-handlebars";
import { verifyHostInputValidity } from "../dynamic-secret-fns";
import { DynamicSecretRedisDBSchema, TDynamicProviderFns } from "./models";
@ -52,28 +51,16 @@ const executeTransactions = async (connection: Redis, commands: string[]): Promi
export const RedisDatabaseProvider = (): TDynamicProviderFns => {
const validateProviderInputs = async (inputs: unknown) => {
const providerInputs = await DynamicSecretRedisDBSchema.parseAsync(inputs);
const [hostIp] = await verifyHostInputValidity(providerInputs.host);
validateHandlebarTemplate("Redis creation", providerInputs.creationStatement, {
allowedExpressions: (val) => ["username", "password", "expiration"].includes(val)
});
if (providerInputs.renewStatement) {
validateHandlebarTemplate("Redis renew", providerInputs.renewStatement, {
allowedExpressions: (val) => ["username", "expiration"].includes(val)
});
}
validateHandlebarTemplate("Redis revoke", providerInputs.revocationStatement, {
allowedExpressions: (val) => ["username"].includes(val)
});
return { ...providerInputs, hostIp };
verifyHostInputValidity(providerInputs.host);
return providerInputs;
};
const $getClient = async (providerInputs: z.infer<typeof DynamicSecretRedisDBSchema> & { hostIp: string }) => {
const $getClient = async (providerInputs: z.infer<typeof DynamicSecretRedisDBSchema>) => {
let connection: Redis | null = null;
try {
connection = new Redis({
username: providerInputs.username,
host: providerInputs.hostIp,
host: providerInputs.host,
port: providerInputs.port,
password: providerInputs.password,
...(providerInputs.ca && {

View File

@ -5,7 +5,6 @@ import { z } from "zod";
import { BadRequestError } from "@app/lib/errors";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { validateHandlebarTemplate } from "@app/lib/template/validate-handlebars";
import { verifyHostInputValidity } from "../dynamic-secret-fns";
import { DynamicSecretSapAseSchema, TDynamicProviderFns } from "./models";
@ -28,25 +27,14 @@ export const SapAseProvider = (): TDynamicProviderFns => {
const validateProviderInputs = async (inputs: unknown) => {
const providerInputs = await DynamicSecretSapAseSchema.parseAsync(inputs);
const [hostIp] = await verifyHostInputValidity(providerInputs.host);
validateHandlebarTemplate("SAP ASE creation", providerInputs.creationStatement, {
allowedExpressions: (val) => ["username", "password"].includes(val)
});
if (providerInputs.revocationStatement) {
validateHandlebarTemplate("SAP ASE revoke", providerInputs.revocationStatement, {
allowedExpressions: (val) => ["username"].includes(val)
});
}
return { ...providerInputs, hostIp };
verifyHostInputValidity(providerInputs.host);
return providerInputs;
};
const $getClient = async (
providerInputs: z.infer<typeof DynamicSecretSapAseSchema> & { hostIp: string },
useMaster?: boolean
) => {
const $getClient = async (providerInputs: z.infer<typeof DynamicSecretSapAseSchema>, useMaster?: boolean) => {
const connectionString =
`DRIVER={FreeTDS};` +
`SERVER=${providerInputs.hostIp};` +
`SERVER=${providerInputs.host};` +
`PORT=${providerInputs.port};` +
`DATABASE=${useMaster ? "master" : providerInputs.database};` +
`UID=${providerInputs.username};` +
@ -95,7 +83,7 @@ export const SapAseProvider = (): TDynamicProviderFns => {
password
});
const queries = creationStatement.trim().replaceAll("\n", "").split(";").filter(Boolean);
const queries = creationStatement.trim().replace(/\n/g, "").split(";").filter(Boolean);
for await (const query of queries) {
// If it's an adduser query, we need to first call sp_addlogin on the MASTER database.
@ -116,7 +104,7 @@ export const SapAseProvider = (): TDynamicProviderFns => {
username
});
const queries = revokeStatement.trim().replaceAll("\n", "").split(";").filter(Boolean);
const queries = revokeStatement.trim().replace(/\n/g, "").split(";").filter(Boolean);
const client = await $getClient(providerInputs);
const masterClient = await $getClient(providerInputs, true);

View File

@ -11,7 +11,6 @@ import { z } from "zod";
import { BadRequestError } from "@app/lib/errors";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { validateHandlebarTemplate } from "@app/lib/template/validate-handlebars";
import { verifyHostInputValidity } from "../dynamic-secret-fns";
import { DynamicSecretSapHanaSchema, TDynamicProviderFns } from "./models";
@ -29,24 +28,13 @@ export const SapHanaProvider = (): TDynamicProviderFns => {
const validateProviderInputs = async (inputs: unknown) => {
const providerInputs = await DynamicSecretSapHanaSchema.parseAsync(inputs);
const [hostIp] = await verifyHostInputValidity(providerInputs.host);
validateHandlebarTemplate("SAP Hana creation", providerInputs.creationStatement, {
allowedExpressions: (val) => ["username", "password", "expiration"].includes(val)
});
if (providerInputs.renewStatement) {
validateHandlebarTemplate("SAP Hana renew", providerInputs.renewStatement, {
allowedExpressions: (val) => ["username", "expiration"].includes(val)
});
}
validateHandlebarTemplate("SAP Hana revoke", providerInputs.revocationStatement, {
allowedExpressions: (val) => ["username"].includes(val)
});
return { ...providerInputs, hostIp };
verifyHostInputValidity(providerInputs.host);
return providerInputs;
};
const $getClient = async (providerInputs: z.infer<typeof DynamicSecretSapHanaSchema> & { hostIp: string }) => {
const $getClient = async (providerInputs: z.infer<typeof DynamicSecretSapHanaSchema>) => {
const client = hdb.createClient({
host: providerInputs.hostIp,
host: providerInputs.host,
port: providerInputs.port,
user: providerInputs.username,
password: providerInputs.password,

View File

@ -5,7 +5,6 @@ import { z } from "zod";
import { BadRequestError } from "@app/lib/errors";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { validateHandlebarTemplate } from "@app/lib/template/validate-handlebars";
import { DynamicSecretSnowflakeSchema, TDynamicProviderFns } from "./models";
@ -32,18 +31,6 @@ const getDaysToExpiry = (expiryDate: Date) => {
export const SnowflakeProvider = (): TDynamicProviderFns => {
const validateProviderInputs = async (inputs: unknown) => {
const providerInputs = await DynamicSecretSnowflakeSchema.parseAsync(inputs);
validateHandlebarTemplate("Snowflake creation", providerInputs.creationStatement, {
allowedExpressions: (val) => ["username", "password", "expiration"].includes(val)
});
if (providerInputs.renewStatement) {
validateHandlebarTemplate("Snowflake renew", providerInputs.renewStatement, {
allowedExpressions: (val) => ["username", "expiration"].includes(val)
});
}
validateHandlebarTemplate("Snowflake revoke", providerInputs.revocationStatement, {
allowedExpressions: (val) => ["username"].includes(val)
});
return providerInputs;
};

View File

@ -5,7 +5,6 @@ import { z } from "zod";
import { withGatewayProxy } from "@app/lib/gateway";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { validateHandlebarTemplate } from "@app/lib/template/validate-handlebars";
import { TGatewayServiceFactory } from "../../gateway/gateway-service";
import { verifyHostInputValidity } from "../dynamic-secret-fns";
@ -118,21 +117,8 @@ type TSqlDatabaseProviderDTO = {
export const SqlDatabaseProvider = ({ gatewayService }: TSqlDatabaseProviderDTO): TDynamicProviderFns => {
const validateProviderInputs = async (inputs: unknown) => {
const providerInputs = await DynamicSecretSqlDBSchema.parseAsync(inputs);
const [hostIp] = await verifyHostInputValidity(providerInputs.host, Boolean(providerInputs.projectGatewayId));
validateHandlebarTemplate("SQL creation", providerInputs.creationStatement, {
allowedExpressions: (val) => ["username", "password", "expiration", "database"].includes(val)
});
if (providerInputs.renewStatement) {
validateHandlebarTemplate("SQL renew", providerInputs.renewStatement, {
allowedExpressions: (val) => ["username", "expiration", "database"].includes(val)
});
}
validateHandlebarTemplate("SQL revoke", providerInputs.revocationStatement, {
allowedExpressions: (val) => ["username", "database"].includes(val)
});
return { ...providerInputs, hostIp };
verifyHostInputValidity(providerInputs.host, Boolean(providerInputs.projectGatewayId));
return providerInputs;
};
const $getClient = async (providerInputs: z.infer<typeof DynamicSecretSqlDBSchema>) => {
@ -158,8 +144,7 @@ export const SqlDatabaseProvider = ({ gatewayService }: TSqlDatabaseProviderDTO)
}
: undefined
},
acquireConnectionTimeout: EXTERNAL_REQUEST_TIMEOUT,
pool: { min: 0, max: 7 }
acquireConnectionTimeout: EXTERNAL_REQUEST_TIMEOUT
});
return db;
};
@ -193,7 +178,7 @@ export const SqlDatabaseProvider = ({ gatewayService }: TSqlDatabaseProviderDTO)
const validateConnection = async (inputs: unknown) => {
const providerInputs = await validateProviderInputs(inputs);
let isConnected = false;
const gatewayCallback = async (host = providerInputs.hostIp, port = providerInputs.port) => {
const gatewayCallback = async (host = providerInputs.host, port = providerInputs.port) => {
const db = await $getClient({ ...providerInputs, port, host });
// oracle needs from keyword
const testStatement = providerInputs.client === SqlProviders.Oracle ? "SELECT 1 FROM DUAL" : "SELECT 1";
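
One side of this hunk validates the creation/renew/revocation statements with validateHandlebarTemplate and an allow-list of expressions. The underlying idea, sketched independently of that helper (the function and error text below are illustrative):

    import handlebars from "handlebars";

    // Walk the parsed template and reject any mustache whose variable is not in the allow-list.
    const assertTemplateOnlyUses = (template: string, allowed: string[]): void => {
      const ast = handlebars.parse(template);
      for (const node of ast.body) {
        if (node.type === "MustacheStatement") {
          const path = (node as { path?: { original?: string } }).path;
          if (!path?.original || !allowed.includes(path.original)) {
            throw new Error(`Unexpected expression '${path?.original ?? "?"}' in statement`);
          }
        }
      }
    };

    // e.g. assertTemplateOnlyUses(creationStatement, ["username", "password", "expiration", "database"]);
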

View File

@ -3,7 +3,8 @@ import slugify from "@sindresorhus/slugify";
import { OrgMembershipRole, TOrgRoles } from "@app/db/schemas";
import { TOidcConfigDALFactory } from "@app/ee/services/oidc/oidc-config-dal";
import { BadRequestError, NotFoundError, PermissionBoundaryError, UnauthorizedError } from "@app/lib/errors";
import { validatePermissionBoundary } from "@app/lib/casl/boundary";
import { BadRequestError, ForbiddenRequestError, NotFoundError, UnauthorizedError } from "@app/lib/errors";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { TGroupProjectDALFactory } from "@app/services/group-project/group-project-dal";
import { TOrgDALFactory } from "@app/services/org/org-dal";
@ -13,8 +14,7 @@ import { TProjectKeyDALFactory } from "@app/services/project-key/project-key-dal
import { TUserDALFactory } from "@app/services/user/user-dal";
import { TLicenseServiceFactory } from "../license/license-service";
import { OrgPermissionGroupActions, OrgPermissionSubjects } from "../permission/org-permission";
import { constructPermissionErrorMessage, validatePrivilegeChangeOperation } from "../permission/permission-fns";
import { OrgPermissionActions, OrgPermissionSubjects } from "../permission/org-permission";
import { TPermissionServiceFactory } from "../permission/permission-service";
import { TGroupDALFactory } from "./group-dal";
import { addUsersToGroupByUserIds, removeUsersFromGroupByUserIds } from "./group-fns";
@ -67,14 +67,14 @@ export const groupServiceFactory = ({
const createGroup = async ({ name, slug, role, actor, actorId, actorAuthMethod, actorOrgId }: TCreateGroupDTO) => {
if (!actorOrgId) throw new UnauthorizedError({ message: "No organization ID provided in request" });
const { permission, membership } = await permissionService.getOrgPermission(
const { permission } = await permissionService.getOrgPermission(
actor,
actorId,
actorOrgId,
actorAuthMethod,
actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionGroupActions.Create, OrgPermissionSubjects.Groups);
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Create, OrgPermissionSubjects.Groups);
const plan = await licenseService.getPlan(actorOrgId);
if (!plan.groups)
@ -87,26 +87,14 @@ export const groupServiceFactory = ({
actorOrgId
);
const isCustomRole = Boolean(customRole);
if (role !== OrgMembershipRole.NoAccess) {
const permissionBoundary = validatePrivilegeChangeOperation(
membership.shouldUseNewPrivilegeSystem,
OrgPermissionGroupActions.GrantPrivileges,
OrgPermissionSubjects.Groups,
permission,
rolePermission
);
if (!permissionBoundary.isValid)
throw new PermissionBoundaryError({
message: constructPermissionErrorMessage(
"Failed to create group",
membership.shouldUseNewPrivilegeSystem,
OrgPermissionGroupActions.GrantPrivileges,
OrgPermissionSubjects.Groups
),
details: { missingPermissions: permissionBoundary.missingPermissions }
});
}
const permissionBoundary = validatePermissionBoundary(permission, rolePermission);
if (!permissionBoundary.isValid)
throw new ForbiddenRequestError({
name: "PermissionBoundaryError",
message: "Failed to create a more privileged group",
details: { missingPermissions: permissionBoundary.missingPermissions }
});
const group = await groupDAL.transaction(async (tx) => {
const existingGroup = await groupDAL.findOne({ orgId: actorOrgId, name }, tx);
@ -145,15 +133,14 @@ export const groupServiceFactory = ({
}: TUpdateGroupDTO) => {
if (!actorOrgId) throw new UnauthorizedError({ message: "No organization ID provided in request" });
const { permission, membership } = await permissionService.getOrgPermission(
const { permission } = await permissionService.getOrgPermission(
actor,
actorId,
actorOrgId,
actorAuthMethod,
actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionGroupActions.Edit, OrgPermissionSubjects.Groups);
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Edit, OrgPermissionSubjects.Groups);
const plan = await licenseService.getPlan(actorOrgId);
if (!plan.groups)
@ -174,21 +161,11 @@ export const groupServiceFactory = ({
);
const isCustomRole = Boolean(customOrgRole);
const permissionBoundary = validatePrivilegeChangeOperation(
membership.shouldUseNewPrivilegeSystem,
OrgPermissionGroupActions.GrantPrivileges,
OrgPermissionSubjects.Groups,
permission,
rolePermission
);
const permissionBoundary = validatePermissionBoundary(permission, rolePermission);
if (!permissionBoundary.isValid)
throw new PermissionBoundaryError({
message: constructPermissionErrorMessage(
"Failed to update group",
membership.shouldUseNewPrivilegeSystem,
OrgPermissionGroupActions.GrantPrivileges,
OrgPermissionSubjects.Groups
),
throw new ForbiddenRequestError({
name: "PermissionBoundaryError",
message: "Failed to update a more privileged group",
details: { missingPermissions: permissionBoundary.missingPermissions }
});
if (isCustomRole) customRole = customOrgRole;
@ -238,7 +215,7 @@ export const groupServiceFactory = ({
actorAuthMethod,
actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionGroupActions.Delete, OrgPermissionSubjects.Groups);
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Delete, OrgPermissionSubjects.Groups);
const plan = await licenseService.getPlan(actorOrgId);
@ -265,7 +242,7 @@ export const groupServiceFactory = ({
actorAuthMethod,
actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionGroupActions.Read, OrgPermissionSubjects.Groups);
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Read, OrgPermissionSubjects.Groups);
const group = await groupDAL.findById(id);
if (!group) {
@ -298,7 +275,7 @@ export const groupServiceFactory = ({
actorAuthMethod,
actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionGroupActions.Read, OrgPermissionSubjects.Groups);
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Read, OrgPermissionSubjects.Groups);
const group = await groupDAL.findOne({
orgId: actorOrgId,
@ -326,14 +303,14 @@ export const groupServiceFactory = ({
const addUserToGroup = async ({ id, username, actor, actorId, actorAuthMethod, actorOrgId }: TAddUserToGroupDTO) => {
if (!actorOrgId) throw new UnauthorizedError({ message: "No organization ID provided in request" });
const { permission, membership } = await permissionService.getOrgPermission(
const { permission } = await permissionService.getOrgPermission(
actor,
actorId,
actorOrgId,
actorAuthMethod,
actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionGroupActions.Edit, OrgPermissionSubjects.Groups);
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Edit, OrgPermissionSubjects.Groups);
// check if group with slug exists
const group = await groupDAL.findOne({
@ -361,22 +338,11 @@ export const groupServiceFactory = ({
const { permission: groupRolePermission } = await permissionService.getOrgPermissionByRole(group.role, actorOrgId);
// check if user has broader or equal to privileges than group
const permissionBoundary = validatePrivilegeChangeOperation(
membership.shouldUseNewPrivilegeSystem,
OrgPermissionGroupActions.AddMembers,
OrgPermissionSubjects.Groups,
permission,
groupRolePermission
);
const permissionBoundary = validatePermissionBoundary(permission, groupRolePermission);
if (!permissionBoundary.isValid)
throw new PermissionBoundaryError({
message: constructPermissionErrorMessage(
"Failed to add user to more privileged group",
membership.shouldUseNewPrivilegeSystem,
OrgPermissionGroupActions.AddMembers,
OrgPermissionSubjects.Groups
),
throw new ForbiddenRequestError({
name: "PermissionBoundaryError",
message: "Failed to add user to more privileged group",
details: { missingPermissions: permissionBoundary.missingPermissions }
});
@ -408,14 +374,14 @@ export const groupServiceFactory = ({
}: TRemoveUserFromGroupDTO) => {
if (!actorOrgId) throw new UnauthorizedError({ message: "No organization ID provided in request" });
const { permission, membership } = await permissionService.getOrgPermission(
const { permission } = await permissionService.getOrgPermission(
actor,
actorId,
actorOrgId,
actorAuthMethod,
actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionGroupActions.Edit, OrgPermissionSubjects.Groups);
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Edit, OrgPermissionSubjects.Groups);
// check if group with slug exists
const group = await groupDAL.findOne({
@ -443,21 +409,11 @@ export const groupServiceFactory = ({
const { permission: groupRolePermission } = await permissionService.getOrgPermissionByRole(group.role, actorOrgId);
// check if user has broader or equal to privileges than group
const permissionBoundary = validatePrivilegeChangeOperation(
membership.shouldUseNewPrivilegeSystem,
OrgPermissionGroupActions.RemoveMembers,
OrgPermissionSubjects.Groups,
permission,
groupRolePermission
);
const permissionBoundary = validatePermissionBoundary(permission, groupRolePermission);
if (!permissionBoundary.isValid)
throw new PermissionBoundaryError({
message: constructPermissionErrorMessage(
"Failed to delete user from more privileged group",
membership.shouldUseNewPrivilegeSystem,
OrgPermissionGroupActions.RemoveMembers,
OrgPermissionSubjects.Groups
),
throw new ForbiddenRequestError({
name: "PermissionBoundaryError",
message: "Failed to delete user from more privileged group",
details: { missingPermissions: permissionBoundary.missingPermissions }
});
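
Both versions of this service gate role assignment behind a permission-boundary check (validatePrivilegeChangeOperation on one side, validatePermissionBoundary on the other). The core idea, approximated in a small CASL sketch that is neither implementation:

    import { AbilityBuilder, createMongoAbility } from "@casl/ability";

    type Ability = ReturnType<typeof createMongoAbility>;

    // The assignment is too broad if the role can do something the assigner cannot.
    const exceedsAssignerPrivileges = (assigner: Ability, role: Ability): boolean =>
      role.rules.some((rule) => {
        const actions = ([] as string[]).concat(rule.action);
        const subjects = ([] as string[]).concat((rule.subject as unknown as string | string[] | undefined) ?? "all");
        return actions.some((action) => subjects.some((subject) => assigner.cannot(action, subject)));
      });

    // Usage sketch: an assigner who can only read Groups may not hand out a role that edits them.
    const assignerBuilder = new AbilityBuilder(createMongoAbility);
    assignerBuilder.can("read", "Groups");
    const roleBuilder = new AbilityBuilder(createMongoAbility);
    roleBuilder.can("edit", "Groups");
    if (exceedsAssignerPrivileges(assignerBuilder.build(), roleBuilder.build())) {
      throw new Error("Failed to assign a more privileged role");
    }
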

View File

@ -2,17 +2,16 @@ import { ForbiddenError, subject } from "@casl/ability";
import { packRules } from "@casl/ability/extra";
import { ActionProjectType, TableName } from "@app/db/schemas";
import { BadRequestError, NotFoundError, PermissionBoundaryError } from "@app/lib/errors";
import { validatePermissionBoundary } from "@app/lib/casl/boundary";
import { BadRequestError, ForbiddenRequestError, NotFoundError } from "@app/lib/errors";
import { ms } from "@app/lib/ms";
import { validateHandlebarTemplate } from "@app/lib/template/validate-handlebars";
import { unpackPermissions } from "@app/server/routes/sanitizedSchema/permission";
import { ActorType } from "@app/services/auth/auth-type";
import { TIdentityProjectDALFactory } from "@app/services/identity-project/identity-project-dal";
import { TProjectDALFactory } from "@app/services/project/project-dal";
import { constructPermissionErrorMessage, validatePrivilegeChangeOperation } from "../permission/permission-fns";
import { TPermissionServiceFactory } from "../permission/permission-service";
import { ProjectPermissionIdentityActions, ProjectPermissionSub } from "../permission/project-permission";
import { ProjectPermissionActions, ProjectPermissionSub } from "../permission/project-permission";
import { TIdentityProjectAdditionalPrivilegeV2DALFactory } from "./identity-project-additional-privilege-v2-dal";
import {
IdentityProjectAdditionalPrivilegeTemporaryMode,
@ -65,10 +64,10 @@ export const identityProjectAdditionalPrivilegeV2ServiceFactory = ({
actionProjectType: ActionProjectType.Any
});
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionIdentityActions.Edit,
ProjectPermissionActions.Edit,
subject(ProjectPermissionSub.Identity, { identityId })
);
const { permission: targetIdentityPermission, membership } = await permissionService.getProjectPermission({
const { permission: targetIdentityPermission } = await permissionService.getProjectPermission({
actor: ActorType.IDENTITY,
actorId: identityId,
projectId: identityProjectMembership.projectId,
@ -80,26 +79,13 @@ export const identityProjectAdditionalPrivilegeV2ServiceFactory = ({
// we need to validate that the privilege given is not higher than the assigning users permission
// @ts-expect-error this is expected error because of one being really accurate rule definition other being a bit more broader. Both are valid casl rules
targetIdentityPermission.update(targetIdentityPermission.rules.concat(customPermission));
const permissionBoundary = validatePrivilegeChangeOperation(
membership.shouldUseNewPrivilegeSystem,
ProjectPermissionIdentityActions.GrantPrivileges,
ProjectPermissionSub.Identity,
permission,
targetIdentityPermission
);
const permissionBoundary = validatePermissionBoundary(permission, targetIdentityPermission);
if (!permissionBoundary.isValid)
throw new PermissionBoundaryError({
message: constructPermissionErrorMessage(
"Failed to update more privileged identity",
membership.shouldUseNewPrivilegeSystem,
ProjectPermissionIdentityActions.GrantPrivileges,
ProjectPermissionSub.Identity
),
throw new ForbiddenRequestError({
name: "PermissionBoundaryError",
message: "Failed to update more privileged identity",
details: { missingPermissions: permissionBoundary.missingPermissions }
});
validateHandlebarTemplate("Identity Additional Privilege Create", JSON.stringify(customPermission || []), {
allowedExpressions: (val) => val.includes("identity.")
});
const existingSlug = await identityProjectAdditionalPrivilegeDAL.findOne({
slug,
@ -164,10 +150,10 @@ export const identityProjectAdditionalPrivilegeV2ServiceFactory = ({
actionProjectType: ActionProjectType.Any
});
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionIdentityActions.Edit,
ProjectPermissionActions.Edit,
subject(ProjectPermissionSub.Identity, { identityId: identityProjectMembership.identityId })
);
const { permission: targetIdentityPermission, membership } = await permissionService.getProjectPermission({
const { permission: targetIdentityPermission } = await permissionService.getProjectPermission({
actor: ActorType.IDENTITY,
actorId: identityProjectMembership.identityId,
projectId: identityProjectMembership.projectId,
@ -179,28 +165,14 @@ export const identityProjectAdditionalPrivilegeV2ServiceFactory = ({
// we need to validate that the privilege given is not higher than the assigning users permission
// @ts-expect-error this is expected error because of one being really accurate rule definition other being a bit more broader. Both are valid casl rules
targetIdentityPermission.update(targetIdentityPermission.rules.concat(data.permissions || []));
const permissionBoundary = validatePrivilegeChangeOperation(
membership.shouldUseNewPrivilegeSystem,
ProjectPermissionIdentityActions.GrantPrivileges,
ProjectPermissionSub.Identity,
permission,
targetIdentityPermission
);
const permissionBoundary = validatePermissionBoundary(permission, targetIdentityPermission);
if (!permissionBoundary.isValid)
throw new PermissionBoundaryError({
message: constructPermissionErrorMessage(
"Failed to update more privileged identity",
membership.shouldUseNewPrivilegeSystem,
ProjectPermissionIdentityActions.GrantPrivileges,
ProjectPermissionSub.Identity
),
throw new ForbiddenRequestError({
name: "PermissionBoundaryError",
message: "Failed to update more privileged identity",
details: { missingPermissions: permissionBoundary.missingPermissions }
});
validateHandlebarTemplate("Identity Additional Privilege Update", JSON.stringify(data.permissions || []), {
allowedExpressions: (val) => val.includes("identity.")
});
if (data?.slug) {
const existingSlug = await identityProjectAdditionalPrivilegeDAL.findOne({
slug: data.slug,
@ -255,7 +227,7 @@ export const identityProjectAdditionalPrivilegeV2ServiceFactory = ({
message: `Failed to find identity with membership ${identityPrivilege.projectMembershipId}`
});
const { permission, membership } = await permissionService.getProjectPermission({
const { permission } = await permissionService.getProjectPermission({
actor,
actorId,
projectId: identityProjectMembership.projectId,
@ -264,7 +236,7 @@ export const identityProjectAdditionalPrivilegeV2ServiceFactory = ({
actionProjectType: ActionProjectType.Any
});
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionIdentityActions.Edit,
ProjectPermissionActions.Edit,
subject(ProjectPermissionSub.Identity, { identityId: identityProjectMembership.identityId })
);
const { permission: identityRolePermission } = await permissionService.getProjectPermission({
@ -275,21 +247,11 @@ export const identityProjectAdditionalPrivilegeV2ServiceFactory = ({
actorOrgId,
actionProjectType: ActionProjectType.Any
});
const permissionBoundary = validatePrivilegeChangeOperation(
membership.shouldUseNewPrivilegeSystem,
ProjectPermissionIdentityActions.GrantPrivileges,
ProjectPermissionSub.Identity,
permission,
identityRolePermission
);
const permissionBoundary = validatePermissionBoundary(permission, identityRolePermission);
if (!permissionBoundary.isValid)
throw new PermissionBoundaryError({
message: constructPermissionErrorMessage(
"Failed to update more privileged identity",
membership.shouldUseNewPrivilegeSystem,
ProjectPermissionIdentityActions.GrantPrivileges,
ProjectPermissionSub.Identity
),
throw new ForbiddenRequestError({
name: "PermissionBoundaryError",
message: "Failed to update more privileged identity",
details: { missingPermissions: permissionBoundary.missingPermissions }
});
@ -325,7 +287,7 @@ export const identityProjectAdditionalPrivilegeV2ServiceFactory = ({
actionProjectType: ActionProjectType.Any
});
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionIdentityActions.Read,
ProjectPermissionActions.Read,
subject(ProjectPermissionSub.Identity, { identityId: identityProjectMembership.identityId })
);
@ -360,7 +322,7 @@ export const identityProjectAdditionalPrivilegeV2ServiceFactory = ({
actionProjectType: ActionProjectType.Any
});
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionIdentityActions.Read,
ProjectPermissionActions.Read,
subject(ProjectPermissionSub.Identity, { identityId: identityProjectMembership.identityId })
);
@ -396,7 +358,7 @@ export const identityProjectAdditionalPrivilegeV2ServiceFactory = ({
actionProjectType: ActionProjectType.Any
});
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionIdentityActions.Read,
ProjectPermissionActions.Read,
subject(ProjectPermissionSub.Identity, { identityId: identityProjectMembership.identityId })
);
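The recurring guard in this service composes the target identity's existing rules with the requested custom permission and then checks that the assigning actor still covers the result. Below is a minimal sketch of that subsumption idea in CASL; it ignores rule conditions, field restrictions, and inverted rules, so it only illustrates what a boundary check such as validatePermissionBoundary (imported from @app/lib/casl/boundary elsewhere in this diff) conceptually does, not its actual implementation. The validateBoundary helper and the sample rules are hypothetical.

import { createMongoAbility, MongoAbility } from "@casl/ability";

type MissingPermission = { action: string; subject: string };

// Simplified boundary check: every (action, subject) pair the managed ability allows
// must also be allowed to the actor. Conditions, fields, and inverted rules are ignored.
const validateBoundary = (actor: MongoAbility, managed: MongoAbility) => {
  const missingPermissions: MissingPermission[] = [];
  for (const rule of managed.rules) {
    if (rule.inverted) continue; // this sketch only looks at "can" rules
    const actions = Array.isArray(rule.action) ? rule.action : [rule.action];
    const rawSubjects = rule.subject ?? "all";
    const subjects = Array.isArray(rawSubjects) ? rawSubjects : [rawSubjects];
    for (const action of actions) {
      for (const subj of subjects) {
        if (!actor.can(action, subj)) missingPermissions.push({ action, subject: String(subj) });
      }
    }
  }
  return { isValid: missingPermissions.length === 0, missingPermissions };
};

// Usage mirroring the service: the target's rules are extended with the requested
// privilege, then the combined ability is checked against the assigning actor.
const actorPermission = createMongoAbility([{ action: ["read", "edit"], subject: "Identity" }]);
const targetPermission = createMongoAbility([{ action: "delete", subject: "Identity" }]);
const boundary = validateBoundary(actorPermission, targetPermission);
// boundary.isValid === false; missingPermissions: [{ action: "delete", subject: "Identity" }]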

View File

@ -2,21 +2,16 @@ import { ForbiddenError, MongoAbility, RawRuleOf, subject } from "@casl/ability"
import { PackRule, packRules, unpackRules } from "@casl/ability/extra";
import { ActionProjectType } from "@app/db/schemas";
import { BadRequestError, NotFoundError, PermissionBoundaryError } from "@app/lib/errors";
import { validatePermissionBoundary } from "@app/lib/casl/boundary";
import { BadRequestError, ForbiddenRequestError, NotFoundError } from "@app/lib/errors";
import { ms } from "@app/lib/ms";
import { validateHandlebarTemplate } from "@app/lib/template/validate-handlebars";
import { UnpackedPermissionSchema } from "@app/server/routes/sanitizedSchema/permission";
import { ActorType } from "@app/services/auth/auth-type";
import { TIdentityProjectDALFactory } from "@app/services/identity-project/identity-project-dal";
import { TProjectDALFactory } from "@app/services/project/project-dal";
import { constructPermissionErrorMessage, validatePrivilegeChangeOperation } from "../permission/permission-fns";
import { TPermissionServiceFactory } from "../permission/permission-service";
import {
ProjectPermissionIdentityActions,
ProjectPermissionSet,
ProjectPermissionSub
} from "../permission/project-permission";
import { ProjectPermissionActions, ProjectPermissionSet, ProjectPermissionSub } from "../permission/project-permission";
import { TIdentityProjectAdditionalPrivilegeDALFactory } from "./identity-project-additional-privilege-dal";
import {
IdentityProjectAdditionalPrivilegeTemporaryMode,
@ -68,7 +63,7 @@ export const identityProjectAdditionalPrivilegeServiceFactory = ({
if (!identityProjectMembership)
throw new NotFoundError({ message: `Failed to find identity with id ${identityId}` });
const { permission, membership } = await permissionService.getProjectPermission({
const { permission } = await permissionService.getProjectPermission({
actor,
actorId,
projectId: identityProjectMembership.projectId,
@ -76,9 +71,8 @@ export const identityProjectAdditionalPrivilegeServiceFactory = ({
actorOrgId,
actionProjectType: ActionProjectType.Any
});
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionIdentityActions.Edit,
ProjectPermissionActions.Edit,
subject(ProjectPermissionSub.Identity, { identityId })
);
@ -94,21 +88,11 @@ export const identityProjectAdditionalPrivilegeServiceFactory = ({
// we need to validate that the privilege given is not higher than the assigning user's permission
// @ts-expect-error this error is expected because one rule definition is very precise while the other is broader; both are valid CASL rules
targetIdentityPermission.update(targetIdentityPermission.rules.concat(customPermission));
const permissionBoundary = validatePrivilegeChangeOperation(
membership.shouldUseNewPrivilegeSystem,
ProjectPermissionIdentityActions.GrantPrivileges,
ProjectPermissionSub.Identity,
permission,
targetIdentityPermission
);
const permissionBoundary = validatePermissionBoundary(permission, targetIdentityPermission);
if (!permissionBoundary.isValid)
throw new PermissionBoundaryError({
message: constructPermissionErrorMessage(
"Failed to update more privileged identity",
membership.shouldUseNewPrivilegeSystem,
ProjectPermissionIdentityActions.GrantPrivileges,
ProjectPermissionSub.Identity
),
throw new ForbiddenRequestError({
name: "PermissionBoundaryError",
message: "Failed to update more privileged identity",
details: { missingPermissions: permissionBoundary.missingPermissions }
});
@ -118,10 +102,6 @@ export const identityProjectAdditionalPrivilegeServiceFactory = ({
});
if (existingSlug) throw new BadRequestError({ message: "Additional privilege with the provided slug already exists" });
validateHandlebarTemplate("Identity Additional Privilege Create", JSON.stringify(customPermission || []), {
allowedExpressions: (val) => val.includes("identity.")
});
const packedPermission = JSON.stringify(packRules(customPermission));
if (!dto.isTemporary) {
const additionalPrivilege = await identityProjectAdditionalPrivilegeDAL.create({
@ -170,7 +150,7 @@ export const identityProjectAdditionalPrivilegeServiceFactory = ({
if (!identityProjectMembership)
throw new NotFoundError({ message: `Failed to find identity with id ${identityId}` });
const { permission, membership } = await permissionService.getProjectPermission({
const { permission } = await permissionService.getProjectPermission({
actor,
actorId,
projectId: identityProjectMembership.projectId,
@ -180,7 +160,7 @@ export const identityProjectAdditionalPrivilegeServiceFactory = ({
});
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionIdentityActions.Edit,
ProjectPermissionActions.Edit,
subject(ProjectPermissionSub.Identity, { identityId })
);
@ -196,21 +176,11 @@ export const identityProjectAdditionalPrivilegeServiceFactory = ({
// we need to validate that the privilege given is not higher than the assigning user's permission
// @ts-expect-error this error is expected because one rule definition is very precise while the other is broader; both are valid CASL rules
targetIdentityPermission.update(targetIdentityPermission.rules.concat(data.permissions || []));
const permissionBoundary = validatePrivilegeChangeOperation(
membership.shouldUseNewPrivilegeSystem,
ProjectPermissionIdentityActions.GrantPrivileges,
ProjectPermissionSub.Identity,
permission,
targetIdentityPermission
);
const permissionBoundary = validatePermissionBoundary(permission, targetIdentityPermission);
if (!permissionBoundary.isValid)
throw new PermissionBoundaryError({
message: constructPermissionErrorMessage(
"Failed to update more privileged identity",
membership.shouldUseNewPrivilegeSystem,
ProjectPermissionIdentityActions.GrantPrivileges,
ProjectPermissionSub.Identity
),
throw new ForbiddenRequestError({
name: "PermissionBoundaryError",
message: "Failed to update more privileged identity",
details: { missingPermissions: permissionBoundary.missingPermissions }
});
@ -233,9 +203,6 @@ export const identityProjectAdditionalPrivilegeServiceFactory = ({
}
const isTemporary = typeof data?.isTemporary !== "undefined" ? data.isTemporary : identityPrivilege.isTemporary;
validateHandlebarTemplate("Identity Additional Privilege Update", JSON.stringify(data.permissions || []), {
allowedExpressions: (val) => val.includes("identity.")
});
const packedPermission = data.permissions ? JSON.stringify(packRules(data.permissions)) : undefined;
if (isTemporary) {
@ -288,7 +255,7 @@ export const identityProjectAdditionalPrivilegeServiceFactory = ({
if (!identityProjectMembership)
throw new NotFoundError({ message: `Failed to find identity with id ${identityId}` });
const { permission, membership } = await permissionService.getProjectPermission({
const { permission } = await permissionService.getProjectPermission({
actor,
actorId,
projectId: identityProjectMembership.projectId,
@ -297,7 +264,7 @@ export const identityProjectAdditionalPrivilegeServiceFactory = ({
actionProjectType: ActionProjectType.Any
});
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionIdentityActions.Edit,
ProjectPermissionActions.Edit,
subject(ProjectPermissionSub.Identity, { identityId })
);
@ -309,21 +276,11 @@ export const identityProjectAdditionalPrivilegeServiceFactory = ({
actorOrgId,
actionProjectType: ActionProjectType.Any
});
const permissionBoundary = validatePrivilegeChangeOperation(
membership.shouldUseNewPrivilegeSystem,
ProjectPermissionIdentityActions.GrantPrivileges,
ProjectPermissionSub.Identity,
permission,
identityRolePermission
);
const permissionBoundary = validatePermissionBoundary(permission, identityRolePermission);
if (!permissionBoundary.isValid)
throw new PermissionBoundaryError({
message: constructPermissionErrorMessage(
"Failed to edit more privileged identity",
membership.shouldUseNewPrivilegeSystem,
ProjectPermissionIdentityActions.GrantPrivileges,
ProjectPermissionSub.Identity
),
throw new ForbiddenRequestError({
name: "PermissionBoundaryError",
message: "Failed to edit more privileged identity",
details: { missingPermissions: permissionBoundary.missingPermissions }
});
@ -370,7 +327,7 @@ export const identityProjectAdditionalPrivilegeServiceFactory = ({
actionProjectType: ActionProjectType.Any
});
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionIdentityActions.Read,
ProjectPermissionActions.Read,
subject(ProjectPermissionSub.Identity, { identityId })
);
@ -414,7 +371,7 @@ export const identityProjectAdditionalPrivilegeServiceFactory = ({
});
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionIdentityActions.Read,
ProjectPermissionActions.Read,
subject(ProjectPermissionSub.Identity, { identityId })
);

View File

@ -4,9 +4,8 @@ import crypto, { KeyObject } from "crypto";
import { ActionProjectType } from "@app/db/schemas";
import { BadRequestError, InternalServerError, NotFoundError } from "@app/lib/errors";
import { isValidIp } from "@app/lib/ip";
import { isValidHostname, isValidIp } from "@app/lib/ip";
import { ms } from "@app/lib/ms";
import { isFQDN } from "@app/lib/validator/validate-url";
import { constructPemChainFromCerts } from "@app/services/certificate/certificate-fns";
import { CertExtendedKeyUsage, CertKeyAlgorithm, CertKeyUsage } from "@app/services/certificate/certificate-types";
import {
@ -666,7 +665,7 @@ export const kmipServiceFactory = ({
.split(",")
.map((name) => name.trim())
.map((altName) => {
if (isFQDN(altName, { allow_wildcard: true })) {
if (isValidHostname(altName)) {
return {
type: "dns",
value: altName
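For context, the subject-alternative-name handling above splits a comma-separated altNames string and tags each entry by type. The sketch below mirrors that shape with simplified, hypothetical stand-ins for the isValidHostname and isValidIp helpers imported from @app/lib/ip (their real implementations are not shown in this diff); the branch order and the "unknown" fallback are purely illustrative.

// Hypothetical, simplified predicates; the real helpers in @app/lib/ip are stricter.
const isValidIp = (value: string) => /^(\d{1,3}\.){3}\d{1,3}$/.test(value) || value.includes(":");
const isValidHostname = (value: string) =>
  /^[a-zA-Z0-9]([a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?(\.[a-zA-Z0-9]([a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?)*$/.test(value);

type AltName = { type: "dns" | "ip" | "unknown"; value: string };

const parseAltNames = (altNames: string): AltName[] =>
  altNames
    .split(",")
    .map((name) => name.trim())
    .map((altName) => {
      if (isValidIp(altName)) return { type: "ip" as const, value: altName };
      if (isValidHostname(altName)) return { type: "dns" as const, value: altName };
      return { type: "unknown" as const, value: altName };
    });

const parsed = parseAltNames("kmip.example.com, 10.0.0.12");
// => [{ type: "dns", value: "kmip.example.com" }, { type: "ip", value: "10.0.0.12" }]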

View File

@ -97,14 +97,12 @@ export const searchGroups = async (
res.on("searchEntry", (entry) => {
const dn = entry.dn.toString();
const cnStartIndex = dn.indexOf("cn=");
const regex = /cn=([^,]+)/;
const match = dn.match(regex);
// parse the cn from the dn
const cn = (match && match[1]) as string;
if (cnStartIndex !== -1) {
const valueStartIndex = cnStartIndex + 3;
const commaIndex = dn.indexOf(",", valueStartIndex);
const cn = dn.substring(valueStartIndex, commaIndex === -1 ? undefined : commaIndex);
groups.push({ dn, cn });
}
groups.push({ dn, cn });
});
res.on("error", (error) => {
ldapClient.unbind();
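Both parsing strategies shown above pull the first cn value out of an LDAP DN; here is a quick worked comparison on a sample DN (the DN itself is made up):

const dn = "cn=Engineers,ou=groups,dc=example,dc=com";

// Regex approach: capture everything after "cn=" up to the next comma.
const match = dn.match(/cn=([^,]+)/);
const cnFromRegex = match ? match[1] : undefined; // "Engineers"

// indexOf/substring approach: locate "cn=", then slice up to the next comma (or the end);
// entries whose DN contains no cn are skipped entirely.
const cnStartIndex = dn.indexOf("cn=");
let cnFromSubstring: string | undefined;
if (cnStartIndex !== -1) {
  const valueStartIndex = cnStartIndex + 3;
  const commaIndex = dn.indexOf(",", valueStartIndex);
  cnFromSubstring = dn.substring(valueStartIndex, commaIndex === -1 ? undefined : commaIndex); // "Engineers"
}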

View File

@ -1,24 +0,0 @@
export const BillingPlanRows = {
MemberLimit: { name: "Organization member limit", field: "memberLimit" },
IdentityLimit: { name: "Organization identity limit", field: "identityLimit" },
WorkspaceLimit: { name: "Project limit", field: "workspaceLimit" },
EnvironmentLimit: { name: "Environment limit", field: "environmentLimit" },
SecretVersioning: { name: "Secret versioning", field: "secretVersioning" },
PitRecovery: { name: "Point in time recovery", field: "pitRecovery" },
Rbac: { name: "RBAC", field: "rbac" },
CustomRateLimits: { name: "Custom rate limits", field: "customRateLimits" },
CustomAlerts: { name: "Custom alerts", field: "customAlerts" },
AuditLogs: { name: "Audit logs", field: "auditLogs" },
SamlSSO: { name: "SAML SSO", field: "samlSSO" },
Hsm: { name: "Hardware Security Module (HSM)", field: "hsm" },
OidcSSO: { name: "OIDC SSO", field: "oidcSSO" },
SecretApproval: { name: "Secret approvals", field: "secretApproval" },
SecretRotation: { name: "Secret rotation", field: "secretRotation" },
InstanceUserManagement: { name: "Instance User Management", field: "instanceUserManagement" },
ExternalKms: { name: "External KMS", field: "externalKms" }
} as const;
export const BillingPlanTableHead = {
Allowed: { name: "Allowed" },
Used: { name: "Used" }
} as const;

View File

@ -39,7 +39,7 @@ export const getDefaultOnPremFeatures = (): TFeatureSet => ({
trial_end: null,
has_used_trial: true,
secretApproval: false,
secretRotation: false,
secretRotation: true,
caCrl: false,
instanceUserManagement: false,
externalKms: false,

View File

@ -5,7 +5,6 @@
// TODO(akhilmhdh): with Tony, work out the API structure and fill it in here
import { ForbiddenError } from "@casl/ability";
import { CronJob } from "cron";
import { Knex } from "knex";
import { TKeyStoreFactory } from "@app/keystore/keystore";
@ -13,13 +12,10 @@ import { getConfig } from "@app/lib/config/env";
import { verifyOfflineLicense } from "@app/lib/crypto";
import { NotFoundError } from "@app/lib/errors";
import { logger } from "@app/lib/logger";
import { TIdentityOrgDALFactory } from "@app/services/identity/identity-org-dal";
import { TOrgDALFactory } from "@app/services/org/org-dal";
import { TProjectDALFactory } from "@app/services/project/project-dal";
import { OrgPermissionActions, OrgPermissionSubjects } from "../permission/org-permission";
import { TPermissionServiceFactory } from "../permission/permission-service";
import { BillingPlanRows, BillingPlanTableHead } from "./licence-enums";
import { TLicenseDALFactory } from "./license-dal";
import { getDefaultOnPremFeatures, setupLicenseRequestWithStore } from "./license-fns";
import {
@ -32,7 +28,6 @@ import {
TFeatureSet,
TGetOrgBillInfoDTO,
TGetOrgTaxIdDTO,
TOfflineLicense,
TOfflineLicenseContents,
TOrgInvoiceDTO,
TOrgLicensesDTO,
@ -44,12 +39,10 @@ import {
} from "./license-types";
type TLicenseServiceFactoryDep = {
orgDAL: Pick<TOrgDALFactory, "findOrgById" | "countAllOrgMembers">;
orgDAL: Pick<TOrgDALFactory, "findOrgById">;
permissionService: Pick<TPermissionServiceFactory, "getOrgPermission">;
licenseDAL: TLicenseDALFactory;
keyStore: Pick<TKeyStoreFactory, "setItemWithExpiry" | "getItem" | "deleteItem">;
identityOrgMembershipDAL: TIdentityOrgDALFactory;
projectDAL: TProjectDALFactory;
};
export type TLicenseServiceFactory = ReturnType<typeof licenseServiceFactory>;
@ -57,21 +50,18 @@ export type TLicenseServiceFactory = ReturnType<typeof licenseServiceFactory>;
const LICENSE_SERVER_CLOUD_LOGIN = "/api/auth/v1/license-server-login";
const LICENSE_SERVER_ON_PREM_LOGIN = "/api/auth/v1/license-login";
const LICENSE_SERVER_CLOUD_PLAN_TTL = 5 * 60; // 5 mins
const LICENSE_SERVER_CLOUD_PLAN_TTL = 30; // 30 seconds
const FEATURE_CACHE_KEY = (orgId: string) => `infisical-cloud-plan-${orgId}`;
export const licenseServiceFactory = ({
orgDAL,
permissionService,
licenseDAL,
keyStore,
identityOrgMembershipDAL,
projectDAL
keyStore
}: TLicenseServiceFactoryDep) => {
let isValidLicense = false;
let instanceType = InstanceType.OnPrem;
let onPremFeatures: TFeatureSet = getDefaultOnPremFeatures();
let selfHostedLicense: TOfflineLicense | null = null;
const appCfg = getConfig();
const licenseServerCloudApi = setupLicenseRequestWithStore(
@ -86,20 +76,6 @@ export const licenseServiceFactory = ({
appCfg.LICENSE_KEY || ""
);
const syncLicenseKeyOnPremFeatures = async (shouldThrow: boolean = false) => {
logger.info("Start syncing license key features");
try {
const {
data: { currentPlan }
} = await licenseServerOnPremApi.request.get<{ currentPlan: TFeatureSet }>("/api/license/v1/plan");
onPremFeatures = currentPlan;
logger.info("Successfully synchronized license key features");
} catch (error) {
logger.error(error, "Failed to synchronize license key features");
if (shouldThrow) throw error;
}
};
const init = async () => {
try {
if (appCfg.LICENSE_SERVER_KEY) {
@ -113,7 +89,10 @@ export const licenseServiceFactory = ({
if (appCfg.LICENSE_KEY) {
const token = await licenseServerOnPremApi.refreshLicense();
if (token) {
await syncLicenseKeyOnPremFeatures(true);
const {
data: { currentPlan }
} = await licenseServerOnPremApi.request.get<{ currentPlan: TFeatureSet }>("/api/license/v1/plan");
onPremFeatures = currentPlan;
instanceType = InstanceType.EnterpriseOnPrem;
logger.info(`Instance type: ${InstanceType.EnterpriseOnPrem}`);
isValidLicense = true;
@ -146,7 +125,6 @@ export const licenseServiceFactory = ({
instanceType = InstanceType.EnterpriseOnPremOffline;
logger.info(`Instance type: ${InstanceType.EnterpriseOnPremOffline}`);
isValidLicense = true;
selfHostedLicense = contents.license;
return;
}
}
@ -159,24 +137,12 @@ export const licenseServiceFactory = ({
}
};
const initializeBackgroundSync = async () => {
if (appCfg.LICENSE_KEY) {
logger.info("Setting up background sync process for refresh onPremFeatures");
const job = new CronJob("*/10 * * * *", syncLicenseKeyOnPremFeatures);
job.start();
return job;
}
};
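The background sync above is a plain cron-driven refresh. A minimal standalone sketch of that scheduling follows; the syncFeatures callback body is a placeholder, not the real syncLicenseKeyOnPremFeatures.

import { CronJob } from "cron";

// Placeholder for the real feature-refresh call against the license server.
const syncFeatures = async () => {
  // fetch the current plan and update the in-memory feature set here
};

// "*/10 * * * *" fires at minute 0, 10, 20, 30, 40 and 50 of every hour.
const job = new CronJob("*/10 * * * *", syncFeatures);
job.start();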
const getPlan = async (orgId: string, projectId?: string) => {
logger.info(`getPlan: attempting to fetch plan for [orgId=${orgId}] [projectId=${projectId}]`);
try {
if (instanceType === InstanceType.Cloud) {
const cachedPlan = await keyStore.getItem(FEATURE_CACHE_KEY(orgId));
if (cachedPlan) {
logger.info(`getPlan: plan fetched from cache [orgId=${orgId}] [projectId=${projectId}]`);
return JSON.parse(cachedPlan) as TFeatureSet;
}
if (cachedPlan) return JSON.parse(cachedPlan) as TFeatureSet;
const org = await orgDAL.findOrgById(orgId);
if (!org) throw new NotFoundError({ message: `Organization with ID '${orgId}' not found` });
@ -204,8 +170,6 @@ export const licenseServiceFactory = ({
JSON.stringify(onPremFeatures)
);
return onPremFeatures;
} finally {
logger.info(`getPlan: Process done for [orgId=${orgId}] [projectId=${projectId}]`);
}
return onPremFeatures;
};
@ -379,21 +343,10 @@ export const licenseServiceFactory = ({
message: `Organization with ID '${orgId}' not found`
});
}
if (instanceType !== InstanceType.OnPrem && instanceType !== InstanceType.EnterpriseOnPremOffline) {
const { data } = await licenseServerCloudApi.request.get(
`/api/license-server/v1/customers/${organization.customerId}/cloud-plan/billing`
);
return data;
}
return {
currentPeriodStart: selfHostedLicense?.issuedAt ? Date.parse(selfHostedLicense?.issuedAt) / 1000 : undefined,
currentPeriodEnd: selfHostedLicense?.expiresAt ? Date.parse(selfHostedLicense?.expiresAt) / 1000 : undefined,
interval: "month",
intervalCount: 1,
amount: 0,
quantity: 1
};
const { data } = await licenseServerCloudApi.request.get(
`/api/license-server/v1/customers/${organization.customerId}/cloud-plan/billing`
);
return data;
};
// returns org current plan feature table
@ -407,41 +360,10 @@ export const licenseServiceFactory = ({
message: `Organization with ID '${orgId}' not found`
});
}
if (instanceType !== InstanceType.OnPrem && instanceType !== InstanceType.EnterpriseOnPremOffline) {
const { data } = await licenseServerCloudApi.request.get(
`/api/license-server/v1/customers/${organization.customerId}/cloud-plan/table`
);
return data;
}
const mappedRows = await Promise.all(
Object.values(BillingPlanRows).map(async ({ name, field }: { name: string; field: string }) => {
const allowed = onPremFeatures[field as keyof TFeatureSet];
let used = "-";
if (field === BillingPlanRows.MemberLimit.field) {
const orgMemberships = await orgDAL.countAllOrgMembers(orgId);
used = orgMemberships.toString();
} else if (field === BillingPlanRows.WorkspaceLimit.field) {
const projects = await projectDAL.find({ orgId });
used = projects.length.toString();
} else if (field === BillingPlanRows.IdentityLimit.field) {
const identities = await identityOrgMembershipDAL.countAllOrgIdentities({ orgId });
used = identities.toString();
}
return {
name,
allowed,
used
};
})
const { data } = await licenseServerCloudApi.request.get(
`/api/license-server/v1/customers/${organization.customerId}/cloud-plan/table`
);
return {
head: Object.values(BillingPlanTableHead),
rows: mappedRows
};
return data;
};
const getOrgBillingDetails = async ({ orgId, actor, actorId, actorAuthMethod, actorOrgId }: TGetOrgBillInfoDTO) => {
@ -683,7 +605,6 @@ export const licenseServiceFactory = ({
getOrgTaxInvoices,
getOrgTaxIds,
addOrgTaxId,
delOrgTaxId,
initializeBackgroundSync
delOrgTaxId
};
};
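The cloud-plan path in getPlan above is a read-through cache keyed per organization: return the cached feature set when present, otherwise fetch it and store it with a short TTL. Below is a generic sketch of that pattern, assuming a keyStore with getItem and a setItemWithExpiry(key, ttlSeconds, value) signature; the argument order is an assumption based on the dependency type shown earlier in this file, not confirmed by this diff.

type TFeatureKeyStore = {
  getItem: (key: string) => Promise<string | null>;
  setItemWithExpiry: (key: string, ttlSeconds: number, value: string) => Promise<unknown>;
};

const FEATURE_CACHE_KEY = (orgId: string) => `infisical-cloud-plan-${orgId}`;
const LICENSE_SERVER_CLOUD_PLAN_TTL = 30; // seconds

// Read-through cache: serve the cached plan if present, otherwise fetch and cache it.
const getPlanCached = async <T>(orgId: string, keyStore: TFeatureKeyStore, fetchPlan: () => Promise<T>): Promise<T> => {
  const cached = await keyStore.getItem(FEATURE_CACHE_KEY(orgId));
  if (cached) return JSON.parse(cached) as T;

  const plan = await fetchPlan();
  await keyStore.setItemWithExpiry(FEATURE_CACHE_KEY(orgId), LICENSE_SERVER_CLOUD_PLAN_TTL, JSON.stringify(plan));
  return plan;
};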

View File

@ -56,7 +56,7 @@ export type TFeatureSet = {
trial_end: null;
has_used_trial: true;
secretApproval: false;
secretRotation: false;
secretRotation: true;
caCrl: false;
instanceUserManagement: false;
externalKms: false;

View File

@ -44,28 +44,6 @@ export enum OrgPermissionGatewayActions {
DeleteGateways = "delete-gateways"
}
export enum OrgPermissionIdentityActions {
Read = "read",
Create = "create",
Edit = "edit",
Delete = "delete",
GrantPrivileges = "grant-privileges",
RevokeAuth = "revoke-auth",
CreateToken = "create-token",
GetToken = "get-token",
DeleteToken = "delete-token"
}
export enum OrgPermissionGroupActions {
Read = "read",
Create = "create",
Edit = "edit",
Delete = "delete",
GrantPrivileges = "grant-privileges",
AddMembers = "add-members",
RemoveMembers = "remove-members"
}
export enum OrgPermissionSubjects {
Workspace = "workspace",
Role = "role",
@ -102,10 +80,10 @@ export type OrgPermissionSet =
| [OrgPermissionActions, OrgPermissionSubjects.Sso]
| [OrgPermissionActions, OrgPermissionSubjects.Scim]
| [OrgPermissionActions, OrgPermissionSubjects.Ldap]
| [OrgPermissionGroupActions, OrgPermissionSubjects.Groups]
| [OrgPermissionActions, OrgPermissionSubjects.Groups]
| [OrgPermissionActions, OrgPermissionSubjects.SecretScanning]
| [OrgPermissionActions, OrgPermissionSubjects.Billing]
| [OrgPermissionIdentityActions, OrgPermissionSubjects.Identity]
| [OrgPermissionActions, OrgPermissionSubjects.Identity]
| [OrgPermissionActions, OrgPermissionSubjects.Kms]
| [OrgPermissionActions, OrgPermissionSubjects.AuditLogs]
| [OrgPermissionActions, OrgPermissionSubjects.ProjectTemplates]
@ -278,28 +256,20 @@ const buildAdminPermission = () => {
can(OrgPermissionActions.Edit, OrgPermissionSubjects.Ldap);
can(OrgPermissionActions.Delete, OrgPermissionSubjects.Ldap);
can(OrgPermissionGroupActions.Read, OrgPermissionSubjects.Groups);
can(OrgPermissionGroupActions.Create, OrgPermissionSubjects.Groups);
can(OrgPermissionGroupActions.Edit, OrgPermissionSubjects.Groups);
can(OrgPermissionGroupActions.Delete, OrgPermissionSubjects.Groups);
can(OrgPermissionGroupActions.GrantPrivileges, OrgPermissionSubjects.Groups);
can(OrgPermissionGroupActions.AddMembers, OrgPermissionSubjects.Groups);
can(OrgPermissionGroupActions.RemoveMembers, OrgPermissionSubjects.Groups);
can(OrgPermissionActions.Read, OrgPermissionSubjects.Groups);
can(OrgPermissionActions.Create, OrgPermissionSubjects.Groups);
can(OrgPermissionActions.Edit, OrgPermissionSubjects.Groups);
can(OrgPermissionActions.Delete, OrgPermissionSubjects.Groups);
can(OrgPermissionActions.Read, OrgPermissionSubjects.Billing);
can(OrgPermissionActions.Create, OrgPermissionSubjects.Billing);
can(OrgPermissionActions.Edit, OrgPermissionSubjects.Billing);
can(OrgPermissionActions.Delete, OrgPermissionSubjects.Billing);
can(OrgPermissionIdentityActions.Read, OrgPermissionSubjects.Identity);
can(OrgPermissionIdentityActions.Create, OrgPermissionSubjects.Identity);
can(OrgPermissionIdentityActions.Edit, OrgPermissionSubjects.Identity);
can(OrgPermissionIdentityActions.Delete, OrgPermissionSubjects.Identity);
can(OrgPermissionIdentityActions.GrantPrivileges, OrgPermissionSubjects.Identity);
can(OrgPermissionIdentityActions.RevokeAuth, OrgPermissionSubjects.Identity);
can(OrgPermissionIdentityActions.CreateToken, OrgPermissionSubjects.Identity);
can(OrgPermissionIdentityActions.GetToken, OrgPermissionSubjects.Identity);
can(OrgPermissionIdentityActions.DeleteToken, OrgPermissionSubjects.Identity);
can(OrgPermissionActions.Read, OrgPermissionSubjects.Identity);
can(OrgPermissionActions.Create, OrgPermissionSubjects.Identity);
can(OrgPermissionActions.Edit, OrgPermissionSubjects.Identity);
can(OrgPermissionActions.Delete, OrgPermissionSubjects.Identity);
can(OrgPermissionActions.Read, OrgPermissionSubjects.Kms);
can(OrgPermissionActions.Create, OrgPermissionSubjects.Kms);
@ -346,7 +316,7 @@ const buildMemberPermission = () => {
can(OrgPermissionActions.Create, OrgPermissionSubjects.Workspace);
can(OrgPermissionActions.Read, OrgPermissionSubjects.Member);
can(OrgPermissionGroupActions.Read, OrgPermissionSubjects.Groups);
can(OrgPermissionActions.Read, OrgPermissionSubjects.Groups);
can(OrgPermissionActions.Read, OrgPermissionSubjects.Role);
can(OrgPermissionActions.Read, OrgPermissionSubjects.Settings);
can(OrgPermissionActions.Read, OrgPermissionSubjects.Billing);
@ -357,10 +327,10 @@ const buildMemberPermission = () => {
can(OrgPermissionActions.Edit, OrgPermissionSubjects.SecretScanning);
can(OrgPermissionActions.Delete, OrgPermissionSubjects.SecretScanning);
can(OrgPermissionIdentityActions.Read, OrgPermissionSubjects.Identity);
can(OrgPermissionIdentityActions.Create, OrgPermissionSubjects.Identity);
can(OrgPermissionIdentityActions.Edit, OrgPermissionSubjects.Identity);
can(OrgPermissionIdentityActions.Delete, OrgPermissionSubjects.Identity);
can(OrgPermissionActions.Read, OrgPermissionSubjects.Identity);
can(OrgPermissionActions.Create, OrgPermissionSubjects.Identity);
can(OrgPermissionActions.Edit, OrgPermissionSubjects.Identity);
can(OrgPermissionActions.Delete, OrgPermissionSubjects.Identity);
can(OrgPermissionActions.Read, OrgPermissionSubjects.AuditLogs);
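The can(...) calls above come from CASL's AbilityBuilder: each role's permission set is a list of (action, subject) grants compiled into an ability. Here is a small self-contained sketch using an illustrative subset of the enums; the real file defines many more subjects and role builders.

import { AbilityBuilder, createMongoAbility, MongoAbility } from "@casl/ability";

// Illustrative subset of the org-level enums used above.
enum OrgPermissionActions { Read = "read", Create = "create", Edit = "edit", Delete = "delete" }
enum OrgPermissionSubjects { Identity = "identity", Groups = "groups", Billing = "billing" }

type OrgPermissionSet = [OrgPermissionActions, OrgPermissionSubjects];

const buildMemberPermission = () => {
  const { can, build } = new AbilityBuilder<MongoAbility<OrgPermissionSet>>(createMongoAbility);

  // Members can manage identities but only read groups and billing.
  can(OrgPermissionActions.Read, OrgPermissionSubjects.Identity);
  can(OrgPermissionActions.Create, OrgPermissionSubjects.Identity);
  can(OrgPermissionActions.Edit, OrgPermissionSubjects.Identity);
  can(OrgPermissionActions.Delete, OrgPermissionSubjects.Identity);
  can(OrgPermissionActions.Read, OrgPermissionSubjects.Groups);
  can(OrgPermissionActions.Read, OrgPermissionSubjects.Billing);

  return build();
};

const member = buildMemberPermission();
const canEditIdentity = member.can(OrgPermissionActions.Edit, OrgPermissionSubjects.Identity); // true
const canEditGroups = member.can(OrgPermissionActions.Edit, OrgPermissionSubjects.Groups); // false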

View File

@ -49,7 +49,6 @@ export const permissionDALFactory = (db: TDbClient) => {
.join(TableName.Organization, `${TableName.Organization}.id`, `${TableName.OrgMembership}.orgId`)
.select(
selectAllTableCols(TableName.OrgMembership),
db.ref("shouldUseNewPrivilegeSystem").withSchema(TableName.Organization),
db.ref("slug").withSchema(TableName.OrgRoles).withSchema(TableName.OrgRoles).as("customRoleSlug"),
db.ref("permissions").withSchema(TableName.OrgRoles),
db.ref("authEnforced").withSchema(TableName.Organization).as("orgAuthEnforced"),
@ -71,8 +70,7 @@ export const permissionDALFactory = (db: TDbClient) => {
OrgMembershipsSchema.extend({
permissions: z.unknown(),
orgAuthEnforced: z.boolean().optional().nullable(),
customRoleSlug: z.string().optional().nullable(),
shouldUseNewPrivilegeSystem: z.boolean()
customRoleSlug: z.string().optional().nullable()
}).parse(el),
childrenMapper: [
{
@ -120,9 +118,7 @@ export const permissionDALFactory = (db: TDbClient) => {
.select(selectAllTableCols(TableName.IdentityOrgMembership))
.select(db.ref("authEnforced").withSchema(TableName.Organization).as("orgAuthEnforced"))
.select("permissions")
.select(db.ref("shouldUseNewPrivilegeSystem").withSchema(TableName.Organization))
.first();
return membership;
} catch (error) {
throw new DatabaseError({ error, name: "GetOrgIdentityPermission" });
@ -672,8 +668,7 @@ export const permissionDALFactory = (db: TDbClient) => {
db.ref("authEnforced").withSchema(TableName.Organization).as("orgAuthEnforced"),
db.ref("orgId").withSchema(TableName.Project),
db.ref("type").withSchema(TableName.Project).as("projectType"),
db.ref("id").withSchema(TableName.Project).as("projectId"),
db.ref("shouldUseNewPrivilegeSystem").withSchema(TableName.Organization)
db.ref("id").withSchema(TableName.Project).as("projectId")
);
const [userPermission] = sqlNestRelationships({
@ -689,8 +684,7 @@ export const permissionDALFactory = (db: TDbClient) => {
groupMembershipCreatedAt,
groupMembershipUpdatedAt,
membershipUpdatedAt,
projectType,
shouldUseNewPrivilegeSystem
projectType
}) => ({
orgId,
orgAuthEnforced,
@ -700,8 +694,7 @@ export const permissionDALFactory = (db: TDbClient) => {
projectType,
id: membershipId || groupMembershipId,
createdAt: membershipCreatedAt || groupMembershipCreatedAt,
updatedAt: membershipUpdatedAt || groupMembershipUpdatedAt,
shouldUseNewPrivilegeSystem
updatedAt: membershipUpdatedAt || groupMembershipUpdatedAt
}),
childrenMapper: [
{
@ -1002,7 +995,6 @@ export const permissionDALFactory = (db: TDbClient) => {
`${TableName.IdentityProjectMembership}.projectId`,
`${TableName.Project}.id`
)
.join(TableName.Organization, `${TableName.Project}.orgId`, `${TableName.Organization}.id`)
.leftJoin(TableName.IdentityMetadata, (queryBuilder) => {
void queryBuilder
.on(`${TableName.Identity}.id`, `${TableName.IdentityMetadata}.identityId`)
@ -1020,7 +1012,6 @@ export const permissionDALFactory = (db: TDbClient) => {
db.ref("updatedAt").withSchema(TableName.IdentityProjectMembership).as("membershipUpdatedAt"),
db.ref("slug").withSchema(TableName.ProjectRoles).as("customRoleSlug"),
db.ref("permissions").withSchema(TableName.ProjectRoles),
db.ref("shouldUseNewPrivilegeSystem").withSchema(TableName.Organization),
db.ref("id").withSchema(TableName.IdentityProjectAdditionalPrivilege).as("identityApId"),
db.ref("permissions").withSchema(TableName.IdentityProjectAdditionalPrivilege).as("identityApPermissions"),
db
@ -1054,8 +1045,7 @@ export const permissionDALFactory = (db: TDbClient) => {
membershipUpdatedAt,
orgId,
identityName,
projectType,
shouldUseNewPrivilegeSystem
projectType
}) => ({
id: membershipId,
identityId,
@ -1065,7 +1055,6 @@ export const permissionDALFactory = (db: TDbClient) => {
updatedAt: membershipUpdatedAt,
orgId,
projectType,
shouldUseNewPrivilegeSystem,
// just a prefilled value
orgAuthEnforced: false
}),
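The DAL above leans on knex's ref(...).withSchema(...).as(...) to qualify and alias columns before handing flat rows to the nested-relationship mapper. A tiny hypothetical query showing the same aliasing pattern; the table and column names here are invented for illustration only.

import knex from "knex";

const db = knex({ client: "pg", connection: process.env.DATABASE_URL });

// Qualified, aliased selects keep the flat row keys predictable for later mapping.
const getMembershipRow = async (userId: string, orgId: string) =>
  db("org_memberships")
    .join("organizations", "organizations.id", "org_memberships.orgId")
    .where("org_memberships.userId", userId)
    .where("org_memberships.orgId", orgId)
    .select(
      "org_memberships.*",
      db.ref("authEnforced").withSchema("organizations").as("orgAuthEnforced"),
      db.ref("name").withSchema("organizations").as("orgName")
    )
    .first();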

View File

@ -3,11 +3,9 @@ import { ForbiddenError, MongoAbility, PureAbility, subject } from "@casl/abilit
import { z } from "zod";
import { TOrganizations } from "@app/db/schemas";
import { validatePermissionBoundary } from "@app/lib/casl/boundary";
import { BadRequestError, ForbiddenRequestError, UnauthorizedError } from "@app/lib/errors";
import { ActorAuthMethod, AuthMethod } from "@app/services/auth/auth-type";
import { OrgPermissionSet } from "./org-permission";
import {
ProjectPermissionSecretActions,
ProjectPermissionSet,
@ -133,12 +131,12 @@ function validateOrgSSO(actorAuthMethod: ActorAuthMethod, isOrgSsoEnforced: TOrg
}
}
const escapeHandlebarsMissingDict = (obj: Record<string, string>, key: string) => {
const escapeHandlebarsMissingMetadata = (obj: Record<string, string>) => {
const handler = {
get(target: Record<string, string>, prop: string) {
if (!Object.hasOwn(target, prop)) {
if (!(prop in target)) {
// eslint-disable-next-line no-param-reassign
target[prop] = `{{${key}.${prop}}}`; // Add missing key as an "own" property
target[prop] = `{{identity.metadata.${prop}}}`; // Add missing key as an "own" property
}
return target[prop];
}
@ -147,57 +145,4 @@ const escapeHandlebarsMissingDict = (obj: Record<string, string>, key: string) =
return new Proxy(obj, handler);
};
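This proxy exists so that a metadata key referenced by a permission template but absent from the actor's metadata is echoed back as its own placeholder instead of collapsing to an empty string. A small standalone demo of that behavior, relying on handlebars resolving the key through a normal property read, which the assignment inside the get trap turns into an own property:

import handlebars from "handlebars";

// Same idea as the helper above: missing keys are backfilled with their own placeholder.
const escapeMissing = (obj: Record<string, string>, key: string) =>
  new Proxy(obj, {
    get(target, prop: string) {
      if (!Object.hasOwn(target, prop)) {
        // eslint-disable-next-line no-param-reassign
        target[prop] = `{{${key}.${prop}}}`;
      }
      return target[prop];
    }
  });

const metadata = escapeMissing({ team: "platform" }, "identity.metadata");
const render = handlebars.compile("{{identity.metadata.team}} / {{identity.metadata.region}}", { data: false });
const out = render({ identity: { metadata } });
// out === "platform / {{identity.metadata.region}}"; the unknown key survives for a later pass.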
// This function serves as a transition layer between the old and new privilege management systems.
// The old privilege management system is based on the actor having more privileges than the managed permission;
// the new privilege management system is based on the actor having the appropriate permission to perform the privilege change,
// regardless of the actor's privilege level.
const validatePrivilegeChangeOperation = (
shouldUseNewPrivilegeSystem: boolean,
opAction: OrgPermissionSet[0] | ProjectPermissionSet[0],
opSubject: OrgPermissionSet[1] | ProjectPermissionSet[1],
actorPermission: MongoAbility,
managedPermission: MongoAbility
) => {
if (shouldUseNewPrivilegeSystem) {
if (actorPermission.can(opAction, opSubject)) {
return {
isValid: true,
missingPermissions: []
};
}
return {
isValid: false,
missingPermissions: [
{
action: opAction,
subject: opSubject
}
]
};
}
// if not, we check if the actor is indeed more privileged than the managed permission - this is the old system
return validatePermissionBoundary(actorPermission, managedPermission);
};
const constructPermissionErrorMessage = (
baseMessage: string,
shouldUseNewPrivilegeSystem: boolean,
opAction: OrgPermissionSet[0] | ProjectPermissionSet[0],
opSubject: OrgPermissionSet[1] | ProjectPermissionSet[1]
) => {
return `${baseMessage}${
shouldUseNewPrivilegeSystem
? `. Actor is missing permission ${opAction as string} on ${opSubject as string}`
: ". Actor privilege level is not high enough to perform this action"
}`;
};
export {
constructPermissionErrorMessage,
escapeHandlebarsMissingDict,
isAuthMethodSaml,
validateOrgSSO,
validatePrivilegeChangeOperation
};
export { escapeHandlebarsMissingMetadata, isAuthMethodSaml, validateOrgSSO };

View File

@ -1,6 +1,5 @@
import { createMongoAbility, MongoAbility, RawRuleOf } from "@casl/ability";
import { PackRule, unpackRules } from "@casl/ability/extra";
import { requestContext } from "@fastify/request-context";
import { MongoQuery } from "@ucast/mongo2js";
import handlebars from "handlebars";
@ -23,7 +22,7 @@ import { TServiceTokenDALFactory } from "@app/services/service-token/service-tok
import { orgAdminPermissions, orgMemberPermissions, orgNoAccessPermissions, OrgPermissionSet } from "./org-permission";
import { TPermissionDALFactory } from "./permission-dal";
import { escapeHandlebarsMissingDict, validateOrgSSO } from "./permission-fns";
import { escapeHandlebarsMissingMetadata, validateOrgSSO } from "./permission-fns";
import {
TBuildOrgPermissionDTO,
TBuildProjectPermissionDTO,
@ -244,13 +243,13 @@ export const permissionServiceFactory = ({
const rules = buildProjectPermissionRules(rolePermissions.concat(additionalPrivileges));
const templatedRules = handlebars.compile(JSON.stringify(rules), { data: false });
const unescapedMetadata = objectify(
userProjectPermission.metadata,
(i) => i.key,
(i) => i.value
const metadataKeyValuePair = escapeHandlebarsMissingMetadata(
objectify(
userProjectPermission.metadata,
(i) => i.key,
(i) => i.value
)
);
const metadataKeyValuePair = escapeHandlebarsMissingDict(unescapedMetadata, "identity.metadata");
requestContext.set("identityPermissionMetadata", { metadata: unescapedMetadata });
const interpolateRules = templatedRules(
{
identity: {
@ -318,26 +317,20 @@ export const permissionServiceFactory = ({
const rules = buildProjectPermissionRules(rolePermissions.concat(additionalPrivileges));
const templatedRules = handlebars.compile(JSON.stringify(rules), { data: false });
const unescapedIdentityAuthInfo = requestContext.get("identityAuthInfo");
const unescapedMetadata = objectify(
identityProjectPermission.metadata,
(i) => i.key,
(i) => i.value
const metadataKeyValuePair = escapeHandlebarsMissingMetadata(
objectify(
identityProjectPermission.metadata,
(i) => i.key,
(i) => i.value
)
);
const identityAuthInfo =
unescapedIdentityAuthInfo?.identityId === identityId && unescapedIdentityAuthInfo
? escapeHandlebarsMissingDict(unescapedIdentityAuthInfo as never, "identity.auth")
: {};
const metadataKeyValuePair = escapeHandlebarsMissingDict(unescapedMetadata, "identity.metadata");
requestContext.set("identityPermissionMetadata", { metadata: unescapedMetadata, auth: unescapedIdentityAuthInfo });
const interpolateRules = templatedRules(
{
identity: {
id: identityProjectPermission.identityId,
username: identityProjectPermission.username,
metadata: metadataKeyValuePair,
auth: identityAuthInfo
metadata: metadataKeyValuePair
}
},
{ data: false }
@ -397,18 +390,14 @@ export const permissionServiceFactory = ({
const scopes = ServiceTokenScopes.parse(serviceToken.scopes || []);
return {
permission: buildServiceTokenProjectPermission(scopes, serviceToken.permissions),
membership: {
shouldUseNewPrivilegeSystem: true
}
membership: undefined
};
};
type TProjectPermissionRT<T extends ActorType> = T extends ActorType.SERVICE
? {
permission: MongoAbility<ProjectPermissionSet, MongoQuery>;
membership: {
shouldUseNewPrivilegeSystem: boolean;
};
membership: undefined;
hasRole: (arg: string) => boolean;
} // service token doesn't have both membership and roles
: {
@ -417,7 +406,6 @@ export const permissionServiceFactory = ({
orgAuthEnforced: boolean | null | undefined;
orgId: string;
roles: Array<{ role: string }>;
shouldUseNewPrivilegeSystem: boolean;
};
hasRole: (role: string) => boolean;
};
@ -436,13 +424,12 @@ export const permissionServiceFactory = ({
const rules = buildProjectPermissionRules(rolePermissions.concat(additionalPrivileges));
const templatedRules = handlebars.compile(JSON.stringify(rules), { data: false });
const metadataKeyValuePair = escapeHandlebarsMissingDict(
const metadataKeyValuePair = escapeHandlebarsMissingMetadata(
objectify(
userProjectPermission.metadata,
(i) => i.key,
(i) => i.value
),
"identity.metadata"
)
);
const interpolateRules = templatedRules(
{
@ -482,14 +469,14 @@ export const permissionServiceFactory = ({
const rules = buildProjectPermissionRules(rolePermissions.concat(additionalPrivileges));
const templatedRules = handlebars.compile(JSON.stringify(rules), { data: false });
const metadataKeyValuePair = escapeHandlebarsMissingDict(
const metadataKeyValuePair = escapeHandlebarsMissingMetadata(
objectify(
identityProjectPermission.metadata,
(i) => i.key,
(i) => i.value
),
"identity.metadata"
)
);
const interpolateRules = templatedRules(
{
identity: {
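Stepping back, the flow in this file is: serialize the CASL rules to JSON, run that JSON through handlebars with the identity's metadata as context, parse the interpolated JSON back, and build the ability from it. A compact round-trip sketch under those assumptions; the sample rule and metadata values are invented for illustration.

import { createMongoAbility, subject } from "@casl/ability";
import handlebars from "handlebars";

// A single templated rule: the allowed environment comes from identity metadata.
const rawRules = [
  { action: "read", subject: "Secret", conditions: { environment: "{{identity.metadata.env}}" } }
];

// Serialize -> interpolate -> parse back, mirroring the templatedRules usage above.
const templatedRules = handlebars.compile(JSON.stringify(rawRules), { data: false });
const interpolatedRules = JSON.parse(
  templatedRules({ identity: { metadata: { env: "prod" } } }, { data: false })
);

const permission = createMongoAbility(interpolatedRules);
const canReadProd = permission.can("read", subject("Secret", { environment: "prod" })); // true
const canReadDev = permission.can("read", subject("Secret", { environment: "dev" })); // false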

View File

@ -43,30 +43,6 @@ export enum ProjectPermissionDynamicSecretActions {
Lease = "lease"
}
export enum ProjectPermissionIdentityActions {
Read = "read",
Create = "create",
Edit = "edit",
Delete = "delete",
GrantPrivileges = "grant-privileges"
}
export enum ProjectPermissionMemberActions {
Read = "read",
Create = "create",
Edit = "edit",
Delete = "delete",
GrantPrivileges = "grant-privileges"
}
export enum ProjectPermissionGroupActions {
Read = "read",
Create = "create",
Edit = "edit",
Delete = "delete",
GrantPrivileges = "grant-privileges"
}
export enum ProjectPermissionSecretSyncActions {
Read = "read",
Create = "create",
@ -79,11 +55,11 @@ export enum ProjectPermissionSecretSyncActions {
export enum ProjectPermissionSecretRotationActions {
Read = "read",
ReadGeneratedCredentials = "read-generated-credentials",
ReadCredentials = "read-credentials",
Create = "create",
Edit = "edit",
Delete = "delete",
RotateSecrets = "rotate-secrets"
Rotate = "rotate"
}
export enum ProjectPermissionKmipActions {
@ -151,11 +127,6 @@ export type SecretImportSubjectFields = {
secretPath: string;
};
export type SecretRotationsSubjectFields = {
environment: string;
secretPath: string;
};
export type IdentityManagementSubjectFields = {
identityId: string;
};
@ -188,8 +159,8 @@ export type ProjectPermissionSet =
]
| [ProjectPermissionActions, ProjectPermissionSub.Role]
| [ProjectPermissionActions, ProjectPermissionSub.Tags]
| [ProjectPermissionMemberActions, ProjectPermissionSub.Member]
| [ProjectPermissionGroupActions, ProjectPermissionSub.Groups]
| [ProjectPermissionActions, ProjectPermissionSub.Member]
| [ProjectPermissionActions, ProjectPermissionSub.Groups]
| [ProjectPermissionActions, ProjectPermissionSub.Integrations]
| [ProjectPermissionActions, ProjectPermissionSub.Webhooks]
| [ProjectPermissionActions, ProjectPermissionSub.AuditLogs]
@ -198,15 +169,9 @@ export type ProjectPermissionSet =
| [ProjectPermissionActions, ProjectPermissionSub.Settings]
| [ProjectPermissionActions, ProjectPermissionSub.ServiceTokens]
| [ProjectPermissionActions, ProjectPermissionSub.SecretApproval]
| [ProjectPermissionSecretRotationActions, ProjectPermissionSub.SecretRotation]
| [
ProjectPermissionSecretRotationActions,
(
| ProjectPermissionSub.SecretRotation
| (ForcedSubject<ProjectPermissionSub.SecretRotation> & SecretRotationsSubjectFields)
)
]
| [
ProjectPermissionIdentityActions,
ProjectPermissionActions,
ProjectPermissionSub.Identity | (ForcedSubject<ProjectPermissionSub.Identity> & IdentityManagementSubjectFields)
]
| [ProjectPermissionActions, ProjectPermissionSub.CertificateAuthorities]
@ -320,6 +285,12 @@ const GeneralPermissionSchema = [
"Describe what action an entity can take."
)
}),
z.object({
subject: z.literal(ProjectPermissionSub.SecretRotation).describe("The entity this permission pertains to."),
action: CASL_ACTION_SCHEMA_NATIVE_ENUM(ProjectPermissionSecretRotationActions).describe(
"Describe what action an entity can take."
)
}),
z.object({
subject: z.literal(ProjectPermissionSub.SecretRollback).describe("The entity this permission pertains to."),
action: CASL_ACTION_SCHEMA_ENUM([ProjectPermissionActions.Read, ProjectPermissionActions.Create]).describe(
@ -328,13 +299,13 @@ const GeneralPermissionSchema = [
}),
z.object({
subject: z.literal(ProjectPermissionSub.Member).describe("The entity this permission pertains to."),
action: CASL_ACTION_SCHEMA_NATIVE_ENUM(ProjectPermissionMemberActions).describe(
action: CASL_ACTION_SCHEMA_NATIVE_ENUM(ProjectPermissionActions).describe(
"Describe what action an entity can take."
)
}),
z.object({
subject: z.literal(ProjectPermissionSub.Groups).describe("The entity this permission pertains to."),
action: CASL_ACTION_SCHEMA_NATIVE_ENUM(ProjectPermissionGroupActions).describe(
action: CASL_ACTION_SCHEMA_NATIVE_ENUM(ProjectPermissionActions).describe(
"Describe what action an entity can take."
)
}),
@ -501,12 +472,6 @@ export const ProjectPermissionV1Schema = z.discriminatedUnion("subject", [
"Describe what action an entity can take."
)
}),
z.object({
subject: z.literal(ProjectPermissionSub.SecretRotation).describe("The entity this permission pertains to."),
action: CASL_ACTION_SCHEMA_NATIVE_ENUM(ProjectPermissionActions).describe(
"Describe what action an entity can take."
)
}),
...GeneralPermissionSchema
]);
@ -554,23 +519,13 @@ export const ProjectPermissionV2Schema = z.discriminatedUnion("subject", [
z.object({
subject: z.literal(ProjectPermissionSub.Identity).describe("The entity this permission pertains to."),
inverted: z.boolean().optional().describe("Whether rule allows or forbids."),
action: CASL_ACTION_SCHEMA_NATIVE_ENUM(ProjectPermissionIdentityActions).describe(
action: CASL_ACTION_SCHEMA_NATIVE_ENUM(ProjectPermissionActions).describe(
"Describe what action an entity can take."
),
conditions: IdentityManagementConditionSchema.describe(
"When specified, only matching conditions will be allowed to access given resource."
).optional()
}),
z.object({
subject: z.literal(ProjectPermissionSub.SecretRotation).describe("The entity this permission pertains to."),
inverted: z.boolean().optional().describe("Whether rule allows or forbids."),
action: CASL_ACTION_SCHEMA_NATIVE_ENUM(ProjectPermissionSecretRotationActions).describe(
"Describe what action an entity can take."
),
conditions: SecretConditionV1Schema.describe(
"When specified, only matching conditions will be allowed to access given resource."
).optional()
}),
...GeneralPermissionSchema
]);
@ -584,9 +539,12 @@ const buildAdminPermissionRules = () => {
ProjectPermissionSub.SecretFolders,
ProjectPermissionSub.SecretImports,
ProjectPermissionSub.SecretApproval,
ProjectPermissionSub.Member,
ProjectPermissionSub.Groups,
ProjectPermissionSub.Role,
ProjectPermissionSub.Integrations,
ProjectPermissionSub.Webhooks,
ProjectPermissionSub.Identity,
ProjectPermissionSub.ServiceTokens,
ProjectPermissionSub.Settings,
ProjectPermissionSub.Environments,
@ -613,39 +571,6 @@ const buildAdminPermissionRules = () => {
);
});
can(
[
ProjectPermissionMemberActions.Create,
ProjectPermissionMemberActions.Edit,
ProjectPermissionMemberActions.Delete,
ProjectPermissionMemberActions.Read,
ProjectPermissionMemberActions.GrantPrivileges
],
ProjectPermissionSub.Member
);
can(
[
ProjectPermissionGroupActions.Create,
ProjectPermissionGroupActions.Edit,
ProjectPermissionGroupActions.Delete,
ProjectPermissionGroupActions.Read,
ProjectPermissionGroupActions.GrantPrivileges
],
ProjectPermissionSub.Groups
);
can(
[
ProjectPermissionIdentityActions.Create,
ProjectPermissionIdentityActions.Edit,
ProjectPermissionIdentityActions.Delete,
ProjectPermissionIdentityActions.Read,
ProjectPermissionIdentityActions.GrantPrivileges
],
ProjectPermissionSub.Identity
);
can(
[
ProjectPermissionSecretActions.DescribeAndReadValue,
@ -713,8 +638,8 @@ const buildAdminPermissionRules = () => {
ProjectPermissionSecretRotationActions.Edit,
ProjectPermissionSecretRotationActions.Delete,
ProjectPermissionSecretRotationActions.Read,
ProjectPermissionSecretRotationActions.ReadGeneratedCredentials,
ProjectPermissionSecretRotationActions.RotateSecrets
ProjectPermissionSecretRotationActions.ReadCredentials,
ProjectPermissionSecretRotationActions.Rotate
],
ProjectPermissionSub.SecretRotation
);
@ -772,9 +697,9 @@ const buildMemberPermissionRules = () => {
can([ProjectPermissionActions.Read, ProjectPermissionActions.Create], ProjectPermissionSub.SecretRollback);
can([ProjectPermissionMemberActions.Read, ProjectPermissionMemberActions.Create], ProjectPermissionSub.Member);
can([ProjectPermissionActions.Read, ProjectPermissionActions.Create], ProjectPermissionSub.Member);
can([ProjectPermissionGroupActions.Read], ProjectPermissionSub.Groups);
can([ProjectPermissionActions.Read], ProjectPermissionSub.Groups);
can(
[
@ -798,10 +723,10 @@ const buildMemberPermissionRules = () => {
can(
[
ProjectPermissionIdentityActions.Read,
ProjectPermissionIdentityActions.Edit,
ProjectPermissionIdentityActions.Create,
ProjectPermissionIdentityActions.Delete
ProjectPermissionActions.Read,
ProjectPermissionActions.Edit,
ProjectPermissionActions.Create,
ProjectPermissionActions.Delete
],
ProjectPermissionSub.Identity
);
@ -915,12 +840,12 @@ const buildViewerPermissionRules = () => {
can(ProjectPermissionActions.Read, ProjectPermissionSub.SecretApproval);
can(ProjectPermissionActions.Read, ProjectPermissionSub.SecretRollback);
can(ProjectPermissionSecretRotationActions.Read, ProjectPermissionSub.SecretRotation);
can(ProjectPermissionMemberActions.Read, ProjectPermissionSub.Member);
can(ProjectPermissionGroupActions.Read, ProjectPermissionSub.Groups);
can(ProjectPermissionActions.Read, ProjectPermissionSub.Member);
can(ProjectPermissionActions.Read, ProjectPermissionSub.Groups);
can(ProjectPermissionActions.Read, ProjectPermissionSub.Role);
can(ProjectPermissionActions.Read, ProjectPermissionSub.Integrations);
can(ProjectPermissionActions.Read, ProjectPermissionSub.Webhooks);
can(ProjectPermissionIdentityActions.Read, ProjectPermissionSub.Identity);
can(ProjectPermissionActions.Read, ProjectPermissionSub.Identity);
can(ProjectPermissionActions.Read, ProjectPermissionSub.ServiceTokens);
can(ProjectPermissionActions.Read, ProjectPermissionSub.Settings);
can(ProjectPermissionActions.Read, ProjectPermissionSub.Environments);
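The schemas above validate permission rules as a zod discriminated union keyed on subject, with the allowed actions constrained per subject. A trimmed-down sketch of that shape follows; CASL_ACTION_SCHEMA_NATIVE_ENUM is assumed to be a thin wrapper over z.nativeEnum, and the enums and condition schema here are a local illustrative subset rather than the real definitions.

import { z } from "zod";

enum ProjectPermissionActions { Read = "read", Create = "create", Edit = "edit", Delete = "delete" }
enum ProjectPermissionSub { Identity = "identity", Member = "member" }

// Condition schema loosely modeled on the identity condition used above.
const IdentityManagementConditionSchema = z
  .object({ identityId: z.union([z.string(), z.object({ $eq: z.string(), $in: z.array(z.string()) }).partial()]) })
  .partial();

const PermissionRuleSchema = z.discriminatedUnion("subject", [
  z.object({
    subject: z.literal(ProjectPermissionSub.Identity).describe("The entity this permission pertains to."),
    inverted: z.boolean().optional().describe("Whether rule allows or forbids."),
    action: z.nativeEnum(ProjectPermissionActions).describe("Describe what action an entity can take."),
    conditions: IdentityManagementConditionSchema.optional()
  }),
  z.object({
    subject: z.literal(ProjectPermissionSub.Member),
    action: z.nativeEnum(ProjectPermissionActions)
  })
]);

// Accepts a well-formed rule and rejects unknown subjects or actions at the boundary.
const rule = PermissionRuleSchema.parse({ subject: "identity", action: "edit", conditions: { identityId: "abc-123" } });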

View File

@ -2,20 +2,15 @@ import { ForbiddenError, MongoAbility, RawRuleOf } from "@casl/ability";
import { PackRule, packRules, unpackRules } from "@casl/ability/extra";
import { ActionProjectType, TableName } from "@app/db/schemas";
import { BadRequestError, NotFoundError, PermissionBoundaryError } from "@app/lib/errors";
import { validatePermissionBoundary } from "@app/lib/casl/boundary";
import { BadRequestError, ForbiddenRequestError, NotFoundError } from "@app/lib/errors";
import { ms } from "@app/lib/ms";
import { validateHandlebarTemplate } from "@app/lib/template/validate-handlebars";
import { UnpackedPermissionSchema } from "@app/server/routes/sanitizedSchema/permission";
import { ActorType } from "@app/services/auth/auth-type";
import { TProjectMembershipDALFactory } from "@app/services/project-membership/project-membership-dal";
import { constructPermissionErrorMessage, validatePrivilegeChangeOperation } from "../permission/permission-fns";
import { TPermissionServiceFactory } from "../permission/permission-service";
import {
ProjectPermissionMemberActions,
ProjectPermissionSet,
ProjectPermissionSub
} from "../permission/project-permission";
import { ProjectPermissionActions, ProjectPermissionSet, ProjectPermissionSub } from "../permission/project-permission";
import { TProjectUserAdditionalPrivilegeDALFactory } from "./project-user-additional-privilege-dal";
import {
ProjectUserAdditionalPrivilegeTemporaryMode,
@ -68,8 +63,8 @@ export const projectUserAdditionalPrivilegeServiceFactory = ({
actorOrgId,
actionProjectType: ActionProjectType.Any
});
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionMemberActions.Edit, ProjectPermissionSub.Member);
const { permission: targetUserPermission, membership } = await permissionService.getProjectPermission({
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Edit, ProjectPermissionSub.Member);
const { permission: targetUserPermission } = await permissionService.getProjectPermission({
actor: ActorType.USER,
actorId: projectMembership.userId,
projectId: projectMembership.projectId,
@ -81,21 +76,11 @@ export const projectUserAdditionalPrivilegeServiceFactory = ({
// we need to validate that the privilege given is not higher than the assigning user's permission
// @ts-expect-error this error is expected because one rule definition is very precise while the other is broader; both are valid CASL rules
targetUserPermission.update(targetUserPermission.rules.concat(customPermission));
const permissionBoundary = validatePrivilegeChangeOperation(
membership.shouldUseNewPrivilegeSystem,
ProjectPermissionMemberActions.GrantPrivileges,
ProjectPermissionSub.Member,
permission,
targetUserPermission
);
const permissionBoundary = validatePermissionBoundary(permission, targetUserPermission);
if (!permissionBoundary.isValid)
throw new PermissionBoundaryError({
message: constructPermissionErrorMessage(
"Failed to update more privileged user",
membership.shouldUseNewPrivilegeSystem,
ProjectPermissionMemberActions.GrantPrivileges,
ProjectPermissionSub.Member
),
throw new ForbiddenRequestError({
name: "PermissionBoundaryError",
message: "Failed to update more privileged user",
details: { missingPermissions: permissionBoundary.missingPermissions }
});
@ -107,10 +92,6 @@ export const projectUserAdditionalPrivilegeServiceFactory = ({
if (existingSlug)
throw new BadRequestError({ message: `Additional privilege with provided slug ${slug} already exists` });
validateHandlebarTemplate("User Additional Privilege Create", JSON.stringify(customPermission || []), {
allowedExpressions: (val) => val.includes("identity.")
});
const packedPermission = JSON.stringify(packRules(customPermission));
if (!dto.isTemporary) {
const additionalPrivilege = await projectUserAdditionalPrivilegeDAL.create({
@ -165,7 +146,7 @@ export const projectUserAdditionalPrivilegeServiceFactory = ({
message: `Project membership for user with ID '${userPrivilege.userId}' not found in project with ID '${userPrivilege.projectId}'`
});
const { permission, membership } = await permissionService.getProjectPermission({
const { permission } = await permissionService.getProjectPermission({
actor,
actorId,
projectId: projectMembership.projectId,
@ -173,7 +154,7 @@ export const projectUserAdditionalPrivilegeServiceFactory = ({
actorOrgId,
actionProjectType: ActionProjectType.Any
});
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionMemberActions.Edit, ProjectPermissionSub.Member);
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Edit, ProjectPermissionSub.Member);
const { permission: targetUserPermission } = await permissionService.getProjectPermission({
actor: ActorType.USER,
actorId: projectMembership.userId,
@ -186,21 +167,11 @@ export const projectUserAdditionalPrivilegeServiceFactory = ({
// we need to validate that the privilege given is not higher than the assigning user's permission
// @ts-expect-error this error is expected because one rule definition is very precise while the other is broader; both are valid CASL rules
targetUserPermission.update(targetUserPermission.rules.concat(dto.permissions || []));
const permissionBoundary = validatePrivilegeChangeOperation(
membership.shouldUseNewPrivilegeSystem,
ProjectPermissionMemberActions.GrantPrivileges,
ProjectPermissionSub.Member,
permission,
targetUserPermission
);
const permissionBoundary = validatePermissionBoundary(permission, targetUserPermission);
if (!permissionBoundary.isValid)
throw new PermissionBoundaryError({
message: constructPermissionErrorMessage(
"Failed to update more privileged user",
membership.shouldUseNewPrivilegeSystem,
ProjectPermissionMemberActions.GrantPrivileges,
ProjectPermissionSub.Member
),
throw new ForbiddenRequestError({
name: "PermissionBoundaryError",
message: "Failed to update more privileged identity",
details: { missingPermissions: permissionBoundary.missingPermissions }
});
@ -214,10 +185,6 @@ export const projectUserAdditionalPrivilegeServiceFactory = ({
throw new BadRequestError({ message: `Additional privilege with provided slug ${dto.slug} already exists` });
}
validateHandlebarTemplate("User Additional Privilege Update", JSON.stringify(dto.permissions || []), {
allowedExpressions: (val) => val.includes("identity.")
});
const isTemporary = typeof dto?.isTemporary !== "undefined" ? dto.isTemporary : userPrivilege.isTemporary;
const packedPermission = dto.permissions && JSON.stringify(packRules(dto.permissions));
@ -277,7 +244,7 @@ export const projectUserAdditionalPrivilegeServiceFactory = ({
actorOrgId,
actionProjectType: ActionProjectType.Any
});
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionMemberActions.Edit, ProjectPermissionSub.Member);
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Edit, ProjectPermissionSub.Member);
const deletedPrivilege = await projectUserAdditionalPrivilegeDAL.deleteById(userPrivilege.id);
return {
@ -314,7 +281,7 @@ export const projectUserAdditionalPrivilegeServiceFactory = ({
actorOrgId,
actionProjectType: ActionProjectType.Any
});
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionMemberActions.Read, ProjectPermissionSub.Member);
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.Member);
return {
...userPrivilege,
@ -341,7 +308,7 @@ export const projectUserAdditionalPrivilegeServiceFactory = ({
actorOrgId,
actionProjectType: ActionProjectType.Any
});
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionMemberActions.Read, ProjectPermissionSub.Member);
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.Member);
const userPrivileges = await projectUserAdditionalPrivilegeDAL.find(
{

View File

@ -29,9 +29,15 @@ export const parseScimFilter = (filterToParse: string | undefined) => {
attributeName = "name";
}
return { [attributeName]: parsedValue.replaceAll('"', "") };
return { [attributeName]: parsedValue.replace(/"/g, "") };
};
export function extractScimValueFromPath(path: string): string | null {
const regex = /members\[value eq "([^"]+)"\]/;
const match = path.match(regex);
return match ? match[1] : null;
}
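A quick usage check for extractScimValueFromPath: it pulls the quoted value out of a SCIM members[value eq "..."] path expression and returns null when that clause is absent. The example paths below are invented.

import assert from "node:assert";

assert.strictEqual(extractScimValueFromPath('members[value eq "5f9c2d1e"]'), "5f9c2d1e");
assert.strictEqual(extractScimValueFromPath('members[type eq "User"]'), null);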
export const buildScimUser = ({
orgMembershipId,
username,

View File

@@ -62,8 +62,7 @@ export const secretApprovalPolicyServiceFactory = ({
projectId,
secretPath,
environment,
enforcementLevel,
allowedSelfApprovals
enforcementLevel
}: TCreateSapDTO) => {
const groupApprovers = approvers
?.filter((approver) => approver.type === ApproverType.Group)
@@ -114,8 +113,7 @@ export const secretApprovalPolicyServiceFactory = ({
approvals,
secretPath,
name,
enforcementLevel,
allowedSelfApprovals
enforcementLevel
},
tx
);
@@ -174,8 +172,7 @@ export const secretApprovalPolicyServiceFactory = ({
actorAuthMethod,
approvals,
secretPolicyId,
enforcementLevel,
allowedSelfApprovals
enforcementLevel
}: TUpdateSapDTO) => {
const groupApprovers = approvers
?.filter((approver) => approver.type === ApproverType.Group)
@@ -221,8 +218,7 @@ export const secretApprovalPolicyServiceFactory = ({
approvals,
secretPath,
name,
enforcementLevel,
allowedSelfApprovals
enforcementLevel
},
tx
);

View File

@@ -10,7 +10,6 @@ export type TCreateSapDTO = {
projectId: string;
name: string;
enforcementLevel: EnforcementLevel;
allowedSelfApprovals: boolean;
} & Omit<TProjectPermission, "projectId">;
export type TUpdateSapDTO = {
@@ -20,7 +19,6 @@ export type TUpdateSapDTO = {
approvers: ({ type: ApproverType.Group; id: string } | { type: ApproverType.User; id?: string; name?: string })[];
name?: string;
enforcementLevel?: EnforcementLevel;
allowedSelfApprovals?: boolean;
} & Omit<TProjectPermission, "projectId">;
export type TDeleteSapDTO = {

View File

@@ -112,7 +112,6 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
tx.ref("secretPath").withSchema(TableName.SecretApprovalPolicy).as("policySecretPath"),
tx.ref("envId").withSchema(TableName.SecretApprovalPolicy).as("policyEnvId"),
tx.ref("enforcementLevel").withSchema(TableName.SecretApprovalPolicy).as("policyEnforcementLevel"),
tx.ref("allowedSelfApprovals").withSchema(TableName.SecretApprovalPolicy).as("policyAllowedSelfApprovals"),
tx.ref("approvals").withSchema(TableName.SecretApprovalPolicy).as("policyApprovals"),
tx.ref("deletedAt").withSchema(TableName.SecretApprovalPolicy).as("policyDeletedAt")
);
@@ -151,8 +150,7 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
secretPath: el.policySecretPath,
enforcementLevel: el.policyEnforcementLevel,
envId: el.policyEnvId,
deletedAt: el.policyDeletedAt,
allowedSelfApprovals: el.policyAllowedSelfApprovals
deletedAt: el.policyDeletedAt
}
}),
childrenMapper: [
@@ -338,7 +336,6 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
),
db.ref("secretPath").withSchema(TableName.SecretApprovalPolicy).as("policySecretPath"),
db.ref("enforcementLevel").withSchema(TableName.SecretApprovalPolicy).as("policyEnforcementLevel"),
db.ref("allowedSelfApprovals").withSchema(TableName.SecretApprovalPolicy).as("policyAllowedSelfApprovals"),
db.ref("approvals").withSchema(TableName.SecretApprovalPolicy).as("policyApprovals"),
db.ref("approverUserId").withSchema(TableName.SecretApprovalPolicyApprover),
db.ref("userId").withSchema(TableName.UserGroupMembership).as("approverGroupUserId"),
@@ -367,8 +364,7 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
name: el.policyName,
approvals: el.policyApprovals,
secretPath: el.policySecretPath,
enforcementLevel: el.policyEnforcementLevel,
allowedSelfApprovals: el.policyAllowedSelfApprovals
enforcementLevel: el.policyEnforcementLevel
},
committerUser: {
userId: el.committerUserId,
@@ -486,7 +482,6 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
`DENSE_RANK() OVER (partition by ${TableName.Environment}."projectId" ORDER BY ${TableName.SecretApprovalRequest}."id" DESC) as rank`
),
db.ref("secretPath").withSchema(TableName.SecretApprovalPolicy).as("policySecretPath"),
db.ref("allowedSelfApprovals").withSchema(TableName.SecretApprovalPolicy).as("policyAllowedSelfApprovals"),
db.ref("approvals").withSchema(TableName.SecretApprovalPolicy).as("policyApprovals"),
db.ref("enforcementLevel").withSchema(TableName.SecretApprovalPolicy).as("policyEnforcementLevel"),
db.ref("approverUserId").withSchema(TableName.SecretApprovalPolicyApprover),
@@ -516,8 +511,7 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
name: el.policyName,
approvals: el.policyApprovals,
secretPath: el.policySecretPath,
enforcementLevel: el.policyEnforcementLevel,
allowedSelfApprovals: el.policyAllowedSelfApprovals
enforcementLevel: el.policyEnforcementLevel
},
committerUser: {
userId: el.committerUserId,

View File

@@ -257,11 +257,6 @@ export const secretApprovalRequestSecretDALFactory = (db: TDbClient) => {
db.ref("id").withSchema("secVerTag")
)
.leftJoin(TableName.ResourceMetadata, `${TableName.SecretV2}.id`, `${TableName.ResourceMetadata}.secretId`)
.leftJoin(
TableName.SecretRotationV2SecretMapping,
`${TableName.SecretV2}.id`,
`${TableName.SecretRotationV2SecretMapping}.secretId`
)
.select(selectAllTableCols(TableName.SecretApprovalRequestSecretV2))
.select({
secVerTagId: "secVerTag.id",
@@ -290,8 +285,7 @@ export const secretApprovalRequestSecretDALFactory = (db: TDbClient) => {
db.ref("id").withSchema(TableName.ResourceMetadata).as("metadataId"),
db.ref("key").withSchema(TableName.ResourceMetadata).as("metadataKey"),
db.ref("value").withSchema(TableName.ResourceMetadata).as("metadataValue")
)
.select(db.ref("rotationId").withSchema(TableName.SecretRotationV2SecretMapping));
);
const formatedDoc = sqlNestRelationships({
data: doc,
key: "id",
@@ -310,16 +304,14 @@ export const secretApprovalRequestSecretDALFactory = (db: TDbClient) => {
{
key: "secretId",
label: "secret" as const,
mapper: ({ orgSecVersion, orgSecKey, orgSecValue, orgSecComment, secretId, rotationId }) =>
mapper: ({ orgSecVersion, orgSecKey, orgSecValue, orgSecComment, secretId }) =>
secretId
? {
id: secretId,
version: orgSecVersion,
key: orgSecKey,
encryptedValue: orgSecValue,
encryptedComment: orgSecComment,
isRotatedSecret: Boolean(rotationId),
rotationId
encryptedComment: orgSecComment
}
: undefined
},

View File

@@ -262,13 +262,7 @@ export const secretApprovalRequestServiceFactory = ({
id: el.id,
version: el.version,
secretMetadata: el.secretMetadata as ResourceMetadataDTO,
isRotatedSecret: el.secret.isRotatedSecret,
// eslint-disable-next-line no-nested-ternary
secretValue: el.secret.isRotatedSecret
? undefined
: el.encryptedValue
? secretManagerDecryptor({ cipherTextBlob: el.encryptedValue }).toString()
: "",
secretValue: el.encryptedValue ? secretManagerDecryptor({ cipherTextBlob: el.encryptedValue }).toString() : "",
secretComment: el.encryptedComment
? secretManagerDecryptor({ cipherTextBlob: el.encryptedComment }).toString()
: "",
@@ -358,11 +352,6 @@ export const secretApprovalRequestServiceFactory = ({
message: "The policy associated with this secret approval request has been deleted."
});
}
if (!policy.allowedSelfApprovals && actorId === secretApprovalRequest.committerUserId) {
throw new BadRequestError({
message: "Failed to review secret approval request. Users are not authorized to review their own request."
});
}
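The self-approval guard above reduces to a small predicate. A sketch that could back a unit test follows; the helper name is invented.

const canReviewOwnRequest = (allowedSelfApprovals: boolean, reviewerId: string, committerUserId: string) =>
  reviewerId !== committerUserId || allowedSelfApprovals;

// canReviewOwnRequest(false, "user-a", "user-a") -> false (committers may not review their own request)
// canReviewOwnRequest(true, "user-a", "user-a")  -> true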
const { hasRole } = await permissionService.getProjectPermission({
actor: ActorType.USER,
@@ -615,7 +604,7 @@ export const secretApprovalRequestServiceFactory = ({
tx,
inputSecrets: secretUpdationCommits.map((el) => {
const encryptedValue =
!el.secret.isRotatedSecret && typeof el.encryptedValue !== "undefined"
typeof el.encryptedValue !== "undefined"
? {
encryptedValue: el.encryptedValue as Buffer,
references: el.encryptedValue

View File

@@ -1,3 +0,0 @@
export * from "./mssql-credentials-rotation-constants";
export * from "./mssql-credentials-rotation-schemas";
export * from "./mssql-credentials-rotation-types";

View File

@@ -1,29 +0,0 @@
import { SecretRotation } from "@app/ee/services/secret-rotation-v2/secret-rotation-v2-enums";
import { TSecretRotationV2ListItem } from "@app/ee/services/secret-rotation-v2/secret-rotation-v2-types";
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
export const MSSQL_CREDENTIALS_ROTATION_LIST_OPTION: TSecretRotationV2ListItem = {
name: "Microsoft SQL Server Credentials",
type: SecretRotation.MsSqlCredentials,
connection: AppConnection.MsSql,
template: {
createUserStatement: `-- Create login at the server level
CREATE LOGIN [infisical_user] WITH PASSWORD = 'my-password';
-- Grant server-level connect permission
GRANT CONNECT SQL TO [infisical_user];
-- Switch to the database where you want to create the user
USE my_database;
-- Create the database user mapped to the login
CREATE USER [infisical_user] FOR LOGIN [infisical_user];
-- Grant permissions to the user on the schema in this database
GRANT SELECT, INSERT, UPDATE, DELETE ON SCHEMA::dbo TO [infisical_user];`,
secretsMapping: {
username: "MSSQL_DB_USERNAME",
password: "MSSQL_DB_PASSWORD"
}
}
};
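As a sketch of how a secretsMapping like the one above is meant to be consumed, the rotated credentials end up written under the mapped secret keys. The helper name and shapes below are illustrative assumptions, not Infisical's actual API.

type SqlSecretsMapping = { username: string; password: string };

const toSecretEntries = (mapping: SqlSecretsMapping, creds: { username: string; password: string }) => [
  { key: mapping.username, value: creds.username },
  { key: mapping.password, value: creds.password }
];

// toSecretEntries({ username: "MSSQL_DB_USERNAME", password: "MSSQL_DB_PASSWORD" },
//                 { username: "infisical_user", password: "<generated>" })
// -> [{ key: "MSSQL_DB_USERNAME", value: "infisical_user" }, { key: "MSSQL_DB_PASSWORD", value: "<generated>" }]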

View File

@@ -1,41 +0,0 @@
import { z } from "zod";
import { SecretRotation } from "@app/ee/services/secret-rotation-v2/secret-rotation-v2-enums";
import {
BaseCreateSecretRotationSchema,
BaseSecretRotationSchema,
BaseUpdateSecretRotationSchema
} from "@app/ee/services/secret-rotation-v2/secret-rotation-v2-schemas";
import {
SqlCredentialsRotationParametersSchema,
SqlCredentialsRotationSecretsMappingSchema,
SqlCredentialsRotationTemplateSchema
} from "@app/ee/services/secret-rotation-v2/shared/sql-credentials";
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
export const MsSqlCredentialsRotationSchema = BaseSecretRotationSchema(SecretRotation.MsSqlCredentials).extend({
type: z.literal(SecretRotation.MsSqlCredentials),
parameters: SqlCredentialsRotationParametersSchema,
secretsMapping: SqlCredentialsRotationSecretsMappingSchema
});
export const CreateMsSqlCredentialsRotationSchema = BaseCreateSecretRotationSchema(
SecretRotation.MsSqlCredentials
).extend({
parameters: SqlCredentialsRotationParametersSchema,
secretsMapping: SqlCredentialsRotationSecretsMappingSchema
});
export const UpdateMsSqlCredentialsRotationSchema = BaseUpdateSecretRotationSchema(
SecretRotation.MsSqlCredentials
).extend({
parameters: SqlCredentialsRotationParametersSchema.optional(),
secretsMapping: SqlCredentialsRotationSecretsMappingSchema.optional()
});
export const MsSqlCredentialsRotationListItemSchema = z.object({
name: z.literal("Microsoft SQL Server Credentials"),
connection: z.literal(AppConnection.MsSql),
type: z.literal(SecretRotation.MsSqlCredentials),
template: SqlCredentialsRotationTemplateSchema
});

View File

@@ -1,19 +0,0 @@
import { z } from "zod";
import { TMsSqlConnection } from "@app/services/app-connection/mssql";
import {
CreateMsSqlCredentialsRotationSchema,
MsSqlCredentialsRotationListItemSchema,
MsSqlCredentialsRotationSchema
} from "./mssql-credentials-rotation-schemas";
export type TMsSqlCredentialsRotation = z.infer<typeof MsSqlCredentialsRotationSchema>;
export type TMsSqlCredentialsRotationInput = z.infer<typeof CreateMsSqlCredentialsRotationSchema>;
export type TMsSqlCredentialsRotationListItem = z.infer<typeof MsSqlCredentialsRotationListItemSchema>;
export type TMsSqlCredentialsRotationWithConnection = TMsSqlCredentialsRotation & {
connection: TMsSqlConnection;
};

View File

@@ -1,3 +1,4 @@
export * from "./postgres-credentials-rotation-constants";
export * from "./postgres-credentials-rotation-fns";
export * from "./postgres-credentials-rotation-schemas";
export * from "./postgres-credentials-rotation-types";

View File

@@ -5,19 +5,5 @@ import { AppConnection } from "@app/services/app-connection/app-connection-enums
export const POSTGRES_CREDENTIALS_ROTATION_LIST_OPTION: TSecretRotationV2ListItem = {
name: "PostgreSQL Credentials",
type: SecretRotation.PostgresCredentials,
connection: AppConnection.Postgres,
template: {
createUserStatement: `-- create user role
CREATE USER infisical_user WITH ENCRYPTED PASSWORD 'temporary_password';
-- grant database connection permissions
GRANT CONNECT ON DATABASE my_database TO infisical_user;
-- grant relevant table permissions
GRANT ALL PRIVILEGES ON ALL TABLES IN SCHEMA public TO infisical_user;`,
secretsMapping: {
username: "POSTGRES_DB_USERNAME",
password: "POSTGRES_DB_PASSWORD"
}
}
connection: AppConnection.Postgres
};

View File

@@ -6,36 +6,61 @@ import {
BaseSecretRotationSchema,
BaseUpdateSecretRotationSchema
} from "@app/ee/services/secret-rotation-v2/secret-rotation-v2-schemas";
import {
SqlCredentialsRotationParametersSchema,
SqlCredentialsRotationSecretsMappingSchema,
SqlCredentialsRotationTemplateSchema
} from "@app/ee/services/secret-rotation-v2/shared/sql-credentials";
import { SecretRotations } from "@app/lib/api-docs";
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
const PostgresCredentialsRotationParametersSchema = z.object({
usernameSecretKey: z
.string()
.trim()
.min(1, "Username Secret Key Required")
.describe(SecretRotations.PARAMETERS.POSTGRES_CREDENTIALS.usernameSecretKey),
passwordSecretKey: z
.string()
.trim()
.min(1, "Username Secret Key Required")
.describe(SecretRotations.PARAMETERS.POSTGRES_CREDENTIALS.passwordSecretKey),
issueStatement: z
.string()
.trim()
.min(1, "Issue Credentials SQL Statement Required")
.describe(SecretRotations.PARAMETERS.POSTGRES_CREDENTIALS.issueStatement),
revokeStatement: z
.string()
.trim()
.min(1, "Revoke Credentials SQL Statement Required")
.describe(SecretRotations.PARAMETERS.POSTGRES_CREDENTIALS.revokeStatement)
});
const PostgresCredentialsRotationGeneratedCredentialsSchema = z
.object({
username: z.string(),
password: z.string()
})
.array()
.min(1)
.max(2);
export const PostgresCredentialsRotationSchema = BaseSecretRotationSchema(SecretRotation.PostgresCredentials).extend({
type: z.literal(SecretRotation.PostgresCredentials),
parameters: SqlCredentialsRotationParametersSchema,
secretsMapping: SqlCredentialsRotationSecretsMappingSchema
parameters: PostgresCredentialsRotationParametersSchema
// generatedCredentials: PostgresCredentialsRotationGeneratedCredentialsSchema
});
export const CreatePostgresCredentialsRotationSchema = BaseCreateSecretRotationSchema(
SecretRotation.PostgresCredentials
).extend({
parameters: SqlCredentialsRotationParametersSchema,
secretsMapping: SqlCredentialsRotationSecretsMappingSchema
parameters: PostgresCredentialsRotationParametersSchema
});
export const UpdatePostgresCredentialsRotationSchema = BaseUpdateSecretRotationSchema(
SecretRotation.PostgresCredentials
).extend({
parameters: SqlCredentialsRotationParametersSchema.optional(),
secretsMapping: SqlCredentialsRotationSecretsMappingSchema.optional()
parameters: PostgresCredentialsRotationParametersSchema.optional()
});
export const PostgresCredentialsRotationListItemSchema = z.object({
name: z.literal("PostgreSQL Credentials"),
name: z.literal("PostgreSQL Credentials"),
connection: z.literal(AppConnection.Postgres),
type: z.literal(SecretRotation.PostgresCredentials),
template: SqlCredentialsRotationTemplateSchema
type: z.literal(SecretRotation.PostgresCredentials)
});
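A hedged usage sketch for the inline parameters schema variant above; the payload values are invented, and the example assumes the schema is exported or co-located with the calling code.

const result = PostgresCredentialsRotationParametersSchema.safeParse({
  usernameSecretKey: "POSTGRES_DB_USERNAME",
  passwordSecretKey: "POSTGRES_DB_PASSWORD",
  issueStatement: "CREATE USER rotated_user WITH ENCRYPTED PASSWORD 'temporary_password';",
  revokeStatement: "DROP USER IF EXISTS rotated_user;"
});

if (!result.success) {
  // zod reports per-field messages such as "Issue Credentials SQL Statement Required"
  console.error(result.error.flatten().fieldErrors);
}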

View File

@@ -4,37 +4,25 @@ import { TDbClient } from "@app/db";
import { TableName } from "@app/db/schemas";
import { TSecretRotationsV2 } from "@app/db/schemas/secret-rotations-v2";
import { DatabaseError } from "@app/lib/errors";
import {
buildFindFilter,
ormify,
prependTableNameToFindFilter,
selectAllTableCols,
sqlNestRelationships,
TFindOpt
} from "@app/lib/knex";
import { buildFindFilter, ormify, prependTableNameToFindFilter, selectAllTableCols } from "@app/lib/knex";
import { TSecretFolderDALFactory } from "@app/services/secret-folder/secret-folder-dal";
export type TSecretRotationV2DALFactory = ReturnType<typeof secretRotationV2DALFactory>;
type TSecretRotationFindFilter = Parameters<typeof buildFindFilter<TSecretRotationsV2>>[0];
type TSecretRotationFindOptions = TFindOpt<TSecretRotationsV2, true, "name">;
const baseSecretRotationV2Query = ({
filter = {},
options,
filter,
db,
tx
}: {
db: TDbClient;
filter?: { projectId?: string } & TSecretRotationFindFilter;
options?: TSecretRotationFindOptions;
filter?: TSecretRotationFindFilter;
tx?: Knex;
}) => {
const { projectId, ...filters } = filter;
const query = (tx || db.replicaNode())(TableName.SecretRotationV2)
.join(TableName.SecretFolder, `${TableName.SecretRotationV2}.folderId`, `${TableName.SecretFolder}.id`)
.join(TableName.Environment, `${TableName.SecretFolder}.envId`, `${TableName.Environment}.id`)
.leftJoin(TableName.SecretFolder, `${TableName.SecretRotationV2}.folderId`, `${TableName.SecretFolder}.id`)
.leftJoin(TableName.Environment, `${TableName.SecretFolder}.envId`, `${TableName.Environment}.id`)
.join(TableName.AppConnection, `${TableName.SecretRotationV2}.connectionId`, `${TableName.AppConnection}.id`)
.select(selectAllTableCols(TableName.SecretRotationV2))
.select(
@@ -42,7 +30,6 @@ const baseSecretRotationV2Query = ({
db.ref("name").withSchema(TableName.Environment).as("envName"),
db.ref("id").withSchema(TableName.Environment).as("envId"),
db.ref("slug").withSchema(TableName.Environment).as("envSlug"),
db.ref("projectId").withSchema(TableName.Environment),
// entire connection
db.ref("name").withSchema(TableName.AppConnection).as("connectionName"),
db.ref("method").withSchema(TableName.AppConnection).as("connectionMethod"),
@@ -53,42 +40,20 @@ const baseSecretRotationV2Query = ({
db.ref("version").withSchema(TableName.AppConnection).as("connectionVersion"),
db.ref("createdAt").withSchema(TableName.AppConnection).as("connectionCreatedAt"),
db.ref("updatedAt").withSchema(TableName.AppConnection).as("connectionUpdatedAt"),
db
.ref("isPlatformManagedCredentials")
.withSchema(TableName.AppConnection)
.as("connectionIsPlatformManagedCredentials")
db.ref("isPlatformManaged").withSchema(TableName.AppConnection).as("connectionIsPlatformManaged")
);
if (filter) {
/* eslint-disable @typescript-eslint/no-misused-promises */
void query.where(buildFindFilter(prependTableNameToFindFilter(TableName.SecretRotationV2, filters)));
}
if (projectId) {
void query.where(`${TableName.Environment}.projectId`, projectId);
}
if (options) {
const { offset, limit, sort, count, countDistinct } = options;
if (countDistinct) {
void query.countDistinct(countDistinct);
} else if (count) {
void query.select(db.raw("COUNT(*) OVER() AS count"));
void query.select("*");
}
if (limit) void query.limit(limit);
if (offset) void query.offset(offset);
if (sort) {
void query.orderBy(sort.map(([column, order, nulls]) => ({ column: column as string, order, nulls })));
}
void query.where(buildFindFilter(prependTableNameToFindFilter(TableName.SecretRotationV2, filter)));
}
return query;
};
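The options branch above amounts to windowed counting plus ordinary paging. A plain-TS analogy of the same semantics follows (no knex; names are invented): the COUNT(*) OVER() window reflects the total before limit and offset are applied.

type FindOptions = { offset?: number; limit?: number; count?: boolean; sort?: [string, "asc" | "desc"][] };

const applyFindOptions = <T extends Record<string, unknown>>(rows: T[], options: FindOptions = {}) => {
  let result = [...rows];
  if (options.sort?.length) {
    const [[column, order]] = options.sort;
    result.sort((a, b) => String(a[column]).localeCompare(String(b[column])) * (order === "asc" ? 1 : -1));
  }
  const count = result.length; // analogous to COUNT(*) OVER(): total rows before paging
  if (options.offset) result = result.slice(options.offset);
  if (options.limit) result = result.slice(0, options.limit);
  return options.count ? result.map((row) => ({ ...row, count })) : result;
};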
const expandSecretRotation = <T extends Awaited<ReturnType<typeof baseSecretRotationV2Query>>[number]>(
secretRotation: T,
folder: Awaited<ReturnType<TSecretFolderDALFactory["findSecretPathByFolderIds"]>>[number]
const expandSecretRotation = (
secretRotation: Awaited<ReturnType<typeof baseSecretRotationV2Query>>[number],
folder?: Awaited<ReturnType<TSecretFolderDALFactory["findSecretPathByFolderIds"]>>[number]
) => {
const {
envId,
@@ -104,14 +69,14 @@ const expandSecretRotation = <T extends Awaited<ReturnType<typeof baseSecretRota
connectionCreatedAt,
connectionUpdatedAt,
connectionVersion,
connectionIsPlatformManagedCredentials,
connectionIsPlatformManaged,
...el
} = secretRotation;
return {
...el,
connectionId,
environment: { id: envId, name: envName, slug: envSlug },
environment: envId ? { id: envId, name: envName, slug: envSlug } : null,
connection: {
app: connectionApp,
id: connectionId,
@@ -123,12 +88,14 @@ const expandSecretRotation = <T extends Awaited<ReturnType<typeof baseSecretRota
createdAt: connectionCreatedAt,
updatedAt: connectionUpdatedAt,
version: connectionVersion,
isPlatformManagedCredentials: connectionIsPlatformManagedCredentials
isPlatformManaged: connectionIsPlatformManaged
},
folder: {
id: folder!.id,
path: folder!.path
}
folder: folder
? {
id: folder.id,
path: folder.path
}
: null
};
};
@@ -137,22 +104,19 @@ export const secretRotationV2DALFactory = (
folderDAL: Pick<TSecretFolderDALFactory, "findSecretPathByFolderIds">
) => {
const secretRotationV2Orm = ormify(db, TableName.SecretRotationV2);
const secretRotationV2SecretMappingOrm = ormify(db, TableName.SecretRotationV2SecretMapping);
const find = async (
filter: Parameters<(typeof secretRotationV2Orm)["find"]>[0] & { projectId: string },
options?: TSecretRotationFindOptions,
tx?: Knex
) => {
try {
const secretRotations = await baseSecretRotationV2Query({ filter, db, tx, options });
const secretRotations = await baseSecretRotationV2Query({ filter, db, tx });
if (!secretRotations.length) return [];
const foldersWithPath = await folderDAL.findSecretPathByFolderIds(
filter.projectId,
secretRotations.map((rotation) => rotation.folderId),
tx
secretRotations.filter((rotation) => Boolean(rotation.folderId)).map((rotation) => rotation.folderId!)
);
const folderRecord: Record<string, (typeof foldersWithPath)[number]> = {};
@@ -161,189 +125,14 @@ export const secretRotationV2DALFactory = (
if (folder) folderRecord[folder.id] = folder;
});
return secretRotations.map((rotation) => expandSecretRotation(rotation, folderRecord[rotation.folderId]));
return secretRotations.map((rotation) =>
expandSecretRotation(rotation, rotation.folderId ? folderRecord[rotation.folderId] : undefined)
);
} catch (error) {
throw new DatabaseError({ error, name: "Find - Secret Rotation V2" });
}
};
const findWithMappedSecretsCount = async (
{
search,
projectId,
...filter
}: Parameters<(typeof secretRotationV2Orm)["find"]>[0] & { projectId: string; search?: string },
tx?: Knex
) => {
const query = (tx || db.replicaNode())(TableName.SecretRotationV2)
.join(TableName.SecretFolder, `${TableName.SecretRotationV2}.folderId`, `${TableName.SecretFolder}.id`)
.join(TableName.Environment, `${TableName.SecretFolder}.envId`, `${TableName.Environment}.id`)
.join(
TableName.SecretRotationV2SecretMapping,
`${TableName.SecretRotationV2SecretMapping}.rotationId`,
`${TableName.SecretRotationV2}.id`
)
.join(TableName.SecretV2, `${TableName.SecretRotationV2SecretMapping}.secretId`, `${TableName.SecretV2}.id`)
.where(`${TableName.Environment}.projectId`, projectId)
.where(buildFindFilter(prependTableNameToFindFilter(TableName.SecretRotationV2, filter)))
.countDistinct(`${TableName.SecretRotationV2}.name`);
if (search) {
void query.where((qb) => {
void qb
.whereILike(`${TableName.SecretV2}.key`, `%${search}%`)
.orWhereILike(`${TableName.SecretRotationV2}.name`, `%${search}%`);
});
}
const result = await query;
// @ts-expect-error knex infers wrong type...
return Number(result[0]?.count ?? 0);
};
const findWithMappedSecrets = async (
{ search, ...filter }: Parameters<(typeof secretRotationV2Orm)["find"]>[0] & { projectId: string; search?: string },
options?: TSecretRotationFindOptions,
tx?: Knex
) => {
try {
const extendedQuery = baseSecretRotationV2Query({ filter, db, tx, options })
.join(
TableName.SecretRotationV2SecretMapping,
`${TableName.SecretRotationV2SecretMapping}.rotationId`,
`${TableName.SecretRotationV2}.id`
)
.join(TableName.SecretV2, `${TableName.SecretV2}.id`, `${TableName.SecretRotationV2SecretMapping}.secretId`)
.leftJoin(
TableName.SecretV2JnTag,
`${TableName.SecretV2}.id`,
`${TableName.SecretV2JnTag}.${TableName.SecretV2}Id`
)
.leftJoin(
TableName.SecretTag,
`${TableName.SecretV2JnTag}.${TableName.SecretTag}Id`,
`${TableName.SecretTag}.id`
)
.leftJoin(TableName.ResourceMetadata, `${TableName.SecretV2}.id`, `${TableName.ResourceMetadata}.secretId`)
.select(
db.ref("id").withSchema(TableName.SecretV2).as("secretId"),
db.ref("key").withSchema(TableName.SecretV2).as("secretKey"),
db.ref("version").withSchema(TableName.SecretV2).as("secretVersion"),
db.ref("type").withSchema(TableName.SecretV2).as("secretType"),
db.ref("encryptedValue").withSchema(TableName.SecretV2).as("secretEncryptedValue"),
db.ref("encryptedComment").withSchema(TableName.SecretV2).as("secretEncryptedComment"),
db.ref("reminderNote").withSchema(TableName.SecretV2).as("secretReminderNote"),
db.ref("reminderRepeatDays").withSchema(TableName.SecretV2).as("secretReminderRepeatDays"),
db.ref("skipMultilineEncoding").withSchema(TableName.SecretV2).as("secretSkipMultilineEncoding"),
db.ref("metadata").withSchema(TableName.SecretV2).as("secretMetadata"),
db.ref("userId").withSchema(TableName.SecretV2).as("secretUserId"),
db.ref("folderId").withSchema(TableName.SecretV2).as("secretFolderId"),
db.ref("createdAt").withSchema(TableName.SecretV2).as("secretCreatedAt"),
db.ref("updatedAt").withSchema(TableName.SecretV2).as("secretUpdatedAt"),
db.ref("id").withSchema(TableName.SecretTag).as("tagId"),
db.ref("color").withSchema(TableName.SecretTag).as("tagColor"),
db.ref("slug").withSchema(TableName.SecretTag).as("tagSlug"),
db.ref("id").withSchema(TableName.ResourceMetadata).as("metadataId"),
db.ref("key").withSchema(TableName.ResourceMetadata).as("metadataKey"),
db.ref("value").withSchema(TableName.ResourceMetadata).as("metadataValue")
);
if (search) {
void extendedQuery.where((query) => {
void query
.whereILike(`${TableName.SecretV2}.key`, `%${search}%`)
.orWhereILike(`${TableName.SecretRotationV2}.name`, `%${search}%`);
});
}
const secretRotations = await extendedQuery;
if (!secretRotations.length) return [];
const foldersWithPath = await folderDAL.findSecretPathByFolderIds(
filter.projectId,
secretRotations.map((rotation) => rotation.folderId),
tx
);
const folderRecord: Record<string, (typeof foldersWithPath)[number]> = {};
foldersWithPath.forEach((folder) => {
if (folder) folderRecord[folder.id] = folder;
});
return sqlNestRelationships({
data: secretRotations,
key: "id",
parentMapper: (rotation) => expandSecretRotation(rotation, folderRecord[rotation.folderId]),
childrenMapper: [
{
key: "secretId",
label: "secrets" as const,
mapper: ({
secretId,
secretKey,
secretVersion,
secretType,
secretEncryptedValue,
secretEncryptedComment,
secretReminderNote,
secretReminderRepeatDays,
secretSkipMultilineEncoding,
secretMetadata,
secretUserId,
secretFolderId,
secretCreatedAt,
secretUpdatedAt,
id
}) => ({
id: secretId,
key: secretKey,
version: secretVersion,
type: secretType,
encryptedValue: secretEncryptedValue,
encryptedComment: secretEncryptedComment,
reminderNote: secretReminderNote,
reminderRepeatDays: secretReminderRepeatDays,
skipMultilineEncoding: secretSkipMultilineEncoding,
metadata: secretMetadata,
userId: secretUserId,
folderId: secretFolderId,
createdAt: secretCreatedAt,
updatedAt: secretUpdatedAt,
rotationId: id,
isRotatedSecret: true
}),
childrenMapper: [
{
key: "tagId",
label: "tags" as const,
mapper: ({ tagId: id, tagColor: color, tagSlug: slug }) => ({
id,
color,
slug,
name: slug
})
},
{
key: "metadataId",
label: "secretMetadata" as const,
mapper: ({ metadataKey, metadataValue, metadataId }) => ({
id: metadataId,
key: metadataKey,
value: metadataValue
})
}
]
}
]
});
} catch (error) {
throw new DatabaseError({ error, name: "Find with Mapped Secrets - Secret Rotation V2" });
}
};
const findById = async (id: string, tx?: Knex) => {
try {
const secretRotation = await baseSecretRotationV2Query({
@@ -353,11 +142,9 @@ export const secretRotationV2DALFactory = (
}).first();
if (secretRotation) {
const [folderWithPath] = await folderDAL.findSecretPathByFolderIds(
secretRotation.projectId,
[secretRotation.folderId],
tx
);
const [folderWithPath] = secretRotation.folderId
? await folderDAL.findSecretPathByFolderIds(secretRotation.projectId, [secretRotation.folderId])
: [];
return expandSecretRotation(secretRotation, folderWithPath);
}
} catch (error) {
@@ -365,61 +152,38 @@ export const secretRotationV2DALFactory = (
}
};
const create = async (data: Parameters<(typeof secretRotationV2Orm)["create"]>[0], tx?: Knex) => {
const rotation = await secretRotationV2Orm.create(data, tx);
const create = async (data: Parameters<(typeof secretRotationV2Orm)["create"]>[0]) => {
const secretRotation = (await secretRotationV2Orm.transaction(async (tx) => {
const rotation = await secretRotationV2Orm.create(data, tx);
const secretRotation = (await baseSecretRotationV2Query({
filter: { id: rotation.id },
db,
tx
}).first())!;
return baseSecretRotationV2Query({
filter: { id: rotation.id },
db,
tx
}).first();
}))!;
const [folderWithPath] = await folderDAL.findSecretPathByFolderIds(
secretRotation.projectId,
[secretRotation.folderId],
tx
);
const [folderWithPath] = secretRotation.folderId
? await folderDAL.findSecretPathByFolderIds(secretRotation.projectId, [secretRotation.folderId])
: [];
return expandSecretRotation(secretRotation, folderWithPath);
};
const updateById = async (
rotationId: string,
data: Parameters<(typeof secretRotationV2Orm)["updateById"]>[1],
tx?: Knex
) => {
const rotation = await secretRotationV2Orm.updateById(rotationId, data, tx);
const updateById = async (rotationId: string, data: Parameters<(typeof secretRotationV2Orm)["updateById"]>[1]) => {
const secretRotation = (await secretRotationV2Orm.transaction(async (tx) => {
const rotation = await secretRotationV2Orm.updateById(rotationId, data, tx);
const secretRotation = (await baseSecretRotationV2Query({
filter: { id: rotation.id },
db,
tx
}).first())!;
const [folderWithPath] = await folderDAL.findSecretPathByFolderIds(
secretRotation.projectId,
[secretRotation.folderId],
tx
);
return expandSecretRotation(secretRotation, folderWithPath);
};
const deleteById = async (rotationId: string, tx?: Knex) => {
const secretRotation = (await baseSecretRotationV2Query({
filter: { id: rotationId },
db,
tx
}).first())!;
await secretRotationV2Orm.deleteById(rotationId, tx);
const [folderWithPath] = await folderDAL.findSecretPathByFolderIds(
secretRotation.projectId,
[secretRotation.folderId],
tx
);
return baseSecretRotationV2Query({
filter: { id: rotation.id },
db,
tx
}).first();
}))!;
const [folderWithPath] = secretRotation.folderId
? await folderDAL.findSecretPathByFolderIds(secretRotation.projectId, [secretRotation.folderId])
: [];
return expandSecretRotation(secretRotation, folderWithPath);
};
@@ -428,12 +192,9 @@ export const secretRotationV2DALFactory = (
const secretRotation = await baseSecretRotationV2Query({ filter, db, tx }).first();
if (secretRotation) {
const [folderWithPath] = await folderDAL.findSecretPathByFolderIds(
secretRotation.projectId,
[secretRotation.folderId],
tx
);
const [folderWithPath] = secretRotation.folderId
? await folderDAL.findSecretPathByFolderIds(secretRotation.projectId, [secretRotation.folderId])
: [];
return expandSecretRotation(secretRotation, folderWithPath);
}
} catch (error) {
@@ -441,27 +202,5 @@ export const secretRotationV2DALFactory = (
}
};
const findSecretRotationsToQueue = async (rotateBy: Date, tx?: Knex) => {
const secretRotations = await (tx || db.replicaNode())(TableName.SecretRotationV2)
.where(`${TableName.SecretRotationV2}.isAutoRotationEnabled`, true)
.whereNotNull(`${TableName.SecretRotationV2}.nextRotationAt`)
.andWhereRaw(`"nextRotationAt" <= ?`, [rotateBy])
.select(selectAllTableCols(TableName.SecretRotationV2));
return secretRotations;
};
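The queue query above encodes a simple due-date predicate. A standalone sketch of it follows; the candidate shape is assumed from the columns the query uses.

type QueueCandidate = { isAutoRotationEnabled: boolean; nextRotationAt: Date | null };

const isDueForRotation = (rotation: QueueCandidate, rotateBy: Date) =>
  rotation.isAutoRotationEnabled && rotation.nextRotationAt !== null && rotation.nextRotationAt <= rotateBy;

// A worker might call findSecretRotationsToQueue(new Date()) on an interval and enqueue
// each returned rotation for processing.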
return {
...secretRotationV2Orm,
find,
create,
findById,
updateById,
deleteById,
findOne,
insertSecretMappings: secretRotationV2SecretMappingOrm.insertMany,
findWithMappedSecrets,
findWithMappedSecretsCount,
findSecretRotationsToQueue
};
return { ...secretRotationV2Orm, find, create, findById, updateById, findOne };
};

Some files were not shown because too many files have changed in this diff.