Compare commits


1 Commit

Author: Maidul Islam
SHA1: 65b6f61b53
Message: dedicated-instance-deploy
Date: 2025-02-25 16:21:20 +09:00
325 changed files with 8800 additions and 14805 deletions

View File

@@ -112,11 +112,4 @@ INF_APP_CONNECTION_GCP_SERVICE_ACCOUNT_CREDENTIAL=
# azure app connection
INF_APP_CONNECTION_AZURE_CLIENT_ID=
INF_APP_CONNECTION_AZURE_CLIENT_SECRET=
# datadog
SHOULD_USE_DATADOG_TRACER=
DATADOG_PROFILING_ENABLED=
DATADOG_ENV=
DATADOG_SERVICE=
DATADOG_HOSTNAME=
INF_APP_CONNECTION_AZURE_CLIENT_SECRET=

View File

@@ -32,23 +32,10 @@ jobs:
run: touch .env && docker compose -f docker-compose.dev.yml up -d db redis
- name: Start the server
run: |
echo "SECRET_SCANNING_GIT_APP_ID=793712" >> .env
echo "SECRET_SCANNING_PRIVATE_KEY=some-random" >> .env
echo "SECRET_SCANNING_WEBHOOK_SECRET=some-random" >> .env
echo "Examining built image:"
docker image inspect infisical-api | grep -A 5 "Entrypoint"
docker run --name infisical-api -d -p 4000:4000 \
-e DB_CONNECTION_URI=$DB_CONNECTION_URI \
-e REDIS_URL=$REDIS_URL \
-e JWT_AUTH_SECRET=$JWT_AUTH_SECRET \
-e ENCRYPTION_KEY=$ENCRYPTION_KEY \
--env-file .env \
infisical-api
echo "Container status right after creation:"
docker ps -a | grep infisical-api
echo "SECRET_SCANNING_GIT_APP_ID=793712" >> .env
echo "SECRET_SCANNING_PRIVATE_KEY=some-random" >> .env
echo "SECRET_SCANNING_WEBHOOK_SECRET=some-random" >> .env
docker run --name infisical-api -d -p 4000:4000 -e DB_CONNECTION_URI=$DB_CONNECTION_URI -e REDIS_URL=$REDIS_URL -e JWT_AUTH_SECRET=$JWT_AUTH_SECRET -e ENCRYPTION_KEY=$ENCRYPTION_KEY --env-file .env --entrypoint '/bin/sh' infisical-api -c "npm run migration:latest && ls && node dist/main.mjs"
env:
REDIS_URL: redis://172.17.0.1:6379
DB_CONNECTION_URI: postgres://infisical:infisical@172.17.0.1:5432/infisical?sslmode=disable
@@ -56,48 +43,35 @@ jobs:
ENCRYPTION_KEY: 4bnfe4e407b8921c104518903515b218
- uses: actions/setup-go@v5
with:
go-version: "1.21.5"
go-version: '1.21.5'
- name: Wait for container to be stable and check logs
run: |
SECONDS=0
HEALTHY=0
while [ $SECONDS -lt 60 ]; do
# Check if container is running
if docker ps | grep infisical-api; then
# Try to access the API endpoint
if curl -s -f http://localhost:4000/api/docs/json > /dev/null 2>&1; then
echo "API endpoint is responding. Container seems healthy."
HEALTHY=1
break
fi
else
echo "Container is not running!"
docker ps -a | grep infisical-api
if docker ps | grep infisical-api | grep -q healthy; then
echo "Container is healthy."
HEALTHY=1
break
fi
echo "Waiting for container to be healthy... ($SECONDS seconds elapsed)"
sleep 5
SECONDS=$((SECONDS+5))
docker logs infisical-api
sleep 2
SECONDS=$((SECONDS+2))
done
if [ $HEALTHY -ne 1 ]; then
echo "Container did not become healthy in time"
echo "Container status:"
docker ps -a | grep infisical-api
echo "Container logs (if any):"
docker logs infisical-api || echo "No logs available"
echo "Container inspection:"
docker inspect infisical-api | grep -A 5 "State"
exit 1
fi
- name: Install openapi-diff
run: go install github.com/oasdiff/oasdiff@latest
run: go install github.com/tufin/oasdiff@latest
- name: Running OpenAPI Spec diff action
run: oasdiff breaking https://app.infisical.com/api/docs/json http://localhost:4000/api/docs/json --fail-on ERR
- name: cleanup
if: always()
run: |
docker compose -f "docker-compose.dev.yml" down
docker stop infisical-api || true
docker rm infisical-api || true
docker stop infisical-api
docker remove infisical-api

View File

@@ -26,7 +26,7 @@ jobs:
CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE: ${{ secrets.CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE }}
npm-release:
runs-on: ubuntu-latest
runs-on: ubuntu-20.04
env:
working-directory: ./npm
needs:
@@ -83,7 +83,7 @@ jobs:
NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
goreleaser:
runs-on: ubuntu-latest
runs-on: ubuntu-20.04
needs: [cli-integration-tests]
steps:
- uses: actions/checkout@v3
@@ -103,12 +103,11 @@ jobs:
go-version: ">=1.19.3"
cache: true
cache-dependency-path: cli/go.sum
- name: Setup for libssl1.0-dev
- name: libssl1.1 => libssl1.0-dev for OSXCross
run: |
echo 'deb http://security.ubuntu.com/ubuntu bionic-security main' | sudo tee -a /etc/apt/sources.list
sudo apt-key adv --keyserver keyserver.ubuntu.com --recv-keys 3B4FE6ACC0B21F32
sudo apt update
sudo apt-get install -y libssl1.0-dev
sudo apt update && apt-cache policy libssl1.0-dev
sudo apt-get install libssl1.0-dev
- name: OSXCross for CGO Support
run: |
mkdir ../../osxcross

View File

@@ -34,10 +34,7 @@ jobs:
working-directory: backend
- name: Start postgres and redis
run: touch .env && docker compose -f docker-compose.dev.yml up -d db redis
- name: Run unit test
run: npm run test:unit
working-directory: backend
- name: Run integration test
- name: Start integration test
run: npm run test:e2e
working-directory: backend
env:
@@ -47,5 +44,4 @@ jobs:
ENCRYPTION_KEY: 4bnfe4e407b8921c104518903515b218
- name: cleanup
run: |
docker compose -f "docker-compose.dev.yml" down
docker compose -f "docker-compose.dev.yml" down

View File

@@ -161,9 +161,6 @@ COPY --from=backend-runner /app /backend
COPY --from=frontend-runner /app ./backend/frontend-build
ARG INFISICAL_PLATFORM_VERSION
ENV INFISICAL_PLATFORM_VERSION $INFISICAL_PLATFORM_VERSION
ENV PORT 8080
ENV HOST=0.0.0.0
ENV HTTPS_ENABLED false

View File

@@ -3,10 +3,13 @@ ARG POSTHOG_API_KEY=posthog-api-key
ARG INTERCOM_ID=intercom-id
ARG CAPTCHA_SITE_KEY=captcha-site-key
FROM node:20-slim AS base
FROM node:20-alpine AS base
FROM base AS frontend-dependencies
# Check https://github.com/nodejs/docker-node/tree/b4117f9333da4138b03a546ec926ef50a31506c3#nodealpine to understand why libc6-compat might be needed.
RUN apk add --no-cache libc6-compat
WORKDIR /app
COPY frontend/package.json frontend/package-lock.json ./
@@ -42,8 +45,8 @@ RUN npm run build
FROM base AS frontend-runner
WORKDIR /app
RUN groupadd --system --gid 1001 nodejs
RUN useradd --system --uid 1001 --gid nodejs non-root-user
RUN addgroup --system --gid 1001 nodejs
RUN adduser --system --uid 1001 non-root-user
COPY --from=frontend-builder --chown=non-root-user:nodejs /app/dist ./
@@ -53,23 +56,21 @@ USER non-root-user
## BACKEND
##
FROM base AS backend-build
RUN addgroup --system --gid 1001 nodejs \
&& adduser --system --uid 1001 non-root-user
WORKDIR /app
# Install all required dependencies for build
RUN apt-get update && apt-get install -y \
RUN apk --update add \
python3 \
make \
g++ \
unixodbc \
freetds-bin \
freetds \
unixodbc-dev \
libc-dev \
freetds-dev \
&& rm -rf /var/lib/apt/lists/*
RUN groupadd --system --gid 1001 nodejs
RUN useradd --system --uid 1001 --gid nodejs non-root-user
freetds-dev
COPY backend/package*.json ./
RUN npm ci --only-production
@@ -85,19 +86,18 @@ FROM base AS backend-runner
WORKDIR /app
# Install all required dependencies for runtime
RUN apt-get update && apt-get install -y \
RUN apk --update add \
python3 \
make \
g++ \
unixodbc \
freetds-bin \
freetds \
unixodbc-dev \
libc-dev \
freetds-dev \
&& rm -rf /var/lib/apt/lists/*
freetds-dev
# Configure ODBC
RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nSetup = /usr/lib/x86_64-linux-gnu/odbc/libtdsS.so\nFileUsage = 1\n" > /etc/odbcinst.ini
RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/libtdsodbc.so\nSetup = /usr/lib/libtdsodbc.so\nFileUsage = 1\n" > /etc/odbcinst.ini
COPY backend/package*.json ./
RUN npm ci --only-production
@@ -109,36 +109,34 @@ RUN mkdir frontend-build
# Production stage
FROM base AS production
RUN apt-get update && apt-get install -y \
ca-certificates \
bash \
curl \
git \
RUN apk add --upgrade --no-cache ca-certificates
RUN apk add --no-cache bash curl && curl -1sLf \
'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.alpine.sh' | bash \
&& apk add infisical=0.31.1 && apk add --no-cache git
WORKDIR /
# Install all required runtime dependencies
RUN apk --update add \
python3 \
make \
g++ \
unixodbc \
freetds-bin \
freetds \
unixodbc-dev \
libc-dev \
freetds-dev \
wget \
openssh-client \
&& rm -rf /var/lib/apt/lists/*
# Install Infisical CLI
RUN curl -1sLf 'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.deb.sh' | bash \
&& apt-get update && apt-get install -y infisical=0.31.1 \
&& rm -rf /var/lib/apt/lists/*
WORKDIR /
bash \
curl \
git \
openssh
# Configure ODBC in production
RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nSetup = /usr/lib/x86_64-linux-gnu/odbc/libtdsS.so\nFileUsage = 1\n" > /etc/odbcinst.ini
RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/libtdsodbc.so\nSetup = /usr/lib/libtdsodbc.so\nFileUsage = 1\n" > /etc/odbcinst.ini
# Setup user permissions
RUN groupadd --system --gid 1001 nodejs \
&& useradd --system --uid 1001 --gid nodejs non-root-user
RUN addgroup --system --gid 1001 nodejs \
&& adduser --system --uid 1001 non-root-user
# Give non-root-user permission to update SSL certs
RUN chown -R non-root-user /etc/ssl/certs
@@ -156,11 +154,11 @@ ENV INTERCOM_ID=$INTERCOM_ID
ARG CAPTCHA_SITE_KEY
ENV CAPTCHA_SITE_KEY=$CAPTCHA_SITE_KEY
COPY --from=backend-runner /app /backend
COPY --from=frontend-runner /app ./backend/frontend-build
ARG INFISICAL_PLATFORM_VERSION
ENV INFISICAL_PLATFORM_VERSION $INFISICAL_PLATFORM_VERSION
ENV PORT 8080
ENV HOST=0.0.0.0
@@ -168,7 +166,6 @@ ENV HTTPS_ENABLED false
ENV NODE_ENV production
ENV STANDALONE_BUILD true
ENV STANDALONE_MODE true
WORKDIR /backend
ENV TELEMETRY_ENABLED true

View File

@@ -1,22 +1,23 @@
# Build stage
FROM node:20-slim AS build
FROM node:20-alpine AS build
WORKDIR /app
# Required for pkcs11js
RUN apt-get update && apt-get install -y \
python3 \
make \
g++ \
openssh-client
RUN apk --update add \
python3 \
make \
g++ \
openssh
# Install dependencies for TDS driver (required for SAP ASE dynamic secrets)
RUN apt-get install -y \
# install dependencies for TDS driver (required for SAP ASE dynamic secrets)
RUN apk add --no-cache \
unixodbc \
freetds-bin \
freetds-dev \
freetds \
unixodbc-dev \
libc-dev
libc-dev \
freetds-dev
COPY package*.json ./
RUN npm ci --only-production
@@ -25,36 +26,36 @@ COPY . .
RUN npm run build
# Production stage
FROM node:20-slim
FROM node:20-alpine
WORKDIR /app
ENV npm_config_cache /home/node/.npm
COPY package*.json ./
RUN apt-get update && apt-get install -y \
python3 \
make \
g++
RUN apk --update add \
python3 \
make \
g++
# Install dependencies for TDS driver (required for SAP ASE dynamic secrets)
RUN apt-get install -y \
# install dependencies for TDS driver (required for SAP ASE dynamic secrets)
RUN apk add --no-cache \
unixodbc \
freetds-bin \
freetds-dev \
freetds \
unixodbc-dev \
libc-dev
libc-dev \
freetds-dev
RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nSetup = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nFileUsage = 1\n" > /etc/odbcinst.ini
RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/libtdsodbc.so\nSetup = /usr/lib/libtdsodbc.so\nFileUsage = 1\n" > /etc/odbcinst.ini
RUN npm ci --only-production && npm cache clean --force
COPY --from=build /app .
# Install Infisical CLI
RUN apt-get install -y curl bash && \
curl -1sLf 'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.deb.sh' | bash && \
apt-get update && apt-get install -y infisical=0.8.1 git
RUN apk add --no-cache bash curl && curl -1sLf \
'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.alpine.sh' | bash \
&& apk add infisical=0.8.1 && apk add --no-cache git
HEALTHCHECK --interval=10s --timeout=3s --start-period=10s \
CMD node healthcheck.js

View File

@@ -1,4 +1,4 @@
FROM node:20-slim
FROM node:20-alpine
# ? Setup a test SoftHSM module. In production a real HSM is used.
@@ -7,32 +7,32 @@ ARG SOFTHSM2_VERSION=2.5.0
ENV SOFTHSM2_VERSION=${SOFTHSM2_VERSION} \
SOFTHSM2_SOURCES=/tmp/softhsm2
# Install build dependencies including python3 (required for pkcs11js and partially TDS driver)
RUN apt-get update && apt-get install -y \
build-essential \
autoconf \
automake \
git \
libtool \
libssl-dev \
python3 \
make \
g++ \
openssh-client \
curl \
pkg-config
# install build dependencies including python3 (required for pkcs11js and partially TDS driver)
RUN apk --update add \
alpine-sdk \
autoconf \
automake \
git \
libtool \
openssl-dev \
python3 \
make \
g++ \
openssh
# Install dependencies for TDS driver (required for SAP ASE dynamic secrets)
RUN apt-get install -y \
# install dependencies for TDS driver (required for SAP ASE dynamic secrets)
RUN apk add --no-cache \
unixodbc \
freetds \
unixodbc-dev \
freetds-dev \
freetds-bin \
tdsodbc
libc-dev \
freetds-dev
RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nSetup = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nFileUsage = 1\n" > /etc/odbcinst.ini
# Build and install SoftHSM2
RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/libtdsodbc.so\nSetup = /usr/lib/libtdsodbc.so\nFileUsage = 1\n" > /etc/odbcinst.ini
# build and install SoftHSM2
RUN git clone https://github.com/opendnssec/SoftHSMv2.git ${SOFTHSM2_SOURCES}
WORKDIR ${SOFTHSM2_SOURCES}
@@ -45,18 +45,16 @@ RUN git checkout ${SOFTHSM2_VERSION} -b ${SOFTHSM2_VERSION} \
WORKDIR /root
RUN rm -fr ${SOFTHSM2_SOURCES}
# Install pkcs11-tool
RUN apt-get install -y opensc
# install pkcs11-tool
RUN apk --update add opensc
RUN mkdir -p /etc/softhsm2/tokens && \
softhsm2-util --init-token --slot 0 --label "auth-app" --pin 1234 --so-pin 0000
RUN softhsm2-util --init-token --slot 0 --label "auth-app" --pin 1234 --so-pin 0000
# ? App setup
# Install Infisical CLI
RUN curl -1sLf 'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.deb.sh' | bash && \
apt-get update && \
apt-get install -y infisical=0.8.1
RUN apk add --no-cache bash curl && curl -1sLf \
'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.alpine.sh' | bash \
&& apk add infisical=0.8.1 && apk add --no-cache git
WORKDIR /app

backend/package-lock.json (generated, 5253 changed lines): diff suppressed because it is too large.

View File

@@ -40,7 +40,6 @@
"type:check": "tsc --noEmit",
"lint:fix": "eslint --fix --ext js,ts ./src",
"lint": "eslint 'src/**/*.ts'",
"test:unit": "vitest run -c vitest.unit.config.ts",
"test:e2e": "vitest run -c vitest.e2e.config.ts --bail=1",
"test:e2e-watch": "vitest -c vitest.e2e.config.ts --bail=1",
"test:e2e-coverage": "vitest run --coverage -c vitest.e2e.config.ts",
@@ -61,13 +60,6 @@
"migration:status": "npm run auditlog-migration:status && knex --knexfile ./dist/db/knexfile.mjs --client pg migrate:status",
"migration:rollback": "npm run auditlog-migration:rollback && knex --knexfile ./dist/db/knexfile.mjs migrate:rollback",
"migration:unlock": "npm run auditlog-migration:unlock && knex --knexfile ./dist/db/knexfile.mjs migrate:unlock",
"migration:up-dev": "knex --knexfile ./src/db/knexfile.ts --client pg migrate:up",
"migration:down-dev": "knex --knexfile ./src/db/knexfile.ts --client pg migrate:down",
"migration:list-dev": "knex --knexfile ./src/db/knexfile.ts --client pg migrate:list",
"migration:latest-dev": "knex --knexfile ./src/db/knexfile.ts --client pg migrate:latest",
"migration:status-dev": "knex --knexfile ./src/db/knexfile.ts --client pg migrate:status",
"migration:rollback-dev": "knex --knexfile ./src/db/knexfile.ts migrate:rollback",
"migration:unlock-dev": "knex --knexfile ./src/db/knexfile.ts migrate:unlock",
"migrate:org": "tsx ./scripts/migrate-organization.ts",
"seed:new": "tsx ./scripts/create-seed-file.ts",
"seed": "knex --knexfile ./dist/db/knexfile.ts --client pg seed:run",
@@ -125,6 +117,7 @@
"vitest": "^1.2.2"
},
"dependencies": {
"@aws-sdk/client-cloudformation": "^3.750.0",
"@aws-sdk/client-elasticache": "^3.637.0",
"@aws-sdk/client-iam": "^3.525.0",
"@aws-sdk/client-kms": "^3.609.0",
@@ -146,18 +139,17 @@
"@fastify/swagger": "^8.14.0",
"@fastify/swagger-ui": "^2.1.0",
"@google-cloud/kms": "^4.5.0",
"@infisical/quic": "^1.0.8",
"@node-saml/passport-saml": "^4.0.4",
"@octokit/auth-app": "^7.1.1",
"@octokit/plugin-retry": "^5.0.5",
"@octokit/rest": "^20.0.2",
"@octokit/auth-app": "^7.1.5",
"@octokit/plugin-retry": "^7.1.4",
"@octokit/rest": "^21.1.1",
"@octokit/webhooks-types": "^7.3.1",
"@octopusdeploy/api-client": "^3.4.1",
"@opentelemetry/api": "^1.9.0",
"@opentelemetry/auto-instrumentations-node": "^0.53.0",
"@opentelemetry/exporter-metrics-otlp-proto": "^0.55.0",
"@opentelemetry/exporter-prometheus": "^0.55.0",
"@opentelemetry/instrumentation": "^0.55.0",
"@opentelemetry/instrumentation-http": "^0.57.2",
"@opentelemetry/resources": "^1.28.0",
"@opentelemetry/sdk-metrics": "^1.28.0",
"@opentelemetry/semantic-conventions": "^1.27.0",
@@ -170,6 +162,7 @@
"@ucast/mongo2js": "^1.3.4",
"ajv": "^8.12.0",
"argon2": "^0.31.2",
"aws-cdk-lib": "^2.180.0",
"aws-sdk": "^2.1553.0",
"axios": "^1.6.7",
"axios-retry": "^4.0.0",
@@ -178,7 +171,6 @@
"cassandra-driver": "^4.7.2",
"connect-redis": "^7.1.1",
"cron": "^3.1.7",
"dd-trace": "^5.40.0",
"dotenv": "^16.4.1",
"fastify": "^4.28.1",
"fastify-plugin": "^4.5.1",
@@ -187,7 +179,6 @@
"handlebars": "^4.7.8",
"hdb": "^0.19.10",
"ioredis": "^5.3.2",
"isomorphic-dompurify": "^2.22.0",
"jmespath": "^0.16.0",
"jsonwebtoken": "^9.0.2",
"jsrp": "^0.2.4",

View File

@@ -13,13 +13,13 @@ import { TCertificateEstServiceFactory } from "@app/ee/services/certificate-est/
import { TDynamicSecretServiceFactory } from "@app/ee/services/dynamic-secret/dynamic-secret-service";
import { TDynamicSecretLeaseServiceFactory } from "@app/ee/services/dynamic-secret-lease/dynamic-secret-lease-service";
import { TExternalKmsServiceFactory } from "@app/ee/services/external-kms/external-kms-service";
import { TGatewayServiceFactory } from "@app/ee/services/gateway/gateway-service";
import { TGroupServiceFactory } from "@app/ee/services/group/group-service";
import { TIdentityProjectAdditionalPrivilegeServiceFactory } from "@app/ee/services/identity-project-additional-privilege/identity-project-additional-privilege-service";
import { TIdentityProjectAdditionalPrivilegeV2ServiceFactory } from "@app/ee/services/identity-project-additional-privilege-v2/identity-project-additional-privilege-v2-service";
import { TKmipClientDALFactory } from "@app/ee/services/kmip/kmip-client-dal";
import { TKmipOperationServiceFactory } from "@app/ee/services/kmip/kmip-operation-service";
import { TKmipServiceFactory } from "@app/ee/services/kmip/kmip-service";
import { TDedicatedInstanceServiceFactory } from "@app/ee/services/dedicated-instance/dedicated-instance-service";
import { TLdapConfigServiceFactory } from "@app/ee/services/ldap-config/ldap-config-service";
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
import { TOidcConfigServiceFactory } from "@app/ee/services/oidc/oidc-config-service";
@@ -229,7 +229,7 @@ declare module "fastify" {
secretSync: TSecretSyncServiceFactory;
kmip: TKmipServiceFactory;
kmipOperation: TKmipOperationServiceFactory;
gateway: TGatewayServiceFactory;
dedicatedInstance: TDedicatedInstanceServiceFactory;
};
// this is exclusive use for middlewares in which we need to inject data
// everywhere else access using service layer

View File

@@ -68,9 +68,6 @@ import {
TExternalKms,
TExternalKmsInsert,
TExternalKmsUpdate,
TGateways,
TGatewaysInsert,
TGatewaysUpdate,
TGitAppInstallSessions,
TGitAppInstallSessionsInsert,
TGitAppInstallSessionsUpdate,
@@ -182,9 +179,6 @@ import {
TOrgBots,
TOrgBotsInsert,
TOrgBotsUpdate,
TOrgGatewayConfig,
TOrgGatewayConfigInsert,
TOrgGatewayConfigUpdate,
TOrgMemberships,
TOrgMembershipsInsert,
TOrgMembershipsUpdate,
@@ -206,9 +200,6 @@ import {
TProjectEnvironments,
TProjectEnvironmentsInsert,
TProjectEnvironmentsUpdate,
TProjectGateways,
TProjectGatewaysInsert,
TProjectGatewaysUpdate,
TProjectKeys,
TProjectKeysInsert,
TProjectKeysUpdate,
@@ -939,16 +930,10 @@ declare module "knex/types/tables" {
TKmipClientCertificatesInsert,
TKmipClientCertificatesUpdate
>;
[TableName.Gateway]: KnexOriginal.CompositeTableType<TGateways, TGatewaysInsert, TGatewaysUpdate>;
[TableName.ProjectGateway]: KnexOriginal.CompositeTableType<
TProjectGateways,
TProjectGatewaysInsert,
TProjectGatewaysUpdate
>;
[TableName.OrgGatewayConfig]: KnexOriginal.CompositeTableType<
TOrgGatewayConfig,
TOrgGatewayConfigInsert,
TOrgGatewayConfigUpdate
[TableName.DedicatedInstances]: KnexOriginal.CompositeTableType<
TDedicatedInstances,
TDedicatedInstancesInsert,
TDedicatedInstancesUpdate
>;
}
}

View File

@@ -39,7 +39,7 @@ export default {
},
migrations: {
tableName: "infisical_migrations",
loadExtensions: [".mjs", ".ts"]
loadExtensions: [".mjs"]
}
},
production: {
@@ -64,7 +64,7 @@ export default {
},
migrations: {
tableName: "infisical_migrations",
loadExtensions: [".mjs", ".ts"]
loadExtensions: [".mjs"]
}
}
} as Knex.Config;

View File

@@ -0,0 +1,56 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";
export async function up(knex: Knex): Promise<void> {
const isTablePresent = await knex.schema.hasTable(TableName.DedicatedInstances);
if (!isTablePresent) {
await knex.schema.createTable(TableName.DedicatedInstances, (t) => {
t.uuid("id").primary().defaultTo(knex.fn.uuid());
t.uuid("orgId").notNullable();
t.string("instanceName").notNullable();
t.string("subdomain").notNullable().unique();
t.enum("status", ["RUNNING", "UPGRADING", "PROVISIONING", "FAILED"]).notNullable();
t.string("rdsInstanceType").notNullable();
t.string("elasticCacheType").notNullable();
t.integer("elasticContainerMemory").notNullable();
t.integer("elasticContainerCpu").notNullable();
t.string("region").notNullable();
t.string("version").notNullable();
t.integer("backupRetentionDays").defaultTo(7);
t.timestamp("lastBackupTime").nullable();
t.timestamp("lastUpgradeTime").nullable();
t.boolean("publiclyAccessible").defaultTo(false);
t.string("vpcId").nullable();
t.specificType("subnetIds", "text[]").nullable();
t.jsonb("tags").nullable();
t.boolean("multiAz").defaultTo(true);
t.integer("rdsAllocatedStorage").defaultTo(50);
t.integer("rdsBackupRetentionDays").defaultTo(7);
t.integer("redisNumCacheNodes").defaultTo(1);
t.integer("desiredContainerCount").defaultTo(1);
t.string("stackName").nullable();
t.text("rdsInstanceId").nullable();
t.text("redisClusterId").nullable();
t.text("ecsClusterArn").nullable();
t.text("ecsServiceArn").nullable();
t.specificType("securityGroupIds", "text[]").nullable();
t.text("error").nullable();
t.timestamps(true, true, true);
t.foreign("orgId")
.references("id")
.inTable(TableName.Organization)
.onDelete("CASCADE");
t.unique(["orgId", "instanceName"]);
});
}
await createOnUpdateTrigger(knex, TableName.DedicatedInstances);
}
export async function down(knex: Knex): Promise<void> {
await dropOnUpdateTrigger(knex, TableName.DedicatedInstances);
await knex.schema.dropTableIfExists(TableName.DedicatedInstances);
}
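
For orientation, a minimal sketch of driving migrations like this one programmatically with Knex; the connection settings are placeholders, and the repo's migration:latest script wraps the same API through the compiled knexfile:

    import knex from "knex";

    // Placeholder connection; the real settings live in knexfile.mjs.
    const db = knex({ client: "pg", connection: process.env.DB_CONNECTION_URI });

    // Apply every pending migration (including the dedicated_instances table above),
    // then undo the most recent batch, exercising the up()/down() pair.
    await db.migrate.latest();
    await db.migrate.rollback();
    await db.destroy();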

View File

@@ -0,0 +1,16 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
// First drop the existing constraint
await knex.raw(`ALTER TABLE ${TableName.DedicatedInstances} DROP CONSTRAINT IF EXISTS dedicated_instances_status_check`);
// Add the new constraint with updated enum values
await knex.raw(`ALTER TABLE ${TableName.DedicatedInstances} ADD CONSTRAINT dedicated_instances_status_check CHECK (status IN ('RUNNING', 'UPGRADING', 'PROVISIONING', 'FAILED'))`);
}
export async function down(knex: Knex): Promise<void> {
// Revert back to original constraint
await knex.raw(`ALTER TABLE ${TableName.DedicatedInstances} DROP CONSTRAINT IF EXISTS dedicated_instances_status_check`);
await knex.raw(`ALTER TABLE ${TableName.DedicatedInstances} ADD CONSTRAINT dedicated_instances_status_check CHECK (status IN ('RUNNING', 'UPGRADING', 'PROVISIONING'))`);
}
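
Since the TableName enum values are the literal Postgres table names (DedicatedInstances = "dedicated_instances", see models.ts further down), the template literals above render to plain SQL of the form ALTER TABLE dedicated_instances DROP CONSTRAINT IF EXISTS dedicated_instances_status_check, followed by the matching ADD CONSTRAINT ... CHECK (status IN (...)) statement with the updated value list.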

View File

@@ -1,115 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasTable(TableName.OrgGatewayConfig))) {
await knex.schema.createTable(TableName.OrgGatewayConfig, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.string("rootCaKeyAlgorithm").notNullable();
t.datetime("rootCaIssuedAt").notNullable();
t.datetime("rootCaExpiration").notNullable();
t.string("rootCaSerialNumber").notNullable();
t.binary("encryptedRootCaCertificate").notNullable();
t.binary("encryptedRootCaPrivateKey").notNullable();
t.datetime("clientCaIssuedAt").notNullable();
t.datetime("clientCaExpiration").notNullable();
t.string("clientCaSerialNumber");
t.binary("encryptedClientCaCertificate").notNullable();
t.binary("encryptedClientCaPrivateKey").notNullable();
t.string("clientCertSerialNumber").notNullable();
t.string("clientCertKeyAlgorithm").notNullable();
t.datetime("clientCertIssuedAt").notNullable();
t.datetime("clientCertExpiration").notNullable();
t.binary("encryptedClientCertificate").notNullable();
t.binary("encryptedClientPrivateKey").notNullable();
t.datetime("gatewayCaIssuedAt").notNullable();
t.datetime("gatewayCaExpiration").notNullable();
t.string("gatewayCaSerialNumber").notNullable();
t.binary("encryptedGatewayCaCertificate").notNullable();
t.binary("encryptedGatewayCaPrivateKey").notNullable();
t.uuid("orgId").notNullable();
t.foreign("orgId").references("id").inTable(TableName.Organization).onDelete("CASCADE");
t.unique("orgId");
t.timestamps(true, true, true);
});
await createOnUpdateTrigger(knex, TableName.OrgGatewayConfig);
}
if (!(await knex.schema.hasTable(TableName.Gateway))) {
await knex.schema.createTable(TableName.Gateway, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.string("name").notNullable();
t.string("serialNumber").notNullable();
t.string("keyAlgorithm").notNullable();
t.datetime("issuedAt").notNullable();
t.datetime("expiration").notNullable();
t.datetime("heartbeat");
t.binary("relayAddress").notNullable();
t.uuid("orgGatewayRootCaId").notNullable();
t.foreign("orgGatewayRootCaId").references("id").inTable(TableName.OrgGatewayConfig).onDelete("CASCADE");
t.uuid("identityId").notNullable();
t.foreign("identityId").references("id").inTable(TableName.Identity).onDelete("CASCADE");
t.timestamps(true, true, true);
});
await createOnUpdateTrigger(knex, TableName.Gateway);
}
if (!(await knex.schema.hasTable(TableName.ProjectGateway))) {
await knex.schema.createTable(TableName.ProjectGateway, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.string("projectId").notNullable();
t.foreign("projectId").references("id").inTable(TableName.Project).onDelete("CASCADE");
t.uuid("gatewayId").notNullable();
t.foreign("gatewayId").references("id").inTable(TableName.Gateway).onDelete("CASCADE");
t.timestamps(true, true, true);
});
await createOnUpdateTrigger(knex, TableName.ProjectGateway);
}
if (await knex.schema.hasTable(TableName.DynamicSecret)) {
const doesGatewayColExist = await knex.schema.hasColumn(TableName.DynamicSecret, "gatewayId");
await knex.schema.alterTable(TableName.DynamicSecret, (t) => {
// not setting a foreign constraint so that cascade effects are not triggered
if (!doesGatewayColExist) {
t.uuid("projectGatewayId");
t.foreign("projectGatewayId").references("id").inTable(TableName.ProjectGateway);
}
});
}
}
export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasTable(TableName.DynamicSecret)) {
const doesGatewayColExist = await knex.schema.hasColumn(TableName.DynamicSecret, "projectGatewayId");
await knex.schema.alterTable(TableName.DynamicSecret, (t) => {
if (doesGatewayColExist) t.dropColumn("projectGatewayId");
});
}
await knex.schema.dropTableIfExists(TableName.ProjectGateway);
await dropOnUpdateTrigger(knex, TableName.ProjectGateway);
await knex.schema.dropTableIfExists(TableName.Gateway);
await dropOnUpdateTrigger(knex, TableName.Gateway);
await knex.schema.dropTableIfExists(TableName.OrgGatewayConfig);
await dropOnUpdateTrigger(knex, TableName.OrgGatewayConfig);
}

View File

@@ -1,25 +0,0 @@
import { Knex } from "knex";
import { SecretSharingType } from "@app/services/secret-sharing/secret-sharing-types";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const hasSharingTypeColumn = await knex.schema.hasColumn(TableName.SecretSharing, "type");
await knex.schema.alterTable(TableName.SecretSharing, (table) => {
if (!hasSharingTypeColumn) {
table.string("type", 32).defaultTo(SecretSharingType.Share).notNullable();
}
});
}
export async function down(knex: Knex): Promise<void> {
const hasSharingTypeColumn = await knex.schema.hasColumn(TableName.SecretSharing, "type");
await knex.schema.alterTable(TableName.SecretSharing, (table) => {
if (hasSharingTypeColumn) {
table.dropColumn("type");
}
});
}

View File

@@ -1,31 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const hasAuthConsentContentCol = await knex.schema.hasColumn(TableName.SuperAdmin, "authConsentContent");
const hasPageFrameContentCol = await knex.schema.hasColumn(TableName.SuperAdmin, "pageFrameContent");
if (await knex.schema.hasTable(TableName.SuperAdmin)) {
await knex.schema.alterTable(TableName.SuperAdmin, (t) => {
if (!hasAuthConsentContentCol) {
t.text("authConsentContent");
}
if (!hasPageFrameContentCol) {
t.text("pageFrameContent");
}
});
}
}
export async function down(knex: Knex): Promise<void> {
const hasAuthConsentContentCol = await knex.schema.hasColumn(TableName.SuperAdmin, "authConsentContent");
const hasPageFrameContentCol = await knex.schema.hasColumn(TableName.SuperAdmin, "pageFrameContent");
await knex.schema.alterTable(TableName.SuperAdmin, (t) => {
if (hasAuthConsentContentCol) {
t.dropColumn("authConsentContent");
}
if (hasPageFrameContentCol) {
t.dropColumn("pageFrameContent");
}
});
}

View File

@@ -1,35 +0,0 @@
import { Knex } from "knex";
import { TableName } from "@app/db/schemas";
export async function up(knex: Knex): Promise<void> {
for await (const tableName of [
TableName.SecretV2,
TableName.SecretVersionV2,
TableName.SecretApprovalRequestSecretV2
]) {
const hasReminderNoteCol = await knex.schema.hasColumn(tableName, "reminderNote");
if (hasReminderNoteCol) {
await knex.schema.alterTable(tableName, (t) => {
t.string("reminderNote", 1024).alter();
});
}
}
}
export async function down(knex: Knex): Promise<void> {
for await (const tableName of [
TableName.SecretV2,
TableName.SecretVersionV2,
TableName.SecretApprovalRequestSecretV2
]) {
const hasReminderNoteCol = await knex.schema.hasColumn(tableName, "reminderNote");
if (hasReminderNoteCol) {
await knex.schema.alterTable(tableName, (t) => {
t.string("reminderNote").alter();
});
}
}
}

View File

@@ -1,23 +0,0 @@
import { Knex } from "knex";
import { TableName } from "@app/db/schemas";
export async function up(knex: Knex): Promise<void> {
const hasProjectDescription = await knex.schema.hasColumn(TableName.SecretFolder, "description");
if (!hasProjectDescription) {
await knex.schema.alterTable(TableName.SecretFolder, (t) => {
t.string("description");
});
}
}
export async function down(knex: Knex): Promise<void> {
const hasProjectDescription = await knex.schema.hasColumn(TableName.SecretFolder, "description");
if (hasProjectDescription) {
await knex.schema.alterTable(TableName.SecretFolder, (t) => {
t.dropColumn("description");
});
}
}

View File

@@ -1,19 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasColumn(TableName.SecretApprovalRequestReviewer, "comment"))) {
await knex.schema.alterTable(TableName.SecretApprovalRequestReviewer, (t) => {
t.string("comment");
});
}
}
export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasColumn(TableName.SecretApprovalRequestReviewer, "comment")) {
await knex.schema.alterTable(TableName.SecretApprovalRequestReviewer, (t) => {
t.dropColumn("comment");
});
}
}

View File

@@ -1,45 +0,0 @@
import { Knex } from "knex";
import { TableName } from "@app/db/schemas";
export async function up(knex: Knex): Promise<void> {
if (await knex.schema.hasTable(TableName.SecretVersionV2)) {
const hasSecretVersionV2UserActorId = await knex.schema.hasColumn(TableName.SecretVersionV2, "userActorId");
const hasSecretVersionV2IdentityActorId = await knex.schema.hasColumn(TableName.SecretVersionV2, "identityActorId");
const hasSecretVersionV2ActorType = await knex.schema.hasColumn(TableName.SecretVersionV2, "actorType");
await knex.schema.alterTable(TableName.SecretVersionV2, (t) => {
if (!hasSecretVersionV2UserActorId) {
t.uuid("userActorId");
t.foreign("userActorId").references("id").inTable(TableName.Users);
}
if (!hasSecretVersionV2IdentityActorId) {
t.uuid("identityActorId");
t.foreign("identityActorId").references("id").inTable(TableName.Identity);
}
if (!hasSecretVersionV2ActorType) {
t.string("actorType");
}
});
}
}
export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasTable(TableName.SecretVersionV2)) {
const hasSecretVersionV2UserActorId = await knex.schema.hasColumn(TableName.SecretVersionV2, "userActorId");
const hasSecretVersionV2IdentityActorId = await knex.schema.hasColumn(TableName.SecretVersionV2, "identityActorId");
const hasSecretVersionV2ActorType = await knex.schema.hasColumn(TableName.SecretVersionV2, "actorType");
await knex.schema.alterTable(TableName.SecretVersionV2, (t) => {
if (hasSecretVersionV2UserActorId) {
t.dropColumn("userActorId");
}
if (hasSecretVersionV2IdentityActorId) {
t.dropColumn("identityActorId");
}
if (hasSecretVersionV2ActorType) {
t.dropColumn("actorType");
}
});
}
}

View File

@@ -0,0 +1,34 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const DedicatedInstancesSchema = z.object({
id: z.string().uuid(),
orgId: z.string().uuid(),
instanceName: z.string(),
status: z.string(),
rdsInstanceType: z.string(),
elasticCacheType: z.string(),
elasticContainerMemory: z.number(),
elasticContainerCpu: z.number(),
region: z.string(),
version: z.string(),
backupRetentionDays: z.number().default(7).nullable().optional(),
lastBackupTime: z.date().nullable().optional(),
lastUpgradeTime: z.date().nullable().optional(),
publiclyAccessible: z.boolean().default(false).nullable().optional(),
vpcId: z.string().nullable().optional(),
subnetIds: z.string().array().nullable().optional(),
tags: z.unknown().nullable().optional(),
createdAt: z.date(),
updatedAt: z.date()
});
export type TDedicatedInstances = z.infer<typeof DedicatedInstancesSchema>;
export type TDedicatedInstancesInsert = Omit<z.input<typeof DedicatedInstancesSchema>, TImmutableDBKeys>;
export type TDedicatedInstancesUpdate = Partial<Omit<z.input<typeof DedicatedInstancesSchema>, TImmutableDBKeys>>;
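
A brief usage sketch for the generated schema and its helper types; the row value and all concrete field values below are illustrative:

    import { DedicatedInstancesSchema, TDedicatedInstancesInsert } from "@app/db/schemas";

    declare const rowFromDb: unknown; // stand-in for a row fetched from Postgres

    // parse() validates at runtime and narrows the value to TDedicatedInstances;
    // it throws a ZodError if the row does not match the schema.
    const instance = DedicatedInstancesSchema.parse(rowFromDb);

    // Insert payloads omit the immutable keys (id, createdAt, updatedAt).
    const payload: TDedicatedInstancesInsert = {
      orgId: "00000000-0000-0000-0000-000000000000", // illustrative UUID
      instanceName: "acme-prod",
      status: "PROVISIONING",
      rdsInstanceType: "db.t3.small",
      elasticCacheType: "cache.t3.micro",
      elasticContainerMemory: 2048,
      elasticContainerCpu: 1024,
      region: "us-east-1",
      version: "v0.1.0"
    };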

View File

@@ -26,8 +26,7 @@ export const DynamicSecretsSchema = z.object({
statusDetails: z.string().nullable().optional(),
createdAt: z.date(),
updatedAt: z.date(),
encryptedInput: zodBuffer,
projectGatewayId: z.string().uuid().nullable().optional()
encryptedInput: zodBuffer
});
export type TDynamicSecrets = z.infer<typeof DynamicSecretsSchema>;

View File

@@ -1,29 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { zodBuffer } from "@app/lib/zod";
import { TImmutableDBKeys } from "./models";
export const GatewaysSchema = z.object({
id: z.string().uuid(),
name: z.string(),
serialNumber: z.string(),
keyAlgorithm: z.string(),
issuedAt: z.date(),
expiration: z.date(),
heartbeat: z.date().nullable().optional(),
relayAddress: zodBuffer,
orgGatewayRootCaId: z.string().uuid(),
identityId: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date()
});
export type TGateways = z.infer<typeof GatewaysSchema>;
export type TGatewaysInsert = Omit<z.input<typeof GatewaysSchema>, TImmutableDBKeys>;
export type TGatewaysUpdate = Partial<Omit<z.input<typeof GatewaysSchema>, TImmutableDBKeys>>;

View File

@@ -20,7 +20,6 @@ export * from "./certificates";
export * from "./dynamic-secret-leases";
export * from "./dynamic-secrets";
export * from "./external-kms";
export * from "./gateways";
export * from "./git-app-install-sessions";
export * from "./git-app-org";
export * from "./group-project-membership-roles";
@@ -58,7 +57,6 @@ export * from "./ldap-group-maps";
export * from "./models";
export * from "./oidc-configs";
export * from "./org-bots";
export * from "./org-gateway-config";
export * from "./org-memberships";
export * from "./org-roles";
export * from "./organizations";
@@ -67,7 +65,6 @@ export * from "./pki-collection-items";
export * from "./pki-collections";
export * from "./project-bots";
export * from "./project-environments";
export * from "./project-gateways";
export * from "./project-keys";
export * from "./project-memberships";
export * from "./project-roles";

View File

@@ -2,6 +2,7 @@ import { z } from "zod";
export enum TableName {
Users = "users",
Organization = "organizations",
SshCertificateAuthority = "ssh_certificate_authorities",
SshCertificateAuthoritySecret = "ssh_certificate_authority_secrets",
SshCertificateTemplate = "ssh_certificate_templates",
@@ -29,7 +30,6 @@ export enum TableName {
AuthTokens = "auth_tokens",
AuthTokenSession = "auth_token_sessions",
BackupPrivateKey = "backup_private_key",
Organization = "organizations",
OrgMembership = "org_memberships",
OrgRoles = "org_roles",
OrgBot = "org_bots",
@@ -113,10 +113,6 @@ export enum TableName {
SecretApprovalRequestSecretTagV2 = "secret_approval_request_secret_tags_v2",
SnapshotSecretV2 = "secret_snapshot_secrets_v2",
ProjectSplitBackfillIds = "project_split_backfill_ids",
// Gateway
OrgGatewayConfig = "org_gateway_config",
Gateway = "gateways",
ProjectGateway = "project_gateways",
// junction tables with tags
SecretV2JnTag = "secret_v2_tag_junction",
JnSecretTag = "secret_tag_junction",
@@ -140,7 +136,8 @@ export enum TableName {
KmipClient = "kmip_clients",
KmipOrgConfig = "kmip_org_configs",
KmipOrgServerCertificates = "kmip_org_server_certificates",
KmipClientCertificates = "kmip_client_certificates"
KmipClientCertificates = "kmip_client_certificates",
DedicatedInstances = "dedicated_instances"
}
export type TImmutableDBKeys = "id" | "createdAt" | "updatedAt";

View File

@@ -1,43 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { zodBuffer } from "@app/lib/zod";
import { TImmutableDBKeys } from "./models";
export const OrgGatewayConfigSchema = z.object({
id: z.string().uuid(),
rootCaKeyAlgorithm: z.string(),
rootCaIssuedAt: z.date(),
rootCaExpiration: z.date(),
rootCaSerialNumber: z.string(),
encryptedRootCaCertificate: zodBuffer,
encryptedRootCaPrivateKey: zodBuffer,
clientCaIssuedAt: z.date(),
clientCaExpiration: z.date(),
clientCaSerialNumber: z.string().nullable().optional(),
encryptedClientCaCertificate: zodBuffer,
encryptedClientCaPrivateKey: zodBuffer,
clientCertSerialNumber: z.string(),
clientCertKeyAlgorithm: z.string(),
clientCertIssuedAt: z.date(),
clientCertExpiration: z.date(),
encryptedClientCertificate: zodBuffer,
encryptedClientPrivateKey: zodBuffer,
gatewayCaIssuedAt: z.date(),
gatewayCaExpiration: z.date(),
gatewayCaSerialNumber: z.string(),
encryptedGatewayCaCertificate: zodBuffer,
encryptedGatewayCaPrivateKey: zodBuffer,
orgId: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date()
});
export type TOrgGatewayConfig = z.infer<typeof OrgGatewayConfigSchema>;
export type TOrgGatewayConfigInsert = Omit<z.input<typeof OrgGatewayConfigSchema>, TImmutableDBKeys>;
export type TOrgGatewayConfigUpdate = Partial<Omit<z.input<typeof OrgGatewayConfigSchema>, TImmutableDBKeys>>;

View File

@@ -1,20 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const ProjectGatewaysSchema = z.object({
id: z.string().uuid(),
projectId: z.string(),
gatewayId: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date()
});
export type TProjectGateways = z.infer<typeof ProjectGatewaysSchema>;
export type TProjectGatewaysInsert = Omit<z.input<typeof ProjectGatewaysSchema>, TImmutableDBKeys>;
export type TProjectGatewaysUpdate = Partial<Omit<z.input<typeof ProjectGatewaysSchema>, TImmutableDBKeys>>;

View File

@@ -13,8 +13,7 @@ export const SecretApprovalRequestsReviewersSchema = z.object({
requestId: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date(),
reviewerUserId: z.string().uuid(),
comment: z.string().nullable().optional()
reviewerUserId: z.string().uuid()
});
export type TSecretApprovalRequestsReviewers = z.infer<typeof SecretApprovalRequestsReviewersSchema>;

View File

@@ -15,8 +15,7 @@ export const SecretFoldersSchema = z.object({
updatedAt: z.date(),
envId: z.string().uuid(),
parentId: z.string().uuid().nullable().optional(),
isReserved: z.boolean().default(false).nullable().optional(),
description: z.string().nullable().optional()
isReserved: z.boolean().default(false).nullable().optional()
});
export type TSecretFolders = z.infer<typeof SecretFoldersSchema>;

View File

@@ -12,7 +12,6 @@ import { TImmutableDBKeys } from "./models";
export const SecretSharingSchema = z.object({
id: z.string().uuid(),
encryptedValue: z.string().nullable().optional(),
type: z.string(),
iv: z.string().nullable().optional(),
tag: z.string().nullable().optional(),
hashedHex: z.string().nullable().optional(),

View File

@@ -25,10 +25,7 @@ export const SecretVersionsV2Schema = z.object({
folderId: z.string().uuid(),
userId: z.string().uuid().nullable().optional(),
createdAt: z.date(),
updatedAt: z.date(),
userActorId: z.string().uuid().nullable().optional(),
identityActorId: z.string().uuid().nullable().optional(),
actorType: z.string().nullable().optional()
updatedAt: z.date()
});
export type TSecretVersionsV2 = z.infer<typeof SecretVersionsV2Schema>;

View File

@@ -23,9 +23,7 @@ export const SuperAdminSchema = z.object({
defaultAuthOrgId: z.string().uuid().nullable().optional(),
enabledLoginMethods: z.string().array().nullable().optional(),
encryptedSlackClientId: zodBuffer.nullable().optional(),
encryptedSlackClientSecret: zodBuffer.nullable().optional(),
authConsentContent: z.string().nullable().optional(),
pageFrameContent: z.string().nullable().optional()
encryptedSlackClientSecret: zodBuffer.nullable().optional()
});
export type TSuperAdmin = z.infer<typeof SuperAdminSchema>;

View File

@@ -0,0 +1 @@

View File

@@ -0,0 +1,141 @@
import { z } from "zod";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
const DedicatedInstanceSchema = z.object({
id: z.string().uuid(),
orgId: z.string().uuid(),
instanceName: z.string().min(1),
subdomain: z.string().min(1),
status: z.enum(["RUNNING", "UPGRADING", "PROVISIONING", "FAILED"]),
rdsInstanceType: z.string(),
elasticCacheType: z.string(),
elasticContainerMemory: z.number(),
elasticContainerCpu: z.number(),
region: z.string(),
version: z.string(),
backupRetentionDays: z.number(),
lastBackupTime: z.date().nullable(),
lastUpgradeTime: z.date().nullable(),
publiclyAccessible: z.boolean(),
vpcId: z.string().nullable(),
subnetIds: z.array(z.string()).nullable(),
tags: z.record(z.string()).nullable(),
createdAt: z.date(),
updatedAt: z.date()
});
const CreateDedicatedInstanceSchema = z.object({
instanceName: z.string().min(1),
subdomain: z.string().min(1),
provider: z.literal('aws'), // Only allow 'aws' as provider
region: z.string(),
publiclyAccessible: z.boolean().default(false)
});
const DedicatedInstanceDetailsSchema = DedicatedInstanceSchema.extend({
stackStatus: z.string().optional(),
stackStatusReason: z.string().optional(),
error: z.string().nullable(),
events: z.array(
z.object({
timestamp: z.date().optional(),
logicalResourceId: z.string().optional(),
resourceType: z.string().optional(),
resourceStatus: z.string().optional(),
resourceStatusReason: z.string().optional()
})
).optional()
});
export const registerDedicatedInstanceRouter = async (server: FastifyZodProvider) => {
server.route({
method: "GET",
url: "/:organizationId/dedicated-instances",
config: {
rateLimit: readLimit
},
schema: {
params: z.object({
organizationId: z.string().uuid()
}),
response: {
200: z.object({
instances: DedicatedInstanceSchema.array()
})
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const instances = await server.services.dedicatedInstance.listInstances({
orgId: req.params.organizationId
});
return { instances };
}
});
server.route({
method: "POST",
url: "/:organizationId/dedicated-instances",
config: {
rateLimit: writeLimit
},
schema: {
params: z.object({
organizationId: z.string().uuid()
}),
body: CreateDedicatedInstanceSchema
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const { organizationId } = req.params;
const { instanceName, subdomain, region, publiclyAccessible, provider} = req.body;
const instance = await server.services.dedicatedInstance.createInstance({
orgId: organizationId,
instanceName,
subdomain,
region,
publiclyAccessible,
provider: provider,
dryRun: false,
});
return instance;
}
});
server.route({
method: "GET",
url: "/:organizationId/dedicated-instances/:instanceId",
config: {
rateLimit: readLimit
},
schema: {
params: z.object({
organizationId: z.string().uuid(),
instanceId: z.string().uuid()
}),
response: {
200: DedicatedInstanceDetailsSchema
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const { organizationId, instanceId } = req.params;
const { instance, stackStatus, stackStatusReason, events } = await server.services.dedicatedInstance.getInstance({
orgId: organizationId,
instanceId
});
return {
...instance,
stackStatus,
stackStatusReason,
events
};
}
});
};
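
A hedged sketch of exercising these routes from a TypeScript client; the host, the /api/v1 mount prefix, the token, and the IDs are assumptions, while the paths and body fields mirror the router schemas above:

    // All concrete values here are placeholders.
    const base = "https://infisical.example.com/api/v1/organizations";
    const orgId = "00000000-0000-0000-0000-000000000000";
    const headers = {
      Authorization: `Bearer ${process.env.INFISICAL_JWT}`,
      "Content-Type": "application/json"
    };

    // List an organization's dedicated instances.
    const listRes = await fetch(`${base}/${orgId}/dedicated-instances`, { headers });
    const { instances } = await listRes.json();

    // Provision a new instance; the body mirrors CreateDedicatedInstanceSchema.
    await fetch(`${base}/${orgId}/dedicated-instances`, {
      method: "POST",
      headers,
      body: JSON.stringify({
        instanceName: "acme-prod",
        subdomain: "acme",
        provider: "aws",
        region: "us-east-1",
        publiclyAccessible: false
      })
    });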

View File

@@ -1,265 +0,0 @@
import { z } from "zod";
import { GatewaysSchema } from "@app/db/schemas";
import { isValidIp } from "@app/lib/ip";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { slugSchema } from "@app/server/lib/schemas";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
const SanitizedGatewaySchema = GatewaysSchema.pick({
id: true,
identityId: true,
name: true,
createdAt: true,
updatedAt: true,
issuedAt: true,
serialNumber: true,
heartbeat: true
});
const isValidRelayAddress = (relayAddress: string) => {
const [ip, port] = relayAddress.split(":");
return isValidIp(ip) && Number(port) <= 65535 && Number(port) >= 40000;
};
export const registerGatewayRouter = async (server: FastifyZodProvider) => {
server.route({
method: "POST",
url: "/register-identity",
config: {
rateLimit: writeLimit
},
schema: {
response: {
200: z.object({
turnServerUsername: z.string(),
turnServerPassword: z.string(),
turnServerRealm: z.string(),
turnServerAddress: z.string(),
infisicalStaticIp: z.string().optional()
})
}
},
onRequest: verifyAuth([AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const relayDetails = await server.services.gateway.getGatewayRelayDetails(
req.permission.id,
req.permission.orgId,
req.permission.authMethod
);
return relayDetails;
}
});
server.route({
method: "POST",
url: "/exchange-cert",
config: {
rateLimit: writeLimit
},
schema: {
body: z.object({
relayAddress: z.string().refine(isValidRelayAddress, { message: "Invalid relay address" })
}),
response: {
200: z.object({
serialNumber: z.string(),
privateKey: z.string(),
certificate: z.string(),
certificateChain: z.string()
})
}
},
onRequest: verifyAuth([AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const gatewayCertificates = await server.services.gateway.exchangeAllocatedRelayAddress({
identityOrg: req.permission.orgId,
identityId: req.permission.id,
relayAddress: req.body.relayAddress,
identityOrgAuthMethod: req.permission.authMethod
});
return gatewayCertificates;
}
});
server.route({
method: "POST",
url: "/heartbeat",
config: {
rateLimit: writeLimit
},
schema: {
response: {
200: z.object({
message: z.string()
})
}
},
onRequest: verifyAuth([AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
await server.services.gateway.heartbeat({
orgPermission: req.permission
});
return { message: "Successfully registered heartbeat" };
}
});
server.route({
method: "GET",
url: "/",
config: {
rateLimit: readLimit
},
schema: {
querystring: z.object({
projectId: z.string().optional()
}),
response: {
200: z.object({
gateways: SanitizedGatewaySchema.extend({
identity: z.object({
name: z.string(),
id: z.string()
}),
projects: z
.object({
name: z.string(),
id: z.string(),
slug: z.string()
})
.array()
}).array()
})
}
},
onRequest: verifyAuth([AuthMode.IDENTITY_ACCESS_TOKEN, AuthMode.JWT]),
handler: async (req) => {
const gateways = await server.services.gateway.listGateways({
orgPermission: req.permission
});
return { gateways };
}
});
server.route({
method: "GET",
url: "/projects/:projectId",
config: {
rateLimit: readLimit
},
schema: {
params: z.object({
projectId: z.string()
}),
response: {
200: z.object({
gateways: SanitizedGatewaySchema.extend({
identity: z.object({
name: z.string(),
id: z.string()
}),
projectGatewayId: z.string()
}).array()
})
}
},
onRequest: verifyAuth([AuthMode.IDENTITY_ACCESS_TOKEN, AuthMode.JWT]),
handler: async (req) => {
const gateways = await server.services.gateway.getProjectGateways({
projectId: req.params.projectId,
projectPermission: req.permission
});
return { gateways };
}
});
server.route({
method: "GET",
url: "/:id",
config: {
rateLimit: readLimit
},
schema: {
params: z.object({
id: z.string()
}),
response: {
200: z.object({
gateway: SanitizedGatewaySchema.extend({
identity: z.object({
name: z.string(),
id: z.string()
})
})
})
}
},
onRequest: verifyAuth([AuthMode.IDENTITY_ACCESS_TOKEN, AuthMode.JWT]),
handler: async (req) => {
const gateway = await server.services.gateway.getGatewayById({
orgPermission: req.permission,
id: req.params.id
});
return { gateway };
}
});
server.route({
method: "PATCH",
url: "/:id",
config: {
rateLimit: writeLimit
},
schema: {
params: z.object({
id: z.string()
}),
body: z.object({
name: slugSchema({ field: "name" }).optional(),
projectIds: z.string().array().optional()
}),
response: {
200: z.object({
gateway: SanitizedGatewaySchema
})
}
},
onRequest: verifyAuth([AuthMode.IDENTITY_ACCESS_TOKEN, AuthMode.JWT]),
handler: async (req) => {
const gateway = await server.services.gateway.updateGatewayById({
orgPermission: req.permission,
id: req.params.id,
name: req.body.name,
projectIds: req.body.projectIds
});
return { gateway };
}
});
server.route({
method: "DELETE",
url: "/:id",
config: {
rateLimit: writeLimit
},
schema: {
params: z.object({
id: z.string()
}),
response: {
200: z.object({
gateway: SanitizedGatewaySchema
})
}
},
onRequest: verifyAuth([AuthMode.IDENTITY_ACCESS_TOKEN, AuthMode.JWT]),
handler: async (req) => {
const gateway = await server.services.gateway.deleteGatewayById({
orgPermission: req.permission,
id: req.params.id
});
return { gateway };
}
});
};

View File

@@ -4,10 +4,10 @@ import { registerAccessApprovalPolicyRouter } from "./access-approval-policy-rou
import { registerAccessApprovalRequestRouter } from "./access-approval-request-router";
import { registerAuditLogStreamRouter } from "./audit-log-stream-router";
import { registerCaCrlRouter } from "./certificate-authority-crl-router";
import { registerDedicatedInstanceRouter } from "./dedicated-instance-router";
import { registerDynamicSecretLeaseRouter } from "./dynamic-secret-lease-router";
import { registerDynamicSecretRouter } from "./dynamic-secret-router";
import { registerExternalKmsRouter } from "./external-kms-router";
import { registerGatewayRouter } from "./gateway-router";
import { registerGroupRouter } from "./group-router";
import { registerIdentityProjectAdditionalPrivilegeRouter } from "./identity-project-additional-privilege-router";
import { registerKmipRouter } from "./kmip-router";
@@ -39,6 +39,7 @@ export const registerV1EERoutes = async (server: FastifyZodProvider) => {
// org role starts with organization
await server.register(registerOrgRoleRouter, { prefix: "/organization" });
await server.register(registerLicenseRouter, { prefix: "/organizations" });
await server.register(registerDedicatedInstanceRouter, { prefix: "/organizations" });
await server.register(
async (projectRouter) => {
await projectRouter.register(registerProjectRoleRouter);
@@ -68,8 +69,6 @@ export const registerV1EERoutes = async (server: FastifyZodProvider) => {
{ prefix: "/dynamic-secrets" }
);
await server.register(registerGatewayRouter, { prefix: "/gateways" });
await server.register(
async (pkiRouter) => {
await pkiRouter.register(registerCaCrlRouter, { prefix: "/crl" });

View File

@@ -159,8 +159,7 @@ export const registerSecretApprovalRequestRouter = async (server: FastifyZodProv
id: z.string()
}),
body: z.object({
status: z.enum([ApprovalStatus.APPROVED, ApprovalStatus.REJECTED]),
comment: z.string().optional()
status: z.enum([ApprovalStatus.APPROVED, ApprovalStatus.REJECTED])
}),
response: {
200: z.object({
@@ -176,25 +175,8 @@ export const registerSecretApprovalRequestRouter = async (server: FastifyZodProv
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
approvalId: req.params.id,
status: req.body.status,
comment: req.body.comment
status: req.body.status
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
orgId: req.permission.orgId,
projectId: review.projectId,
event: {
type: EventType.SECRET_APPROVAL_REQUEST_REVIEW,
metadata: {
secretApprovalRequestId: review.requestId,
reviewedBy: review.reviewerUserId,
status: review.status as ApprovalStatus,
comment: review.comment || ""
}
}
});
return { review };
}
});
@@ -253,6 +235,7 @@ export const registerSecretApprovalRequestRouter = async (server: FastifyZodProv
const tagSchema = SecretTagsSchema.pick({
id: true,
slug: true,
name: true,
color: true
})
.array()
@@ -285,7 +268,7 @@ export const registerSecretApprovalRequestRouter = async (server: FastifyZodProv
environment: z.string(),
statusChangedByUser: approvalRequestUser.optional(),
committerUser: approvalRequestUser,
reviewers: approvalRequestUser.extend({ status: z.string(), comment: z.string().optional() }).array(),
reviewers: approvalRequestUser.extend({ status: z.string() }).array(),
secretPath: z.string(),
commits: secretRawSchema
.omit({ _id: true, environment: true, workspace: true, type: true, version: true })

View File

@@ -35,6 +35,7 @@ export const registerSnapshotRouter = async (server: FastifyZodProvider) => {
tags: SecretTagsSchema.pick({
id: true,
slug: true,
name: true,
color: true
}).array()
})

View File

@@ -22,7 +22,6 @@ import {
} from "@app/services/secret-sync/secret-sync-types";
import { KmipPermission } from "../kmip/kmip-enum";
import { ApprovalStatus } from "../secret-approval-request/secret-approval-request-types";
export type TListProjectAuditLogDTO = {
filter: {
@@ -166,7 +165,6 @@ export enum EventType {
SECRET_APPROVAL_REQUEST = "secret-approval-request",
SECRET_APPROVAL_CLOSED = "secret-approval-closed",
SECRET_APPROVAL_REOPENED = "secret-approval-reopened",
SECRET_APPROVAL_REQUEST_REVIEW = "secret-approval-request-review",
SIGN_SSH_KEY = "sign-ssh-key",
ISSUE_SSH_CREDS = "issue-ssh-creds",
CREATE_SSH_CA = "create-ssh-certificate-authority",
@@ -252,7 +250,6 @@ export enum EventType {
UPDATE_APP_CONNECTION = "update-app-connection",
DELETE_APP_CONNECTION = "delete-app-connection",
CREATE_SHARED_SECRET = "create-shared-secret",
CREATE_SECRET_REQUEST = "create-secret-request",
DELETE_SHARED_SECRET = "delete-shared-secret",
READ_SHARED_SECRET = "read-shared-secret",
GET_SECRET_SYNCS = "get-secret-syncs",
@@ -1144,7 +1141,6 @@ interface CreateFolderEvent {
folderId: string;
folderName: string;
folderPath: string;
description?: string;
};
}
@@ -1316,16 +1312,6 @@ interface SecretApprovalRequest {
};
}
interface SecretApprovalRequestReview {
type: EventType.SECRET_APPROVAL_REQUEST_REVIEW;
metadata: {
secretApprovalRequestId: string;
reviewedBy: string;
status: ApprovalStatus;
comment: string;
};
}
interface SignSshKey {
type: EventType.SIGN_SSH_KEY;
metadata: {
@@ -2034,15 +2020,6 @@ interface CreateSharedSecretEvent {
};
}
interface CreateSecretRequestEvent {
type: EventType.CREATE_SECRET_REQUEST;
metadata: {
id: string;
accessType: string;
name?: string;
};
}
interface DeleteSharedSecretEvent {
type: EventType.DELETE_SHARED_SECRET;
metadata: {
@@ -2493,6 +2470,4 @@ export type Event =
| KmipOperationActivateEvent
| KmipOperationRevokeEvent
| KmipOperationLocateEvent
| KmipOperationRegisterEvent
| CreateSecretRequestEvent
| SecretApprovalRequestReview;
| KmipOperationRegisterEvent;
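
For reference, a sketch of the review audit-log payload this hunk deletes, shaped from the removed interface; it assumes a `review` object like the one returned by the approval service in the router hunk above:

const reviewEvent: SecretApprovalRequestReview = {
  type: EventType.SECRET_APPROVAL_REQUEST_REVIEW,
  metadata: {
    secretApprovalRequestId: review.requestId,
    reviewedBy: review.reviewerUserId,
    status: review.status as ApprovalStatus,
    comment: review.comment || ""
  }
};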

View File

@@ -0,0 +1,28 @@
import { Knex } from "knex";
import { TDbClient } from "@app/db";
import { TableName } from "@app/db/schemas";
import { DatabaseError } from "@app/lib/errors";
import { ormify } from "@app/lib/knex";
export type TDedicatedInstanceDALFactory = ReturnType<typeof dedicatedInstanceDALFactory>;
export const dedicatedInstanceDALFactory = (db: TDbClient) => {
const dedicatedInstanceOrm = ormify(db, TableName.DedicatedInstances);
const findInstancesByOrgId = async (orgId: string, tx?: Knex) => {
try {
const instances = await (tx || db.replicaNode())(TableName.DedicatedInstances)
.where({ orgId })
.select("*");
return instances;
} catch (error) {
throw new DatabaseError({ error, name: "Find instances by org ID" });
}
};
return {
...dedicatedInstanceOrm,
findInstancesByOrgId
};
};
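
A minimal usage sketch, assuming a configured TDbClient (db); the org ID is illustrative. Note that ormify also supplies the generic create/updateById/findById surface the service layer below relies on:

const dedicatedInstanceDAL = dedicatedInstanceDALFactory(db);

// Reads from the replica node unless an explicit transaction is passed
const instances = await dedicatedInstanceDAL.findInstancesByOrgId("org_123");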

View File

@@ -0,0 +1,470 @@
import { ForbiddenError } from "@casl/ability";
import * as cdk from 'aws-cdk-lib';
import * as ec2 from 'aws-cdk-lib/aws-ec2';
import * as rds from 'aws-cdk-lib/aws-rds';
import * as elasticache from 'aws-cdk-lib/aws-elasticache';
import * as ecs from 'aws-cdk-lib/aws-ecs';
import * as ssm from 'aws-cdk-lib/aws-ssm';
import * as iam from 'aws-cdk-lib/aws-iam';
import { randomBytes } from 'crypto';
import { CloudFormationClient, CreateStackCommand, DescribeStacksCommand, DescribeStackEventsCommand } from "@aws-sdk/client-cloudformation";
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
import { BadRequestError, NotFoundError } from "@app/lib/errors";
import { ActorAuthMethod, ActorType } from "@app/services/auth/auth-type";
import { TDedicatedInstanceDALFactory } from "./dedicated-instance-dal";
type TDedicatedInstanceServiceFactoryDep = {
dedicatedInstanceDAL: TDedicatedInstanceDALFactory;
permissionService: TPermissionServiceFactory;
};
interface CreateInstanceParams {
orgId: string;
instanceName: string;
subdomain: string;
region: string;
provider: 'aws';
publiclyAccessible: boolean;
clusterSize: 'small' | 'medium' | 'large';
dryRun?: boolean;
}
interface GetInstanceParams {
orgId: string;
instanceId: string;
}
interface StackResource {
resourceType: string;
resourceStatus: string;
resourceStatusReason?: string;
}
interface StackEvent {
timestamp?: Date;
logicalResourceId?: string;
resourceType?: string;
resourceStatus?: string;
resourceStatusReason?: string;
}
interface InstanceDetails {
instance: Awaited<ReturnType<TDedicatedInstanceDALFactory["findById"]>>;
stackStatus?: string;
stackStatusReason?: string;
resources?: StackResource[];
events?: StackEvent[];
}
export type TDedicatedInstanceServiceFactory = ReturnType<typeof dedicatedInstanceServiceFactory>;
const CLUSTER_SIZES = {
small: {
containerCpu: 1024, // 1 vCPU
containerMemory: 2048, // 2GB
rdsInstanceType: 'db.t3.small',
elasticCacheType: 'cache.t3.micro',
desiredContainerCount: 1,
displayName: '1 vCPU, 2GB RAM'
},
medium: {
containerCpu: 2048, // 2 vCPU
containerMemory: 4096, // 4GB
rdsInstanceType: 'db.t3.medium',
elasticCacheType: 'cache.t3.small',
desiredContainerCount: 2,
displayName: '2 vCPU, 4GB RAM'
},
large: {
containerCpu: 4096, // 4 vCPU
containerMemory: 8192, // 8GB
rdsInstanceType: 'db.t3.large',
elasticCacheType: 'cache.t3.medium',
desiredContainerCount: 4,
displayName: '4 vCPU, 8GB RAM'
}
};
export const dedicatedInstanceServiceFactory = ({
dedicatedInstanceDAL,
permissionService
}: TDedicatedInstanceServiceFactoryDep) => {
const listInstances = async ({
orgId
}: {
orgId: string;
}) => {
const instances = await dedicatedInstanceDAL.findInstancesByOrgId(orgId);
return instances;
};
const createInstance = async (params: CreateInstanceParams) => {
const { orgId, instanceName, subdomain, region, publiclyAccessible, dryRun = false, clusterSize = 'small' } = params;
if (params.provider !== 'aws') {
throw new BadRequestError({ message: 'Only AWS provider is supported' });
}
const clusterConfig = CLUSTER_SIZES[clusterSize];
// Configure AWS SDK with environment variables
const awsConfig = {
credentials: {
accessKeyId: process.env.AWS_ACCESS_KEY_ID || '',
secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY || '',
},
region: region,
};
if (!awsConfig.credentials.accessKeyId || !awsConfig.credentials.secretAccessKey) {
throw new Error('AWS credentials not found in environment variables');
}
const internalTags = {
'managed-by': 'infisical',
'organization-id': orgId,
'instance-name': instanceName
};
// Create the instance record with expanded configuration
const instance = await dedicatedInstanceDAL.create({
orgId,
instanceName,
subdomain,
status: "PROVISIONING",
region,
rdsInstanceType: clusterConfig.rdsInstanceType,
elasticCacheType: clusterConfig.elasticCacheType,
elasticContainerMemory: clusterConfig.containerMemory,
elasticContainerCpu: clusterConfig.containerCpu,
publiclyAccessible,
tags: internalTags,
version: "1.0.0",
multiAz: true,
rdsAllocatedStorage: 50,
rdsBackupRetentionDays: 7,
redisNumCacheNodes: 1,
desiredContainerCount: clusterConfig.desiredContainerCount,
subnetIds: [],
securityGroupIds: []
});
// Generate unique names for resources
const stackName = `infisical-dedicated-${instance.id}`;
const dbPassword = randomBytes(32).toString('hex');
// Create CDK app and stack
const app = new cdk.App();
const stack = new cdk.Stack(app, stackName, {
env: { region },
tags: internalTags,
synthesizer: new cdk.DefaultStackSynthesizer({
generateBootstrapVersionRule: false,
})
});
// Create VPC
const vpc = new ec2.Vpc(stack, `${orgId}-${instanceName}-vpc`, {
maxAzs: 2,
natGateways: 1,
subnetConfiguration: [
{
name: 'Public',
subnetType: ec2.SubnetType.PUBLIC,
cidrMask: 24,
},
{
name: 'Private',
subnetType: ec2.SubnetType.PRIVATE_WITH_EGRESS,
cidrMask: 24,
},
],
});
// Create RDS instance
const dbSecurityGroup = new ec2.SecurityGroup(stack, `${orgId}-${instanceName}-db-sg`, {
vpc,
description: `Security group for ${instanceName} RDS instance`,
});
const db = new rds.DatabaseInstance(stack, `${orgId}-${instanceName}-db`, {
vpc,
vpcSubnets: { subnetType: ec2.SubnetType.PRIVATE_WITH_EGRESS },
engine: rds.DatabaseInstanceEngine.postgres({
version: rds.PostgresEngineVersion.VER_14,
}),
instanceType: ec2.InstanceType.of(ec2.InstanceClass.T3, ec2.InstanceSize.MEDIUM),
securityGroups: [dbSecurityGroup],
credentials: rds.Credentials.fromPassword(
'postgres',
cdk.SecretValue.unsafePlainText(dbPassword)
),
multiAz: true,
allocatedStorage: 50,
backupRetention: cdk.Duration.days(7),
});
// Create Redis cluster
const redisSecurityGroup = new ec2.SecurityGroup(stack, `${orgId}-${instanceName}-redis-sg`, {
vpc,
description: `Security group for ${instanceName} Redis cluster`,
});
const redisSubnetGroup = new elasticache.CfnSubnetGroup(stack, `${orgId}-${instanceName}-redis-subnet`, {
subnetIds: vpc.privateSubnets.map((subnet: ec2.ISubnet) => subnet.subnetId),
description: `Subnet group for ${instanceName} Redis cluster`,
});
const redis = new elasticache.CfnCacheCluster(stack, `${orgId}-${instanceName}-redis`, {
engine: 'redis',
cacheNodeType: 'cache.t3.micro',
numCacheNodes: 1,
vpcSecurityGroupIds: [redisSecurityGroup.securityGroupId],
cacheSubnetGroupName: redisSubnetGroup.ref,
});
// Create ECS Fargate cluster and service
const cluster = new ecs.Cluster(stack, `${orgId}-${instanceName}-cluster`, { vpc });
// Create task execution role with permissions to read from Parameter Store
const executionRole = new iam.Role(stack, `${orgId}-${instanceName}-execution-role`, {
assumedBy: new iam.ServicePrincipal('ecs-tasks.amazonaws.com'),
});
executionRole.addManagedPolicy(
iam.ManagedPolicy.fromAwsManagedPolicyName('service-role/AmazonECSTaskExecutionRolePolicy')
);
// Create ECS task definition and service
const taskDefinition = new ecs.FargateTaskDefinition(stack, `${orgId}-${instanceName}-task-def`, {
memoryLimitMiB: clusterConfig.containerMemory,
cpu: clusterConfig.containerCpu,
executionRole,
});
taskDefinition.addContainer('infisical', {
image: ecs.ContainerImage.fromRegistry('infisical/infisical:latest-postgres'),
environment: {
NODE_ENV: 'production',
ENCRYPTION_KEY: randomBytes(16).toString('hex'),
AUTH_SECRET: randomBytes(32).toString('base64'),
DB_CONNECTION_URI: `postgresql://postgres:${dbPassword}@${db.instanceEndpoint.hostname}:5432/postgres?sslmode=no-verify`,
REDIS_URL: `redis://${redis.attrRedisEndpointAddress}:${redis.attrRedisEndpointPort}`,
},
logging: ecs.LogDrivers.awsLogs({ streamPrefix: stackName }),
});
const service = new ecs.FargateService(stack, `${orgId}-${instanceName}-service`, {
cluster,
taskDefinition,
desiredCount: clusterConfig.desiredContainerCount,
assignPublicIp: publiclyAccessible,
vpcSubnets: {
subnetType: publiclyAccessible ? ec2.SubnetType.PUBLIC : ec2.SubnetType.PRIVATE_WITH_EGRESS,
},
});
// Create security group for ECS service
const ecsSecurityGroup = new ec2.SecurityGroup(stack, `${orgId}-${instanceName}-ecs-sg`, {
vpc,
description: `Security group for ${instanceName} ECS service`,
allowAllOutbound: true,
});
// Update service to use the security group
service.connections.addSecurityGroup(ecsSecurityGroup);
// Configure security group rules for RDS access
dbSecurityGroup.addIngressRule(
ecsSecurityGroup,
ec2.Port.tcp(5432),
'Allow ECS tasks to connect to PostgreSQL'
);
// Configure security group rules for Redis access
redisSecurityGroup.addIngressRule(
ecsSecurityGroup,
ec2.Port.tcp(6379),
'Allow ECS tasks to connect to Redis'
);
// Add outputs for resource IDs
new cdk.CfnOutput(stack, 'vpcid', { value: vpc.vpcId });
new cdk.CfnOutput(stack, 'publicsubnet1id', { value: vpc.publicSubnets[0].subnetId });
new cdk.CfnOutput(stack, 'publicsubnet2id', { value: vpc.publicSubnets[1].subnetId });
new cdk.CfnOutput(stack, 'privatesubnet1id', { value: vpc.privateSubnets[0].subnetId });
new cdk.CfnOutput(stack, 'privatesubnet2id', { value: vpc.privateSubnets[1].subnetId });
new cdk.CfnOutput(stack, 'rdsinstanceid', { value: db.instanceIdentifier });
new cdk.CfnOutput(stack, 'redisclusterid', { value: redis.ref });
new cdk.CfnOutput(stack, 'ecsclusterarn', { value: cluster.clusterArn });
new cdk.CfnOutput(stack, 'ecsservicearn', { value: service.serviceArn });
new cdk.CfnOutput(stack, 'dbsecuritygroupid', { value: dbSecurityGroup.securityGroupId });
new cdk.CfnOutput(stack, 'redissecuritygroupid', { value: redisSecurityGroup.securityGroupId });
// After VPC creation, store subnet IDs
const subnetIds = [
...vpc.publicSubnets.map(subnet => subnet.subnetId),
...vpc.privateSubnets.map(subnet => subnet.subnetId)
];
// After security group creation, store security group IDs
const securityGroupIds = [
dbSecurityGroup.securityGroupId,
redisSecurityGroup.securityGroupId
];
// Update instance with all infrastructure details
await dedicatedInstanceDAL.updateById(instance.id, {
stackName,
status: "PROVISIONING",
// Remove the token values and update them after stack creation
rdsInstanceId: null,
redisClusterId: null,
ecsClusterArn: null,
ecsServiceArn: null,
vpcId: null,
subnetIds: null,
securityGroupIds: null
});
// Deploy the stack
const deployment = app.synth();
if (dryRun) {
console.log('Dry run - would create stack with template:', JSON.stringify(deployment.getStackArtifact(stackName).template, null, 2));
return instance;
}
// Deploy the CloudFormation stack
try {
const cfnClient = new CloudFormationClient(awsConfig);
const command = new CreateStackCommand({
StackName: stackName,
TemplateBody: JSON.stringify(deployment.getStackArtifact(stackName).template),
Capabilities: ["CAPABILITY_IAM", "CAPABILITY_NAMED_IAM"],
Tags: Object.entries(internalTags).map(([Key, Value]) => ({ Key, Value }))
});
await cfnClient.send(command);
} catch (error) {
await dedicatedInstanceDAL.updateById(instance.id, {
status: "FAILED",
error: error instanceof Error ? error.message : 'Unknown error occurred'
});
throw error;
}
return instance;
};
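// Sketch only (not part of this file): a dry-run invocation of createInstance.
// All values are illustrative. The AWS credential env vars are still required,
// since the credential check above runs before the dryRun branch is reached.
const exampleDryRun = () =>
  createInstance({
    orgId: "org_123",
    instanceName: "acme-prod",
    subdomain: "acme",
    region: "us-east-1",
    provider: "aws",
    publiclyAccessible: false,
    clusterSize: "medium", // 2 vCPU, 4GB RAM per CLUSTER_SIZES above
    dryRun: true // synthesizes and logs the template, skips CreateStackCommand
  });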
const getInstance = async ({ orgId, instanceId }: GetInstanceParams): Promise<InstanceDetails> => {
const instance = await dedicatedInstanceDAL.findById(instanceId);
if (!instance) {
throw new NotFoundError({ message: "Instance not found" });
}
if (instance.orgId !== orgId) {
throw new BadRequestError({ message: "Not authorized to access this instance" });
}
// Get CloudFormation stack status
try {
const awsConfig = {
credentials: {
accessKeyId: process.env.AWS_ACCESS_KEY_ID || '',
secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY || '',
},
region: instance.region,
};
if (!awsConfig.credentials.accessKeyId || !awsConfig.credentials.secretAccessKey) {
throw new Error('AWS credentials not found in environment variables');
}
const cfnClient = new CloudFormationClient(awsConfig);
// Get stack status
const stackResponse = await cfnClient.send(new DescribeStacksCommand({
StackName: instance.stackName
}));
const stack = stackResponse.Stacks?.[0];
if (!stack) {
return { instance };
}
// Get stack events for progress tracking
const eventsResponse = await cfnClient.send(new DescribeStackEventsCommand({
StackName: instance.stackName
}));
const events = eventsResponse.StackEvents?.map(event => ({
timestamp: event.Timestamp,
logicalResourceId: event.LogicalResourceId,
resourceType: event.ResourceType,
resourceStatus: event.ResourceStatus,
resourceStatusReason: event.ResourceStatusReason
})).sort((a, b) => (b.timestamp?.getTime() || 0) - (a.timestamp?.getTime() || 0)) || [];
const stackStatus = stack.StackStatus || '';
let updates: Record<string, any> = {};
// Process outputs when stack is complete
if (stackStatus === 'CREATE_COMPLETE') {
const outputs = stack.Outputs || [];
const getOutput = (key: string) => outputs.find(o => o.OutputKey?.toLowerCase() === key.toLowerCase())?.OutputValue;
updates = {
status: 'RUNNING',
vpcId: getOutput('vpcid'),
subnetIds: [
getOutput('publicsubnet1id'),
getOutput('publicsubnet2id'),
getOutput('privatesubnet1id'),
getOutput('privatesubnet2id')
].filter(Boolean) as string[],
rdsInstanceId: getOutput('rdsinstanceid'),
redisClusterId: getOutput('redisclusterid'),
ecsClusterArn: getOutput('ecsclusterarn'),
ecsServiceArn: getOutput('ecsservicearn'),
securityGroupIds: [
getOutput('dbsecuritygroupid'),
getOutput('redissecuritygroupid')
].filter(Boolean) as string[]
};
} else if (stackStatus.includes('FAILED')) {
updates = {
status: 'FAILED',
error: stack.StackStatusReason
};
}
// Update instance if we have changes
if (Object.keys(updates).length > 0) {
await dedicatedInstanceDAL.updateById(instance.id, updates);
}
return {
instance: {
...instance,
...updates
},
stackStatus: stack.StackStatus,
stackStatusReason: stack.StackStatusReason,
events
};
} catch (error) {
// Log the error but don't throw - we still want to return the instance details
console.error('Error fetching CloudFormation stack status:', error);
}
return { instance };
};
return {
listInstances,
createInstance,
getInstance
};
};
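
A hedged sketch of consuming the service after provisioning: poll getInstance until the CloudFormation stack settles. The wiring, org ID, and poll interval are assumptions:

const service = dedicatedInstanceServiceFactory({ dedicatedInstanceDAL, permissionService });

const waitForInstance = async (orgId: string, instanceId: string) => {
  for (;;) {
    const { instance, stackStatus } = await service.getInstance({ orgId, instanceId });
    // getInstance flips status to RUNNING on CREATE_COMPLETE and to FAILED on *FAILED states
    if (instance.status === "RUNNING" || instance.status === "FAILED") return instance;
    console.log(`stack status: ${stackStatus ?? "pending"}`);
    await new Promise((resolve) => setTimeout(resolve, 15_000));
  }
};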

View File

@@ -1,31 +1,20 @@
import crypto from "node:crypto";
import { getConfig } from "@app/lib/config/env";
import { BadRequestError } from "@app/lib/errors";
import { getDbConnectionHost } from "@app/lib/knex";
export const verifyHostInputValidity = (host: string, isGateway = false) => {
export const verifyHostInputValidity = (host: string) => {
const appCfg = getConfig();
const dbHost = appCfg.DB_HOST || getDbConnectionHost(appCfg.DB_CONNECTION_URI);
// no need for validation when it's dev
if (appCfg.NODE_ENV === "development") return;
if (host === "host.docker.internal") throw new BadRequestError({ message: "Invalid db host" });
if (
appCfg.isCloud &&
!isGateway &&
// localhost
// internal ips
(host.match(/^10\.\d+\.\d+\.\d+/) || host.match(/^192\.168\.\d+\.\d+/))
(host === "host.docker.internal" || host.match(/^10\.\d+\.\d+\.\d+/) || host.match(/^192\.168\.\d+\.\d+/))
)
throw new BadRequestError({ message: "Invalid db host" });
if (
host === "localhost" ||
host === "127.0.0.1" ||
(dbHost?.length === host.length && crypto.timingSafeEqual(Buffer.from(dbHost || ""), Buffer.from(host)))
) {
if (host === "localhost" || host === "127.0.0.1" || dbHost === host) {
throw new BadRequestError({ message: "Invalid db host" });
}
};
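
A quick sketch of the guard's behavior on a cloud deployment (on the removed side, isGateway = true additionally skipped the private-range check); hostnames are illustrative:

// In development (NODE_ENV === "development") the function returns early
// and none of the checks apply.
verifyHostInputValidity("localhost");      // always throws BadRequestError
verifyHostInputValidity("10.0.0.5");       // throws on cloud: internal IP range
verifyHostInputValidity("db.example.com"); // passes, unless it matches the instance's own DB host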

View File

@@ -16,7 +16,6 @@ import { TSecretFolderDALFactory } from "@app/services/secret-folder/secret-fold
import { TDynamicSecretLeaseDALFactory } from "../dynamic-secret-lease/dynamic-secret-lease-dal";
import { TDynamicSecretLeaseQueueServiceFactory } from "../dynamic-secret-lease/dynamic-secret-lease-queue";
import { TProjectGatewayDALFactory } from "../gateway/project-gateway-dal";
import { TDynamicSecretDALFactory } from "./dynamic-secret-dal";
import {
DynamicSecretStatus,
@@ -45,7 +44,6 @@ type TDynamicSecretServiceFactoryDep = {
projectDAL: Pick<TProjectDALFactory, "findProjectBySlug">;
permissionService: Pick<TPermissionServiceFactory, "getProjectPermission">;
kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">;
projectGatewayDAL: Pick<TProjectGatewayDALFactory, "findOne">;
};
export type TDynamicSecretServiceFactory = ReturnType<typeof dynamicSecretServiceFactory>;
@@ -59,8 +57,7 @@ export const dynamicSecretServiceFactory = ({
permissionService,
dynamicSecretQueueService,
projectDAL,
kmsService,
projectGatewayDAL
kmsService
}: TDynamicSecretServiceFactoryDep) => {
const create = async ({
path,
@@ -111,18 +108,6 @@ export const dynamicSecretServiceFactory = ({
const selectedProvider = dynamicSecretProviders[provider.type];
const inputs = await selectedProvider.validateProviderInputs(provider.inputs);
let selectedGatewayId: string | null = null;
if (inputs && typeof inputs === "object" && "projectGatewayId" in inputs && inputs.projectGatewayId) {
const projectGatewayId = inputs.projectGatewayId as string;
const projectGateway = await projectGatewayDAL.findOne({ id: projectGatewayId, projectId });
if (!projectGateway)
throw new NotFoundError({
message: `Project gateway with ID ${projectGatewayId} not found`
});
selectedGatewayId = projectGateway.id;
}
const isConnected = await selectedProvider.validateConnection(provider.inputs);
if (!isConnected) throw new BadRequestError({ message: "Provider connection failed" });
@@ -138,8 +123,7 @@ export const dynamicSecretServiceFactory = ({
maxTTL,
defaultTTL,
folderId: folder.id,
name,
projectGatewayId: selectedGatewayId
name
});
return dynamicSecretCfg;
};
@@ -211,23 +195,6 @@ export const dynamicSecretServiceFactory = ({
const newInput = { ...decryptedStoredInput, ...(inputs || {}) };
const updatedInput = await selectedProvider.validateProviderInputs(newInput);
let selectedGatewayId: string | null = null;
if (
updatedInput &&
typeof updatedInput === "object" &&
"projectGatewayId" in updatedInput &&
updatedInput?.projectGatewayId
) {
const projectGatewayId = updatedInput.projectGatewayId as string;
const projectGateway = await projectGatewayDAL.findOne({ id: projectGatewayId, projectId });
if (!projectGateway)
throw new NotFoundError({
message: `Project gateway with ID ${projectGatewayId} not found`
});
selectedGatewayId = projectGateway.id;
}
const isConnected = await selectedProvider.validateConnection(newInput);
if (!isConnected) throw new BadRequestError({ message: "Provider connection failed" });
@@ -237,8 +204,7 @@ export const dynamicSecretServiceFactory = ({
defaultTTL,
name: newName ?? name,
status: null,
statusDetails: null,
projectGatewayId: selectedGatewayId
statusDetails: null
});
return updatedDynamicCfg;

View File

@@ -1,6 +1,5 @@
import { SnowflakeProvider } from "@app/ee/services/dynamic-secret/providers/snowflake";
import { TGatewayServiceFactory } from "../../gateway/gateway-service";
import { AwsElastiCacheDatabaseProvider } from "./aws-elasticache";
import { AwsIamProvider } from "./aws-iam";
import { AzureEntraIDProvider } from "./azure-entra-id";
@@ -17,14 +16,8 @@ import { SapHanaProvider } from "./sap-hana";
import { SqlDatabaseProvider } from "./sql-database";
import { TotpProvider } from "./totp";
type TBuildDynamicSecretProviderDTO = {
gatewayService: Pick<TGatewayServiceFactory, "fnGetGatewayClientTls">;
};
export const buildDynamicSecretProviders = ({
gatewayService
}: TBuildDynamicSecretProviderDTO): Record<DynamicSecretProviders, TDynamicProviderFns> => ({
[DynamicSecretProviders.SqlDatabase]: SqlDatabaseProvider({ gatewayService }),
export const buildDynamicSecretProviders = (): Record<DynamicSecretProviders, TDynamicProviderFns> => ({
[DynamicSecretProviders.SqlDatabase]: SqlDatabaseProvider(),
[DynamicSecretProviders.Cassandra]: CassandraProvider(),
[DynamicSecretProviders.AwsIam]: AwsIamProvider(),
[DynamicSecretProviders.Redis]: RedisDatabaseProvider(),
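
A sketch of how a caller consumes the registry after this change (no gateway service is threaded through any more); rawInputs stands in for untrusted provider configuration:

const providers = buildDynamicSecretProviders();
const sqlProvider = providers[DynamicSecretProviders.SqlDatabase];

// zod-parses the inputs and runs the host validity checks
const inputs = await sqlProvider.validateProviderInputs(rawInputs);

// issues "SELECT 1" ("SELECT 1 FROM DUAL" on Oracle) against the target database
const isConnected = await sqlProvider.validateConnection(rawInputs);
if (!isConnected) throw new BadRequestError({ message: "Provider connection failed" });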

View File

@@ -103,8 +103,7 @@ export const DynamicSecretSqlDBSchema = z.object({
creationStatement: z.string().trim(),
revocationStatement: z.string().trim(),
renewStatement: z.string().trim().optional(),
ca: z.string().optional(),
projectGatewayId: z.string().nullable().optional()
ca: z.string().optional()
});
export const DynamicSecretCassandraSchema = z.object({

View File

@@ -3,10 +3,8 @@ import knex from "knex";
import { customAlphabet } from "nanoid";
import { z } from "zod";
import { withGatewayProxy } from "@app/lib/gateway";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { TGatewayServiceFactory } from "../../gateway/gateway-service";
import { verifyHostInputValidity } from "../dynamic-secret-fns";
import { DynamicSecretSqlDBSchema, SqlProviders, TDynamicProviderFns } from "./models";
@@ -27,14 +25,10 @@ const generateUsername = (provider: SqlProviders) => {
return alphaNumericNanoId(32);
};
type TSqlDatabaseProviderDTO = {
gatewayService: Pick<TGatewayServiceFactory, "fnGetGatewayClientTls">;
};
export const SqlDatabaseProvider = ({ gatewayService }: TSqlDatabaseProviderDTO): TDynamicProviderFns => {
export const SqlDatabaseProvider = (): TDynamicProviderFns => {
const validateProviderInputs = async (inputs: unknown) => {
const providerInputs = await DynamicSecretSqlDBSchema.parseAsync(inputs);
verifyHostInputValidity(providerInputs.host, Boolean(providerInputs.projectGatewayId));
verifyHostInputValidity(providerInputs.host);
return providerInputs;
};
@@ -51,6 +45,7 @@ export const SqlDatabaseProvider = ({ gatewayService }: TSqlDatabaseProviderDTO)
user: providerInputs.username,
password: providerInputs.password,
ssl,
pool: { min: 0, max: 1 },
// @ts-expect-error this is because of knexjs type signature issue. This is directly passed to driver
// https://github.com/knex/knex/blob/b6507a7129d2b9fafebf5f831494431e64c6a8a0/lib/dialects/mssql/index.js#L66
// https://github.com/tediousjs/tedious/blob/ebb023ed90969a7ec0e4b036533ad52739d921f7/test/config.ci.ts#L19
@@ -66,112 +61,61 @@ export const SqlDatabaseProvider = ({ gatewayService }: TSqlDatabaseProviderDTO)
return db;
};
const gatewayProxyWrapper = async (
providerInputs: z.infer<typeof DynamicSecretSqlDBSchema>,
gatewayCallback: (host: string, port: number) => Promise<void>
) => {
const relayDetails = await gatewayService.fnGetGatewayClientTls(providerInputs.projectGatewayId as string);
const [relayHost, relayPort] = relayDetails.relayAddress.split(":");
await withGatewayProxy(
async (port) => {
await gatewayCallback("localhost", port);
},
{
targetHost: providerInputs.host,
targetPort: providerInputs.port,
relayHost,
relayPort: Number(relayPort),
identityId: relayDetails.identityId,
orgId: relayDetails.orgId,
tlsOptions: {
ca: relayDetails.certChain,
cert: relayDetails.certificate,
key: relayDetails.privateKey.toString()
}
}
);
};
const validateConnection = async (inputs: unknown) => {
const providerInputs = await validateProviderInputs(inputs);
let isConnected = false;
const gatewayCallback = async (host = providerInputs.host, port = providerInputs.port) => {
const db = await $getClient({ ...providerInputs, port, host });
// oracle needs from keyword
const testStatement = providerInputs.client === SqlProviders.Oracle ? "SELECT 1 FROM DUAL" : "SELECT 1";
const db = await $getClient(providerInputs);
// oracle needs from keyword
const testStatement = providerInputs.client === SqlProviders.Oracle ? "SELECT 1 FROM DUAL" : "SELECT 1";
isConnected = await db.raw(testStatement).then(() => true);
await db.destroy();
};
if (providerInputs.projectGatewayId) {
await gatewayProxyWrapper(providerInputs, gatewayCallback);
} else {
await gatewayCallback();
}
const isConnected = await db.raw(testStatement).then(() => true);
await db.destroy();
return isConnected;
};
const create = async (inputs: unknown, expireAt: number) => {
const providerInputs = await validateProviderInputs(inputs);
const db = await $getClient(providerInputs);
const username = generateUsername(providerInputs.client);
const password = generatePassword(providerInputs.client);
const gatewayCallback = async (host = providerInputs.host, port = providerInputs.port) => {
const db = await $getClient({ ...providerInputs, port, host });
try {
const { database } = providerInputs;
const expiration = new Date(expireAt).toISOString();
const { database } = providerInputs;
const expiration = new Date(expireAt).toISOString();
const creationStatement = handlebars.compile(providerInputs.creationStatement, { noEscape: true })({
username,
password,
expiration,
database
});
const creationStatement = handlebars.compile(providerInputs.creationStatement, { noEscape: true })({
username,
password,
expiration,
database
});
const queries = creationStatement.toString().split(";").filter(Boolean);
await db.transaction(async (tx) => {
for (const query of queries) {
// eslint-disable-next-line
await tx.raw(query);
}
});
} finally {
await db.destroy();
const queries = creationStatement.toString().split(";").filter(Boolean);
await db.transaction(async (tx) => {
for (const query of queries) {
// eslint-disable-next-line
await tx.raw(query);
}
};
if (providerInputs.projectGatewayId) {
await gatewayProxyWrapper(providerInputs, gatewayCallback);
} else {
await gatewayCallback();
}
});
await db.destroy();
return { entityId: username, data: { DB_USERNAME: username, DB_PASSWORD: password } };
};
const revoke = async (inputs: unknown, entityId: string) => {
const providerInputs = await validateProviderInputs(inputs);
const db = await $getClient(providerInputs);
const username = entityId;
const { database } = providerInputs;
const gatewayCallback = async (host = providerInputs.host, port = providerInputs.port) => {
const db = await $getClient({ ...providerInputs, port, host });
try {
const revokeStatement = handlebars.compile(providerInputs.revocationStatement)({ username, database });
const queries = revokeStatement.toString().split(";").filter(Boolean);
await db.transaction(async (tx) => {
for (const query of queries) {
// eslint-disable-next-line
await tx.raw(query);
}
});
} finally {
await db.destroy();
const revokeStatement = handlebars.compile(providerInputs.revocationStatement)({ username, database });
const queries = revokeStatement.toString().split(";").filter(Boolean);
await db.transaction(async (tx) => {
for (const query of queries) {
// eslint-disable-next-line
await tx.raw(query);
}
};
if (providerInputs.projectGatewayId) {
await gatewayProxyWrapper(providerInputs, gatewayCallback);
} else {
await gatewayCallback();
}
});
await db.destroy();
return { entityId: username };
};
@@ -179,35 +123,28 @@ export const SqlDatabaseProvider = ({ gatewayService }: TSqlDatabaseProviderDTO)
const providerInputs = await validateProviderInputs(inputs);
if (!providerInputs.renewStatement) return { entityId };
const gatewayCallback = async (host = providerInputs.host, port = providerInputs.port) => {
const db = await $getClient({ ...providerInputs, port, host });
const expiration = new Date(expireAt).toISOString();
const { database } = providerInputs;
const db = await $getClient(providerInputs);
const renewStatement = handlebars.compile(providerInputs.renewStatement)({
username: entityId,
expiration,
database
});
try {
if (renewStatement) {
const queries = renewStatement.toString().split(";").filter(Boolean);
await db.transaction(async (tx) => {
for (const query of queries) {
// eslint-disable-next-line
await tx.raw(query);
}
});
const expiration = new Date(expireAt).toISOString();
const { database } = providerInputs;
const renewStatement = handlebars.compile(providerInputs.renewStatement)({
username: entityId,
expiration,
database
});
if (renewStatement) {
const queries = renewStatement.toString().split(";").filter(Boolean);
await db.transaction(async (tx) => {
for (const query of queries) {
// eslint-disable-next-line
await tx.raw(query);
}
} finally {
await db.destroy();
}
};
if (providerInputs.projectGatewayId) {
await gatewayProxyWrapper(providerInputs, gatewayCallback);
} else {
await gatewayCallback();
});
}
await db.destroy();
return { entityId };
};
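
For illustration, a minimal sketch of the handlebars templating the provider builds its statements with; the creation statement text and all values are assumptions, and noEscape keeps quotes from being HTML-escaped:

import handlebars from "handlebars";

const creationStatement = `CREATE USER "{{username}}" WITH PASSWORD '{{password}}' VALID UNTIL '{{expiration}}';
GRANT ALL ON DATABASE {{database}} TO "{{username}}";`;

const rendered = handlebars.compile(creationStatement, { noEscape: true })({
  username: "inf_user_abc123", // shape of generateUsername output (illustrative)
  password: "s3cr3t-value", // generatePassword output (illustrative)
  expiration: new Date(Date.now() + 60 * 60 * 1000).toISOString(),
  database: "app"
});

// As in the provider above: split on ";", drop empties, run inside one transaction
const queries = rendered.split(";").map((q) => q.trim()).filter(Boolean);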

View File

@@ -1,86 +0,0 @@
import { Knex } from "knex";
import { TDbClient } from "@app/db";
import { GatewaysSchema, TableName, TGateways } from "@app/db/schemas";
import { DatabaseError } from "@app/lib/errors";
import {
buildFindFilter,
ormify,
selectAllTableCols,
sqlNestRelationships,
TFindFilter,
TFindOpt
} from "@app/lib/knex";
export type TGatewayDALFactory = ReturnType<typeof gatewayDALFactory>;
export const gatewayDALFactory = (db: TDbClient) => {
const orm = ormify(db, TableName.Gateway);
const find = async (filter: TFindFilter<TGateways>, { offset, limit, sort, tx }: TFindOpt<TGateways> = {}) => {
try {
const query = (tx || db)(TableName.Gateway)
// eslint-disable-next-line @typescript-eslint/no-misused-promises
.where(buildFindFilter(filter))
.join(TableName.Identity, `${TableName.Identity}.id`, `${TableName.Gateway}.identityId`)
.leftJoin(TableName.ProjectGateway, `${TableName.ProjectGateway}.gatewayId`, `${TableName.Gateway}.id`)
.leftJoin(TableName.Project, `${TableName.Project}.id`, `${TableName.ProjectGateway}.projectId`)
.select(selectAllTableCols(TableName.Gateway))
.select(
db.ref("name").withSchema(TableName.Identity).as("identityName"),
db.ref("name").withSchema(TableName.Project).as("projectName"),
db.ref("slug").withSchema(TableName.Project).as("projectSlug"),
db.ref("id").withSchema(TableName.Project).as("projectId")
);
if (limit) void query.limit(limit);
if (offset) void query.offset(offset);
if (sort) {
void query.orderBy(sort.map(([column, order, nulls]) => ({ column: column as string, order, nulls })));
}
const docs = await query;
return sqlNestRelationships({
data: docs,
key: "id",
parentMapper: (data) => ({
...GatewaysSchema.parse(data),
identity: { id: data.identityId, name: data.identityName }
}),
childrenMapper: [
{
key: "projectId",
label: "projects" as const,
mapper: ({ projectId, projectName, projectSlug }) => ({
id: projectId,
name: projectName,
slug: projectSlug
})
}
]
});
} catch (error) {
throw new DatabaseError({ error, name: `${TableName.Gateway}: Find` });
}
};
const findByProjectId = async (projectId: string, tx?: Knex) => {
try {
const query = (tx || db)(TableName.Gateway)
.join(TableName.Identity, `${TableName.Identity}.id`, `${TableName.Gateway}.identityId`)
.join(TableName.ProjectGateway, `${TableName.ProjectGateway}.gatewayId`, `${TableName.Gateway}.id`)
.select(selectAllTableCols(TableName.Gateway))
.select(
db.ref("name").withSchema(TableName.Identity).as("identityName"),
db.ref("id").withSchema(TableName.ProjectGateway).as("projectGatewayId")
)
.where({ [`${TableName.ProjectGateway}.projectId` as "projectId"]: projectId });
const docs = await query;
return docs.map((el) => ({ ...el, identity: { id: el.identityId, name: el.identityName } }));
} catch (error) {
throw new DatabaseError({ error, name: `${TableName.Gateway}: Find by project id` });
}
};
return { ...orm, find, findByProjectId };
};
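
For reference (the file above is deleted by this compare), a sketch of the nested shape find() returned, per the parent and children mappers:

const [gateway] = await gatewayDAL.find({ identityId });
// gateway: {
//   ...all Gateway columns (GatewaysSchema),
//   identity: { id, name },
//   projects: [{ id, name, slug }] // empty when no project gateway is linked
// }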

View File

@@ -1,652 +0,0 @@
import crypto from "node:crypto";
import { ForbiddenError } from "@casl/ability";
import * as x509 from "@peculiar/x509";
import { z } from "zod";
import { ActionProjectType } from "@app/db/schemas";
import { KeyStorePrefixes, PgSqlLock, TKeyStoreFactory } from "@app/keystore/keystore";
import { getConfig } from "@app/lib/config/env";
import { BadRequestError, NotFoundError } from "@app/lib/errors";
import { pingGatewayAndVerify } from "@app/lib/gateway";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { getTurnCredentials } from "@app/lib/turn/credentials";
import { ActorAuthMethod, ActorType } from "@app/services/auth/auth-type";
import { CertExtendedKeyUsage, CertKeyAlgorithm, CertKeyUsage } from "@app/services/certificate/certificate-types";
import {
createSerialNumber,
keyAlgorithmToAlgCfg
} from "@app/services/certificate-authority/certificate-authority-fns";
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
import { KmsDataKey } from "@app/services/kms/kms-types";
import { TLicenseServiceFactory } from "../license/license-service";
import { OrgPermissionGatewayActions, OrgPermissionSubjects } from "../permission/org-permission";
import { TPermissionServiceFactory } from "../permission/permission-service";
import { TGatewayDALFactory } from "./gateway-dal";
import {
TExchangeAllocatedRelayAddressDTO,
TGetGatewayByIdDTO,
TGetProjectGatewayByIdDTO,
THeartBeatDTO,
TListGatewaysDTO,
TUpdateGatewayByIdDTO
} from "./gateway-types";
import { TOrgGatewayConfigDALFactory } from "./org-gateway-config-dal";
import { TProjectGatewayDALFactory } from "./project-gateway-dal";
type TGatewayServiceFactoryDep = {
gatewayDAL: TGatewayDALFactory;
projectGatewayDAL: TProjectGatewayDALFactory;
orgGatewayConfigDAL: Pick<TOrgGatewayConfigDALFactory, "findOne" | "create" | "transaction" | "findById">;
licenseService: Pick<TLicenseServiceFactory, "onPremFeatures" | "getPlan">;
kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey" | "decryptWithRootKey">;
permissionService: Pick<TPermissionServiceFactory, "getOrgPermission" | "getProjectPermission">;
keyStore: Pick<TKeyStoreFactory, "getItem" | "setItemWithExpiry">;
};
export type TGatewayServiceFactory = ReturnType<typeof gatewayServiceFactory>;
const TURN_SERVER_CREDENTIALS_SCHEMA = z.object({
username: z.string(),
password: z.string()
});
export const gatewayServiceFactory = ({
gatewayDAL,
licenseService,
kmsService,
permissionService,
orgGatewayConfigDAL,
keyStore,
projectGatewayDAL
}: TGatewayServiceFactoryDep) => {
const $validateOrgAccessToGateway = async (orgId: string, actorId: string, actorAuthMethod: ActorAuthMethod) => {
// if (!licenseService.onPremFeatures.gateway) {
// throw new BadRequestError({
// message:
// "Gateway handshake failed due to instance plan restrictions. Please upgrade your instance to Infisical's Enterprise plan."
// });
// }
const orgLicensePlan = await licenseService.getPlan(orgId);
if (!orgLicensePlan.gateway) {
throw new BadRequestError({
message:
"Gateway handshake failed due to organization plan restrictions. Please upgrade your instance to Infisical's Enterprise plan."
});
}
const { permission } = await permissionService.getOrgPermission(
ActorType.IDENTITY,
actorId,
orgId,
actorAuthMethod,
orgId
);
ForbiddenError.from(permission).throwUnlessCan(
OrgPermissionGatewayActions.CreateGateways,
OrgPermissionSubjects.Gateway
);
};
const getGatewayRelayDetails = async (actorId: string, actorOrgId: string, actorAuthMethod: ActorAuthMethod) => {
const TURN_CRED_EXPIRY = 10 * 60; // 10 minutes
const envCfg = getConfig();
await $validateOrgAccessToGateway(actorOrgId, actorId, actorAuthMethod);
const { encryptor, decryptor } = await kmsService.createCipherPairWithDataKey({
type: KmsDataKey.Organization,
orgId: actorOrgId
});
if (!envCfg.GATEWAY_RELAY_AUTH_SECRET || !envCfg.GATEWAY_RELAY_ADDRESS || !envCfg.GATEWAY_RELAY_REALM) {
throw new BadRequestError({
message: "Gateway handshake failed due to missing instance configuration."
});
}
let turnServerUsername = "";
let turnServerPassword = "";
// keep it in redis for 5mins to avoid generating so many credentials
const previousCredential = await keyStore.getItem(KeyStorePrefixes.GatewayIdentityCredential(actorId));
if (previousCredential) {
const el = await TURN_SERVER_CREDENTIALS_SCHEMA.parseAsync(
JSON.parse(decryptor({ cipherTextBlob: Buffer.from(previousCredential, "hex") }).toString())
);
turnServerUsername = el.username;
turnServerPassword = el.password;
} else {
const el = getTurnCredentials(actorId, envCfg.GATEWAY_RELAY_AUTH_SECRET);
await keyStore.setItemWithExpiry(
KeyStorePrefixes.GatewayIdentityCredential(actorId),
TURN_CRED_EXPIRY,
encryptor({
plainText: Buffer.from(JSON.stringify({ username: el.username, password: el.password }))
}).cipherTextBlob.toString("hex")
);
turnServerUsername = el.username;
turnServerPassword = el.password;
}
return {
turnServerUsername,
turnServerPassword,
turnServerRealm: envCfg.GATEWAY_RELAY_REALM,
turnServerAddress: envCfg.GATEWAY_RELAY_ADDRESS,
infisicalStaticIp: envCfg.GATEWAY_INFISICAL_STATIC_IP_ADDRESS
};
};
const exchangeAllocatedRelayAddress = async ({
identityId,
identityOrg,
relayAddress,
identityOrgAuthMethod
}: TExchangeAllocatedRelayAddressDTO) => {
await $validateOrgAccessToGateway(identityOrg, identityId, identityOrgAuthMethod);
const { encryptor: orgKmsEncryptor, decryptor: orgKmsDecryptor } = await kmsService.createCipherPairWithDataKey({
type: KmsDataKey.Organization,
orgId: identityOrg
});
const orgGatewayConfig = await orgGatewayConfigDAL.transaction(async (tx) => {
await tx.raw("SELECT pg_advisory_xact_lock(?)", [PgSqlLock.OrgGatewayRootCaInit(identityOrg)]);
const existingGatewayConfig = await orgGatewayConfigDAL.findOne({ orgId: identityOrg });
if (existingGatewayConfig) return existingGatewayConfig;
const alg = keyAlgorithmToAlgCfg(CertKeyAlgorithm.RSA_2048);
// generate root CA
const rootCaKeys = await crypto.subtle.generateKey(alg, true, ["sign", "verify"]);
const rootCaSerialNumber = createSerialNumber();
const rootCaSkObj = crypto.KeyObject.from(rootCaKeys.privateKey);
const rootCaIssuedAt = new Date();
const rootCaKeyAlgorithm = CertKeyAlgorithm.RSA_2048;
const rootCaExpiration = new Date(new Date().setFullYear(2045));
const rootCaCert = await x509.X509CertificateGenerator.createSelfSigned({
name: `O=${identityOrg},CN=Infisical Gateway Root CA`,
serialNumber: rootCaSerialNumber,
notBefore: rootCaIssuedAt,
notAfter: rootCaExpiration,
signingAlgorithm: alg,
keys: rootCaKeys,
extensions: [
// eslint-disable-next-line no-bitwise
new x509.KeyUsagesExtension(x509.KeyUsageFlags.keyCertSign | x509.KeyUsageFlags.cRLSign, true),
await x509.SubjectKeyIdentifierExtension.create(rootCaKeys.publicKey)
]
});
// generate client ca
const clientCaSerialNumber = createSerialNumber();
const clientCaIssuedAt = new Date();
const clientCaExpiration = new Date(new Date().setFullYear(2045));
const clientCaKeys = await crypto.subtle.generateKey(alg, true, ["sign", "verify"]);
const clientCaSkObj = crypto.KeyObject.from(clientCaKeys.privateKey);
const clientCaCert = await x509.X509CertificateGenerator.create({
serialNumber: clientCaSerialNumber,
subject: `O=${identityOrg},CN=Client Intermediate CA`,
issuer: rootCaCert.subject,
notBefore: clientCaIssuedAt,
notAfter: clientCaExpiration,
signingKey: rootCaKeys.privateKey,
publicKey: clientCaKeys.publicKey,
signingAlgorithm: alg,
extensions: [
new x509.KeyUsagesExtension(
// eslint-disable-next-line no-bitwise
x509.KeyUsageFlags.keyCertSign |
x509.KeyUsageFlags.cRLSign |
x509.KeyUsageFlags.digitalSignature |
x509.KeyUsageFlags.keyEncipherment,
true
),
new x509.BasicConstraintsExtension(true, 0, true),
await x509.AuthorityKeyIdentifierExtension.create(rootCaCert, false),
await x509.SubjectKeyIdentifierExtension.create(clientCaKeys.publicKey)
]
});
const clientKeys = await crypto.subtle.generateKey(alg, true, ["sign", "verify"]);
const clientCertSerialNumber = createSerialNumber();
const clientCert = await x509.X509CertificateGenerator.create({
serialNumber: clientCertSerialNumber,
subject: `O=${identityOrg},OU=gateway-client,CN=cloud`,
issuer: clientCaCert.subject,
notAfter: clientCaExpiration,
notBefore: clientCaIssuedAt,
signingKey: clientCaKeys.privateKey,
publicKey: clientKeys.publicKey,
signingAlgorithm: alg,
extensions: [
new x509.BasicConstraintsExtension(false),
await x509.AuthorityKeyIdentifierExtension.create(clientCaCert, false),
await x509.SubjectKeyIdentifierExtension.create(clientKeys.publicKey),
new x509.CertificatePolicyExtension(["2.5.29.32.0"]), // anyPolicy
new x509.KeyUsagesExtension(
// eslint-disable-next-line no-bitwise
x509.KeyUsageFlags[CertKeyUsage.DIGITAL_SIGNATURE] |
x509.KeyUsageFlags[CertKeyUsage.KEY_ENCIPHERMENT] |
x509.KeyUsageFlags[CertKeyUsage.KEY_AGREEMENT],
true
),
new x509.ExtendedKeyUsageExtension([x509.ExtendedKeyUsage[CertExtendedKeyUsage.CLIENT_AUTH]], true)
]
});
const clientSkObj = crypto.KeyObject.from(clientKeys.privateKey);
// generate gateway ca
const gatewayCaSerialNumber = createSerialNumber();
const gatewayCaIssuedAt = new Date();
const gatewayCaExpiration = new Date(new Date().setFullYear(2045));
const gatewayCaKeys = await crypto.subtle.generateKey(alg, true, ["sign", "verify"]);
const gatewayCaSkObj = crypto.KeyObject.from(gatewayCaKeys.privateKey);
const gatewayCaCert = await x509.X509CertificateGenerator.create({
serialNumber: gatewayCaSerialNumber,
subject: `O=${identityOrg},CN=Gateway CA`,
issuer: rootCaCert.subject,
notBefore: gatewayCaIssuedAt,
notAfter: gatewayCaExpiration,
signingKey: rootCaKeys.privateKey,
publicKey: gatewayCaKeys.publicKey,
signingAlgorithm: alg,
extensions: [
new x509.KeyUsagesExtension(
// eslint-disable-next-line no-bitwise
x509.KeyUsageFlags.keyCertSign |
x509.KeyUsageFlags.cRLSign |
x509.KeyUsageFlags.digitalSignature |
x509.KeyUsageFlags.keyEncipherment,
true
),
new x509.BasicConstraintsExtension(true, 0, true),
await x509.AuthorityKeyIdentifierExtension.create(rootCaCert, false),
await x509.SubjectKeyIdentifierExtension.create(gatewayCaKeys.publicKey)
]
});
return orgGatewayConfigDAL.create({
orgId: identityOrg,
rootCaIssuedAt,
rootCaExpiration,
rootCaSerialNumber,
rootCaKeyAlgorithm,
encryptedRootCaPrivateKey: orgKmsEncryptor({
plainText: rootCaSkObj.export({
type: "pkcs8",
format: "der"
})
}).cipherTextBlob,
encryptedRootCaCertificate: orgKmsEncryptor({ plainText: Buffer.from(rootCaCert.rawData) }).cipherTextBlob,
clientCaIssuedAt,
clientCaExpiration,
clientCaSerialNumber,
encryptedClientCaPrivateKey: orgKmsEncryptor({
plainText: clientCaSkObj.export({
type: "pkcs8",
format: "der"
})
}).cipherTextBlob,
encryptedClientCaCertificate: orgKmsEncryptor({
plainText: Buffer.from(clientCaCert.rawData)
}).cipherTextBlob,
clientCertIssuedAt: clientCaIssuedAt,
clientCertExpiration: clientCaExpiration,
clientCertKeyAlgorithm: CertKeyAlgorithm.RSA_2048,
clientCertSerialNumber,
encryptedClientPrivateKey: orgKmsEncryptor({
plainText: clientSkObj.export({
type: "pkcs8",
format: "der"
})
}).cipherTextBlob,
encryptedClientCertificate: orgKmsEncryptor({
plainText: Buffer.from(clientCert.rawData)
}).cipherTextBlob,
gatewayCaIssuedAt,
gatewayCaExpiration,
gatewayCaSerialNumber,
encryptedGatewayCaPrivateKey: orgKmsEncryptor({
plainText: gatewayCaSkObj.export({
type: "pkcs8",
format: "der"
})
}).cipherTextBlob,
encryptedGatewayCaCertificate: orgKmsEncryptor({
plainText: Buffer.from(gatewayCaCert.rawData)
}).cipherTextBlob
});
});
const rootCaCert = new x509.X509Certificate(
orgKmsDecryptor({
cipherTextBlob: orgGatewayConfig.encryptedRootCaCertificate
})
);
const clientCaCert = new x509.X509Certificate(
orgKmsDecryptor({
cipherTextBlob: orgGatewayConfig.encryptedClientCaCertificate
})
);
const gatewayCaAlg = keyAlgorithmToAlgCfg(orgGatewayConfig.rootCaKeyAlgorithm as CertKeyAlgorithm);
const gatewayCaSkObj = crypto.createPrivateKey({
key: orgKmsDecryptor({ cipherTextBlob: orgGatewayConfig.encryptedGatewayCaPrivateKey }),
format: "der",
type: "pkcs8"
});
const gatewayCaCert = new x509.X509Certificate(
orgKmsDecryptor({
cipherTextBlob: orgGatewayConfig.encryptedGatewayCaCertificate
})
);
const gatewayCaPrivateKey = await crypto.subtle.importKey(
"pkcs8",
gatewayCaSkObj.export({ format: "der", type: "pkcs8" }),
gatewayCaAlg,
true,
["sign"]
);
const alg = keyAlgorithmToAlgCfg(CertKeyAlgorithm.RSA_2048);
const gatewayKeys = await crypto.subtle.generateKey(alg, true, ["sign", "verify"]);
const certIssuedAt = new Date();
// then need to periodically init
const certExpireAt = new Date(new Date().setMonth(new Date().getMonth() + 1));
const extensions: x509.Extension[] = [
new x509.BasicConstraintsExtension(false),
await x509.AuthorityKeyIdentifierExtension.create(gatewayCaCert, false),
await x509.SubjectKeyIdentifierExtension.create(gatewayKeys.publicKey),
new x509.CertificatePolicyExtension(["2.5.29.32.0"]), // anyPolicy
new x509.KeyUsagesExtension(
// eslint-disable-next-line no-bitwise
x509.KeyUsageFlags[CertKeyUsage.DIGITAL_SIGNATURE] | x509.KeyUsageFlags[CertKeyUsage.KEY_ENCIPHERMENT],
true
),
new x509.ExtendedKeyUsageExtension([x509.ExtendedKeyUsage[CertExtendedKeyUsage.SERVER_AUTH]], true),
// san
new x509.SubjectAlternativeNameExtension([{ type: "ip", value: relayAddress.split(":")[0] }], false)
];
const serialNumber = createSerialNumber();
const privateKey = crypto.KeyObject.from(gatewayKeys.privateKey);
const gatewayCertificate = await x509.X509CertificateGenerator.create({
serialNumber,
subject: `CN=${identityId},O=${identityOrg},OU=Gateway`,
issuer: gatewayCaCert.subject,
notBefore: certIssuedAt,
notAfter: certExpireAt,
signingKey: gatewayCaPrivateKey,
publicKey: gatewayKeys.publicKey,
signingAlgorithm: alg,
extensions
});
const appCfg = getConfig();
// just for local development
const formattedRelayAddress =
appCfg.NODE_ENV === "development" ? relayAddress.replace("127.0.0.1", "host.docker.internal") : relayAddress;
await gatewayDAL.transaction(async (tx) => {
await tx.raw("SELECT pg_advisory_xact_lock(?)", [PgSqlLock.OrgGatewayCertExchange(identityOrg)]);
const existingGateway = await gatewayDAL.findOne({ identityId, orgGatewayRootCaId: orgGatewayConfig.id });
if (existingGateway) {
return gatewayDAL.updateById(existingGateway.id, {
keyAlgorithm: CertKeyAlgorithm.RSA_2048,
issuedAt: certIssuedAt,
expiration: certExpireAt,
serialNumber,
relayAddress: orgKmsEncryptor({
plainText: Buffer.from(formattedRelayAddress)
}).cipherTextBlob
});
}
return gatewayDAL.create({
keyAlgorithm: CertKeyAlgorithm.RSA_2048,
issuedAt: certIssuedAt,
expiration: certExpireAt,
serialNumber,
relayAddress: orgKmsEncryptor({
plainText: Buffer.from(formattedRelayAddress)
}).cipherTextBlob,
identityId,
orgGatewayRootCaId: orgGatewayConfig.id,
name: `gateway-${alphaNumericNanoId(6).toLowerCase()}`
});
});
const gatewayCertificateChain = `${clientCaCert.toString("pem")}\n${rootCaCert.toString("pem")}`.trim();
return {
serialNumber,
privateKey: privateKey.export({ format: "pem", type: "pkcs8" }) as string,
certificate: gatewayCertificate.toString("pem"),
certificateChain: gatewayCertificateChain
};
};
const heartbeat = async ({ orgPermission }: THeartBeatDTO) => {
await $validateOrgAccessToGateway(orgPermission.orgId, orgPermission.id, orgPermission.authMethod);
const orgGatewayConfig = await orgGatewayConfigDAL.findOne({ orgId: orgPermission.orgId });
if (!orgGatewayConfig) throw new NotFoundError({ message: `Gateway configuration for organization with ID ${orgPermission.orgId} not found.` });
const [gateway] = await gatewayDAL.find({ identityId: orgPermission.id, orgGatewayRootCaId: orgGatewayConfig.id });
if (!gateway) throw new NotFoundError({ message: `Gateway with ID ${orgPermission.id} not found.` });
const { decryptor: orgKmsDecryptor } = await kmsService.createCipherPairWithDataKey({
type: KmsDataKey.Organization,
orgId: orgGatewayConfig.orgId
});
const rootCaCert = new x509.X509Certificate(
orgKmsDecryptor({
cipherTextBlob: orgGatewayConfig.encryptedRootCaCertificate
})
);
const gatewayCaCert = new x509.X509Certificate(
orgKmsDecryptor({
cipherTextBlob: orgGatewayConfig.encryptedGatewayCaCertificate
})
);
const clientCert = new x509.X509Certificate(
orgKmsDecryptor({
cipherTextBlob: orgGatewayConfig.encryptedClientCertificate
})
);
const privateKey = crypto
.createPrivateKey({
key: orgKmsDecryptor({ cipherTextBlob: orgGatewayConfig.encryptedClientPrivateKey }),
format: "der",
type: "pkcs8"
})
.export({ type: "pkcs8", format: "pem" });
const relayAddress = orgKmsDecryptor({ cipherTextBlob: gateway.relayAddress }).toString();
const [relayHost, relayPort] = relayAddress.split(":");
await pingGatewayAndVerify({
relayHost,
relayPort: Number(relayPort),
tlsOptions: {
key: privateKey.toString(),
ca: `${gatewayCaCert.toString("pem")}\n${rootCaCert.toString("pem")}`.trim(),
cert: clientCert.toString("pem")
},
identityId: orgPermission.id,
orgId: orgPermission.orgId
});
await gatewayDAL.updateById(gateway.id, { heartbeat: new Date() });
};
const listGateways = async ({ orgPermission }: TListGatewaysDTO) => {
const { permission } = await permissionService.getOrgPermission(
orgPermission.type,
orgPermission.id,
orgPermission.orgId,
orgPermission.authMethod,
orgPermission.orgId
);
ForbiddenError.from(permission).throwUnlessCan(
OrgPermissionGatewayActions.ListGateways,
OrgPermissionSubjects.Gateway
);
const orgGatewayConfig = await orgGatewayConfigDAL.findOne({ orgId: orgPermission.orgId });
if (!orgGatewayConfig) return [];
const gateways = await gatewayDAL.find({
orgGatewayRootCaId: orgGatewayConfig.id
});
return gateways;
};
const getGatewayById = async ({ orgPermission, id }: TGetGatewayByIdDTO) => {
const { permission } = await permissionService.getOrgPermission(
orgPermission.type,
orgPermission.id,
orgPermission.orgId,
orgPermission.authMethod,
orgPermission.orgId
);
ForbiddenError.from(permission).throwUnlessCan(
OrgPermissionGatewayActions.ListGateways,
OrgPermissionSubjects.Gateway
);
const orgGatewayConfig = await orgGatewayConfigDAL.findOne({ orgId: orgPermission.orgId });
if (!orgGatewayConfig) throw new NotFoundError({ message: `Gateway with ID ${id} not found.` });
const [gateway] = await gatewayDAL.find({ id, orgGatewayRootCaId: orgGatewayConfig.id });
if (!gateway) throw new NotFoundError({ message: `Gateway with ID ${id} not found.` });
return gateway;
};
const updateGatewayById = async ({ orgPermission, id, name, projectIds }: TUpdateGatewayByIdDTO) => {
const { permission } = await permissionService.getOrgPermission(
orgPermission.type,
orgPermission.id,
orgPermission.orgId,
orgPermission.authMethod,
orgPermission.orgId
);
ForbiddenError.from(permission).throwUnlessCan(
OrgPermissionGatewayActions.EditGateways,
OrgPermissionSubjects.Gateway
);
const orgGatewayConfig = await orgGatewayConfigDAL.findOne({ orgId: orgPermission.orgId });
if (!orgGatewayConfig) throw new NotFoundError({ message: `Gateway with ID ${id} not found.` });
const [gateway] = await gatewayDAL.update({ id, orgGatewayRootCaId: orgGatewayConfig.id }, { name });
if (!gateway) throw new NotFoundError({ message: `Gateway with ID ${id} not found.` });
if (projectIds) {
await projectGatewayDAL.transaction(async (tx) => {
await projectGatewayDAL.delete({ gatewayId: gateway.id }, tx);
await projectGatewayDAL.insertMany(
projectIds.map((el) => ({ gatewayId: gateway.id, projectId: el })),
tx
);
});
}
return gateway;
};
const deleteGatewayById = async ({ orgPermission, id }: TGetGatewayByIdDTO) => {
const { permission } = await permissionService.getOrgPermission(
orgPermission.type,
orgPermission.id,
orgPermission.orgId,
orgPermission.authMethod,
orgPermission.orgId
);
ForbiddenError.from(permission).throwUnlessCan(
OrgPermissionGatewayActions.DeleteGateways,
OrgPermissionSubjects.Gateway
);
const orgGatewayConfig = await orgGatewayConfigDAL.findOne({ orgId: orgPermission.orgId });
if (!orgGatewayConfig) throw new NotFoundError({ message: `Gateway with ID ${id} not found.` });
const [gateway] = await gatewayDAL.delete({ id, orgGatewayRootCaId: orgGatewayConfig.id });
if (!gateway) throw new NotFoundError({ message: `Gateway with ID ${id} not found.` });
return gateway;
};
const getProjectGateways = async ({ projectId, projectPermission }: TGetProjectGatewayByIdDTO) => {
await permissionService.getProjectPermission({
projectId,
actor: projectPermission.type,
actorId: projectPermission.id,
actorOrgId: projectPermission.orgId,
actorAuthMethod: projectPermission.authMethod,
actionProjectType: ActionProjectType.Any
});
const gateways = await gatewayDAL.findByProjectId(projectId);
return gateways;
};
// this has no permission check and used for dynamic secrets directly
// assumes permission check is already done
const fnGetGatewayClientTls = async (projectGatewayId: string) => {
const projectGateway = await projectGatewayDAL.findById(projectGatewayId);
if (!projectGateway) throw new NotFoundError({ message: `Project gateway with ID ${projectGatewayId} not found.` });
const { gatewayId } = projectGateway;
const gateway = await gatewayDAL.findById(gatewayId);
if (!gateway) throw new NotFoundError({ message: `Gateway with ID ${gatewayId} not found.` });
const orgGatewayConfig = await orgGatewayConfigDAL.findById(gateway.orgGatewayRootCaId);
const { decryptor: orgKmsDecryptor } = await kmsService.createCipherPairWithDataKey({
type: KmsDataKey.Organization,
orgId: orgGatewayConfig.orgId
});
const rootCaCert = new x509.X509Certificate(
orgKmsDecryptor({
cipherTextBlob: orgGatewayConfig.encryptedRootCaCertificate
})
);
const gatewayCaCert = new x509.X509Certificate(
orgKmsDecryptor({
cipherTextBlob: orgGatewayConfig.encryptedGatewayCaCertificate
})
);
const clientCert = new x509.X509Certificate(
orgKmsDecryptor({
cipherTextBlob: orgGatewayConfig.encryptedClientCertificate
})
);
const clientSkObj = crypto.createPrivateKey({
key: orgKmsDecryptor({ cipherTextBlob: orgGatewayConfig.encryptedClientPrivateKey }),
format: "der",
type: "pkcs8"
});
return {
relayAddress: orgKmsDecryptor({ cipherTextBlob: gateway.relayAddress }).toString(),
privateKey: clientSkObj.export({ type: "pkcs8", format: "pem" }),
certificate: clientCert.toString("pem"),
certChain: `${gatewayCaCert.toString("pem")}\n${rootCaCert.toString("pem")}`.trim(),
identityId: gateway.identityId,
orgId: orgGatewayConfig.orgId
};
};
return {
getGatewayRelayDetails,
exchangeAllocatedRelayAddress,
listGateways,
getGatewayById,
updateGatewayById,
deleteGatewayById,
getProjectGateways,
fnGetGatewayClientTls,
heartbeat
};
};
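
For context, a sketch of how dynamic secrets consumed this service before its removal; it mirrors the gatewayProxyWrapper deleted from the SQL provider earlier in this compare:

const relay = await gatewayService.fnGetGatewayClientTls(projectGatewayId);
const [relayHost, relayPort] = relay.relayAddress.split(":");

await withGatewayProxy(
  async (port) => {
    // talk to the target through the local proxy at localhost:<port>
  },
  {
    targetHost,
    targetPort,
    relayHost,
    relayPort: Number(relayPort),
    identityId: relay.identityId,
    orgId: relay.orgId,
    tlsOptions: {
      ca: relay.certChain,
      cert: relay.certificate,
      key: relay.privateKey.toString()
    }
  }
);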

View File

@@ -1,39 +0,0 @@
import { OrgServiceActor } from "@app/lib/types";
import { ActorAuthMethod } from "@app/services/auth/auth-type";
export type TExchangeAllocatedRelayAddressDTO = {
identityId: string;
identityOrg: string;
identityOrgAuthMethod: ActorAuthMethod;
relayAddress: string;
};
export type TListGatewaysDTO = {
orgPermission: OrgServiceActor;
};
export type TGetGatewayByIdDTO = {
id: string;
orgPermission: OrgServiceActor;
};
export type TUpdateGatewayByIdDTO = {
id: string;
name?: string;
projectIds?: string[];
orgPermission: OrgServiceActor;
};
export type TDeleteGatewayByIdDTO = {
id: string;
orgPermission: OrgServiceActor;
};
export type TGetProjectGatewayByIdDTO = {
projectId: string;
projectPermission: OrgServiceActor;
};
export type THeartBeatDTO = {
orgPermission: OrgServiceActor;
};

View File

@@ -1,10 +0,0 @@
import { TDbClient } from "@app/db";
import { TableName } from "@app/db/schemas";
import { ormify } from "@app/lib/knex";
export type TOrgGatewayConfigDALFactory = ReturnType<typeof orgGatewayConfigDALFactory>;
export const orgGatewayConfigDALFactory = (db: TDbClient) => {
const orm = ormify(db, TableName.OrgGatewayConfig);
return orm;
};

View File

@@ -1,10 +0,0 @@
import { TDbClient } from "@app/db";
import { TableName } from "@app/db/schemas";
import { ormify } from "@app/lib/knex";
export type TProjectGatewayDALFactory = ReturnType<typeof projectGatewayDALFactory>;
export const projectGatewayDALFactory = (db: TDbClient) => {
const orm = ormify(db, TableName.ProjectGateway);
return orm;
};
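
Both of these deleted factories exposed only the generic ormify surface. A sketch of the calls the services above made against them (IDs are illustrative):

const projectGatewayDAL = projectGatewayDALFactory(db);

const projectGateway = await projectGatewayDAL.findById(projectGatewayId);
await projectGatewayDAL.transaction(async (tx) => {
  await projectGatewayDAL.delete({ gatewayId }, tx);
  await projectGatewayDAL.insertMany(
    projectIds.map((projectId) => ({ gatewayId, projectId })),
    tx
  );
});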

View File

@@ -3,7 +3,7 @@ import slugify from "@sindresorhus/slugify";
import { OrgMembershipRole, TOrgRoles } from "@app/db/schemas";
import { TOidcConfigDALFactory } from "@app/ee/services/oidc/oidc-config-dal";
import { validatePermissionBoundary } from "@app/lib/casl/boundary";
import { isAtLeastAsPrivileged } from "@app/lib/casl";
import { BadRequestError, ForbiddenRequestError, NotFoundError, UnauthorizedError } from "@app/lib/errors";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { TGroupProjectDALFactory } from "@app/services/group-project/group-project-dal";
@@ -87,14 +87,9 @@ export const groupServiceFactory = ({
actorOrgId
);
const isCustomRole = Boolean(customRole);
const permissionBoundary = validatePermissionBoundary(permission, rolePermission);
if (!permissionBoundary.isValid)
throw new ForbiddenRequestError({
name: "PermissionBoundaryError",
message: "Failed to create a more privileged group",
details: { missingPermissions: permissionBoundary.missingPermissions }
});
const hasRequiredPrivileges = isAtLeastAsPrivileged(permission, rolePermission);
if (!hasRequiredPrivileges)
throw new ForbiddenRequestError({ message: "Failed to create a more privileged group" });
const group = await groupDAL.transaction(async (tx) => {
const existingGroup = await groupDAL.findOne({ orgId: actorOrgId, name }, tx);
@@ -161,13 +156,9 @@ export const groupServiceFactory = ({
);
const isCustomRole = Boolean(customOrgRole);
const permissionBoundary = validatePermissionBoundary(permission, rolePermission);
if (!permissionBoundary.isValid)
throw new ForbiddenRequestError({
name: "PermissionBoundaryError",
message: "Failed to update a more privileged group",
details: { missingPermissions: permissionBoundary.missingPermissions }
});
const hasRequiredNewRolePermission = isAtLeastAsPrivileged(permission, rolePermission);
if (!hasRequiredNewRolePermission)
throw new ForbiddenRequestError({ message: "Failed to create a more privileged group" });
if (isCustomRole) customRole = customOrgRole;
}
@@ -338,13 +329,9 @@ export const groupServiceFactory = ({
const { permission: groupRolePermission } = await permissionService.getOrgPermissionByRole(group.role, actorOrgId);
// check if the user has privileges broader than or equal to the group's
const permissionBoundary = validatePermissionBoundary(permission, groupRolePermission);
if (!permissionBoundary.isValid)
throw new ForbiddenRequestError({
name: "PermissionBoundaryError",
message: "Failed to add user to more privileged group",
details: { missingPermissions: permissionBoundary.missingPermissions }
});
const hasRequiredPrivileges = isAtLeastAsPrivileged(permission, groupRolePermission);
if (!hasRequiredPrivileges)
throw new ForbiddenRequestError({ message: "Failed to add user to more privileged group" });
const user = await userDAL.findOne({ username });
if (!user) throw new NotFoundError({ message: `Failed to find user with username ${username}` });
@@ -409,13 +396,9 @@ export const groupServiceFactory = ({
const { permission: groupRolePermission } = await permissionService.getOrgPermissionByRole(group.role, actorOrgId);
// check if the user has privileges broader than or equal to the group's
const permissionBoundary = validatePermissionBoundary(permission, groupRolePermission);
if (!permissionBoundary.isValid)
throw new ForbiddenRequestError({
name: "PermissionBoundaryError",
message: "Failed to delete user from more privileged group",
details: { missingPermissions: permissionBoundary.missingPermissions }
});
const hasRequiredPrivileges = isAtLeastAsPrivileged(permission, groupRolePermission);
if (!hasRequiredPrivileges)
throw new ForbiddenRequestError({ message: "Failed to delete user from more privileged group" });
const user = await userDAL.findOne({ username });
if (!user) throw new NotFoundError({ message: `Failed to find user with username ${username}` });
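
The hunks above replace the condition-aware validatePermissionBoundary check with the coarser isAtLeastAsPrivileged guard. A self-contained sketch of the new pattern; extractPermissions and isAtLeastAsPrivileged are inlined here to mirror the implementation added later in this diff, and the abilities plus the plain Error are illustrative:

import { createMongoAbility, MongoAbility } from "@casl/ability";

// Mirrors lib/casl: flatten each rule into "action_subject" strings.
const extractPermissions = (ability: MongoAbility): string[] =>
  ability.rules.flatMap((rule) =>
    (typeof rule.action === "string" ? [rule.action] : rule.action).map(
      (action) => `${action}_${rule.subject as string}`
    )
  );

// Mirrors lib/casl: set containment on action/subject names only.
const isAtLeastAsPrivileged = (a: MongoAbility, b: MongoAbility): boolean => {
  const setA = new Set(extractPermissions(a));
  const setB = new Set(extractPermissions(b));
  for (const perm of setB) {
    if (!setA.has(perm)) return false;
  }
  return setA.size >= setB.size;
};

const assignerPermission = createMongoAbility([{ action: ["read", "edit"], subject: "groups" }]);
const rolePermission = createMongoAbility([{ action: ["read"], subject: "groups" }]);

// Same guard shape as in groupServiceFactory above.
if (!isAtLeastAsPrivileged(assignerPermission, rolePermission)) {
  throw new Error("Failed to create a more privileged group"); // ForbiddenRequestError in the service
}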

View File

@@ -3,7 +3,7 @@ import { packRules } from "@casl/ability/extra";
import ms from "ms";
import { ActionProjectType, TableName } from "@app/db/schemas";
import { validatePermissionBoundary } from "@app/lib/casl/boundary";
import { isAtLeastAsPrivileged } from "@app/lib/casl";
import { BadRequestError, ForbiddenRequestError, NotFoundError } from "@app/lib/errors";
import { unpackPermissions } from "@app/server/routes/sanitizedSchema/permission";
import { ActorType } from "@app/services/auth/auth-type";
@@ -79,13 +79,9 @@ export const identityProjectAdditionalPrivilegeV2ServiceFactory = ({
// we need to validate that the privilege given is not higher than the assigning user's permission
// @ts-expect-error expected, since one rule definition is very precise while the other is broader; both are valid CASL rules
targetIdentityPermission.update(targetIdentityPermission.rules.concat(customPermission));
const permissionBoundary = validatePermissionBoundary(permission, targetIdentityPermission);
if (!permissionBoundary.isValid)
throw new ForbiddenRequestError({
name: "PermissionBoundaryError",
message: "Failed to update more privileged identity",
details: { missingPermissions: permissionBoundary.missingPermissions }
});
const hasRequiredPrivileges = isAtLeastAsPrivileged(permission, targetIdentityPermission);
if (!hasRequiredPrivileges)
throw new ForbiddenRequestError({ message: "Failed to update more privileged identity" });
const existingSlug = await identityProjectAdditionalPrivilegeDAL.findOne({
slug,
@@ -165,13 +161,9 @@ export const identityProjectAdditionalPrivilegeV2ServiceFactory = ({
// we need to validate that the privilege given is not higher than the assigning user's permission
// @ts-expect-error expected, since one rule definition is very precise while the other is broader; both are valid CASL rules
targetIdentityPermission.update(targetIdentityPermission.rules.concat(data.permissions || []));
const permissionBoundary = validatePermissionBoundary(permission, targetIdentityPermission);
if (!permissionBoundary.isValid)
throw new ForbiddenRequestError({
name: "PermissionBoundaryError",
message: "Failed to update more privileged identity",
details: { missingPermissions: permissionBoundary.missingPermissions }
});
const hasRequiredPrivileges = isAtLeastAsPrivileged(permission, targetIdentityPermission);
if (!hasRequiredPrivileges)
throw new ForbiddenRequestError({ message: "Failed to update more privileged identity" });
if (data?.slug) {
const existingSlug = await identityProjectAdditionalPrivilegeDAL.findOne({
@@ -247,13 +239,9 @@ export const identityProjectAdditionalPrivilegeV2ServiceFactory = ({
actorOrgId,
actionProjectType: ActionProjectType.Any
});
const permissionBoundary = validatePermissionBoundary(permission, identityRolePermission);
if (!permissionBoundary.isValid)
throw new ForbiddenRequestError({
name: "PermissionBoundaryError",
message: "Failed to update more privileged identity",
details: { missingPermissions: permissionBoundary.missingPermissions }
});
const hasRequiredPrivileges = isAtLeastAsPrivileged(permission, identityRolePermission);
if (!hasRequiredPrivileges)
throw new ForbiddenRequestError({ message: "Failed to update more privileged identity" });
const deletedPrivilege = await identityProjectAdditionalPrivilegeDAL.deleteById(identityPrivilege.id);
return {

View File

@@ -3,7 +3,7 @@ import { PackRule, packRules, unpackRules } from "@casl/ability/extra";
import ms from "ms";
import { ActionProjectType } from "@app/db/schemas";
import { validatePermissionBoundary } from "@app/lib/casl/boundary";
import { isAtLeastAsPrivileged } from "@app/lib/casl";
import { BadRequestError, ForbiddenRequestError, NotFoundError } from "@app/lib/errors";
import { UnpackedPermissionSchema } from "@app/server/routes/sanitizedSchema/permission";
import { ActorType } from "@app/services/auth/auth-type";
@@ -88,13 +88,9 @@ export const identityProjectAdditionalPrivilegeServiceFactory = ({
// we need to validate that the privilege given is not higher than the assigning user's permission
// @ts-expect-error expected, since one rule definition is very precise while the other is broader; both are valid CASL rules
targetIdentityPermission.update(targetIdentityPermission.rules.concat(customPermission));
const permissionBoundary = validatePermissionBoundary(permission, targetIdentityPermission);
if (!permissionBoundary.isValid)
throw new ForbiddenRequestError({
name: "PermissionBoundaryError",
message: "Failed to update more privileged identity",
details: { missingPermissions: permissionBoundary.missingPermissions }
});
const hasRequiredPrivileges = isAtLeastAsPrivileged(permission, targetIdentityPermission);
if (!hasRequiredPrivileges)
throw new ForbiddenRequestError({ message: "Failed to update more privileged identity" });
const existingSlug = await identityProjectAdditionalPrivilegeDAL.findOne({
slug,
@@ -176,13 +172,9 @@ export const identityProjectAdditionalPrivilegeServiceFactory = ({
// we need to validate that the privilege given is not higher than the assigning user's permission
// @ts-expect-error expected, since one rule definition is very precise while the other is broader; both are valid CASL rules
targetIdentityPermission.update(targetIdentityPermission.rules.concat(data.permissions || []));
const permissionBoundary = validatePermissionBoundary(permission, targetIdentityPermission);
if (!permissionBoundary.isValid)
throw new ForbiddenRequestError({
name: "PermissionBoundaryError",
message: "Failed to update more privileged identity",
details: { missingPermissions: permissionBoundary.missingPermissions }
});
const hasRequiredPrivileges = isAtLeastAsPrivileged(permission, targetIdentityPermission);
if (!hasRequiredPrivileges)
throw new ForbiddenRequestError({ message: "Failed to update more privileged identity" });
const identityPrivilege = await identityProjectAdditionalPrivilegeDAL.findOne({
slug,
@@ -276,13 +268,9 @@ export const identityProjectAdditionalPrivilegeServiceFactory = ({
actorOrgId,
actionProjectType: ActionProjectType.Any
});
const permissionBoundary = validatePermissionBoundary(permission, identityRolePermission);
if (!permissionBoundary.isValid)
throw new ForbiddenRequestError({
name: "PermissionBoundaryError",
message: "Failed to edit more privileged identity",
details: { missingPermissions: permissionBoundary.missingPermissions }
});
const hasRequiredPrivileges = isAtLeastAsPrivileged(permission, identityRolePermission);
if (!hasRequiredPrivileges)
throw new ForbiddenRequestError({ message: "Failed to edit more privileged identity" });
const identityPrivilege = await identityProjectAdditionalPrivilegeDAL.findOne({
slug,

View File

@@ -51,8 +51,7 @@ export const getDefaultOnPremFeatures = (): TFeatureSet => ({
pkiEst: false,
enforceMfa: false,
projectTemplates: false,
kmip: false,
gateway: false
kmip: false
});
export const setupLicenseRequestWithStore = (baseURL: string, refreshUrl: string, licenseKey: string) => {

View File

@@ -69,7 +69,6 @@ export type TFeatureSet = {
enforceMfa: boolean;
projectTemplates: false;
kmip: false;
gateway: false;
};
export type TOrgPlansTableDTO = {

View File

@@ -12,7 +12,8 @@ export enum OrgPermissionActions {
Read = "read",
Create = "create",
Edit = "edit",
Delete = "delete"
Delete = "delete",
Update = "update"
}
export enum OrgPermissionAppConnectionActions {
@@ -32,14 +33,6 @@ export enum OrgPermissionAdminConsoleAction {
AccessAllProjects = "access-all-projects"
}
export enum OrgPermissionGatewayActions {
// is there a better word for this? This means: can an identity act as a gateway
CreateGateways = "create-gateways",
ListGateways = "list-gateways",
EditGateways = "edit-gateways",
DeleteGateways = "delete-gateways"
}
export enum OrgPermissionSubjects {
Workspace = "workspace",
Role = "role",
@@ -59,7 +52,7 @@ export enum OrgPermissionSubjects {
ProjectTemplates = "project-templates",
AppConnections = "app-connections",
Kmip = "kmip",
Gateway = "gateway"
DedicatedInstance = "dedicatedInstance"
}
export type AppConnectionSubjectFields = {
@@ -82,7 +75,6 @@ export type OrgPermissionSet =
| [OrgPermissionActions, OrgPermissionSubjects.Kms]
| [OrgPermissionActions, OrgPermissionSubjects.AuditLogs]
| [OrgPermissionActions, OrgPermissionSubjects.ProjectTemplates]
| [OrgPermissionGatewayActions, OrgPermissionSubjects.Gateway]
| [
OrgPermissionAppConnectionActions,
(
@@ -91,7 +83,8 @@ export type OrgPermissionSet =
)
]
| [OrgPermissionAdminConsoleAction, OrgPermissionSubjects.AdminConsole]
| [OrgPermissionKmipActions, OrgPermissionSubjects.Kmip];
| [OrgPermissionKmipActions, OrgPermissionSubjects.Kmip]
| [OrgPermissionActions, OrgPermissionSubjects.DedicatedInstance];
const AppConnectionConditionSchema = z
.object({
@@ -192,10 +185,8 @@ export const OrgPermissionSchema = z.discriminatedUnion("subject", [
)
}),
z.object({
subject: z.literal(OrgPermissionSubjects.Gateway).describe("The entity this permission pertains to."),
action: CASL_ACTION_SCHEMA_NATIVE_ENUM(OrgPermissionGatewayActions).describe(
"Describe what action an entity can take."
)
subject: z.literal(OrgPermissionSubjects.DedicatedInstance).describe("The entity this permission pertains to."),
action: CASL_ACTION_SCHEMA_NATIVE_ENUM(OrgPermissionActions).describe("Describe what action an entity can take.")
})
]);
@@ -280,11 +271,6 @@ const buildAdminPermission = () => {
can(OrgPermissionAppConnectionActions.Delete, OrgPermissionSubjects.AppConnections);
can(OrgPermissionAppConnectionActions.Connect, OrgPermissionSubjects.AppConnections);
can(OrgPermissionGatewayActions.ListGateways, OrgPermissionSubjects.Gateway);
can(OrgPermissionGatewayActions.CreateGateways, OrgPermissionSubjects.Gateway);
can(OrgPermissionGatewayActions.EditGateways, OrgPermissionSubjects.Gateway);
can(OrgPermissionGatewayActions.DeleteGateways, OrgPermissionSubjects.Gateway);
can(OrgPermissionAdminConsoleAction.AccessAllProjects, OrgPermissionSubjects.AdminConsole);
can(OrgPermissionKmipActions.Setup, OrgPermissionSubjects.Kmip);
@@ -292,6 +278,11 @@ const buildAdminPermission = () => {
// the proxy assignment is temporary in order to prevent "more privilege" error during role assignment to MI
can(OrgPermissionKmipActions.Proxy, OrgPermissionSubjects.Kmip);
can(OrgPermissionActions.Create, OrgPermissionSubjects.DedicatedInstance);
can(OrgPermissionActions.Read, OrgPermissionSubjects.DedicatedInstance);
can(OrgPermissionActions.Update, OrgPermissionSubjects.DedicatedInstance);
can(OrgPermissionActions.Delete, OrgPermissionSubjects.DedicatedInstance);
return rules;
};
@@ -321,8 +312,8 @@ const buildMemberPermission = () => {
can(OrgPermissionActions.Read, OrgPermissionSubjects.AuditLogs);
can(OrgPermissionAppConnectionActions.Connect, OrgPermissionSubjects.AppConnections);
can(OrgPermissionGatewayActions.ListGateways, OrgPermissionSubjects.Gateway);
can(OrgPermissionGatewayActions.CreateGateways, OrgPermissionSubjects.Gateway);
can(OrgPermissionActions.Read, OrgPermissionSubjects.DedicatedInstance);
return rules;
};
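
A short sketch of the @casl/ability builder pattern used by buildAdminPermission and buildMemberPermission above; the subject string mirrors the new DedicatedInstance subject, while the builder wiring is standard CASL:

import { AbilityBuilder, createMongoAbility } from "@casl/ability";

const buildMemberRules = () => {
  const { can, rules } = new AbilityBuilder(createMongoAbility);
  // Members get read-only access to the new subject, per the hunk above.
  can("read", "dedicatedInstance");
  return rules;
};

const memberAbility = createMongoAbility(buildMemberRules());
console.log(memberAbility.can("read", "dedicatedInstance")); // true
console.log(memberAbility.can("create", "dedicatedInstance")); // false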

View File

@@ -5,6 +5,22 @@ import { PermissionConditionOperators } from "@app/lib/casl";
export const PermissionConditionSchema = {
[PermissionConditionOperators.$IN]: z.string().trim().min(1).array(),
[PermissionConditionOperators.$ALL]: z.string().trim().min(1).array(),
[PermissionConditionOperators.$REGEX]: z
.string()
.min(1)
.refine(
(el) => {
try {
// eslint-disable-next-line no-new
new RegExp(el);
return true;
} catch {
return false;
}
},
{ message: "Invalid regex pattern" }
),
[PermissionConditionOperators.$EQ]: z.string().min(1),
[PermissionConditionOperators.$NEQ]: z.string().min(1),
[PermissionConditionOperators.$GLOB]: z

View File

@@ -3,7 +3,7 @@ import { PackRule, packRules, unpackRules } from "@casl/ability/extra";
import ms from "ms";
import { ActionProjectType, TableName } from "@app/db/schemas";
import { validatePermissionBoundary } from "@app/lib/casl/boundary";
import { isAtLeastAsPrivileged } from "@app/lib/casl";
import { BadRequestError, ForbiddenRequestError, NotFoundError } from "@app/lib/errors";
import { UnpackedPermissionSchema } from "@app/server/routes/sanitizedSchema/permission";
import { ActorType } from "@app/services/auth/auth-type";
@@ -76,13 +76,9 @@ export const projectUserAdditionalPrivilegeServiceFactory = ({
// we need to validate that the privilege given is not higher than the assigning user's permission
// @ts-expect-error expected, since one rule definition is very precise while the other is broader; both are valid CASL rules
targetUserPermission.update(targetUserPermission.rules.concat(customPermission));
const permissionBoundary = validatePermissionBoundary(permission, targetUserPermission);
if (!permissionBoundary.isValid)
throw new ForbiddenRequestError({
name: "PermissionBoundaryError",
message: "Failed to update more privileged user",
details: { missingPermissions: permissionBoundary.missingPermissions }
});
const hasRequiredPrivileges = isAtLeastAsPrivileged(permission, targetUserPermission);
if (!hasRequiredPrivileges)
throw new ForbiddenRequestError({ message: "Failed to update more privileged user" });
const existingSlug = await projectUserAdditionalPrivilegeDAL.findOne({
slug,
@@ -167,13 +163,9 @@ export const projectUserAdditionalPrivilegeServiceFactory = ({
// we need to validate that the privilege given is not higher than the assigning user's permission
// @ts-expect-error expected, since one rule definition is very precise while the other is broader; both are valid CASL rules
targetUserPermission.update(targetUserPermission.rules.concat(dto.permissions || []));
const permissionBoundary = validatePermissionBoundary(permission, targetUserPermission);
if (!permissionBoundary.isValid)
throw new ForbiddenRequestError({
name: "PermissionBoundaryError",
message: "Failed to update more privileged identity",
details: { missingPermissions: permissionBoundary.missingPermissions }
});
const hasRequiredPrivileges = isAtLeastAsPrivileged(permission, targetUserPermission);
if (!hasRequiredPrivileges)
throw new ForbiddenRequestError({ message: "Failed to update more privileged identity" });
if (dto?.slug) {
const existingSlug = await projectUserAdditionalPrivilegeDAL.findOne({

View File

@@ -100,7 +100,6 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
tx.ref("lastName").withSchema("committerUser").as("committerUserLastName"),
tx.ref("reviewerUserId").withSchema(TableName.SecretApprovalRequestReviewer),
tx.ref("status").withSchema(TableName.SecretApprovalRequestReviewer).as("reviewerStatus"),
tx.ref("comment").withSchema(TableName.SecretApprovalRequestReviewer).as("reviewerComment"),
tx.ref("email").withSchema("secretApprovalReviewerUser").as("reviewerEmail"),
tx.ref("username").withSchema("secretApprovalReviewerUser").as("reviewerUsername"),
tx.ref("firstName").withSchema("secretApprovalReviewerUser").as("reviewerFirstName"),
@@ -163,10 +162,8 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
reviewerEmail: email,
reviewerLastName: lastName,
reviewerUsername: username,
reviewerFirstName: firstName,
reviewerComment: comment
}) =>
userId ? { userId, status, email, firstName, lastName, username, comment: comment ?? "" } : undefined
reviewerFirstName: firstName
}) => (userId ? { userId, status, email, firstName, lastName, username } : undefined)
},
{
key: "approverUserId",

View File

@@ -320,7 +320,6 @@ export const secretApprovalRequestServiceFactory = ({
approvalId,
actor,
status,
comment,
actorId,
actorAuthMethod,
actorOrgId
@@ -373,18 +372,15 @@ export const secretApprovalRequestServiceFactory = ({
return secretApprovalRequestReviewerDAL.create(
{
status,
comment,
requestId: secretApprovalRequest.id,
reviewerUserId: actorId
},
tx
);
}
return secretApprovalRequestReviewerDAL.updateById(review.id, { status, comment }, tx);
return secretApprovalRequestReviewerDAL.updateById(review.id, { status }, tx);
});
return { ...reviewStatus, projectId: secretApprovalRequest.projectId };
return reviewStatus;
};
const updateApprovalStatus = async ({
@@ -503,7 +499,7 @@ export const secretApprovalRequestServiceFactory = ({
if (!hasMinApproval && !isSoftEnforcement)
throw new BadRequestError({ message: "Doesn't have minimum approvals needed" });
const { botKey, shouldUseSecretV2Bridge, project } = await projectBotService.getBotKey(projectId);
const { botKey, shouldUseSecretV2Bridge } = await projectBotService.getBotKey(projectId);
let mergeStatus;
if (shouldUseSecretV2Bridge) {
// this cycle is for bridged secrets
@@ -861,6 +857,7 @@ export const secretApprovalRequestServiceFactory = ({
if (isSoftEnforcement) {
const cfg = getConfig();
const project = await projectDAL.findProjectById(projectId);
const env = await projectEnvDAL.findOne({ id: policy.envId });
const requestedByUser = await userDAL.findOne({ id: actorId });
const approverUsers = await userDAL.find({
@@ -1155,8 +1152,7 @@ export const secretApprovalRequestServiceFactory = ({
environment: env.name,
secretPath,
projectId,
requestId: secretApprovalRequest.id,
secretKeys: [...new Set(Object.values(data).flatMap((arr) => arr?.map((item) => item.secretName) ?? []))]
requestId: secretApprovalRequest.id
}
}
});
@@ -1298,7 +1294,7 @@ export const secretApprovalRequestServiceFactory = ({
secretMetadata
}) => {
const secretId = updatingSecretsGroupByKey[secretKey][0].id;
if (tagIds?.length) commitTagIds[newSecretName ?? secretKey] = tagIds;
if (tagIds?.length) commitTagIds[secretKey] = tagIds;
return {
...latestSecretVersions[secretId],
secretMetadata,
@@ -1456,8 +1452,7 @@ export const secretApprovalRequestServiceFactory = ({
environment: env.name,
secretPath,
projectId,
requestId: secretApprovalRequest.id,
secretKeys: [...new Set(Object.values(data).flatMap((arr) => arr?.map((item) => item.secretKey) ?? []))]
requestId: secretApprovalRequest.id
}
}
});

View File

@@ -80,7 +80,6 @@ export type TStatusChangeDTO = {
export type TReviewRequestDTO = {
approvalId: string;
status: ApprovalStatus;
comment?: string;
} & Omit<TProjectPermission, "projectId">;
export type TApprovalRequestCountDTO = TProjectPermission;

View File

@@ -13,7 +13,6 @@ import { NotFoundError } from "@app/lib/errors";
import { logger } from "@app/lib/logger";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { QueueJobs, QueueName, TQueueServiceFactory } from "@app/queue";
import { ActorType } from "@app/services/auth/auth-type";
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
import { KmsDataKey } from "@app/services/kms/kms-types";
import { TProjectBotServiceFactory } from "@app/services/project-bot/project-bot-service";
@@ -333,7 +332,6 @@ export const secretRotationQueueFactory = ({
await secretVersionV2BridgeDAL.insertMany(
updatedSecrets.map(({ id, updatedAt, createdAt, ...el }) => ({
...el,
actorType: ActorType.PLATFORM,
secretId: id
})),
tx

View File

@@ -7,7 +7,6 @@ import { decryptSymmetric128BitHexKeyUTF8 } from "@app/lib/crypto";
import { InternalServerError, NotFoundError } from "@app/lib/errors";
import { groupBy } from "@app/lib/fn";
import { logger } from "@app/lib/logger";
import { ActorType } from "@app/services/auth/auth-type";
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
import { KmsDataKey } from "@app/services/kms/kms-types";
import { TProjectBotServiceFactory } from "@app/services/project-bot/project-bot-service";
@@ -371,21 +370,7 @@ export const secretSnapshotServiceFactory = ({
const secrets = await secretV2BridgeDAL.insertMany(
rollbackSnaps.flatMap(({ secretVersions, folderId }) =>
secretVersions.map(
({
latestSecretVersion,
version,
updatedAt,
createdAt,
secretId,
envId,
id,
tags,
// exclude the bottom fields from the secret - they are for versioning only.
userActorId,
identityActorId,
actorType,
...el
}) => ({
({ latestSecretVersion, version, updatedAt, createdAt, secretId, envId, id, tags, ...el }) => ({
...el,
id: secretId,
version: deletedTopLevelSecsGroupById[secretId] ? latestSecretVersion + 1 : latestSecretVersion,
@@ -416,18 +401,8 @@ export const secretSnapshotServiceFactory = ({
})),
tx
);
const userActorId = actor === ActorType.USER ? actorId : undefined;
const identityActorId = actor !== ActorType.USER ? actorId : undefined;
const actorType = actor || ActorType.PLATFORM;
const secretVersions = await secretVersionV2BridgeDAL.insertMany(
secrets.map(({ id, updatedAt, createdAt, ...el }) => ({
...el,
secretId: id,
userActorId,
identityActorId,
actorType
})),
secrets.map(({ id, updatedAt, createdAt, ...el }) => ({ ...el, secretId: id })),
tx
);
await secretVersionV2TagBridgeDAL.insertMany(

View File

@@ -6,6 +6,7 @@ export const sanitizedSshCertificate = SshCertificatesSchema.pick({
sshCertificateTemplateId: true,
serialNumber: true,
certType: true,
publicKey: true,
principals: true,
keyId: true,
notBefore: true,

View File

@@ -1,15 +1,12 @@
import { Redis } from "ioredis";
import { pgAdvisoryLockHashText } from "@app/lib/crypto/hashtext";
import { Redlock, Settings } from "@app/lib/red-lock";
export const PgSqlLock = {
BootUpMigration: 2023,
SuperAdminInit: 2024,
KmsRootKeyInit: 2025,
OrgGatewayRootCaInit: (orgId: string) => pgAdvisoryLockHashText(`org-gateway-root-ca:${orgId}`),
OrgGatewayCertExchange: (orgId: string) => pgAdvisoryLockHashText(`org-gateway-cert-exchange:${orgId}`)
} as const;
export enum PgSqlLock {
BootUpMigration = 2023,
SuperAdminInit = 2024,
KmsRootKeyInit = 2025
}
export type TKeyStoreFactory = ReturnType<typeof keyStoreFactory>;
@@ -36,8 +33,7 @@ export const KeyStorePrefixes = {
SecretSyncLastRunTimestamp: (syncId: string) => `secret-sync-last-run-${syncId}` as const,
IdentityAccessTokenStatusUpdate: (identityAccessTokenId: string) =>
`identity-access-token-status:${identityAccessTokenId}`,
ServiceTokenStatusUpdate: (serviceTokenId: string) => `service-token-status:${serviceTokenId}`,
GatewayIdentityCredential: (identityId: string) => `gateway-credentials:${identityId}`
ServiceTokenStatusUpdate: (serviceTokenId: string) => `service-token-status:${serviceTokenId}`
};
export const KeyStoreTtls = {

View File

@@ -459,8 +459,7 @@ export const PROJECTS = {
workspaceId: "The ID of the project to update.",
name: "The new name of the project.",
projectDescription: "An optional description label for the project.",
autoCapitalization: "Disable or enable auto-capitalization for the project.",
slug: "An optional slug for the project. (must be unique within the organization)"
autoCapitalization: "Disable or enable auto-capitalization for the project."
},
GET_KEY: {
workspaceId: "The ID of the project to get the key from."
@@ -639,8 +638,7 @@ export const FOLDERS = {
environment: "The slug of the environment to create the folder in.",
name: "The name of the folder to create.",
path: "The path of the folder to create.",
directory: "The directory of the folder to create. (Deprecated in favor of path)",
description: "An optional description label for the folder."
directory: "The directory of the folder to create. (Deprecated in favor of path)"
},
UPDATE: {
folderId: "The ID of the folder to update.",
@@ -649,8 +647,7 @@ export const FOLDERS = {
path: "The path of the folder to update.",
directory: "The new directory of the folder to update. (Deprecated in favor of path)",
projectSlug: "The slug of the project where the folder is located.",
workspaceId: "The ID of the project where the folder is located.",
description: "An optional description label for the folder."
workspaceId: "The ID of the project where the folder is located."
},
DELETE: {
folderIdOrName: "The ID or name of the folder to delete.",

View File

@@ -1,669 +0,0 @@
import { createMongoAbility } from "@casl/ability";
import { PermissionConditionOperators } from ".";
import { validatePermissionBoundary } from "./boundary";
describe("Validate Permission Boundary Function", () => {
test.each([
{
title: "child with equal privilege",
parentPermission: createMongoAbility([
{
action: ["create", "edit", "delete", "read"],
subject: "secrets"
}
]),
childPermission: createMongoAbility([
{
action: ["create", "edit", "delete", "read"],
subject: "secrets"
}
]),
expectValid: true,
missingPermissions: []
},
{
title: "child with less privilege",
parentPermission: createMongoAbility([
{
action: ["create", "edit", "delete", "read"],
subject: "secrets"
}
]),
childPermission: createMongoAbility([
{
action: ["create", "edit"],
subject: "secrets"
}
]),
expectValid: true,
missingPermissions: []
},
{
title: "child with more privilege",
parentPermission: createMongoAbility([
{
action: ["create"],
subject: "secrets"
}
]),
childPermission: createMongoAbility([
{
action: ["create", "edit"],
subject: "secrets"
}
]),
expectValid: false,
missingPermissions: [{ action: "edit", subject: "secrets" }]
},
{
title: "parent with multiple and child with multiple",
parentPermission: createMongoAbility([
{
action: ["create"],
subject: "secrets"
},
{
action: ["create", "edit"],
subject: "members"
}
]),
childPermission: createMongoAbility([
{
action: ["create"],
subject: "members"
},
{
action: ["create"],
subject: "secrets"
}
]),
expectValid: true,
missingPermissions: []
},
{
title: "Child with no access",
parentPermission: createMongoAbility([
{
action: ["create"],
subject: "secrets"
},
{
action: ["create", "edit"],
subject: "members"
}
]),
childPermission: createMongoAbility([]),
expectValid: true,
missingPermissions: []
},
{
title: "Parent and child disjoint set",
parentPermission: createMongoAbility([
{
action: ["create", "edit", "delete", "read"],
subject: "secrets",
conditions: {
environment: { [PermissionConditionOperators.$EQ]: "dev" }
}
}
]),
childPermission: createMongoAbility([
{
action: ["create", "edit", "delete", "read"],
subject: "secrets",
conditions: {
secretPath: { [PermissionConditionOperators.$EQ]: "dev" }
}
}
]),
expectValid: false,
missingPermissions: ["create", "edit", "delete", "read"].map((el) => ({
action: el,
subject: "secrets",
conditions: {
secretPath: { [PermissionConditionOperators.$EQ]: "dev" }
}
}))
},
{
title: "Parent with inverted rules",
parentPermission: createMongoAbility([
{
action: ["create", "edit", "delete", "read"],
subject: "secrets",
conditions: {
environment: { [PermissionConditionOperators.$EQ]: "dev" }
}
},
{
action: "read",
subject: "secrets",
inverted: true,
conditions: {
environment: { [PermissionConditionOperators.$EQ]: "dev" },
secretPath: { [PermissionConditionOperators.$GLOB]: "/hello/**" }
}
}
]),
childPermission: createMongoAbility([
{
action: "read",
subject: "secrets",
conditions: {
environment: { [PermissionConditionOperators.$EQ]: "dev" },
secretPath: { [PermissionConditionOperators.$EQ]: "/" }
}
}
]),
expectValid: true,
missingPermissions: []
},
{
title: "Parent with inverted rules - child accessing invalid one",
parentPermission: createMongoAbility([
{
action: ["create", "edit", "delete", "read"],
subject: "secrets",
conditions: {
environment: { [PermissionConditionOperators.$EQ]: "dev" }
}
},
{
action: "read",
subject: "secrets",
inverted: true,
conditions: {
environment: { [PermissionConditionOperators.$EQ]: "dev" },
secretPath: { [PermissionConditionOperators.$GLOB]: "/hello/**" }
}
}
]),
childPermission: createMongoAbility([
{
action: "read",
subject: "secrets",
conditions: {
environment: { [PermissionConditionOperators.$EQ]: "dev" },
secretPath: { [PermissionConditionOperators.$EQ]: "/hello/world" }
}
}
]),
expectValid: false,
missingPermissions: [
{
action: "read",
subject: "secrets",
conditions: {
environment: { [PermissionConditionOperators.$EQ]: "dev" },
secretPath: { [PermissionConditionOperators.$EQ]: "/hello/world" }
}
}
]
}
])("Check permission: $title", ({ parentPermission, childPermission, expectValid, missingPermissions }) => {
const permissionBoundary = validatePermissionBoundary(parentPermission, childPermission);
if (expectValid) {
expect(permissionBoundary.isValid).toBeTruthy();
} else {
expect(permissionBoundary.isValid).toBeFalsy();
expect(permissionBoundary.missingPermissions).toEqual(expect.arrayContaining(missingPermissions));
}
});
});
describe("Validate Permission Boundary: Checking Parent $eq operator", () => {
const parentPermission = createMongoAbility([
{
action: ["create", "read"],
subject: "secrets",
conditions: {
environment: { [PermissionConditionOperators.$EQ]: "dev" }
}
}
]);
test.each([
{
operator: PermissionConditionOperators.$EQ,
childPermission: createMongoAbility([
{
action: ["create"],
subject: "secrets",
conditions: {
environment: { [PermissionConditionOperators.$EQ]: "dev" }
}
}
])
},
{
operator: PermissionConditionOperators.$IN,
childPermission: createMongoAbility([
{
action: ["create"],
subject: "secrets",
conditions: {
environment: { [PermissionConditionOperators.$IN]: ["dev"] }
}
}
])
},
{
operator: PermissionConditionOperators.$GLOB,
childPermission: createMongoAbility([
{
action: ["create"],
subject: "secrets",
conditions: {
environment: { [PermissionConditionOperators.$GLOB]: "dev" }
}
}
])
}
])("Child $operator truthy cases", ({ childPermission }) => {
const permissionBoundary = validatePermissionBoundary(parentPermission, childPermission);
expect(permissionBoundary.isValid).toBeTruthy();
});
test.each([
{
operator: PermissionConditionOperators.$EQ,
childPermission: createMongoAbility([
{
action: ["create"],
subject: "secrets",
conditions: {
environment: { [PermissionConditionOperators.$EQ]: "prod" }
}
}
])
},
{
operator: PermissionConditionOperators.$IN,
childPermission: createMongoAbility([
{
action: ["create"],
subject: "secrets",
conditions: {
environment: { [PermissionConditionOperators.$IN]: ["dev", "prod"] }
}
}
])
},
{
operator: PermissionConditionOperators.$GLOB,
childPermission: createMongoAbility([
{
action: ["create"],
subject: "secrets",
conditions: {
environment: { [PermissionConditionOperators.$GLOB]: "dev**" }
}
}
])
},
{
operator: PermissionConditionOperators.$NEQ,
childPermission: createMongoAbility([
{
action: ["create"],
subject: "secrets",
conditions: {
environment: { [PermissionConditionOperators.$GLOB]: "staging" }
}
}
])
}
])("Child $operator falsy cases", ({ childPermission }) => {
const permissionBoundary = validatePermissionBoundary(parentPermission, childPermission);
expect(permissionBoundary.isValid).toBeFalsy();
});
});
describe("Validate Permission Boundary: Checking Parent $neq operator", () => {
const parentPermission = createMongoAbility([
{
action: ["create", "read"],
subject: "secrets",
conditions: {
secretPath: { [PermissionConditionOperators.$NEQ]: "/hello" }
}
}
]);
test.each([
{
operator: PermissionConditionOperators.$EQ,
childPermission: createMongoAbility([
{
action: ["create"],
subject: "secrets",
conditions: {
secretPath: { [PermissionConditionOperators.$EQ]: "/" }
}
}
])
},
{
operator: PermissionConditionOperators.$NEQ,
childPermission: createMongoAbility([
{
action: ["create"],
subject: "secrets",
conditions: {
secretPath: { [PermissionConditionOperators.$NEQ]: "/hello" }
}
}
])
},
{
operator: PermissionConditionOperators.$IN,
childPermission: createMongoAbility([
{
action: ["create"],
subject: "secrets",
conditions: {
secretPath: { [PermissionConditionOperators.$IN]: ["/", "/staging"] }
}
}
])
},
{
operator: PermissionConditionOperators.$GLOB,
childPermission: createMongoAbility([
{
action: ["create"],
subject: "secrets",
conditions: {
secretPath: { [PermissionConditionOperators.$GLOB]: "/dev**" }
}
}
])
}
])("Child $operator truthy cases", ({ childPermission }) => {
const permissionBoundary = validatePermissionBoundary(parentPermission, childPermission);
expect(permissionBoundary.isValid).toBeTruthy();
});
test.each([
{
operator: PermissionConditionOperators.$EQ,
childPermission: createMongoAbility([
{
action: ["create"],
subject: "secrets",
conditions: {
secretPath: { [PermissionConditionOperators.$EQ]: "/hello" }
}
}
])
},
{
operator: PermissionConditionOperators.$NEQ,
childPermission: createMongoAbility([
{
action: ["create"],
subject: "secrets",
conditions: {
secretPath: { [PermissionConditionOperators.$NEQ]: "/" }
}
}
])
},
{
operator: PermissionConditionOperators.$IN,
childPermission: createMongoAbility([
{
action: ["create"],
subject: "secrets",
conditions: {
secretPath: { [PermissionConditionOperators.$IN]: ["/", "/hello"] }
}
}
])
},
{
operator: PermissionConditionOperators.$GLOB,
childPermission: createMongoAbility([
{
action: ["create"],
subject: "secrets",
conditions: {
secretPath: { [PermissionConditionOperators.$GLOB]: "/hello**" }
}
}
])
}
])("Child $operator falsy cases", ({ childPermission }) => {
const permissionBoundary = validatePermissionBoundary(parentPermission, childPermission);
expect(permissionBoundary.isValid).toBeFalsy();
});
});
describe("Validate Permission Boundary: Checking Parent $IN operator", () => {
const parentPermission = createMongoAbility([
{
action: ["edit"],
subject: "secrets",
conditions: {
environment: { [PermissionConditionOperators.$IN]: ["dev", "staging"] }
}
}
]);
test.each([
{
operator: PermissionConditionOperators.$EQ,
childPermission: createMongoAbility([
{
action: ["edit"],
subject: "secrets",
conditions: {
environment: { [PermissionConditionOperators.$EQ]: "dev" }
}
}
])
},
{
operator: PermissionConditionOperators.$IN,
childPermission: createMongoAbility([
{
action: ["edit"],
subject: "secrets",
conditions: {
environment: { [PermissionConditionOperators.$IN]: ["dev"] }
}
}
])
},
{
operator: `${PermissionConditionOperators.$IN} - 2`,
childPermission: createMongoAbility([
{
action: ["edit"],
subject: "secrets",
conditions: {
environment: { [PermissionConditionOperators.$IN]: ["dev", "staging"] }
}
}
])
},
{
operator: PermissionConditionOperators.$GLOB,
childPermission: createMongoAbility([
{
action: ["edit"],
subject: "secrets",
conditions: {
environment: { [PermissionConditionOperators.$GLOB]: "dev" }
}
}
])
}
])("Child $operator truthy cases", ({ childPermission }) => {
const permissionBoundary = validatePermissionBoundary(parentPermission, childPermission);
expect(permissionBoundary.isValid).toBeTruthy();
});
test.each([
{
operator: PermissionConditionOperators.$EQ,
childPermission: createMongoAbility([
{
action: ["edit"],
subject: "secrets",
conditions: {
environment: { [PermissionConditionOperators.$EQ]: "prod" }
}
}
])
},
{
operator: PermissionConditionOperators.$NEQ,
childPermission: createMongoAbility([
{
action: ["edit"],
subject: "secrets",
conditions: {
environment: { [PermissionConditionOperators.$NEQ]: "dev" }
}
}
])
},
{
operator: PermissionConditionOperators.$IN,
childPermission: createMongoAbility([
{
action: ["edit"],
subject: "secrets",
conditions: {
environment: { [PermissionConditionOperators.$IN]: ["dev", "prod"] }
}
}
])
},
{
operator: PermissionConditionOperators.$GLOB,
childPermission: createMongoAbility([
{
action: ["edit"],
subject: "secrets",
conditions: {
environment: { [PermissionConditionOperators.$GLOB]: "dev**" }
}
}
])
}
])("Child $operator falsy cases", ({ childPermission }) => {
const permissionBoundary = validatePermissionBoundary(parentPermission, childPermission);
expect(permissionBoundary.isValid).toBeFalsy();
});
});
describe("Validate Permission Boundary: Checking Parent $GLOB operator", () => {
const parentPermission = createMongoAbility([
{
action: ["create", "read"],
subject: "secrets",
conditions: {
secretPath: { [PermissionConditionOperators.$GLOB]: "/hello/**" }
}
}
]);
test.each([
{
operator: PermissionConditionOperators.$EQ,
childPermission: createMongoAbility([
{
action: ["create"],
subject: "secrets",
conditions: {
secretPath: { [PermissionConditionOperators.$EQ]: "/hello/world" }
}
}
])
},
{
operator: PermissionConditionOperators.$IN,
childPermission: createMongoAbility([
{
action: ["create"],
subject: "secrets",
conditions: {
secretPath: { [PermissionConditionOperators.$IN]: ["/hello/world", "/hello/world2"] }
}
}
])
},
{
operator: PermissionConditionOperators.$GLOB,
childPermission: createMongoAbility([
{
action: ["create"],
subject: "secrets",
conditions: {
secretPath: { [PermissionConditionOperators.$GLOB]: "/hello/**/world" }
}
}
])
}
])("Child $operator truthy cases", ({ childPermission }) => {
const permissionBoundary = validatePermissionBoundary(parentPermission, childPermission);
expect(permissionBoundary.isValid).toBeTruthy();
});
test.each([
{
operator: PermissionConditionOperators.$EQ,
childPermission: createMongoAbility([
{
action: ["create"],
subject: "secrets",
conditions: {
secretPath: { [PermissionConditionOperators.$EQ]: "/print" }
}
}
])
},
{
operator: PermissionConditionOperators.$NEQ,
childPermission: createMongoAbility([
{
action: ["create"],
subject: "secrets",
conditions: {
secretPath: { [PermissionConditionOperators.$NEQ]: "/hello/world" }
}
}
])
},
{
operator: PermissionConditionOperators.$IN,
childPermission: createMongoAbility([
{
action: ["create"],
subject: "secrets",
conditions: {
secretPath: { [PermissionConditionOperators.$IN]: ["/", "/hello"] }
}
}
])
},
{
operator: PermissionConditionOperators.$GLOB,
childPermission: createMongoAbility([
{
action: ["create"],
subject: "secrets",
conditions: {
secretPath: { [PermissionConditionOperators.$GLOB]: "/hello**" }
}
}
])
}
])("Child $operator falsy cases", ({ childPermission }) => {
const permissionBoundary = validatePermissionBoundary(parentPermission, childPermission);
expect(permissionBoundary.isValid).toBeFalsy();
});
});

View File

@@ -1,249 +0,0 @@
import { MongoAbility } from "@casl/ability";
import { MongoQuery } from "@ucast/mongo2js";
import picomatch from "picomatch";
import { PermissionConditionOperators } from "./index";
type TMissingPermission = {
action: string;
subject: string;
conditions?: MongoQuery;
};
type TPermissionConditionShape = {
[PermissionConditionOperators.$EQ]: string;
[PermissionConditionOperators.$NEQ]: string;
[PermissionConditionOperators.$GLOB]: string;
[PermissionConditionOperators.$IN]: string[];
};
const getPermissionSetID = (action: string, subject: string) => `${action}:${subject}`;
const invertTheOperation = (shouldInvert: boolean, operation: boolean) => (shouldInvert ? !operation : operation);
const formatConditionOperator = (condition: TPermissionConditionShape | string) => {
return (
typeof condition === "string" ? { [PermissionConditionOperators.$EQ]: condition } : condition
) as TPermissionConditionShape;
};
const isOperatorsASubset = (parentSet: TPermissionConditionShape, subset: TPermissionConditionShape) => {
// compare each operator on the left-hand side against each operator on the right-hand side
if (subset[PermissionConditionOperators.$EQ] || subset[PermissionConditionOperators.$NEQ]) {
const subsetOperatorValue = subset[PermissionConditionOperators.$EQ] || subset[PermissionConditionOperators.$NEQ];
const isInverted = !subset[PermissionConditionOperators.$EQ];
if (
parentSet[PermissionConditionOperators.$EQ] &&
invertTheOperation(isInverted, parentSet[PermissionConditionOperators.$EQ] !== subsetOperatorValue)
) {
return false;
}
if (
parentSet[PermissionConditionOperators.$NEQ] &&
invertTheOperation(isInverted, parentSet[PermissionConditionOperators.$NEQ] === subsetOperatorValue)
) {
return false;
}
if (
parentSet[PermissionConditionOperators.$IN] &&
invertTheOperation(isInverted, !parentSet[PermissionConditionOperators.$IN].includes(subsetOperatorValue))
) {
return false;
}
// $ne and $glob can never match each other
if (parentSet[PermissionConditionOperators.$GLOB] && isInverted) {
return false;
}
if (
parentSet[PermissionConditionOperators.$GLOB] &&
!picomatch.isMatch(subsetOperatorValue, parentSet[PermissionConditionOperators.$GLOB], { strictSlashes: false })
) {
return false;
}
}
if (subset[PermissionConditionOperators.$IN]) {
const subsetOperatorValue = subset[PermissionConditionOperators.$IN];
if (
parentSet[PermissionConditionOperators.$EQ] &&
(subsetOperatorValue.length !== 1 || subsetOperatorValue[0] !== parentSet[PermissionConditionOperators.$EQ])
) {
return false;
}
if (
parentSet[PermissionConditionOperators.$NEQ] &&
subsetOperatorValue.includes(parentSet[PermissionConditionOperators.$NEQ])
) {
return false;
}
if (
parentSet[PermissionConditionOperators.$IN] &&
!subsetOperatorValue.every((el) => parentSet[PermissionConditionOperators.$IN].includes(el))
) {
return false;
}
if (
parentSet[PermissionConditionOperators.$GLOB] &&
!subsetOperatorValue.every((el) =>
picomatch.isMatch(el, parentSet[PermissionConditionOperators.$GLOB], {
strictSlashes: false
})
)
) {
return false;
}
}
if (subset[PermissionConditionOperators.$GLOB]) {
const subsetOperatorValue = subset[PermissionConditionOperators.$GLOB];
const { isGlob } = picomatch.scan(subsetOperatorValue);
// if the subset value is a real glob, any fixed parent operator (other than $glob and $ne) makes the subset broader, because a glob can match arbitrarily many values
// example: parent $in [dev, prod] vs subset glob dev**: dev** matches anything starting with dev, and is thus a bigger scope
if (
isGlob &&
Object.keys(parentSet).some(
(el) => el !== PermissionConditionOperators.$GLOB && el !== PermissionConditionOperators.$NEQ
)
) {
return false;
}
if (
parentSet[PermissionConditionOperators.$EQ] &&
parentSet[PermissionConditionOperators.$EQ] !== subsetOperatorValue
) {
return false;
}
if (
parentSet[PermissionConditionOperators.$NEQ] &&
picomatch.isMatch(parentSet[PermissionConditionOperators.$NEQ], subsetOperatorValue, {
strictSlashes: false
})
) {
return false;
}
// if the parent set is $in, a child glob must be an exact literal from that list; a real glob covers a bigger scope
if (
parentSet[PermissionConditionOperators.$IN] &&
!parentSet[PermissionConditionOperators.$IN].includes(subsetOperatorValue)
) {
return false;
}
if (
parentSet[PermissionConditionOperators.$GLOB] &&
!picomatch.isMatch(subsetOperatorValue, parentSet[PermissionConditionOperators.$GLOB], {
strictSlashes: false
})
) {
return false;
}
}
return true;
};
const isSubsetForSamePermissionSubjectAction = (
parentSetRules: ReturnType<MongoAbility["possibleRulesFor"]>,
subsetRules: ReturnType<MongoAbility["possibleRulesFor"]>,
appendToMissingPermission: (condition?: MongoQuery) => void
) => {
const isMissingConditionInParent = parentSetRules.every((el) => !el.conditions);
if (isMissingConditionInParent) return true;
// all subset rules must pass in comparison to the parent rules
return subsetRules.every((subsetRule) => {
const subsetRuleConditions = subsetRule.conditions as Record<string, TPermissionConditionShape | string>;
// compare subset rule with all parent rules
const isSubsetOfNonInvertedParentSet = parentSetRules
.filter((el) => !el.inverted)
.some((parentSetRule) => {
// get conditions and iterate
const parentSetRuleConditions = parentSetRule?.conditions as Record<string, TPermissionConditionShape | string>;
if (!parentSetRuleConditions) return true;
return Object.keys(parentSetRuleConditions).every((parentConditionField) => {
// if the subset rule doesn't constrain a field the parent constrains, it's never a subset
if (!subsetRuleConditions?.[parentConditionField]) return false;
// standardize the conditions: a plain string operator becomes an $eq condition
const parentRuleConditionOperators = formatConditionOperator(parentSetRuleConditions[parentConditionField]);
const selectedSubsetRuleCondition = subsetRuleConditions?.[parentConditionField];
const subsetRuleConditionOperators = formatConditionOperator(selectedSubsetRuleCondition);
return isOperatorsASubset(parentRuleConditionOperators, subsetRuleConditionOperators);
});
});
const invertedParentSetRules = parentSetRules.filter((el) => el.inverted);
const isNotSubsetOfInvertedParentSet = invertedParentSetRules.length
? !invertedParentSetRules.some((parentSetRule) => {
// get conditions and iterate
const parentSetRuleConditions = parentSetRule?.conditions as Record<
string,
TPermissionConditionShape | string
>;
if (!parentSetRuleConditions) return true;
return Object.keys(parentSetRuleConditions).every((parentConditionField) => {
// if the subset rule doesn't constrain a field the parent constrains, it's never a subset
if (!subsetRuleConditions?.[parentConditionField]) return false;
// standardize the conditions: a plain string operator becomes an $eq condition
const parentRuleConditionOperators = formatConditionOperator(parentSetRuleConditions[parentConditionField]);
const selectedSubsetRuleCondition = subsetRuleConditions?.[parentConditionField];
const subsetRuleConditionOperators = formatConditionOperator(selectedSubsetRuleCondition);
return isOperatorsASubset(parentRuleConditionOperators, subsetRuleConditionOperators);
});
})
: true;
const isSubset = isSubsetOfNonInvertedParentSet && isNotSubsetOfInvertedParentSet;
if (!isSubset) {
appendToMissingPermission(subsetRule.conditions);
}
return isSubset;
});
};
export const validatePermissionBoundary = (parentSetPermissions: MongoAbility, subsetPermissions: MongoAbility) => {
const checkedPermissionRules = new Set<string>();
const missingPermissions: TMissingPermission[] = [];
subsetPermissions.rules.forEach((subsetPermissionRules) => {
const subsetPermissionSubject = subsetPermissionRules.subject.toString();
let subsetPermissionActions: string[] = [];
// actions can be string or string[]
if (typeof subsetPermissionRules.action === "string") {
subsetPermissionActions.push(subsetPermissionRules.action);
} else {
subsetPermissionRules.action.forEach((subsetPermissionAction) => {
subsetPermissionActions.push(subsetPermissionAction);
});
}
// if action is already processed ignore
subsetPermissionActions = subsetPermissionActions.filter(
(el) => !checkedPermissionRules.has(getPermissionSetID(el, subsetPermissionSubject))
);
if (!subsetPermissionActions.length) return;
subsetPermissionActions.forEach((subsetPermissionAction) => {
const parentSetRulesOfSubset = parentSetPermissions.possibleRulesFor(
subsetPermissionAction,
subsetPermissionSubject
);
const nonInvertedOnes = parentSetRulesOfSubset.filter((el) => !el.inverted);
if (!nonInvertedOnes.length) {
missingPermissions.push({ action: subsetPermissionAction, subject: subsetPermissionSubject });
return;
}
const subsetRules = subsetPermissions.possibleRulesFor(subsetPermissionAction, subsetPermissionSubject);
isSubsetForSamePermissionSubjectAction(parentSetRulesOfSubset, subsetRules, (conditions) => {
missingPermissions.push({ action: subsetPermissionAction, subject: subsetPermissionSubject, conditions });
});
});
subsetPermissionActions.forEach((el) =>
checkedPermissionRules.add(getPermissionSetID(el, subsetPermissionSubject))
);
});
if (missingPermissions.length) {
return { isValid: false as const, missingPermissions };
}
return { isValid: true };
};
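
A usage sketch for the helper above, matching the "child with more privilege" case from the test file earlier in this diff:

import { createMongoAbility } from "@casl/ability";

const parentPermission = createMongoAbility([{ action: ["create"], subject: "secrets" }]);
const childPermission = createMongoAbility([{ action: ["create", "edit"], subject: "secrets" }]);

const permissionBoundary = validatePermissionBoundary(parentPermission, childPermission);
if (!permissionBoundary.isValid) {
  // -> [{ action: "edit", subject: "secrets" }]
  console.log(permissionBoundary.missingPermissions);
}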

View File

@@ -1,5 +1,5 @@
/* eslint-disable @typescript-eslint/no-unsafe-assignment */
import { buildMongoQueryMatcher } from "@casl/ability";
import { buildMongoQueryMatcher, MongoAbility } from "@casl/ability";
import { FieldCondition, FieldInstruction, JsInterpreter } from "@ucast/mongo2js";
import picomatch from "picomatch";
@@ -20,8 +20,45 @@ const glob: JsInterpreter<FieldCondition<string>> = (node, object, context) => {
export const conditionsMatcher = buildMongoQueryMatcher({ $glob }, { glob });
/**
* Extracts and formats permissions from a CASL Ability object or a raw permission set.
*/
const extractPermissions = (ability: MongoAbility) => {
const permissions: string[] = [];
ability.rules.forEach((permission) => {
if (typeof permission.action === "string") {
permissions.push(`${permission.action}_${permission.subject as string}`);
} else {
permission.action.forEach((permissionAction) => {
permissions.push(`${permissionAction}_${permission.subject as string}`);
});
}
});
return permissions;
};
/**
* Compares two sets of permissions to determine if the first set is at least as privileged as the second set.
* The function checks if all permissions in the second set are contained within the first set and if the first set has equal or more permissions.
*
*/
export const isAtLeastAsPrivileged = (permissions1: MongoAbility, permissions2: MongoAbility) => {
const set1 = new Set(extractPermissions(permissions1));
const set2 = new Set(extractPermissions(permissions2));
for (const perm of set2) {
if (!set1.has(perm)) {
return false;
}
}
return set1.size >= set2.size;
};
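
// Note: extractPermissions keys only on "action_subject" pairs, so rule
// conditions are invisible to this check. For example (hypothetical abilities):
//   const devOnlyRead = createMongoAbility([
//     { action: "read", subject: "secrets", conditions: { environment: "dev" } }
//   ]);
//   const unconditionalRead = createMongoAbility([{ action: "read", subject: "secrets" }]);
//   isAtLeastAsPrivileged(devOnlyRead, unconditionalRead); // true, conditions are ignored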
export enum PermissionConditionOperators {
$IN = "$in",
$ALL = "$all",
$REGEX = "$regex",
$EQ = "$eq",
$NEQ = "$ne",
$GLOB = "$glob"

View File

@@ -24,7 +24,6 @@ const databaseReadReplicaSchema = z
const envSchema = z
.object({
INFISICAL_PLATFORM_VERSION: zpStr(z.string().optional()),
PORT: z.coerce.number().default(IS_PACKAGED ? 8080 : 4000),
DISABLE_SECRET_SCANNING: z
.enum(["true", "false"])
@@ -185,14 +184,6 @@ const envSchema = z
USE_PG_QUEUE: zodStrBool.default("false"),
SHOULD_INIT_PG_QUEUE: zodStrBool.default("false"),
/* Gateway----------------------------------------------------------------------------- */
GATEWAY_INFISICAL_STATIC_IP_ADDRESS: zpStr(z.string().optional()),
GATEWAY_RELAY_ADDRESS: zpStr(z.string().optional()),
GATEWAY_RELAY_REALM: zpStr(z.string().optional()),
GATEWAY_RELAY_AUTH_SECRET: zpStr(z.string().optional()),
/* ----------------------------------------------------------------------------- */
/* App Connections ----------------------------------------------------------------------------- */
// aws
@@ -217,13 +208,6 @@ const envSchema = z
INF_APP_CONNECTION_AZURE_CLIENT_ID: zpStr(z.string().optional()),
INF_APP_CONNECTION_AZURE_CLIENT_SECRET: zpStr(z.string().optional()),
// datadog
SHOULD_USE_DATADOG_TRACER: zodStrBool.default("false"),
DATADOG_PROFILING_ENABLED: zodStrBool.default("false"),
DATADOG_ENV: zpStr(z.string().optional().default("prod")),
DATADOG_SERVICE: zpStr(z.string().optional().default("infisical-core")),
DATADOG_HOSTNAME: zpStr(z.string().optional()),
/* CORS ----------------------------------------------------------------------------- */
CORS_ALLOWED_ORIGINS: zpStr(

View File

@@ -1,29 +0,0 @@
// used for postgres advisory locks
// mirrors what postgres does under the hood:
// convert any string to a deterministic 32-bit number
export const hashtext = (text: string) => {
// Convert text to UTF8 bytes array for consistent behavior with PostgreSQL
const encoder = new TextEncoder();
const bytes = encoder.encode(text);
// Implementation of hash_any
let result = 0;
for (let i = 0; i < bytes.length; i += 1) {
// eslint-disable-next-line no-bitwise
result = ((result << 5) + result) ^ bytes[i];
// Keep within 32-bit integer range
// eslint-disable-next-line no-bitwise
result >>>= 0;
}
// Convert to signed 32-bit integer like PostgreSQL
// eslint-disable-next-line no-bitwise
return result | 0;
};
export const pgAdvisoryLockHashText = (text: string) => {
const hash = hashtext(text);
// Ensure positive value within PostgreSQL integer range
return Math.abs(hash) % 2 ** 31;
};
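
A sketch of how such a hashed key is typically consumed, assuming a knex pg instance like the app's db client; the lock name mirrors OrgGatewayRootCaInit from the keystore hunk above:

import knex from "knex";

const db = knex({ client: "pg", connection: process.env.DB_CONNECTION_URI });

const withOrgGatewayRootCaLock = async (orgId: string, criticalSection: () => Promise<void>) => {
  await db.transaction(async (tx) => {
    const lockKey = pgAdvisoryLockHashText(`org-gateway-root-ca:${orgId}`);
    // pg_advisory_xact_lock blocks until acquired and releases automatically at COMMIT/ROLLBACK.
    await tx.raw("SELECT pg_advisory_xact_lock(?)", [lockKey]);
    await criticalSection();
  });
};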

View File

@@ -52,18 +52,10 @@ export class ForbiddenRequestError extends Error {
error: unknown;
details?: unknown;
constructor({
name,
error,
message,
details
}: { message?: string; name?: string; error?: unknown; details?: unknown } = {}) {
constructor({ name, error, message }: { message?: string; name?: string; error?: unknown } = {}) {
super(message ?? "You are not allowed to access this resource");
this.name = name || "ForbiddenError";
this.error = error;
this.details = details;
}
}

View File

@@ -1,353 +0,0 @@
/* eslint-disable no-await-in-loop */
import crypto from "node:crypto";
import net from "node:net";
import quicDefault, * as quicModule from "@infisical/quic";
import { BadRequestError } from "../errors";
import { logger } from "../logger";
const DEFAULT_MAX_RETRIES = 3;
const DEFAULT_RETRY_DELAY = 1000; // 1 second
const quic = quicDefault || quicModule;
const parseSubjectDetails = (data: string) => {
const values: Record<string, string> = {};
data.split("\n").forEach((el) => {
const [key, value] = el.split("=");
values[key.trim()] = value.trim();
});
return values;
};
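
// Note: Node's X509Certificate#subject is a newline-separated list of
// "key=value" pairs, which is what parseSubjectDetails consumes, e.g.
//   parseSubjectDetails(new crypto.X509Certificate(certPem).subject)
//   // -> { OU: "Gateway", CN: "<identityId>", O: "<orgId>" } for gateway server certs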
type TTlsOption = { ca: string; cert: string; key: string };
const createQuicConnection = async (
relayHost: string,
relayPort: number,
tlsOptions: TTlsOption,
identityId: string,
orgId: string
) => {
const client = await quic.QUICClient.createQUICClient({
host: relayHost,
port: relayPort,
config: {
ca: tlsOptions.ca,
cert: tlsOptions.cert,
key: tlsOptions.key,
applicationProtos: ["infisical-gateway"],
verifyPeer: true,
verifyCallback: async (certs) => {
if (!certs || certs.length === 0) return quic.native.CryptoError.CertificateRequired;
const serverCertificate = new crypto.X509Certificate(Buffer.from(certs[0]));
const caCertificate = new crypto.X509Certificate(tlsOptions.ca);
const isValidServerCertificate = serverCertificate.checkIssued(caCertificate);
if (!isValidServerCertificate) return quic.native.CryptoError.BadCertificate;
const subjectDetails = parseSubjectDetails(serverCertificate.subject);
if (subjectDetails.OU !== "Gateway" || subjectDetails.CN !== identityId || subjectDetails.O !== orgId) {
return quic.native.CryptoError.CertificateUnknown;
}
if (new Date() > new Date(serverCertificate.validTo) || new Date() < new Date(serverCertificate.validFrom)) {
return quic.native.CryptoError.CertificateExpired;
}
const formattedRelayHost =
process.env.NODE_ENV === "development" ? relayHost.replace("host.docker.internal", "127.0.0.1") : relayHost;
if (!serverCertificate.checkIP(formattedRelayHost)) return quic.native.CryptoError.BadCertificate;
},
maxIdleTimeout: 90000,
keepAliveIntervalTime: 30000
},
crypto: {
ops: {
randomBytes: async (data) => {
crypto.getRandomValues(new Uint8Array(data));
}
}
}
});
return client;
};
type TPingGatewayAndVerifyDTO = {
relayHost: string;
relayPort: number;
tlsOptions: TTlsOption;
maxRetries?: number;
identityId: string;
orgId: string;
};
export const pingGatewayAndVerify = async ({
relayHost,
relayPort,
tlsOptions,
maxRetries = DEFAULT_MAX_RETRIES,
identityId,
orgId
}: TPingGatewayAndVerifyDTO) => {
let lastError: Error | null = null;
const quicClient = await createQuicConnection(relayHost, relayPort, tlsOptions, identityId, orgId).catch((err) => {
throw new BadRequestError({
error: err as Error
});
});
for (let attempt = 1; attempt <= maxRetries; attempt += 1) {
try {
const stream = quicClient.connection.newStream("bidi");
const pingWriter = stream.writable.getWriter();
await pingWriter.write(Buffer.from("PING\n"));
pingWriter.releaseLock();
// Read PONG response
const reader = stream.readable.getReader();
const { value, done } = await reader.read();
if (done) {
throw new Error("Gateway closed before receiving PONG");
}
const response = Buffer.from(value).toString();
if (response !== "PONG\n" && response !== "PONG") {
throw new Error(`Failed to Ping. Unexpected response: ${response}`);
}
reader.releaseLock();
return;
} catch (err) {
lastError = err as Error;
if (attempt < maxRetries) {
await new Promise((resolve) => {
setTimeout(resolve, DEFAULT_RETRY_DELAY);
});
}
} finally {
await quicClient.destroy();
}
}
logger.error(lastError);
throw new BadRequestError({
message: `Failed to ping gateway after ${maxRetries} attempts. Last error: ${lastError?.message}`
});
};
interface TProxyServer {
server: net.Server;
port: number;
cleanup: () => Promise<void>;
getProxyError: () => string;
}
const setupProxyServer = async ({
targetPort,
targetHost,
tlsOptions,
relayHost,
relayPort,
identityId,
orgId
}: {
targetHost: string;
targetPort: number;
relayPort: number;
relayHost: string;
tlsOptions: TTlsOption;
identityId: string;
orgId: string;
}): Promise<TProxyServer> => {
const quicClient = await createQuicConnection(relayHost, relayPort, tlsOptions, identityId, orgId).catch((err) => {
throw new BadRequestError({
error: err as Error
});
});
const proxyErrorMsg = [""];
return new Promise((resolve, reject) => {
const server = net.createServer();
// eslint-disable-next-line @typescript-eslint/no-misused-promises
server.on("connection", async (clientConn) => {
try {
clientConn.setKeepAlive(true, 30000); // 30 seconds
clientConn.setNoDelay(true);
const stream = quicClient.connection.newStream("bidi");
// Send FORWARD-TCP command
const forwardWriter = stream.writable.getWriter();
await forwardWriter.write(Buffer.from(`FORWARD-TCP ${targetHost}:${targetPort}\n`));
forwardWriter.releaseLock();
// Set up bidirectional copy
const setupCopy = () => {
// Client to QUIC
// eslint-disable-next-line
(async () => {
const writer = stream.writable.getWriter();
// Create a handler for client data
clientConn.on("data", (chunk) => {
writer.write(chunk).catch((err) => {
proxyErrorMsg.push((err as Error)?.message);
});
});
// Handle client connection close
clientConn.on("end", () => {
writer.close().catch((err) => {
logger.error(err);
});
});
clientConn.on("error", (clientConnErr) => {
writer.abort(clientConnErr?.message).catch((err) => {
proxyErrorMsg.push((err as Error)?.message);
});
});
})();
// QUIC to Client
void (async () => {
try {
const reader = stream.readable.getReader();
let reading = true;
while (reading) {
const { value, done } = await reader.read();
if (done) {
reading = false;
clientConn.end(); // Close client connection when QUIC stream ends
break;
}
// Write data to TCP client
const canContinue = clientConn.write(Buffer.from(value));
// Handle backpressure
if (!canContinue) {
await new Promise((res) => {
clientConn.once("drain", res);
});
}
}
} catch (err) {
proxyErrorMsg.push((err as Error)?.message);
clientConn.destroy();
}
})();
};
setupCopy();
// Handle connection closure
clientConn.on("close", () => {
stream.destroy().catch((err) => {
proxyErrorMsg.push((err as Error)?.message);
});
});
const cleanup = async () => {
clientConn?.destroy();
await stream.destroy();
};
clientConn.on("error", (clientConnErr) => {
logger.error(clientConnErr, "Client socket error");
cleanup().catch((err) => {
logger.error(err, "Client conn cleanup");
});
});
clientConn.on("end", () => {
cleanup().catch((err) => {
logger.error(err, "Client conn end");
});
});
} catch (err) {
logger.error(err, "Failed to establish target connection:");
clientConn.end();
reject(err);
}
});
server.on("error", (err) => {
reject(err);
});
server.on("close", () => {
quicClient?.destroy().catch((err) => {
logger.error(err, "Failed to destroy quic client");
});
});
server.listen(0, () => {
const address = server.address();
if (!address || typeof address === "string") {
server.close();
reject(new Error("Failed to get server port"));
return;
}
logger.info("Gateway proxy started");
resolve({
server,
port: address.port,
cleanup: async () => {
server.close();
await quicClient?.destroy();
},
getProxyError: () => proxyErrorMsg.join(",")
});
});
});
};
interface ProxyOptions {
targetHost: string;
targetPort: number;
relayHost: string;
relayPort: number;
tlsOptions: TTlsOption;
identityId: string;
orgId: string;
}
export const withGatewayProxy = async (
callback: (port: number) => Promise<void>,
options: ProxyOptions
): Promise<void> => {
const { relayHost, relayPort, targetHost, targetPort, tlsOptions, identityId, orgId } = options;
// Setup the proxy server
const { port, cleanup, getProxyError } = await setupProxyServer({
targetHost,
targetPort,
relayPort,
relayHost,
tlsOptions,
identityId,
orgId
});
try {
// Execute the callback with the allocated port
await callback(port);
} catch (err) {
const proxyErrorMessage = getProxyError();
if (proxyErrorMessage) {
logger.error(new Error(proxyErrorMessage), "Failed to proxy");
}
logger.error(err, "Failed to do gateway");
throw new BadRequestError({ message: proxyErrorMessage || (err as Error)?.message });
} finally {
// Ensure cleanup happens regardless of success or failure
await cleanup();
}
};
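Since this file is removed in this compare, here is a hedged sketch of how `withGatewayProxy` above is meant to be driven, inferred from its signature; the relay endpoint, TLS material, and IDs are placeholders, not real values:

```ts
import net from "node:net";

// Hypothetical call site for the withGatewayProxy function defined above.
const run = async () => {
  await withGatewayProxy(
    async (port) => {
      // The proxy listens on an ephemeral local port; bytes written here are
      // tunneled over the QUIC stream to targetHost:targetPort via the relay.
      const socket = net.createConnection({ host: "127.0.0.1", port }, () => {
        socket.end("SELECT 1;\n");
      });
      await new Promise((resolve, reject) => {
        socket.on("close", resolve);
        socket.on("error", reject);
      });
    },
    {
      targetHost: "10.0.0.5",
      targetPort: 5432,
      relayHost: "relay.example.com",
      relayPort: 5349,
      tlsOptions: { ca: "<ca-pem>", cert: "<cert-pem>", key: "<key-pem>" },
      identityId: "identity-id",
      orgId: "org-id"
    }
  );
};

void run();
```

The callback-with-allocated-port shape keeps lifetime management in one place: `setupProxyServer` picks a free port via `server.listen(0)`, the caller works against it, and `cleanup()` runs in the `finally` block whether the callback succeeds or throws.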

View File

@@ -1,12 +1,11 @@
import opentelemetry, { diag, DiagConsoleLogger, DiagLogLevel } from "@opentelemetry/api";
import { getNodeAutoInstrumentations } from "@opentelemetry/auto-instrumentations-node";
import { OTLPMetricExporter } from "@opentelemetry/exporter-metrics-otlp-proto";
import { PrometheusExporter } from "@opentelemetry/exporter-prometheus";
import { registerInstrumentations } from "@opentelemetry/instrumentation";
import { HttpInstrumentation } from "@opentelemetry/instrumentation-http";
import { Resource } from "@opentelemetry/resources";
import { AggregationTemporality, MeterProvider, PeriodicExportingMetricReader } from "@opentelemetry/sdk-metrics";
import { ATTR_SERVICE_NAME, ATTR_SERVICE_VERSION } from "@opentelemetry/semantic-conventions";
import tracer from "dd-trace";
import dotenv from "dotenv";
import { initEnvConfig } from "../config/env";
@@ -70,7 +69,7 @@ const initTelemetryInstrumentation = ({
opentelemetry.metrics.setGlobalMeterProvider(meterProvider);
registerInstrumentations({
instrumentations: [new HttpInstrumentation()]
instrumentations: [getNodeAutoInstrumentations()]
});
};
@@ -87,17 +86,6 @@ const setupTelemetry = () => {
exportType: appCfg.OTEL_EXPORT_TYPE
});
}
if (appCfg.SHOULD_USE_DATADOG_TRACER) {
console.log("Initializing Datadog tracer");
tracer.init({
profiling: appCfg.DATADOG_PROFILING_ENABLED,
version: appCfg.INFISICAL_PLATFORM_VERSION,
env: appCfg.DATADOG_ENV,
service: appCfg.DATADOG_SERVICE,
hostname: appCfg.DATADOG_HOSTNAME
});
}
};
void setupTelemetry();
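For context on the tracer added here: dd-trace generally has to initialize before the modules it instruments are imported, which is why it lives in this instrumentation entrypoint. A minimal standalone sketch of the same gating, assuming the env variable names shown in this diff arrive as the string "true" when enabled:

```ts
import tracer from "dd-trace";

// Assumed convention: boolean-ish env flags arrive as the string "true".
if (process.env.SHOULD_USE_DATADOG_TRACER === "true") {
  tracer.init({
    profiling: process.env.DATADOG_PROFILING_ENABLED === "true",
    env: process.env.DATADOG_ENV,
    service: process.env.DATADOG_SERVICE,
    hostname: process.env.DATADOG_HOSTNAME
  });
}
```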

View File

@@ -1,16 +0,0 @@
import crypto from "node:crypto";
const TURN_TOKEN_TTL = 24 * 60 * 60 * 1000; // 24 hours in milliseconds
export const getTurnCredentials = (id: string, authSecret: string, ttl = TURN_TOKEN_TTL) => {
const timestamp = Math.floor((Date.now() + ttl) / 1000);
const username = `${timestamp}:${id}`;
const hmac = crypto.createHmac("sha1", authSecret);
hmac.update(username);
const password = hmac.digest("base64");
return {
username,
password
};
};
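The helper above implements the coturn-style ephemeral credential scheme (`use-auth-secret`): the username embeds an expiry timestamp and the password is an HMAC-SHA1 over it. A minimal verification sketch for the receiving side, assuming the same shared `authSecret`:

```ts
import crypto from "node:crypto";

// Recompute the HMAC and check the embedded expiry; `authSecret` must match
// the secret used by getTurnCredentials above.
export const verifyTurnCredentials = (username: string, password: string, authSecret: string) => {
  const [timestamp] = username.split(":");
  if (Number(timestamp) * 1000 < Date.now()) return false; // credential expired

  const expected = crypto.createHmac("sha1", authSecret).update(username).digest("base64");
  const a = Buffer.from(expected);
  const b = Buffer.from(password);
  return a.length === b.length && crypto.timingSafeEqual(a, b); // constant-time compare
};
```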

View File

@@ -83,14 +83,6 @@ const run = async () => {
process.exit(0);
});
process.on("uncaughtException", (error) => {
logger.error(error, "CRITICAL ERROR: Uncaught Exception");
});
process.on("unhandledRejection", (error) => {
logger.error(error, "CRITICAL ERROR: Unhandled Promise Rejection");
});
await server.listen({
port: envConfig.PORT,
host: envConfig.HOST,

View File

@@ -21,7 +21,6 @@ import {
TQueueSecretSyncSyncSecretsByIdDTO,
TQueueSendSecretSyncActionFailedNotificationsDTO
} from "@app/services/secret-sync/secret-sync-types";
import { TWebhookPayloads } from "@app/services/webhook/webhook-types";
export enum QueueName {
SecretRotation = "secret-rotation",
@@ -108,7 +107,7 @@ export type TQueueJobTypes = {
};
[QueueName.SecretWebhook]: {
name: QueueJobs.SecWebhook;
payload: TWebhookPayloads;
payload: { projectId: string; environment: string; secretPath: string; depth?: number };
};
[QueueName.AccessTokenStatusUpdate]:

View File

@@ -122,8 +122,7 @@ export const fastifyErrHandler = fastifyPlugin(async (server: FastifyZodProvider
reqId: req.id,
statusCode: HttpStatusCodes.Forbidden,
message: error.message,
error: error.name,
details: error?.details
error: error.name
});
} else if (error instanceof RateLimitError) {
void res.status(HttpStatusCodes.TooManyRequests).send({

View File

@@ -27,10 +27,6 @@ import { dynamicSecretLeaseQueueServiceFactory } from "@app/ee/services/dynamic-
import { dynamicSecretLeaseServiceFactory } from "@app/ee/services/dynamic-secret-lease/dynamic-secret-lease-service";
import { externalKmsDALFactory } from "@app/ee/services/external-kms/external-kms-dal";
import { externalKmsServiceFactory } from "@app/ee/services/external-kms/external-kms-service";
import { gatewayDALFactory } from "@app/ee/services/gateway/gateway-dal";
import { gatewayServiceFactory } from "@app/ee/services/gateway/gateway-service";
import { orgGatewayConfigDALFactory } from "@app/ee/services/gateway/org-gateway-config-dal";
import { projectGatewayDALFactory } from "@app/ee/services/gateway/project-gateway-dal";
import { groupDALFactory } from "@app/ee/services/group/group-dal";
import { groupServiceFactory } from "@app/ee/services/group/group-service";
import { userGroupMembershipDALFactory } from "@app/ee/services/group/user-group-membership-dal";
@@ -236,6 +232,8 @@ import { webhookDALFactory } from "@app/services/webhook/webhook-dal";
import { webhookServiceFactory } from "@app/services/webhook/webhook-service";
import { workflowIntegrationDALFactory } from "@app/services/workflow-integration/workflow-integration-dal";
import { workflowIntegrationServiceFactory } from "@app/services/workflow-integration/workflow-integration-service";
import { dedicatedInstanceDALFactory } from "@app/ee/services/dedicated-instance/dedicated-instance-dal";
import { dedicatedInstanceServiceFactory } from "@app/ee/services/dedicated-instance/dedicated-instance-service";
import { injectAuditLogInfo } from "../plugins/audit-log";
import { injectIdentity } from "../plugins/auth/inject-identity";
@@ -397,10 +395,6 @@ export const registerRoutes = async (
const kmipOrgConfigDAL = kmipOrgConfigDALFactory(db);
const kmipOrgServerCertificateDAL = kmipOrgServerCertificateDALFactory(db);
const orgGatewayConfigDAL = orgGatewayConfigDALFactory(db);
const gatewayDAL = gatewayDALFactory(db);
const projectGatewayDAL = projectGatewayDALFactory(db);
const permissionService = permissionServiceFactory({
permissionDAL,
orgRoleDAL,
@@ -1096,9 +1090,7 @@ export const registerRoutes = async (
permissionService,
secretSharingDAL,
orgDAL,
kmsService,
smtpService,
userDAL
kmsService
});
const accessApprovalPolicyService = accessApprovalPolicyServiceFactory({
@@ -1310,19 +1302,7 @@ export const registerRoutes = async (
kmsService
});
const gatewayService = gatewayServiceFactory({
permissionService,
gatewayDAL,
kmsService,
licenseService,
orgGatewayConfigDAL,
keyStore,
projectGatewayDAL
});
const dynamicSecretProviders = buildDynamicSecretProviders({
gatewayService
});
const dynamicSecretProviders = buildDynamicSecretProviders();
const dynamicSecretQueueService = dynamicSecretLeaseQueueServiceFactory({
queueService,
dynamicSecretLeaseDAL,
@@ -1340,10 +1320,8 @@ export const registerRoutes = async (
folderDAL,
permissionService,
licenseService,
kmsService,
projectGatewayDAL
kmsService
});
const dynamicSecretLeaseService = dynamicSecretLeaseServiceFactory({
projectDAL,
permissionService,
@@ -1481,6 +1459,12 @@ export const registerRoutes = async (
permissionService
});
const dedicatedInstanceDAL = dedicatedInstanceDALFactory(db);
const dedicatedInstanceService = dedicatedInstanceServiceFactory({
dedicatedInstanceDAL,
permissionService
});
await superAdminService.initServerCfg();
// setup the communication with license key server
@@ -1582,7 +1566,7 @@ export const registerRoutes = async (
secretSync: secretSyncService,
kmip: kmipService,
kmipOperation: kmipOperationService,
gateway: gatewayService
dedicatedInstance: dedicatedInstanceService
});
const cronJobs: CronJob[] = [];

View File

@@ -111,16 +111,7 @@ export const secretRawSchema = z.object({
secretReminderRepeatDays: z.number().nullable().optional(),
skipMultilineEncoding: z.boolean().default(false).nullable().optional(),
createdAt: z.date(),
updatedAt: z.date(),
actor: z
.object({
actorId: z.string().nullable().optional(),
actorType: z.string().nullable().optional(),
name: z.string().nullable().optional(),
membershipId: z.string().nullable().optional()
})
.optional()
.nullable()
updatedAt: z.date()
});
export const ProjectPermissionSchema = z.object({

View File

@@ -1,4 +1,3 @@
import DOMPurify from "isomorphic-dompurify";
import { z } from "zod";
import { OrganizationsSchema, SuperAdminSchema, UsersSchema } from "@app/db/schemas";
@@ -73,21 +72,7 @@ export const registerAdminRouter = async (server: FastifyZodProvider) => {
message: "At least one login method should be enabled."
}),
slackClientId: z.string().optional(),
slackClientSecret: z.string().optional(),
authConsentContent: z
.string()
.trim()
.refine((content) => DOMPurify.sanitize(content) === content, {
message: "Auth consent content contains unsafe HTML."
})
.optional(),
pageFrameContent: z
.string()
.trim()
.refine((content) => DOMPurify.sanitize(content) === content, {
message: "Page frame content contains unsafe HTML."
})
.optional()
slackClientSecret: z.string().optional()
}),
response: {
200: z.object({
@@ -118,12 +103,7 @@ export const registerAdminRouter = async (server: FastifyZodProvider) => {
querystring: z.object({
searchTerm: z.string().default(""),
offset: z.coerce.number().default(0),
limit: z.coerce.number().max(100).default(20),
// TODO: remove this once z.coerce.boolean() is supported
adminsOnly: z
.string()
.transform((val) => val === "true")
.default("false")
limit: z.coerce.number().max(100).default(20)
}),
response: {
200: z.object({
@@ -216,27 +196,6 @@ export const registerAdminRouter = async (server: FastifyZodProvider) => {
}
});
server.route({
method: "PATCH",
url: "/user-management/users/:userId/admin-access",
config: {
rateLimit: writeLimit
},
schema: {
params: z.object({
userId: z.string()
})
},
onRequest: (req, res, done) => {
verifyAuth([AuthMode.JWT])(req, res, () => {
verifySuperAdmin(req, res, done);
});
},
handler: async (req) => {
await server.services.superAdmin.grantServerAdminAccessToUser(req.params.userId);
}
});
server.route({
method: "GET",
url: "/encryption-strategies",

View File

@@ -37,7 +37,6 @@ import { registerProjectMembershipRouter } from "./project-membership-router";
import { registerProjectRouter } from "./project-router";
import { registerSecretFolderRouter } from "./secret-folder-router";
import { registerSecretImportRouter } from "./secret-import-router";
import { registerSecretRequestsRouter } from "./secret-requests-router";
import { registerSecretSharingRouter } from "./secret-sharing-router";
import { registerSecretTagRouter } from "./secret-tag-router";
import { registerSlackRouter } from "./slack-router";
@@ -111,15 +110,7 @@ export const registerV1Routes = async (server: FastifyZodProvider) => {
await server.register(registerIntegrationAuthRouter, { prefix: "/integration-auth" });
await server.register(registerWebhookRouter, { prefix: "/webhooks" });
await server.register(registerIdentityRouter, { prefix: "/identities" });
await server.register(
async (secretSharingRouter) => {
await secretSharingRouter.register(registerSecretSharingRouter, { prefix: "/shared" });
await secretSharingRouter.register(registerSecretRequestsRouter, { prefix: "/requests" });
},
{ prefix: "/secret-sharing" }
);
await server.register(registerSecretSharingRouter, { prefix: "/secret-sharing" });
await server.register(registerUserEngagementRouter, { prefix: "/user-engagement" });
await server.register(registerDashboardRouter, { prefix: "/dashboard" });
await server.register(registerCmekRouter, { prefix: "/kms" });

View File

@@ -6,7 +6,6 @@ import { authRateLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { validateSignUpAuthorization } from "@app/services/auth/auth-fns";
import { AuthMode } from "@app/services/auth/auth-type";
import { UserEncryption } from "@app/services/user/user-types";
export const registerPasswordRouter = async (server: FastifyZodProvider) => {
server.route({
@@ -114,16 +113,20 @@ export const registerPasswordRouter = async (server: FastifyZodProvider) => {
}),
response: {
200: z.object({
message: z.string(),
user: UsersSchema,
token: z.string(),
userEncryptionVersion: z.nativeEnum(UserEncryption)
token: z.string()
})
}
},
handler: async (req) => {
const passwordReset = await server.services.password.verifyPasswordResetEmail(req.body.email, req.body.code);
const { token, user } = await server.services.password.verifyPasswordResetEmail(req.body.email, req.body.code);
return passwordReset;
return {
message: "Successfully verified email",
user,
token
};
}
});

View File

@@ -307,17 +307,7 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
.max(256, { message: "Description must be 256 or fewer characters" })
.optional()
.describe(PROJECTS.UPDATE.projectDescription),
autoCapitalization: z.boolean().optional().describe(PROJECTS.UPDATE.autoCapitalization),
slug: z
.string()
.trim()
.regex(
/^[a-z0-9]+(?:[_-][a-z0-9]+)*$/,
"Project slug can only contain lowercase letters and numbers, with optional single hyphens (-) or underscores (_) between words. Cannot start or end with a hyphen or underscore."
)
.max(64, { message: "Slug must be 64 characters or fewer" })
.optional()
.describe(PROJECTS.UPDATE.slug)
autoCapitalization: z.boolean().optional().describe(PROJECTS.UPDATE.autoCapitalization)
}),
response: {
200: z.object({
@@ -335,8 +325,7 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
update: {
name: req.body.name,
description: req.body.description,
autoCapitalization: req.body.autoCapitalization,
slug: req.body.slug
autoCapitalization: req.body.autoCapitalization
},
actorAuthMethod: req.permission.authMethod,
actorId: req.permission.id,
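The removed slug rule is easiest to read through examples; the regex below is copied from the schema above, and the test strings are illustrative:

```ts
const SLUG_RE = /^[a-z0-9]+(?:[_-][a-z0-9]+)*$/;

SLUG_RE.test("my-project");   // true  - single separators between words
SLUG_RE.test("my_project_2"); // true
SLUG_RE.test("my--project");  // false - consecutive separators
SLUG_RE.test("-leading");     // false - cannot start with a separator
SLUG_RE.test("Trailing-");    // false - uppercase and trailing separator
```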

View File

@@ -47,8 +47,7 @@ export const registerSecretFolderRouter = async (server: FastifyZodProvider) =>
.default("/")
.transform(prefixWithSlash)
.transform(removeTrailingSlash)
.describe(FOLDERS.CREATE.directory),
description: z.string().optional().nullable().describe(FOLDERS.CREATE.description)
.describe(FOLDERS.CREATE.directory)
}),
response: {
200: z.object({
@@ -66,8 +65,7 @@ export const registerSecretFolderRouter = async (server: FastifyZodProvider) =>
actorOrgId: req.permission.orgId,
...req.body,
projectId: req.body.workspaceId,
path,
description: req.body.description
path
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
@@ -78,8 +76,7 @@ export const registerSecretFolderRouter = async (server: FastifyZodProvider) =>
environment: req.body.environment,
folderId: folder.id,
folderName: folder.name,
folderPath: path,
...(req.body.description ? { description: req.body.description } : {})
folderPath: path
}
}
});
@@ -128,8 +125,7 @@ export const registerSecretFolderRouter = async (server: FastifyZodProvider) =>
.default("/")
.transform(prefixWithSlash)
.transform(removeTrailingSlash)
.describe(FOLDERS.UPDATE.directory),
description: z.string().optional().nullable().describe(FOLDERS.UPDATE.description)
.describe(FOLDERS.UPDATE.directory)
}),
response: {
200: z.object({
@@ -200,8 +196,7 @@ export const registerSecretFolderRouter = async (server: FastifyZodProvider) =>
.default("/")
.transform(prefixWithSlash)
.transform(removeTrailingSlash)
.describe(FOLDERS.UPDATE.path),
description: z.string().optional().nullable().describe(FOLDERS.UPDATE.description)
.describe(FOLDERS.UPDATE.path)
})
.array()
.min(1)

View File

@@ -1,270 +0,0 @@
import { z } from "zod";
import { SecretSharingSchema } from "@app/db/schemas";
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import { SecretSharingAccessType } from "@app/lib/types";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { getTelemetryDistinctId } from "@app/server/lib/telemetry";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
import { SecretSharingType } from "@app/services/secret-sharing/secret-sharing-types";
import { PostHogEventTypes } from "@app/services/telemetry/telemetry-types";
export const registerSecretRequestsRouter = async (server: FastifyZodProvider) => {
server.route({
method: "GET",
url: "/:id",
config: {
rateLimit: readLimit
},
schema: {
params: z.object({
id: z.string()
}),
response: {
200: z.object({
secretRequest: SecretSharingSchema.omit({
encryptedSecret: true,
tag: true,
iv: true,
encryptedValue: true
}).extend({
isSecretValueSet: z.boolean(),
requester: z.object({
organizationName: z.string(),
firstName: z.string().nullish(),
lastName: z.string().nullish(),
username: z.string()
})
})
})
}
},
handler: async (req) => {
const secretRequest = await req.server.services.secretSharing.getSecretRequestById({
id: req.params.id,
actorOrgId: req.permission?.orgId,
actor: req.permission?.type,
actorId: req.permission?.id,
actorAuthMethod: req.permission?.authMethod
});
return { secretRequest };
}
});
server.route({
method: "POST",
url: "/:id/set-value",
config: {
rateLimit: writeLimit
},
schema: {
params: z.object({
id: z.string()
}),
body: z.object({
secretValue: z.string()
}),
response: {
200: z.object({
secretRequest: SecretSharingSchema.omit({
encryptedSecret: true,
tag: true,
iv: true,
encryptedValue: true
})
})
}
},
handler: async (req) => {
const secretRequest = await req.server.services.secretSharing.setSecretRequestValue({
id: req.params.id,
actorOrgId: req.permission?.orgId,
actor: req.permission?.type,
actorId: req.permission?.id,
actorAuthMethod: req.permission?.authMethod,
secretValue: req.body.secretValue
});
return { secretRequest };
}
});
server.route({
method: "POST",
url: "/:id/reveal-value",
config: {
rateLimit: writeLimit
},
schema: {
params: z.object({
id: z.string()
}),
response: {
200: z.object({
secretRequest: SecretSharingSchema.omit({
encryptedSecret: true,
tag: true,
iv: true,
encryptedValue: true
}).extend({
secretValue: z.string()
})
})
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const secretRequest = await req.server.services.secretSharing.revealSecretRequestValue({
id: req.params.id,
actorOrgId: req.permission.orgId,
orgId: req.permission.orgId,
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod
});
return { secretRequest };
}
});
server.route({
method: "DELETE",
url: "/:id",
config: {
rateLimit: writeLimit
},
schema: {
params: z.object({
id: z.string()
}),
response: {
200: z.object({
secretRequest: SecretSharingSchema.omit({
encryptedSecret: true,
tag: true,
iv: true,
encryptedValue: true
})
})
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const secretRequest = await req.server.services.secretSharing.deleteSharedSecretById({
actorOrgId: req.permission.orgId,
actorAuthMethod: req.permission.authMethod,
actorId: req.permission.id,
sharedSecretId: req.params.id,
orgId: req.permission.orgId,
actor: req.permission.type,
type: SecretSharingType.Request
});
await server.services.telemetry.sendPostHogEvents({
event: PostHogEventTypes.SecretRequestDeleted,
distinctId: getTelemetryDistinctId(req),
properties: {
secretRequestId: req.params.id,
organizationId: req.permission.orgId,
...req.auditLogInfo
}
});
return { secretRequest };
}
});
server.route({
method: "GET",
url: "/",
config: {
rateLimit: readLimit
},
schema: {
querystring: z.object({
offset: z.coerce.number().min(0).max(100).default(0),
limit: z.coerce.number().min(1).max(100).default(25)
}),
response: {
200: z.object({
secrets: z.array(SecretSharingSchema),
totalCount: z.number()
})
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const { secrets, totalCount } = await req.server.services.secretSharing.getSharedSecrets({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
type: SecretSharingType.Request,
...req.query
});
return {
secrets,
totalCount
};
}
});
server.route({
method: "POST",
url: "/",
config: {
rateLimit: writeLimit
},
schema: {
body: z.object({
name: z.string().max(50).optional(),
expiresAt: z.string(),
accessType: z.nativeEnum(SecretSharingAccessType).default(SecretSharingAccessType.Organization)
}),
response: {
200: z.object({
id: z.string()
})
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const shareRequest = await req.server.services.secretSharing.createSecretRequest({
actor: req.permission.type,
actorId: req.permission.id,
orgId: req.permission.orgId,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
...req.body
});
await server.services.auditLog.createAuditLog({
orgId: req.permission.orgId,
...req.auditLogInfo,
event: {
type: EventType.CREATE_SECRET_REQUEST,
metadata: {
accessType: req.body.accessType,
name: req.body.name,
id: shareRequest.id
}
}
});
await server.services.telemetry.sendPostHogEvents({
event: PostHogEventTypes.SecretRequestCreated,
distinctId: getTelemetryDistinctId(req),
properties: {
secretRequestId: shareRequest.id,
organizationId: req.permission.orgId,
secretRequestName: req.body.name,
...req.auditLogInfo
}
});
return { id: shareRequest.id };
}
});
};
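Putting the removed routes together, the intended flow appears to be: the requester creates a request, a counterparty sets the value, and the requester reveals it. A hypothetical client-side walkthrough (base URL, token, and payloads are placeholders; the paths follow the `/api/v1/secret-sharing/requests` prefix registered later in this diff):

```ts
const BASE_URL = "https://app.example.com/api/v1/secret-sharing/requests";
const headers = { "Content-Type": "application/json", Authorization: "Bearer <jwt>" };

const run = async () => {
  // 1. Requester creates a request and shares the returned id out of band.
  const createRes = await fetch(BASE_URL, {
    method: "POST",
    headers,
    body: JSON.stringify({
      name: "prod DB password",
      expiresAt: new Date(Date.now() + 24 * 60 * 60 * 1000).toISOString()
    })
  });
  const { id } = await createRes.json();

  // 2. The counterparty fills in the value via set-value.
  await fetch(`${BASE_URL}/${id}/set-value`, {
    method: "POST",
    headers,
    body: JSON.stringify({ secretValue: "s3cr3t" })
  });

  // 3. The requester (JWT-authenticated) reads it back via reveal-value.
  const revealRes = await fetch(`${BASE_URL}/${id}/reveal-value`, { method: "POST", headers });
  const { secretRequest } = await revealRes.json();
  console.log(secretRequest.secretValue);
};

void run();
```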

View File

@@ -11,7 +11,6 @@ import {
} from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
import { SecretSharingType } from "@app/services/secret-sharing/secret-sharing-types";
export const registerSecretSharingRouter = async (server: FastifyZodProvider) => {
server.route({
@@ -39,7 +38,6 @@ export const registerSecretSharingRouter = async (server: FastifyZodProvider) =>
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
type: SecretSharingType.Share,
...req.query
});
@@ -213,8 +211,7 @@ export const registerSecretSharingRouter = async (server: FastifyZodProvider) =>
orgId: req.permission.orgId,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
sharedSecretId,
type: SecretSharingType.Share
sharedSecretId
});
await server.services.auditLog.createAuditLog({

View File

@@ -3,7 +3,6 @@ import { registerIdentityOrgRouter } from "./identity-org-router";
import { registerIdentityProjectRouter } from "./identity-project-router";
import { registerMfaRouter } from "./mfa-router";
import { registerOrgRouter } from "./organization-router";
import { registerPasswordRouter } from "./password-router";
import { registerProjectMembershipRouter } from "./project-membership-router";
import { registerProjectRouter } from "./project-router";
import { registerServiceTokenRouter } from "./service-token-router";
@@ -13,7 +12,6 @@ export const registerV2Routes = async (server: FastifyZodProvider) => {
await server.register(registerMfaRouter, { prefix: "/auth" });
await server.register(registerUserRouter, { prefix: "/users" });
await server.register(registerServiceTokenRouter, { prefix: "/service-token" });
await server.register(registerPasswordRouter, { prefix: "/password" });
await server.register(
async (orgRouter) => {
await orgRouter.register(registerOrgRouter);

View File

@@ -1,53 +0,0 @@
import { z } from "zod";
import { authRateLimit } from "@app/server/config/rateLimiter";
import { validatePasswordResetAuthorization } from "@app/services/auth/auth-fns";
import { AuthMode } from "@app/services/auth/auth-type";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { ResetPasswordV2Type } from "@app/services/auth/auth-password-type";
export const registerPasswordRouter = async (server: FastifyZodProvider) => {
server.route({
method: "POST",
url: "/password-reset",
config: {
rateLimit: authRateLimit
},
schema: {
body: z.object({
newPassword: z.string().trim()
})
},
handler: async (req) => {
const token = validatePasswordResetAuthorization(req.headers.authorization);
await server.services.password.resetPasswordV2({
type: ResetPasswordV2Type.Recovery,
newPassword: req.body.newPassword,
userId: token.userId
});
}
});
server.route({
method: "POST",
url: "/user/password-reset",
schema: {
body: z.object({
oldPassword: z.string().trim(),
newPassword: z.string().trim()
})
},
config: {
rateLimit: authRateLimit
},
onRequest: verifyAuth([AuthMode.JWT], { requireOrg: false }),
handler: async (req) => {
await server.services.password.resetPasswordV2({
type: ResetPasswordV2Type.LoggedInReset,
userId: req.permission.id,
newPassword: req.body.newPassword,
oldPassword: req.body.oldPassword
});
}
});
};

View File

@@ -380,48 +380,6 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
}
});
server.route({
method: "GET",
url: "/raw/id/:secretId",
config: {
rateLimit: secretsLimit
},
schema: {
params: z.object({
secretId: z.string()
}),
response: {
200: z.object({
secret: secretRawSchema.extend({
secretPath: z.string(),
tags: SecretTagsSchema.pick({
id: true,
slug: true,
color: true
})
.extend({ name: z.string() })
.array()
.optional(),
secretMetadata: ResourceMetadataSchema.optional()
})
})
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const { secretId } = req.params;
const secret = await server.services.secret.getSecretByIdRaw({
actorId: req.permission.id,
actor: req.permission.type,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
secretId
});
return { secret };
}
});
server.route({
method: "GET",
url: "/raw/:secretName",
@@ -579,12 +537,7 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
.optional()
.nullable()
.describe(RAW_SECRETS.CREATE.secretReminderRepeatDays),
secretReminderNote: z
.string()
.max(1024, "Secret reminder note cannot exceed 1024 characters")
.optional()
.nullable()
.describe(RAW_SECRETS.CREATE.secretReminderNote)
secretReminderNote: z.string().optional().nullable().describe(RAW_SECRETS.CREATE.secretReminderNote)
}),
response: {
200: z.union([
@@ -687,12 +640,7 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
tagIds: z.string().array().optional().describe(RAW_SECRETS.UPDATE.tagIds),
metadata: z.record(z.string()).optional(),
secretMetadata: ResourceMetadataSchema.optional(),
secretReminderNote: z
.string()
.max(1024, "Secret reminder note cannot exceed 1024 characters")
.optional()
.nullable()
.describe(RAW_SECRETS.UPDATE.secretReminderNote),
secretReminderNote: z.string().optional().nullable().describe(RAW_SECRETS.UPDATE.secretReminderNote),
secretReminderRepeatDays: z
.number()
.optional()
@@ -2105,12 +2053,7 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
skipMultilineEncoding: z.boolean().optional().describe(RAW_SECRETS.UPDATE.skipMultilineEncoding),
newSecretName: SecretNameSchema.optional().describe(RAW_SECRETS.UPDATE.newSecretName),
tagIds: z.string().array().optional().describe(RAW_SECRETS.UPDATE.tagIds),
secretReminderNote: z
.string()
.max(1024, "Secret reminder note cannot exceed 1024 characters")
.optional()
.nullable()
.describe(RAW_SECRETS.UPDATE.secretReminderNote),
secretReminderNote: z.string().optional().nullable().describe(RAW_SECRETS.UPDATE.secretReminderNote),
secretMetadata: ResourceMetadataSchema.optional(),
secretReminderRepeatDays: z
.number()

View File

@@ -45,36 +45,6 @@ export const validateSignUpAuthorization = (token: string, userId: string, valid
if (decodedToken.userId !== userId) throw new UnauthorizedError();
};
export const validatePasswordResetAuthorization = (token?: string) => {
if (!token) throw new UnauthorizedError();
const appCfg = getConfig();
const [AUTH_TOKEN_TYPE, AUTH_TOKEN_VALUE] = <[string, string]>token?.split(" ", 2) ?? [null, null];
if (AUTH_TOKEN_TYPE === null) {
throw new UnauthorizedError({ message: "Missing Authorization Header in the request header." });
}
if (AUTH_TOKEN_TYPE.toLowerCase() !== "bearer") {
throw new UnauthorizedError({
message: `The provided authentication type '${AUTH_TOKEN_TYPE}' is not supported.`
});
}
if (AUTH_TOKEN_VALUE === null) {
throw new UnauthorizedError({
message: "Missing Authorization Body in the request header"
});
}
const decodedToken = jwt.verify(AUTH_TOKEN_VALUE, appCfg.AUTH_SECRET) as AuthModeProviderSignUpTokenPayload;
if (decodedToken.authTokenType !== AuthTokenType.SIGNUP_TOKEN) {
throw new UnauthorizedError({
message: `The provided authentication token type is not supported.`
});
}
return decodedToken;
};
export const enforceUserLockStatus = (isLocked: boolean, temporaryLockDateEnd?: Date | null) => {
if (isLocked) {
throw new ForbiddenRequestError({

View File

@@ -4,8 +4,6 @@ import jwt from "jsonwebtoken";
import { SecretEncryptionAlgo, SecretKeyEncoding } from "@app/db/schemas";
import { getConfig } from "@app/lib/config/env";
import { generateSrpServerKey, srpCheckClientProof } from "@app/lib/crypto";
import { infisicalSymmetricDecrypt, infisicalSymmetricEncypt } from "@app/lib/crypto/encryption";
import { generateUserSrpKeys } from "@app/lib/crypto/srp";
import { BadRequestError } from "@app/lib/errors";
import { OrgServiceActor } from "@app/lib/types";
@@ -14,18 +12,14 @@ import { TokenType } from "../auth-token/auth-token-types";
import { SmtpTemplates, TSmtpService } from "../smtp/smtp-service";
import { TTotpConfigDALFactory } from "../totp/totp-config-dal";
import { TUserDALFactory } from "../user/user-dal";
import { UserEncryption } from "../user/user-types";
import { TAuthDALFactory } from "./auth-dal";
import {
ResetPasswordV2Type,
TChangePasswordDTO,
TCreateBackupPrivateKeyDTO,
TResetPasswordV2DTO,
TResetPasswordViaBackupKeyDTO,
TSetupPasswordViaBackupKeyDTO
} from "./auth-password-type";
import { ActorType, AuthMethod, AuthTokenType } from "./auth-type";
import { logger } from "@app/lib/logger";
type TAuthPasswordServiceFactoryDep = {
authDAL: TAuthDALFactory;
@@ -120,31 +114,26 @@ export const authPaswordServiceFactory = ({
* Email password reset flow via email. Step 1 send email
*/
const sendPasswordResetEmail = async (email: string) => {
const sendEmail = async () => {
const user = await userDAL.findUserByUsername(email);
const user = await userDAL.findUserByUsername(email);
// ignore as user is not found to avoid an outside entity to identify infisical registered accounts
if (!user || (user && !user.isAccepted)) return;
if (user && user.isAccepted) {
const cfg = getConfig();
const token = await tokenService.createTokenForUser({
type: TokenType.TOKEN_EMAIL_PASSWORD_RESET,
userId: user.id
});
const cfg = getConfig();
const token = await tokenService.createTokenForUser({
type: TokenType.TOKEN_EMAIL_PASSWORD_RESET,
userId: user.id
});
await smtpService.sendMail({
template: SmtpTemplates.ResetPassword,
recipients: [email],
subjectLine: "Infisical password reset",
substitutions: {
email,
token,
callback_url: cfg.SITE_URL ? `${cfg.SITE_URL}/password-reset` : ""
}
});
await smtpService.sendMail({
template: SmtpTemplates.ResetPassword,
recipients: [email],
subjectLine: "Infisical password reset",
substitutions: {
email,
token,
callback_url: cfg.SITE_URL ? `${cfg.SITE_URL}/password-reset` : ""
}
};
// note(daniel): run in background to prevent timing attacks
void sendEmail().catch((err) => logger.error(err, "Failed to send password reset email"));
});
};
/*
@@ -153,11 +142,6 @@ export const authPaswordServiceFactory = ({
const verifyPasswordResetEmail = async (email: string, code: string) => {
const cfg = getConfig();
const user = await userDAL.findUserByUsername(email);
const userEnc = await userDAL.findUserEncKeyByUserId(user.id);
if (!userEnc) throw new BadRequestError({ message: "Failed to find user encryption data" });
// ignore as user is not found to avoid an outside entity to identify infisical registered accounts
if (!user || (user && !user.isAccepted)) {
throw new Error("Failed email verification for pass reset");
@@ -178,91 +162,8 @@ export const authPaswordServiceFactory = ({
{ expiresIn: cfg.JWT_SIGNUP_LIFETIME }
);
return { token, user, userEncryptionVersion: userEnc.encryptionVersion as UserEncryption };
return { token, user };
};
const resetPasswordV2 = async ({ userId, newPassword, type, oldPassword }: TResetPasswordV2DTO) => {
const cfg = getConfig();
const user = await userDAL.findUserEncKeyByUserId(userId);
if (!user) {
throw new BadRequestError({ message: `User encryption key not found for user with ID '${userId}'` });
}
if (!user.hashedPassword) {
throw new BadRequestError({ message: "Unable to reset password, no password is set" });
}
if (!user.authMethods?.includes(AuthMethod.EMAIL)) {
throw new BadRequestError({ message: "Unable to reset password, no email authentication method is configured" });
}
// we check the old password if the user is resetting their password while logged in
if (type === ResetPasswordV2Type.LoggedInReset) {
if (!oldPassword) {
throw new BadRequestError({ message: "Current password is required." });
}
const isValid = await bcrypt.compare(oldPassword, user.hashedPassword);
if (!isValid) {
throw new BadRequestError({ message: "Incorrect current password." });
}
}
const newHashedPassword = await bcrypt.hash(newPassword, cfg.BCRYPT_SALT_ROUND);
// we need to get the original private key first for v2
let privateKey: string;
if (
user.serverEncryptedPrivateKey &&
user.serverEncryptedPrivateKeyTag &&
user.serverEncryptedPrivateKeyIV &&
user.serverEncryptedPrivateKeyEncoding &&
user.encryptionVersion === UserEncryption.V2
) {
privateKey = infisicalSymmetricDecrypt({
iv: user.serverEncryptedPrivateKeyIV,
tag: user.serverEncryptedPrivateKeyTag,
ciphertext: user.serverEncryptedPrivateKey,
keyEncoding: user.serverEncryptedPrivateKeyEncoding as SecretKeyEncoding
});
} else {
throw new BadRequestError({
message: "Cannot reset password without current credentials or recovery method",
name: "Reset password"
});
}
const encKeys = await generateUserSrpKeys(user.username, newPassword, {
publicKey: user.publicKey,
privateKey
});
const { tag, iv, ciphertext, encoding } = infisicalSymmetricEncypt(privateKey);
await userDAL.updateUserEncryptionByUserId(userId, {
hashedPassword: newHashedPassword,
// srp params
salt: encKeys.salt,
verifier: encKeys.verifier,
protectedKey: encKeys.protectedKey,
protectedKeyIV: encKeys.protectedKeyIV,
protectedKeyTag: encKeys.protectedKeyTag,
encryptedPrivateKey: encKeys.encryptedPrivateKey,
iv: encKeys.encryptedPrivateKeyIV,
tag: encKeys.encryptedPrivateKeyTag,
serverEncryptedPrivateKey: ciphertext,
serverEncryptedPrivateKeyIV: iv,
serverEncryptedPrivateKeyTag: tag,
serverEncryptedPrivateKeyEncoding: encoding
});
await tokenService.revokeAllMySessions(userId);
};
/*
* Reset password of a user via backup key
* */
@@ -490,7 +391,6 @@ export const authPaswordServiceFactory = ({
createBackupPrivateKey,
getBackupPrivateKeyOfUser,
sendPasswordSetupEmail,
setupPassword,
resetPasswordV2
setupPassword
};
};
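The removed `resetPasswordV2` mixes two concerns: the bcrypt guard for logged-in resets and the SRP/private-key re-encryption. The bcrypt half in isolation, as a minimal sketch (the salt rounds value is assumed; the service reads it from `cfg.BCRYPT_SALT_ROUND`):

```ts
import bcrypt from "bcrypt";

const SALT_ROUNDS = 12; // assumed; the service uses cfg.BCRYPT_SALT_ROUND

// Verify the current password, then produce the replacement hash.
export const rotatePasswordHash = async (oldPassword: string, newPassword: string, currentHash: string) => {
  const isValid = await bcrypt.compare(oldPassword, currentHash);
  if (!isValid) throw new Error("Incorrect current password.");
  return bcrypt.hash(newPassword, SALT_ROUNDS);
};
```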

View File

@@ -13,18 +13,6 @@ export type TChangePasswordDTO = {
password: string;
};
export enum ResetPasswordV2Type {
Recovery = "recovery",
LoggedInReset = "logged-in-reset"
}
export type TResetPasswordV2DTO = {
type: ResetPasswordV2Type;
userId: string;
newPassword: string;
oldPassword?: string;
};
export type TResetPasswordViaBackupKeyDTO = {
userId: string;
protectedKey: string;

View File

@@ -772,10 +772,6 @@ export const importDataIntoInfisicalFn = async ({
secretVersionDAL,
secretTagDAL,
secretVersionTagDAL,
actor: {
type: actor,
actorId
},
tx
});
}

View File

@@ -4,7 +4,7 @@ import ms from "ms";
import { ActionProjectType, ProjectMembershipRole, SecretKeyEncoding, TGroups } from "@app/db/schemas";
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
import { ProjectPermissionActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission";
import { validatePermissionBoundary } from "@app/lib/casl/boundary";
import { isAtLeastAsPrivileged } from "@app/lib/casl";
import { decryptAsymmetric, encryptAsymmetric } from "@app/lib/crypto";
import { infisicalSymmetricDecrypt } from "@app/lib/crypto/encryption";
import { BadRequestError, ForbiddenRequestError, NotFoundError } from "@app/lib/errors";
@@ -102,13 +102,11 @@ export const groupProjectServiceFactory = ({
project.id
);
const permissionBoundary = validatePermissionBoundary(permission, rolePermission);
if (!permissionBoundary.isValid)
throw new ForbiddenRequestError({
name: "PermissionBoundaryError",
message: "Failed to assign group to a more privileged role",
details: { missingPermissions: permissionBoundary.missingPermissions }
});
const hasRequiredPrivileges = isAtLeastAsPrivileged(permission, rolePermission);
if (!hasRequiredPrivileges) {
throw new ForbiddenRequestError({ message: "Failed to assign group to a more privileged role" });
}
}
// validate custom roles input
@@ -269,13 +267,12 @@ export const groupProjectServiceFactory = ({
requestedRoleChange,
project.id
);
const permissionBoundary = validatePermissionBoundary(permission, rolePermission);
if (!permissionBoundary.isValid)
throw new ForbiddenRequestError({
name: "PermissionBoundaryError",
message: "Failed to assign group to a more privileged role",
details: { missingPermissions: permissionBoundary.missingPermissions }
});
const hasRequiredPrivileges = isAtLeastAsPrivileged(permission, rolePermission);
if (!hasRequiredPrivileges) {
throw new ForbiddenRequestError({ message: "Failed to assign group to a more privileged role" });
}
}
// validate custom roles input
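This hunk (and the identity-auth hunks below) swaps a boolean `isAtLeastAsPrivileged` check for `validatePermissionBoundary`, whose advantage is a structured result: callers can report which permissions were missing. The result shape inferred from its usage in this diff, not confirmed against the library:

```ts
// Inferred shape only; the real type lives in @app/lib/casl/boundary.
type TPermissionBoundaryResult =
  | { isValid: true }
  | {
      isValid: false;
      missingPermissions: { action: string; subject: string; conditions?: unknown }[];
    };
```

Returning the missing permissions is what lets the `ForbiddenRequestError.details` change at the top of this section surface actionable feedback instead of a bare 403.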

View File

@@ -7,7 +7,7 @@ import { IdentityAuthMethod } from "@app/db/schemas";
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
import { OrgPermissionActions, OrgPermissionSubjects } from "@app/ee/services/permission/org-permission";
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
import { validatePermissionBoundary } from "@app/lib/casl/boundary";
import { isAtLeastAsPrivileged } from "@app/lib/casl";
import { getConfig } from "@app/lib/config/env";
import { BadRequestError, ForbiddenRequestError, NotFoundError, UnauthorizedError } from "@app/lib/errors";
import { extractIPDetails, isValidIpOrCidr } from "@app/lib/ip";
@@ -339,12 +339,9 @@ export const identityAwsAuthServiceFactory = ({
actorOrgId
);
const permissionBoundary = validatePermissionBoundary(permission, rolePermission);
if (!permissionBoundary.isValid)
if (!isAtLeastAsPrivileged(permission, rolePermission))
throw new ForbiddenRequestError({
name: "PermissionBoundaryError",
message: "Failed to revoke aws auth of identity with more privileged role",
details: { missingPermissions: permissionBoundary.missingPermissions }
message: "Failed to revoke aws auth of identity with more privileged role"
});
const revokedIdentityAwsAuth = await identityAwsAuthDAL.transaction(async (tx) => {

View File

@@ -5,7 +5,7 @@ import { IdentityAuthMethod } from "@app/db/schemas";
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
import { OrgPermissionActions, OrgPermissionSubjects } from "@app/ee/services/permission/org-permission";
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
import { validatePermissionBoundary } from "@app/lib/casl/boundary";
import { isAtLeastAsPrivileged } from "@app/lib/casl";
import { getConfig } from "@app/lib/config/env";
import { BadRequestError, ForbiddenRequestError, NotFoundError, UnauthorizedError } from "@app/lib/errors";
import { extractIPDetails, isValidIpOrCidr } from "@app/lib/ip";
@@ -312,12 +312,9 @@ export const identityAzureAuthServiceFactory = ({
actorAuthMethod,
actorOrgId
);
const permissionBoundary = validatePermissionBoundary(permission, rolePermission);
if (!permissionBoundary.isValid)
if (!isAtLeastAsPrivileged(permission, rolePermission))
throw new ForbiddenRequestError({
name: "PermissionBoundaryError",
message: "Failed to revoke azure auth of identity with more privileged role",
details: { missingPermissions: permissionBoundary.missingPermissions }
message: "Failed to revoke azure auth of identity with more privileged role"
});
const revokedIdentityAzureAuth = await identityAzureAuthDAL.transaction(async (tx) => {

Some files were not shown because too many files have changed in this diff.