Compare commits


1 Commit

1999 changed files with 50134 additions and 77124 deletions

View File

@ -74,34 +74,9 @@ CAPTCHA_SECRET=
NEXT_PUBLIC_CAPTCHA_SITE_KEY=
OTEL_TELEMETRY_COLLECTION_ENABLED=false
OTEL_EXPORT_TYPE=prometheus
OTEL_EXPORT_OTLP_ENDPOINT=
OTEL_OTLP_PUSH_INTERVAL=
OTEL_COLLECTOR_BASIC_AUTH_USERNAME=
OTEL_COLLECTOR_BASIC_AUTH_PASSWORD=
PLAIN_API_KEY=
PLAIN_WISH_LABEL_IDS=
SSL_CLIENT_CERTIFICATE_HEADER_KEY=
ENABLE_MSSQL_SECRET_ROTATION_ENCRYPT=true
# App Connections
# aws assume-role
INF_APP_CONNECTION_AWS_ACCESS_KEY_ID=
INF_APP_CONNECTION_AWS_SECRET_ACCESS_KEY=
# github oauth
INF_APP_CONNECTION_GITHUB_OAUTH_CLIENT_ID=
INF_APP_CONNECTION_GITHUB_OAUTH_CLIENT_SECRET=
#github app
INF_APP_CONNECTION_GITHUB_APP_CLIENT_ID=
INF_APP_CONNECTION_GITHUB_APP_CLIENT_SECRET=
INF_APP_CONNECTION_GITHUB_APP_PRIVATE_KEY=
INF_APP_CONNECTION_GITHUB_APP_SLUG=
INF_APP_CONNECTION_GITHUB_APP_ID=

View File

@ -18,10 +18,10 @@ jobs:
steps:
- name: ☁️ Checkout source
uses: actions/checkout@v3
- name: 🔧 Setup Node 20
- name: 🔧 Setup Node 16
uses: actions/setup-node@v3
with:
node-version: "20"
node-version: "16"
cache: "npm"
cache-dependency-path: frontend/package-lock.json
- name: 📦 Install dependencies

View File

@ -97,7 +97,7 @@ jobs:
image: infisical/staging_infisical:${{ steps.commit.outputs.short }}
environment-variables: "LOG_LEVEL=info"
- name: Deploy to Amazon ECS service
uses: aws-actions/amazon-ecs-deploy-task-definition@v2
uses: aws-actions/amazon-ecs-deploy-task-definition@v1
with:
task-definition: ${{ steps.render-web-container.outputs.task-definition }}
service: infisical-core-gamma-stage
@ -153,7 +153,7 @@ jobs:
image: infisical/staging_infisical:${{ steps.commit.outputs.short }}
environment-variables: "LOG_LEVEL=info"
- name: Deploy to Amazon ECS service
uses: aws-actions/amazon-ecs-deploy-task-definition@v2
uses: aws-actions/amazon-ecs-deploy-task-definition@v1
with:
task-definition: ${{ steps.render-web-container.outputs.task-definition }}
service: infisical-core-platform
@ -204,7 +204,7 @@ jobs:
image: infisical/staging_infisical:${{ steps.commit.outputs.short }}
environment-variables: "LOG_LEVEL=info"
- name: Deploy to Amazon ECS service
uses: aws-actions/amazon-ecs-deploy-task-definition@v2
uses: aws-actions/amazon-ecs-deploy-task-definition@v1
with:
task-definition: ${{ steps.render-web-container.outputs.task-definition }}
service: infisical-core-platform

View File

@ -1,12 +1,6 @@
#!/usr/bin/env sh
. "$(dirname -- "$0")/_/husky.sh"
# Check if infisical is installed
if ! command -v infisical >/dev/null 2>&1; then
echo "\nError: Infisical CLI is not installed. Please install the Infisical CLI before comitting.\n You can refer to the documentation at https://infisical.com/docs/cli/overview\n\n"
exit 1
fi
npx lint-staged
infisical scan git-changes --staged -v

View File

@ -8,7 +8,7 @@ FROM node:20-slim AS base
FROM base AS frontend-dependencies
WORKDIR /app
COPY frontend/package.json frontend/package-lock.json ./
COPY frontend/package.json frontend/package-lock.json frontend/next.config.js ./
# Install dependencies
RUN npm ci --only-production --ignore-scripts
@ -23,16 +23,17 @@ COPY --from=frontend-dependencies /app/node_modules ./node_modules
COPY /frontend .
ENV NODE_ENV production
ENV NEXT_PUBLIC_ENV production
ARG POSTHOG_HOST
ENV VITE_POSTHOG_HOST $POSTHOG_HOST
ENV NEXT_PUBLIC_POSTHOG_HOST $POSTHOG_HOST
ARG POSTHOG_API_KEY
ENV VITE_POSTHOG_API_KEY $POSTHOG_API_KEY
ENV NEXT_PUBLIC_POSTHOG_API_KEY $POSTHOG_API_KEY
ARG INTERCOM_ID
ENV VITE_INTERCOM_ID $INTERCOM_ID
ENV NEXT_PUBLIC_INTERCOM_ID $INTERCOM_ID
ARG INFISICAL_PLATFORM_VERSION
ENV VITE_INFISICAL_PLATFORM_VERSION $INFISICAL_PLATFORM_VERSION
ENV NEXT_PUBLIC_INFISICAL_PLATFORM_VERSION $INFISICAL_PLATFORM_VERSION
ARG CAPTCHA_SITE_KEY
ENV VITE_CAPTCHA_SITE_KEY $CAPTCHA_SITE_KEY
ENV NEXT_PUBLIC_CAPTCHA_SITE_KEY $CAPTCHA_SITE_KEY
# Build
RUN npm run build
@ -43,10 +44,20 @@ WORKDIR /app
RUN groupadd -r -g 1001 nodejs && useradd -r -u 1001 -g nodejs non-root-user
COPY --from=frontend-builder --chown=non-root-user:nodejs /app/dist ./
RUN mkdir -p /app/.next/cache/images && chown non-root-user:nodejs /app/.next/cache/images
VOLUME /app/.next/cache/images
COPY --chown=non-root-user:nodejs --chmod=555 frontend/scripts ./scripts
COPY --from=frontend-builder /app/public ./public
RUN chown non-root-user:nodejs ./public/data
COPY --from=frontend-builder --chown=non-root-user:nodejs /app/.next/standalone ./
COPY --from=frontend-builder --chown=non-root-user:nodejs /app/.next/static ./.next/static
USER non-root-user
ENV NEXT_TELEMETRY_DISABLED 1
##
## BACKEND
##
@ -58,21 +69,13 @@ RUN groupadd -r -g 1001 nodejs && useradd -r -u 1001 -g nodejs non-root-user
WORKDIR /app
# Required for pkcs11js and ODBC
# Required for pkcs11js
RUN apt-get update && apt-get install -y \
python3 \
make \
g++ \
unixodbc \
unixodbc-dev \
freetds-dev \
freetds-bin \
tdsodbc \
&& rm -rf /var/lib/apt/lists/*
# Configure ODBC
RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nSetup = /usr/lib/x86_64-linux-gnu/odbc/libtdsS.so\nFileUsage = 1\n" > /etc/odbcinst.ini
COPY backend/package*.json ./
RUN npm ci --only-production
@ -88,21 +91,13 @@ ENV ChrystokiConfigurationPath=/usr/safenet/lunaclient/
WORKDIR /app
# Required for pkcs11js and ODBC
# Required for pkcs11js
RUN apt-get update && apt-get install -y \
python3 \
make \
g++ \
unixodbc \
unixodbc-dev \
freetds-dev \
freetds-bin \
tdsodbc \
&& rm -rf /var/lib/apt/lists/*
# Configure ODBC
RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nSetup = /usr/lib/x86_64-linux-gnu/odbc/libtdsS.so\nFileUsage = 1\n" > /etc/odbcinst.ini
COPY backend/package*.json ./
RUN npm ci --only-production
@ -113,25 +108,13 @@ RUN mkdir frontend-build
# Production stage
FROM base AS production
# Install necessary packages including ODBC
# Install necessary packages
RUN apt-get update && apt-get install -y \
ca-certificates \
curl \
git \
python3 \
make \
g++ \
unixodbc \
unixodbc-dev \
freetds-dev \
freetds-bin \
tdsodbc \
openssh-client \
&& rm -rf /var/lib/apt/lists/*
# Configure ODBC in production
RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nSetup = /usr/lib/x86_64-linux-gnu/odbc/libtdsS.so\nFileUsage = 1\n" > /etc/odbcinst.ini
# Install Infisical CLI
RUN curl -1sLf 'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.deb.sh' | bash \
&& apt-get update && apt-get install -y infisical=0.31.1 \
@ -149,11 +132,14 @@ RUN chmod u+rx /usr/sbin/update-ca-certificates
## set pre baked keys
ARG POSTHOG_API_KEY
ENV POSTHOG_API_KEY=$POSTHOG_API_KEY
ENV NEXT_PUBLIC_POSTHOG_API_KEY=$POSTHOG_API_KEY \
BAKED_NEXT_PUBLIC_POSTHOG_API_KEY=$POSTHOG_API_KEY
ARG INTERCOM_ID=intercom-id
ENV INTERCOM_ID=$INTERCOM_ID
ENV NEXT_PUBLIC_INTERCOM_ID=$INTERCOM_ID \
BAKED_NEXT_PUBLIC_INTERCOM_ID=$INTERCOM_ID
ARG CAPTCHA_SITE_KEY
ENV CAPTCHA_SITE_KEY=$CAPTCHA_SITE_KEY
ENV NEXT_PUBLIC_CAPTCHA_SITE_KEY=$CAPTCHA_SITE_KEY \
BAKED_NEXT_PUBLIC_CAPTCHA_SITE_KEY=$CAPTCHA_SITE_KEY
WORKDIR /
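
The `BAKED_NEXT_PUBLIC_*` variables above support a common Next.js pattern: placeholder values are baked into the client bundles at build time and swapped for the real runtime values when the container starts (presumably what the `frontend/scripts` directory copied earlier is for). A minimal sketch of that substitution step, with hypothetical file paths and variable handling; an illustration of the pattern, not the repository's actual script:

```ts
import { promises as fs } from "node:fs";
import path from "node:path";

// Map each baked build-time placeholder to the value supplied at container start.
// The variable names mirror the ENV pairs above; everything else is assumed.
const substitutions: Array<[string | undefined, string | undefined]> = [
  [process.env.BAKED_NEXT_PUBLIC_POSTHOG_API_KEY, process.env.NEXT_PUBLIC_POSTHOG_API_KEY],
  [process.env.BAKED_NEXT_PUBLIC_INTERCOM_ID, process.env.NEXT_PUBLIC_INTERCOM_ID],
  [process.env.BAKED_NEXT_PUBLIC_CAPTCHA_SITE_KEY, process.env.NEXT_PUBLIC_CAPTCHA_SITE_KEY]
];

// Recursively rewrite the built JS bundles, replacing baked placeholders in place.
async function rewrite(dir: string): Promise<void> {
  for (const entry of await fs.readdir(dir, { withFileTypes: true })) {
    const full = path.join(dir, entry.name);
    if (entry.isDirectory()) {
      await rewrite(full);
    } else if (entry.name.endsWith(".js")) {
      let contents = await fs.readFile(full, "utf8");
      for (const [baked, runtime] of substitutions) {
        if (baked && runtime && baked !== runtime) contents = contents.split(baked).join(runtime);
      }
      await fs.writeFile(full, contents);
    }
  }
}

rewrite(path.resolve(".next")).catch((err) => {
  console.error(err);
  process.exit(1);
});
```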

View File

@ -12,7 +12,7 @@ RUN apk add --no-cache libc6-compat
WORKDIR /app
COPY frontend/package.json frontend/package-lock.json ./
COPY frontend/package.json frontend/package-lock.json frontend/next.config.js ./
# Install dependencies
RUN npm ci --only-production --ignore-scripts
@ -27,16 +27,17 @@ COPY --from=frontend-dependencies /app/node_modules ./node_modules
COPY /frontend .
ENV NODE_ENV production
ENV NEXT_PUBLIC_ENV production
ARG POSTHOG_HOST
ENV VITE_POSTHOG_HOST $POSTHOG_HOST
ENV NEXT_PUBLIC_POSTHOG_HOST $POSTHOG_HOST
ARG POSTHOG_API_KEY
ENV VITE_POSTHOG_API_KEY $POSTHOG_API_KEY
ENV NEXT_PUBLIC_POSTHOG_API_KEY $POSTHOG_API_KEY
ARG INTERCOM_ID
ENV VITE_INTERCOM_ID $INTERCOM_ID
ENV NEXT_PUBLIC_INTERCOM_ID $INTERCOM_ID
ARG INFISICAL_PLATFORM_VERSION
ENV VITE_INFISICAL_PLATFORM_VERSION $INFISICAL_PLATFORM_VERSION
ENV NEXT_PUBLIC_INFISICAL_PLATFORM_VERSION $INFISICAL_PLATFORM_VERSION
ARG CAPTCHA_SITE_KEY
ENV VITE_CAPTCHA_SITE_KEY $CAPTCHA_SITE_KEY
ENV NEXT_PUBLIC_CAPTCHA_SITE_KEY $CAPTCHA_SITE_KEY
# Build
RUN npm run build
@ -48,10 +49,20 @@ WORKDIR /app
RUN addgroup --system --gid 1001 nodejs
RUN adduser --system --uid 1001 non-root-user
COPY --from=frontend-builder --chown=non-root-user:nodejs /app/dist ./
RUN mkdir -p /app/.next/cache/images && chown non-root-user:nodejs /app/.next/cache/images
VOLUME /app/.next/cache/images
COPY --chown=non-root-user:nodejs --chmod=555 frontend/scripts ./scripts
COPY --from=frontend-builder /app/public ./public
RUN chown non-root-user:nodejs ./public/data
COPY --from=frontend-builder --chown=non-root-user:nodejs /app/.next/standalone ./
COPY --from=frontend-builder --chown=non-root-user:nodejs /app/.next/static ./.next/static
USER non-root-user
ENV NEXT_TELEMETRY_DISABLED 1
##
## BACKEND
##
@ -61,16 +72,8 @@ RUN addgroup --system --gid 1001 nodejs \
WORKDIR /app
# Install all required dependencies for build
RUN apk --update add \
python3 \
make \
g++ \
unixodbc \
freetds \
unixodbc-dev \
libc-dev \
freetds-dev
# Required for pkcs11js
RUN apk add --no-cache python3 make g++
COPY backend/package*.json ./
RUN npm ci --only-production
@ -85,19 +88,8 @@ FROM base AS backend-runner
WORKDIR /app
# Install all required dependencies for runtime
RUN apk --update add \
python3 \
make \
g++ \
unixodbc \
freetds \
unixodbc-dev \
libc-dev \
freetds-dev
# Configure ODBC
RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/libtdsodbc.so\nSetup = /usr/lib/libtdsodbc.so\nFileUsage = 1\n" > /etc/odbcinst.ini
# Required for pkcs11js
RUN apk add --no-cache python3 make g++
COPY backend/package*.json ./
RUN npm ci --only-production
@ -108,33 +100,11 @@ RUN mkdir frontend-build
# Production stage
FROM base AS production
RUN apk add --upgrade --no-cache ca-certificates
RUN apk add --no-cache bash curl && curl -1sLf \
'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.alpine.sh' | bash \
&& apk add infisical=0.31.1 && apk add --no-cache git
WORKDIR /
# Install all required runtime dependencies
RUN apk --update add \
python3 \
make \
g++ \
unixodbc \
freetds \
unixodbc-dev \
libc-dev \
freetds-dev \
bash \
curl \
git \
openssh
# Configure ODBC in production
RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/libtdsodbc.so\nSetup = /usr/lib/libtdsodbc.so\nFileUsage = 1\n" > /etc/odbcinst.ini
# Setup user permissions
RUN addgroup --system --gid 1001 nodejs \
&& adduser --system --uid 1001 non-root-user
@ -148,12 +118,16 @@ RUN chmod u+rx /usr/sbin/update-ca-certificates
## set pre baked keys
ARG POSTHOG_API_KEY
ENV POSTHOG_API_KEY=$POSTHOG_API_KEY
ENV NEXT_PUBLIC_POSTHOG_API_KEY=$POSTHOG_API_KEY \
BAKED_NEXT_PUBLIC_POSTHOG_API_KEY=$POSTHOG_API_KEY
ARG INTERCOM_ID=intercom-id
ENV INTERCOM_ID=$INTERCOM_ID
ENV NEXT_PUBLIC_INTERCOM_ID=$INTERCOM_ID \
BAKED_NEXT_PUBLIC_INTERCOM_ID=$INTERCOM_ID
ARG CAPTCHA_SITE_KEY
ENV CAPTCHA_SITE_KEY=$CAPTCHA_SITE_KEY
ENV NEXT_PUBLIC_CAPTCHA_SITE_KEY=$CAPTCHA_SITE_KEY \
BAKED_NEXT_PUBLIC_CAPTCHA_SITE_KEY=$CAPTCHA_SITE_KEY
WORKDIR /
COPY --from=backend-runner /app /backend

View File

@ -10,9 +10,6 @@ up-dev:
up-dev-ldap:
docker compose -f docker-compose.dev.yml --profile ldap up --build
up-dev-metrics:
docker compose -f docker-compose.dev.yml --profile metrics up --build
up-prod:
docker-compose -f docker-compose.prod.yml up --build
@ -30,3 +27,4 @@ reviewable-api:
npm run type:check
reviewable: reviewable-ui reviewable-api

View File

@ -14,6 +14,15 @@
<a href="https://infisical.com/careers">Hiring (Remote/SF)</a>
</h4>
<p align="center">
<a href="https://infisical.com/docs/self-hosting/deployment-options/aws-ec2">
<img src=".github/images/deploy-to-aws.png" width="137" />
</a>
<a href="https://infisical.com/docs/self-hosting/deployment-options/digital-ocean-marketplace" alt="Deploy to DigitalOcean">
<img width="200" alt="Deploy to DO" src="https://www.deploytodo.com/do-btn-blue.svg"/>
</a>
</p>
<h4 align="center">
<a href="https://github.com/Infisical/infisical/blob/main/LICENSE">
<img src="https://img.shields.io/badge/license-MIT-blue.svg" alt="Infisical is released under the MIT license." />
@ -66,7 +75,7 @@ We're on a mission to make security tooling more accessible to everyone, not jus
### Key Management (KMS):
- **[Cryptographic Keys](https://infisical.com/docs/documentation/platform/kms)**: Centrally manage keys across projects through a user-friendly interface or via the API.
- **[Cryptograhic Keys](https://infisical.com/docs/documentation/platform/kms)**: Centrally manage keys across projects through a user-friendly interface or via the API.
- **[Encrypt and Decrypt Data](https://infisical.com/docs/documentation/platform/kms#guide-to-encrypting-data)**: Use symmetric keys to encrypt and decrypt data.
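
For readers skimming the bullet above, "symmetric" means the same key both encrypts and decrypts. A minimal Node.js sketch of the concept using the standard `node:crypto` module; this is only an illustration of symmetric encryption, not Infisical's KMS API:

```ts
import { createCipheriv, createDecipheriv, randomBytes } from "node:crypto";

// A single 256-bit key encrypts and decrypts; AES-256-GCM also authenticates the ciphertext.
const key = randomBytes(32);

function encrypt(plaintext: string) {
  const iv = randomBytes(12);
  const cipher = createCipheriv("aes-256-gcm", key, iv);
  const ciphertext = Buffer.concat([cipher.update(plaintext, "utf8"), cipher.final()]);
  return { iv, ciphertext, tag: cipher.getAuthTag() };
}

function decrypt({ iv, ciphertext, tag }: ReturnType<typeof encrypt>) {
  const decipher = createDecipheriv("aes-256-gcm", key, iv);
  decipher.setAuthTag(tag);
  return Buffer.concat([decipher.update(ciphertext), decipher.final()]).toString("utf8");
}

console.log(decrypt(encrypt("hello world"))); // "hello world"
```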
### General Platform:

View File

@ -7,17 +7,7 @@ WORKDIR /app
RUN apk --update add \
python3 \
make \
g++ \
openssh
# install dependencies for TDS driver (required for SAP ASE dynamic secrets)
RUN apk add --no-cache \
unixodbc \
freetds \
unixodbc-dev \
libc-dev \
freetds-dev
g++
COPY package*.json ./
RUN npm ci --only-production
@ -38,17 +28,6 @@ RUN apk --update add \
make \
g++
# install dependencies for TDS driver (required for SAP ASE dynamic secrets)
RUN apk add --no-cache \
unixodbc \
freetds \
unixodbc-dev \
libc-dev \
freetds-dev
RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/libtdsodbc.so\nSetup = /usr/lib/libtdsodbc.so\nFileUsage = 1\n" > /etc/odbcinst.ini
RUN npm ci --only-production && npm cache clean --force
COPY --from=build /app .

View File

@ -7,7 +7,7 @@ ARG SOFTHSM2_VERSION=2.5.0
ENV SOFTHSM2_VERSION=${SOFTHSM2_VERSION} \
SOFTHSM2_SOURCES=/tmp/softhsm2
# install build dependencies including python3 (required for pkcs11js and partially TDS driver)
# install build dependencies including python3
RUN apk --update add \
alpine-sdk \
autoconf \
@ -17,22 +17,9 @@ RUN apk --update add \
openssl-dev \
python3 \
make \
g++ \
openssh
# install dependencies for TDS driver (required for SAP ASE dynamic secrets)
RUN apk add --no-cache \
unixodbc \
freetds \
unixodbc-dev \
libc-dev \
freetds-dev
RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/libtdsodbc.so\nSetup = /usr/lib/libtdsodbc.so\nFileUsage = 1\n" > /etc/odbcinst.ini
g++
# build and install SoftHSM2
RUN git clone https://github.com/opendnssec/SoftHSMv2.git ${SOFTHSM2_SOURCES}
WORKDIR ${SOFTHSM2_SOURCES}

View File

@ -10,22 +10,17 @@ export const mockQueue = (): TQueueServiceFactory => {
queue: async (name, jobData) => {
job[name] = jobData;
},
queuePg: async () => {},
initialize: async () => {},
shutdown: async () => undefined,
stopRepeatableJob: async () => true,
start: (name, jobFn) => {
queues[name] = jobFn;
workers[name] = jobFn;
},
startPg: async () => {},
listen: (name, event) => {
events[name] = event;
},
getRepeatableJobs: async () => [],
clearQueue: async () => {},
stopJobById: async () => {},
stopRepeatableJobByJobId: async () => true,
stopRepeatableJobByKey: async () => true
stopRepeatableJobByJobId: async () => true
};
};
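
As a usage sketch, a test can lean on the fact that this mock only records handlers and payloads instead of talking to Redis. The import path, queue name, and payload below are assumptions for illustration; only the `start`/`queue` shape comes from the code above:

```ts
import { it } from "vitest";

import { mockQueue } from "./mocks/queue"; // path assumed for illustration

it("exercises queue wiring without Redis", async () => {
  const queue = mockQueue();

  // Queue names are normally enum members in the real service; plain strings are
  // cast here purely for illustration.
  // `start` in the mock only records the handler, so a test can invoke the
  // registered function directly rather than waiting on a real worker.
  queue.start("example-queue" as never, async () => {
    /* assertions on side effects would go here */
  });

  // `queue` in the mock only records the payload per name; nothing is dispatched.
  await queue.queue("example-queue" as never, { example: true } as never);
});
```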

View File

@ -1,86 +0,0 @@
import { createFolder, deleteFolder } from "e2e-test/testUtils/folders";
import { createSecretV2, deleteSecretV2, getSecretsV2 } from "e2e-test/testUtils/secrets";
import { seedData1 } from "@app/db/seed-data";
describe("Secret Recursive Testing", async () => {
const projectId = seedData1.projectV3.id;
const folderAndSecretNames = [
{ name: "deep1", path: "/", expectedSecretCount: 4 },
{ name: "deep21", path: "/deep1", expectedSecretCount: 2 },
{ name: "deep3", path: "/deep1/deep2", expectedSecretCount: 1 },
{ name: "deep22", path: "/deep2", expectedSecretCount: 1 }
];
beforeAll(async () => {
const rootFolderIds: string[] = [];
for (const folder of folderAndSecretNames) {
// eslint-disable-next-line no-await-in-loop
const createdFolder = await createFolder({
authToken: jwtAuthToken,
environmentSlug: "prod",
workspaceId: projectId,
secretPath: folder.path,
name: folder.name
});
if (folder.path === "/") {
rootFolderIds.push(createdFolder.id);
}
// eslint-disable-next-line no-await-in-loop
await createSecretV2({
secretPath: folder.path,
authToken: jwtAuthToken,
environmentSlug: "prod",
workspaceId: projectId,
key: folder.name,
value: folder.name
});
}
return async () => {
await Promise.all(
rootFolderIds.map((id) =>
deleteFolder({
authToken: jwtAuthToken,
secretPath: "/",
id,
workspaceId: projectId,
environmentSlug: "prod"
})
)
);
await deleteSecretV2({
authToken: jwtAuthToken,
secretPath: "/",
workspaceId: projectId,
environmentSlug: "prod",
key: folderAndSecretNames[0].name
});
};
});
test.each(folderAndSecretNames)("$path recursive secret fetching", async ({ path, expectedSecretCount }) => {
const secrets = await getSecretsV2({
authToken: jwtAuthToken,
secretPath: path,
workspaceId: projectId,
environmentSlug: "prod",
recursive: true
});
expect(secrets.secrets.length).toEqual(expectedSecretCount);
expect(secrets.secrets.sort((a, b) => a.secretKey.localeCompare(b.secretKey))).toEqual(
folderAndSecretNames
.filter((el) => el.path.startsWith(path))
.sort((a, b) => a.name.localeCompare(b.name))
.map((el) =>
expect.objectContaining({
secretKey: el.name,
secretValue: el.name
})
)
);
});
});

View File

@ -97,7 +97,6 @@ export const getSecretsV2 = async (dto: {
environmentSlug: string;
secretPath: string;
authToken: string;
recursive?: boolean;
}) => {
const getSecretsResponse = await testServer.inject({
method: "GET",
@ -110,8 +109,7 @@ export const getSecretsV2 = async (dto: {
environment: dto.environmentSlug,
secretPath: dto.secretPath,
expandSecretReferences: "true",
include_imports: "true",
recursive: String(dto.recursive || false)
include_imports: "true"
}
});
expect(getSecretsResponse.statusCode).toBe(200);

View File

@ -53,13 +53,13 @@ export default {
extension: "ts"
});
const smtp = mockSmtpServer();
const queue = queueServiceFactory(cfg.REDIS_URL, { dbConnectionUrl: cfg.DB_CONNECTION_URI });
const queue = queueServiceFactory(cfg.REDIS_URL);
const keyStore = keyStoreFactory(cfg.REDIS_URL);
const hsmModule = initializeHsmModule();
hsmModule.initialize();
const server = await main({ db, smtp, logger, queue, keyStore, hsmModule: hsmModule.getModule(), redis });
const server = await main({ db, smtp, logger, queue, keyStore, hsmModule: hsmModule.getModule() });
// @ts-expect-error type
globalThis.testServer = server;

backend/package-lock.json (generated, 1950 changed lines)

File diff suppressed because it is too large

View File

@ -132,32 +132,21 @@
"@fastify/multipart": "8.3.0",
"@fastify/passport": "^2.4.0",
"@fastify/rate-limit": "^9.0.0",
"@fastify/request-context": "^5.1.0",
"@fastify/session": "^10.7.0",
"@fastify/static": "^7.0.4",
"@fastify/swagger": "^8.14.0",
"@fastify/swagger-ui": "^2.1.0",
"@google-cloud/kms": "^4.5.0",
"@node-saml/passport-saml": "^4.0.4",
"@octokit/auth-app": "^7.1.1",
"@octokit/plugin-retry": "^5.0.5",
"@octokit/rest": "^20.0.2",
"@octokit/webhooks-types": "^7.3.1",
"@octopusdeploy/api-client": "^3.4.1",
"@opentelemetry/api": "^1.9.0",
"@opentelemetry/auto-instrumentations-node": "^0.53.0",
"@opentelemetry/exporter-metrics-otlp-proto": "^0.55.0",
"@opentelemetry/exporter-prometheus": "^0.55.0",
"@opentelemetry/instrumentation": "^0.55.0",
"@opentelemetry/resources": "^1.28.0",
"@opentelemetry/sdk-metrics": "^1.28.0",
"@opentelemetry/semantic-conventions": "^1.27.0",
"@peculiar/asn1-schema": "^2.3.8",
"@peculiar/x509": "^1.12.1",
"@serdnam/pino-cloudwatch-transport": "^1.0.4",
"@sindresorhus/slugify": "1.1.0",
"@slack/oauth": "^3.0.1",
"@slack/web-api": "^7.3.4",
"@team-plain/typescript-sdk": "^4.6.1",
"@ucast/mongo2js": "^1.3.4",
"ajv": "^8.12.0",
"argon2": "^0.31.2",
@ -191,7 +180,6 @@
"mysql2": "^3.9.8",
"nanoid": "^3.3.4",
"nodemailer": "^6.9.9",
"odbc": "^2.4.9",
"openid-client": "^5.6.5",
"ora": "^7.0.1",
"oracledb": "^6.4.0",
@ -201,7 +189,6 @@
"passport-google-oauth20": "^2.0.0",
"passport-ldapauth": "^3.0.1",
"pg": "^8.11.3",
"pg-boss": "^10.1.5",
"pg-query-stream": "^4.5.3",
"picomatch": "^3.0.1",
"pino": "^8.16.2",

View File

@ -1,7 +0,0 @@
import "@fastify/request-context";
declare module "@fastify/request-context" {
interface RequestContextData {
reqId: string;
}
}
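
The removed declaration above is what made `reqId` a typed key in @fastify/request-context's request-scoped store. For context, the general pattern such an augmentation supports looks roughly like this; a sketch of the library's documented usage, not Infisical's actual wiring:

```ts
import Fastify from "fastify";
import { fastifyRequestContext, requestContext } from "@fastify/request-context";

const app = Fastify();
void app.register(fastifyRequestContext);

// Store the request id once per request...
// (assumes a module augmentation like the one removed above, so "reqId" is a known key)
app.addHook("onRequest", async (req) => {
  req.requestContext.set("reqId", req.id as string);
});

// ...and read it anywhere in that request's async call chain (loggers, services)
// without threading the value through every function signature.
app.get("/ping", async () => ({ reqId: requestContext.get("reqId") }));
```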

View File

@ -1,6 +1,6 @@
import { FastifyInstance, RawReplyDefaultExpression, RawRequestDefaultExpression, RawServerDefault } from "fastify";
import { Logger } from "pino";
import { CustomLogger } from "@app/lib/logger/logger";
import { ZodTypeProvider } from "@app/server/plugins/fastify-zod";
declare global {
@ -8,7 +8,7 @@ declare global {
RawServerDefault,
RawRequestDefaultExpression<RawServerDefault>,
RawReplyDefaultExpression<RawServerDefault>,
Readonly<CustomLogger>,
Readonly<Logger>,
ZodTypeProvider
>;

View File

@ -1,7 +1,5 @@
import "fastify";
import { Redis } from "ioredis";
import { TUsers } from "@app/db/schemas";
import { TAccessApprovalPolicyServiceFactory } from "@app/ee/services/access-approval-policy/access-approval-policy-service";
import { TAccessApprovalRequestServiceFactory } from "@app/ee/services/access-approval-request/access-approval-request-service";
@ -31,12 +29,9 @@ import { TSecretApprovalRequestServiceFactory } from "@app/ee/services/secret-ap
import { TSecretRotationServiceFactory } from "@app/ee/services/secret-rotation/secret-rotation-service";
import { TSecretScanningServiceFactory } from "@app/ee/services/secret-scanning/secret-scanning-service";
import { TSecretSnapshotServiceFactory } from "@app/ee/services/secret-snapshot/secret-snapshot-service";
import { TSshCertificateAuthorityServiceFactory } from "@app/ee/services/ssh/ssh-certificate-authority-service";
import { TSshCertificateTemplateServiceFactory } from "@app/ee/services/ssh-certificate-template/ssh-certificate-template-service";
import { TTrustedIpServiceFactory } from "@app/ee/services/trusted-ip/trusted-ip-service";
import { TAuthMode } from "@app/server/plugins/auth/inject-identity";
import { TApiKeyServiceFactory } from "@app/services/api-key/api-key-service";
import { TAppConnectionServiceFactory } from "@app/services/app-connection/app-connection-service";
import { TAuthLoginFactory } from "@app/services/auth/auth-login-service";
import { TAuthPasswordFactory } from "@app/services/auth/auth-password-service";
import { TAuthSignupFactory } from "@app/services/auth/auth-signup-service";
@ -55,7 +50,6 @@ import { TIdentityAccessTokenServiceFactory } from "@app/services/identity-acces
import { TIdentityAwsAuthServiceFactory } from "@app/services/identity-aws-auth/identity-aws-auth-service";
import { TIdentityAzureAuthServiceFactory } from "@app/services/identity-azure-auth/identity-azure-auth-service";
import { TIdentityGcpAuthServiceFactory } from "@app/services/identity-gcp-auth/identity-gcp-auth-service";
import { TIdentityJwtAuthServiceFactory } from "@app/services/identity-jwt-auth/identity-jwt-auth-service";
import { TIdentityKubernetesAuthServiceFactory } from "@app/services/identity-kubernetes-auth/identity-kubernetes-auth-service";
import { TIdentityOidcAuthServiceFactory } from "@app/services/identity-oidc-auth/identity-oidc-auth-service";
import { TIdentityProjectServiceFactory } from "@app/services/identity-project/identity-project-service";
@ -93,10 +87,6 @@ import { TWebhookServiceFactory } from "@app/services/webhook/webhook-service";
import { TWorkflowIntegrationServiceFactory } from "@app/services/workflow-integration/workflow-integration-service";
declare module "fastify" {
interface Session {
callbackPort: string;
}
interface FastifyRequest {
realIp: string;
// used for mfa session authentication
@ -125,7 +115,6 @@ declare module "fastify" {
}
interface FastifyInstance {
redis: Redis;
services: {
login: TAuthLoginFactory;
password: TAuthPasswordFactory;
@ -166,7 +155,6 @@ declare module "fastify" {
identityAwsAuth: TIdentityAwsAuthServiceFactory;
identityAzureAuth: TIdentityAzureAuthServiceFactory;
identityOidcAuth: TIdentityOidcAuthServiceFactory;
identityJwtAuth: TIdentityJwtAuthServiceFactory;
accessApprovalPolicy: TAccessApprovalPolicyServiceFactory;
accessApprovalRequest: TAccessApprovalRequestServiceFactory;
secretApprovalPolicy: TSecretApprovalPolicyServiceFactory;
@ -180,8 +168,6 @@ declare module "fastify" {
auditLogStream: TAuditLogStreamServiceFactory;
certificate: TCertificateServiceFactory;
certificateTemplate: TCertificateTemplateServiceFactory;
sshCertificateAuthority: TSshCertificateAuthorityServiceFactory;
sshCertificateTemplate: TSshCertificateTemplateServiceFactory;
certificateAuthority: TCertificateAuthorityServiceFactory;
certificateAuthorityCrl: TCertificateAuthorityCrlServiceFactory;
certificateEst: TCertificateEstServiceFactory;
@ -209,7 +195,6 @@ declare module "fastify" {
externalGroupOrgRoleMapping: TExternalGroupOrgRoleMappingServiceFactory;
projectTemplate: TProjectTemplateServiceFactory;
totp: TTotpServiceFactory;
appConnection: TAppConnectionServiceFactory;
};
// this is exclusive use for middlewares in which we need to inject data
// everywhere else access using service layer

View File

@ -98,9 +98,6 @@ import {
TIdentityGcpAuths,
TIdentityGcpAuthsInsert,
TIdentityGcpAuthsUpdate,
TIdentityJwtAuths,
TIdentityJwtAuthsInsert,
TIdentityJwtAuthsUpdate,
TIdentityKubernetesAuths,
TIdentityKubernetesAuthsInsert,
TIdentityKubernetesAuthsUpdate,
@ -202,9 +199,6 @@ import {
TProjectSlackConfigs,
TProjectSlackConfigsInsert,
TProjectSlackConfigsUpdate,
TProjectSplitBackfillIds,
TProjectSplitBackfillIdsInsert,
TProjectSplitBackfillIdsUpdate,
TProjectsUpdate,
TProjectTemplates,
TProjectTemplatesInsert,
@ -218,9 +212,6 @@ import {
TRateLimit,
TRateLimitInsert,
TRateLimitUpdate,
TResourceMetadata,
TResourceMetadataInsert,
TResourceMetadataUpdate,
TSamlConfigs,
TSamlConfigsInsert,
TSamlConfigsUpdate,
@ -320,21 +311,6 @@ import {
TSlackIntegrations,
TSlackIntegrationsInsert,
TSlackIntegrationsUpdate,
TSshCertificateAuthorities,
TSshCertificateAuthoritiesInsert,
TSshCertificateAuthoritiesUpdate,
TSshCertificateAuthoritySecrets,
TSshCertificateAuthoritySecretsInsert,
TSshCertificateAuthoritySecretsUpdate,
TSshCertificateBodies,
TSshCertificateBodiesInsert,
TSshCertificateBodiesUpdate,
TSshCertificates,
TSshCertificatesInsert,
TSshCertificatesUpdate,
TSshCertificateTemplates,
TSshCertificateTemplatesInsert,
TSshCertificateTemplatesUpdate,
TSuperAdmin,
TSuperAdminInsert,
TSuperAdminUpdate,
@ -366,7 +342,6 @@ import {
TWorkflowIntegrationsInsert,
TWorkflowIntegrationsUpdate
} from "@app/db/schemas";
import { TAppConnections, TAppConnectionsInsert, TAppConnectionsUpdate } from "@app/db/schemas/app-connections";
import {
TExternalGroupOrgRoleMappings,
TExternalGroupOrgRoleMappingsInsert,
@ -397,31 +372,6 @@ declare module "knex/types/tables" {
interface Tables {
[TableName.Users]: KnexOriginal.CompositeTableType<TUsers, TUsersInsert, TUsersUpdate>;
[TableName.Groups]: KnexOriginal.CompositeTableType<TGroups, TGroupsInsert, TGroupsUpdate>;
[TableName.SshCertificateAuthority]: KnexOriginal.CompositeTableType<
TSshCertificateAuthorities,
TSshCertificateAuthoritiesInsert,
TSshCertificateAuthoritiesUpdate
>;
[TableName.SshCertificateAuthoritySecret]: KnexOriginal.CompositeTableType<
TSshCertificateAuthoritySecrets,
TSshCertificateAuthoritySecretsInsert,
TSshCertificateAuthoritySecretsUpdate
>;
[TableName.SshCertificateTemplate]: KnexOriginal.CompositeTableType<
TSshCertificateTemplates,
TSshCertificateTemplatesInsert,
TSshCertificateTemplatesUpdate
>;
[TableName.SshCertificate]: KnexOriginal.CompositeTableType<
TSshCertificates,
TSshCertificatesInsert,
TSshCertificatesUpdate
>;
[TableName.SshCertificateBody]: KnexOriginal.CompositeTableType<
TSshCertificateBodies,
TSshCertificateBodiesInsert,
TSshCertificateBodiesUpdate
>;
[TableName.CertificateAuthority]: KnexOriginal.CompositeTableType<
TCertificateAuthorities,
TCertificateAuthoritiesInsert,
@ -640,11 +590,6 @@ declare module "knex/types/tables" {
TIdentityOidcAuthsInsert,
TIdentityOidcAuthsUpdate
>;
[TableName.IdentityJwtAuth]: KnexOriginal.CompositeTableType<
TIdentityJwtAuths,
TIdentityJwtAuthsInsert,
TIdentityJwtAuthsUpdate
>;
[TableName.IdentityUaClientSecret]: KnexOriginal.CompositeTableType<
TIdentityUaClientSecrets,
TIdentityUaClientSecretsInsert,
@ -885,20 +830,5 @@ declare module "knex/types/tables" {
TProjectTemplatesUpdate
>;
[TableName.TotpConfig]: KnexOriginal.CompositeTableType<TTotpConfigs, TTotpConfigsInsert, TTotpConfigsUpdate>;
[TableName.ProjectSplitBackfillIds]: KnexOriginal.CompositeTableType<
TProjectSplitBackfillIds,
TProjectSplitBackfillIdsInsert,
TProjectSplitBackfillIdsUpdate
>;
[TableName.ResourceMetadata]: KnexOriginal.CompositeTableType<
TResourceMetadata,
TResourceMetadataInsert,
TResourceMetadataUpdate
>;
[TableName.AppConnection]: KnexOriginal.CompositeTableType<
TAppConnections,
TAppConnectionsInsert,
TAppConnectionsUpdate
>;
}
}

View File

@ -1,23 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const hasProjectDescription = await knex.schema.hasColumn(TableName.Project, "description");
if (!hasProjectDescription) {
await knex.schema.alterTable(TableName.Project, (t) => {
t.string("description");
});
}
}
export async function down(knex: Knex): Promise<void> {
const hasProjectDescription = await knex.schema.hasColumn(TableName.Project, "description");
if (hasProjectDescription) {
await knex.schema.alterTable(TableName.Project, (t) => {
t.dropColumn("description");
});
}
}

View File

@ -0,0 +1,25 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const hasIdentityIdColumn = await knex.schema.hasColumn(TableName.IdentityAccessToken, "identityId");
const hasAuthMethodColumn = await knex.schema.hasColumn(TableName.IdentityAccessToken, "authMethod");
await knex.schema.alterTable(TableName.IdentityAccessToken, (t) => {
if (hasIdentityIdColumn && hasAuthMethodColumn) {
t.index(["authMethod", "identityId"]);
}
});
}
export async function down(knex: Knex): Promise<void> {
const hasIdentityIdColumn = await knex.schema.hasColumn(TableName.IdentityAccessToken, "identityId");
const hasAuthMethodColumn = await knex.schema.hasColumn(TableName.IdentityAccessToken, "authMethod");
await knex.schema.alterTable(TableName.IdentityAccessToken, (t) => {
if (hasIdentityIdColumn && hasAuthMethodColumn) {
t.dropIndex(["authMethod", "identityId"]);
}
});
}
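
For context, a composite index on (authMethod, identityId) typically serves lookups that filter on both columns at once. A hypothetical knex query of that shape; the table and column names come from the migration above, while the function itself is only an illustration:

```ts
import { Knex } from "knex";

import { TableName } from "../schemas";

// The access pattern the new (authMethod, identityId) index accelerates:
// fetching the access tokens issued to one identity through one auth method.
export async function findIdentityAccessTokens(knex: Knex, identityId: string, authMethod: string) {
  return knex(TableName.IdentityAccessToken)
    .where({ identityId, authMethod })
    .select("*");
}
```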

View File

@ -1,20 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
if (await knex.schema.hasColumn(TableName.IdentityMetadata, "value")) {
await knex(TableName.IdentityMetadata).whereNull("value").delete();
await knex.schema.alterTable(TableName.IdentityMetadata, (t) => {
t.string("value", 1020).notNullable().alter();
});
}
}
export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasColumn(TableName.IdentityMetadata, "value")) {
await knex.schema.alterTable(TableName.IdentityMetadata, (t) => {
t.string("value", 1020).alter();
});
}
}

View File

@ -1,59 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const hasAccessApprovalPolicyDeletedAtColumn = await knex.schema.hasColumn(
TableName.AccessApprovalPolicy,
"deletedAt"
);
const hasSecretApprovalPolicyDeletedAtColumn = await knex.schema.hasColumn(
TableName.SecretApprovalPolicy,
"deletedAt"
);
if (!hasAccessApprovalPolicyDeletedAtColumn) {
await knex.schema.alterTable(TableName.AccessApprovalPolicy, (t) => {
t.timestamp("deletedAt");
});
}
if (!hasSecretApprovalPolicyDeletedAtColumn) {
await knex.schema.alterTable(TableName.SecretApprovalPolicy, (t) => {
t.timestamp("deletedAt");
});
}
await knex.schema.alterTable(TableName.AccessApprovalRequest, (t) => {
t.dropForeign(["privilegeId"]);
// Add the new foreign key constraint with ON DELETE SET NULL
t.foreign("privilegeId").references("id").inTable(TableName.ProjectUserAdditionalPrivilege).onDelete("SET NULL");
});
}
export async function down(knex: Knex): Promise<void> {
const hasAccessApprovalPolicyDeletedAtColumn = await knex.schema.hasColumn(
TableName.AccessApprovalPolicy,
"deletedAt"
);
const hasSecretApprovalPolicyDeletedAtColumn = await knex.schema.hasColumn(
TableName.SecretApprovalPolicy,
"deletedAt"
);
if (hasAccessApprovalPolicyDeletedAtColumn) {
await knex.schema.alterTable(TableName.AccessApprovalPolicy, (t) => {
t.dropColumn("deletedAt");
});
}
if (hasSecretApprovalPolicyDeletedAtColumn) {
await knex.schema.alterTable(TableName.SecretApprovalPolicy, (t) => {
t.dropColumn("deletedAt");
});
}
await knex.schema.alterTable(TableName.AccessApprovalRequest, (t) => {
t.dropForeign(["privilegeId"]);
t.foreign("privilegeId").references("id").inTable(TableName.ProjectUserAdditionalPrivilege).onDelete("CASCADE");
});
}

View File

@ -1,34 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasTable(TableName.IdentityJwtAuth))) {
await knex.schema.createTable(TableName.IdentityJwtAuth, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.bigInteger("accessTokenTTL").defaultTo(7200).notNullable();
t.bigInteger("accessTokenMaxTTL").defaultTo(7200).notNullable();
t.bigInteger("accessTokenNumUsesLimit").defaultTo(0).notNullable();
t.jsonb("accessTokenTrustedIps").notNullable();
t.uuid("identityId").notNullable().unique();
t.foreign("identityId").references("id").inTable(TableName.Identity).onDelete("CASCADE");
t.string("configurationType").notNullable();
t.string("jwksUrl").notNullable();
t.binary("encryptedJwksCaCert").notNullable();
t.binary("encryptedPublicKeys").notNullable();
t.string("boundIssuer").notNullable();
t.string("boundAudiences").notNullable();
t.jsonb("boundClaims").notNullable();
t.string("boundSubject").notNullable();
t.timestamps(true, true, true);
});
await createOnUpdateTrigger(knex, TableName.IdentityJwtAuth);
}
}
export async function down(knex: Knex): Promise<void> {
await knex.schema.dropTableIfExists(TableName.IdentityJwtAuth);
await dropOnUpdateTrigger(knex, TableName.IdentityJwtAuth);
}

View File

@ -1,19 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
if (await knex.schema.hasColumn(TableName.SecretVersionV2, "folderId")) {
await knex.schema.alterTable(TableName.SecretVersionV2, (t) => {
t.index("folderId");
});
}
}
export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasColumn(TableName.SecretVersionV2, "folderId")) {
await knex.schema.alterTable(TableName.SecretVersionV2, (t) => {
t.dropIndex("folderId");
});
}
}

View File

@ -1,297 +0,0 @@
import slugify from "@sindresorhus/slugify";
import { Knex } from "knex";
import { v4 as uuidV4 } from "uuid";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { ProjectType, TableName } from "../schemas";
/* eslint-disable no-await-in-loop,@typescript-eslint/ban-ts-comment */
const newProject = async (knex: Knex, projectId: string, projectType: ProjectType) => {
const newProjectId = uuidV4();
const project = await knex(TableName.Project).where("id", projectId).first();
await knex(TableName.Project).insert({
...project,
type: projectType,
// @ts-ignore id is required
id: newProjectId,
slug: slugify(`${project?.name}-${alphaNumericNanoId(4)}`)
});
const customRoleMapping: Record<string, string> = {};
const projectCustomRoles = await knex(TableName.ProjectRoles).where("projectId", projectId);
if (projectCustomRoles.length) {
await knex.batchInsert(
TableName.ProjectRoles,
projectCustomRoles.map((el) => {
const id = uuidV4();
customRoleMapping[el.id] = id;
return {
...el,
id,
projectId: newProjectId,
permissions: el.permissions ? JSON.stringify(el.permissions) : el.permissions
};
})
);
}
const groupMembershipMapping: Record<string, string> = {};
const groupMemberships = await knex(TableName.GroupProjectMembership).where("projectId", projectId);
if (groupMemberships.length) {
await knex.batchInsert(
TableName.GroupProjectMembership,
groupMemberships.map((el) => {
const id = uuidV4();
groupMembershipMapping[el.id] = id;
return { ...el, id, projectId: newProjectId };
})
);
}
const groupMembershipRoles = await knex(TableName.GroupProjectMembershipRole).whereIn(
"projectMembershipId",
groupMemberships.map((el) => el.id)
);
if (groupMembershipRoles.length) {
await knex.batchInsert(
TableName.GroupProjectMembershipRole,
groupMembershipRoles.map((el) => {
const id = uuidV4();
const projectMembershipId = groupMembershipMapping[el.projectMembershipId];
const customRoleId = el.customRoleId ? customRoleMapping[el.customRoleId] : el.customRoleId;
return { ...el, id, projectMembershipId, customRoleId };
})
);
}
const identityProjectMembershipMapping: Record<string, string> = {};
const identities = await knex(TableName.IdentityProjectMembership).where("projectId", projectId);
if (identities.length) {
await knex.batchInsert(
TableName.IdentityProjectMembership,
identities.map((el) => {
const id = uuidV4();
identityProjectMembershipMapping[el.id] = id;
return { ...el, id, projectId: newProjectId };
})
);
}
const identitiesRoles = await knex(TableName.IdentityProjectMembershipRole).whereIn(
"projectMembershipId",
identities.map((el) => el.id)
);
if (identitiesRoles.length) {
await knex.batchInsert(
TableName.IdentityProjectMembershipRole,
identitiesRoles.map((el) => {
const id = uuidV4();
const projectMembershipId = identityProjectMembershipMapping[el.projectMembershipId];
const customRoleId = el.customRoleId ? customRoleMapping[el.customRoleId] : el.customRoleId;
return { ...el, id, projectMembershipId, customRoleId };
})
);
}
const projectMembershipMapping: Record<string, string> = {};
const projectUserMembers = await knex(TableName.ProjectMembership).where("projectId", projectId);
if (projectUserMembers.length) {
await knex.batchInsert(
TableName.ProjectMembership,
projectUserMembers.map((el) => {
const id = uuidV4();
projectMembershipMapping[el.id] = id;
return { ...el, id, projectId: newProjectId };
})
);
}
const membershipRoles = await knex(TableName.ProjectUserMembershipRole).whereIn(
"projectMembershipId",
projectUserMembers.map((el) => el.id)
);
if (membershipRoles.length) {
await knex.batchInsert(
TableName.ProjectUserMembershipRole,
membershipRoles.map((el) => {
const id = uuidV4();
const projectMembershipId = projectMembershipMapping[el.projectMembershipId];
const customRoleId = el.customRoleId ? customRoleMapping[el.customRoleId] : el.customRoleId;
return { ...el, id, projectMembershipId, customRoleId };
})
);
}
const kmsKeys = await knex(TableName.KmsKey).where("projectId", projectId).andWhere("isReserved", true);
if (kmsKeys.length) {
await knex.batchInsert(
TableName.KmsKey,
kmsKeys.map((el) => {
const id = uuidV4();
const slug = slugify(alphaNumericNanoId(8).toLowerCase());
return { ...el, id, slug, projectId: newProjectId };
})
);
}
const projectBot = await knex(TableName.ProjectBot).where("projectId", projectId).first();
if (projectBot) {
const newProjectBot = { ...projectBot, id: uuidV4(), projectId: newProjectId };
await knex(TableName.ProjectBot).insert(newProjectBot);
}
const projectKeys = await knex(TableName.ProjectKeys).where("projectId", projectId);
if (projectKeys.length) {
await knex.batchInsert(
TableName.ProjectKeys,
projectKeys.map((el) => {
const id = uuidV4();
return { ...el, id, projectId: newProjectId };
})
);
}
return newProjectId;
};
const BATCH_SIZE = 500;
export async function up(knex: Knex): Promise<void> {
const hasSplitMappingTable = await knex.schema.hasTable(TableName.ProjectSplitBackfillIds);
if (!hasSplitMappingTable) {
await knex.schema.createTable(TableName.ProjectSplitBackfillIds, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.string("sourceProjectId", 36).notNullable();
t.foreign("sourceProjectId").references("id").inTable(TableName.Project).onDelete("CASCADE");
t.string("destinationProjectType").notNullable();
t.string("destinationProjectId", 36).notNullable();
t.foreign("destinationProjectId").references("id").inTable(TableName.Project).onDelete("CASCADE");
});
}
const hasTypeColumn = await knex.schema.hasColumn(TableName.Project, "type");
if (!hasTypeColumn) {
await knex.schema.alterTable(TableName.Project, (t) => {
t.string("type");
});
let projectsToBeTyped;
do {
// eslint-disable-next-line no-await-in-loop
projectsToBeTyped = await knex(TableName.Project).whereNull("type").limit(BATCH_SIZE).select("id");
if (projectsToBeTyped.length) {
// eslint-disable-next-line no-await-in-loop
await knex(TableName.Project)
.whereIn(
"id",
projectsToBeTyped.map((el) => el.id)
)
.update({ type: ProjectType.SecretManager });
}
} while (projectsToBeTyped.length > 0);
const projectsWithCertificates = await knex(TableName.CertificateAuthority)
.distinct("projectId")
.select("projectId");
/* eslint-disable no-await-in-loop,no-param-reassign */
for (const { projectId } of projectsWithCertificates) {
const newProjectId = await newProject(knex, projectId, ProjectType.CertificateManager);
await knex(TableName.CertificateAuthority).where("projectId", projectId).update({ projectId: newProjectId });
await knex(TableName.PkiAlert).where("projectId", projectId).update({ projectId: newProjectId });
await knex(TableName.PkiCollection).where("projectId", projectId).update({ projectId: newProjectId });
await knex(TableName.ProjectSplitBackfillIds).insert({
sourceProjectId: projectId,
destinationProjectType: ProjectType.CertificateManager,
destinationProjectId: newProjectId
});
}
const projectsWithCmek = await knex(TableName.KmsKey)
.where("isReserved", false)
.whereNotNull("projectId")
.distinct("projectId")
.select("projectId");
for (const { projectId } of projectsWithCmek) {
if (projectId) {
const newProjectId = await newProject(knex, projectId, ProjectType.KMS);
await knex(TableName.KmsKey)
.where({
isReserved: false,
projectId
})
.update({ projectId: newProjectId });
await knex(TableName.ProjectSplitBackfillIds).insert({
sourceProjectId: projectId,
destinationProjectType: ProjectType.KMS,
destinationProjectId: newProjectId
});
}
}
/* eslint-enable */
await knex.schema.alterTable(TableName.Project, (t) => {
t.string("type").notNullable().alter();
});
}
}
export async function down(knex: Knex): Promise<void> {
const hasTypeColumn = await knex.schema.hasColumn(TableName.Project, "type");
const hasSplitMappingTable = await knex.schema.hasTable(TableName.ProjectSplitBackfillIds);
if (hasTypeColumn && hasSplitMappingTable) {
const splitProjectMappings = await knex(TableName.ProjectSplitBackfillIds).where({});
const certMapping = splitProjectMappings.filter(
(el) => el.destinationProjectType === ProjectType.CertificateManager
);
/* eslint-disable no-await-in-loop */
for (const project of certMapping) {
await knex(TableName.CertificateAuthority)
.where("projectId", project.destinationProjectId)
.update({ projectId: project.sourceProjectId });
await knex(TableName.PkiAlert)
.where("projectId", project.destinationProjectId)
.update({ projectId: project.sourceProjectId });
await knex(TableName.PkiCollection)
.where("projectId", project.destinationProjectId)
.update({ projectId: project.sourceProjectId });
}
/* eslint-enable */
const kmsMapping = splitProjectMappings.filter((el) => el.destinationProjectType === ProjectType.KMS);
/* eslint-disable no-await-in-loop */
for (const project of kmsMapping) {
await knex(TableName.KmsKey)
.where({
isReserved: false,
projectId: project.destinationProjectId
})
.update({ projectId: project.sourceProjectId });
}
/* eslint-enable */
await knex(TableName.ProjectMembership)
.whereIn(
"projectId",
splitProjectMappings.map((el) => el.destinationProjectId)
)
.delete();
await knex(TableName.ProjectRoles)
.whereIn(
"projectId",
splitProjectMappings.map((el) => el.destinationProjectId)
)
.delete();
await knex(TableName.Project)
.whereIn(
"id",
splitProjectMappings.map((el) => el.destinationProjectId)
)
.delete();
await knex.schema.alterTable(TableName.Project, (t) => {
t.dropColumn("type");
});
}
if (hasSplitMappingTable) {
await knex.schema.dropTableIfExists(TableName.ProjectSplitBackfillIds);
}
}

View File

@ -1,99 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasTable(TableName.SshCertificateAuthority))) {
await knex.schema.createTable(TableName.SshCertificateAuthority, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.timestamps(true, true, true);
t.string("projectId").notNullable();
t.foreign("projectId").references("id").inTable(TableName.Project).onDelete("CASCADE");
t.string("status").notNullable(); // active / disabled
t.string("friendlyName").notNullable();
t.string("keyAlgorithm").notNullable();
});
await createOnUpdateTrigger(knex, TableName.SshCertificateAuthority);
}
if (!(await knex.schema.hasTable(TableName.SshCertificateAuthoritySecret))) {
await knex.schema.createTable(TableName.SshCertificateAuthoritySecret, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.timestamps(true, true, true);
t.uuid("sshCaId").notNullable().unique();
t.foreign("sshCaId").references("id").inTable(TableName.SshCertificateAuthority).onDelete("CASCADE");
t.binary("encryptedPrivateKey").notNullable();
});
await createOnUpdateTrigger(knex, TableName.SshCertificateAuthoritySecret);
}
if (!(await knex.schema.hasTable(TableName.SshCertificateTemplate))) {
await knex.schema.createTable(TableName.SshCertificateTemplate, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.timestamps(true, true, true);
t.uuid("sshCaId").notNullable();
t.foreign("sshCaId").references("id").inTable(TableName.SshCertificateAuthority).onDelete("CASCADE");
t.string("status").notNullable(); // active / disabled
t.string("name").notNullable();
t.string("ttl").notNullable();
t.string("maxTTL").notNullable();
t.specificType("allowedUsers", "text[]").notNullable();
t.specificType("allowedHosts", "text[]").notNullable();
t.boolean("allowUserCertificates").notNullable();
t.boolean("allowHostCertificates").notNullable();
t.boolean("allowCustomKeyIds").notNullable();
});
await createOnUpdateTrigger(knex, TableName.SshCertificateTemplate);
}
if (!(await knex.schema.hasTable(TableName.SshCertificate))) {
await knex.schema.createTable(TableName.SshCertificate, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.timestamps(true, true, true);
t.uuid("sshCaId").notNullable();
t.foreign("sshCaId").references("id").inTable(TableName.SshCertificateAuthority).onDelete("SET NULL");
t.uuid("sshCertificateTemplateId");
t.foreign("sshCertificateTemplateId")
.references("id")
.inTable(TableName.SshCertificateTemplate)
.onDelete("SET NULL");
t.string("serialNumber").notNullable().unique();
t.string("certType").notNullable(); // user or host
t.specificType("principals", "text[]").notNullable();
t.string("keyId").notNullable();
t.datetime("notBefore").notNullable();
t.datetime("notAfter").notNullable();
});
await createOnUpdateTrigger(knex, TableName.SshCertificate);
}
if (!(await knex.schema.hasTable(TableName.SshCertificateBody))) {
await knex.schema.createTable(TableName.SshCertificateBody, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.timestamps(true, true, true);
t.uuid("sshCertId").notNullable().unique();
t.foreign("sshCertId").references("id").inTable(TableName.SshCertificate).onDelete("CASCADE");
t.binary("encryptedCertificate").notNullable();
});
await createOnUpdateTrigger(knex, TableName.SshCertificateBody);
}
}
export async function down(knex: Knex): Promise<void> {
await knex.schema.dropTableIfExists(TableName.SshCertificateBody);
await dropOnUpdateTrigger(knex, TableName.SshCertificateBody);
await knex.schema.dropTableIfExists(TableName.SshCertificate);
await dropOnUpdateTrigger(knex, TableName.SshCertificate);
await knex.schema.dropTableIfExists(TableName.SshCertificateTemplate);
await dropOnUpdateTrigger(knex, TableName.SshCertificateTemplate);
await knex.schema.dropTableIfExists(TableName.SshCertificateAuthoritySecret);
await dropOnUpdateTrigger(knex, TableName.SshCertificateAuthoritySecret);
await knex.schema.dropTableIfExists(TableName.SshCertificateAuthority);
await dropOnUpdateTrigger(knex, TableName.SshCertificateAuthority);
}

View File

@ -1,40 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasTable(TableName.ResourceMetadata))) {
await knex.schema.createTable(TableName.ResourceMetadata, (tb) => {
tb.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
tb.string("key").notNullable();
tb.string("value", 1020).notNullable();
tb.uuid("orgId").notNullable();
tb.foreign("orgId").references("id").inTable(TableName.Organization).onDelete("CASCADE");
tb.uuid("userId");
tb.foreign("userId").references("id").inTable(TableName.Users).onDelete("CASCADE");
tb.uuid("identityId");
tb.foreign("identityId").references("id").inTable(TableName.Identity).onDelete("CASCADE");
tb.uuid("secretId");
tb.foreign("secretId").references("id").inTable(TableName.SecretV2).onDelete("CASCADE");
tb.timestamps(true, true, true);
});
}
const hasSecretMetadataField = await knex.schema.hasColumn(TableName.SecretApprovalRequestSecretV2, "secretMetadata");
if (!hasSecretMetadataField) {
await knex.schema.alterTable(TableName.SecretApprovalRequestSecretV2, (t) => {
t.jsonb("secretMetadata");
});
}
}
export async function down(knex: Knex): Promise<void> {
await knex.schema.dropTableIfExists(TableName.ResourceMetadata);
const hasSecretMetadataField = await knex.schema.hasColumn(TableName.SecretApprovalRequestSecretV2, "secretMetadata");
if (hasSecretMetadataField) {
await knex.schema.alterTable(TableName.SecretApprovalRequestSecretV2, (t) => {
t.dropColumn("secretMetadata");
});
}
}

View File

@ -1,28 +0,0 @@
import { Knex } from "knex";
import { TableName } from "@app/db/schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "@app/db/utils";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasTable(TableName.AppConnection))) {
await knex.schema.createTable(TableName.AppConnection, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.string("name", 32).notNullable();
t.string("description");
t.string("app").notNullable();
t.string("method").notNullable();
t.binary("encryptedCredentials").notNullable();
t.integer("version").defaultTo(1).notNullable();
t.uuid("orgId").notNullable();
t.foreign("orgId").references("id").inTable(TableName.Organization).onDelete("CASCADE");
t.timestamps(true, true, true);
});
await createOnUpdateTrigger(knex, TableName.AppConnection);
}
}
export async function down(knex: Knex): Promise<void> {
await knex.schema.dropTableIfExists(TableName.AppConnection);
await dropOnUpdateTrigger(knex, TableName.AppConnection);
}

View File

@ -15,8 +15,7 @@ export const AccessApprovalPoliciesSchema = z.object({
envId: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date(),
enforcementLevel: z.string().default("hard"),
deletedAt: z.date().nullable().optional()
enforcementLevel: z.string().default("hard")
});
export type TAccessApprovalPolicies = z.infer<typeof AccessApprovalPoliciesSchema>;

View File

@ -1,27 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { zodBuffer } from "@app/lib/zod";
import { TImmutableDBKeys } from "./models";
export const AppConnectionsSchema = z.object({
id: z.string().uuid(),
name: z.string(),
description: z.string().nullable().optional(),
app: z.string(),
method: z.string(),
encryptedCredentials: zodBuffer,
version: z.number().default(1),
orgId: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date()
});
export type TAppConnections = z.infer<typeof AppConnectionsSchema>;
export type TAppConnectionsInsert = Omit<z.input<typeof AppConnectionsSchema>, TImmutableDBKeys>;
export type TAppConnectionsUpdate = Partial<Omit<z.input<typeof AppConnectionsSchema>, TImmutableDBKeys>>;

View File

@ -1,33 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { zodBuffer } from "@app/lib/zod";
import { TImmutableDBKeys } from "./models";
export const IdentityJwtAuthsSchema = z.object({
id: z.string().uuid(),
accessTokenTTL: z.coerce.number().default(7200),
accessTokenMaxTTL: z.coerce.number().default(7200),
accessTokenNumUsesLimit: z.coerce.number().default(0),
accessTokenTrustedIps: z.unknown(),
identityId: z.string().uuid(),
configurationType: z.string(),
jwksUrl: z.string(),
encryptedJwksCaCert: zodBuffer,
encryptedPublicKeys: zodBuffer,
boundIssuer: z.string(),
boundAudiences: z.string(),
boundClaims: z.unknown(),
boundSubject: z.string(),
createdAt: z.date(),
updatedAt: z.date()
});
export type TIdentityJwtAuths = z.infer<typeof IdentityJwtAuthsSchema>;
export type TIdentityJwtAuthsInsert = Omit<z.input<typeof IdentityJwtAuthsSchema>, TImmutableDBKeys>;
export type TIdentityJwtAuthsUpdate = Partial<Omit<z.input<typeof IdentityJwtAuthsSchema>, TImmutableDBKeys>>;

View File

@ -30,7 +30,6 @@ export * from "./identity-access-tokens";
export * from "./identity-aws-auths";
export * from "./identity-azure-auths";
export * from "./identity-gcp-auths";
export * from "./identity-jwt-auths";
export * from "./identity-kubernetes-auths";
export * from "./identity-metadata";
export * from "./identity-oidc-auths";
@ -65,13 +64,11 @@ export * from "./project-keys";
export * from "./project-memberships";
export * from "./project-roles";
export * from "./project-slack-configs";
export * from "./project-split-backfill-ids";
export * from "./project-templates";
export * from "./project-user-additional-privilege";
export * from "./project-user-membership-roles";
export * from "./projects";
export * from "./rate-limit";
export * from "./resource-metadata";
export * from "./saml-configs";
export * from "./scim-tokens";
export * from "./secret-approval-policies";
@ -108,11 +105,6 @@ export * from "./secrets";
export * from "./secrets-v2";
export * from "./service-tokens";
export * from "./slack-integrations";
export * from "./ssh-certificate-authorities";
export * from "./ssh-certificate-authority-secrets";
export * from "./ssh-certificate-bodies";
export * from "./ssh-certificate-templates";
export * from "./ssh-certificates";
export * from "./super-admin";
export * from "./totp-configs";
export * from "./trusted-ips";

View File

@ -12,7 +12,7 @@ import { TImmutableDBKeys } from "./models";
export const KmsRootConfigSchema = z.object({
id: z.string().uuid(),
encryptedRootKey: zodBuffer,
encryptionStrategy: z.string().default("SOFTWARE").nullable().optional(),
encryptionStrategy: z.string(),
createdAt: z.date(),
updatedAt: z.date()
});

View File

@ -2,11 +2,6 @@ import { z } from "zod";
export enum TableName {
Users = "users",
SshCertificateAuthority = "ssh_certificate_authorities",
SshCertificateAuthoritySecret = "ssh_certificate_authority_secrets",
SshCertificateTemplate = "ssh_certificate_templates",
SshCertificate = "ssh_certificates",
SshCertificateBody = "ssh_certificate_bodies",
CertificateAuthority = "certificate_authorities",
CertificateTemplateEstConfig = "certificate_template_est_configs",
CertificateAuthorityCert = "certificate_authority_certs",
@ -73,14 +68,12 @@ export enum TableName {
IdentityUaClientSecret = "identity_ua_client_secrets",
IdentityAwsAuth = "identity_aws_auths",
IdentityOidcAuth = "identity_oidc_auths",
IdentityJwtAuth = "identity_jwt_auths",
IdentityOrgMembership = "identity_org_memberships",
IdentityProjectMembership = "identity_project_memberships",
IdentityProjectMembershipRole = "identity_project_membership_role",
IdentityProjectAdditionalPrivilege = "identity_project_additional_privilege",
// used by both identity and users
IdentityMetadata = "identity_metadata",
ResourceMetadata = "resource_metadata",
ScimToken = "scim_tokens",
AccessApprovalPolicy = "access_approval_policies",
AccessApprovalPolicyApprover = "access_approval_policies_approvers",
@ -112,7 +105,6 @@ export enum TableName {
SecretApprovalRequestSecretV2 = "secret_approval_requests_secrets_v2",
SecretApprovalRequestSecretTagV2 = "secret_approval_request_secret_tags_v2",
SnapshotSecretV2 = "secret_snapshot_secrets_v2",
ProjectSplitBackfillIds = "project_split_backfill_ids",
// junction tables with tags
SecretV2JnTag = "secret_v2_tag_junction",
JnSecretTag = "secret_tag_junction",
@ -130,8 +122,7 @@ export enum TableName {
KmsKeyVersion = "kms_key_versions",
WorkflowIntegrations = "workflow_integrations",
SlackIntegrations = "slack_integrations",
ProjectSlackConfigs = "project_slack_configs",
AppConnection = "app_connections"
ProjectSlackConfigs = "project_slack_configs"
}
export type TImmutableDBKeys = "id" | "createdAt" | "updatedAt";
@ -205,13 +196,5 @@ export enum IdentityAuthMethod {
GCP_AUTH = "gcp-auth",
AWS_AUTH = "aws-auth",
AZURE_AUTH = "azure-auth",
OIDC_AUTH = "oidc-auth",
JWT_AUTH = "jwt-auth"
}
export enum ProjectType {
SecretManager = "secret-manager",
CertificateManager = "cert-manager",
KMS = "kms",
SSH = "ssh"
OIDC_AUTH = "oidc-auth"
}

View File

@ -1,21 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const ProjectSplitBackfillIdsSchema = z.object({
id: z.string().uuid(),
sourceProjectId: z.string(),
destinationProjectType: z.string(),
destinationProjectId: z.string()
});
export type TProjectSplitBackfillIds = z.infer<typeof ProjectSplitBackfillIdsSchema>;
export type TProjectSplitBackfillIdsInsert = Omit<z.input<typeof ProjectSplitBackfillIdsSchema>, TImmutableDBKeys>;
export type TProjectSplitBackfillIdsUpdate = Partial<
Omit<z.input<typeof ProjectSplitBackfillIdsSchema>, TImmutableDBKeys>
>;

View File

@ -23,9 +23,7 @@ export const ProjectsSchema = z.object({
kmsCertificateKeyId: z.string().uuid().nullable().optional(),
auditLogsRetentionDays: z.number().nullable().optional(),
kmsSecretManagerKeyId: z.string().uuid().nullable().optional(),
kmsSecretManagerEncryptedDataKey: zodBuffer.nullable().optional(),
description: z.string().nullable().optional(),
type: z.string()
kmsSecretManagerEncryptedDataKey: zodBuffer.nullable().optional()
});
export type TProjects = z.infer<typeof ProjectsSchema>;

View File

@ -1,24 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const ResourceMetadataSchema = z.object({
id: z.string().uuid(),
key: z.string(),
value: z.string(),
orgId: z.string().uuid(),
userId: z.string().uuid().nullable().optional(),
identityId: z.string().uuid().nullable().optional(),
secretId: z.string().uuid().nullable().optional(),
createdAt: z.date(),
updatedAt: z.date()
});
export type TResourceMetadata = z.infer<typeof ResourceMetadataSchema>;
export type TResourceMetadataInsert = Omit<z.input<typeof ResourceMetadataSchema>, TImmutableDBKeys>;
export type TResourceMetadataUpdate = Partial<Omit<z.input<typeof ResourceMetadataSchema>, TImmutableDBKeys>>;

View File

@ -15,8 +15,7 @@ export const SecretApprovalPoliciesSchema = z.object({
envId: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date(),
enforcementLevel: z.string().default("hard"),
deletedAt: z.date().nullable().optional()
enforcementLevel: z.string().default("hard")
});
export type TSecretApprovalPolicies = z.infer<typeof SecretApprovalPoliciesSchema>;

View File

@ -24,8 +24,7 @@ export const SecretApprovalRequestsSecretsV2Schema = z.object({
requestId: z.string().uuid(),
op: z.string(),
secretId: z.string().uuid().nullable().optional(),
secretVersion: z.string().uuid().nullable().optional(),
secretMetadata: z.unknown().nullable().optional()
secretVersion: z.string().uuid().nullable().optional()
});
export type TSecretApprovalRequestsSecretsV2 = z.infer<typeof SecretApprovalRequestsSecretsV2Schema>;

View File

@ -1,24 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const SshCertificateAuthoritiesSchema = z.object({
id: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date(),
projectId: z.string(),
status: z.string(),
friendlyName: z.string(),
keyAlgorithm: z.string()
});
export type TSshCertificateAuthorities = z.infer<typeof SshCertificateAuthoritiesSchema>;
export type TSshCertificateAuthoritiesInsert = Omit<z.input<typeof SshCertificateAuthoritiesSchema>, TImmutableDBKeys>;
export type TSshCertificateAuthoritiesUpdate = Partial<
Omit<z.input<typeof SshCertificateAuthoritiesSchema>, TImmutableDBKeys>
>;

View File

@ -1,27 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { zodBuffer } from "@app/lib/zod";
import { TImmutableDBKeys } from "./models";
export const SshCertificateAuthoritySecretsSchema = z.object({
id: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date(),
sshCaId: z.string().uuid(),
encryptedPrivateKey: zodBuffer
});
export type TSshCertificateAuthoritySecrets = z.infer<typeof SshCertificateAuthoritySecretsSchema>;
export type TSshCertificateAuthoritySecretsInsert = Omit<
z.input<typeof SshCertificateAuthoritySecretsSchema>,
TImmutableDBKeys
>;
export type TSshCertificateAuthoritySecretsUpdate = Partial<
Omit<z.input<typeof SshCertificateAuthoritySecretsSchema>, TImmutableDBKeys>
>;

View File

@ -1,22 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { zodBuffer } from "@app/lib/zod";
import { TImmutableDBKeys } from "./models";
export const SshCertificateBodiesSchema = z.object({
id: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date(),
sshCertId: z.string().uuid(),
encryptedCertificate: zodBuffer
});
export type TSshCertificateBodies = z.infer<typeof SshCertificateBodiesSchema>;
export type TSshCertificateBodiesInsert = Omit<z.input<typeof SshCertificateBodiesSchema>, TImmutableDBKeys>;
export type TSshCertificateBodiesUpdate = Partial<Omit<z.input<typeof SshCertificateBodiesSchema>, TImmutableDBKeys>>;

View File

@ -1,30 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const SshCertificateTemplatesSchema = z.object({
id: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date(),
sshCaId: z.string().uuid(),
status: z.string(),
name: z.string(),
ttl: z.string(),
maxTTL: z.string(),
allowedUsers: z.string().array(),
allowedHosts: z.string().array(),
allowUserCertificates: z.boolean(),
allowHostCertificates: z.boolean(),
allowCustomKeyIds: z.boolean()
});
export type TSshCertificateTemplates = z.infer<typeof SshCertificateTemplatesSchema>;
export type TSshCertificateTemplatesInsert = Omit<z.input<typeof SshCertificateTemplatesSchema>, TImmutableDBKeys>;
export type TSshCertificateTemplatesUpdate = Partial<
Omit<z.input<typeof SshCertificateTemplatesSchema>, TImmutableDBKeys>
>;

View File

@ -1,26 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const SshCertificatesSchema = z.object({
id: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date(),
sshCaId: z.string().uuid(),
sshCertificateTemplateId: z.string().uuid().nullable().optional(),
serialNumber: z.string(),
certType: z.string(),
principals: z.string().array(),
keyId: z.string(),
notBefore: z.date(),
notAfter: z.date()
});
export type TSshCertificates = z.infer<typeof SshCertificatesSchema>;
export type TSshCertificatesInsert = Omit<z.input<typeof SshCertificatesSchema>, TImmutableDBKeys>;
export type TSshCertificatesUpdate = Partial<Omit<z.input<typeof SshCertificatesSchema>, TImmutableDBKeys>>;

View File

@ -4,7 +4,7 @@ import { Knex } from "knex";
import { encryptSymmetric128BitHexKeyUTF8 } from "@app/lib/crypto";
import { ProjectMembershipRole, ProjectType, SecretEncryptionAlgo, SecretKeyEncoding, TableName } from "../schemas";
import { ProjectMembershipRole, SecretEncryptionAlgo, SecretKeyEncoding, TableName } from "../schemas";
import { buildUserProjectKey, getUserPrivateKey, seedData1 } from "../seed-data";
export const DEFAULT_PROJECT_ENVS = [
@ -24,7 +24,6 @@ export async function seed(knex: Knex): Promise<void> {
name: seedData1.project.name,
orgId: seedData1.organization.id,
slug: "first-project",
type: ProjectType.SecretManager,
// eslint-disable-next-line
// @ts-ignore
id: seedData1.project.id

View File

@ -1,6 +1,6 @@
import { Knex } from "knex";
import { ProjectMembershipRole, ProjectType, ProjectVersion, TableName } from "../schemas";
import { ProjectMembershipRole, ProjectVersion, TableName } from "../schemas";
import { seedData1 } from "../seed-data";
export const DEFAULT_PROJECT_ENVS = [
@ -16,7 +16,6 @@ export async function seed(knex: Knex): Promise<void> {
orgId: seedData1.organization.id,
slug: seedData1.projectV3.slug,
version: ProjectVersion.V3,
type: ProjectType.SecretManager,
// eslint-disable-next-line
// @ts-ignore
id: seedData1.projectV3.id

View File

@ -109,8 +109,7 @@ export const registerAccessApprovalRequestRouter = async (server: FastifyZodProv
approvers: z.string().array(),
secretPath: z.string().nullish(),
envId: z.string(),
enforcementLevel: z.string(),
deletedAt: z.date().nullish()
enforcementLevel: z.string()
}),
reviewers: z
.object({

View File

@ -1,3 +1,4 @@
import slugify from "@sindresorhus/slugify";
import ms from "ms";
import { z } from "zod";
@ -7,7 +8,6 @@ import { DYNAMIC_SECRETS } from "@app/lib/api-docs";
import { daysToMillisecond } from "@app/lib/dates";
import { removeTrailingSlash } from "@app/lib/fn";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { slugSchema } from "@app/server/lib/schemas";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { SanitizedDynamicSecretSchema } from "@app/server/routes/sanitizedSchemas";
import { AuthMode } from "@app/services/auth/auth-type";
@ -48,7 +48,15 @@ export const registerDynamicSecretRouter = async (server: FastifyZodProvider) =>
.nullable(),
path: z.string().describe(DYNAMIC_SECRETS.CREATE.path).trim().default("/").transform(removeTrailingSlash),
environmentSlug: z.string().describe(DYNAMIC_SECRETS.CREATE.environmentSlug).min(1),
name: slugSchema({ min: 1, max: 64, field: "Name" }).describe(DYNAMIC_SECRETS.CREATE.name)
name: z
.string()
.describe(DYNAMIC_SECRETS.CREATE.name)
.min(1)
.toLowerCase()
.max(64)
.refine((v) => slugify(v) === v, {
message: "Slug must be a valid"
})
}),
response: {
200: z.object({

View File

@ -4,15 +4,9 @@ import { ExternalKmsSchema, KmsKeysSchema } from "@app/db/schemas";
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import {
ExternalKmsAwsSchema,
ExternalKmsGcpCredentialSchema,
ExternalKmsGcpSchema,
ExternalKmsInputSchema,
ExternalKmsInputUpdateSchema,
KmsGcpKeyFetchAuthType,
KmsProviders,
TExternalKmsGcpCredentialSchema
ExternalKmsInputUpdateSchema
} from "@app/ee/services/external-kms/providers/model";
import { NotFoundError } from "@app/lib/errors";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
@ -50,8 +44,7 @@ const sanitizedExternalSchemaForGetById = KmsKeysSchema.extend({
statusDetails: true,
provider: true
}).extend({
// for GCP, we don't return the credential object as it is sensitive data that should not be exposed
providerInput: z.union([ExternalKmsAwsSchema, ExternalKmsGcpSchema.pick({ gcpRegion: true, keyName: true })])
providerInput: ExternalKmsAwsSchema
})
});
@ -293,67 +286,4 @@ export const registerExternalKmsRouter = async (server: FastifyZodProvider) => {
return { externalKms };
}
});
server.route({
method: "POST",
url: "/gcp/keys",
config: {
rateLimit: writeLimit
},
schema: {
body: z.discriminatedUnion("authMethod", [
z.object({
authMethod: z.literal(KmsGcpKeyFetchAuthType.Credential),
region: z.string().trim().min(1),
credential: ExternalKmsGcpCredentialSchema
}),
z.object({
authMethod: z.literal(KmsGcpKeyFetchAuthType.Kms),
region: z.string().trim().min(1),
kmsId: z.string().trim().min(1)
})
]),
response: {
200: z.object({
keys: z.string().array()
})
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const { region, authMethod } = req.body;
let credentialJson: TExternalKmsGcpCredentialSchema | undefined;
if (authMethod === KmsGcpKeyFetchAuthType.Credential) {
credentialJson = req.body.credential;
} else if (authMethod === KmsGcpKeyFetchAuthType.Kms) {
const externalKms = await server.services.externalKms.findById({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
id: req.body.kmsId
});
if (!externalKms || externalKms.external.provider !== KmsProviders.Gcp) {
throw new NotFoundError({ message: "KMS not found or not of type GCP" });
}
credentialJson = externalKms.external.providerInput.credential as TExternalKmsGcpCredentialSchema;
}
if (!credentialJson) {
throw new NotFoundError({
message: "Something went wrong while fetching the GCP credential, please check inputs and try again"
});
}
const results = await server.services.externalKms.fetchGcpKeys({
credential: credentialJson,
gcpRegion: region
});
return results;
}
});
};

View File

@ -1,9 +1,8 @@
import slugify from "@sindresorhus/slugify";
import { z } from "zod";
import { GroupsSchema, OrgMembershipRole, UsersSchema } from "@app/db/schemas";
import { EFilterReturnedUsers } from "@app/ee/services/group/group-types";
import { GROUPS } from "@app/lib/api-docs";
import { slugSchema } from "@app/server/lib/schemas";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
@ -15,7 +14,15 @@ export const registerGroupRouter = async (server: FastifyZodProvider) => {
schema: {
body: z.object({
name: z.string().trim().min(1).max(50).describe(GROUPS.CREATE.name),
slug: slugSchema({ min: 5, max: 36 }).optional().describe(GROUPS.CREATE.slug),
slug: z
.string()
.min(5)
.max(36)
.refine((v) => slugify(v) === v, {
message: "Slug must be a valid slug"
})
.optional()
.describe(GROUPS.CREATE.slug),
role: z.string().trim().min(1).default(OrgMembershipRole.NoAccess).describe(GROUPS.CREATE.role)
}),
response: {
@ -93,7 +100,14 @@ export const registerGroupRouter = async (server: FastifyZodProvider) => {
body: z
.object({
name: z.string().trim().min(1).describe(GROUPS.UPDATE.name),
slug: slugSchema({ min: 5, max: 36 }).describe(GROUPS.UPDATE.slug),
slug: z
.string()
.min(5)
.max(36)
.refine((v) => slugify(v) === v, {
message: "Slug must be a valid slug"
})
.describe(GROUPS.UPDATE.slug),
role: z.string().trim().min(1).describe(GROUPS.UPDATE.role)
})
.partial(),
@ -152,8 +166,7 @@ export const registerGroupRouter = async (server: FastifyZodProvider) => {
offset: z.coerce.number().min(0).max(100).default(0).describe(GROUPS.LIST_USERS.offset),
limit: z.coerce.number().min(1).max(100).default(10).describe(GROUPS.LIST_USERS.limit),
username: z.string().trim().optional().describe(GROUPS.LIST_USERS.username),
search: z.string().trim().optional().describe(GROUPS.LIST_USERS.search),
filter: z.nativeEnum(EFilterReturnedUsers).optional().describe(GROUPS.LIST_USERS.filterUsers)
search: z.string().trim().optional().describe(GROUPS.LIST_USERS.search)
}),
response: {
200: z.object({
@ -166,8 +179,7 @@ export const registerGroupRouter = async (server: FastifyZodProvider) => {
})
.merge(
z.object({
isPartOfGroup: z.boolean(),
joinedGroupAt: z.date().nullable()
isPartOfGroup: z.boolean()
})
)
.array(),

View File

@ -8,7 +8,6 @@ import { IDENTITY_ADDITIONAL_PRIVILEGE } from "@app/lib/api-docs";
import { UnauthorizedError } from "@app/lib/errors";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { slugSchema } from "@app/server/lib/schemas";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import {
ProjectPermissionSchema,
@ -34,7 +33,17 @@ export const registerIdentityProjectAdditionalPrivilegeRouter = async (server: F
body: z.object({
identityId: z.string().min(1).describe(IDENTITY_ADDITIONAL_PRIVILEGE.CREATE.identityId),
projectSlug: z.string().min(1).describe(IDENTITY_ADDITIONAL_PRIVILEGE.CREATE.projectSlug),
slug: slugSchema({ min: 1, max: 60 }).optional().describe(IDENTITY_ADDITIONAL_PRIVILEGE.CREATE.slug),
slug: z
.string()
.min(1)
.max(60)
.trim()
.refine((val) => val.toLowerCase() === val, "Must be lowercase")
.refine((v) => slugify(v) === v, {
message: "Slug must be a valid slug"
})
.optional()
.describe(IDENTITY_ADDITIONAL_PRIVILEGE.CREATE.slug),
permissions: ProjectPermissionSchema.array()
.describe(IDENTITY_ADDITIONAL_PRIVILEGE.CREATE.permissions)
.optional(),
@ -68,7 +77,7 @@ export const registerIdentityProjectAdditionalPrivilegeRouter = async (server: F
actorOrgId: req.permission.orgId,
actorAuthMethod: req.permission.authMethod,
...req.body,
slug: req.body.slug ?? slugify(alphaNumericNanoId(12)),
slug: req.body.slug ? slugify(req.body.slug) : slugify(alphaNumericNanoId(12)),
isTemporary: false,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore-error this is valid ts
@ -94,7 +103,17 @@ export const registerIdentityProjectAdditionalPrivilegeRouter = async (server: F
body: z.object({
identityId: z.string().min(1).describe(IDENTITY_ADDITIONAL_PRIVILEGE.CREATE.identityId),
projectSlug: z.string().min(1).describe(IDENTITY_ADDITIONAL_PRIVILEGE.CREATE.projectSlug),
slug: slugSchema({ min: 1, max: 60 }).optional().describe(IDENTITY_ADDITIONAL_PRIVILEGE.CREATE.slug),
slug: z
.string()
.min(1)
.max(60)
.trim()
.refine((val) => val.toLowerCase() === val, "Must be lowercase")
.refine((v) => slugify(v) === v, {
message: "Slug must be a valid slug"
})
.optional()
.describe(IDENTITY_ADDITIONAL_PRIVILEGE.CREATE.slug),
permissions: ProjectPermissionSchema.array()
.describe(IDENTITY_ADDITIONAL_PRIVILEGE.CREATE.permissions)
.optional(),
@ -140,7 +159,7 @@ export const registerIdentityProjectAdditionalPrivilegeRouter = async (server: F
actorOrgId: req.permission.orgId,
actorAuthMethod: req.permission.authMethod,
...req.body,
slug: req.body.slug ?? slugify(alphaNumericNanoId(12)),
slug: req.body.slug ? slugify(req.body.slug) : slugify(alphaNumericNanoId(12)),
isTemporary: true,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore-error this is valid ts
@ -170,7 +189,16 @@ export const registerIdentityProjectAdditionalPrivilegeRouter = async (server: F
projectSlug: z.string().min(1).describe(IDENTITY_ADDITIONAL_PRIVILEGE.UPDATE.projectSlug),
privilegeDetails: z
.object({
slug: slugSchema({ min: 1, max: 60 }).describe(IDENTITY_ADDITIONAL_PRIVILEGE.UPDATE.newSlug),
slug: z
.string()
.min(1)
.max(60)
.trim()
.refine((val) => val.toLowerCase() === val, "Must be lowercase")
.refine((v) => slugify(v) === v, {
message: "Slug must be a valid slug"
})
.describe(IDENTITY_ADDITIONAL_PRIVILEGE.UPDATE.newSlug),
permissions: ProjectPermissionSchema.array().describe(IDENTITY_ADDITIONAL_PRIVILEGE.UPDATE.permissions),
privilegePermission: ProjectSpecificPrivilegePermissionSchema.describe(
IDENTITY_ADDITIONAL_PRIVILEGE.UPDATE.privilegePermission

View File

@ -22,13 +22,9 @@ import { registerSecretApprovalPolicyRouter } from "./secret-approval-policy-rou
import { registerSecretApprovalRequestRouter } from "./secret-approval-request-router";
import { registerSecretRotationProviderRouter } from "./secret-rotation-provider-router";
import { registerSecretRotationRouter } from "./secret-rotation-router";
import { registerSecretRouter } from "./secret-router";
import { registerSecretScanningRouter } from "./secret-scanning-router";
import { registerSecretVersionRouter } from "./secret-version-router";
import { registerSnapshotRouter } from "./snapshot-router";
import { registerSshCaRouter } from "./ssh-certificate-authority-router";
import { registerSshCertRouter } from "./ssh-certificate-router";
import { registerSshCertificateTemplateRouter } from "./ssh-certificate-template-router";
import { registerTrustedIpRouter } from "./trusted-ip-router";
import { registerUserAdditionalPrivilegeRouter } from "./user-additional-privilege-router";
@ -72,15 +68,6 @@ export const registerV1EERoutes = async (server: FastifyZodProvider) => {
{ prefix: "/pki" }
);
await server.register(
async (sshRouter) => {
await sshRouter.register(registerSshCaRouter, { prefix: "/ca" });
await sshRouter.register(registerSshCertRouter, { prefix: "/certificates" });
await sshRouter.register(registerSshCertificateTemplateRouter, { prefix: "/certificate-templates" });
},
{ prefix: "/ssh" }
);
await server.register(
async (ssoRouter) => {
await ssoRouter.register(registerSamlRouter);
@ -93,7 +80,6 @@ export const registerV1EERoutes = async (server: FastifyZodProvider) => {
await server.register(registerLdapRouter, { prefix: "/ldap" });
await server.register(registerSecretScanningRouter, { prefix: "/secret-scanning" });
await server.register(registerSecretRotationRouter, { prefix: "/secret-rotations" });
await server.register(registerSecretRouter, { prefix: "/secrets" });
await server.register(registerSecretVersionRouter, { prefix: "/secret" });
await server.register(registerGroupRouter, { prefix: "/groups" });
await server.register(registerAuditLogStreamRouter, { prefix: "/audit-log-streams" });

View File

@ -9,6 +9,7 @@
import { Authenticator, Strategy } from "@fastify/passport";
import fastifySession from "@fastify/session";
import RedisStore from "connect-redis";
import { Redis } from "ioredis";
import { z } from "zod";
import { OidcConfigsSchema } from "@app/db/schemas/oidc-configs";
@ -20,6 +21,7 @@ import { AuthMode } from "@app/services/auth/auth-type";
export const registerOidcRouter = async (server: FastifyZodProvider) => {
const appCfg = getConfig();
const redis = new Redis(appCfg.REDIS_URL);
const passport = new Authenticator({ key: "oidc", userProperty: "passportUser" });
/*
@ -28,7 +30,7 @@ export const registerOidcRouter = async (server: FastifyZodProvider) => {
- Fastify session <> Redis structure is based on the ff: https://github.com/fastify/session/blob/master/examples/redis.js
*/
const redisStore = new RedisStore({
client: server.redis,
client: redis,
prefix: "oidc-session:",
ttl: 600 // 10 minutes
});

View File

@ -1,8 +1,8 @@
import slugify from "@sindresorhus/slugify";
import { z } from "zod";
import { OrgMembershipRole, OrgMembershipsSchema, OrgRolesSchema } from "@app/db/schemas";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { slugSchema } from "@app/server/lib/schemas";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
@ -18,12 +18,19 @@ export const registerOrgRoleRouter = async (server: FastifyZodProvider) => {
organizationId: z.string().trim()
}),
body: z.object({
slug: slugSchema({ min: 1, max: 64 }).refine(
slug: z
.string()
.min(1)
.trim()
.refine(
(val) => !Object.values(OrgMembershipRole).includes(val as OrgMembershipRole),
"Please choose a different slug, the slug you have entered is reserved"
),
)
.refine((v) => slugify(v) === v, {
message: "Slug must be a valid"
}),
name: z.string().trim(),
description: z.string().trim().nullish(),
description: z.string().trim().optional(),
permissions: z.any().array()
}),
response: {
@ -87,15 +94,19 @@ export const registerOrgRoleRouter = async (server: FastifyZodProvider) => {
roleId: z.string().trim()
}),
body: z.object({
// TODO: Switch to slugSchema after verifying correct methods with Akhil - Omar 11/24
slug: slugSchema({ min: 1, max: 64 })
slug: z
.string()
.trim()
.optional()
.refine(
(val) => !Object.keys(OrgMembershipRole).includes(val),
(val) => typeof val !== "undefined" && !Object.keys(OrgMembershipRole).includes(val),
"Please choose a different slug, the slug you have entered is reserved."
)
.optional(),
.refine((val) => typeof val === "undefined" || slugify(val) === val, {
message: "Slug must be a valid"
}),
name: z.string().trim().optional(),
description: z.string().trim().nullish(),
description: z.string().trim().optional(),
permissions: z.any().array().optional()
}),
response: {

View File

@ -1,4 +1,5 @@
import { packRules } from "@casl/ability/extra";
import slugify from "@sindresorhus/slugify";
import { z } from "zod";
import { ProjectMembershipRole, ProjectMembershipsSchema, ProjectRolesSchema } from "@app/db/schemas";
@ -8,7 +9,6 @@ import {
} from "@app/ee/services/permission/project-permission";
import { PROJECT_ROLE } from "@app/lib/api-docs";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { slugSchema } from "@app/server/lib/schemas";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { SanitizedRoleSchemaV1 } from "@app/server/routes/sanitizedSchemas";
import { AuthMode } from "@app/services/auth/auth-type";
@ -32,14 +32,21 @@ export const registerProjectRoleRouter = async (server: FastifyZodProvider) => {
projectSlug: z.string().trim().describe(PROJECT_ROLE.CREATE.projectSlug)
}),
body: z.object({
slug: slugSchema({ max: 64 })
slug: z
.string()
.toLowerCase()
.trim()
.min(1)
.refine(
(val) => !Object.values(ProjectMembershipRole).includes(val as ProjectMembershipRole),
"Please choose a different slug, the slug you have entered is reserved"
)
.refine((v) => slugify(v) === v, {
message: "Slug must be a valid"
})
.describe(PROJECT_ROLE.CREATE.slug),
name: z.string().min(1).trim().describe(PROJECT_ROLE.CREATE.name),
description: z.string().trim().nullish().describe(PROJECT_ROLE.CREATE.description),
description: z.string().trim().optional().describe(PROJECT_ROLE.CREATE.description),
permissions: ProjectPermissionV1Schema.array().describe(PROJECT_ROLE.CREATE.permissions)
}),
response: {
@ -87,15 +94,23 @@ export const registerProjectRoleRouter = async (server: FastifyZodProvider) => {
roleId: z.string().trim().describe(PROJECT_ROLE.UPDATE.roleId)
}),
body: z.object({
slug: slugSchema({ max: 64 })
slug: z
.string()
.toLowerCase()
.trim()
.optional()
.describe(PROJECT_ROLE.UPDATE.slug)
.refine(
(val) => !Object.values(ProjectMembershipRole).includes(val as ProjectMembershipRole),
(val) =>
typeof val === "undefined" ||
!Object.values(ProjectMembershipRole).includes(val as ProjectMembershipRole),
"Please choose a different slug, the slug you have entered is reserved"
)
.describe(PROJECT_ROLE.UPDATE.slug)
.optional(),
.refine((val) => typeof val === "undefined" || slugify(val) === val, {
message: "Slug must be a valid"
}),
name: z.string().trim().optional().describe(PROJECT_ROLE.UPDATE.name),
description: z.string().trim().nullish().describe(PROJECT_ROLE.UPDATE.description),
description: z.string().trim().optional().describe(PROJECT_ROLE.UPDATE.description),
permissions: ProjectPermissionV1Schema.array().describe(PROJECT_ROLE.UPDATE.permissions).optional()
}),
response: {

View File

@ -1,3 +1,4 @@
import slugify from "@sindresorhus/slugify";
import { z } from "zod";
import { ProjectMembershipRole, ProjectTemplatesSchema } from "@app/db/schemas";
@ -7,13 +8,22 @@ import { ProjectTemplateDefaultEnvironments } from "@app/ee/services/project-tem
import { isInfisicalProjectTemplate } from "@app/ee/services/project-template/project-template-fns";
import { ProjectTemplates } from "@app/lib/api-docs";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { slugSchema } from "@app/server/lib/schemas";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { UnpackedPermissionSchema } from "@app/server/routes/santizedSchemas/permission";
import { AuthMode } from "@app/services/auth/auth-type";
const MAX_JSON_SIZE_LIMIT_IN_BYTES = 32_768;
const SlugSchema = z
.string()
.trim()
.min(1)
.max(32)
.refine((val) => val.toLowerCase() === val, "Must be lowercase")
.refine((v) => slugify(v) === v, {
message: "Must be valid slug format"
});
const isReservedRoleSlug = (slug: string) =>
Object.values(ProjectMembershipRole).includes(slug as ProjectMembershipRole);
@ -24,14 +34,14 @@ const SanitizedProjectTemplateSchema = ProjectTemplatesSchema.extend({
roles: z
.object({
name: z.string().trim().min(1),
slug: slugSchema(),
slug: SlugSchema,
permissions: UnpackedPermissionSchema.array()
})
.array(),
environments: z
.object({
name: z.string().trim().min(1),
slug: slugSchema(),
slug: SlugSchema,
position: z.number().min(1)
})
.array()
@ -40,7 +50,7 @@ const SanitizedProjectTemplateSchema = ProjectTemplatesSchema.extend({
const ProjectTemplateRolesSchema = z
.object({
name: z.string().trim().min(1),
slug: slugSchema(),
slug: SlugSchema,
permissions: ProjectPermissionV2Schema.array()
})
.array()
@ -68,7 +78,7 @@ const ProjectTemplateRolesSchema = z
const ProjectTemplateEnvironmentsSchema = z
.object({
name: z.string().trim().min(1),
slug: slugSchema(),
slug: SlugSchema,
position: z.number().min(1)
})
.array()
@ -178,11 +188,9 @@ export const registerProjectTemplateRouter = async (server: FastifyZodProvider)
schema: {
description: "Create a project template.",
body: z.object({
name: slugSchema({ field: "name" })
.refine((val) => !isInfisicalProjectTemplate(val), {
name: SlugSchema.refine((val) => !isInfisicalProjectTemplate(val), {
message: `The requested project template name is reserved.`
})
.describe(ProjectTemplates.CREATE.name),
}).describe(ProjectTemplates.CREATE.name),
description: z.string().max(256).trim().optional().describe(ProjectTemplates.CREATE.description),
roles: ProjectTemplateRolesSchema.default([]).describe(ProjectTemplates.CREATE.roles),
environments: ProjectTemplateEnvironmentsSchema.default(ProjectTemplateDefaultEnvironments).describe(
@ -222,8 +230,7 @@ export const registerProjectTemplateRouter = async (server: FastifyZodProvider)
description: "Update a project template.",
params: z.object({ templateId: z.string().uuid().describe(ProjectTemplates.UPDATE.templateId) }),
body: z.object({
name: slugSchema({ field: "name" })
.refine((val) => !isInfisicalProjectTemplate(val), {
name: SlugSchema.refine((val) => !isInfisicalProjectTemplate(val), {
message: `The requested project template name is reserved.`
})
.optional()

View File

@ -84,10 +84,7 @@ export const registerSamlRouter = async (server: FastifyZodProvider) => {
samlConfig.audience = `spn:${ssoConfig.issuer}`;
}
}
if (
ssoConfig.authProvider === SamlProviders.GOOGLE_SAML ||
ssoConfig.authProvider === SamlProviders.AUTH0_SAML
) {
if (ssoConfig.authProvider === SamlProviders.GOOGLE_SAML) {
samlConfig.wantAssertionsSigned = false;
}
@ -126,10 +123,7 @@ export const registerSamlRouter = async (server: FastifyZodProvider) => {
`email: ${email} firstName: ${profile.firstName as string}`
);
throw new BadRequestError({
message:
"Missing email or first name. Please double check your SAML attribute mapping for the selected provider."
});
throw new Error("Invalid saml request. Missing email or first name");
}
const userMetadata = Object.keys(profile.attributes || {})

View File

@ -12,7 +12,6 @@ import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { secretRawSchema } from "@app/server/routes/sanitizedSchemas";
import { AuthMode } from "@app/services/auth/auth-type";
import { ResourceMetadataSchema } from "@app/services/resource-metadata/resource-metadata-schema";
const approvalRequestUser = z.object({ userId: z.string().nullable().optional() }).merge(
UsersSchema.pick({
@ -53,8 +52,7 @@ export const registerSecretApprovalRequestRouter = async (server: FastifyZodProv
})
.array(),
secretPath: z.string().optional().nullable(),
enforcementLevel: z.string(),
deletedAt: z.date().nullish()
enforcementLevel: z.string()
}),
committerUser: approvalRequestUser,
commits: z.object({ op: z.string(), secretId: z.string().nullable().optional() }).array(),
@ -262,8 +260,7 @@ export const registerSecretApprovalRequestRouter = async (server: FastifyZodProv
approvals: z.number(),
approvers: approvalRequestUser.array(),
secretPath: z.string().optional().nullable(),
enforcementLevel: z.string(),
deletedAt: z.date().nullish()
enforcementLevel: z.string()
}),
environment: z.string(),
statusChangedByUser: approvalRequestUser.optional(),
@ -275,7 +272,6 @@ export const registerSecretApprovalRequestRouter = async (server: FastifyZodProv
.extend({
op: z.string(),
tags: tagSchema,
secretMetadata: ResourceMetadataSchema.nullish(),
secret: z
.object({
id: z.string(),
@ -293,8 +289,7 @@ export const registerSecretApprovalRequestRouter = async (server: FastifyZodProv
secretKey: z.string(),
secretValue: z.string().optional(),
secretComment: z.string().optional(),
tags: tagSchema,
secretMetadata: ResourceMetadataSchema.nullish()
tags: tagSchema
})
.optional()
})

View File

@ -1,71 +0,0 @@
import z from "zod";
import { ProjectPermissionActions } from "@app/ee/services/permission/project-permission";
import { RAW_SECRETS } from "@app/lib/api-docs";
import { removeTrailingSlash } from "@app/lib/fn";
import { readLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
const AccessListEntrySchema = z
.object({
allowedActions: z.nativeEnum(ProjectPermissionActions).array(),
id: z.string(),
membershipId: z.string(),
name: z.string()
})
.array();
export const registerSecretRouter = async (server: FastifyZodProvider) => {
server.route({
method: "GET",
url: "/:secretName/access-list",
config: {
rateLimit: readLimit
},
schema: {
description: "Get list of users, machine identities, and groups with access to a secret",
security: [
{
bearerAuth: []
}
],
params: z.object({
secretName: z.string().trim().describe(RAW_SECRETS.GET_ACCESS_LIST.secretName)
}),
querystring: z.object({
workspaceId: z.string().trim().describe(RAW_SECRETS.GET_ACCESS_LIST.workspaceId),
environment: z.string().trim().describe(RAW_SECRETS.GET_ACCESS_LIST.environment),
secretPath: z
.string()
.trim()
.default("/")
.transform(removeTrailingSlash)
.describe(RAW_SECRETS.GET_ACCESS_LIST.secretPath)
}),
response: {
200: z.object({
groups: AccessListEntrySchema,
identities: AccessListEntrySchema,
users: AccessListEntrySchema
})
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const { secretName } = req.params;
const { secretPath, environment, workspaceId: projectId } = req.query;
return server.services.secret.getSecretAccessList({
actorId: req.permission.id,
actor: req.permission.type,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
secretPath,
environment,
projectId,
secretName
});
}
});
};

View File

@ -1,279 +0,0 @@
import { z } from "zod";
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import { sanitizedSshCa } from "@app/ee/services/ssh/ssh-certificate-authority-schema";
import { SshCaStatus } from "@app/ee/services/ssh/ssh-certificate-authority-types";
import { sanitizedSshCertificateTemplate } from "@app/ee/services/ssh-certificate-template/ssh-certificate-template-schema";
import { SSH_CERTIFICATE_AUTHORITIES } from "@app/lib/api-docs";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
import { CertKeyAlgorithm } from "@app/services/certificate/certificate-types";
export const registerSshCaRouter = async (server: FastifyZodProvider) => {
server.route({
method: "POST",
url: "/",
config: {
rateLimit: writeLimit
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
schema: {
description: "Create SSH CA",
body: z.object({
projectId: z.string().describe(SSH_CERTIFICATE_AUTHORITIES.CREATE.projectId),
friendlyName: z.string().describe(SSH_CERTIFICATE_AUTHORITIES.CREATE.friendlyName),
keyAlgorithm: z
.nativeEnum(CertKeyAlgorithm)
.default(CertKeyAlgorithm.RSA_2048)
.describe(SSH_CERTIFICATE_AUTHORITIES.CREATE.keyAlgorithm)
}),
response: {
200: z.object({
ca: sanitizedSshCa.extend({
publicKey: z.string()
})
})
}
},
handler: async (req) => {
const ca = await server.services.sshCertificateAuthority.createSshCa({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
...req.body
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId: ca.projectId,
event: {
type: EventType.CREATE_SSH_CA,
metadata: {
sshCaId: ca.id,
friendlyName: ca.friendlyName
}
}
});
return {
ca
};
}
});
server.route({
method: "GET",
url: "/:sshCaId",
config: {
rateLimit: readLimit
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
schema: {
description: "Get SSH CA",
params: z.object({
sshCaId: z.string().trim().describe(SSH_CERTIFICATE_AUTHORITIES.GET.sshCaId)
}),
response: {
200: z.object({
ca: sanitizedSshCa.extend({
publicKey: z.string()
})
})
}
},
handler: async (req) => {
const ca = await server.services.sshCertificateAuthority.getSshCaById({
caId: req.params.sshCaId,
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId: ca.projectId,
event: {
type: EventType.GET_SSH_CA,
metadata: {
sshCaId: ca.id,
friendlyName: ca.friendlyName
}
}
});
return {
ca
};
}
});
server.route({
method: "GET",
url: "/:sshCaId/public-key",
config: {
rateLimit: readLimit
},
schema: {
description: "Get public key of SSH CA",
params: z.object({
sshCaId: z.string().trim().describe(SSH_CERTIFICATE_AUTHORITIES.GET_PUBLIC_KEY.sshCaId)
}),
response: {
200: z.string()
}
},
handler: async (req) => {
const publicKey = await server.services.sshCertificateAuthority.getSshCaPublicKey({
caId: req.params.sshCaId
});
return publicKey;
}
});
server.route({
method: "PATCH",
url: "/:sshCaId",
config: {
rateLimit: writeLimit
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
schema: {
description: "Update SSH CA",
params: z.object({
sshCaId: z.string().trim().describe(SSH_CERTIFICATE_AUTHORITIES.UPDATE.sshCaId)
}),
body: z.object({
friendlyName: z.string().optional().describe(SSH_CERTIFICATE_AUTHORITIES.UPDATE.friendlyName),
status: z.nativeEnum(SshCaStatus).optional().describe(SSH_CERTIFICATE_AUTHORITIES.UPDATE.status)
}),
response: {
200: z.object({
ca: sanitizedSshCa.extend({
publicKey: z.string()
})
})
}
},
handler: async (req) => {
const ca = await server.services.sshCertificateAuthority.updateSshCaById({
caId: req.params.sshCaId,
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
...req.body
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId: ca.projectId,
event: {
type: EventType.UPDATE_SSH_CA,
metadata: {
sshCaId: ca.id,
friendlyName: ca.friendlyName,
status: ca.status as SshCaStatus
}
}
});
return {
ca
};
}
});
server.route({
method: "DELETE",
url: "/:sshCaId",
config: {
rateLimit: writeLimit
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
schema: {
description: "Delete SSH CA",
params: z.object({
sshCaId: z.string().trim().describe(SSH_CERTIFICATE_AUTHORITIES.DELETE.sshCaId)
}),
response: {
200: z.object({
ca: sanitizedSshCa
})
}
},
handler: async (req) => {
const ca = await server.services.sshCertificateAuthority.deleteSshCaById({
caId: req.params.sshCaId,
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId: ca.projectId,
event: {
type: EventType.DELETE_SSH_CA,
metadata: {
sshCaId: ca.id,
friendlyName: ca.friendlyName
}
}
});
return {
ca
};
}
});
server.route({
method: "GET",
url: "/:sshCaId/certificate-templates",
config: {
rateLimit: readLimit
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
schema: {
description: "Get list of certificate templates for the SSH CA",
params: z.object({
sshCaId: z.string().trim().describe(SSH_CERTIFICATE_AUTHORITIES.GET_CERTIFICATE_TEMPLATES.sshCaId)
}),
response: {
200: z.object({
certificateTemplates: sanitizedSshCertificateTemplate.array()
})
}
},
handler: async (req) => {
const { certificateTemplates, ca } = await server.services.sshCertificateAuthority.getSshCaCertificateTemplates({
caId: req.params.sshCaId,
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId: ca.projectId,
event: {
type: EventType.GET_SSH_CA_CERTIFICATE_TEMPLATES,
metadata: {
sshCaId: ca.id,
friendlyName: ca.friendlyName
}
}
});
return {
certificateTemplates
};
}
});
};

View File

@ -1,164 +0,0 @@
import ms from "ms";
import { z } from "zod";
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import { SshCertType } from "@app/ee/services/ssh/ssh-certificate-authority-types";
import { SSH_CERTIFICATE_AUTHORITIES } from "@app/lib/api-docs";
import { writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
import { CertKeyAlgorithm } from "@app/services/certificate/certificate-types";
export const registerSshCertRouter = async (server: FastifyZodProvider) => {
server.route({
method: "POST",
url: "/sign",
config: {
rateLimit: writeLimit
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
schema: {
description: "Sign SSH public key",
body: z.object({
certificateTemplateId: z
.string()
.trim()
.min(1)
.describe(SSH_CERTIFICATE_AUTHORITIES.SIGN_SSH_KEY.certificateTemplateId),
publicKey: z.string().trim().describe(SSH_CERTIFICATE_AUTHORITIES.SIGN_SSH_KEY.publicKey),
certType: z
.nativeEnum(SshCertType)
.default(SshCertType.USER)
.describe(SSH_CERTIFICATE_AUTHORITIES.SIGN_SSH_KEY.certType),
principals: z
.array(z.string().transform((val) => val.trim()))
.nonempty("Principals array must not be empty")
.describe(SSH_CERTIFICATE_AUTHORITIES.SIGN_SSH_KEY.principals),
ttl: z
.string()
.refine((val) => ms(val) > 0, "TTL must be a positive number")
.optional()
.describe(SSH_CERTIFICATE_AUTHORITIES.SIGN_SSH_KEY.ttl),
keyId: z.string().trim().max(50).optional().describe(SSH_CERTIFICATE_AUTHORITIES.SIGN_SSH_KEY.keyId)
}),
response: {
200: z.object({
serialNumber: z.string().describe(SSH_CERTIFICATE_AUTHORITIES.SIGN_SSH_KEY.serialNumber),
signedKey: z.string().describe(SSH_CERTIFICATE_AUTHORITIES.SIGN_SSH_KEY.signedKey)
})
}
},
handler: async (req) => {
const { serialNumber, signedPublicKey, certificateTemplate, ttl, keyId } =
await server.services.sshCertificateAuthority.signSshKey({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
...req.body
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
orgId: req.permission.orgId,
event: {
type: EventType.SIGN_SSH_KEY,
metadata: {
certificateTemplateId: certificateTemplate.id,
certType: req.body.certType,
principals: req.body.principals,
ttl: String(ttl),
keyId
}
}
});
return {
serialNumber,
signedKey: signedPublicKey
};
}
});
server.route({
method: "POST",
url: "/issue",
config: {
rateLimit: writeLimit
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
schema: {
description: "Issue SSH credentials (certificate + key)",
body: z.object({
certificateTemplateId: z
.string()
.trim()
.min(1)
.describe(SSH_CERTIFICATE_AUTHORITIES.ISSUE_SSH_CREDENTIALS.certificateTemplateId),
keyAlgorithm: z
.nativeEnum(CertKeyAlgorithm)
.default(CertKeyAlgorithm.RSA_2048)
.describe(SSH_CERTIFICATE_AUTHORITIES.ISSUE_SSH_CREDENTIALS.keyAlgorithm),
certType: z
.nativeEnum(SshCertType)
.default(SshCertType.USER)
.describe(SSH_CERTIFICATE_AUTHORITIES.ISSUE_SSH_CREDENTIALS.certType),
principals: z
.array(z.string().transform((val) => val.trim()))
.nonempty("Principals array must not be empty")
.describe(SSH_CERTIFICATE_AUTHORITIES.ISSUE_SSH_CREDENTIALS.principals),
ttl: z
.string()
.refine((val) => ms(val) > 0, "TTL must be a positive number")
.optional()
.describe(SSH_CERTIFICATE_AUTHORITIES.ISSUE_SSH_CREDENTIALS.ttl),
keyId: z.string().trim().max(50).optional().describe(SSH_CERTIFICATE_AUTHORITIES.ISSUE_SSH_CREDENTIALS.keyId)
}),
response: {
200: z.object({
serialNumber: z.string().describe(SSH_CERTIFICATE_AUTHORITIES.ISSUE_SSH_CREDENTIALS.serialNumber),
signedKey: z.string().describe(SSH_CERTIFICATE_AUTHORITIES.ISSUE_SSH_CREDENTIALS.signedKey),
privateKey: z.string().describe(SSH_CERTIFICATE_AUTHORITIES.ISSUE_SSH_CREDENTIALS.privateKey),
publicKey: z.string().describe(SSH_CERTIFICATE_AUTHORITIES.ISSUE_SSH_CREDENTIALS.publicKey),
keyAlgorithm: z
.nativeEnum(CertKeyAlgorithm)
.describe(SSH_CERTIFICATE_AUTHORITIES.ISSUE_SSH_CREDENTIALS.keyAlgorithm)
})
}
},
handler: async (req) => {
const { serialNumber, signedPublicKey, privateKey, publicKey, certificateTemplate, ttl, keyId } =
await server.services.sshCertificateAuthority.issueSshCreds({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
...req.body
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
orgId: req.permission.orgId,
event: {
type: EventType.ISSUE_SSH_CREDS,
metadata: {
certificateTemplateId: certificateTemplate.id,
keyAlgorithm: req.body.keyAlgorithm,
certType: req.body.certType,
principals: req.body.principals,
ttl: String(ttl),
keyId
}
}
});
return {
serialNumber,
signedKey: signedPublicKey,
privateKey,
publicKey,
keyAlgorithm: req.body.keyAlgorithm
};
}
});
};

View File

@ -1,258 +0,0 @@
import slugify from "@sindresorhus/slugify";
import ms from "ms";
import { z } from "zod";
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import { sanitizedSshCertificateTemplate } from "@app/ee/services/ssh-certificate-template/ssh-certificate-template-schema";
import { SshCertTemplateStatus } from "@app/ee/services/ssh-certificate-template/ssh-certificate-template-types";
import {
isValidHostPattern,
isValidUserPattern
} from "@app/ee/services/ssh-certificate-template/ssh-certificate-template-validators";
import { SSH_CERTIFICATE_TEMPLATES } from "@app/lib/api-docs";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
export const registerSshCertificateTemplateRouter = async (server: FastifyZodProvider) => {
server.route({
method: "GET",
url: "/:certificateTemplateId",
config: {
rateLimit: readLimit
},
schema: {
params: z.object({
certificateTemplateId: z.string().describe(SSH_CERTIFICATE_TEMPLATES.GET.certificateTemplateId)
}),
response: {
200: sanitizedSshCertificateTemplate
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const certificateTemplate = await server.services.sshCertificateTemplate.getSshCertTemplate({
id: req.params.certificateTemplateId,
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId: certificateTemplate.projectId,
event: {
type: EventType.GET_SSH_CERTIFICATE_TEMPLATE,
metadata: {
certificateTemplateId: certificateTemplate.id
}
}
});
return certificateTemplate;
}
});
server.route({
method: "POST",
url: "/",
config: {
rateLimit: writeLimit
},
schema: {
body: z
.object({
sshCaId: z.string().describe(SSH_CERTIFICATE_TEMPLATES.CREATE.sshCaId),
name: z
.string()
.min(1)
.max(36)
.refine((v) => slugify(v) === v, {
message: "Name must be a valid slug"
})
.describe(SSH_CERTIFICATE_TEMPLATES.CREATE.name),
ttl: z
.string()
.refine((val) => ms(val) > 0, "TTL must be a positive number")
.default("1h")
.describe(SSH_CERTIFICATE_TEMPLATES.CREATE.ttl),
maxTTL: z
.string()
.refine((val) => ms(val) > 0, "Max TTL must be a positive number")
.default("30d")
.describe(SSH_CERTIFICATE_TEMPLATES.CREATE.maxTTL),
allowedUsers: z
.array(z.string().refine(isValidUserPattern, "Invalid user pattern"))
.describe(SSH_CERTIFICATE_TEMPLATES.CREATE.allowedUsers),
allowedHosts: z
.array(z.string().refine(isValidHostPattern, "Invalid host pattern"))
.describe(SSH_CERTIFICATE_TEMPLATES.CREATE.allowedHosts),
allowUserCertificates: z.boolean().describe(SSH_CERTIFICATE_TEMPLATES.CREATE.allowUserCertificates),
allowHostCertificates: z.boolean().describe(SSH_CERTIFICATE_TEMPLATES.CREATE.allowHostCertificates),
allowCustomKeyIds: z.boolean().describe(SSH_CERTIFICATE_TEMPLATES.CREATE.allowCustomKeyIds)
})
.refine((data) => ms(data.maxTTL) > ms(data.ttl), {
message: "Max TLL must be greater than TTL",
path: ["maxTTL"]
}),
response: {
200: sanitizedSshCertificateTemplate
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const { certificateTemplate, ca } = await server.services.sshCertificateTemplate.createSshCertTemplate({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
...req.body
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId: ca.projectId,
event: {
type: EventType.CREATE_SSH_CERTIFICATE_TEMPLATE,
metadata: {
certificateTemplateId: certificateTemplate.id,
sshCaId: ca.id,
name: certificateTemplate.name,
ttl: certificateTemplate.ttl,
maxTTL: certificateTemplate.maxTTL,
allowedUsers: certificateTemplate.allowedUsers,
allowedHosts: certificateTemplate.allowedHosts,
allowUserCertificates: certificateTemplate.allowUserCertificates,
allowHostCertificates: certificateTemplate.allowHostCertificates,
allowCustomKeyIds: certificateTemplate.allowCustomKeyIds
}
}
});
return certificateTemplate;
}
});
server.route({
method: "PATCH",
url: "/:certificateTemplateId",
config: {
rateLimit: writeLimit
},
schema: {
body: z.object({
status: z.nativeEnum(SshCertTemplateStatus).optional(),
name: z
.string()
.min(1)
.max(36)
.refine((v) => slugify(v) === v, {
message: "Slug must be a valid slug"
})
.optional()
.describe(SSH_CERTIFICATE_TEMPLATES.UPDATE.name),
ttl: z
.string()
.refine((val) => ms(val) > 0, "TTL must be a positive number")
.optional()
.describe(SSH_CERTIFICATE_TEMPLATES.UPDATE.ttl),
maxTTL: z
.string()
.refine((val) => ms(val) > 0, "Max TTL must be a positive number")
.optional()
.describe(SSH_CERTIFICATE_TEMPLATES.UPDATE.maxTTL),
allowedUsers: z
.array(z.string().refine(isValidUserPattern, "Invalid user pattern"))
.optional()
.describe(SSH_CERTIFICATE_TEMPLATES.UPDATE.allowedUsers),
allowedHosts: z
.array(z.string().refine(isValidHostPattern, "Invalid host pattern"))
.optional()
.describe(SSH_CERTIFICATE_TEMPLATES.UPDATE.allowedHosts),
allowUserCertificates: z.boolean().optional().describe(SSH_CERTIFICATE_TEMPLATES.UPDATE.allowUserCertificates),
allowHostCertificates: z.boolean().optional().describe(SSH_CERTIFICATE_TEMPLATES.UPDATE.allowHostCertificates),
allowCustomKeyIds: z.boolean().optional().describe(SSH_CERTIFICATE_TEMPLATES.UPDATE.allowCustomKeyIds)
}),
params: z.object({
certificateTemplateId: z.string().describe(SSH_CERTIFICATE_TEMPLATES.UPDATE.certificateTemplateId)
}),
response: {
200: sanitizedSshCertificateTemplate
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const { certificateTemplate, projectId } = await server.services.sshCertificateTemplate.updateSshCertTemplate({
...req.body,
id: req.params.certificateTemplateId,
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId,
event: {
type: EventType.UPDATE_SSH_CERTIFICATE_TEMPLATE,
metadata: {
status: certificateTemplate.status as SshCertTemplateStatus,
certificateTemplateId: certificateTemplate.id,
sshCaId: certificateTemplate.sshCaId,
name: certificateTemplate.name,
ttl: certificateTemplate.ttl,
maxTTL: certificateTemplate.maxTTL,
allowedUsers: certificateTemplate.allowedUsers,
allowedHosts: certificateTemplate.allowedHosts,
allowUserCertificates: certificateTemplate.allowUserCertificates,
allowHostCertificates: certificateTemplate.allowHostCertificates,
allowCustomKeyIds: certificateTemplate.allowCustomKeyIds
}
}
});
return certificateTemplate;
}
});
server.route({
method: "DELETE",
url: "/:certificateTemplateId",
config: {
rateLimit: writeLimit
},
schema: {
params: z.object({
certificateTemplateId: z.string().describe(SSH_CERTIFICATE_TEMPLATES.DELETE.certificateTemplateId)
}),
response: {
200: sanitizedSshCertificateTemplate
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const certificateTemplate = await server.services.sshCertificateTemplate.deleteSshCertTemplate({
id: req.params.certificateTemplateId,
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId: certificateTemplate.projectId,
event: {
type: EventType.DELETE_SSH_CERTIFICATE_TEMPLATE,
metadata: {
certificateTemplateId: certificateTemplate.id
}
}
});
return certificateTemplate;
}
});
};

View File

@ -7,7 +7,6 @@ import { ProjectUserAdditionalPrivilegeTemporaryMode } from "@app/ee/services/pr
import { PROJECT_USER_ADDITIONAL_PRIVILEGE } from "@app/lib/api-docs";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { slugSchema } from "@app/server/lib/schemas";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { SanitizedUserProjectAdditionalPrivilegeSchema } from "@app/server/routes/santizedSchemas/user-additional-privilege";
import { AuthMode } from "@app/services/auth/auth-type";
@ -22,7 +21,17 @@ export const registerUserAdditionalPrivilegeRouter = async (server: FastifyZodPr
schema: {
body: z.object({
projectMembershipId: z.string().min(1).describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.CREATE.projectMembershipId),
slug: slugSchema({ min: 1, max: 60 }).optional().describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.CREATE.slug),
slug: z
.string()
.min(1)
.max(60)
.trim()
.refine((v) => v.toLowerCase() === v, "Slug must be lowercase")
.refine((v) => slugify(v) === v, {
message: "Slug must be a valid slug"
})
.optional()
.describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.CREATE.slug),
permissions: ProjectPermissionV2Schema.array().describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.CREATE.permissions),
type: z.discriminatedUnion("isTemporary", [
z.object({
@ -78,7 +87,15 @@ export const registerUserAdditionalPrivilegeRouter = async (server: FastifyZodPr
}),
body: z
.object({
slug: slugSchema({ min: 1, max: 60 }).describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.UPDATE.slug),
slug: z
.string()
.max(60)
.trim()
.refine((v) => v.toLowerCase() === v, "Slug must be lowercase")
.refine((v) => slugify(v) === v, {
message: "Slug must be a valid slug"
})
.describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.UPDATE.slug),
permissions: ProjectPermissionV2Schema.array()
.optional()
.describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.UPDATE.permissions),

View File

@ -7,7 +7,6 @@ import { ProjectPermissionV2Schema } from "@app/ee/services/permission/project-p
import { IDENTITY_ADDITIONAL_PRIVILEGE_V2 } from "@app/lib/api-docs";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { slugSchema } from "@app/server/lib/schemas";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { SanitizedIdentityPrivilegeSchema } from "@app/server/routes/santizedSchemas/identitiy-additional-privilege";
import { AuthMode } from "@app/services/auth/auth-type";
@ -29,7 +28,17 @@ export const registerIdentityProjectAdditionalPrivilegeRouter = async (server: F
body: z.object({
identityId: z.string().min(1).describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.CREATE.identityId),
projectId: z.string().min(1).describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.CREATE.projectId),
slug: slugSchema({ min: 1, max: 60 }).optional().describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.CREATE.slug),
slug: z
.string()
.min(1)
.max(60)
.trim()
.refine((val) => val.toLowerCase() === val, "Must be lowercase")
.refine((v) => slugify(v) === v, {
message: "Slug must be a valid slug"
})
.optional()
.describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.CREATE.slug),
permissions: ProjectPermissionV2Schema.array().describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.CREATE.permission),
type: z.discriminatedUnion("isTemporary", [
z.object({
@ -91,7 +100,16 @@ export const registerIdentityProjectAdditionalPrivilegeRouter = async (server: F
id: z.string().trim().describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.UPDATE.id)
}),
body: z.object({
slug: slugSchema({ min: 1, max: 60 }).describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.UPDATE.slug),
slug: z
.string()
.min(1)
.max(60)
.trim()
.refine((val) => val.toLowerCase() === val, "Must be lowercase")
.refine((v) => slugify(v) === v, {
message: "Slug must be a valid slug"
})
.describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.UPDATE.slug),
permissions: ProjectPermissionV2Schema.array()
.optional()
.describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.UPDATE.privilegePermission),

View File

@ -1,11 +1,11 @@
import { packRules } from "@casl/ability/extra";
import slugify from "@sindresorhus/slugify";
import { z } from "zod";
import { ProjectMembershipRole, ProjectRolesSchema } from "@app/db/schemas";
import { ProjectPermissionV2Schema } from "@app/ee/services/permission/project-permission";
import { PROJECT_ROLE } from "@app/lib/api-docs";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { slugSchema } from "@app/server/lib/schemas";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { SanitizedRoleSchema } from "@app/server/routes/sanitizedSchemas";
import { AuthMode } from "@app/services/auth/auth-type";
@ -29,14 +29,21 @@ export const registerProjectRoleRouter = async (server: FastifyZodProvider) => {
projectId: z.string().trim().describe(PROJECT_ROLE.CREATE.projectId)
}),
body: z.object({
slug: slugSchema({ min: 1, max: 64 })
slug: z
.string()
.toLowerCase()
.trim()
.min(1)
.refine(
(val) => !Object.values(ProjectMembershipRole).includes(val as ProjectMembershipRole),
"Please choose a different slug, the slug you have entered is reserved"
)
.refine((v) => slugify(v) === v, {
message: "Slug must be a valid"
})
.describe(PROJECT_ROLE.CREATE.slug),
name: z.string().min(1).trim().describe(PROJECT_ROLE.CREATE.name),
description: z.string().trim().nullish().describe(PROJECT_ROLE.CREATE.description),
description: z.string().trim().optional().describe(PROJECT_ROLE.CREATE.description),
permissions: ProjectPermissionV2Schema.array().describe(PROJECT_ROLE.CREATE.permissions)
}),
response: {
@ -83,15 +90,23 @@ export const registerProjectRoleRouter = async (server: FastifyZodProvider) => {
roleId: z.string().trim().describe(PROJECT_ROLE.UPDATE.roleId)
}),
body: z.object({
slug: slugSchema({ min: 1, max: 64 })
slug: z
.string()
.toLowerCase()
.trim()
.optional()
.describe(PROJECT_ROLE.UPDATE.slug)
.refine(
(val) => !Object.values(ProjectMembershipRole).includes(val as ProjectMembershipRole),
(val) =>
typeof val === "undefined" ||
!Object.values(ProjectMembershipRole).includes(val as ProjectMembershipRole),
"Please choose a different slug, the slug you have entered is reserved"
)
.optional()
.describe(PROJECT_ROLE.UPDATE.slug),
.refine((val) => typeof val === "undefined" || slugify(val) === val, {
message: "Slug must be a valid"
}),
name: z.string().trim().optional().describe(PROJECT_ROLE.UPDATE.name),
description: z.string().trim().nullish().describe(PROJECT_ROLE.UPDATE.description),
description: z.string().trim().optional().describe(PROJECT_ROLE.UPDATE.description),
permissions: ProjectPermissionV2Schema.array().describe(PROJECT_ROLE.UPDATE.permissions).optional()
}),
response: {
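
One subtlety in the update body above: once the slug is optional, every refine has to let undefined through, hence the typeof val === "undefined" guards. A quick illustration of that guard with assumed reserved values (not the real ProjectMembershipRole list):

import { z } from "zod";

// Hypothetical reserved slugs standing in for the ProjectMembershipRole values.
const RESERVED = ["admin", "member", "viewer"];

const updateSlug = z
  .string()
  .toLowerCase()
  .trim()
  .optional()
  .refine((val) => typeof val === "undefined" || !RESERVED.includes(val), "Reserved slug");

updateSlug.parse(undefined); // ok: the field was omitted entirely
updateSlug.parse("my-role"); // ok
// updateSlug.parse("admin"); // would throw: reserved slug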

View File

@ -139,10 +139,5 @@ export const accessApprovalPolicyDALFactory = (db: TDbClient) => {
}
};
const softDeleteById = async (policyId: string, tx?: Knex) => {
const softDeletedPolicy = await accessApprovalPolicyOrm.updateById(policyId, { deletedAt: new Date() }, tx);
return softDeletedPolicy;
};
return { ...accessApprovalPolicyOrm, find, findById, softDeleteById };
return { ...accessApprovalPolicyOrm, find, findById };
};

View File

@ -1,6 +1,5 @@
import { ForbiddenError } from "@casl/ability";
import { ProjectType } from "@app/db/schemas";
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
import { ProjectPermissionActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission";
import { BadRequestError, ForbiddenRequestError, NotFoundError } from "@app/lib/errors";
@ -9,11 +8,7 @@ import { TProjectEnvDALFactory } from "@app/services/project-env/project-env-dal
import { TProjectMembershipDALFactory } from "@app/services/project-membership/project-membership-dal";
import { TUserDALFactory } from "@app/services/user/user-dal";
import { TAccessApprovalRequestDALFactory } from "../access-approval-request/access-approval-request-dal";
import { TAccessApprovalRequestReviewerDALFactory } from "../access-approval-request/access-approval-request-reviewer-dal";
import { ApprovalStatus } from "../access-approval-request/access-approval-request-types";
import { TGroupDALFactory } from "../group/group-dal";
import { TProjectUserAdditionalPrivilegeDALFactory } from "../project-user-additional-privilege/project-user-additional-privilege-dal";
import { TAccessApprovalPolicyApproverDALFactory } from "./access-approval-policy-approver-dal";
import { TAccessApprovalPolicyDALFactory } from "./access-approval-policy-dal";
import {
@ -26,7 +21,7 @@ import {
TUpdateAccessApprovalPolicy
} from "./access-approval-policy-types";
type TAccessApprovalPolicyServiceFactoryDep = {
type TSecretApprovalPolicyServiceFactoryDep = {
projectDAL: TProjectDALFactory;
permissionService: Pick<TPermissionServiceFactory, "getProjectPermission">;
accessApprovalPolicyDAL: TAccessApprovalPolicyDALFactory;
@ -35,9 +30,6 @@ type TAccessApprovalPolicyServiceFactoryDep = {
projectMembershipDAL: Pick<TProjectMembershipDALFactory, "find">;
groupDAL: TGroupDALFactory;
userDAL: Pick<TUserDALFactory, "find">;
accessApprovalRequestDAL: Pick<TAccessApprovalRequestDALFactory, "update" | "find">;
additionalPrivilegeDAL: Pick<TProjectUserAdditionalPrivilegeDALFactory, "delete">;
accessApprovalRequestReviewerDAL: Pick<TAccessApprovalRequestReviewerDALFactory, "update">;
};
export type TAccessApprovalPolicyServiceFactory = ReturnType<typeof accessApprovalPolicyServiceFactory>;
@ -49,11 +41,8 @@ export const accessApprovalPolicyServiceFactory = ({
permissionService,
projectEnvDAL,
projectDAL,
userDAL,
accessApprovalRequestDAL,
additionalPrivilegeDAL,
accessApprovalRequestReviewerDAL
}: TAccessApprovalPolicyServiceFactoryDep) => {
userDAL
}: TSecretApprovalPolicyServiceFactoryDep) => {
const createAccessApprovalPolicy = async ({
name,
actor,
@ -87,15 +76,13 @@ export const accessApprovalPolicyServiceFactory = ({
if (!groupApprovers && approvals > userApprovers.length + userApproverNames.length)
throw new BadRequestError({ message: "Approvals cannot be greater than approvers" });
const { permission, ForbidOnInvalidProjectType } = await permissionService.getProjectPermission(
const { permission } = await permissionService.getProjectPermission(
actor,
actorId,
project.id,
actorAuthMethod,
actorOrgId
);
ForbidOnInvalidProjectType(ProjectType.SecretManager);
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionActions.Create,
ProjectPermissionSub.SecretApproval
@ -193,9 +180,16 @@ export const accessApprovalPolicyServiceFactory = ({
if (!project) throw new NotFoundError({ message: `Project with slug '${projectSlug}' not found` });
// Anyone in the project should be able to get the policies.
await permissionService.getProjectPermission(actor, actorId, project.id, actorAuthMethod, actorOrgId);
/* const { permission } = */ await permissionService.getProjectPermission(
actor,
actorId,
project.id,
actorAuthMethod,
actorOrgId
);
// ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.SecretApproval);
const accessApprovalPolicies = await accessApprovalPolicyDAL.find({ projectId: project.id, deletedAt: null });
const accessApprovalPolicies = await accessApprovalPolicyDAL.find({ projectId: project.id });
return accessApprovalPolicies;
};
@ -237,14 +231,13 @@ export const accessApprovalPolicyServiceFactory = ({
if (!accessApprovalPolicy) {
throw new NotFoundError({ message: `Secret approval policy with ID '${policyId}' not found` });
}
const { permission, ForbidOnInvalidProjectType } = await permissionService.getProjectPermission(
const { permission } = await permissionService.getProjectPermission(
actor,
actorId,
accessApprovalPolicy.projectId,
actorAuthMethod,
actorOrgId
);
ForbidOnInvalidProjectType(ProjectType.SecretManager);
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Edit, ProjectPermissionSub.SecretApproval);
@ -321,42 +314,19 @@ export const accessApprovalPolicyServiceFactory = ({
const policy = await accessApprovalPolicyDAL.findById(policyId);
if (!policy) throw new NotFoundError({ message: `Secret approval policy with ID '${policyId}' not found` });
const { permission, ForbidOnInvalidProjectType } = await permissionService.getProjectPermission(
const { permission } = await permissionService.getProjectPermission(
actor,
actorId,
policy.projectId,
actorAuthMethod,
actorOrgId
);
ForbidOnInvalidProjectType(ProjectType.SecretManager);
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionActions.Delete,
ProjectPermissionSub.SecretApproval
);
await accessApprovalPolicyDAL.transaction(async (tx) => {
await accessApprovalPolicyDAL.softDeleteById(policyId, tx);
const allAccessApprovalRequests = await accessApprovalRequestDAL.find({ policyId });
if (allAccessApprovalRequests.length) {
const accessApprovalRequestsIds = allAccessApprovalRequests.map((request) => request.id);
const privilegeIdsArray = allAccessApprovalRequests
.map((request) => request.privilegeId)
.filter((id): id is string => id != null);
if (privilegeIdsArray.length) {
await additionalPrivilegeDAL.delete({ $in: { id: privilegeIdsArray } }, tx);
}
await accessApprovalRequestReviewerDAL.update(
{ $in: { id: accessApprovalRequestsIds }, status: ApprovalStatus.PENDING },
{ status: ApprovalStatus.REJECTED },
tx
);
}
});
await accessApprovalPolicyDAL.deleteById(policyId);
return policy;
};
@ -386,11 +356,7 @@ export const accessApprovalPolicyServiceFactory = ({
const environment = await projectEnvDAL.findOne({ projectId: project.id, slug: envSlug });
if (!environment) throw new NotFoundError({ message: `Environment with slug '${envSlug}' not found` });
const policies = await accessApprovalPolicyDAL.find({
envId: environment.id,
projectId: project.id,
deletedAt: null
});
const policies = await accessApprovalPolicyDAL.find({ envId: environment.id, projectId: project.id });
if (!policies) throw new NotFoundError({ message: `No policies found in environment with slug '${envSlug}'` });
return { count: policies.length };
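
For readers tracking the softDeleteById changes in this file and in the DAL above: the soft-delete pattern amounts to stamping deletedAt on delete and filtering it out on reads. A minimal Knex-style sketch, with a hypothetical table name and an assumed nullable deletedAt column:

import { Knex } from "knex";

// Mark the row as deleted without removing it.
const softDeleteById = (db: Knex, id: string) =>
  db("access_approval_policies").where({ id }).update({ deletedAt: new Date() });

// Reads then exclude soft-deleted rows.
const findActive = (db: Knex, projectId: string) =>
  db("access_approval_policies").where({ projectId }).whereNull("deletedAt");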

View File

@ -61,8 +61,7 @@ export const accessApprovalRequestDALFactory = (db: TDbClient) => {
db.ref("approvals").withSchema(TableName.AccessApprovalPolicy).as("policyApprovals"),
db.ref("secretPath").withSchema(TableName.AccessApprovalPolicy).as("policySecretPath"),
db.ref("enforcementLevel").withSchema(TableName.AccessApprovalPolicy).as("policyEnforcementLevel"),
db.ref("envId").withSchema(TableName.AccessApprovalPolicy).as("policyEnvId"),
db.ref("deletedAt").withSchema(TableName.AccessApprovalPolicy).as("policyDeletedAt")
db.ref("envId").withSchema(TableName.AccessApprovalPolicy).as("policyEnvId")
)
.select(db.ref("approverUserId").withSchema(TableName.AccessApprovalPolicyApprover))
@ -119,8 +118,7 @@ export const accessApprovalRequestDALFactory = (db: TDbClient) => {
approvals: doc.policyApprovals,
secretPath: doc.policySecretPath,
enforcementLevel: doc.policyEnforcementLevel,
envId: doc.policyEnvId,
deletedAt: doc.policyDeletedAt
envId: doc.policyEnvId
},
requestedByUser: {
userId: doc.requestedByUserId,
@ -143,7 +141,7 @@ export const accessApprovalRequestDALFactory = (db: TDbClient) => {
}
: null,
isApproved: !!doc.policyDeletedAt || !!doc.privilegeId
isApproved: !!doc.privilegeId
}),
childrenMapper: [
{
@ -254,8 +252,7 @@ export const accessApprovalRequestDALFactory = (db: TDbClient) => {
tx.ref("slug").withSchema(TableName.Environment).as("environment"),
tx.ref("secretPath").withSchema(TableName.AccessApprovalPolicy).as("policySecretPath"),
tx.ref("enforcementLevel").withSchema(TableName.AccessApprovalPolicy).as("policyEnforcementLevel"),
tx.ref("approvals").withSchema(TableName.AccessApprovalPolicy).as("policyApprovals"),
tx.ref("deletedAt").withSchema(TableName.AccessApprovalPolicy).as("policyDeletedAt")
tx.ref("approvals").withSchema(TableName.AccessApprovalPolicy).as("policyApprovals")
);
const findById = async (id: string, tx?: Knex) => {
@ -274,8 +271,7 @@ export const accessApprovalRequestDALFactory = (db: TDbClient) => {
name: el.policyName,
approvals: el.policyApprovals,
secretPath: el.policySecretPath,
enforcementLevel: el.policyEnforcementLevel,
deletedAt: el.policyDeletedAt
enforcementLevel: el.policyEnforcementLevel
},
requestedByUser: {
userId: el.requestedByUserId,
@ -367,7 +363,6 @@ export const accessApprovalRequestDALFactory = (db: TDbClient) => {
)
.where(`${TableName.Environment}.projectId`, projectId)
.where(`${TableName.AccessApprovalPolicy}.deletedAt`, null)
.select(selectAllTableCols(TableName.AccessApprovalRequest))
.select(db.ref("status").withSchema(TableName.AccessApprovalRequestReviewer).as("reviewerStatus"))
.select(db.ref("reviewerUserId").withSchema(TableName.AccessApprovalRequestReviewer).as("reviewerUserId"));

View File

@ -130,9 +130,6 @@ export const accessApprovalRequestServiceFactory = ({
message: `No policy in environment with slug '${environment.slug}' and with secret path '${secretPath}' was found.`
});
}
if (policy.deletedAt) {
throw new BadRequestError({ message: "The policy linked to this request has been deleted" });
}
const approverIds: string[] = [];
const approverGroupIds: string[] = [];
@ -213,7 +210,7 @@ export const accessApprovalRequestServiceFactory = ({
);
const requesterFullName = `${requestedByUser.firstName} ${requestedByUser.lastName}`;
const approvalUrl = `${cfg.SITE_URL}/secret-manager/${project.id}/approval`;
const approvalUrl = `${cfg.SITE_URL}/project/${project.id}/approval`;
await triggerSlackNotification({
projectId: project.id,
@ -312,12 +309,6 @@ export const accessApprovalRequestServiceFactory = ({
}
const { policy } = accessApprovalRequest;
if (policy.deletedAt) {
throw new BadRequestError({
message: "The policy associated with this access request has been deleted."
});
}
const { membership, hasRole } = await permissionService.getProjectPermission(
actor,
actorId,

View File

@ -1,7 +1,6 @@
import { RawAxiosRequestHeaders } from "axios";
import { SecretKeyEncoding } from "@app/db/schemas";
import { getConfig } from "@app/lib/config/env";
import { request } from "@app/lib/config/request";
import { infisicalSymmetricDecrypt } from "@app/lib/crypto/encryption";
import { QueueJobs, QueueName, TQueueServiceFactory } from "@app/queue";
@ -21,130 +20,27 @@ type TAuditLogQueueServiceFactoryDep = {
licenseService: Pick<TLicenseServiceFactory, "getPlan">;
};
export type TAuditLogQueueServiceFactory = Awaited<ReturnType<typeof auditLogQueueServiceFactory>>;
export type TAuditLogQueueServiceFactory = ReturnType<typeof auditLogQueueServiceFactory>;
// keep this timeout at 5s; it must be fast, otherwise the queue will take longer to finish
// the audit log queue is crowded, so this needs to be fast
export const AUDIT_LOG_STREAM_TIMEOUT = 5 * 1000;
export const auditLogQueueServiceFactory = async ({
export const auditLogQueueServiceFactory = ({
auditLogDAL,
queueService,
projectDAL,
licenseService,
auditLogStreamDAL
}: TAuditLogQueueServiceFactoryDep) => {
const appCfg = getConfig();
const pushToLog = async (data: TCreateAuditLogDTO) => {
if (appCfg.USE_PG_QUEUE && appCfg.SHOULD_INIT_PG_QUEUE) {
await queueService.queuePg<QueueName.AuditLog>(QueueJobs.AuditLog, data, {
retryLimit: 10,
retryBackoff: true
});
} else {
await queueService.queue<QueueName.AuditLog>(QueueName.AuditLog, QueueJobs.AuditLog, data, {
await queueService.queue(QueueName.AuditLog, QueueJobs.AuditLog, data, {
removeOnFail: {
count: 3
},
removeOnComplete: true
});
}
};
if (appCfg.SHOULD_INIT_PG_QUEUE) {
await queueService.startPg<QueueName.AuditLog>(
QueueJobs.AuditLog,
async ([job]) => {
const { actor, event, ipAddress, projectId, userAgent, userAgentType } = job.data;
let { orgId } = job.data;
const MS_IN_DAY = 24 * 60 * 60 * 1000;
let project;
if (!orgId) {
// orgId and projectId will never both be undefined
// TODO(akhilmhdh): use caching here in dal to avoid db calls
project = await projectDAL.findById(projectId as string);
orgId = project.orgId;
}
const plan = await licenseService.getPlan(orgId);
if (plan.auditLogsRetentionDays === 0) {
// skip inserting if audit log retention is 0, meaning it is not supported
return;
}
// For project actions, set TTL to project-level audit log retention config
// This condition ensures that the plan's audit log retention days cannot be bypassed
const ttlInDays =
project?.auditLogsRetentionDays && project.auditLogsRetentionDays < plan.auditLogsRetentionDays
? project.auditLogsRetentionDays
: plan.auditLogsRetentionDays;
const ttl = ttlInDays * MS_IN_DAY;
const auditLog = await auditLogDAL.create({
actor: actor.type,
actorMetadata: actor.metadata,
userAgent,
projectId,
projectName: project?.name,
ipAddress,
orgId,
eventType: event.type,
expiresAt: new Date(Date.now() + ttl),
eventMetadata: event.metadata,
userAgentType
});
const logStreams = orgId ? await auditLogStreamDAL.find({ orgId }) : [];
await Promise.allSettled(
logStreams.map(
async ({
url,
encryptedHeadersTag,
encryptedHeadersIV,
encryptedHeadersKeyEncoding,
encryptedHeadersCiphertext
}) => {
const streamHeaders =
encryptedHeadersIV && encryptedHeadersCiphertext && encryptedHeadersTag
? (JSON.parse(
infisicalSymmetricDecrypt({
keyEncoding: encryptedHeadersKeyEncoding as SecretKeyEncoding,
iv: encryptedHeadersIV,
tag: encryptedHeadersTag,
ciphertext: encryptedHeadersCiphertext
})
) as LogStreamHeaders[])
: [];
const headers: RawAxiosRequestHeaders = { "Content-Type": "application/json" };
if (streamHeaders.length)
streamHeaders.forEach(({ key, value }) => {
headers[key] = value;
});
return request.post(url, auditLog, {
headers,
// request timeout
timeout: AUDIT_LOG_STREAM_TIMEOUT,
// connection timeout
signal: AbortSignal.timeout(AUDIT_LOG_STREAM_TIMEOUT)
});
}
)
);
},
{
batchSize: 1,
workerCount: 30,
pollingIntervalSeconds: 0.5
}
);
}
queueService.start(QueueName.AuditLog, async (job) => {
const { actor, event, ipAddress, projectId, userAgent, userAgentType } = job.data;
let { orgId } = job.data;
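
The retention logic in the worker block above (project-level retention only applies when it is stricter than the plan's) is easy to miss in the diff; a short sketch of the calculation with assumed input values:

const MS_IN_DAY = 24 * 60 * 60 * 1000;

// The project's own retention wins only when it is lower than the plan's cap.
const resolveRetentionDays = (projectDays: number | undefined, planDays: number) =>
  projectDays && projectDays < planDays ? projectDays : planDays;

// e.g. a project capped at 30 days on a plan allowing 90 days keeps logs for 30 days.
const expiresAt = new Date(Date.now() + resolveRetentionDays(30, 90) * MS_IN_DAY);
console.log(expiresAt.toISOString());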

View File

@ -2,14 +2,9 @@ import {
TCreateProjectTemplateDTO,
TUpdateProjectTemplateDTO
} from "@app/ee/services/project-template/project-template-types";
import { SshCaStatus, SshCertType } from "@app/ee/services/ssh/ssh-certificate-authority-types";
import { SshCertTemplateStatus } from "@app/ee/services/ssh-certificate-template/ssh-certificate-template-types";
import { SymmetricEncryption } from "@app/lib/crypto/cipher";
import { TProjectPermission } from "@app/lib/types";
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
import { TCreateAppConnectionDTO, TUpdateAppConnectionDTO } from "@app/services/app-connection/app-connection-types";
import { ActorType } from "@app/services/auth/auth-type";
import { CertKeyAlgorithm } from "@app/services/certificate/certificate-types";
import { CaStatus } from "@app/services/certificate-authority/certificate-authority-types";
import { TIdentityTrustedIp } from "@app/services/identity/identity-types";
import { PkiItemType } from "@app/services/pki-collection/pki-collection-types";
@ -65,7 +60,6 @@ export enum EventType {
DELETE_SECRETS = "delete-secrets",
GET_WORKSPACE_KEY = "get-workspace-key",
AUTHORIZE_INTEGRATION = "authorize-integration",
UPDATE_INTEGRATION_AUTH = "update-integration-auth",
UNAUTHORIZE_INTEGRATION = "unauthorize-integration",
CREATE_INTEGRATION = "create-integration",
DELETE_INTEGRATION = "delete-integration",
@ -100,11 +94,6 @@ export enum EventType {
UPDATE_IDENTITY_OIDC_AUTH = "update-identity-oidc-auth",
GET_IDENTITY_OIDC_AUTH = "get-identity-oidc-auth",
REVOKE_IDENTITY_OIDC_AUTH = "revoke-identity-oidc-auth",
LOGIN_IDENTITY_JWT_AUTH = "login-identity-jwt-auth",
ADD_IDENTITY_JWT_AUTH = "add-identity-jwt-auth",
UPDATE_IDENTITY_JWT_AUTH = "update-identity-jwt-auth",
GET_IDENTITY_JWT_AUTH = "get-identity-jwt-auth",
REVOKE_IDENTITY_JWT_AUTH = "revoke-identity-jwt-auth",
CREATE_IDENTITY_UNIVERSAL_AUTH_CLIENT_SECRET = "create-identity-universal-auth-client-secret",
REVOKE_IDENTITY_UNIVERSAL_AUTH_CLIENT_SECRET = "revoke-identity-universal-auth-client-secret",
GET_IDENTITY_UNIVERSAL_AUTH_CLIENT_SECRETS = "get-identity-universal-auth-client-secret",
@ -148,17 +137,6 @@ export enum EventType {
SECRET_APPROVAL_REQUEST = "secret-approval-request",
SECRET_APPROVAL_CLOSED = "secret-approval-closed",
SECRET_APPROVAL_REOPENED = "secret-approval-reopened",
SIGN_SSH_KEY = "sign-ssh-key",
ISSUE_SSH_CREDS = "issue-ssh-creds",
CREATE_SSH_CA = "create-ssh-certificate-authority",
GET_SSH_CA = "get-ssh-certificate-authority",
UPDATE_SSH_CA = "update-ssh-certificate-authority",
DELETE_SSH_CA = "delete-ssh-certificate-authority",
GET_SSH_CA_CERTIFICATE_TEMPLATES = "get-ssh-certificate-authority-certificate-templates",
CREATE_SSH_CERTIFICATE_TEMPLATE = "create-ssh-certificate-template",
UPDATE_SSH_CERTIFICATE_TEMPLATE = "update-ssh-certificate-template",
DELETE_SSH_CERTIFICATE_TEMPLATE = "delete-ssh-certificate-template",
GET_SSH_CERTIFICATE_TEMPLATE = "get-ssh-certificate-template",
CREATE_CA = "create-certificate-authority",
GET_CA = "get-certificate-authority",
UPDATE_CA = "update-certificate-authority",
@ -224,12 +202,7 @@ export enum EventType {
CREATE_PROJECT_TEMPLATE = "create-project-template",
UPDATE_PROJECT_TEMPLATE = "update-project-template",
DELETE_PROJECT_TEMPLATE = "delete-project-template",
APPLY_PROJECT_TEMPLATE = "apply-project-template",
GET_APP_CONNECTIONS = "get-app-connections",
GET_APP_CONNECTION = "get-app-connection",
CREATE_APP_CONNECTION = "create-app-connection",
UPDATE_APP_CONNECTION = "update-app-connection",
DELETE_APP_CONNECTION = "delete-app-connection"
APPLY_PROJECT_TEMPLATE = "apply-project-template"
}
interface UserActorMetadata {
@ -384,13 +357,6 @@ interface AuthorizeIntegrationEvent {
};
}
interface UpdateIntegrationAuthEvent {
type: EventType.UPDATE_INTEGRATION_AUTH;
metadata: {
integration: string;
};
}
interface UnauthorizeIntegrationEvent {
type: EventType.UNAUTHORIZE_INTEGRATION;
metadata: {
@ -929,67 +895,6 @@ interface GetIdentityOidcAuthEvent {
};
}
interface LoginIdentityJwtAuthEvent {
type: EventType.LOGIN_IDENTITY_JWT_AUTH;
metadata: {
identityId: string;
identityJwtAuthId: string;
identityAccessTokenId: string;
};
}
interface AddIdentityJwtAuthEvent {
type: EventType.ADD_IDENTITY_JWT_AUTH;
metadata: {
identityId: string;
configurationType: string;
jwksUrl?: string;
jwksCaCert: string;
publicKeys: string[];
boundIssuer: string;
boundAudiences: string;
boundClaims: Record<string, string>;
boundSubject: string;
accessTokenTTL: number;
accessTokenMaxTTL: number;
accessTokenNumUsesLimit: number;
accessTokenTrustedIps: Array<TIdentityTrustedIp>;
};
}
interface UpdateIdentityJwtAuthEvent {
type: EventType.UPDATE_IDENTITY_JWT_AUTH;
metadata: {
identityId: string;
configurationType?: string;
jwksUrl?: string;
jwksCaCert?: string;
publicKeys?: string[];
boundIssuer?: string;
boundAudiences?: string;
boundClaims?: Record<string, string>;
boundSubject?: string;
accessTokenTTL?: number;
accessTokenMaxTTL?: number;
accessTokenNumUsesLimit?: number;
accessTokenTrustedIps?: Array<TIdentityTrustedIp>;
};
}
interface DeleteIdentityJwtAuthEvent {
type: EventType.REVOKE_IDENTITY_JWT_AUTH;
metadata: {
identityId: string;
};
}
interface GetIdentityJwtAuthEvent {
type: EventType.GET_IDENTITY_JWT_AUTH;
metadata: {
identityId: string;
};
}
interface CreateEnvironmentEvent {
type: EventType.CREATE_ENVIRONMENT;
metadata: {
@ -1227,117 +1132,6 @@ interface SecretApprovalRequest {
};
}
interface SignSshKey {
type: EventType.SIGN_SSH_KEY;
metadata: {
certificateTemplateId: string;
certType: SshCertType;
principals: string[];
ttl: string;
keyId: string;
};
}
interface IssueSshCreds {
type: EventType.ISSUE_SSH_CREDS;
metadata: {
certificateTemplateId: string;
keyAlgorithm: CertKeyAlgorithm;
certType: SshCertType;
principals: string[];
ttl: string;
keyId: string;
};
}
interface CreateSshCa {
type: EventType.CREATE_SSH_CA;
metadata: {
sshCaId: string;
friendlyName: string;
};
}
interface GetSshCa {
type: EventType.GET_SSH_CA;
metadata: {
sshCaId: string;
friendlyName: string;
};
}
interface UpdateSshCa {
type: EventType.UPDATE_SSH_CA;
metadata: {
sshCaId: string;
friendlyName: string;
status: SshCaStatus;
};
}
interface DeleteSshCa {
type: EventType.DELETE_SSH_CA;
metadata: {
sshCaId: string;
friendlyName: string;
};
}
interface GetSshCaCertificateTemplates {
type: EventType.GET_SSH_CA_CERTIFICATE_TEMPLATES;
metadata: {
sshCaId: string;
friendlyName: string;
};
}
interface CreateSshCertificateTemplate {
type: EventType.CREATE_SSH_CERTIFICATE_TEMPLATE;
metadata: {
certificateTemplateId: string;
sshCaId: string;
name: string;
ttl: string;
maxTTL: string;
allowedUsers: string[];
allowedHosts: string[];
allowUserCertificates: boolean;
allowHostCertificates: boolean;
allowCustomKeyIds: boolean;
};
}
interface GetSshCertificateTemplate {
type: EventType.GET_SSH_CERTIFICATE_TEMPLATE;
metadata: {
certificateTemplateId: string;
};
}
interface UpdateSshCertificateTemplate {
type: EventType.UPDATE_SSH_CERTIFICATE_TEMPLATE;
metadata: {
certificateTemplateId: string;
sshCaId: string;
name: string;
status: SshCertTemplateStatus;
ttl: string;
maxTTL: string;
allowedUsers: string[];
allowedHosts: string[];
allowUserCertificates: boolean;
allowHostCertificates: boolean;
allowCustomKeyIds: boolean;
};
}
interface DeleteSshCertificateTemplate {
type: EventType.DELETE_SSH_CERTIFICATE_TEMPLATE;
metadata: {
certificateTemplateId: string;
};
}
interface CreateCa {
type: EventType.CREATE_CA;
metadata: {
@ -1874,39 +1668,6 @@ interface ApplyProjectTemplateEvent {
};
}
interface GetAppConnectionsEvent {
type: EventType.GET_APP_CONNECTIONS;
metadata: {
app?: AppConnection;
count: number;
connectionIds: string[];
};
}
interface GetAppConnectionEvent {
type: EventType.GET_APP_CONNECTION;
metadata: {
connectionId: string;
};
}
interface CreateAppConnectionEvent {
type: EventType.CREATE_APP_CONNECTION;
metadata: Omit<TCreateAppConnectionDTO, "credentials"> & { connectionId: string };
}
interface UpdateAppConnectionEvent {
type: EventType.UPDATE_APP_CONNECTION;
metadata: Omit<TUpdateAppConnectionDTO, "credentials"> & { connectionId: string; credentialsUpdated: boolean };
}
interface DeleteAppConnectionEvent {
type: EventType.DELETE_APP_CONNECTION;
metadata: {
connectionId: string;
};
}
export type Event =
| GetSecretsEvent
| GetSecretEvent
@ -1919,7 +1680,6 @@ export type Event =
| DeleteSecretBatchEvent
| GetWorkspaceKeyEvent
| AuthorizeIntegrationEvent
| UpdateIntegrationAuthEvent
| UnauthorizeIntegrationEvent
| CreateIntegrationEvent
| DeleteIntegrationEvent
@ -1973,11 +1733,6 @@ export type Event =
| DeleteIdentityOidcAuthEvent
| UpdateIdentityOidcAuthEvent
| GetIdentityOidcAuthEvent
| LoginIdentityJwtAuthEvent
| AddIdentityJwtAuthEvent
| UpdateIdentityJwtAuthEvent
| GetIdentityJwtAuthEvent
| DeleteIdentityJwtAuthEvent
| CreateEnvironmentEvent
| GetEnvironmentEvent
| UpdateEnvironmentEvent
@ -2002,17 +1757,6 @@ export type Event =
| SecretApprovalClosed
| SecretApprovalRequest
| SecretApprovalReopened
| SignSshKey
| IssueSshCreds
| CreateSshCa
| GetSshCa
| UpdateSshCa
| DeleteSshCa
| GetSshCaCertificateTemplates
| CreateSshCertificateTemplate
| UpdateSshCertificateTemplate
| GetSshCertificateTemplate
| DeleteSshCertificateTemplate
| CreateCa
| GetCa
| UpdateCa
@ -2078,9 +1822,4 @@ export type Event =
| CreateProjectTemplateEvent
| UpdateProjectTemplateEvent
| DeleteProjectTemplateEvent
| ApplyProjectTemplateEvent
| GetAppConnectionsEvent
| GetAppConnectionEvent
| CreateAppConnectionEvent
| UpdateAppConnectionEvent
| DeleteAppConnectionEvent;
| ApplyProjectTemplateEvent;
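
As the changes above show, each audit event touches three places in this file: an EventType member, a metadata interface, and an entry in the Event union. A minimal sketch of that pattern with a hypothetical event (not one of the real events):

// 1. Enum member.
enum EventType {
  EXAMPLE_ACTION = "example-action"
}

// 2. Metadata interface keyed to that member.
interface ExampleActionEvent {
  type: EventType.EXAMPLE_ACTION;
  metadata: {
    resourceId: string;
  };
}

// 3. Union entry so createAuditLog accepts it.
type Event = ExampleActionEvent;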

View File

@ -1,7 +1,7 @@
import { ForbiddenError, subject } from "@casl/ability";
import ms from "ms";
import { ProjectType, SecretKeyEncoding } from "@app/db/schemas";
import { SecretKeyEncoding } from "@app/db/schemas";
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
import {
@ -67,14 +67,13 @@ export const dynamicSecretLeaseServiceFactory = ({
if (!project) throw new NotFoundError({ message: `Project with slug '${projectSlug}' not found` });
const projectId = project.id;
const { permission, ForbidOnInvalidProjectType } = await permissionService.getProjectPermission(
const { permission } = await permissionService.getProjectPermission(
actor,
actorId,
projectId,
actorAuthMethod,
actorOrgId
);
ForbidOnInvalidProjectType(ProjectType.SecretManager);
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionDynamicSecretActions.Lease,
subject(ProjectPermissionSub.DynamicSecrets, { environment: environmentSlug, secretPath: path })
@ -113,7 +112,7 @@ export const dynamicSecretLeaseServiceFactory = ({
})
) as object;
const selectedTTL = ttl || dynamicSecretCfg.defaultTTL;
const selectedTTL = ttl ?? dynamicSecretCfg.defaultTTL;
const { maxTTL } = dynamicSecretCfg;
const expireAt = new Date(new Date().getTime() + ms(selectedTTL));
if (maxTTL) {
@ -147,14 +146,13 @@ export const dynamicSecretLeaseServiceFactory = ({
if (!project) throw new NotFoundError({ message: `Project with slug '${projectSlug}' not found` });
const projectId = project.id;
const { permission, ForbidOnInvalidProjectType } = await permissionService.getProjectPermission(
const { permission } = await permissionService.getProjectPermission(
actor,
actorId,
projectId,
actorAuthMethod,
actorOrgId
);
ForbidOnInvalidProjectType(ProjectType.SecretManager);
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionDynamicSecretActions.Lease,
subject(ProjectPermissionSub.DynamicSecrets, { environment: environmentSlug, secretPath: path })
@ -189,7 +187,7 @@ export const dynamicSecretLeaseServiceFactory = ({
})
) as object;
const selectedTTL = ttl || dynamicSecretCfg.defaultTTL;
const selectedTTL = ttl ?? dynamicSecretCfg.defaultTTL;
const { maxTTL } = dynamicSecretCfg;
const expireAt = new Date(dynamicSecretLease.expireAt.getTime() + ms(selectedTTL));
if (maxTTL) {
@ -227,14 +225,13 @@ export const dynamicSecretLeaseServiceFactory = ({
if (!project) throw new NotFoundError({ message: `Project with slug '${projectSlug}' not found` });
const projectId = project.id;
const { permission, ForbidOnInvalidProjectType } = await permissionService.getProjectPermission(
const { permission } = await permissionService.getProjectPermission(
actor,
actorId,
projectId,
actorAuthMethod,
actorOrgId
);
ForbidOnInvalidProjectType(ProjectType.SecretManager);
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionDynamicSecretActions.Lease,
subject(ProjectPermissionSub.DynamicSecrets, { environment: environmentSlug, secretPath: path })
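
The selectedTTL lines in this diff trade between ttl || dynamicSecretCfg.defaultTTL and ttl ?? dynamicSecretCfg.defaultTTL; the two only differ for falsy-but-defined values. A quick illustration with assumed values:

const defaultTTL = "1h";
const emptyTtl: string | undefined = "";
const missingTtl: string | undefined = undefined;

// || falls back on any falsy value, so an empty-string ttl becomes the default.
console.log(emptyTtl || defaultTTL); // "1h"

// ?? falls back only on null/undefined, so an empty string is kept as-is.
console.log(emptyTtl ?? defaultTTL); // ""
console.log(missingTtl ?? defaultTTL); // "1h"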

View File

@ -1,6 +1,6 @@
import { ForbiddenError, subject } from "@casl/ability";
import { ProjectType, SecretKeyEncoding } from "@app/db/schemas";
import { SecretKeyEncoding } from "@app/db/schemas";
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
import {
@ -73,14 +73,13 @@ export const dynamicSecretServiceFactory = ({
if (!project) throw new NotFoundError({ message: `Project with slug '${projectSlug}' not found` });
const projectId = project.id;
const { permission, ForbidOnInvalidProjectType } = await permissionService.getProjectPermission(
const { permission } = await permissionService.getProjectPermission(
actor,
actorId,
projectId,
actorAuthMethod,
actorOrgId
);
ForbidOnInvalidProjectType(ProjectType.SecretManager);
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionDynamicSecretActions.CreateRootCredential,
subject(ProjectPermissionSub.DynamicSecrets, { environment: environmentSlug, secretPath: path })
@ -145,14 +144,13 @@ export const dynamicSecretServiceFactory = ({
const projectId = project.id;
const { permission, ForbidOnInvalidProjectType } = await permissionService.getProjectPermission(
const { permission } = await permissionService.getProjectPermission(
actor,
actorId,
projectId,
actorAuthMethod,
actorOrgId
);
ForbidOnInvalidProjectType(ProjectType.SecretManager);
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionDynamicSecretActions.EditRootCredential,
subject(ProjectPermissionSub.DynamicSecrets, { environment: environmentSlug, secretPath: path })
@ -229,14 +227,13 @@ export const dynamicSecretServiceFactory = ({
const projectId = project.id;
const { permission, ForbidOnInvalidProjectType } = await permissionService.getProjectPermission(
const { permission } = await permissionService.getProjectPermission(
actor,
actorId,
projectId,
actorAuthMethod,
actorOrgId
);
ForbidOnInvalidProjectType(ProjectType.SecretManager);
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionDynamicSecretActions.DeleteRootCredential,
subject(ProjectPermissionSub.DynamicSecrets, { environment: environmentSlug, secretPath: path })

View File

@ -80,7 +80,7 @@ const ElastiCacheUserManager = (credentials: TBasicAWSCredentials, region: strin
}
};
const $addUserToInfisicalGroup = async (userId: string) => {
const addUserToInfisicalGroup = async (userId: string) => {
// figure out whether the default user is already in the group; if it is, we shouldn't add it again
const addUserToGroupCommand = new ModifyUserGroupCommand({
@ -96,7 +96,7 @@ const ElastiCacheUserManager = (credentials: TBasicAWSCredentials, region: strin
await ensureInfisicalGroupExists(clusterName);
await elastiCache.send(new CreateUserCommand(creationInput)); // First create the user
await $addUserToInfisicalGroup(creationInput.UserId); // Then add the user to the group. We know the group is already a part of the cluster because of ensureInfisicalGroupExists()
await addUserToInfisicalGroup(creationInput.UserId); // Then add the user to the group. We know the group is already a part of the cluster because of ensureInfisicalGroupExists()
return {
userId: creationInput.UserId,
@ -127,7 +127,7 @@ const ElastiCacheUserManager = (credentials: TBasicAWSCredentials, region: strin
};
const generatePassword = () => {
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*";
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*$#";
return customAlphabet(charset, 64)();
};
@ -211,8 +211,8 @@ export const AwsElastiCacheDatabaseProvider = (): TDynamicProviderFns => {
return { entityId };
};
const renew = async (_inputs: unknown, entityId: string) => {
// No renewal necessary
const renew = async (inputs: unknown, entityId: string) => {
// Do nothing
return { entityId };
};
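
The generatePassword helpers in these providers all wrap nanoid's customAlphabet; a quick sketch of the call shape (charset shortened for illustration):

import { customAlphabet } from "nanoid";

// customAlphabet returns a generator bound to an allowed character set and a default length.
const generatePassword = customAlphabet("abcdefghijklmnopqrstuvwxyz0123456789-_.~!*", 64);

const password = generatePassword(); // 64 characters drawn from the charset
const shorter = generatePassword(32); // the length can be overridden per call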

View File

@ -33,7 +33,7 @@ export const AwsIamProvider = (): TDynamicProviderFns => {
return providerInputs;
};
const $getClient = async (providerInputs: z.infer<typeof DynamicSecretAwsIamSchema>) => {
const getClient = async (providerInputs: z.infer<typeof DynamicSecretAwsIamSchema>) => {
const client = new IAMClient({
region: providerInputs.region,
credentials: {
@ -47,7 +47,7 @@ export const AwsIamProvider = (): TDynamicProviderFns => {
const validateConnection = async (inputs: unknown) => {
const providerInputs = await validateProviderInputs(inputs);
const client = await $getClient(providerInputs);
const client = await getClient(providerInputs);
const isConnected = await client.send(new GetUserCommand({})).then(() => true);
return isConnected;
@ -55,7 +55,7 @@ export const AwsIamProvider = (): TDynamicProviderFns => {
const create = async (inputs: unknown) => {
const providerInputs = await validateProviderInputs(inputs);
const client = await $getClient(providerInputs);
const client = await getClient(providerInputs);
const username = generateUsername();
const { policyArns, userGroups, policyDocument, awsPath, permissionBoundaryPolicyArn } = providerInputs;
@ -118,7 +118,7 @@ export const AwsIamProvider = (): TDynamicProviderFns => {
const revoke = async (inputs: unknown, entityId: string) => {
const providerInputs = await validateProviderInputs(inputs);
const client = await $getClient(providerInputs);
const client = await getClient(providerInputs);
const username = entityId;
@ -179,8 +179,9 @@ export const AwsIamProvider = (): TDynamicProviderFns => {
};
const renew = async (_inputs: unknown, entityId: string) => {
// No renewal necessary
return { entityId };
// do nothing
const username = entityId;
return { entityId: username };
};
return {

View File

@ -9,7 +9,7 @@ const MSFT_GRAPH_API_URL = "https://graph.microsoft.com/v1.0/";
const MSFT_LOGIN_URL = "https://login.microsoftonline.com";
const generatePassword = () => {
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*";
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*$#";
return customAlphabet(charset, 64)();
};
@ -23,7 +23,7 @@ export const AzureEntraIDProvider = (): TDynamicProviderFns & {
return providerInputs;
};
const $getToken = async (
const getToken = async (
tenantId: string,
applicationId: string,
clientSecret: string
@ -51,13 +51,18 @@ export const AzureEntraIDProvider = (): TDynamicProviderFns & {
const validateConnection = async (inputs: unknown) => {
const providerInputs = await validateProviderInputs(inputs);
const data = await $getToken(providerInputs.tenantId, providerInputs.applicationId, providerInputs.clientSecret);
const data = await getToken(providerInputs.tenantId, providerInputs.applicationId, providerInputs.clientSecret);
return data.success;
};
const renew = async (inputs: unknown, entityId: string) => {
// Do nothing
return { entityId };
};
const create = async (inputs: unknown) => {
const providerInputs = await validateProviderInputs(inputs);
const data = await $getToken(providerInputs.tenantId, providerInputs.applicationId, providerInputs.clientSecret);
const data = await getToken(providerInputs.tenantId, providerInputs.applicationId, providerInputs.clientSecret);
if (!data.success) {
throw new BadRequestError({ message: "Failed to authorize to Microsoft Entra ID" });
}
@ -93,7 +98,7 @@ export const AzureEntraIDProvider = (): TDynamicProviderFns & {
};
const fetchAzureEntraIdUsers = async (tenantId: string, applicationId: string, clientSecret: string) => {
const data = await $getToken(tenantId, applicationId, clientSecret);
const data = await getToken(tenantId, applicationId, clientSecret);
if (!data.success) {
throw new BadRequestError({ message: "Failed to authorize to Microsoft Entra ID" });
}
@ -122,11 +127,6 @@ export const AzureEntraIDProvider = (): TDynamicProviderFns & {
return users;
};
const renew = async (_inputs: unknown, entityId: string) => {
// No renewal necessary
return { entityId };
};
return {
validateProviderInputs,
validateConnection,

View File

@ -9,7 +9,7 @@ import { alphaNumericNanoId } from "@app/lib/nanoid";
import { DynamicSecretCassandraSchema, TDynamicProviderFns } from "./models";
const generatePassword = (size = 48) => {
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*";
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*$#";
return customAlphabet(charset, 48)(size);
};
@ -27,7 +27,7 @@ export const CassandraProvider = (): TDynamicProviderFns => {
return providerInputs;
};
const $getClient = async (providerInputs: z.infer<typeof DynamicSecretCassandraSchema>) => {
const getClient = async (providerInputs: z.infer<typeof DynamicSecretCassandraSchema>) => {
const sslOptions = providerInputs.ca ? { rejectUnauthorized: false, ca: providerInputs.ca } : undefined;
const client = new cassandra.Client({
sslOptions,
@ -47,7 +47,7 @@ export const CassandraProvider = (): TDynamicProviderFns => {
const validateConnection = async (inputs: unknown) => {
const providerInputs = await validateProviderInputs(inputs);
const client = await $getClient(providerInputs);
const client = await getClient(providerInputs);
const isConnected = await client.execute("SELECT * FROM system_schema.keyspaces").then(() => true);
await client.shutdown();
@ -56,7 +56,7 @@ export const CassandraProvider = (): TDynamicProviderFns => {
const create = async (inputs: unknown, expireAt: number) => {
const providerInputs = await validateProviderInputs(inputs);
const client = await $getClient(providerInputs);
const client = await getClient(providerInputs);
const username = generateUsername();
const password = generatePassword();
@ -82,7 +82,7 @@ export const CassandraProvider = (): TDynamicProviderFns => {
const revoke = async (inputs: unknown, entityId: string) => {
const providerInputs = await validateProviderInputs(inputs);
const client = await $getClient(providerInputs);
const client = await getClient(providerInputs);
const username = entityId;
const { keyspace } = providerInputs;
@ -99,24 +99,20 @@ export const CassandraProvider = (): TDynamicProviderFns => {
const renew = async (inputs: unknown, entityId: string, expireAt: number) => {
const providerInputs = await validateProviderInputs(inputs);
if (!providerInputs.renewStatement) return { entityId };
const client = await $getClient(providerInputs);
const client = await getClient(providerInputs);
const username = entityId;
const expiration = new Date(expireAt).toISOString();
const { keyspace } = providerInputs;
const renewStatement = handlebars.compile(providerInputs.renewStatement)({
username: entityId,
keyspace,
expiration
});
const renewStatement = handlebars.compile(providerInputs.revocationStatement)({ username, keyspace, expiration });
const queries = renewStatement.toString().split(";").filter(Boolean);
for await (const query of queries) {
for (const query of queries) {
// eslint-disable-next-line
await client.execute(query);
}
await client.shutdown();
return { entityId };
return { entityId: username };
};
return {
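
The creation, revocation, and renew statements in this provider are handlebars templates filled in with the generated identity; a minimal sketch of that step, with the statement text assumed for illustration:

import handlebars from "handlebars";

// Hypothetical creation statement in the style used above.
const creationStatement =
  "CREATE ROLE {{username}} WITH PASSWORD = '{{password}}' AND LOGIN = true;";

const rendered = handlebars.compile(creationStatement)({
  username: "inf-abc123",
  password: "generated-password",
  expiration: new Date().toISOString()
});

// Statements are split on ";" and executed one at a time, as the provider does above.
const queries = rendered.split(";").filter(Boolean);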

View File

@ -8,7 +8,7 @@ import { verifyHostInputValidity } from "../dynamic-secret-fns";
import { DynamicSecretElasticSearchSchema, ElasticSearchAuthTypes, TDynamicProviderFns } from "./models";
const generatePassword = () => {
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*";
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*$#";
return customAlphabet(charset, 64)();
};
@ -24,7 +24,7 @@ export const ElasticSearchProvider = (): TDynamicProviderFns => {
return providerInputs;
};
const $getClient = async (providerInputs: z.infer<typeof DynamicSecretElasticSearchSchema>) => {
const getClient = async (providerInputs: z.infer<typeof DynamicSecretElasticSearchSchema>) => {
const connection = new ElasticSearchClient({
node: {
url: new URL(`${providerInputs.host}:${providerInputs.port}`),
@ -55,7 +55,7 @@ export const ElasticSearchProvider = (): TDynamicProviderFns => {
const validateConnection = async (inputs: unknown) => {
const providerInputs = await validateProviderInputs(inputs);
const connection = await $getClient(providerInputs);
const connection = await getClient(providerInputs);
const infoResponse = await connection
.info()
@ -67,7 +67,7 @@ export const ElasticSearchProvider = (): TDynamicProviderFns => {
const create = async (inputs: unknown) => {
const providerInputs = await validateProviderInputs(inputs);
const connection = await $getClient(providerInputs);
const connection = await getClient(providerInputs);
const username = generateUsername();
const password = generatePassword();
@ -85,7 +85,7 @@ export const ElasticSearchProvider = (): TDynamicProviderFns => {
const revoke = async (inputs: unknown, entityId: string) => {
const providerInputs = await validateProviderInputs(inputs);
const connection = await $getClient(providerInputs);
const connection = await getClient(providerInputs);
await connection.security.deleteUser({
username: entityId
@ -95,8 +95,8 @@ export const ElasticSearchProvider = (): TDynamicProviderFns => {
return { entityId };
};
const renew = async (_inputs: unknown, entityId: string) => {
// No renewal necessary
const renew = async (inputs: unknown, entityId: string) => {
// Do nothing
return { entityId };
};

View File

@ -6,17 +6,15 @@ import { AzureEntraIDProvider } from "./azure-entra-id";
import { CassandraProvider } from "./cassandra";
import { ElasticSearchProvider } from "./elastic-search";
import { LdapProvider } from "./ldap";
import { DynamicSecretProviders, TDynamicProviderFns } from "./models";
import { DynamicSecretProviders } from "./models";
import { MongoAtlasProvider } from "./mongo-atlas";
import { MongoDBProvider } from "./mongo-db";
import { RabbitMqProvider } from "./rabbit-mq";
import { RedisDatabaseProvider } from "./redis";
import { SapAseProvider } from "./sap-ase";
import { SapHanaProvider } from "./sap-hana";
import { SqlDatabaseProvider } from "./sql-database";
import { TotpProvider } from "./totp";
export const buildDynamicSecretProviders = (): Record<DynamicSecretProviders, TDynamicProviderFns> => ({
export const buildDynamicSecretProviders = () => ({
[DynamicSecretProviders.SqlDatabase]: SqlDatabaseProvider(),
[DynamicSecretProviders.Cassandra]: CassandraProvider(),
[DynamicSecretProviders.AwsIam]: AwsIamProvider(),
@ -29,7 +27,5 @@ export const buildDynamicSecretProviders = (): Record<DynamicSecretProviders, TD
[DynamicSecretProviders.AzureEntraID]: AzureEntraIDProvider(),
[DynamicSecretProviders.Ldap]: LdapProvider(),
[DynamicSecretProviders.SapHana]: SapHanaProvider(),
[DynamicSecretProviders.Snowflake]: SnowflakeProvider(),
[DynamicSecretProviders.Totp]: TotpProvider(),
[DynamicSecretProviders.SapAse]: SapAseProvider()
[DynamicSecretProviders.Snowflake]: SnowflakeProvider()
});
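
Every entry registered above follows the same factory shape seen in the provider files throughout this diff: validateProviderInputs, validateConnection, create, revoke, and renew. A minimal skeleton of a hypothetical provider, sketched from that pattern rather than from the actual TDynamicProviderFns definition:

import { z } from "zod";

// Hypothetical input schema for the sketch.
const ExampleProviderSchema = z.object({ host: z.string(), token: z.string() });

export const ExampleProvider = () => {
  const validateProviderInputs = async (inputs: unknown) => ExampleProviderSchema.parseAsync(inputs);

  const validateConnection = async (inputs: unknown) => {
    const providerInputs = await validateProviderInputs(inputs);
    return Boolean(providerInputs.host); // stand-in for a real connectivity check
  };

  const create = async (inputs: unknown) => {
    await validateProviderInputs(inputs);
    const entityId = `example-${Date.now()}`;
    return { entityId, data: { token: "generated-credential" } };
  };

  const revoke = async (_inputs: unknown, entityId: string) => ({ entityId });

  const renew = async (_inputs: unknown, entityId: string) => ({ entityId });

  return { validateProviderInputs, validateConnection, create, revoke, renew };
};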

View File

@ -52,7 +52,7 @@ export const LdapProvider = (): TDynamicProviderFns => {
return providerInputs;
};
const $getClient = async (providerInputs: z.infer<typeof LdapSchema>): Promise<ldapjs.Client> => {
const getClient = async (providerInputs: z.infer<typeof LdapSchema>): Promise<ldapjs.Client> => {
return new Promise((resolve, reject) => {
const client = ldapjs.createClient({
url: providerInputs.url,
@ -83,7 +83,7 @@ export const LdapProvider = (): TDynamicProviderFns => {
const validateConnection = async (inputs: unknown) => {
const providerInputs = await validateProviderInputs(inputs);
const client = await $getClient(providerInputs);
const client = await getClient(providerInputs);
return client.connected;
};
@ -191,7 +191,7 @@ export const LdapProvider = (): TDynamicProviderFns => {
const create = async (inputs: unknown) => {
const providerInputs = await validateProviderInputs(inputs);
const client = await $getClient(providerInputs);
const client = await getClient(providerInputs);
if (providerInputs.credentialType === LdapCredentialType.Static) {
const dnMatch = providerInputs.rotationLdif.match(/^dn:\s*(.+)/m);
@ -235,7 +235,7 @@ export const LdapProvider = (): TDynamicProviderFns => {
const revoke = async (inputs: unknown, entityId: string) => {
const providerInputs = await validateProviderInputs(inputs);
const client = await $getClient(providerInputs);
const client = await getClient(providerInputs);
if (providerInputs.credentialType === LdapCredentialType.Static) {
const dnMatch = providerInputs.rotationLdif.match(/^dn:\s*(.+)/m);
@ -268,7 +268,7 @@ export const LdapProvider = (): TDynamicProviderFns => {
};
const renew = async (inputs: unknown, entityId: string) => {
// No renewal necessary
// Do nothing
return { entityId };
};

View File

@ -4,8 +4,7 @@ export enum SqlProviders {
Postgres = "postgres",
MySQL = "mysql2",
Oracle = "oracledb",
MsSQL = "mssql",
SapAse = "sap-ase"
MsSQL = "mssql"
}
export enum ElasticSearchAuthTypes {
@ -18,17 +17,6 @@ export enum LdapCredentialType {
Static = "static"
}
export enum TotpConfigType {
URL = "url",
MANUAL = "manual"
}
export enum TotpAlgorithm {
SHA1 = "sha1",
SHA256 = "sha256",
SHA512 = "sha512"
}
export const DynamicSecretRedisDBSchema = z.object({
host: z.string().trim().toLowerCase(),
port: z.number(),
@ -119,16 +107,6 @@ export const DynamicSecretCassandraSchema = z.object({
ca: z.string().optional()
});
export const DynamicSecretSapAseSchema = z.object({
host: z.string().trim().toLowerCase(),
port: z.number(),
database: z.string().trim(),
username: z.string().trim(),
password: z.string().trim(),
creationStatement: z.string().trim(),
revocationStatement: z.string().trim()
});
export const DynamicSecretAwsIamSchema = z.object({
accessKey: z.string().trim().min(1),
secretAccessKey: z.string().trim().min(1),
@ -243,34 +221,6 @@ export const LdapSchema = z.union([
})
]);
export const DynamicSecretTotpSchema = z.discriminatedUnion("configType", [
z.object({
configType: z.literal(TotpConfigType.URL),
url: z
.string()
.url()
.trim()
.min(1)
.refine((val) => {
const urlObj = new URL(val);
const secret = urlObj.searchParams.get("secret");
return Boolean(secret);
}, "OTP URL must contain secret field")
}),
z.object({
configType: z.literal(TotpConfigType.MANUAL),
secret: z
.string()
.trim()
.min(1)
.transform((val) => val.replace(/\s+/g, "")),
period: z.number().optional(),
algorithm: z.nativeEnum(TotpAlgorithm).optional(),
digits: z.number().optional()
})
]);
export enum DynamicSecretProviders {
SqlDatabase = "sql-database",
Cassandra = "cassandra",
@ -284,15 +234,12 @@ export enum DynamicSecretProviders {
AzureEntraID = "azure-entra-id",
Ldap = "ldap",
SapHana = "sap-hana",
Snowflake = "snowflake",
Totp = "totp",
SapAse = "sap-ase"
Snowflake = "snowflake"
}
export const DynamicSecretProviderSchema = z.discriminatedUnion("type", [
z.object({ type: z.literal(DynamicSecretProviders.SqlDatabase), inputs: DynamicSecretSqlDBSchema }),
z.object({ type: z.literal(DynamicSecretProviders.Cassandra), inputs: DynamicSecretCassandraSchema }),
z.object({ type: z.literal(DynamicSecretProviders.SapAse), inputs: DynamicSecretSapAseSchema }),
z.object({ type: z.literal(DynamicSecretProviders.AwsIam), inputs: DynamicSecretAwsIamSchema }),
z.object({ type: z.literal(DynamicSecretProviders.Redis), inputs: DynamicSecretRedisDBSchema }),
z.object({ type: z.literal(DynamicSecretProviders.SapHana), inputs: DynamicSecretSapHanaSchema }),
@ -303,8 +250,7 @@ export const DynamicSecretProviderSchema = z.discriminatedUnion("type", [
z.object({ type: z.literal(DynamicSecretProviders.RabbitMq), inputs: DynamicSecretRabbitMqSchema }),
z.object({ type: z.literal(DynamicSecretProviders.AzureEntraID), inputs: AzureEntraIDSchema }),
z.object({ type: z.literal(DynamicSecretProviders.Ldap), inputs: LdapSchema }),
z.object({ type: z.literal(DynamicSecretProviders.Snowflake), inputs: DynamicSecretSnowflakeSchema }),
z.object({ type: z.literal(DynamicSecretProviders.Totp), inputs: DynamicSecretTotpSchema })
z.object({ type: z.literal(DynamicSecretProviders.Snowflake), inputs: DynamicSecretSnowflakeSchema })
]);
export type TDynamicProviderFns = {

View File

@ -8,7 +8,7 @@ import { alphaNumericNanoId } from "@app/lib/nanoid";
import { DynamicSecretMongoAtlasSchema, TDynamicProviderFns } from "./models";
const generatePassword = (size = 48) => {
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*";
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*$#";
return customAlphabet(charset, 48)(size);
};
@ -22,7 +22,7 @@ export const MongoAtlasProvider = (): TDynamicProviderFns => {
return providerInputs;
};
const $getClient = async (providerInputs: z.infer<typeof DynamicSecretMongoAtlasSchema>) => {
const getClient = async (providerInputs: z.infer<typeof DynamicSecretMongoAtlasSchema>) => {
const client = axios.create({
baseURL: "https://cloud.mongodb.com/api/atlas",
headers: {
@ -40,7 +40,7 @@ export const MongoAtlasProvider = (): TDynamicProviderFns => {
const validateConnection = async (inputs: unknown) => {
const providerInputs = await validateProviderInputs(inputs);
const client = await $getClient(providerInputs);
const client = await getClient(providerInputs);
const isConnected = await client({
method: "GET",
@ -59,7 +59,7 @@ export const MongoAtlasProvider = (): TDynamicProviderFns => {
const create = async (inputs: unknown, expireAt: number) => {
const providerInputs = await validateProviderInputs(inputs);
const client = await $getClient(providerInputs);
const client = await getClient(providerInputs);
const username = generateUsername();
const password = generatePassword();
@ -87,7 +87,7 @@ export const MongoAtlasProvider = (): TDynamicProviderFns => {
const revoke = async (inputs: unknown, entityId: string) => {
const providerInputs = await validateProviderInputs(inputs);
const client = await $getClient(providerInputs);
const client = await getClient(providerInputs);
const username = entityId;
const isExisting = await client({
@ -114,7 +114,7 @@ export const MongoAtlasProvider = (): TDynamicProviderFns => {
const renew = async (inputs: unknown, entityId: string, expireAt: number) => {
const providerInputs = await validateProviderInputs(inputs);
const client = await $getClient(providerInputs);
const client = await getClient(providerInputs);
const username = entityId;
const expiration = new Date(expireAt).toISOString();

View File

@ -8,7 +8,7 @@ import { verifyHostInputValidity } from "../dynamic-secret-fns";
import { DynamicSecretMongoDBSchema, TDynamicProviderFns } from "./models";
const generatePassword = (size = 48) => {
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*";
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*$#";
return customAlphabet(charset, 48)(size);
};
@ -23,7 +23,7 @@ export const MongoDBProvider = (): TDynamicProviderFns => {
return providerInputs;
};
const $getClient = async (providerInputs: z.infer<typeof DynamicSecretMongoDBSchema>) => {
const getClient = async (providerInputs: z.infer<typeof DynamicSecretMongoDBSchema>) => {
const isSrv = !providerInputs.port;
const uri = isSrv
? `mongodb+srv://${providerInputs.host}`
@ -42,7 +42,7 @@ export const MongoDBProvider = (): TDynamicProviderFns => {
const validateConnection = async (inputs: unknown) => {
const providerInputs = await validateProviderInputs(inputs);
const client = await $getClient(providerInputs);
const client = await getClient(providerInputs);
const isConnected = await client
.db(providerInputs.database)
@ -55,7 +55,7 @@ export const MongoDBProvider = (): TDynamicProviderFns => {
const create = async (inputs: unknown) => {
const providerInputs = await validateProviderInputs(inputs);
const client = await $getClient(providerInputs);
const client = await getClient(providerInputs);
const username = generateUsername();
const password = generatePassword();
@ -74,7 +74,7 @@ export const MongoDBProvider = (): TDynamicProviderFns => {
const revoke = async (inputs: unknown, entityId: string) => {
const providerInputs = await validateProviderInputs(inputs);
const client = await $getClient(providerInputs);
const client = await getClient(providerInputs);
const username = entityId;
@ -88,7 +88,6 @@ export const MongoDBProvider = (): TDynamicProviderFns => {
};
const renew = async (_inputs: unknown, entityId: string) => {
// No renewal necessary
return { entityId };
};

View File

@ -11,7 +11,7 @@ import { verifyHostInputValidity } from "../dynamic-secret-fns";
import { DynamicSecretRabbitMqSchema, TDynamicProviderFns } from "./models";
const generatePassword = () => {
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*";
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*$#";
return customAlphabet(charset, 64)();
};
@ -84,7 +84,7 @@ export const RabbitMqProvider = (): TDynamicProviderFns => {
return providerInputs;
};
const $getClient = async (providerInputs: z.infer<typeof DynamicSecretRabbitMqSchema>) => {
const getClient = async (providerInputs: z.infer<typeof DynamicSecretRabbitMqSchema>) => {
const axiosInstance = axios.create({
baseURL: `${removeTrailingSlash(providerInputs.host)}:${providerInputs.port}/api`,
auth: {
@ -105,7 +105,7 @@ export const RabbitMqProvider = (): TDynamicProviderFns => {
const validateConnection = async (inputs: unknown) => {
const providerInputs = await validateProviderInputs(inputs);
const connection = await $getClient(providerInputs);
const connection = await getClient(providerInputs);
const infoResponse = await connection.get("/whoami").then(() => true);
@ -114,7 +114,7 @@ export const RabbitMqProvider = (): TDynamicProviderFns => {
const create = async (inputs: unknown) => {
const providerInputs = await validateProviderInputs(inputs);
const connection = await $getClient(providerInputs);
const connection = await getClient(providerInputs);
const username = generateUsername();
const password = generatePassword();
@ -134,15 +134,15 @@ export const RabbitMqProvider = (): TDynamicProviderFns => {
const revoke = async (inputs: unknown, entityId: string) => {
const providerInputs = await validateProviderInputs(inputs);
const connection = await $getClient(providerInputs);
const connection = await getClient(providerInputs);
await deleteRabbitMqUser({ axiosInstance: connection, usernameToDelete: entityId });
return { entityId };
};
const renew = async (_inputs: unknown, entityId: string) => {
// No renewal necessary
const renew = async (inputs: unknown, entityId: string) => {
// Do nothing
return { entityId };
};
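
The RabbitMQ provider drives the broker's HTTP management API through a preconfigured axios instance; a rough sketch of that pattern follows (the base URL and credentials are placeholders, the paths are the standard management API ones).

```ts
// Sketch of the axios-based management client behind $getClient/getClient.
// PUT /api/users/{name} creates or updates a user; DELETE /api/users/{name} removes it.
import axios from "axios";

const management = axios.create({
  baseURL: "https://rabbitmq.example.com:15672/api",
  auth: { username: "admin", password: "changeme" }
});

async function createUser(name: string, password: string) {
  // Same cheap health check validateConnection performs.
  await management.get("/whoami");
  await management.put(`/users/${name}`, { password, tags: "" });
}

async function deleteUser(name: string) {
  await management.delete(`/users/${name}`);
}
```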

View File

@ -10,7 +10,7 @@ import { verifyHostInputValidity } from "../dynamic-secret-fns";
import { DynamicSecretRedisDBSchema, TDynamicProviderFns } from "./models";
const generatePassword = () => {
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*";
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*$#";
return customAlphabet(charset, 64)();
};
@ -55,7 +55,7 @@ export const RedisDatabaseProvider = (): TDynamicProviderFns => {
return providerInputs;
};
const $getClient = async (providerInputs: z.infer<typeof DynamicSecretRedisDBSchema>) => {
const getClient = async (providerInputs: z.infer<typeof DynamicSecretRedisDBSchema>) => {
let connection: Redis | null = null;
try {
connection = new Redis({
@ -92,7 +92,7 @@ export const RedisDatabaseProvider = (): TDynamicProviderFns => {
const validateConnection = async (inputs: unknown) => {
const providerInputs = await validateProviderInputs(inputs);
const connection = await $getClient(providerInputs);
const connection = await getClient(providerInputs);
const pingResponse = await connection
.ping()
@ -104,7 +104,7 @@ export const RedisDatabaseProvider = (): TDynamicProviderFns => {
const create = async (inputs: unknown, expireAt: number) => {
const providerInputs = await validateProviderInputs(inputs);
const connection = await $getClient(providerInputs);
const connection = await getClient(providerInputs);
const username = generateUsername();
const password = generatePassword();
@ -126,7 +126,7 @@ export const RedisDatabaseProvider = (): TDynamicProviderFns => {
const revoke = async (inputs: unknown, entityId: string) => {
const providerInputs = await validateProviderInputs(inputs);
const connection = await $getClient(providerInputs);
const connection = await getClient(providerInputs);
const username = entityId;
@ -141,9 +141,7 @@ export const RedisDatabaseProvider = (): TDynamicProviderFns => {
const renew = async (inputs: unknown, entityId: string, expireAt: number) => {
const providerInputs = await validateProviderInputs(inputs);
if (!providerInputs.renewStatement) return { entityId };
const connection = await $getClient(providerInputs);
const connection = await getClient(providerInputs);
const username = entityId;
const expiration = new Date(expireAt).toISOString();
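
A minimal ioredis sketch of the connect-and-ping check the Redis provider performs; host and credentials are placeholders.

```ts
// Sketch of the ioredis usage behind $getClient/getClient and validateConnection.
import Redis from "ioredis";

async function validateRedis(): Promise<boolean> {
  const connection = new Redis({
    host: "redis.example.com",
    port: 6379,
    username: "default",
    password: "changeme",
    lazyConnect: true // connect explicitly below instead of on construction
  });
  await connection.connect();
  const pong = await connection.ping(); // resolves to "PONG" on success
  await connection.quit();
  return pong === "PONG";
}
```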

View File

@ -1,145 +0,0 @@
import handlebars from "handlebars";
import { customAlphabet } from "nanoid";
import odbc from "odbc";
import { z } from "zod";
import { BadRequestError } from "@app/lib/errors";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { verifyHostInputValidity } from "../dynamic-secret-fns";
import { DynamicSecretSapAseSchema, TDynamicProviderFns } from "./models";
const generatePassword = (size = 48) => {
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789";
return customAlphabet(charset, 48)(size);
};
const generateUsername = () => {
return alphaNumericNanoId(25);
};
enum SapCommands {
CreateLogin = "sp_addlogin",
DropLogin = "sp_droplogin"
}
export const SapAseProvider = (): TDynamicProviderFns => {
const validateProviderInputs = async (inputs: unknown) => {
const providerInputs = await DynamicSecretSapAseSchema.parseAsync(inputs);
verifyHostInputValidity(providerInputs.host);
return providerInputs;
};
const $getClient = async (providerInputs: z.infer<typeof DynamicSecretSapAseSchema>, useMaster?: boolean) => {
const connectionString =
`DRIVER={FreeTDS};` +
`SERVER=${providerInputs.host};` +
`PORT=${providerInputs.port};` +
`DATABASE=${useMaster ? "master" : providerInputs.database};` +
`UID=${providerInputs.username};` +
`PWD=${providerInputs.password};` +
`TDS_VERSION=5.0`;
const client = await odbc.connect(connectionString);
return client;
};
const validateConnection = async (inputs: unknown) => {
const providerInputs = await validateProviderInputs(inputs);
const masterClient = await $getClient(providerInputs, true);
const client = await $getClient(providerInputs);
const [resultFromMasterDatabase] = await masterClient.query<{ version: string }>("SELECT @@VERSION AS version");
const [resultFromSelectedDatabase] = await client.query<{ version: string }>("SELECT @@VERSION AS version");
if (!resultFromSelectedDatabase.version) {
throw new BadRequestError({
message: "Failed to validate SAP ASE connection, version query failed"
});
}
if (resultFromMasterDatabase.version !== resultFromSelectedDatabase.version) {
throw new BadRequestError({
message: "Failed to validate SAP ASE connection (master), version mismatch"
});
}
return true;
};
const create = async (inputs: unknown) => {
const providerInputs = await validateProviderInputs(inputs);
const username = `inf_${generateUsername()}`;
const password = `${generatePassword()}`;
const client = await $getClient(providerInputs);
const masterClient = await $getClient(providerInputs, true);
const creationStatement = handlebars.compile(providerInputs.creationStatement, { noEscape: true })({
username,
password
});
const queries = creationStatement.trim().replace(/\n/g, "").split(";").filter(Boolean);
for await (const query of queries) {
// If it's an adduser query, we need to first call sp_addlogin on the MASTER database.
// If not done, then the newly created user won't be able to authenticate.
await (query.startsWith(SapCommands.CreateLogin) ? masterClient : client).query(query);
}
await masterClient.close();
await client.close();
return { entityId: username, data: { DB_USERNAME: username, DB_PASSWORD: password } };
};
const revoke = async (inputs: unknown, username: string) => {
const providerInputs = await validateProviderInputs(inputs);
const revokeStatement = handlebars.compile(providerInputs.revocationStatement, { noEscape: true })({
username
});
const queries = revokeStatement.trim().replace(/\n/g, "").split(";").filter(Boolean);
const client = await $getClient(providerInputs);
const masterClient = await $getClient(providerInputs, true);
// Get all processes for this login and kill them. If there are active connections to the database when drop login happens, it will throw an error.
const result = await masterClient.query<{ spid?: string }>(`sp_who '${username}'`);
if (result && result.length > 0) {
for await (const row of result) {
if (row.spid) {
await masterClient.query(`KILL ${row.spid.trim()}`);
}
}
}
for await (const query of queries) {
await (query.startsWith(SapCommands.DropLogin) ? masterClient : client).query(query);
}
await masterClient.close();
await client.close();
return { entityId: username };
};
const renew = async (_: unknown, username: string) => {
// No need for renewal
return { entityId: username };
};
return {
validateProviderInputs,
validateConnection,
create,
revoke,
renew
};
};
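
The deleted SAP ASE provider above talks to the server over ODBC with the FreeTDS driver; a stripped-down sketch of that connection flow (server, credentials, and database are placeholders):

```ts
// Sketch of the odbc/FreeTDS connection used by the removed SapAseProvider.
import odbc from "odbc";

async function getServerVersion() {
  const connectionString =
    "DRIVER={FreeTDS};" +
    "SERVER=sap-ase.example.com;" +
    "PORT=5000;" +
    "DATABASE=master;" +
    "UID=sa;" +
    "PWD=changeme;" +
    "TDS_VERSION=5.0";

  const client = await odbc.connect(connectionString);
  try {
    // The same probe validateConnection ran against both databases.
    const [row] = await client.query<{ version: string }>("SELECT @@VERSION AS version");
    return row.version;
  } finally {
    await client.close();
  }
}
```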

View File

@ -32,7 +32,7 @@ export const SapHanaProvider = (): TDynamicProviderFns => {
return providerInputs;
};
const $getClient = async (providerInputs: z.infer<typeof DynamicSecretSapHanaSchema>) => {
const getClient = async (providerInputs: z.infer<typeof DynamicSecretSapHanaSchema>) => {
const client = hdb.createClient({
host: providerInputs.host,
port: providerInputs.port,
@ -64,9 +64,9 @@ export const SapHanaProvider = (): TDynamicProviderFns => {
const validateConnection = async (inputs: unknown) => {
const providerInputs = await validateProviderInputs(inputs);
const client = await $getClient(providerInputs);
const client = await getClient(providerInputs);
const testResult = await new Promise<boolean>((resolve, reject) => {
const testResult: boolean = await new Promise((resolve, reject) => {
client.exec("SELECT 1 FROM DUMMY;", (err: any) => {
if (err) {
reject();
@ -86,7 +86,7 @@ export const SapHanaProvider = (): TDynamicProviderFns => {
const password = generatePassword();
const expiration = new Date(expireAt).toISOString();
const client = await $getClient(providerInputs);
const client = await getClient(providerInputs);
const creationStatement = handlebars.compile(providerInputs.creationStatement, { noEscape: true })({
username,
password,
@ -114,7 +114,7 @@ export const SapHanaProvider = (): TDynamicProviderFns => {
const revoke = async (inputs: unknown, username: string) => {
const providerInputs = await validateProviderInputs(inputs);
const client = await $getClient(providerInputs);
const client = await getClient(providerInputs);
const revokeStatement = handlebars.compile(providerInputs.revocationStatement)({ username });
const queries = revokeStatement.toString().split(";").filter(Boolean);
for await (const query of queries) {
@ -135,15 +135,13 @@ export const SapHanaProvider = (): TDynamicProviderFns => {
return { entityId: username };
};
const renew = async (inputs: unknown, entityId: string, expireAt: number) => {
const renew = async (inputs: unknown, username: string, expireAt: number) => {
const providerInputs = await validateProviderInputs(inputs);
if (!providerInputs.renewStatement) return { entityId };
const client = await $getClient(providerInputs);
const client = await getClient(providerInputs);
try {
const expiration = new Date(expireAt).toISOString();
const renewStatement = handlebars.compile(providerInputs.renewStatement)({ username: entityId, expiration });
const renewStatement = handlebars.compile(providerInputs.renewStatement)({ username, expiration });
const queries = renewStatement.toString().split(";").filter(Boolean);
for await (const query of queries) {
await new Promise((resolve, reject) => {
@ -163,7 +161,7 @@ export const SapHanaProvider = (): TDynamicProviderFns => {
client.disconnect();
}
return { entityId };
return { entityId: username };
};
return {
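
The SAP HANA hunks wrap the callback-style `exec` of the `hdb` client in Promises; in isolation, that wrapper looks roughly like the sketch below (connection options are placeholders).

```ts
// Sketch of promisifying the hdb client's callback API, as SapHanaProvider does.
import hdb from "hdb";

const client = hdb.createClient({
  host: "hana.example.com",
  port: 39015,
  user: "SYSTEM",
  password: "changeme"
});

const connect = () =>
  new Promise<void>((resolve, reject) => {
    client.connect((err: Error | null) => (err ? reject(err) : resolve()));
  });

const exec = (sql: string) =>
  new Promise<unknown>((resolve, reject) => {
    client.exec(sql, (err: Error | null, rows: unknown) => (err ? reject(err) : resolve(rows)));
  });

async function validate() {
  await connect();
  try {
    // Same trivial statement validateConnection runs.
    await exec("SELECT 1 FROM DUMMY");
    return true;
  } finally {
    client.disconnect();
  }
}
```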

View File

@ -12,7 +12,7 @@ import { DynamicSecretSnowflakeSchema, TDynamicProviderFns } from "./models";
const noop = () => {};
const generatePassword = (size = 48) => {
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*";
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*$#";
return customAlphabet(charset, 48)(size);
};
@ -34,7 +34,7 @@ export const SnowflakeProvider = (): TDynamicProviderFns => {
return providerInputs;
};
const $getClient = async (providerInputs: z.infer<typeof DynamicSecretSnowflakeSchema>) => {
const getClient = async (providerInputs: z.infer<typeof DynamicSecretSnowflakeSchema>) => {
const client = snowflake.createConnection({
account: `${providerInputs.orgId}-${providerInputs.accountId}`,
username: providerInputs.username,
@ -49,7 +49,7 @@ export const SnowflakeProvider = (): TDynamicProviderFns => {
const validateConnection = async (inputs: unknown) => {
const providerInputs = await validateProviderInputs(inputs);
const client = await $getClient(providerInputs);
const client = await getClient(providerInputs);
let isValidConnection: boolean;
@ -72,7 +72,7 @@ export const SnowflakeProvider = (): TDynamicProviderFns => {
const create = async (inputs: unknown, expireAt: number) => {
const providerInputs = await validateProviderInputs(inputs);
const client = await $getClient(providerInputs);
const client = await getClient(providerInputs);
const username = generateUsername();
const password = generatePassword();
@ -107,7 +107,7 @@ export const SnowflakeProvider = (): TDynamicProviderFns => {
const revoke = async (inputs: unknown, username: string) => {
const providerInputs = await validateProviderInputs(inputs);
const client = await $getClient(providerInputs);
const client = await getClient(providerInputs);
try {
const revokeStatement = handlebars.compile(providerInputs.revocationStatement)({ username });
@ -131,16 +131,17 @@ export const SnowflakeProvider = (): TDynamicProviderFns => {
return { entityId: username };
};
const renew = async (inputs: unknown, entityId: string, expireAt: number) => {
const renew = async (inputs: unknown, username: string, expireAt: number) => {
const providerInputs = await validateProviderInputs(inputs);
if (!providerInputs.renewStatement) return { entityId };
const client = await $getClient(providerInputs);
if (!providerInputs.renewStatement) return { entityId: username };
const client = await getClient(providerInputs);
try {
const expiration = getDaysToExpiry(new Date(expireAt));
const renewStatement = handlebars.compile(providerInputs.renewStatement)({
username: entityId,
username,
expiration
});
@ -160,7 +161,7 @@ export const SnowflakeProvider = (): TDynamicProviderFns => {
client.destroy(noop);
}
return { entityId };
return { entityId: username };
};
return {

View File

@ -14,7 +14,7 @@ const generatePassword = (provider: SqlProviders) => {
// oracle has limit of 48 password length
const size = provider === SqlProviders.Oracle ? 30 : 48;
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*";
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*$#";
return customAlphabet(charset, 48)(size);
};
@ -32,7 +32,7 @@ export const SqlDatabaseProvider = (): TDynamicProviderFns => {
return providerInputs;
};
const $getClient = async (providerInputs: z.infer<typeof DynamicSecretSqlDBSchema>) => {
const getClient = async (providerInputs: z.infer<typeof DynamicSecretSqlDBSchema>) => {
const ssl = providerInputs.ca ? { rejectUnauthorized: false, ca: providerInputs.ca } : undefined;
const db = knex({
client: providerInputs.client,
@ -52,7 +52,7 @@ export const SqlDatabaseProvider = (): TDynamicProviderFns => {
const validateConnection = async (inputs: unknown) => {
const providerInputs = await validateProviderInputs(inputs);
const db = await $getClient(providerInputs);
const db = await getClient(providerInputs);
// oracle needs from keyword
const testStatement = providerInputs.client === SqlProviders.Oracle ? "SELECT 1 FROM DUAL" : "SELECT 1";
@ -63,7 +63,7 @@ export const SqlDatabaseProvider = (): TDynamicProviderFns => {
const create = async (inputs: unknown, expireAt: number) => {
const providerInputs = await validateProviderInputs(inputs);
const db = await $getClient(providerInputs);
const db = await getClient(providerInputs);
const username = generateUsername(providerInputs.client);
const password = generatePassword(providerInputs.client);
@ -90,7 +90,7 @@ export const SqlDatabaseProvider = (): TDynamicProviderFns => {
const revoke = async (inputs: unknown, entityId: string) => {
const providerInputs = await validateProviderInputs(inputs);
const db = await $getClient(providerInputs);
const db = await getClient(providerInputs);
const username = entityId;
const { database } = providerInputs;
@ -110,19 +110,13 @@ export const SqlDatabaseProvider = (): TDynamicProviderFns => {
const renew = async (inputs: unknown, entityId: string, expireAt: number) => {
const providerInputs = await validateProviderInputs(inputs);
if (!providerInputs.renewStatement) return { entityId };
const db = await $getClient(providerInputs);
const db = await getClient(providerInputs);
const username = entityId;
const expiration = new Date(expireAt).toISOString();
const { database } = providerInputs;
const renewStatement = handlebars.compile(providerInputs.renewStatement)({
username: entityId,
expiration,
database
});
const renewStatement = handlebars.compile(providerInputs.renewStatement)({ username, expiration, database });
if (renewStatement) {
const queries = renewStatement.toString().split(";").filter(Boolean);
await db.transaction(async (tx) => {
@ -134,7 +128,7 @@ export const SqlDatabaseProvider = (): TDynamicProviderFns => {
}
await db.destroy();
return { entityId };
return { entityId: username };
};
return {
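
The SQL provider compiles a handlebars template into SQL, splits it on ";", and runs the statements inside a knex transaction; a condensed sketch of that flow (the template and connection settings are illustrative, not taken from the diff):

```ts
// Sketch of the handlebars + knex pattern used by SqlDatabaseProvider.
import handlebars from "handlebars";
import knex from "knex";

const db = knex({
  client: "pg", // the provider derives this from providerInputs.client
  connection: {
    host: "db.example.com",
    port: 5432,
    user: "admin",
    password: "changeme",
    database: "app"
  }
});

async function renewCredential(username: string, expiration: string, database: string) {
  const renewStatement = handlebars.compile(
    `ALTER ROLE "{{username}}" VALID UNTIL '{{expiration}}';`
  )({ username, expiration, database });

  const queries = renewStatement.split(";").filter(Boolean);
  await db.transaction(async (tx) => {
    for (const query of queries) {
      // eslint-disable-next-line no-await-in-loop
      await tx.raw(query);
    }
  });
  await db.destroy();
}
```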

View File

@ -1,90 +0,0 @@
import { authenticator } from "otplib";
import { HashAlgorithms } from "otplib/core";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { DynamicSecretTotpSchema, TDynamicProviderFns, TotpConfigType } from "./models";
export const TotpProvider = (): TDynamicProviderFns => {
const validateProviderInputs = async (inputs: unknown) => {
const providerInputs = await DynamicSecretTotpSchema.parseAsync(inputs);
return providerInputs;
};
const validateConnection = async () => {
return true;
};
const create = async (inputs: unknown) => {
const providerInputs = await validateProviderInputs(inputs);
const entityId = alphaNumericNanoId(32);
const authenticatorInstance = authenticator.clone();
let secret: string;
let period: number | null | undefined;
let digits: number | null | undefined;
let algorithm: HashAlgorithms | null | undefined;
if (providerInputs.configType === TotpConfigType.URL) {
const urlObj = new URL(providerInputs.url);
secret = urlObj.searchParams.get("secret") as string;
const periodFromUrl = urlObj.searchParams.get("period");
const digitsFromUrl = urlObj.searchParams.get("digits");
const algorithmFromUrl = urlObj.searchParams.get("algorithm");
if (periodFromUrl) {
period = +periodFromUrl;
}
if (digitsFromUrl) {
digits = +digitsFromUrl;
}
if (algorithmFromUrl) {
algorithm = algorithmFromUrl.toLowerCase() as HashAlgorithms;
}
} else {
secret = providerInputs.secret;
period = providerInputs.period;
digits = providerInputs.digits;
algorithm = providerInputs.algorithm as unknown as HashAlgorithms;
}
if (digits) {
authenticatorInstance.options = { digits };
}
if (algorithm) {
authenticatorInstance.options = { algorithm };
}
if (period) {
authenticatorInstance.options = { step: period };
}
return {
entityId,
data: { TOTP: authenticatorInstance.generate(secret), TIME_REMAINING: authenticatorInstance.timeRemaining() }
};
};
const revoke = async (_inputs: unknown, entityId: string) => {
return { entityId };
};
// eslint-disable-next-line @typescript-eslint/no-unused-vars
const renew = async (_inputs: unknown, entityId: string) => {
// No renewal necessary
return { entityId };
};
return {
validateProviderInputs,
validateConnection,
create,
revoke,
renew
};
};
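
The removed TOTP provider is a thin wrapper around otplib; once the secret and options are extracted (from explicit config or from an otpauth:// URL), code generation reduces to the following sketch (the secret is a placeholder).

```ts
// Sketch of the otplib usage behind the removed TotpProvider.
import { authenticator } from "otplib";

// clone() gives an isolated instance so per-secret options (digits, step,
// algorithm) do not leak into the shared singleton, which is why the provider
// called authenticator.clone().
const instance = authenticator.clone();
instance.options = { digits: 6, step: 30 };

const secret = "JBSWY3DPEHPK3PXP"; // placeholder base32 secret

const token = instance.generate(secret);
console.log(token, instance.timeRemaining()); // e.g. "492039" and seconds left in the step
console.log(instance.check(token, secret));   // true while the current step is valid
```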

View File

@ -20,8 +20,7 @@ import {
TUpdateExternalKmsDTO
} from "./external-kms-types";
import { AwsKmsProviderFactory } from "./providers/aws-kms";
import { GcpKmsProviderFactory } from "./providers/gcp-kms";
import { ExternalKmsAwsSchema, ExternalKmsGcpSchema, KmsProviders, TExternalKmsGcpSchema } from "./providers/model";
import { ExternalKmsAwsSchema, KmsProviders } from "./providers/model";
type TExternalKmsServiceFactoryDep = {
externalKmsDAL: TExternalKmsDALFactory;
@ -79,13 +78,6 @@ export const externalKmsServiceFactory = ({
await externalKms.validateConnection();
}
break;
case KmsProviders.Gcp:
{
const externalKms = await GcpKmsProviderFactory({ inputs: provider.inputs });
await externalKms.validateConnection();
sanitizedProviderInput = JSON.stringify(provider.inputs);
}
break;
default:
throw new BadRequestError({ message: "external kms provided is invalid" });
}
@ -96,7 +88,7 @@ export const externalKmsServiceFactory = ({
});
const { cipherTextBlob: encryptedProviderInputs } = orgDataKeyEncryptor({
plainText: Buffer.from(sanitizedProviderInput)
plainText: Buffer.from(sanitizedProviderInput, "utf8")
});
const externalKms = await externalKmsDAL.transaction(async (tx) => {
@ -170,7 +162,7 @@ export const externalKmsServiceFactory = ({
case KmsProviders.Aws:
{
const decryptedProviderInput = await ExternalKmsAwsSchema.parseAsync(
JSON.parse(decryptedProviderInputBlob.toString())
JSON.parse(decryptedProviderInputBlob.toString("utf8"))
);
const updatedProviderInput = { ...decryptedProviderInput, ...provider.inputs };
const externalKms = await AwsKmsProviderFactory({ inputs: updatedProviderInput });
@ -178,17 +170,6 @@ export const externalKmsServiceFactory = ({
sanitizedProviderInput = JSON.stringify(updatedProviderInput);
}
break;
case KmsProviders.Gcp:
{
const decryptedProviderInput = await ExternalKmsGcpSchema.parseAsync(
JSON.parse(decryptedProviderInputBlob.toString())
);
const updatedProviderInput = { ...decryptedProviderInput, ...provider.inputs };
const externalKms = await GcpKmsProviderFactory({ inputs: updatedProviderInput });
await externalKms.validateConnection();
sanitizedProviderInput = JSON.stringify(updatedProviderInput);
}
break;
default:
throw new BadRequestError({ message: "external kms provided is invalid" });
}
@ -197,7 +178,7 @@ export const externalKmsServiceFactory = ({
let encryptedProviderInputs: Buffer | undefined;
if (sanitizedProviderInput) {
const { cipherTextBlob } = orgDataKeyEncryptor({
plainText: Buffer.from(sanitizedProviderInput)
plainText: Buffer.from(sanitizedProviderInput, "utf8")
});
encryptedProviderInputs = cipherTextBlob;
}
@ -290,17 +271,10 @@ export const externalKmsServiceFactory = ({
switch (externalKmsDoc.provider) {
case KmsProviders.Aws: {
const decryptedProviderInput = await ExternalKmsAwsSchema.parseAsync(
JSON.parse(decryptedProviderInputBlob.toString())
JSON.parse(decryptedProviderInputBlob.toString("utf8"))
);
return { ...kmsDoc, external: { ...externalKmsDoc, providerInput: decryptedProviderInput } };
}
case KmsProviders.Gcp: {
const decryptedProviderInput = await ExternalKmsGcpSchema.parseAsync(
JSON.parse(decryptedProviderInputBlob.toString())
);
return { ...kmsDoc, external: { ...externalKmsDoc, providerInput: decryptedProviderInput } };
}
default:
throw new BadRequestError({ message: "external kms provided is invalid" });
}
@ -338,34 +312,21 @@ export const externalKmsServiceFactory = ({
switch (externalKmsDoc.provider) {
case KmsProviders.Aws: {
const decryptedProviderInput = await ExternalKmsAwsSchema.parseAsync(
JSON.parse(decryptedProviderInputBlob.toString())
JSON.parse(decryptedProviderInputBlob.toString("utf8"))
);
return { ...kmsDoc, external: { ...externalKmsDoc, providerInput: decryptedProviderInput } };
}
case KmsProviders.Gcp: {
const decryptedProviderInput = await ExternalKmsGcpSchema.parseAsync(
JSON.parse(decryptedProviderInputBlob.toString())
);
return { ...kmsDoc, external: { ...externalKmsDoc, providerInput: decryptedProviderInput } };
}
default:
throw new BadRequestError({ message: "external kms provided is invalid" });
}
};
const fetchGcpKeys = async ({ credential, gcpRegion }: Pick<TExternalKmsGcpSchema, "credential" | "gcpRegion">) => {
const externalKms = await GcpKmsProviderFactory({ inputs: { credential, gcpRegion, keyName: "" } });
return externalKms.getKeysList();
};
return {
create,
updateById,
deleteById,
list,
findById,
findByName,
fetchGcpKeys
findByName
};
};
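
The `Buffer.from(..., "utf8")` edits in this file are behaviour-neutral, since utf8 is already the default encoding for `Buffer.from(string)`:

```ts
// The explicit "utf8" argument does not change the produced bytes.
const a = Buffer.from("sanitized-provider-input");
const b = Buffer.from("sanitized-provider-input", "utf8");
console.log(a.equals(b)); // true
```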

View File

@ -1,113 +0,0 @@
import { KeyManagementServiceClient } from "@google-cloud/kms";
import { BadRequestError } from "@app/lib/errors";
import { logger } from "@app/lib/logger";
import { ExternalKmsGcpSchema, TExternalKmsGcpClientSchema, TExternalKmsProviderFns } from "./model";
const getGcpKmsClient = async ({ credential, gcpRegion }: TExternalKmsGcpClientSchema) => {
const gcpKmsClient = new KeyManagementServiceClient({
credentials: credential
});
const projectId = credential.project_id;
const locationName = gcpKmsClient.locationPath(projectId, gcpRegion);
return {
gcpKmsClient,
locationName
};
};
type GcpKmsProviderArgs = {
inputs: unknown;
};
type TGcpKmsProviderFactoryReturn = TExternalKmsProviderFns & {
getKeysList: () => Promise<{ keys: string[] }>;
};
export const GcpKmsProviderFactory = async ({ inputs }: GcpKmsProviderArgs): Promise<TGcpKmsProviderFactoryReturn> => {
const { credential, gcpRegion, keyName } = await ExternalKmsGcpSchema.parseAsync(inputs);
const { gcpKmsClient, locationName } = await getGcpKmsClient({
credential,
gcpRegion
});
const validateConnection = async () => {
try {
await gcpKmsClient.listKeyRings({
parent: locationName
});
return true;
} catch (error) {
throw new BadRequestError({
message: "Cannot connect to GCP KMS"
});
}
};
// Used when adding the KMS to fetch the list of keys in specified region
const getKeysList = async () => {
try {
const [keyRings] = await gcpKmsClient.listKeyRings({
parent: locationName
});
const validKeyRings = keyRings
.filter(
(keyRing): keyRing is { name: string } =>
keyRing !== null && typeof keyRing === "object" && "name" in keyRing && typeof keyRing.name === "string"
)
.map((keyRing) => keyRing.name);
const keyList: string[] = [];
const keyListPromises = validKeyRings.map((keyRingName) =>
gcpKmsClient
.listCryptoKeys({
parent: keyRingName
})
.then(([cryptoKeys]) =>
cryptoKeys
.filter(
(key): key is { name: string } =>
key !== null && typeof key === "object" && "name" in key && typeof key.name === "string"
)
.map((key) => key.name)
)
);
const cryptoKeyLists = await Promise.all(keyListPromises);
keyList.push(...cryptoKeyLists.flat());
return { keys: keyList };
} catch (error) {
logger.error(error, "Could not validate GCP KMS connection and credentials");
throw new BadRequestError({
message: "Could not validate GCP KMS connection and credentials",
error
});
}
};
const encrypt = async (data: Buffer) => {
const encryptedText = await gcpKmsClient.encrypt({
name: keyName,
plaintext: data
});
if (!encryptedText[0].ciphertext) throw new Error("encryption failed");
return { encryptedBlob: Buffer.from(encryptedText[0].ciphertext) };
};
const decrypt = async (encryptedBlob: Buffer) => {
const decryptedText = await gcpKmsClient.decrypt({
name: keyName,
ciphertext: encryptedBlob
});
if (!decryptedText[0].plaintext) throw new Error("decryption failed");
return { data: Buffer.from(decryptedText[0].plaintext) };
};
return {
validateConnection,
getKeysList,
encrypt,
decrypt
};
};

View File

@ -1,23 +1,13 @@
import { z } from "zod";
export enum KmsProviders {
Aws = "aws",
Gcp = "gcp"
Aws = "aws"
}
export enum KmsAwsCredentialType {
AssumeRole = "assume-role",
AccessKey = "access-key"
}
// Google uses snake_case for their enum values and we need to match that
export enum KmsGcpCredentialType {
ServiceAccount = "service_account"
}
export enum KmsGcpKeyFetchAuthType {
Credential = "credential",
Kms = "kmsId"
}
export const ExternalKmsAwsSchema = z.object({
credential: z
@ -52,44 +42,14 @@ export const ExternalKmsAwsSchema = z.object({
});
export type TExternalKmsAwsSchema = z.infer<typeof ExternalKmsAwsSchema>;
export const ExternalKmsGcpCredentialSchema = z.object({
type: z.literal(KmsGcpCredentialType.ServiceAccount),
project_id: z.string().min(1),
private_key_id: z.string().min(1),
private_key: z.string().min(1),
client_email: z.string().min(1),
client_id: z.string().min(1),
auth_uri: z.string().min(1),
token_uri: z.string().min(1),
auth_provider_x509_cert_url: z.string().min(1),
client_x509_cert_url: z.string().min(1),
universe_domain: z.string().min(1)
});
export type TExternalKmsGcpCredentialSchema = z.infer<typeof ExternalKmsGcpCredentialSchema>;
export const ExternalKmsGcpSchema = z.object({
credential: ExternalKmsGcpCredentialSchema.describe("GCP Service Account JSON credential to connect"),
gcpRegion: z.string().trim().describe("GCP region where the KMS key is located"),
keyName: z.string().trim().describe("GCP key name")
});
export type TExternalKmsGcpSchema = z.infer<typeof ExternalKmsGcpSchema>;
const ExternalKmsGcpClientSchema = ExternalKmsGcpSchema.pick({ gcpRegion: true }).extend({
credential: ExternalKmsGcpCredentialSchema
});
export type TExternalKmsGcpClientSchema = z.infer<typeof ExternalKmsGcpClientSchema>;
// The root schema of the JSON
export const ExternalKmsInputSchema = z.discriminatedUnion("type", [
z.object({ type: z.literal(KmsProviders.Aws), inputs: ExternalKmsAwsSchema }),
z.object({ type: z.literal(KmsProviders.Gcp), inputs: ExternalKmsGcpSchema })
z.object({ type: z.literal(KmsProviders.Aws), inputs: ExternalKmsAwsSchema })
]);
export type TExternalKmsInputSchema = z.infer<typeof ExternalKmsInputSchema>;
export const ExternalKmsInputUpdateSchema = z.discriminatedUnion("type", [
z.object({ type: z.literal(KmsProviders.Aws), inputs: ExternalKmsAwsSchema.partial() }),
z.object({ type: z.literal(KmsProviders.Gcp), inputs: ExternalKmsGcpSchema.partial() })
z.object({ type: z.literal(KmsProviders.Aws), inputs: ExternalKmsAwsSchema.partial() })
]);
export type TExternalKmsInputUpdateSchema = z.infer<typeof ExternalKmsInputUpdateSchema>;
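
With the GCP variant dropped, `ExternalKmsInputSchema` becomes a single-member discriminated union; the effect on parsing is easiest to see in a small sketch (the AWS schema below is a simplified stand-in for the real `ExternalKmsAwsSchema`).

```ts
// Sketch of how a zod discriminated union dispatches on the "type" field.
import { z } from "zod";

const SimplifiedAwsSchema = z.object({ awsRegion: z.string().min(1) });

const ExternalKmsInputSchema = z.discriminatedUnion("type", [
  z.object({ type: z.literal("aws"), inputs: SimplifiedAwsSchema })
]);

// Accepted:
ExternalKmsInputSchema.parse({ type: "aws", inputs: { awsRegion: "us-east-1" } });

// Rejected, since "gcp" is no longer a member of the union:
const result = ExternalKmsInputSchema.safeParse({ type: "gcp", inputs: {} });
console.log(result.success); // false
```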

View File

@ -5,8 +5,6 @@ import { TableName, TGroups } from "@app/db/schemas";
import { DatabaseError } from "@app/lib/errors";
import { buildFindFilter, ormify, selectAllTableCols, TFindFilter, TFindOpt } from "@app/lib/knex";
import { EFilterReturnedUsers } from "./group-types";
export type TGroupDALFactory = ReturnType<typeof groupDALFactory>;
export const groupDALFactory = (db: TDbClient) => {
@ -68,8 +66,7 @@ export const groupDALFactory = (db: TDbClient) => {
offset = 0,
limit,
username, // deprecated in favor of search
search,
filter
search
}: {
orgId: string;
groupId: string;
@ -77,7 +74,6 @@ export const groupDALFactory = (db: TDbClient) => {
limit?: number;
username?: string;
search?: string;
filter?: EFilterReturnedUsers;
}) => {
try {
const query = db
@ -94,7 +90,6 @@ export const groupDALFactory = (db: TDbClient) => {
.select(
db.ref("id").withSchema(TableName.OrgMembership),
db.ref("groupId").withSchema(TableName.UserGroupMembership),
db.ref("createdAt").withSchema(TableName.UserGroupMembership).as("joinedGroupAt"),
db.ref("email").withSchema(TableName.Users),
db.ref("username").withSchema(TableName.Users),
db.ref("firstName").withSchema(TableName.Users),
@ -116,37 +111,17 @@ export const groupDALFactory = (db: TDbClient) => {
void query.andWhere(`${TableName.Users}.username`, "ilike", `%${username}%`);
}
switch (filter) {
case EFilterReturnedUsers.EXISTING_MEMBERS:
void query.andWhere(`${TableName.UserGroupMembership}.createdAt`, "is not", null);
break;
case EFilterReturnedUsers.NON_MEMBERS:
void query.andWhere(`${TableName.UserGroupMembership}.createdAt`, "is", null);
break;
default:
break;
}
const members = await query;
return {
members: members.map(
({
email,
username: memberUsername,
firstName,
lastName,
userId,
groupId: memberGroupId,
joinedGroupAt
}) => ({
({ email, username: memberUsername, firstName, lastName, userId, groupId: memberGroupId }) => ({
id: userId,
email,
username: memberUsername,
firstName,
lastName,
isPartOfGroup: !!memberGroupId,
joinedGroupAt
isPartOfGroup: !!memberGroupId
})
),
// @ts-expect-error col select is raw and not strongly typed

View File

@ -222,8 +222,7 @@ export const groupServiceFactory = ({
actorId,
actorAuthMethod,
actorOrgId,
search,
filter
search
}: TListGroupUsersDTO) => {
if (!actorOrgId) throw new UnauthorizedError({ message: "No organization ID provided in request" });
@ -252,8 +251,7 @@ export const groupServiceFactory = ({
offset,
limit,
username,
search,
filter
search
});
return { users: members, totalCount };
@ -285,8 +283,8 @@ export const groupServiceFactory = ({
const { permission: groupRolePermission } = await permissionService.getOrgPermissionByRole(group.role, actorOrgId);
// check if user has broader or equal to privileges than group
const hasRequiredPrivileges = isAtLeastAsPrivileged(permission, groupRolePermission);
if (!hasRequiredPrivileges)
const hasRequiredPriviledges = isAtLeastAsPrivileged(permission, groupRolePermission);
if (!hasRequiredPriviledges)
throw new ForbiddenRequestError({ message: "Failed to add user to more privileged group" });
const user = await userDAL.findOne({ username });
@ -340,8 +338,8 @@ export const groupServiceFactory = ({
const { permission: groupRolePermission } = await permissionService.getOrgPermissionByRole(group.role, actorOrgId);
// check if user has broader or equal to privileges than group
const hasRequiredPrivileges = isAtLeastAsPrivileged(permission, groupRolePermission);
if (!hasRequiredPrivileges)
const hasRequiredPriviledges = isAtLeastAsPrivileged(permission, groupRolePermission);
if (!hasRequiredPriviledges)
throw new ForbiddenRequestError({ message: "Failed to delete user from more privileged group" });
const user = await userDAL.findOne({ username });

View File

@ -39,7 +39,6 @@ export type TListGroupUsersDTO = {
limit: number;
username?: string;
search?: string;
filter?: EFilterReturnedUsers;
} & TGenericPermission;
export type TAddUserToGroupDTO = {
@ -102,8 +101,3 @@ export type TConvertPendingGroupAdditionsToGroupMemberships = {
projectBotDAL: Pick<TProjectBotDALFactory, "findOne">;
tx?: Knex;
};
export enum EFilterReturnedUsers {
EXISTING_MEMBERS = "existingMembers",
NON_MEMBERS = "nonMembers"
}

View File

@ -27,7 +27,7 @@ export const initializeHsmModule = () => {
logger.info("PKCS#11 module initialized");
} catch (err) {
logger.error(err, "Failed to initialize PKCS#11 module");
logger.error("Failed to initialize PKCS#11 module:", err);
throw err;
}
};
@ -39,7 +39,7 @@ export const initializeHsmModule = () => {
isInitialized = false;
logger.info("PKCS#11 module finalized");
} catch (err) {
logger.error(err, "Failed to finalize PKCS#11 module");
logger.error("Failed to finalize PKCS#11 module:", err);
throw err;
}
}
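
The two hunks above only swap the argument order on `logger.error`. Assuming a pino-style logger (which the surrounding code appears to use), the order matters: a leading object is serialized as structured fields, while arguments after a leading string are treated as printf-style interpolation values. A tiny sketch:

```ts
// Sketch of why argument order matters for a pino-style logger.
import pino from "pino";

const logger = pino();
const err = new Error("boom");

// Error object first: serialized under the "err" key with its stack trace.
logger.error(err, "Failed to initialize PKCS#11 module");

// String first: the Error is only an interpolation value, not a structured field.
logger.error("Failed to initialize PKCS#11 module:", err);
```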

View File

@ -1,4 +1,4 @@
import { ForbiddenError, subject } from "@casl/ability";
import { ForbiddenError } from "@casl/ability";
import { packRules } from "@casl/ability/extra";
import ms from "ms";
@ -62,10 +62,7 @@ export const identityProjectAdditionalPrivilegeV2ServiceFactory = ({
actorAuthMethod,
actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionActions.Edit,
subject(ProjectPermissionSub.Identity, { identityId })
);
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Edit, ProjectPermissionSub.Identity);
const { permission: targetIdentityPermission } = await permissionService.getProjectPermission(
ActorType.IDENTITY,
identityId,
@ -142,10 +139,7 @@ export const identityProjectAdditionalPrivilegeV2ServiceFactory = ({
actorAuthMethod,
actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionActions.Edit,
subject(ProjectPermissionSub.Identity, { identityId: identityProjectMembership.identityId })
);
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Edit, ProjectPermissionSub.Identity);
const { permission: targetIdentityPermission } = await permissionService.getProjectPermission(
ActorType.IDENTITY,
identityProjectMembership.identityId,
@ -222,10 +216,7 @@ export const identityProjectAdditionalPrivilegeV2ServiceFactory = ({
actorAuthMethod,
actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionActions.Edit,
subject(ProjectPermissionSub.Identity, { identityId: identityProjectMembership.identityId })
);
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Delete, ProjectPermissionSub.Identity);
const { permission: identityRolePermission } = await permissionService.getProjectPermission(
ActorType.IDENTITY,
identityProjectMembership.identityId,
@ -267,10 +258,7 @@ export const identityProjectAdditionalPrivilegeV2ServiceFactory = ({
actorAuthMethod,
actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionActions.Read,
subject(ProjectPermissionSub.Identity, { identityId: identityProjectMembership.identityId })
);
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.Identity);
return {
...identityPrivilege,
@ -301,10 +289,7 @@ export const identityProjectAdditionalPrivilegeV2ServiceFactory = ({
actorAuthMethod,
actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionActions.Read,
subject(ProjectPermissionSub.Identity, { identityId: identityProjectMembership.identityId })
);
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.Identity);
const identityPrivilege = await identityProjectAdditionalPrivilegeDAL.findOne({
slug,
@ -336,10 +321,7 @@ export const identityProjectAdditionalPrivilegeV2ServiceFactory = ({
actorAuthMethod,
actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionActions.Read,
subject(ProjectPermissionSub.Identity, { identityId: identityProjectMembership.identityId })
);
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Edit, ProjectPermissionSub.Identity);
const identityPrivileges = await identityProjectAdditionalPrivilegeDAL.find(
{

Some files were not shown because too many files have changed in this diff.