Mirror of https://github.com/Infisical/infisical.git (synced 2025-07-11 12:11:38 +00:00)

Compare commits: 1 commit on infisical/daniel/acc

Commit: 8f5f56a25e
@@ -74,14 +74,6 @@ CAPTCHA_SECRET=
 NEXT_PUBLIC_CAPTCHA_SITE_KEY=
 
-OTEL_TELEMETRY_COLLECTION_ENABLED=false
-OTEL_EXPORT_TYPE=prometheus
-OTEL_EXPORT_OTLP_ENDPOINT=
-OTEL_OTLP_PUSH_INTERVAL=
-
-OTEL_COLLECTOR_BASIC_AUTH_USERNAME=
-OTEL_COLLECTOR_BASIC_AUTH_PASSWORD=
-
 PLAIN_API_KEY=
 PLAIN_WISH_LABEL_IDS=
@@ -1,12 +1,6 @@
 #!/usr/bin/env sh
 . "$(dirname -- "$0")/_/husky.sh"
 
-# Check if infisical is installed
-if ! command -v infisical >/dev/null 2>&1; then
-  echo "\nError: Infisical CLI is not installed. Please install the Infisical CLI before committing.\n You can refer to the documentation at https://infisical.com/docs/cli/overview\n\n"
-  exit 1
-fi
-
 npx lint-staged
 
 infisical scan git-changes --staged -v
@@ -69,21 +69,13 @@ RUN groupadd -r -g 1001 nodejs && useradd -r -u 1001 -g nodejs non-root-user
 
 WORKDIR /app
 
-# Required for pkcs11js and ODBC
+# Required for pkcs11js
 RUN apt-get update && apt-get install -y \
     python3 \
     make \
     g++ \
-    unixodbc \
-    unixodbc-dev \
-    freetds-dev \
-    freetds-bin \
-    tdsodbc \
     && rm -rf /var/lib/apt/lists/*
 
-# Configure ODBC
-RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nSetup = /usr/lib/x86_64-linux-gnu/odbc/libtdsS.so\nFileUsage = 1\n" > /etc/odbcinst.ini
-
 COPY backend/package*.json ./
 RUN npm ci --only-production
@@ -99,21 +91,13 @@ ENV ChrystokiConfigurationPath=/usr/safenet/lunaclient/
 
 WORKDIR /app
 
-# Required for pkcs11js and ODBC
+# Required for pkcs11js
 RUN apt-get update && apt-get install -y \
     python3 \
     make \
    g++ \
-    unixodbc \
-    unixodbc-dev \
-    freetds-dev \
-    freetds-bin \
-    tdsodbc \
    && rm -rf /var/lib/apt/lists/*
 
-# Configure ODBC
-RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nSetup = /usr/lib/x86_64-linux-gnu/odbc/libtdsS.so\nFileUsage = 1\n" > /etc/odbcinst.ini
-
 COPY backend/package*.json ./
 RUN npm ci --only-production
@@ -124,24 +108,13 @@ RUN mkdir frontend-build
 # Production stage
 FROM base AS production
 
-# Install necessary packages including ODBC
+# Install necessary packages
 RUN apt-get update && apt-get install -y \
     ca-certificates \
     curl \
     git \
     python3 \
     make \
     g++ \
-    unixodbc \
-    unixodbc-dev \
-    freetds-dev \
-    freetds-bin \
-    tdsodbc \
     && rm -rf /var/lib/apt/lists/*
 
-# Configure ODBC in production
-RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nSetup = /usr/lib/x86_64-linux-gnu/odbc/libtdsS.so\nFileUsage = 1\n" > /etc/odbcinst.ini
-
 # Install Infisical CLI
 RUN curl -1sLf 'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.deb.sh' | bash \
     && apt-get update && apt-get install -y infisical=0.31.1 \
@@ -72,16 +72,8 @@ RUN addgroup --system --gid 1001 nodejs \
 
 WORKDIR /app
 
-# Install all required dependencies for build
-RUN apk --update add \
-    python3 \
-    make \
-    g++ \
-    unixodbc \
-    freetds \
-    unixodbc-dev \
-    libc-dev \
-    freetds-dev
+# Required for pkcs11js
+RUN apk add --no-cache python3 make g++
 
 COPY backend/package*.json ./
 RUN npm ci --only-production
@@ -96,19 +88,8 @@ FROM base AS backend-runner
 
 WORKDIR /app
 
-# Install all required dependencies for runtime
-RUN apk --update add \
-    python3 \
-    make \
-    g++ \
-    unixodbc \
-    freetds \
-    unixodbc-dev \
-    libc-dev \
-    freetds-dev
-
-# Configure ODBC
-RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/libtdsodbc.so\nSetup = /usr/lib/libtdsodbc.so\nFileUsage = 1\n" > /etc/odbcinst.ini
+# Required for pkcs11js
+RUN apk add --no-cache python3 make g++
 
 COPY backend/package*.json ./
 RUN npm ci --only-production
@@ -119,32 +100,11 @@ RUN mkdir frontend-build
 
 # Production stage
 FROM base AS production
 
+RUN apk add --upgrade --no-cache ca-certificates
+RUN apk add --no-cache bash curl && curl -1sLf \
+    'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.alpine.sh' | bash \
+    && apk add infisical=0.31.1 && apk add --no-cache git
 
 WORKDIR /
 
-# Install all required runtime dependencies
-RUN apk --update add \
-    python3 \
-    make \
-    g++ \
-    unixodbc \
-    freetds \
-    unixodbc-dev \
-    libc-dev \
-    freetds-dev \
-    bash \
-    curl \
-    git
-
-# Configure ODBC in production
-RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/libtdsodbc.so\nSetup = /usr/lib/libtdsodbc.so\nFileUsage = 1\n" > /etc/odbcinst.ini
-
 # Setup user permissions
 RUN addgroup --system --gid 1001 nodejs \
     && adduser --system --uid 1001 non-root-user
@@ -167,6 +127,7 @@ ARG CAPTCHA_SITE_KEY
 ENV NEXT_PUBLIC_CAPTCHA_SITE_KEY=$CAPTCHA_SITE_KEY \
     BAKED_NEXT_PUBLIC_CAPTCHA_SITE_KEY=$CAPTCHA_SITE_KEY
 
 WORKDIR /
 
 COPY --from=backend-runner /app /backend
@@ -188,4 +149,4 @@ EXPOSE 443
 
 USER non-root-user
 
-CMD ["./standalone-entrypoint.sh"]
+CMD ["./standalone-entrypoint.sh"]
Makefile (4 changes)
@@ -10,9 +10,6 @@ up-dev:
 up-dev-ldap:
 	docker compose -f docker-compose.dev.yml --profile ldap up --build
 
-up-dev-metrics:
-	docker compose -f docker-compose.dev.yml --profile metrics up --build
-
 up-prod:
 	docker-compose -f docker-compose.prod.yml up --build
@@ -30,3 +27,4 @@ reviewable-api:
 	npm run type:check
 
 reviewable: reviewable-ui reviewable-api
@@ -9,15 +9,6 @@ RUN apk --update add \
     make \
     g++
 
-# install dependencies for TDS driver (required for SAP ASE dynamic secrets)
-RUN apk add --no-cache \
-    unixodbc \
-    freetds \
-    unixodbc-dev \
-    libc-dev \
-    freetds-dev
-
-
 COPY package*.json ./
 RUN npm ci --only-production
@@ -37,17 +28,6 @@ RUN apk --update add \
     make \
     g++
 
-# install dependencies for TDS driver (required for SAP ASE dynamic secrets)
-RUN apk add --no-cache \
-    unixodbc \
-    freetds \
-    unixodbc-dev \
-    libc-dev \
-    freetds-dev
-
-
-RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/libtdsodbc.so\nSetup = /usr/lib/libtdsodbc.so\nFileUsage = 1\n" > /etc/odbcinst.ini
-
 RUN npm ci --only-production && npm cache clean --force
 
 COPY --from=build /app .
@@ -7,7 +7,7 @@ ARG SOFTHSM2_VERSION=2.5.0
 ENV SOFTHSM2_VERSION=${SOFTHSM2_VERSION} \
     SOFTHSM2_SOURCES=/tmp/softhsm2
 
-# install build dependencies including python3 (required for pkcs11js and partially TDS driver)
+# install build dependencies including python3
 RUN apk --update add \
     alpine-sdk \
     autoconf \
@@ -19,19 +19,7 @@ RUN apk --update add \
     make \
     g++
 
-# install dependencies for TDS driver (required for SAP ASE dynamic secrets)
-RUN apk add --no-cache \
-    unixodbc \
-    freetds \
-    unixodbc-dev \
-    libc-dev \
-    freetds-dev
-
-
-RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/libtdsodbc.so\nSetup = /usr/lib/libtdsodbc.so\nFileUsage = 1\n" > /etc/odbcinst.ini
-
 # build and install SoftHSM2
 
 RUN git clone https://github.com/opendnssec/SoftHSMv2.git ${SOFTHSM2_SOURCES}
 WORKDIR ${SOFTHSM2_SOURCES}
@@ -10,15 +10,12 @@ export const mockQueue = (): TQueueServiceFactory => {
   queue: async (name, jobData) => {
     job[name] = jobData;
   },
-  queuePg: async () => {},
-  initialize: async () => {},
   shutdown: async () => undefined,
   stopRepeatableJob: async () => true,
   start: (name, jobFn) => {
     queues[name] = jobFn;
     workers[name] = jobFn;
   },
-  startPg: async () => {},
   listen: (name, event) => {
     events[name] = event;
   },
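For context, a minimal sketch of how a test might exercise this mock; the job name and payload are invented for illustration, and `mockQueue` is the factory shown in the diff:

const queue = mockQueue();
// `start` records the handler in the mock's internal `queues`/`workers` maps
queue.start("audit-log", async (job) => {
  // test-side handler body
});
// `queue` records the payload in the mock's internal `job` map
await queue.queue("audit-log", { event: "secret.read" });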
@@ -1,86 +0,0 @@
-import { createFolder, deleteFolder } from "e2e-test/testUtils/folders";
-import { createSecretV2, deleteSecretV2, getSecretsV2 } from "e2e-test/testUtils/secrets";
-
-import { seedData1 } from "@app/db/seed-data";
-
-describe("Secret Recursive Testing", async () => {
-  const projectId = seedData1.projectV3.id;
-  const folderAndSecretNames = [
-    { name: "deep1", path: "/", expectedSecretCount: 4 },
-    { name: "deep21", path: "/deep1", expectedSecretCount: 2 },
-    { name: "deep3", path: "/deep1/deep2", expectedSecretCount: 1 },
-    { name: "deep22", path: "/deep2", expectedSecretCount: 1 }
-  ];
-
-  beforeAll(async () => {
-    const rootFolderIds: string[] = [];
-    for (const folder of folderAndSecretNames) {
-      // eslint-disable-next-line no-await-in-loop
-      const createdFolder = await createFolder({
-        authToken: jwtAuthToken,
-        environmentSlug: "prod",
-        workspaceId: projectId,
-        secretPath: folder.path,
-        name: folder.name
-      });
-
-      if (folder.path === "/") {
-        rootFolderIds.push(createdFolder.id);
-      }
-      // eslint-disable-next-line no-await-in-loop
-      await createSecretV2({
-        secretPath: folder.path,
-        authToken: jwtAuthToken,
-        environmentSlug: "prod",
-        workspaceId: projectId,
-        key: folder.name,
-        value: folder.name
-      });
-    }
-
-    return async () => {
-      await Promise.all(
-        rootFolderIds.map((id) =>
-          deleteFolder({
-            authToken: jwtAuthToken,
-            secretPath: "/",
-            id,
-            workspaceId: projectId,
-            environmentSlug: "prod"
-          })
-        )
-      );
-
-      await deleteSecretV2({
-        authToken: jwtAuthToken,
-        secretPath: "/",
-        workspaceId: projectId,
-        environmentSlug: "prod",
-        key: folderAndSecretNames[0].name
-      });
-    };
-  });
-
-  test.each(folderAndSecretNames)("$path recursive secret fetching", async ({ path, expectedSecretCount }) => {
-    const secrets = await getSecretsV2({
-      authToken: jwtAuthToken,
-      secretPath: path,
-      workspaceId: projectId,
-      environmentSlug: "prod",
-      recursive: true
-    });
-
-    expect(secrets.secrets.length).toEqual(expectedSecretCount);
-    expect(secrets.secrets.sort((a, b) => a.secretKey.localeCompare(b.secretKey))).toEqual(
-      folderAndSecretNames
-        .filter((el) => el.path.startsWith(path))
-        .sort((a, b) => a.name.localeCompare(b.name))
-        .map((el) =>
-          expect.objectContaining({
-            secretKey: el.name,
-            secretValue: el.name
-          })
-        )
-    );
-  });
-});
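The deleted test's core expectation reduces to a prefix filter over folder paths; a quick standalone check of its own fixture confirms the expected counts:

const folderAndSecretNames = [
  { name: "deep1", path: "/", expectedSecretCount: 4 },
  { name: "deep21", path: "/deep1", expectedSecretCount: 2 },
  { name: "deep3", path: "/deep1/deep2", expectedSecretCount: 1 },
  { name: "deep22", path: "/deep2", expectedSecretCount: 1 }
];
// A recursive fetch at `path` should return every secret whose folder path starts with `path`
const countAt = (path: string) =>
  folderAndSecretNames.filter((el) => el.path.startsWith(path)).length;
console.log(countAt("/"));      // 4
console.log(countAt("/deep1")); // 2 (deep21 and deep3)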
@@ -97,7 +97,6 @@ export const getSecretsV2 = async (dto: {
   environmentSlug: string;
   secretPath: string;
   authToken: string;
-  recursive?: boolean;
 }) => {
   const getSecretsResponse = await testServer.inject({
     method: "GET",
@@ -110,8 +109,7 @@ export const getSecretsV2 = async (dto: {
       environment: dto.environmentSlug,
       secretPath: dto.secretPath,
       expandSecretReferences: "true",
-      include_imports: "true",
-      recursive: String(dto.recursive || false)
+      include_imports: "true"
     }
   });
   expect(getSecretsResponse.statusCode).toBe(200);
@@ -53,13 +53,13 @@ export default {
         extension: "ts"
       });
       const smtp = mockSmtpServer();
-      const queue = queueServiceFactory(cfg.REDIS_URL, cfg.DB_CONNECTION_URI);
+      const queue = queueServiceFactory(cfg.REDIS_URL);
       const keyStore = keyStoreFactory(cfg.REDIS_URL);
 
       const hsmModule = initializeHsmModule();
       hsmModule.initialize();
 
-      const server = await main({ db, smtp, logger, queue, keyStore, hsmModule: hsmModule.getModule(), redis });
+      const server = await main({ db, smtp, logger, queue, keyStore, hsmModule: hsmModule.getModule() });
 
       // @ts-expect-error type
       globalThis.testServer = server;
backend/package-lock.json (generated, 1797 changes): diff suppressed because it is too large
@@ -132,25 +132,14 @@
     "@fastify/multipart": "8.3.0",
     "@fastify/passport": "^2.4.0",
     "@fastify/rate-limit": "^9.0.0",
-    "@fastify/request-context": "^5.1.0",
     "@fastify/session": "^10.7.0",
     "@fastify/swagger": "^8.14.0",
     "@fastify/swagger-ui": "^2.1.0",
-    "@google-cloud/kms": "^4.5.0",
     "@node-saml/passport-saml": "^4.0.4",
     "@octokit/auth-app": "^7.1.1",
     "@octokit/plugin-retry": "^5.0.5",
     "@octokit/rest": "^20.0.2",
     "@octokit/webhooks-types": "^7.3.1",
-    "@octopusdeploy/api-client": "^3.4.1",
-    "@opentelemetry/api": "^1.9.0",
-    "@opentelemetry/auto-instrumentations-node": "^0.53.0",
-    "@opentelemetry/exporter-metrics-otlp-proto": "^0.55.0",
-    "@opentelemetry/exporter-prometheus": "^0.55.0",
-    "@opentelemetry/instrumentation": "^0.55.0",
-    "@opentelemetry/resources": "^1.28.0",
-    "@opentelemetry/sdk-metrics": "^1.28.0",
-    "@opentelemetry/semantic-conventions": "^1.27.0",
     "@peculiar/asn1-schema": "^2.3.8",
     "@peculiar/x509": "^1.12.1",
     "@serdnam/pino-cloudwatch-transport": "^1.0.4",
@@ -191,7 +180,6 @@
     "mysql2": "^3.9.8",
     "nanoid": "^3.3.4",
     "nodemailer": "^6.9.9",
-    "odbc": "^2.4.9",
     "openid-client": "^5.6.5",
     "ora": "^7.0.1",
     "oracledb": "^6.4.0",
@@ -201,7 +189,6 @@
     "passport-google-oauth20": "^2.0.0",
     "passport-ldapauth": "^3.0.1",
     "pg": "^8.11.3",
-    "pg-boss": "^10.1.5",
     "pg-query-stream": "^4.5.3",
     "picomatch": "^3.0.1",
     "pino": "^8.16.2",
@@ -1,7 +0,0 @@
-import "@fastify/request-context";
-
-declare module "@fastify/request-context" {
-  interface RequestContextData {
-    reqId: string;
-  }
-}
backend/src/@types/fastify-zod.d.ts (vendored, 4 changes)
@@ -1,6 +1,6 @@
 import { FastifyInstance, RawReplyDefaultExpression, RawRequestDefaultExpression, RawServerDefault } from "fastify";
+import { Logger } from "pino";
 
-import { CustomLogger } from "@app/lib/logger/logger";
 import { ZodTypeProvider } from "@app/server/plugins/fastify-zod";
 
 declare global {
@@ -8,7 +8,7 @@ declare global {
       RawServerDefault,
       RawRequestDefaultExpression<RawServerDefault>,
       RawReplyDefaultExpression<RawServerDefault>,
-      Readonly<CustomLogger>,
+      Readonly<Logger>,
       ZodTypeProvider
     >;
backend/src/@types/fastify.d.ts (vendored, 7 changes)
@@ -1,7 +1,5 @@
 import "fastify";
 
-import { Redis } from "ioredis";
-
 import { TUsers } from "@app/db/schemas";
 import { TAccessApprovalPolicyServiceFactory } from "@app/ee/services/access-approval-policy/access-approval-policy-service";
 import { TAccessApprovalRequestServiceFactory } from "@app/ee/services/access-approval-request/access-approval-request-service";
@@ -89,10 +87,6 @@ import { TWebhookServiceFactory } from "@app/services/webhook/webhook-service";
 import { TWorkflowIntegrationServiceFactory } from "@app/services/workflow-integration/workflow-integration-service";
 
 declare module "fastify" {
-  interface Session {
-    callbackPort: string;
-  }
-
   interface FastifyRequest {
     realIp: string;
     // used for mfa session authentication
@@ -121,7 +115,6 @@ declare module "fastify" {
   }
 
   interface FastifyInstance {
-    redis: Redis;
     services: {
       login: TAuthLoginFactory;
       password: TAuthPasswordFactory;
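For reference, a minimal sketch of the declaration merging being removed here; once `redis: Redis` is gone from `FastifyInstance`, `server.redis` no longer type-checks. The setup below is assumed for illustration, not taken from the repo:

import Fastify from "fastify";
import { Redis } from "ioredis";

declare module "fastify" {
  interface FastifyInstance {
    redis: Redis; // the member this diff removes from the real typings
  }
}

const server = Fastify();
server.decorate("redis", new Redis("redis://localhost:6379"));
await server.redis.ping(); // typed only while the augmentation above exists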
@@ -1,23 +0,0 @@
-import { Knex } from "knex";
-
-import { TableName } from "../schemas";
-
-export async function up(knex: Knex): Promise<void> {
-  const hasProjectDescription = await knex.schema.hasColumn(TableName.Project, "description");
-
-  if (!hasProjectDescription) {
-    await knex.schema.alterTable(TableName.Project, (t) => {
-      t.string("description");
-    });
-  }
-}
-
-export async function down(knex: Knex): Promise<void> {
-  const hasProjectDescription = await knex.schema.hasColumn(TableName.Project, "description");
-
-  if (hasProjectDescription) {
-    await knex.schema.alterTable(TableName.Project, (t) => {
-      t.dropColumn("description");
-    });
-  }
-}
@@ -0,0 +1,25 @@
+import { Knex } from "knex";
+
+import { TableName } from "../schemas";
+
+export async function up(knex: Knex): Promise<void> {
+  const hasIdentityIdColumn = await knex.schema.hasColumn(TableName.IdentityAccessToken, "identityId");
+  const hasAuthMethodColumn = await knex.schema.hasColumn(TableName.IdentityAccessToken, "authMethod");
+
+  await knex.schema.alterTable(TableName.IdentityAccessToken, (t) => {
+    if (hasIdentityIdColumn && hasAuthMethodColumn) {
+      t.index(["authMethod", "identityId"]);
+    }
+  });
+}
+
+export async function down(knex: Knex): Promise<void> {
+  const hasIdentityIdColumn = await knex.schema.hasColumn(TableName.IdentityAccessToken, "identityId");
+  const hasAuthMethodColumn = await knex.schema.hasColumn(TableName.IdentityAccessToken, "authMethod");
+
+  await knex.schema.alterTable(TableName.IdentityAccessToken, (t) => {
+    if (hasIdentityIdColumn && hasAuthMethodColumn) {
+      t.dropIndex(["authMethod", "identityId"]);
+    }
+  });
+}
@@ -1,20 +0,0 @@
-import { Knex } from "knex";
-
-import { TableName } from "../schemas";
-
-export async function up(knex: Knex): Promise<void> {
-  if (await knex.schema.hasColumn(TableName.IdentityMetadata, "value")) {
-    await knex(TableName.IdentityMetadata).whereNull("value").delete();
-    await knex.schema.alterTable(TableName.IdentityMetadata, (t) => {
-      t.string("value", 1020).notNullable().alter();
-    });
-  }
-}
-
-export async function down(knex: Knex): Promise<void> {
-  if (await knex.schema.hasColumn(TableName.IdentityMetadata, "value")) {
-    await knex.schema.alterTable(TableName.IdentityMetadata, (t) => {
-      t.string("value", 1020).alter();
-    });
-  }
-}
@@ -12,7 +12,7 @@ import { TImmutableDBKeys } from "./models";
 export const KmsRootConfigSchema = z.object({
   id: z.string().uuid(),
   encryptedRootKey: zodBuffer,
-  encryptionStrategy: z.string().default("SOFTWARE").nullable().optional(),
+  encryptionStrategy: z.string(),
   createdAt: z.date(),
   updatedAt: z.date()
 });
@@ -23,8 +23,7 @@ export const ProjectsSchema = z.object({
   kmsCertificateKeyId: z.string().uuid().nullable().optional(),
   auditLogsRetentionDays: z.number().nullable().optional(),
   kmsSecretManagerKeyId: z.string().uuid().nullable().optional(),
-  kmsSecretManagerEncryptedDataKey: zodBuffer.nullable().optional(),
-  description: z.string().nullable().optional()
+  kmsSecretManagerEncryptedDataKey: zodBuffer.nullable().optional()
 });
 
 export type TProjects = z.infer<typeof ProjectsSchema>;
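A note on the removed field's shape: in zod, `.nullable().optional()` accepts a value, null, or undefined, which is why the column-backed field could be absent entirely. An illustrative snippet using the standard zod API:

import { z } from "zod";

const description = z.string().nullable().optional();
description.parse("my project"); // ok
description.parse(null);         // ok (nullable)
description.parse(undefined);    // ok (optional)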
@@ -4,15 +4,9 @@ import { ExternalKmsSchema, KmsKeysSchema } from "@app/db/schemas";
 import { EventType } from "@app/ee/services/audit-log/audit-log-types";
 import {
   ExternalKmsAwsSchema,
-  ExternalKmsGcpCredentialSchema,
-  ExternalKmsGcpSchema,
   ExternalKmsInputSchema,
-  ExternalKmsInputUpdateSchema,
-  KmsGcpKeyFetchAuthType,
-  KmsProviders,
-  TExternalKmsGcpCredentialSchema
+  ExternalKmsInputUpdateSchema
 } from "@app/ee/services/external-kms/providers/model";
-import { NotFoundError } from "@app/lib/errors";
 import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
 import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
 import { AuthMode } from "@app/services/auth/auth-type";
@@ -50,8 +44,7 @@ const sanitizedExternalSchemaForGetById = KmsKeysSchema.extend({
     statusDetails: true,
     provider: true
   }).extend({
-    // for GCP, we don't return the credential object as it is sensitive data that should not be exposed
-    providerInput: z.union([ExternalKmsAwsSchema, ExternalKmsGcpSchema.pick({ gcpRegion: true, keyName: true })])
+    providerInput: ExternalKmsAwsSchema
   })
 });
@@ -293,67 +286,4 @@ export const registerExternalKmsRouter = async (server: FastifyZodProvider) => {
       return { externalKms };
     }
   });
-
-  server.route({
-    method: "POST",
-    url: "/gcp/keys",
-    config: {
-      rateLimit: writeLimit
-    },
-    schema: {
-      body: z.discriminatedUnion("authMethod", [
-        z.object({
-          authMethod: z.literal(KmsGcpKeyFetchAuthType.Credential),
-          region: z.string().trim().min(1),
-          credential: ExternalKmsGcpCredentialSchema
-        }),
-        z.object({
-          authMethod: z.literal(KmsGcpKeyFetchAuthType.Kms),
-          region: z.string().trim().min(1),
-          kmsId: z.string().trim().min(1)
-        })
-      ]),
-      response: {
-        200: z.object({
-          keys: z.string().array()
-        })
-      }
-    },
-    onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
-    handler: async (req) => {
-      const { region, authMethod } = req.body;
-      let credentialJson: TExternalKmsGcpCredentialSchema | undefined;
-
-      if (authMethod === KmsGcpKeyFetchAuthType.Credential) {
-        credentialJson = req.body.credential;
-      } else if (authMethod === KmsGcpKeyFetchAuthType.Kms) {
-        const externalKms = await server.services.externalKms.findById({
-          actor: req.permission.type,
-          actorId: req.permission.id,
-          actorAuthMethod: req.permission.authMethod,
-          actorOrgId: req.permission.orgId,
-          id: req.body.kmsId
-        });
-
-        if (!externalKms || externalKms.external.provider !== KmsProviders.Gcp) {
-          throw new NotFoundError({ message: "KMS not found or not of type GCP" });
-        }
-
-        credentialJson = externalKms.external.providerInput.credential as TExternalKmsGcpCredentialSchema;
-      }
-
-      if (!credentialJson) {
-        throw new NotFoundError({
-          message: "Something went wrong while fetching the GCP credential, please check inputs and try again"
-        });
-      }
-
-      const results = await server.services.externalKms.fetchGcpKeys({
-        credential: credentialJson,
-        gcpRegion: region
-      });
-
-      return results;
-    }
-  });
 };
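The removed route validated its body with a zod discriminated union keyed on `authMethod`; here is the same pattern as a standalone sketch. The literal strings are illustrative stand-ins for the removed `KmsGcpKeyFetchAuthType` enum values:

import { z } from "zod";

const GcpKeyFetchBody = z.discriminatedUnion("authMethod", [
  z.object({ authMethod: z.literal("credential"), region: z.string().trim().min(1) }),
  z.object({ authMethod: z.literal("kms"), region: z.string().trim().min(1), kmsId: z.string().trim().min(1) })
]);

// zod narrows on the discriminator: this parses as the "kms" variant
GcpKeyFetchBody.parse({ authMethod: "kms", region: "us-east-1", kmsId: "abc123" });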
@@ -9,6 +9,7 @@
 import { Authenticator, Strategy } from "@fastify/passport";
 import fastifySession from "@fastify/session";
 import RedisStore from "connect-redis";
+import { Redis } from "ioredis";
 import { z } from "zod";
 
 import { OidcConfigsSchema } from "@app/db/schemas/oidc-configs";
@@ -20,6 +21,7 @@ import { AuthMode } from "@app/services/auth/auth-type";
 
 export const registerOidcRouter = async (server: FastifyZodProvider) => {
   const appCfg = getConfig();
+  const redis = new Redis(appCfg.REDIS_URL);
   const passport = new Authenticator({ key: "oidc", userProperty: "passportUser" });
 
   /*
@@ -28,7 +30,7 @@ export const registerOidcRouter = async (server: FastifyZodProvider) => {
   - Fastify session <> Redis structure is based on the ff: https://github.com/fastify/session/blob/master/examples/redis.js
   */
   const redisStore = new RedisStore({
-    client: server.redis,
+    client: redis,
     prefix: "oidc-session:",
     ttl: 600 // 10 minutes
   });
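The change above swaps the shared `server.redis` instance for a locally constructed client. A minimal sketch of the store wiring, using the connect-redis v7 style API with a placeholder URL:

import RedisStore from "connect-redis";
import { Redis } from "ioredis";

const redis = new Redis("redis://localhost:6379"); // placeholder URL
const redisStore = new RedisStore({
  client: redis,
  prefix: "oidc-session:",
  ttl: 600 // seconds (10 minutes), matching the diff
});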
@@ -1,7 +1,6 @@
 import { RawAxiosRequestHeaders } from "axios";
 
 import { SecretKeyEncoding } from "@app/db/schemas";
-import { getConfig } from "@app/lib/config/env";
 import { request } from "@app/lib/config/request";
 import { infisicalSymmetricDecrypt } from "@app/lib/crypto/encryption";
 import { QueueJobs, QueueName, TQueueServiceFactory } from "@app/queue";
@@ -21,129 +20,26 @@ type TAuditLogQueueServiceFactoryDep = {
   licenseService: Pick<TLicenseServiceFactory, "getPlan">;
 };
 
-export type TAuditLogQueueServiceFactory = Awaited<ReturnType<typeof auditLogQueueServiceFactory>>;
+export type TAuditLogQueueServiceFactory = ReturnType<typeof auditLogQueueServiceFactory>;
 
 // keep this timeout 5s it must be fast because else the queue will take time to finish
 // audit log is a crowded queue thus needs to be fast
 export const AUDIT_LOG_STREAM_TIMEOUT = 5 * 1000;
 
-export const auditLogQueueServiceFactory = async ({
+export const auditLogQueueServiceFactory = ({
   auditLogDAL,
   queueService,
   projectDAL,
   licenseService,
   auditLogStreamDAL
 }: TAuditLogQueueServiceFactoryDep) => {
-  const appCfg = getConfig();
-
   const pushToLog = async (data: TCreateAuditLogDTO) => {
-    if (appCfg.USE_PG_QUEUE && appCfg.SHOULD_INIT_PG_QUEUE) {
-      await queueService.queuePg<QueueName.AuditLog>(QueueJobs.AuditLog, data, {
-        retryLimit: 10,
-        retryBackoff: true
-      });
-    } else {
-      await queueService.queue<QueueName.AuditLog>(QueueName.AuditLog, QueueJobs.AuditLog, data, {
-        removeOnFail: {
-          count: 3
-        },
-        removeOnComplete: true
-      });
-    }
-  };
-
-  if (appCfg.SHOULD_INIT_PG_QUEUE) {
-    await queueService.startPg<QueueName.AuditLog>(
-      QueueJobs.AuditLog,
-      async ([job]) => {
-        const { actor, event, ipAddress, projectId, userAgent, userAgentType } = job.data;
-        let { orgId } = job.data;
-        const MS_IN_DAY = 24 * 60 * 60 * 1000;
-        let project;
-
-        if (!orgId) {
-          // it will never be undefined for both org and project id
-          // TODO(akhilmhdh): use caching here in dal to avoid db calls
-          project = await projectDAL.findById(projectId as string);
-          orgId = project.orgId;
-        }
-
-        const plan = await licenseService.getPlan(orgId);
-        if (plan.auditLogsRetentionDays === 0) {
-          // skip inserting if audit log retention is 0 meaning its not supported
-          return;
-        }
-
-        // For project actions, set TTL to project-level audit log retention config
-        // This condition ensures that the plan's audit log retention days cannot be bypassed
-        const ttlInDays =
-          project?.auditLogsRetentionDays && project.auditLogsRetentionDays < plan.auditLogsRetentionDays
-            ? project.auditLogsRetentionDays
-            : plan.auditLogsRetentionDays;
-
-        const ttl = ttlInDays * MS_IN_DAY;
-
-        const auditLog = await auditLogDAL.create({
-          actor: actor.type,
-          actorMetadata: actor.metadata,
-          userAgent,
-          projectId,
-          projectName: project?.name,
-          ipAddress,
-          orgId,
-          eventType: event.type,
-          expiresAt: new Date(Date.now() + ttl),
-          eventMetadata: event.metadata,
-          userAgentType
-        });
-
-        const logStreams = orgId ? await auditLogStreamDAL.find({ orgId }) : [];
-        await Promise.allSettled(
-          logStreams.map(
-            async ({
-              url,
-              encryptedHeadersTag,
-              encryptedHeadersIV,
-              encryptedHeadersKeyEncoding,
-              encryptedHeadersCiphertext
-            }) => {
-              const streamHeaders =
-                encryptedHeadersIV && encryptedHeadersCiphertext && encryptedHeadersTag
-                  ? (JSON.parse(
-                      infisicalSymmetricDecrypt({
-                        keyEncoding: encryptedHeadersKeyEncoding as SecretKeyEncoding,
-                        iv: encryptedHeadersIV,
-                        tag: encryptedHeadersTag,
-                        ciphertext: encryptedHeadersCiphertext
-                      })
-                    ) as LogStreamHeaders[])
-                  : [];
-
-              const headers: RawAxiosRequestHeaders = { "Content-Type": "application/json" };
-
-              if (streamHeaders.length)
-                streamHeaders.forEach(({ key, value }) => {
-                  headers[key] = value;
-                });
-
-              return request.post(url, auditLog, {
-                headers,
-                // request timeout
-                timeout: AUDIT_LOG_STREAM_TIMEOUT,
-                // connection timeout
-                signal: AbortSignal.timeout(AUDIT_LOG_STREAM_TIMEOUT)
-              });
-            }
-          )
-        );
-      },
-      {
-        batchSize: 1,
-        workerCount: 30,
-        pollingIntervalSeconds: 0.5
-      }
-    );
-  }
+    await queueService.queue(QueueName.AuditLog, QueueJobs.AuditLog, data, {
+      removeOnFail: {
+        count: 3
+      },
+      removeOnComplete: true
+    });
+  };
 
+  queueService.start(QueueName.AuditLog, async (job) => {
+    const { actor, event, ipAddress, projectId, userAgent, userAgentType } = job.data;
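The retention computation inside the removed worker (its BullMQ counterpart keeps the same logic) derives each log's expiry from plan and project settings; a worked sketch with assumed values:

const MS_IN_DAY = 24 * 60 * 60 * 1000;
const planRetentionDays = 30;    // assumed: from licenseService.getPlan(orgId)
const projectRetentionDays = 7;  // assumed: optional per-project override

// The override only wins when it is stricter, so the plan's cap cannot be bypassed
const ttlInDays =
  projectRetentionDays && projectRetentionDays < planRetentionDays
    ? projectRetentionDays
    : planRetentionDays;

const expiresAt = new Date(Date.now() + ttlInDays * MS_IN_DAY); // 7 days from now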
@@ -112,7 +112,7 @@ export const dynamicSecretLeaseServiceFactory = ({
       })
     ) as object;
 
-    const selectedTTL = ttl || dynamicSecretCfg.defaultTTL;
+    const selectedTTL = ttl ?? dynamicSecretCfg.defaultTTL;
     const { maxTTL } = dynamicSecretCfg;
     const expireAt = new Date(new Date().getTime() + ms(selectedTTL));
     if (maxTTL) {
@@ -187,7 +187,7 @@ export const dynamicSecretLeaseServiceFactory = ({
       })
     ) as object;
 
-    const selectedTTL = ttl || dynamicSecretCfg.defaultTTL;
+    const selectedTTL = ttl ?? dynamicSecretCfg.defaultTTL;
     const { maxTTL } = dynamicSecretCfg;
     const expireAt = new Date(dynamicSecretLease.expireAt.getTime() + ms(selectedTTL));
     if (maxTTL) {
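The `||` to `??` swap matters because the two operators fall back under different conditions: `||` falls back on any falsy value, while `??` falls back only on null or undefined. A quick illustration with made-up TTL values:

const defaultTTL = "1h";
const ttlEmpty = "";                  // caller explicitly passed an empty TTL
console.log(ttlEmpty || defaultTTL);  // "1h"  (empty string is falsy)
console.log(ttlEmpty ?? defaultTTL);  // ""    (only null/undefined trigger ??)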
@@ -80,7 +80,7 @@ const ElastiCacheUserManager = (credentials: TBasicAWSCredentials, region: string
     }
   };
 
-  const $addUserToInfisicalGroup = async (userId: string) => {
+  const addUserToInfisicalGroup = async (userId: string) => {
     // figure out if the default user is already in the group, if it is, then we shouldn't add it again
 
     const addUserToGroupCommand = new ModifyUserGroupCommand({
@@ -96,7 +96,7 @@ const ElastiCacheUserManager = (credentials: TBasicAWSCredentials, region: string
     await ensureInfisicalGroupExists(clusterName);
 
     await elastiCache.send(new CreateUserCommand(creationInput)); // First create the user
-    await $addUserToInfisicalGroup(creationInput.UserId); // Then add the user to the group. We know the group is already a part of the cluster because of ensureInfisicalGroupExists()
+    await addUserToInfisicalGroup(creationInput.UserId); // Then add the user to the group. We know the group is already a part of the cluster because of ensureInfisicalGroupExists()
 
     return {
       userId: creationInput.UserId,
@@ -127,7 +127,7 @@ const ElastiCacheUserManager = (credentials: TBasicAWSCredentials, region: string
   };
 
   const generatePassword = () => {
-    const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*";
+    const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*$#";
     return customAlphabet(charset, 64)();
   };
 
@@ -211,8 +211,8 @@ export const AwsElastiCacheDatabaseProvider = (): TDynamicProviderFns => {
     return { entityId };
   };
 
-  const renew = async (_inputs: unknown, entityId: string) => {
-    // No renewal necessary
+  const renew = async (inputs: unknown, entityId: string) => {
+    // Do nothing
     return { entityId };
   };
 
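Both versions of `generatePassword` rely on nanoid's `customAlphabet`, which returns a generator bound to an alphabet and a default length; a standalone sketch using the post-change charset:

import { customAlphabet } from "nanoid";

const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*$#";
const generatePassword = () => customAlphabet(charset, 64)();

console.log(generatePassword()); // 64 characters drawn from the charset above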
|
@ -33,7 +33,7 @@ export const AwsIamProvider = (): TDynamicProviderFns => {
|
||||
return providerInputs;
|
||||
};
|
||||
|
||||
const $getClient = async (providerInputs: z.infer<typeof DynamicSecretAwsIamSchema>) => {
|
||||
const getClient = async (providerInputs: z.infer<typeof DynamicSecretAwsIamSchema>) => {
|
||||
const client = new IAMClient({
|
||||
region: providerInputs.region,
|
||||
credentials: {
|
||||
@ -47,7 +47,7 @@ export const AwsIamProvider = (): TDynamicProviderFns => {
|
||||
|
||||
const validateConnection = async (inputs: unknown) => {
|
||||
const providerInputs = await validateProviderInputs(inputs);
|
||||
const client = await $getClient(providerInputs);
|
||||
const client = await getClient(providerInputs);
|
||||
|
||||
const isConnected = await client.send(new GetUserCommand({})).then(() => true);
|
||||
return isConnected;
|
||||
@ -55,7 +55,7 @@ export const AwsIamProvider = (): TDynamicProviderFns => {
|
||||
|
||||
const create = async (inputs: unknown) => {
|
||||
const providerInputs = await validateProviderInputs(inputs);
|
||||
const client = await $getClient(providerInputs);
|
||||
const client = await getClient(providerInputs);
|
||||
|
||||
const username = generateUsername();
|
||||
const { policyArns, userGroups, policyDocument, awsPath, permissionBoundaryPolicyArn } = providerInputs;
|
||||
@ -118,7 +118,7 @@ export const AwsIamProvider = (): TDynamicProviderFns => {
|
||||
|
||||
const revoke = async (inputs: unknown, entityId: string) => {
|
||||
const providerInputs = await validateProviderInputs(inputs);
|
||||
const client = await $getClient(providerInputs);
|
||||
const client = await getClient(providerInputs);
|
||||
|
||||
const username = entityId;
|
||||
|
||||
@ -179,8 +179,9 @@ export const AwsIamProvider = (): TDynamicProviderFns => {
|
||||
};
|
||||
|
||||
const renew = async (_inputs: unknown, entityId: string) => {
|
||||
// No renewal necessary
|
||||
return { entityId };
|
||||
// do nothing
|
||||
const username = entityId;
|
||||
return { entityId: username };
|
||||
};
|
||||
|
||||
return {
|
||||
|
@@ -9,7 +9,7 @@ const MSFT_GRAPH_API_URL = "https://graph.microsoft.com/v1.0/";
 const MSFT_LOGIN_URL = "https://login.microsoftonline.com";
 
 const generatePassword = () => {
-  const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*";
+  const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*$#";
   return customAlphabet(charset, 64)();
 };
 
@@ -23,7 +23,7 @@ export const AzureEntraIDProvider = (): TDynamicProviderFns & {
     return providerInputs;
   };
 
-  const $getToken = async (
+  const getToken = async (
     tenantId: string,
     applicationId: string,
     clientSecret: string
@@ -51,13 +51,18 @@ export const AzureEntraIDProvider = (): TDynamicProviderFns & {
 
   const validateConnection = async (inputs: unknown) => {
     const providerInputs = await validateProviderInputs(inputs);
-    const data = await $getToken(providerInputs.tenantId, providerInputs.applicationId, providerInputs.clientSecret);
+    const data = await getToken(providerInputs.tenantId, providerInputs.applicationId, providerInputs.clientSecret);
     return data.success;
   };
 
+  const renew = async (inputs: unknown, entityId: string) => {
+    // Do nothing
+    return { entityId };
+  };
+
   const create = async (inputs: unknown) => {
     const providerInputs = await validateProviderInputs(inputs);
-    const data = await $getToken(providerInputs.tenantId, providerInputs.applicationId, providerInputs.clientSecret);
+    const data = await getToken(providerInputs.tenantId, providerInputs.applicationId, providerInputs.clientSecret);
     if (!data.success) {
       throw new BadRequestError({ message: "Failed to authorize to Microsoft Entra ID" });
     }
@@ -93,7 +98,7 @@ export const AzureEntraIDProvider = (): TDynamicProviderFns & {
   };
 
   const fetchAzureEntraIdUsers = async (tenantId: string, applicationId: string, clientSecret: string) => {
-    const data = await $getToken(tenantId, applicationId, clientSecret);
+    const data = await getToken(tenantId, applicationId, clientSecret);
     if (!data.success) {
       throw new BadRequestError({ message: "Failed to authorize to Microsoft Entra ID" });
     }
@@ -122,11 +127,6 @@ export const AzureEntraIDProvider = (): TDynamicProviderFns & {
     return users;
   };
 
-  const renew = async (_inputs: unknown, entityId: string) => {
-    // No renewal necessary
-    return { entityId };
-  };
-
   return {
     validateProviderInputs,
     validateConnection,
@@ -9,7 +9,7 @@ import { alphaNumericNanoId } from "@app/lib/nanoid";
 import { DynamicSecretCassandraSchema, TDynamicProviderFns } from "./models";
 
 const generatePassword = (size = 48) => {
-  const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*";
+  const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*$#";
   return customAlphabet(charset, 48)(size);
 };
 
@@ -27,7 +27,7 @@ export const CassandraProvider = (): TDynamicProviderFns => {
     return providerInputs;
   };
 
-  const $getClient = async (providerInputs: z.infer<typeof DynamicSecretCassandraSchema>) => {
+  const getClient = async (providerInputs: z.infer<typeof DynamicSecretCassandraSchema>) => {
     const sslOptions = providerInputs.ca ? { rejectUnauthorized: false, ca: providerInputs.ca } : undefined;
     const client = new cassandra.Client({
       sslOptions,
@@ -47,7 +47,7 @@ export const CassandraProvider = (): TDynamicProviderFns => {
 
   const validateConnection = async (inputs: unknown) => {
     const providerInputs = await validateProviderInputs(inputs);
-    const client = await $getClient(providerInputs);
+    const client = await getClient(providerInputs);
 
     const isConnected = await client.execute("SELECT * FROM system_schema.keyspaces").then(() => true);
     await client.shutdown();
@@ -56,7 +56,7 @@ export const CassandraProvider = (): TDynamicProviderFns => {
 
   const create = async (inputs: unknown, expireAt: number) => {
     const providerInputs = await validateProviderInputs(inputs);
-    const client = await $getClient(providerInputs);
+    const client = await getClient(providerInputs);
 
     const username = generateUsername();
     const password = generatePassword();
@@ -82,7 +82,7 @@ export const CassandraProvider = (): TDynamicProviderFns => {
 
   const revoke = async (inputs: unknown, entityId: string) => {
     const providerInputs = await validateProviderInputs(inputs);
-    const client = await $getClient(providerInputs);
+    const client = await getClient(providerInputs);
 
     const username = entityId;
     const { keyspace } = providerInputs;
@@ -99,24 +99,20 @@ export const CassandraProvider = (): TDynamicProviderFns => {
 
   const renew = async (inputs: unknown, entityId: string, expireAt: number) => {
     const providerInputs = await validateProviderInputs(inputs);
-    if (!providerInputs.renewStatement) return { entityId };
-
-    const client = await $getClient(providerInputs);
+    const client = await getClient(providerInputs);
 
     const username = entityId;
     const expiration = new Date(expireAt).toISOString();
     const { keyspace } = providerInputs;
 
-    const renewStatement = handlebars.compile(providerInputs.renewStatement)({
-      username: entityId,
-      keyspace,
-      expiration
-    });
+    const renewStatement = handlebars.compile(providerInputs.revocationStatement)({ username, keyspace, expiration });
     const queries = renewStatement.toString().split(";").filter(Boolean);
-    for await (const query of queries) {
+    for (const query of queries) {
       // eslint-disable-next-line
       await client.execute(query);
     }
     await client.shutdown();
-    return { entityId };
+    return { entityId: username };
   };
 
   return {
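The renew path compiles a statement template with handlebars and splits the result into individual queries; a sketch of that flow (real handlebars API; the template text and values are illustrative):

import handlebars from "handlebars";

const template = 'ALTER ROLE "{{username}}" WITH LOGIN=true; -- valid until {{expiration}}';
const renewStatement = handlebars.compile(template)({
  username: "svc-abc123",
  keyspace: "app_keyspace",
  expiration: new Date().toISOString()
});
// Multiple statements are split on ";" and executed one by one, as in the diff
const queries = renewStatement.split(";").filter(Boolean);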
@@ -8,7 +8,7 @@ import { verifyHostInputValidity } from "../dynamic-secret-fns";
 import { DynamicSecretElasticSearchSchema, ElasticSearchAuthTypes, TDynamicProviderFns } from "./models";
 
 const generatePassword = () => {
-  const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*";
+  const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*$#";
   return customAlphabet(charset, 64)();
 };
 
@@ -24,7 +24,7 @@ export const ElasticSearchProvider = (): TDynamicProviderFns => {
     return providerInputs;
   };
 
-  const $getClient = async (providerInputs: z.infer<typeof DynamicSecretElasticSearchSchema>) => {
+  const getClient = async (providerInputs: z.infer<typeof DynamicSecretElasticSearchSchema>) => {
     const connection = new ElasticSearchClient({
       node: {
         url: new URL(`${providerInputs.host}:${providerInputs.port}`),
@@ -55,7 +55,7 @@ export const ElasticSearchProvider = (): TDynamicProviderFns => {
 
   const validateConnection = async (inputs: unknown) => {
     const providerInputs = await validateProviderInputs(inputs);
-    const connection = await $getClient(providerInputs);
+    const connection = await getClient(providerInputs);
 
     const infoResponse = await connection
       .info()
@@ -67,7 +67,7 @@ export const ElasticSearchProvider = (): TDynamicProviderFns => {
 
   const create = async (inputs: unknown) => {
     const providerInputs = await validateProviderInputs(inputs);
-    const connection = await $getClient(providerInputs);
+    const connection = await getClient(providerInputs);
 
     const username = generateUsername();
     const password = generatePassword();
@@ -85,7 +85,7 @@ export const ElasticSearchProvider = (): TDynamicProviderFns => {
 
   const revoke = async (inputs: unknown, entityId: string) => {
     const providerInputs = await validateProviderInputs(inputs);
-    const connection = await $getClient(providerInputs);
+    const connection = await getClient(providerInputs);
 
     await connection.security.deleteUser({
       username: entityId
@@ -95,8 +95,8 @@ export const ElasticSearchProvider = (): TDynamicProviderFns => {
     return { entityId };
   };
 
-  const renew = async (_inputs: unknown, entityId: string) => {
-    // No renewal necessary
+  const renew = async (inputs: unknown, entityId: string) => {
+    // Do nothing
     return { entityId };
   };
 
@@ -6,17 +6,15 @@ import { AzureEntraIDProvider } from "./azure-entra-id";
 import { CassandraProvider } from "./cassandra";
 import { ElasticSearchProvider } from "./elastic-search";
 import { LdapProvider } from "./ldap";
-import { DynamicSecretProviders, TDynamicProviderFns } from "./models";
+import { DynamicSecretProviders } from "./models";
 import { MongoAtlasProvider } from "./mongo-atlas";
 import { MongoDBProvider } from "./mongo-db";
 import { RabbitMqProvider } from "./rabbit-mq";
 import { RedisDatabaseProvider } from "./redis";
-import { SapAseProvider } from "./sap-ase";
 import { SapHanaProvider } from "./sap-hana";
 import { SqlDatabaseProvider } from "./sql-database";
-import { TotpProvider } from "./totp";
 
-export const buildDynamicSecretProviders = (): Record<DynamicSecretProviders, TDynamicProviderFns> => ({
+export const buildDynamicSecretProviders = () => ({
   [DynamicSecretProviders.SqlDatabase]: SqlDatabaseProvider(),
   [DynamicSecretProviders.Cassandra]: CassandraProvider(),
   [DynamicSecretProviders.AwsIam]: AwsIamProvider(),
@@ -29,7 +27,5 @@ export const buildDynamicSecretProviders = (): Record<DynamicSecretProviders, TDynamicProviderFns> => ({
   [DynamicSecretProviders.AzureEntraID]: AzureEntraIDProvider(),
   [DynamicSecretProviders.Ldap]: LdapProvider(),
   [DynamicSecretProviders.SapHana]: SapHanaProvider(),
-  [DynamicSecretProviders.Snowflake]: SnowflakeProvider(),
-  [DynamicSecretProviders.Totp]: TotpProvider(),
-  [DynamicSecretProviders.SapAse]: SapAseProvider()
+  [DynamicSecretProviders.Snowflake]: SnowflakeProvider()
 });
@@ -52,7 +52,7 @@ export const LdapProvider = (): TDynamicProviderFns => {
     return providerInputs;
   };
 
-  const $getClient = async (providerInputs: z.infer<typeof LdapSchema>): Promise<ldapjs.Client> => {
+  const getClient = async (providerInputs: z.infer<typeof LdapSchema>): Promise<ldapjs.Client> => {
     return new Promise((resolve, reject) => {
       const client = ldapjs.createClient({
         url: providerInputs.url,
@@ -83,7 +83,7 @@ export const LdapProvider = (): TDynamicProviderFns => {
 
   const validateConnection = async (inputs: unknown) => {
     const providerInputs = await validateProviderInputs(inputs);
-    const client = await $getClient(providerInputs);
+    const client = await getClient(providerInputs);
     return client.connected;
   };
 
@@ -191,7 +191,7 @@ export const LdapProvider = (): TDynamicProviderFns => {
 
   const create = async (inputs: unknown) => {
     const providerInputs = await validateProviderInputs(inputs);
-    const client = await $getClient(providerInputs);
+    const client = await getClient(providerInputs);
 
     if (providerInputs.credentialType === LdapCredentialType.Static) {
       const dnMatch = providerInputs.rotationLdif.match(/^dn:\s*(.+)/m);
@@ -235,7 +235,7 @@ export const LdapProvider = (): TDynamicProviderFns => {
 
   const revoke = async (inputs: unknown, entityId: string) => {
     const providerInputs = await validateProviderInputs(inputs);
-    const client = await $getClient(providerInputs);
+    const client = await getClient(providerInputs);
 
     if (providerInputs.credentialType === LdapCredentialType.Static) {
       const dnMatch = providerInputs.rotationLdif.match(/^dn:\s*(.+)/m);
@@ -268,7 +268,7 @@ export const LdapProvider = (): TDynamicProviderFns => {
   };
 
   const renew = async (inputs: unknown, entityId: string) => {
-    // No renewal necessary
+    // Do nothing
     return { entityId };
   };
 
@@ -4,8 +4,7 @@ export enum SqlProviders {
   Postgres = "postgres",
   MySQL = "mysql2",
   Oracle = "oracledb",
-  MsSQL = "mssql",
-  SapAse = "sap-ase"
+  MsSQL = "mssql"
 }
 
 export enum ElasticSearchAuthTypes {
@@ -18,17 +17,6 @@ export enum LdapCredentialType {
   Static = "static"
 }
 
-export enum TotpConfigType {
-  URL = "url",
-  MANUAL = "manual"
-}
-
-export enum TotpAlgorithm {
-  SHA1 = "sha1",
-  SHA256 = "sha256",
-  SHA512 = "sha512"
-}
-
 export const DynamicSecretRedisDBSchema = z.object({
   host: z.string().trim().toLowerCase(),
   port: z.number(),
@@ -119,16 +107,6 @@ export const DynamicSecretCassandraSchema = z.object({
   ca: z.string().optional()
 });
 
-export const DynamicSecretSapAseSchema = z.object({
-  host: z.string().trim().toLowerCase(),
-  port: z.number(),
-  database: z.string().trim(),
-  username: z.string().trim(),
-  password: z.string().trim(),
-  creationStatement: z.string().trim(),
-  revocationStatement: z.string().trim()
-});
-
 export const DynamicSecretAwsIamSchema = z.object({
   accessKey: z.string().trim().min(1),
   secretAccessKey: z.string().trim().min(1),
@@ -243,34 +221,6 @@ export const LdapSchema = z.union([
   })
 ]);
 
-export const DynamicSecretTotpSchema = z.discriminatedUnion("configType", [
-  z.object({
-    configType: z.literal(TotpConfigType.URL),
-    url: z
-      .string()
-      .url()
-      .trim()
-      .min(1)
-      .refine((val) => {
-        const urlObj = new URL(val);
-        const secret = urlObj.searchParams.get("secret");
-
-        return Boolean(secret);
-      }, "OTP URL must contain secret field")
-  }),
-  z.object({
-    configType: z.literal(TotpConfigType.MANUAL),
-    secret: z
-      .string()
-      .trim()
-      .min(1)
-      .transform((val) => val.replace(/\s+/g, "")),
-    period: z.number().optional(),
-    algorithm: z.nativeEnum(TotpAlgorithm).optional(),
-    digits: z.number().optional()
-  })
-]);
-
 export enum DynamicSecretProviders {
   SqlDatabase = "sql-database",
   Cassandra = "cassandra",
@@ -284,15 +234,12 @@ export enum DynamicSecretProviders {
   AzureEntraID = "azure-entra-id",
   Ldap = "ldap",
   SapHana = "sap-hana",
-  Snowflake = "snowflake",
-  Totp = "totp",
-  SapAse = "sap-ase"
+  Snowflake = "snowflake"
 }
 
 export const DynamicSecretProviderSchema = z.discriminatedUnion("type", [
   z.object({ type: z.literal(DynamicSecretProviders.SqlDatabase), inputs: DynamicSecretSqlDBSchema }),
   z.object({ type: z.literal(DynamicSecretProviders.Cassandra), inputs: DynamicSecretCassandraSchema }),
-  z.object({ type: z.literal(DynamicSecretProviders.SapAse), inputs: DynamicSecretSapAseSchema }),
   z.object({ type: z.literal(DynamicSecretProviders.AwsIam), inputs: DynamicSecretAwsIamSchema }),
   z.object({ type: z.literal(DynamicSecretProviders.Redis), inputs: DynamicSecretRedisDBSchema }),
   z.object({ type: z.literal(DynamicSecretProviders.SapHana), inputs: DynamicSecretSapHanaSchema }),
@@ -303,8 +250,7 @@ export const DynamicSecretProviderSchema = z.discriminatedUnion("type", [
   z.object({ type: z.literal(DynamicSecretProviders.RabbitMq), inputs: DynamicSecretRabbitMqSchema }),
   z.object({ type: z.literal(DynamicSecretProviders.AzureEntraID), inputs: AzureEntraIDSchema }),
   z.object({ type: z.literal(DynamicSecretProviders.Ldap), inputs: LdapSchema }),
-  z.object({ type: z.literal(DynamicSecretProviders.Snowflake), inputs: DynamicSecretSnowflakeSchema }),
-  z.object({ type: z.literal(DynamicSecretProviders.Totp), inputs: DynamicSecretTotpSchema })
+  z.object({ type: z.literal(DynamicSecretProviders.Snowflake), inputs: DynamicSecretSnowflakeSchema })
 ]);
 
 export type TDynamicProviderFns = {
|
||||
import { DynamicSecretMongoAtlasSchema, TDynamicProviderFns } from "./models";
|
||||
|
||||
const generatePassword = (size = 48) => {
|
||||
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*";
|
||||
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*$#";
|
||||
return customAlphabet(charset, 48)(size);
|
||||
};
|
||||
|
||||
@ -22,7 +22,7 @@ export const MongoAtlasProvider = (): TDynamicProviderFns => {
|
||||
return providerInputs;
|
||||
};
|
||||
|
||||
const $getClient = async (providerInputs: z.infer<typeof DynamicSecretMongoAtlasSchema>) => {
|
||||
const getClient = async (providerInputs: z.infer<typeof DynamicSecretMongoAtlasSchema>) => {
|
||||
const client = axios.create({
|
||||
baseURL: "https://cloud.mongodb.com/api/atlas",
|
||||
headers: {
|
||||
@ -40,7 +40,7 @@ export const MongoAtlasProvider = (): TDynamicProviderFns => {
|
||||
|
||||
const validateConnection = async (inputs: unknown) => {
|
||||
const providerInputs = await validateProviderInputs(inputs);
|
||||
const client = await $getClient(providerInputs);
|
||||
const client = await getClient(providerInputs);
|
||||
|
||||
const isConnected = await client({
|
||||
method: "GET",
|
||||
@ -59,7 +59,7 @@ export const MongoAtlasProvider = (): TDynamicProviderFns => {
|
||||
|
||||
const create = async (inputs: unknown, expireAt: number) => {
|
||||
const providerInputs = await validateProviderInputs(inputs);
|
||||
const client = await $getClient(providerInputs);
|
||||
const client = await getClient(providerInputs);
|
||||
|
||||
const username = generateUsername();
|
||||
const password = generatePassword();
|
||||
@ -87,7 +87,7 @@ export const MongoAtlasProvider = (): TDynamicProviderFns => {
|
||||
|
||||
const revoke = async (inputs: unknown, entityId: string) => {
|
||||
const providerInputs = await validateProviderInputs(inputs);
|
||||
const client = await $getClient(providerInputs);
|
||||
const client = await getClient(providerInputs);
|
||||
|
||||
const username = entityId;
|
||||
const isExisting = await client({
|
||||
@ -114,7 +114,7 @@ export const MongoAtlasProvider = (): TDynamicProviderFns => {
|
||||
|
||||
const renew = async (inputs: unknown, entityId: string, expireAt: number) => {
|
||||
const providerInputs = await validateProviderInputs(inputs);
|
||||
const client = await $getClient(providerInputs);
|
||||
const client = await getClient(providerInputs);
|
||||
|
||||
const username = entityId;
|
||||
const expiration = new Date(expireAt).toISOString();
|
||||
|
@@ -8,7 +8,7 @@ import { verifyHostInputValidity } from "../dynamic-secret-fns";
 import { DynamicSecretMongoDBSchema, TDynamicProviderFns } from "./models";
 
 const generatePassword = (size = 48) => {
-  const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*";
+  const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*$#";
   return customAlphabet(charset, 48)(size);
 };
 
@@ -23,7 +23,7 @@ export const MongoDBProvider = (): TDynamicProviderFns => {
     return providerInputs;
   };
 
-  const $getClient = async (providerInputs: z.infer<typeof DynamicSecretMongoDBSchema>) => {
+  const getClient = async (providerInputs: z.infer<typeof DynamicSecretMongoDBSchema>) => {
     const isSrv = !providerInputs.port;
     const uri = isSrv
       ? `mongodb+srv://${providerInputs.host}`
@@ -42,7 +42,7 @@ export const MongoDBProvider = (): TDynamicProviderFns => {
 
   const validateConnection = async (inputs: unknown) => {
     const providerInputs = await validateProviderInputs(inputs);
-    const client = await $getClient(providerInputs);
+    const client = await getClient(providerInputs);
 
     const isConnected = await client
       .db(providerInputs.database)
@@ -55,7 +55,7 @@ export const MongoDBProvider = (): TDynamicProviderFns => {
 
   const create = async (inputs: unknown) => {
     const providerInputs = await validateProviderInputs(inputs);
-    const client = await $getClient(providerInputs);
+    const client = await getClient(providerInputs);
 
     const username = generateUsername();
     const password = generatePassword();
@@ -74,7 +74,7 @@ export const MongoDBProvider = (): TDynamicProviderFns => {
 
   const revoke = async (inputs: unknown, entityId: string) => {
     const providerInputs = await validateProviderInputs(inputs);
-    const client = await $getClient(providerInputs);
+    const client = await getClient(providerInputs);
 
     const username = entityId;
 
@@ -88,7 +88,6 @@ export const MongoDBProvider = (): TDynamicProviderFns => {
   };
 
   const renew = async (_inputs: unknown, entityId: string) => {
-    // No renewal necessary
     return { entityId };
   };
 
@ -11,7 +11,7 @@ import { verifyHostInputValidity } from "../dynamic-secret-fns";
import { DynamicSecretRabbitMqSchema, TDynamicProviderFns } from "./models";

const generatePassword = () => {
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*";
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*$#";
return customAlphabet(charset, 64)();
};

@ -84,7 +84,7 @@ export const RabbitMqProvider = (): TDynamicProviderFns => {
return providerInputs;
};

const $getClient = async (providerInputs: z.infer<typeof DynamicSecretRabbitMqSchema>) => {
const getClient = async (providerInputs: z.infer<typeof DynamicSecretRabbitMqSchema>) => {
const axiosInstance = axios.create({
baseURL: `${removeTrailingSlash(providerInputs.host)}:${providerInputs.port}/api`,
auth: {
@ -105,7 +105,7 @@ export const RabbitMqProvider = (): TDynamicProviderFns => {

const validateConnection = async (inputs: unknown) => {
const providerInputs = await validateProviderInputs(inputs);
const connection = await $getClient(providerInputs);
const connection = await getClient(providerInputs);

const infoResponse = await connection.get("/whoami").then(() => true);

@ -114,7 +114,7 @@ export const RabbitMqProvider = (): TDynamicProviderFns => {

const create = async (inputs: unknown) => {
const providerInputs = await validateProviderInputs(inputs);
const connection = await $getClient(providerInputs);
const connection = await getClient(providerInputs);

const username = generateUsername();
const password = generatePassword();
@ -134,15 +134,15 @@ export const RabbitMqProvider = (): TDynamicProviderFns => {

const revoke = async (inputs: unknown, entityId: string) => {
const providerInputs = await validateProviderInputs(inputs);
const connection = await $getClient(providerInputs);
const connection = await getClient(providerInputs);

await deleteRabbitMqUser({ axiosInstance: connection, usernameToDelete: entityId });

return { entityId };
};

const renew = async (_inputs: unknown, entityId: string) => {
// No renewal necessary
const renew = async (inputs: unknown, entityId: string) => {
// Do nothing
return { entityId };
};

@ -10,7 +10,7 @@ import { verifyHostInputValidity } from "../dynamic-secret-fns";
import { DynamicSecretRedisDBSchema, TDynamicProviderFns } from "./models";

const generatePassword = () => {
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*";
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*$#";
return customAlphabet(charset, 64)();
};

@ -55,7 +55,7 @@ export const RedisDatabaseProvider = (): TDynamicProviderFns => {
return providerInputs;
};

const $getClient = async (providerInputs: z.infer<typeof DynamicSecretRedisDBSchema>) => {
const getClient = async (providerInputs: z.infer<typeof DynamicSecretRedisDBSchema>) => {
let connection: Redis | null = null;
try {
connection = new Redis({
@ -92,7 +92,7 @@ export const RedisDatabaseProvider = (): TDynamicProviderFns => {

const validateConnection = async (inputs: unknown) => {
const providerInputs = await validateProviderInputs(inputs);
const connection = await $getClient(providerInputs);
const connection = await getClient(providerInputs);

const pingResponse = await connection
.ping()
@ -104,7 +104,7 @@ export const RedisDatabaseProvider = (): TDynamicProviderFns => {

const create = async (inputs: unknown, expireAt: number) => {
const providerInputs = await validateProviderInputs(inputs);
const connection = await $getClient(providerInputs);
const connection = await getClient(providerInputs);

const username = generateUsername();
const password = generatePassword();
@ -126,7 +126,7 @@ export const RedisDatabaseProvider = (): TDynamicProviderFns => {

const revoke = async (inputs: unknown, entityId: string) => {
const providerInputs = await validateProviderInputs(inputs);
const connection = await $getClient(providerInputs);
const connection = await getClient(providerInputs);

const username = entityId;

@ -141,9 +141,7 @@ export const RedisDatabaseProvider = (): TDynamicProviderFns => {

const renew = async (inputs: unknown, entityId: string, expireAt: number) => {
const providerInputs = await validateProviderInputs(inputs);
if (!providerInputs.renewStatement) return { entityId };

const connection = await $getClient(providerInputs);
const connection = await getClient(providerInputs);

const username = entityId;
const expiration = new Date(expireAt).toISOString();

@ -1,145 +0,0 @@
import handlebars from "handlebars";
import { customAlphabet } from "nanoid";
import odbc from "odbc";
import { z } from "zod";

import { BadRequestError } from "@app/lib/errors";
import { alphaNumericNanoId } from "@app/lib/nanoid";

import { verifyHostInputValidity } from "../dynamic-secret-fns";
import { DynamicSecretSapAseSchema, TDynamicProviderFns } from "./models";

const generatePassword = (size = 48) => {
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789";
return customAlphabet(charset, 48)(size);
};

const generateUsername = () => {
return alphaNumericNanoId(25);
};

enum SapCommands {
CreateLogin = "sp_addlogin",
DropLogin = "sp_droplogin"
}

export const SapAseProvider = (): TDynamicProviderFns => {
const validateProviderInputs = async (inputs: unknown) => {
const providerInputs = await DynamicSecretSapAseSchema.parseAsync(inputs);

verifyHostInputValidity(providerInputs.host);
return providerInputs;
};

const $getClient = async (providerInputs: z.infer<typeof DynamicSecretSapAseSchema>, useMaster?: boolean) => {
const connectionString =
`DRIVER={FreeTDS};` +
`SERVER=${providerInputs.host};` +
`PORT=${providerInputs.port};` +
`DATABASE=${useMaster ? "master" : providerInputs.database};` +
`UID=${providerInputs.username};` +
`PWD=${providerInputs.password};` +
`TDS_VERSION=5.0`;

const client = await odbc.connect(connectionString);

return client;
};

const validateConnection = async (inputs: unknown) => {
const providerInputs = await validateProviderInputs(inputs);
const masterClient = await $getClient(providerInputs, true);
const client = await $getClient(providerInputs);

const [resultFromMasterDatabase] = await masterClient.query<{ version: string }>("SELECT @@VERSION AS version");
const [resultFromSelectedDatabase] = await client.query<{ version: string }>("SELECT @@VERSION AS version");

if (!resultFromSelectedDatabase.version) {
throw new BadRequestError({
message: "Failed to validate SAP ASE connection, version query failed"
});
}

if (resultFromMasterDatabase.version !== resultFromSelectedDatabase.version) {
throw new BadRequestError({
message: "Failed to validate SAP ASE connection (master), version mismatch"
});
}

return true;
};

const create = async (inputs: unknown) => {
const providerInputs = await validateProviderInputs(inputs);

const username = `inf_${generateUsername()}`;
const password = `${generatePassword()}`;

const client = await $getClient(providerInputs);
const masterClient = await $getClient(providerInputs, true);

const creationStatement = handlebars.compile(providerInputs.creationStatement, { noEscape: true })({
username,
password
});

const queries = creationStatement.trim().replace(/\n/g, "").split(";").filter(Boolean);

for await (const query of queries) {
// If it's an adduser query, we need to first call sp_addlogin on the MASTER database.
// If not done, then the newly created user won't be able to authenticate.
await (query.startsWith(SapCommands.CreateLogin) ? masterClient : client).query(query);
}

await masterClient.close();
await client.close();

return { entityId: username, data: { DB_USERNAME: username, DB_PASSWORD: password } };
};

const revoke = async (inputs: unknown, username: string) => {
const providerInputs = await validateProviderInputs(inputs);

const revokeStatement = handlebars.compile(providerInputs.revocationStatement, { noEscape: true })({
username
});

const queries = revokeStatement.trim().replace(/\n/g, "").split(";").filter(Boolean);

const client = await $getClient(providerInputs);
const masterClient = await $getClient(providerInputs, true);

// Get all processes for this login and kill them. If there are active connections to the database when drop login happens, it will throw an error.
const result = await masterClient.query<{ spid?: string }>(`sp_who '${username}'`);

if (result && result.length > 0) {
for await (const row of result) {
if (row.spid) {
await masterClient.query(`KILL ${row.spid.trim()}`);
}
}
}

for await (const query of queries) {
await (query.startsWith(SapCommands.DropLogin) ? masterClient : client).query(query);
}

await masterClient.close();
await client.close();

return { entityId: username };
};

const renew = async (_: unknown, username: string) => {
// No need for renewal
return { entityId: username };
};

return {
validateProviderInputs,
validateConnection,
create,
revoke,
renew
};
};

@ -32,7 +32,7 @@ export const SapHanaProvider = (): TDynamicProviderFns => {
return providerInputs;
};

const $getClient = async (providerInputs: z.infer<typeof DynamicSecretSapHanaSchema>) => {
const getClient = async (providerInputs: z.infer<typeof DynamicSecretSapHanaSchema>) => {
const client = hdb.createClient({
host: providerInputs.host,
port: providerInputs.port,
@ -64,9 +64,9 @@ export const SapHanaProvider = (): TDynamicProviderFns => {

const validateConnection = async (inputs: unknown) => {
const providerInputs = await validateProviderInputs(inputs);
const client = await $getClient(providerInputs);
const client = await getClient(providerInputs);

const testResult = await new Promise<boolean>((resolve, reject) => {
const testResult: boolean = await new Promise((resolve, reject) => {
client.exec("SELECT 1 FROM DUMMY;", (err: any) => {
if (err) {
reject();
@ -86,7 +86,7 @@ export const SapHanaProvider = (): TDynamicProviderFns => {
const password = generatePassword();
const expiration = new Date(expireAt).toISOString();

const client = await $getClient(providerInputs);
const client = await getClient(providerInputs);
const creationStatement = handlebars.compile(providerInputs.creationStatement, { noEscape: true })({
username,
password,
@ -114,7 +114,7 @@ export const SapHanaProvider = (): TDynamicProviderFns => {

const revoke = async (inputs: unknown, username: string) => {
const providerInputs = await validateProviderInputs(inputs);
const client = await $getClient(providerInputs);
const client = await getClient(providerInputs);
const revokeStatement = handlebars.compile(providerInputs.revocationStatement)({ username });
const queries = revokeStatement.toString().split(";").filter(Boolean);
for await (const query of queries) {
@ -135,15 +135,13 @@ export const SapHanaProvider = (): TDynamicProviderFns => {
return { entityId: username };
};

const renew = async (inputs: unknown, entityId: string, expireAt: number) => {
const renew = async (inputs: unknown, username: string, expireAt: number) => {
const providerInputs = await validateProviderInputs(inputs);
if (!providerInputs.renewStatement) return { entityId };

const client = await $getClient(providerInputs);
const client = await getClient(providerInputs);
try {
const expiration = new Date(expireAt).toISOString();

const renewStatement = handlebars.compile(providerInputs.renewStatement)({ username: entityId, expiration });
const renewStatement = handlebars.compile(providerInputs.renewStatement)({ username, expiration });
const queries = renewStatement.toString().split(";").filter(Boolean);
for await (const query of queries) {
await new Promise((resolve, reject) => {
@ -163,7 +161,7 @@ export const SapHanaProvider = (): TDynamicProviderFns => {
client.disconnect();
}

return { entityId };
return { entityId: username };
};

return {

@ -12,7 +12,7 @@ import { DynamicSecretSnowflakeSchema, TDynamicProviderFns } from "./models";
const noop = () => {};

const generatePassword = (size = 48) => {
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*";
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*$#";
return customAlphabet(charset, 48)(size);
};

@ -34,7 +34,7 @@ export const SnowflakeProvider = (): TDynamicProviderFns => {
return providerInputs;
};

const $getClient = async (providerInputs: z.infer<typeof DynamicSecretSnowflakeSchema>) => {
const getClient = async (providerInputs: z.infer<typeof DynamicSecretSnowflakeSchema>) => {
const client = snowflake.createConnection({
account: `${providerInputs.orgId}-${providerInputs.accountId}`,
username: providerInputs.username,
@ -49,7 +49,7 @@ export const SnowflakeProvider = (): TDynamicProviderFns => {

const validateConnection = async (inputs: unknown) => {
const providerInputs = await validateProviderInputs(inputs);
const client = await $getClient(providerInputs);
const client = await getClient(providerInputs);

let isValidConnection: boolean;

@ -72,7 +72,7 @@ export const SnowflakeProvider = (): TDynamicProviderFns => {
const create = async (inputs: unknown, expireAt: number) => {
const providerInputs = await validateProviderInputs(inputs);

const client = await $getClient(providerInputs);
const client = await getClient(providerInputs);

const username = generateUsername();
const password = generatePassword();
@ -107,7 +107,7 @@ export const SnowflakeProvider = (): TDynamicProviderFns => {
const revoke = async (inputs: unknown, username: string) => {
const providerInputs = await validateProviderInputs(inputs);

const client = await $getClient(providerInputs);
const client = await getClient(providerInputs);

try {
const revokeStatement = handlebars.compile(providerInputs.revocationStatement)({ username });
@ -131,16 +131,17 @@ export const SnowflakeProvider = (): TDynamicProviderFns => {
return { entityId: username };
};

const renew = async (inputs: unknown, entityId: string, expireAt: number) => {
const renew = async (inputs: unknown, username: string, expireAt: number) => {
const providerInputs = await validateProviderInputs(inputs);
if (!providerInputs.renewStatement) return { entityId };

const client = await $getClient(providerInputs);
if (!providerInputs.renewStatement) return { entityId: username };

const client = await getClient(providerInputs);

try {
const expiration = getDaysToExpiry(new Date(expireAt));
const renewStatement = handlebars.compile(providerInputs.renewStatement)({
username: entityId,
username,
expiration
});

@ -160,7 +161,7 @@ export const SnowflakeProvider = (): TDynamicProviderFns => {
client.destroy(noop);
}

return { entityId };
return { entityId: username };
};

return {

@ -14,7 +14,7 @@ const generatePassword = (provider: SqlProviders) => {
// oracle has limit of 48 password length
const size = provider === SqlProviders.Oracle ? 30 : 48;

const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*";
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*$#";
return customAlphabet(charset, 48)(size);
};

@ -32,7 +32,7 @@ export const SqlDatabaseProvider = (): TDynamicProviderFns => {
return providerInputs;
};

const $getClient = async (providerInputs: z.infer<typeof DynamicSecretSqlDBSchema>) => {
const getClient = async (providerInputs: z.infer<typeof DynamicSecretSqlDBSchema>) => {
const ssl = providerInputs.ca ? { rejectUnauthorized: false, ca: providerInputs.ca } : undefined;
const db = knex({
client: providerInputs.client,
@ -52,7 +52,7 @@ export const SqlDatabaseProvider = (): TDynamicProviderFns => {

const validateConnection = async (inputs: unknown) => {
const providerInputs = await validateProviderInputs(inputs);
const db = await $getClient(providerInputs);
const db = await getClient(providerInputs);
// oracle needs from keyword
const testStatement = providerInputs.client === SqlProviders.Oracle ? "SELECT 1 FROM DUAL" : "SELECT 1";

@ -63,7 +63,7 @@ export const SqlDatabaseProvider = (): TDynamicProviderFns => {

const create = async (inputs: unknown, expireAt: number) => {
const providerInputs = await validateProviderInputs(inputs);
const db = await $getClient(providerInputs);
const db = await getClient(providerInputs);

const username = generateUsername(providerInputs.client);
const password = generatePassword(providerInputs.client);
@ -90,7 +90,7 @@ export const SqlDatabaseProvider = (): TDynamicProviderFns => {

const revoke = async (inputs: unknown, entityId: string) => {
const providerInputs = await validateProviderInputs(inputs);
const db = await $getClient(providerInputs);
const db = await getClient(providerInputs);

const username = entityId;
const { database } = providerInputs;
@ -110,19 +110,13 @@ export const SqlDatabaseProvider = (): TDynamicProviderFns => {

const renew = async (inputs: unknown, entityId: string, expireAt: number) => {
const providerInputs = await validateProviderInputs(inputs);
if (!providerInputs.renewStatement) return { entityId };

const db = await $getClient(providerInputs);
const db = await getClient(providerInputs);

const username = entityId;
const expiration = new Date(expireAt).toISOString();
const { database } = providerInputs;

const renewStatement = handlebars.compile(providerInputs.renewStatement)({
username: entityId,
expiration,
database
});

const renewStatement = handlebars.compile(providerInputs.renewStatement)({ username, expiration, database });
if (renewStatement) {
const queries = renewStatement.toString().split(";").filter(Boolean);
await db.transaction(async (tx) => {
@ -134,7 +128,7 @@ export const SqlDatabaseProvider = (): TDynamicProviderFns => {
}

await db.destroy();
return { entityId };
return { entityId: username };
};

return {

@ -1,90 +0,0 @@
import { authenticator } from "otplib";
import { HashAlgorithms } from "otplib/core";

import { alphaNumericNanoId } from "@app/lib/nanoid";

import { DynamicSecretTotpSchema, TDynamicProviderFns, TotpConfigType } from "./models";

export const TotpProvider = (): TDynamicProviderFns => {
const validateProviderInputs = async (inputs: unknown) => {
const providerInputs = await DynamicSecretTotpSchema.parseAsync(inputs);

return providerInputs;
};

const validateConnection = async () => {
return true;
};

const create = async (inputs: unknown) => {
const providerInputs = await validateProviderInputs(inputs);

const entityId = alphaNumericNanoId(32);
const authenticatorInstance = authenticator.clone();

let secret: string;
let period: number | null | undefined;
let digits: number | null | undefined;
let algorithm: HashAlgorithms | null | undefined;

if (providerInputs.configType === TotpConfigType.URL) {
const urlObj = new URL(providerInputs.url);
secret = urlObj.searchParams.get("secret") as string;
const periodFromUrl = urlObj.searchParams.get("period");
const digitsFromUrl = urlObj.searchParams.get("digits");
const algorithmFromUrl = urlObj.searchParams.get("algorithm");

if (periodFromUrl) {
period = +periodFromUrl;
}

if (digitsFromUrl) {
digits = +digitsFromUrl;
}

if (algorithmFromUrl) {
algorithm = algorithmFromUrl.toLowerCase() as HashAlgorithms;
}
} else {
secret = providerInputs.secret;
period = providerInputs.period;
digits = providerInputs.digits;
algorithm = providerInputs.algorithm as unknown as HashAlgorithms;
}

if (digits) {
authenticatorInstance.options = { digits };
}

if (algorithm) {
authenticatorInstance.options = { algorithm };
}

if (period) {
authenticatorInstance.options = { step: period };
}

return {
entityId,
data: { TOTP: authenticatorInstance.generate(secret), TIME_REMAINING: authenticatorInstance.timeRemaining() }
};
};

const revoke = async (_inputs: unknown, entityId: string) => {
return { entityId };
};

// eslint-disable-next-line @typescript-eslint/no-unused-vars
const renew = async (_inputs: unknown, entityId: string) => {
// No renewal necessary
return { entityId };
};

return {
validateProviderInputs,
validateConnection,
create,
revoke,
renew
};
};

@ -20,8 +20,7 @@ import {
TUpdateExternalKmsDTO
} from "./external-kms-types";
import { AwsKmsProviderFactory } from "./providers/aws-kms";
import { GcpKmsProviderFactory } from "./providers/gcp-kms";
import { ExternalKmsAwsSchema, ExternalKmsGcpSchema, KmsProviders, TExternalKmsGcpSchema } from "./providers/model";
import { ExternalKmsAwsSchema, KmsProviders } from "./providers/model";

type TExternalKmsServiceFactoryDep = {
externalKmsDAL: TExternalKmsDALFactory;
@ -79,13 +78,6 @@ export const externalKmsServiceFactory = ({
await externalKms.validateConnection();
}
break;
case KmsProviders.Gcp:
{
const externalKms = await GcpKmsProviderFactory({ inputs: provider.inputs });
await externalKms.validateConnection();
sanitizedProviderInput = JSON.stringify(provider.inputs);
}
break;
default:
throw new BadRequestError({ message: "external kms provided is invalid" });
}
@ -96,7 +88,7 @@ export const externalKmsServiceFactory = ({
});

const { cipherTextBlob: encryptedProviderInputs } = orgDataKeyEncryptor({
plainText: Buffer.from(sanitizedProviderInput)
plainText: Buffer.from(sanitizedProviderInput, "utf8")
});

const externalKms = await externalKmsDAL.transaction(async (tx) => {
@ -170,7 +162,7 @@ export const externalKmsServiceFactory = ({
case KmsProviders.Aws:
{
const decryptedProviderInput = await ExternalKmsAwsSchema.parseAsync(
JSON.parse(decryptedProviderInputBlob.toString())
JSON.parse(decryptedProviderInputBlob.toString("utf8"))
);
const updatedProviderInput = { ...decryptedProviderInput, ...provider.inputs };
const externalKms = await AwsKmsProviderFactory({ inputs: updatedProviderInput });
@ -178,17 +170,6 @@ export const externalKmsServiceFactory = ({
sanitizedProviderInput = JSON.stringify(updatedProviderInput);
}
break;
case KmsProviders.Gcp:
{
const decryptedProviderInput = await ExternalKmsGcpSchema.parseAsync(
JSON.parse(decryptedProviderInputBlob.toString())
);
const updatedProviderInput = { ...decryptedProviderInput, ...provider.inputs };
const externalKms = await GcpKmsProviderFactory({ inputs: updatedProviderInput });
await externalKms.validateConnection();
sanitizedProviderInput = JSON.stringify(updatedProviderInput);
}
break;
default:
throw new BadRequestError({ message: "external kms provided is invalid" });
}
@ -197,7 +178,7 @@ export const externalKmsServiceFactory = ({
let encryptedProviderInputs: Buffer | undefined;
if (sanitizedProviderInput) {
const { cipherTextBlob } = orgDataKeyEncryptor({
plainText: Buffer.from(sanitizedProviderInput)
plainText: Buffer.from(sanitizedProviderInput, "utf8")
});
encryptedProviderInputs = cipherTextBlob;
}
@ -290,17 +271,10 @@ export const externalKmsServiceFactory = ({
switch (externalKmsDoc.provider) {
case KmsProviders.Aws: {
const decryptedProviderInput = await ExternalKmsAwsSchema.parseAsync(
JSON.parse(decryptedProviderInputBlob.toString())
JSON.parse(decryptedProviderInputBlob.toString("utf8"))
);
return { ...kmsDoc, external: { ...externalKmsDoc, providerInput: decryptedProviderInput } };
}
case KmsProviders.Gcp: {
const decryptedProviderInput = await ExternalKmsGcpSchema.parseAsync(
JSON.parse(decryptedProviderInputBlob.toString())
);

return { ...kmsDoc, external: { ...externalKmsDoc, providerInput: decryptedProviderInput } };
}
default:
throw new BadRequestError({ message: "external kms provided is invalid" });
}
@ -338,34 +312,21 @@ export const externalKmsServiceFactory = ({
switch (externalKmsDoc.provider) {
case KmsProviders.Aws: {
const decryptedProviderInput = await ExternalKmsAwsSchema.parseAsync(
JSON.parse(decryptedProviderInputBlob.toString())
JSON.parse(decryptedProviderInputBlob.toString("utf8"))
);
return { ...kmsDoc, external: { ...externalKmsDoc, providerInput: decryptedProviderInput } };
}
case KmsProviders.Gcp: {
const decryptedProviderInput = await ExternalKmsGcpSchema.parseAsync(
JSON.parse(decryptedProviderInputBlob.toString())
);

return { ...kmsDoc, external: { ...externalKmsDoc, providerInput: decryptedProviderInput } };
}
default:
throw new BadRequestError({ message: "external kms provided is invalid" });
}
};

const fetchGcpKeys = async ({ credential, gcpRegion }: Pick<TExternalKmsGcpSchema, "credential" | "gcpRegion">) => {
const externalKms = await GcpKmsProviderFactory({ inputs: { credential, gcpRegion, keyName: "" } });
return externalKms.getKeysList();
};

return {
create,
updateById,
deleteById,
list,
findById,
findByName,
fetchGcpKeys
findByName
};
};

@ -1,113 +0,0 @@
import { KeyManagementServiceClient } from "@google-cloud/kms";

import { BadRequestError } from "@app/lib/errors";
import { logger } from "@app/lib/logger";

import { ExternalKmsGcpSchema, TExternalKmsGcpClientSchema, TExternalKmsProviderFns } from "./model";

const getGcpKmsClient = async ({ credential, gcpRegion }: TExternalKmsGcpClientSchema) => {
const gcpKmsClient = new KeyManagementServiceClient({
credentials: credential
});
const projectId = credential.project_id;
const locationName = gcpKmsClient.locationPath(projectId, gcpRegion);

return {
gcpKmsClient,
locationName
};
};

type GcpKmsProviderArgs = {
inputs: unknown;
};
type TGcpKmsProviderFactoryReturn = TExternalKmsProviderFns & {
getKeysList: () => Promise<{ keys: string[] }>;
};

export const GcpKmsProviderFactory = async ({ inputs }: GcpKmsProviderArgs): Promise<TGcpKmsProviderFactoryReturn> => {
const { credential, gcpRegion, keyName } = await ExternalKmsGcpSchema.parseAsync(inputs);
const { gcpKmsClient, locationName } = await getGcpKmsClient({
credential,
gcpRegion
});

const validateConnection = async () => {
try {
await gcpKmsClient.listKeyRings({
parent: locationName
});
return true;
} catch (error) {
throw new BadRequestError({
message: "Cannot connect to GCP KMS"
});
}
};

// Used when adding the KMS to fetch the list of keys in specified region
const getKeysList = async () => {
try {
const [keyRings] = await gcpKmsClient.listKeyRings({
parent: locationName
});

const validKeyRings = keyRings
.filter(
(keyRing): keyRing is { name: string } =>
keyRing !== null && typeof keyRing === "object" && "name" in keyRing && typeof keyRing.name === "string"
)
.map((keyRing) => keyRing.name);
const keyList: string[] = [];
const keyListPromises = validKeyRings.map((keyRingName) =>
gcpKmsClient
.listCryptoKeys({
parent: keyRingName
})
.then(([cryptoKeys]) =>
cryptoKeys
.filter(
(key): key is { name: string } =>
key !== null && typeof key === "object" && "name" in key && typeof key.name === "string"
)
.map((key) => key.name)
)
);

const cryptoKeyLists = await Promise.all(keyListPromises);
keyList.push(...cryptoKeyLists.flat());
return { keys: keyList };
} catch (error) {
logger.error(error, "Could not validate GCP KMS connection and credentials");
throw new BadRequestError({
message: "Could not validate GCP KMS connection and credentials",
error
});
}
};

const encrypt = async (data: Buffer) => {
const encryptedText = await gcpKmsClient.encrypt({
name: keyName,
plaintext: data
});
if (!encryptedText[0].ciphertext) throw new Error("encryption failed");
return { encryptedBlob: Buffer.from(encryptedText[0].ciphertext) };
};

const decrypt = async (encryptedBlob: Buffer) => {
const decryptedText = await gcpKmsClient.decrypt({
name: keyName,
ciphertext: encryptedBlob
});
if (!decryptedText[0].plaintext) throw new Error("decryption failed");
return { data: Buffer.from(decryptedText[0].plaintext) };
};

return {
validateConnection,
getKeysList,
encrypt,
decrypt
};
};

@ -1,23 +1,13 @@
import { z } from "zod";

export enum KmsProviders {
Aws = "aws",
Gcp = "gcp"
Aws = "aws"
}

export enum KmsAwsCredentialType {
AssumeRole = "assume-role",
AccessKey = "access-key"
}
// Google uses snake_case for their enum values and we need to match that
export enum KmsGcpCredentialType {
ServiceAccount = "service_account"
}

export enum KmsGcpKeyFetchAuthType {
Credential = "credential",
Kms = "kmsId"
}

export const ExternalKmsAwsSchema = z.object({
credential: z
@ -52,44 +42,14 @@ export const ExternalKmsAwsSchema = z.object({
});
export type TExternalKmsAwsSchema = z.infer<typeof ExternalKmsAwsSchema>;

export const ExternalKmsGcpCredentialSchema = z.object({
type: z.literal(KmsGcpCredentialType.ServiceAccount),
project_id: z.string().min(1),
private_key_id: z.string().min(1),
private_key: z.string().min(1),
client_email: z.string().min(1),
client_id: z.string().min(1),
auth_uri: z.string().min(1),
token_uri: z.string().min(1),
auth_provider_x509_cert_url: z.string().min(1),
client_x509_cert_url: z.string().min(1),
universe_domain: z.string().min(1)
});

export type TExternalKmsGcpCredentialSchema = z.infer<typeof ExternalKmsGcpCredentialSchema>;

export const ExternalKmsGcpSchema = z.object({
credential: ExternalKmsGcpCredentialSchema.describe("GCP Service Account JSON credential to connect"),
gcpRegion: z.string().trim().describe("GCP region where the KMS key is located"),
keyName: z.string().trim().describe("GCP key name")
});
export type TExternalKmsGcpSchema = z.infer<typeof ExternalKmsGcpSchema>;

const ExternalKmsGcpClientSchema = ExternalKmsGcpSchema.pick({ gcpRegion: true }).extend({
credential: ExternalKmsGcpCredentialSchema
});
export type TExternalKmsGcpClientSchema = z.infer<typeof ExternalKmsGcpClientSchema>;

// The root schema of the JSON
export const ExternalKmsInputSchema = z.discriminatedUnion("type", [
z.object({ type: z.literal(KmsProviders.Aws), inputs: ExternalKmsAwsSchema }),
z.object({ type: z.literal(KmsProviders.Gcp), inputs: ExternalKmsGcpSchema })
z.object({ type: z.literal(KmsProviders.Aws), inputs: ExternalKmsAwsSchema })
]);
export type TExternalKmsInputSchema = z.infer<typeof ExternalKmsInputSchema>;

export const ExternalKmsInputUpdateSchema = z.discriminatedUnion("type", [
z.object({ type: z.literal(KmsProviders.Aws), inputs: ExternalKmsAwsSchema.partial() }),
z.object({ type: z.literal(KmsProviders.Gcp), inputs: ExternalKmsGcpSchema.partial() })
z.object({ type: z.literal(KmsProviders.Aws), inputs: ExternalKmsAwsSchema.partial() })
]);
export type TExternalKmsInputUpdateSchema = z.infer<typeof ExternalKmsInputUpdateSchema>;

@ -27,7 +27,7 @@ export const initializeHsmModule = () => {

logger.info("PKCS#11 module initialized");
} catch (err) {
logger.error(err, "Failed to initialize PKCS#11 module");
logger.error("Failed to initialize PKCS#11 module:", err);
throw err;
}
};
@ -39,7 +39,7 @@ export const initializeHsmModule = () => {
isInitialized = false;
logger.info("PKCS#11 module finalized");
} catch (err) {
logger.error(err, "Failed to finalize PKCS#11 module");
logger.error("Failed to finalize PKCS#11 module:", err);
throw err;
}
}

@ -1,4 +1,4 @@
import { ForbiddenError, subject } from "@casl/ability";
import { ForbiddenError } from "@casl/ability";
import { packRules } from "@casl/ability/extra";
import ms from "ms";

@ -62,10 +62,7 @@ export const identityProjectAdditionalPrivilegeV2ServiceFactory = ({
actorAuthMethod,
actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionActions.Edit,
subject(ProjectPermissionSub.Identity, { identityId })
);
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Edit, ProjectPermissionSub.Identity);
const { permission: targetIdentityPermission } = await permissionService.getProjectPermission(
ActorType.IDENTITY,
identityId,
@ -142,10 +139,7 @@ export const identityProjectAdditionalPrivilegeV2ServiceFactory = ({
actorAuthMethod,
actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionActions.Edit,
subject(ProjectPermissionSub.Identity, { identityId: identityProjectMembership.identityId })
);
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Edit, ProjectPermissionSub.Identity);
const { permission: targetIdentityPermission } = await permissionService.getProjectPermission(
ActorType.IDENTITY,
identityProjectMembership.identityId,
@ -222,10 +216,7 @@ export const identityProjectAdditionalPrivilegeV2ServiceFactory = ({
actorAuthMethod,
actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionActions.Edit,
subject(ProjectPermissionSub.Identity, { identityId: identityProjectMembership.identityId })
);
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Delete, ProjectPermissionSub.Identity);
const { permission: identityRolePermission } = await permissionService.getProjectPermission(
ActorType.IDENTITY,
identityProjectMembership.identityId,
@ -267,10 +258,7 @@ export const identityProjectAdditionalPrivilegeV2ServiceFactory = ({
actorAuthMethod,
actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionActions.Read,
subject(ProjectPermissionSub.Identity, { identityId: identityProjectMembership.identityId })
);
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.Identity);

return {
...identityPrivilege,
@ -301,10 +289,7 @@ export const identityProjectAdditionalPrivilegeV2ServiceFactory = ({
actorAuthMethod,
actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionActions.Read,
subject(ProjectPermissionSub.Identity, { identityId: identityProjectMembership.identityId })
);
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.Identity);

const identityPrivilege = await identityProjectAdditionalPrivilegeDAL.findOne({
slug,
@ -336,10 +321,7 @@ export const identityProjectAdditionalPrivilegeV2ServiceFactory = ({
actorAuthMethod,
actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionActions.Read,
subject(ProjectPermissionSub.Identity, { identityId: identityProjectMembership.identityId })
);
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Edit, ProjectPermissionSub.Identity);

const identityPrivileges = await identityProjectAdditionalPrivilegeDAL.find(
{

@ -1,4 +1,4 @@
import { ForbiddenError, MongoAbility, RawRuleOf, subject } from "@casl/ability";
import { ForbiddenError, MongoAbility, RawRuleOf } from "@casl/ability";
import { PackRule, packRules, unpackRules } from "@casl/ability/extra";
import ms from "ms";

@ -69,11 +69,7 @@ export const identityProjectAdditionalPrivilegeServiceFactory = ({
actorAuthMethod,
actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionActions.Edit,
subject(ProjectPermissionSub.Identity, { identityId })
);

ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Edit, ProjectPermissionSub.Identity);
const { permission: targetIdentityPermission } = await permissionService.getProjectPermission(
ActorType.IDENTITY,
identityId,
@ -150,11 +146,7 @@ export const identityProjectAdditionalPrivilegeServiceFactory = ({
actorAuthMethod,
actorOrgId
);

ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionActions.Edit,
subject(ProjectPermissionSub.Identity, { identityId })
);
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Edit, ProjectPermissionSub.Identity);

const { permission: targetIdentityPermission } = await permissionService.getProjectPermission(
ActorType.IDENTITY,
@ -249,11 +241,7 @@ export const identityProjectAdditionalPrivilegeServiceFactory = ({
actorAuthMethod,
actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionActions.Edit,
subject(ProjectPermissionSub.Identity, { identityId })
);

ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Edit, ProjectPermissionSub.Identity);
const { permission: identityRolePermission } = await permissionService.getProjectPermission(
ActorType.IDENTITY,
identityProjectMembership.identityId,
@ -306,10 +294,7 @@ export const identityProjectAdditionalPrivilegeServiceFactory = ({
actorAuthMethod,
actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionActions.Read,
subject(ProjectPermissionSub.Identity, { identityId })
);
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.Identity);

const identityPrivilege = await identityProjectAdditionalPrivilegeDAL.findOne({
slug,
@ -348,11 +333,7 @@ export const identityProjectAdditionalPrivilegeServiceFactory = ({
actorAuthMethod,
actorOrgId
);

ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionActions.Read,
subject(ProjectPermissionSub.Identity, { identityId })
);
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Edit, ProjectPermissionSub.Identity);

const identityPrivileges = await identityProjectAdditionalPrivilegeDAL.find({
projectMembershipId: identityProjectMembership.id

@ -36,7 +36,8 @@ export const testLDAPConfig = async (ldapConfig: TLDAPConfig): Promise<boolean>
});

ldapClient.on("error", (err) => {
logger.error(err, "LDAP client error");
logger.error("LDAP client error:", err);
logger.error(err);
resolve(false);
});

@ -161,8 +161,8 @@ export const licenseServiceFactory = ({
}
} catch (error) {
logger.error(
error,
`getPlan: encountered an error when fetching pan [orgId=${orgId}] [projectId=${projectId}] [error]`
`getPlan: encountered an error when fetching pan [orgId=${orgId}] [projectId=${projectId}] [error]`,
error
);
await keyStore.setItemWithExpiry(
FEATURE_CACHE_KEY(orgId),

@ -31,6 +31,7 @@ export enum OrgPermissionSubjects {
}

export type OrgPermissionSet =
| [OrgPermissionActions.Read, OrgPermissionSubjects.Workspace]
| [OrgPermissionActions.Create, OrgPermissionSubjects.Workspace]
| [OrgPermissionActions, OrgPermissionSubjects.Role]
| [OrgPermissionActions, OrgPermissionSubjects.Member]
@ -51,6 +52,7 @@ export type OrgPermissionSet =
const buildAdminPermission = () => {
const { can, rules } = new AbilityBuilder<MongoAbility<OrgPermissionSet>>(createMongoAbility);
// ws permissions
can(OrgPermissionActions.Read, OrgPermissionSubjects.Workspace);
can(OrgPermissionActions.Create, OrgPermissionSubjects.Workspace);
// role permission
can(OrgPermissionActions.Read, OrgPermissionSubjects.Role);
@ -133,6 +135,7 @@ export const orgAdminPermissions = buildAdminPermission();
const buildMemberPermission = () => {
const { can, rules } = new AbilityBuilder<MongoAbility<OrgPermissionSet>>(createMongoAbility);

can(OrgPermissionActions.Read, OrgPermissionSubjects.Workspace);
can(OrgPermissionActions.Create, OrgPermissionSubjects.Workspace);
can(OrgPermissionActions.Read, OrgPermissionSubjects.Member);
can(OrgPermissionActions.Read, OrgPermissionSubjects.Groups);

@ -127,15 +127,14 @@ export const permissionDALFactory = (db: TDbClient) => {

const getProjectPermission = async (userId: string, projectId: string) => {
try {
const subQueryUserGroups = db(TableName.UserGroupMembership).where("userId", userId).select("groupId");
const docs = await db
.replicaNode()(TableName.Users)
.where(`${TableName.Users}.id`, userId)
.leftJoin(TableName.UserGroupMembership, `${TableName.UserGroupMembership}.userId`, `${TableName.Users}.id`)
.leftJoin(TableName.GroupProjectMembership, (queryBuilder) => {
void queryBuilder
.on(`${TableName.GroupProjectMembership}.projectId`, db.raw("?", [projectId]))
// @ts-expect-error akhilmhdh: this is valid knexjs query. Its just ts type argument is missing it
.andOnIn(`${TableName.GroupProjectMembership}.groupId`, subQueryUserGroups);
.andOn(`${TableName.GroupProjectMembership}.groupId`, `${TableName.UserGroupMembership}.groupId`);
})
.leftJoin(
TableName.GroupProjectMembershipRole,

@ -1,7 +1,14 @@
import picomatch from "picomatch";
import { z } from "zod";

import { PermissionConditionOperators } from "@app/lib/casl";
export enum PermissionConditionOperators {
$IN = "$in",
$ALL = "$all",
$REGEX = "$regex",
$EQ = "$eq",
$NEQ = "$ne",
$GLOB = "$glob"
}

export const PermissionConditionSchema = {
[PermissionConditionOperators.$IN]: z.string().trim().min(1).array(),

@ -1,10 +1,10 @@
import { AbilityBuilder, createMongoAbility, ForcedSubject, MongoAbility } from "@casl/ability";
import { z } from "zod";

import { conditionsMatcher, PermissionConditionOperators } from "@app/lib/casl";
import { conditionsMatcher } from "@app/lib/casl";
import { UnpackedPermissionSchema } from "@app/server/routes/santizedSchemas/permission";

import { PermissionConditionSchema } from "./permission-types";
import { PermissionConditionOperators, PermissionConditionSchema } from "./permission-types";

export enum ProjectPermissionActions {
Read = "read",
@ -82,10 +82,6 @@ export type SecretImportSubjectFields = {
secretPath: string;
};

export type IdentityManagementSubjectFields = {
identityId: string;
};

export type ProjectPermissionSet =
| [
ProjectPermissionActions,
@ -125,10 +121,7 @@ export type ProjectPermissionSet =
| [ProjectPermissionActions, ProjectPermissionSub.ServiceTokens]
| [ProjectPermissionActions, ProjectPermissionSub.SecretApproval]
| [ProjectPermissionActions, ProjectPermissionSub.SecretRotation]
| [
ProjectPermissionActions,
ProjectPermissionSub.Identity | (ForcedSubject<ProjectPermissionSub.Identity> & IdentityManagementSubjectFields)
]
| [ProjectPermissionActions, ProjectPermissionSub.Identity]
| [ProjectPermissionActions, ProjectPermissionSub.CertificateAuthorities]
| [ProjectPermissionActions, ProjectPermissionSub.Certificates]
| [ProjectPermissionActions, ProjectPermissionSub.CertificateTemplates]
@ -220,21 +213,6 @@ const SecretConditionV2Schema = z
})
.partial();

const IdentityManagementConditionSchema = z
.object({
identityId: z.union([
z.string(),
z
.object({
[PermissionConditionOperators.$EQ]: PermissionConditionSchema[PermissionConditionOperators.$EQ],
[PermissionConditionOperators.$NEQ]: PermissionConditionSchema[PermissionConditionOperators.$NEQ],
[PermissionConditionOperators.$IN]: PermissionConditionSchema[PermissionConditionOperators.$IN]
})
.partial()
])
})
.partial();

const GeneralPermissionSchema = [
z.object({
subject: z.literal(ProjectPermissionSub.SecretApproval).describe("The entity this permission pertains to."),
@ -284,6 +262,12 @@ const GeneralPermissionSchema = [
"Describe what action an entity can take."
)
}),
z.object({
subject: z.literal(ProjectPermissionSub.Identity).describe("The entity this permission pertains to."),
action: CASL_ACTION_SCHEMA_NATIVE_ENUM(ProjectPermissionActions).describe(
"Describe what action an entity can take."
)
}),
z.object({
subject: z.literal(ProjectPermissionSub.ServiceTokens).describe("The entity this permission pertains to."),
action: CASL_ACTION_SCHEMA_NATIVE_ENUM(ProjectPermissionActions).describe(
@ -389,12 +373,6 @@ export const ProjectPermissionV1Schema = z.discriminatedUnion("subject", [
"Describe what action an entity can take."
)
}),
z.object({
subject: z.literal(ProjectPermissionSub.Identity).describe("The entity this permission pertains to."),
action: CASL_ACTION_SCHEMA_NATIVE_ENUM(ProjectPermissionActions).describe(
"Describe what action an entity can take."
)
}),
...GeneralPermissionSchema
]);

@ -439,16 +417,6 @@ export const ProjectPermissionV2Schema = z.discriminatedUnion("subject", [
"When specified, only matching conditions will be allowed to access given resource."
).optional()
}),
z.object({
subject: z.literal(ProjectPermissionSub.Identity).describe("The entity this permission pertains to."),
inverted: z.boolean().optional().describe("Whether rule allows or forbids."),
action: CASL_ACTION_SCHEMA_NATIVE_ENUM(ProjectPermissionActions).describe(
"Describe what action an entity can take."
),
conditions: IdentityManagementConditionSchema.describe(
"When specified, only matching conditions will be allowed to access given resource."
).optional()
}),
...GeneralPermissionSchema
]);

@ -729,26 +697,26 @@ export const buildServiceTokenProjectPermission = (
[ProjectPermissionSub.Secrets, ProjectPermissionSub.SecretImports, ProjectPermissionSub.SecretFolders].forEach(
(subject) => {
if (canWrite) {
// TODO: @Akhi
// @ts-expect-error type
can(ProjectPermissionActions.Edit, subject, {
// TODO: @Akhi
// @ts-expect-error type
secretPath: { $glob: secretPath },
environment
});
// @ts-expect-error type
can(ProjectPermissionActions.Create, subject, {
// @ts-expect-error type
secretPath: { $glob: secretPath },
environment
});
// @ts-expect-error type
can(ProjectPermissionActions.Delete, subject, {
// @ts-expect-error type
secretPath: { $glob: secretPath },
environment
});
}
if (canRead) {
// @ts-expect-error type
can(ProjectPermissionActions.Read, subject, {
// @ts-expect-error type
secretPath: { $glob: secretPath },
environment
});

@ -46,7 +46,7 @@ export const rateLimitServiceFactory = ({ rateLimitDAL, licenseService }: TRateL
}
return rateLimit;
} catch (err) {
logger.error(err, "Error fetching rate limits");
logger.error("Error fetching rate limits %o", err);
return undefined;
}
};
@ -69,12 +69,12 @@ export const rateLimitServiceFactory = ({ rateLimitDAL, licenseService }: TRateL
mfaRateLimit: rateLimit.mfaRateLimit
};

logger.info(newRateLimitMaxConfiguration, "syncRateLimitConfiguration: rate limit configuration");
logger.info(`syncRateLimitConfiguration: rate limit configuration: %o`, newRateLimitMaxConfiguration);
Object.freeze(newRateLimitMaxConfiguration);
rateLimitMaxConfiguration = newRateLimitMaxConfiguration;
}
} catch (error) {
logger.error(error, "Error syncing rate limit configurations");
logger.error(`Error syncing rate limit configurations: %o`, error);
}
};

@ -18,7 +18,6 @@ import { TGroupProjectDALFactory } from "@app/services/group-project/group-proje
import { TOrgDALFactory } from "@app/services/org/org-dal";
import { deleteOrgMembershipFn } from "@app/services/org/org-fns";
import { getDefaultOrgMembershipRole } from "@app/services/org/org-role-fns";
import { OrgAuthMethod } from "@app/services/org/org-types";
import { TOrgMembershipDALFactory } from "@app/services/org-membership/org-membership-dal";
import { TProjectDALFactory } from "@app/services/project/project-dal";
import { TProjectBotDALFactory } from "@app/services/project-bot/project-bot-dal";
@ -72,7 +71,6 @@ type TScimServiceFactoryDep = {
| "deleteMembershipById"
| "transaction"
| "updateMembershipById"
| "findOrgById"
>;
orgMembershipDAL: Pick<
TOrgMembershipDALFactory,
@ -290,7 +288,8 @@ export const scimServiceFactory = ({
const createScimUser = async ({ externalId, email, firstName, lastName, orgId }: TCreateScimUserDTO) => {
if (!email) throw new ScimRequestError({ detail: "Invalid request. Missing email.", status: 400 });

const org = await orgDAL.findOrgById(orgId);
const org = await orgDAL.findById(orgId);

if (!org)
throw new ScimRequestError({
detail: "Organization not found",
@ -303,24 +302,13 @@ export const scimServiceFactory = ({
status: 403
});

if (!org.orgAuthMethod) {
throw new ScimRequestError({
detail: "Neither SAML or OIDC SSO is configured",
status: 400
});
}

const appCfg = getConfig();
const serverCfg = await getServerCfg();

const aliasType = org.orgAuthMethod === OrgAuthMethod.OIDC ? UserAliasType.OIDC : UserAliasType.SAML;
const trustScimEmails =
org.orgAuthMethod === OrgAuthMethod.OIDC ? serverCfg.trustOidcEmails : serverCfg.trustSamlEmails;

const userAlias = await userAliasDAL.findOne({
externalId,
orgId,
aliasType
aliasType: UserAliasType.SAML
});

const { user: createdUser, orgMembership: createdOrgMembership } = await userDAL.transaction(async (tx) => {
@ -361,7 +349,7 @@ export const scimServiceFactory = ({
);
}
} else {
if (trustScimEmails) {
if (serverCfg.trustSamlEmails) {
user = await userDAL.findOne(
{
email,
@ -379,9 +367,9 @@ export const scimServiceFactory = ({
);
user = await userDAL.create(
{
username: trustScimEmails ? email : uniqueUsername,
username: serverCfg.trustSamlEmails ? email : uniqueUsername,
email,
isEmailVerified: trustScimEmails,
isEmailVerified: serverCfg.trustSamlEmails,
firstName,
lastName,
authMethods: [],
@ -394,7 +382,7 @@ export const scimServiceFactory = ({
await userAliasDAL.create(
{
userId: user.id,
aliasType,
aliasType: UserAliasType.SAML,
externalId,
emails: email ? [email] : [],
orgId
@ -449,7 +437,7 @@ export const scimServiceFactory = ({
recipients: [email],
substitutions: {
organizationName: org.name,
callback_url: `${appCfg.SITE_URL}/api/v1/sso/redirect/organizations/${org.slug}`
callback_url: `${appCfg.SITE_URL}/api/v1/sso/redirect/saml2/organizations/${org.slug}`
}
});
}
@ -468,14 +456,6 @@ export const scimServiceFactory = ({

// partial
const updateScimUser = async ({ orgMembershipId, orgId, operations }: TUpdateScimUserDTO) => {
const org = await orgDAL.findOrgById(orgId);
if (!org.orgAuthMethod) {
throw new ScimRequestError({
detail: "Neither SAML or OIDC SSO is configured",
status: 400
});
}

const [membership] = await orgDAL
.findMembership({
[`${TableName.OrgMembership}.id` as "id"]: orgMembershipId,
@ -513,9 +493,6 @@ export const scimServiceFactory = ({
scimPatch(scimUser, operations);

const serverCfg = await getServerCfg();
const trustScimEmails =
org.orgAuthMethod === OrgAuthMethod.OIDC ? serverCfg.trustOidcEmails : serverCfg.trustSamlEmails;

await userDAL.transaction(async (tx) => {
await orgMembershipDAL.updateById(
membership.id,
@ -531,7 +508,7 @@ export const scimServiceFactory = ({
firstName: scimUser.name.givenName,
email: scimUser.emails[0].value,
lastName: scimUser.name.familyName,
isEmailVerified: hasEmailChanged ? trustScimEmails : true
isEmailVerified: hasEmailChanged ? serverCfg.trustSamlEmails : true
},
tx
);
@ -549,14 +526,6 @@ export const scimServiceFactory = ({
email,
externalId
}: TReplaceScimUserDTO) => {
const org = await orgDAL.findOrgById(orgId);
if (!org.orgAuthMethod) {
throw new ScimRequestError({
detail: "Neither SAML or OIDC SSO is configured",
status: 400
});
}

const [membership] = await orgDAL
.findMembership({
[`${TableName.OrgMembership}.id` as "id"]: orgMembershipId,
@ -586,7 +555,7 @@ export const scimServiceFactory = ({
await userAliasDAL.update(
{
orgId,
aliasType: org.orgAuthMethod === OrgAuthMethod.OIDC ? UserAliasType.OIDC : UserAliasType.SAML,
aliasType: UserAliasType.SAML,
userId: membership.userId
},
{
@ -607,8 +576,7 @@ export const scimServiceFactory = ({
firstName,
email,
lastName,
isEmailVerified:
org.orgAuthMethod === OrgAuthMethod.OIDC ? serverCfg.trustOidcEmails : serverCfg.trustSamlEmails
isEmailVerified: serverCfg.trustSamlEmails
},
tx
);

@ -238,11 +238,11 @@ export const secretScanningQueueFactory = ({
});

queueService.listen(QueueName.SecretPushEventScan, "failed", (job, err) => {
logger.error(err, "Failed to secret scan on push", job?.data);
logger.error("Failed to secret scan on push", job?.data, err);
});

queueService.listen(QueueName.SecretFullRepoScan, "failed", (job, err) => {
logger.error(err, "Failed to do full repo secret scan", job?.data);
logger.error("Failed to do full repo secret scan", job?.data, err);
});

return { startFullRepoScan, startPushEventScan };
@ -391,7 +391,6 @@ export const PROJECTS = {
CREATE: {
organizationSlug: "The slug of the organization to create the project in.",
projectName: "The name of the project to create.",
projectDescription: "An optional description label for the project.",
slug: "An optional slug for the project.",
template: "The name of the project template, if specified, to apply to this project."
},

@ -404,7 +403,6 @@ export const PROJECTS = {
UPDATE: {
workspaceId: "The ID of the project to update.",
name: "The new name of the project.",
projectDescription: "An optional description label for the project.",
autoCapitalization: "Disable or enable auto-capitalization for the project."
},
GET_KEY: {

@ -1082,8 +1080,7 @@ export const INTEGRATION = {
shouldDisableDelete: "The flag to disable deletion of secrets in AWS Parameter Store.",
shouldMaskSecrets: "Specifies if the secrets synced from Infisical to Gitlab should be marked as 'Masked'.",
shouldProtectSecrets: "Specifies if the secrets synced from Infisical to Gitlab should be marked as 'Protected'.",
shouldEnableDelete: "The flag to enable deletion of secrets.",
octopusDeployScopeValues: "Specifies the scope values to set on synced secrets to Octopus Deploy."
shouldEnableDelete: "The flag to enable deletion of secrets."
}
},
UPDATE: {
@ -54,12 +54,3 @@ export const isAtLeastAsPrivileged = (permissions1: MongoAbility, permissions2:

return set1.size >= set2.size;
};

export enum PermissionConditionOperators {
$IN = "$in",
$ALL = "$all",
$REGEX = "$regex",
$EQ = "$eq",
$NEQ = "$ne",
$GLOB = "$glob"
}
@ -1,7 +1,7 @@
import { Logger } from "pino";
import { z } from "zod";

import { removeTrailingSlash } from "../fn";
import { CustomLogger } from "../logger/logger";
import { zpStr } from "../zod";

export const GITLAB_URL = "https://gitlab.com";

@ -10,7 +10,7 @@ export const GITLAB_URL = "https://gitlab.com";
export const IS_PACKAGED = (process as any)?.pkg !== undefined;

const zodStrBool = z
.string()
.enum(["true", "false"])
.optional()
.transform((val) => val === "true");

@ -157,15 +157,6 @@ const envSchema = z
INFISICAL_CLOUD: zodStrBool.default("false"),
MAINTENANCE_MODE: zodStrBool.default("false"),
CAPTCHA_SECRET: zpStr(z.string().optional()),

// TELEMETRY
OTEL_TELEMETRY_COLLECTION_ENABLED: zodStrBool.default("false"),
OTEL_EXPORT_OTLP_ENDPOINT: zpStr(z.string().optional()),
OTEL_OTLP_PUSH_INTERVAL: z.coerce.number().default(30000),
OTEL_COLLECTOR_BASIC_AUTH_USERNAME: zpStr(z.string().optional()),
OTEL_COLLECTOR_BASIC_AUTH_PASSWORD: zpStr(z.string().optional()),
OTEL_EXPORT_TYPE: z.enum(["prometheus", "otlp"]).optional(),

PLAIN_API_KEY: zpStr(z.string().optional()),
PLAIN_WISH_LABEL_IDS: zpStr(z.string().optional()),
DISABLE_AUDIT_LOG_GENERATION: zodStrBool.default("false"),

@ -178,10 +169,7 @@ const envSchema = z
HSM_LIB_PATH: zpStr(z.string().optional()),
HSM_PIN: zpStr(z.string().optional()),
HSM_KEY_LABEL: zpStr(z.string().optional()),
HSM_SLOT: z.coerce.number().optional().default(0),

USE_PG_QUEUE: zodStrBool.default("false"),
SHOULD_INIT_PG_QUEUE: zodStrBool.default("false")
HSM_SLOT: z.coerce.number().optional().default(0)
})
// To ensure that basic encryption is always possible.
.refine(

@ -215,11 +203,11 @@ let envCfg: Readonly<z.infer<typeof envSchema>>;

export const getConfig = () => envCfg;
// cannot import singleton logger directly as it needs config to load various transport
export const initEnvConfig = (logger?: CustomLogger) => {
export const initEnvConfig = (logger: Logger) => {
const parsedEnv = envSchema.safeParse(process.env);
if (!parsedEnv.success) {
(logger ?? console).error("Invalid environment variables. Check the error below");
(logger ?? console).error(parsedEnv.error.issues);
logger.error("Invalid environment variables. Check the error below");
logger.error(parsedEnv.error.issues);
process.exit(-1);
}
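As a side note on the `zodStrBool` pattern above: string env vars are narrowed to "true"/"false" and transformed into real booleans. A minimal self-contained sketch, assuming plain zod (the EXAMPLE_FLAG name is illustrative):

```ts
import { z } from "zod";

// Accept only "true"/"false", default when unset, and emit a real boolean.
const zodStrBool = z
  .enum(["true", "false"])
  .optional()
  .transform((val) => val === "true");

const schema = z.object({ EXAMPLE_FLAG: zodStrBool.default("false") });

console.log(schema.parse({}).EXAMPLE_FLAG); // false
console.log(schema.parse({ EXAMPLE_FLAG: "true" }).EXAMPLE_FLAG); // true
```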
@ -1,8 +1,6 @@
/* eslint-disable @typescript-eslint/no-unsafe-argument */
/* eslint-disable @typescript-eslint/no-unsafe-assignment */
// logger follows a singleton pattern
// easier to use it that's all.
import { requestContext } from "@fastify/request-context";
import pino, { Logger } from "pino";
import { z } from "zod";

@ -15,37 +13,14 @@ const logLevelToSeverityLookup: Record<string, string> = {
"60": "CRITICAL"
};

// eslint-disable-next-line import/no-mutable-exports
export let logger: Readonly<Logger>;
// akhilmhdh:
// The logger is not placed in the main app config to avoid a circular dependency.
// The config requires the logger to display errors when an invalid environment is supplied.
// On the other hand, the logger needs the config to obtain credentials for AWS or other transports.
// By keeping the logger separate, it becomes an independent package.

// We define our own custom logger interface to enforce structure to the logging methods.

export interface CustomLogger extends Omit<Logger, "info" | "error" | "warn" | "debug"> {
info: {
// eslint-disable-next-line @typescript-eslint/no-explicit-any
(obj: unknown, msg?: string, ...args: any[]): void;
};

error: {
// eslint-disable-next-line @typescript-eslint/no-explicit-any
(obj: unknown, msg?: string, ...args: any[]): void;
};
warn: {
// eslint-disable-next-line @typescript-eslint/no-explicit-any
(obj: unknown, msg?: string, ...args: any[]): void;
};
debug: {
// eslint-disable-next-line @typescript-eslint/no-explicit-any
(obj: unknown, msg?: string, ...args: any[]): void;
};
}

// eslint-disable-next-line import/no-mutable-exports
export let logger: Readonly<CustomLogger>;

const loggerConfig = z.object({
AWS_CLOUDWATCH_LOG_GROUP_NAME: z.string().default("infisical-log-stream"),
AWS_CLOUDWATCH_LOG_REGION: z.string().default("us-east-1"),

@ -87,17 +62,6 @@ const redactedKeys = [
"config"
];

const UNKNOWN_REQUEST_ID = "UNKNOWN_REQUEST_ID";

const extractReqId = () => {
try {
return requestContext.get("reqId") || UNKNOWN_REQUEST_ID;
} catch (err) {
console.log("failed to get request context", err);
return UNKNOWN_REQUEST_ID;
}
};

export const initLogger = async () => {
const cfg = loggerConfig.parse(process.env);
const targets: pino.TransportMultiOptions["targets"][number][] = [

@ -130,30 +94,6 @@ export const initLogger = async () => {
targets
});

const wrapLogger = (originalLogger: Logger): CustomLogger => {
// eslint-disable-next-line no-param-reassign, @typescript-eslint/no-explicit-any
originalLogger.info = (obj: unknown, msg?: string, ...args: any[]) => {
return originalLogger.child({ reqId: extractReqId() }).info(obj, msg, ...args);
};

// eslint-disable-next-line no-param-reassign, @typescript-eslint/no-explicit-any
originalLogger.error = (obj: unknown, msg?: string, ...args: any[]) => {
return originalLogger.child({ reqId: extractReqId() }).error(obj, msg, ...args);
};

// eslint-disable-next-line no-param-reassign, @typescript-eslint/no-explicit-any
originalLogger.warn = (obj: unknown, msg?: string, ...args: any[]) => {
return originalLogger.child({ reqId: extractReqId() }).warn(obj, msg, ...args);
};

// eslint-disable-next-line no-param-reassign, @typescript-eslint/no-explicit-any
originalLogger.debug = (obj: unknown, msg?: string, ...args: any[]) => {
return originalLogger.child({ reqId: extractReqId() }).debug(obj, msg, ...args);
};

return originalLogger;
};

logger = pino(
{
mixin(_context, level) {

@ -173,6 +113,5 @@ export const initLogger = async () => {
// eslint-disable-next-line @typescript-eslint/no-unsafe-argument
transport
);

return wrapLogger(logger);
return logger;
};
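The wrapper deleted above patches each log method so every record carries the current request id. The underlying mechanism is pino's child logger; a minimal sketch, with a stand-in for the request-context lookup:

```ts
import pino from "pino";

const getRequestId = () => "req-abc123"; // illustrative stand-in for requestContext.get("reqId")

const logger = pino();

// A child logger stamps every record it emits with the bound fields.
const reqLogger = logger.child({ reqId: getRequestId() });
reqLogger.info({ route: "/health" }, "request served"); // record includes reqId
```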
@ -1,91 +0,0 @@
import opentelemetry, { diag, DiagConsoleLogger, DiagLogLevel } from "@opentelemetry/api";
import { getNodeAutoInstrumentations } from "@opentelemetry/auto-instrumentations-node";
import { OTLPMetricExporter } from "@opentelemetry/exporter-metrics-otlp-proto";
import { PrometheusExporter } from "@opentelemetry/exporter-prometheus";
import { registerInstrumentations } from "@opentelemetry/instrumentation";
import { Resource } from "@opentelemetry/resources";
import { AggregationTemporality, MeterProvider, PeriodicExportingMetricReader } from "@opentelemetry/sdk-metrics";
import { ATTR_SERVICE_NAME, ATTR_SERVICE_VERSION } from "@opentelemetry/semantic-conventions";
import dotenv from "dotenv";

import { initEnvConfig } from "../config/env";

dotenv.config();

const initTelemetryInstrumentation = ({
exportType,
otlpURL,
otlpUser,
otlpPassword,
otlpPushInterval
}: {
exportType?: string;
otlpURL?: string;
otlpUser?: string;
otlpPassword?: string;
otlpPushInterval?: number;
}) => {
diag.setLogger(new DiagConsoleLogger(), DiagLogLevel.DEBUG);

const resource = Resource.default().merge(
new Resource({
[ATTR_SERVICE_NAME]: "infisical-core",
[ATTR_SERVICE_VERSION]: "0.1.0"
})
);

const metricReaders = [];
switch (exportType) {
case "prometheus": {
const promExporter = new PrometheusExporter();
metricReaders.push(promExporter);
break;
}
case "otlp": {
const otlpExporter = new OTLPMetricExporter({
url: `${otlpURL}/v1/metrics`,
headers: {
Authorization: `Basic ${btoa(`${otlpUser}:${otlpPassword}`)}`
},
temporalityPreference: AggregationTemporality.DELTA
});
metricReaders.push(
new PeriodicExportingMetricReader({
exporter: otlpExporter,
exportIntervalMillis: otlpPushInterval
})
);
break;
}
default:
throw new Error("Invalid OTEL export type");
}

const meterProvider = new MeterProvider({
resource,
readers: metricReaders
});

opentelemetry.metrics.setGlobalMeterProvider(meterProvider);

registerInstrumentations({
instrumentations: [getNodeAutoInstrumentations()]
});
};

const setupTelemetry = () => {
const appCfg = initEnvConfig();

if (appCfg.OTEL_TELEMETRY_COLLECTION_ENABLED) {
console.log("Initializing telemetry instrumentation");
initTelemetryInstrumentation({
otlpURL: appCfg.OTEL_EXPORT_OTLP_ENDPOINT,
otlpUser: appCfg.OTEL_COLLECTOR_BASIC_AUTH_USERNAME,
otlpPassword: appCfg.OTEL_COLLECTOR_BASIC_AUTH_PASSWORD,
otlpPushInterval: appCfg.OTEL_OTLP_PUSH_INTERVAL,
exportType: appCfg.OTEL_EXPORT_TYPE
});
}
};

void setupTelemetry();
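Once a global MeterProvider like the one above is registered, any module can record metrics through @opentelemetry/api without importing the SDK. A minimal consumer sketch (meter and counter names are illustrative):

```ts
import opentelemetry from "@opentelemetry/api";

const meter = opentelemetry.metrics.getMeter("infisical-core");
const loginCounter = meter.createCounter("user_logins", {
  description: "Number of user logins" // illustrative instrument
});

loginCounter.add(1, { method: "saml" }); // exported by whichever reader was configured
```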
@ -1,7 +1,4 @@
import "./lib/telemetry/instrumentation";

import dotenv from "dotenv";
import { Redis } from "ioredis";
import path from "path";

import { initializeHsmModule } from "@app/ee/services/hsm/hsm-fns";

@ -21,7 +18,6 @@ dotenv.config();
const run = async () => {
const logger = await initLogger();
const appCfg = initEnvConfig(logger);

const db = initDbConnection({
dbConnectionUri: appCfg.DB_CONNECTION_URI,
dbRootCert: appCfg.DB_ROOT_CERT,

@ -56,17 +52,13 @@ const run = async () => {
}

const smtp = smtpServiceFactory(formatSmtpConfig());

const queue = queueServiceFactory(appCfg.REDIS_URL, appCfg.DB_CONNECTION_URI);
await queue.initialize();

const queue = queueServiceFactory(appCfg.REDIS_URL);
const keyStore = keyStoreFactory(appCfg.REDIS_URL);
const redis = new Redis(appCfg.REDIS_URL);

const hsmModule = initializeHsmModule();
hsmModule.initialize();

const server = await main({ db, auditLogDb, hsmModule: hsmModule.getModule(), smtp, logger, queue, keyStore, redis });
const server = await main({ db, auditLogDb, hsmModule: hsmModule.getModule(), smtp, logger, queue, keyStore });
const bootstrap = await bootstrapCheck({ db });

// eslint-disable-next-line
@ -1,6 +1,5 @@
import { Job, JobsOptions, Queue, QueueOptions, RepeatOptions, Worker, WorkerListener } from "bullmq";
import Redis from "ioredis";
import PgBoss, { WorkOptions } from "pg-boss";

import { SecretEncryptionAlgo, SecretKeyEncoding } from "@app/db/schemas";
import { TCreateAuditLogDTO } from "@app/ee/services/audit-log/audit-log-types";

@ -8,8 +7,6 @@ import {
TScanFullRepoEventPayload,
TScanPushEventPayload
} from "@app/ee/services/secret-scanning/secret-scanning-queue/secret-scanning-queue-types";
import { getConfig } from "@app/lib/config/env";
import { logger } from "@app/lib/logger";
import {
TFailedIntegrationSyncEmailsPayload,
TIntegrationSyncPayload,

@ -187,39 +184,17 @@ export type TQueueJobTypes = {
};

export type TQueueServiceFactory = ReturnType<typeof queueServiceFactory>;
export const queueServiceFactory = (redisUrl: string, dbConnectionUrl: string) => {
export const queueServiceFactory = (redisUrl: string) => {
const connection = new Redis(redisUrl, { maxRetriesPerRequest: null });
const queueContainer = {} as Record<
QueueName,
Queue<TQueueJobTypes[QueueName]["payload"], void, TQueueJobTypes[QueueName]["name"]>
>;

const pgBoss = new PgBoss({
connectionString: dbConnectionUrl,
archiveCompletedAfterSeconds: 60,
archiveFailedAfterSeconds: 1000, // we want to keep failed jobs for a longer time so that it can be retried
deleteAfterSeconds: 30
});

const queueContainerPg = {} as Record<QueueJobs, boolean>;

const workerContainer = {} as Record<
QueueName,
Worker<TQueueJobTypes[QueueName]["payload"], void, TQueueJobTypes[QueueName]["name"]>
>;

const initialize = async () => {
const appCfg = getConfig();
if (appCfg.SHOULD_INIT_PG_QUEUE) {
logger.info("Initializing pg-queue...");
await pgBoss.start();

pgBoss.on("error", (error) => {
logger.error(error, "pg-queue error");
});
}
};

const start = <T extends QueueName>(
name: T,
jobFn: (job: Job<TQueueJobTypes[T]["payload"], void, TQueueJobTypes[T]["name"]>, token?: string) => Promise<void>,

@ -240,27 +215,6 @@ export const queueServiceFactory = (redisUrl: string, dbConnectionUrl: string) =
});
};

const startPg = async <T extends QueueName>(
jobName: QueueJobs,
jobsFn: (jobs: PgBoss.Job<TQueueJobTypes[T]["payload"]>[]) => Promise<void>,
options: WorkOptions & {
workerCount: number;
}
) => {
if (queueContainerPg[jobName]) {
throw new Error(`${jobName} queue is already initialized`);
}

await pgBoss.createQueue(jobName);
queueContainerPg[jobName] = true;

await Promise.all(
Array.from({ length: options.workerCount }).map(() =>
pgBoss.work<TQueueJobTypes[T]["payload"]>(jobName, options, jobsFn)
)
);
};

const listen = <
T extends QueueName,
U extends keyof WorkerListener<TQueueJobTypes[T]["payload"], void, TQueueJobTypes[T]["name"]>

@ -284,18 +238,6 @@ export const queueServiceFactory = (redisUrl: string, dbConnectionUrl: string) =
await q.add(job, data, opts);
};

const queuePg = async <T extends QueueName>(
job: TQueueJobTypes[T]["name"],
data: TQueueJobTypes[T]["payload"],
opts?: PgBoss.SendOptions & { jobId?: string }
) => {
await pgBoss.send({
name: job,
data,
options: opts
});
};

const stopRepeatableJob = async <T extends QueueName>(
name: T,
job: TQueueJobTypes[T]["name"],

@ -332,17 +274,5 @@ export const queueServiceFactory = (redisUrl: string, dbConnectionUrl: string) =
await Promise.all(Object.values(workerContainer).map((worker) => worker.close()));
};

return {
initialize,
start,
listen,
queue,
shutdown,
stopRepeatableJob,
stopRepeatableJobByJobId,
clearQueue,
stopJobById,
startPg,
queuePg
};
return { start, listen, queue, shutdown, stopRepeatableJob, stopRepeatableJobByJobId, clearQueue, stopJobById };
};
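The pg-boss wiring removed above follows the library's usual lifecycle: start, create the queue, register workers, then send jobs. A minimal sketch under those assumptions (queue name and connection string are illustrative; the batch-style handler matches recent pg-boss versions):

```ts
import PgBoss from "pg-boss";

const boss = new PgBoss("postgres://user:pass@localhost:5432/app"); // illustrative connection string

const run = async () => {
  await boss.start();
  await boss.createQueue("audit-log-queue");

  // Workers receive jobs in batches in recent pg-boss releases.
  await boss.work("audit-log-queue", async (jobs) => {
    for (const job of jobs) console.log("processing", job.id, job.data);
  });

  await boss.send("audit-log-queue", { event: "secret.read" });
};

void run();
```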
@ -10,22 +10,18 @@ import fastifyFormBody from "@fastify/formbody";
import helmet from "@fastify/helmet";
import type { FastifyRateLimitOptions } from "@fastify/rate-limit";
import ratelimiter from "@fastify/rate-limit";
import { fastifyRequestContext } from "@fastify/request-context";
import fastify from "fastify";
import { Redis } from "ioredis";
import { Knex } from "knex";
import { Logger } from "pino";

import { HsmModule } from "@app/ee/services/hsm/hsm-types";
import { TKeyStoreFactory } from "@app/keystore/keystore";
import { getConfig, IS_PACKAGED } from "@app/lib/config/env";
import { CustomLogger } from "@app/lib/logger/logger";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { TQueueServiceFactory } from "@app/queue";
import { TSmtpService } from "@app/services/smtp/smtp-service";

import { globalRateLimiterCfg } from "./config/rateLimiter";
import { addErrorsToResponseSchemas } from "./plugins/add-errors-to-response-schemas";
import { apiMetrics } from "./plugins/api-metrics";
import { fastifyErrHandler } from "./plugins/error-handler";
import { registerExternalNextjs } from "./plugins/external-nextjs";
import { serializerCompiler, validatorCompiler, ZodTypeProvider } from "./plugins/fastify-zod";

@ -38,22 +34,19 @@ type TMain = {
auditLogDb?: Knex;
db: Knex;
smtp: TSmtpService;
logger?: CustomLogger;
logger?: Logger;
queue: TQueueServiceFactory;
keyStore: TKeyStoreFactory;
hsmModule: HsmModule;
redis: Redis;
};

// Run the server!
export const main = async ({ db, hsmModule, auditLogDb, smtp, logger, queue, keyStore, redis }: TMain) => {
export const main = async ({ db, hsmModule, auditLogDb, smtp, logger, queue, keyStore }: TMain) => {
const appCfg = getConfig();

const server = fastify({
logger: appCfg.NODE_ENV === "test" ? false : logger,
genReqId: () => `req-${alphaNumericNanoId(14)}`,
trustProxy: true,

connectionTimeout: appCfg.isHsmConfigured ? 90_000 : 30_000,
ignoreTrailingSlash: true,
pluginTimeout: 40_000

@ -62,7 +55,6 @@ export const main = async ({ db, hsmModule, auditLogDb, smtp, logger, queue, key
server.setValidatorCompiler(validatorCompiler);
server.setSerializerCompiler(serializerCompiler);

server.decorate("redis", redis);
server.addContentTypeParser("application/scim+json", { parseAs: "string" }, (_, body, done) => {
try {
const strBody = body instanceof Buffer ? body.toString() : body;

@ -94,10 +86,6 @@ export const main = async ({ db, hsmModule, auditLogDb, smtp, logger, queue, key
// pull ip based on various proxy headers
await server.register(fastifyIp);

if (appCfg.OTEL_TELEMETRY_COLLECTION_ENABLED) {
await server.register(apiMetrics);
}

await server.register(fastifySwagger);
await server.register(fastifyFormBody);
await server.register(fastifyErrHandler);

@ -111,13 +99,6 @@ export const main = async ({ db, hsmModule, auditLogDb, smtp, logger, queue, key

await server.register(maintenanceMode);

await server.register(fastifyRequestContext, {
defaultStoreValues: (req) => ({
reqId: req.id,
log: req.log.child({ reqId: req.id })
})
});

await server.register(registerRoutes, { smtp, queue, db, auditLogDb, keyStore, hsmModule });

if (appCfg.isProductionMode) {
@ -1,21 +0,0 @@
import opentelemetry from "@opentelemetry/api";
import fp from "fastify-plugin";

export const apiMetrics = fp(async (fastify) => {
  const apiMeter = opentelemetry.metrics.getMeter("API");
  const latencyHistogram = apiMeter.createHistogram("API_latency", {
    unit: "ms"
  });

  fastify.addHook("onResponse", async (request, reply) => {
    const { method } = request;
    const route = request.routerPath;
    const { statusCode } = reply;

    latencyHistogram.record(reply.elapsedTime, {
      route,
      method,
      statusCode
    });
  });
});
@ -1,4 +1,4 @@
import { ForbiddenError, PureAbility } from "@casl/ability";
import { ForbiddenError } from "@casl/ability";
import fastifyPlugin from "fastify-plugin";
import jwt from "jsonwebtoken";
import { ZodError } from "zod";

@ -27,7 +27,6 @@ enum HttpStatusCodes {
NotFound = 404,
Unauthorized = 401,
Forbidden = 403,
UnprocessableContent = 422,
// eslint-disable-next-line @typescript-eslint/no-shadow
InternalServerError = 500,
GatewayTimeout = 504,

@ -40,102 +39,77 @@ export const fastifyErrHandler = fastifyPlugin(async (server: FastifyZodProvider
if (error instanceof BadRequestError) {
void res
.status(HttpStatusCodes.BadRequest)
.send({ reqId: req.id, statusCode: HttpStatusCodes.BadRequest, message: error.message, error: error.name });
.send({ statusCode: HttpStatusCodes.BadRequest, message: error.message, error: error.name });
} else if (error instanceof NotFoundError) {
void res
.status(HttpStatusCodes.NotFound)
.send({ reqId: req.id, statusCode: HttpStatusCodes.NotFound, message: error.message, error: error.name });
.send({ statusCode: HttpStatusCodes.NotFound, message: error.message, error: error.name });
} else if (error instanceof UnauthorizedError) {
void res.status(HttpStatusCodes.Unauthorized).send({
reqId: req.id,
statusCode: HttpStatusCodes.Unauthorized,
message: error.message,
error: error.name
});
void res
.status(HttpStatusCodes.Unauthorized)
.send({ statusCode: HttpStatusCodes.Unauthorized, message: error.message, error: error.name });
} else if (error instanceof DatabaseError || error instanceof InternalServerError) {
void res.status(HttpStatusCodes.InternalServerError).send({
reqId: req.id,
statusCode: HttpStatusCodes.InternalServerError,
message: "Something went wrong",
error: error.name
});
void res
.status(HttpStatusCodes.InternalServerError)
.send({ statusCode: HttpStatusCodes.InternalServerError, message: "Something went wrong", error: error.name });
} else if (error instanceof GatewayTimeoutError) {
void res.status(HttpStatusCodes.GatewayTimeout).send({
reqId: req.id,
statusCode: HttpStatusCodes.GatewayTimeout,
message: error.message,
error: error.name
});
void res
.status(HttpStatusCodes.GatewayTimeout)
.send({ statusCode: HttpStatusCodes.GatewayTimeout, message: error.message, error: error.name });
} else if (error instanceof ZodError) {
void res.status(HttpStatusCodes.UnprocessableContent).send({
reqId: req.id,
statusCode: HttpStatusCodes.UnprocessableContent,
error: "ValidationFailure",
message: error.issues
});
void res
.status(HttpStatusCodes.Unauthorized)
.send({ statusCode: HttpStatusCodes.Unauthorized, error: "ValidationFailure", message: error.issues });
} else if (error instanceof ForbiddenError) {
void res.status(HttpStatusCodes.Forbidden).send({
reqId: req.id,
statusCode: HttpStatusCodes.Forbidden,
error: "PermissionDenied",
message: `You are not allowed to ${error.action} on ${error.subjectType}`,
details: (error.ability as PureAbility).rulesFor(error.action as string, error.subjectType).map((el) => ({
action: el.action,
inverted: el.inverted,
subject: el.subject,
conditions: el.conditions
}))
message: `You are not allowed to ${error.action} on ${error.subjectType} - ${JSON.stringify(error.subject)}`
});
} else if (error instanceof ForbiddenRequestError) {
void res.status(HttpStatusCodes.Forbidden).send({
reqId: req.id,
statusCode: HttpStatusCodes.Forbidden,
message: error.message,
error: error.name
});
} else if (error instanceof RateLimitError) {
void res.status(HttpStatusCodes.TooManyRequests).send({
reqId: req.id,
statusCode: HttpStatusCodes.TooManyRequests,
message: error.message,
error: error.name
});
} else if (error instanceof ScimRequestError) {
void res.status(error.status).send({
reqId: req.id,
schemas: error.schemas,
status: error.status,
detail: error.detail
});
} else if (error instanceof OidcAuthError) {
void res.status(HttpStatusCodes.InternalServerError).send({
reqId: req.id,
statusCode: HttpStatusCodes.InternalServerError,
message: error.message,
error: error.name
});
void res
.status(HttpStatusCodes.InternalServerError)
.send({ statusCode: HttpStatusCodes.InternalServerError, message: error.message, error: error.name });
} else if (error instanceof jwt.JsonWebTokenError) {
let errorMessage = error.message;
const message = (() => {
if (error.message === JWTErrors.JwtExpired) {
return "Your token has expired. Please re-authenticate.";
}
if (error.message === JWTErrors.JwtMalformed) {
return "The provided access token is malformed. Please use a valid token or generate a new one and try again.";
}
if (error.message === JWTErrors.InvalidAlgorithm) {
return "The access token is signed with an invalid algorithm. Please provide a valid token and try again.";
}

if (error.message === JWTErrors.JwtExpired) {
errorMessage = "Your token has expired. Please re-authenticate.";
} else if (error.message === JWTErrors.JwtMalformed) {
errorMessage =
"The provided access token is malformed. Please use a valid token or generate a new one and try again.";
} else if (error.message === JWTErrors.InvalidAlgorithm) {
errorMessage =
"The access token is signed with an invalid algorithm. Please provide a valid token and try again.";
}
return error.message;
})();

void res.status(HttpStatusCodes.Forbidden).send({
reqId: req.id,
statusCode: HttpStatusCodes.Forbidden,
error: "TokenError",
message: errorMessage
message
});
} else {
void res.status(HttpStatusCodes.InternalServerError).send({
reqId: req.id,
statusCode: HttpStatusCodes.InternalServerError,
error: "InternalServerError",
message: "Something went wrong"
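The handler above is one long instanceof chain inside fastify's error hook; the same shape in isolation, as a minimal sketch with a hypothetical custom error class:

```ts
import fastify from "fastify";

class RateLimitError extends Error {} // hypothetical custom error

const server = fastify();

server.setErrorHandler((error, req, res) => {
  if (error instanceof RateLimitError) {
    void res
      .status(429)
      .send({ reqId: req.id, statusCode: 429, message: error.message, error: error.name });
  } else {
    void res
      .status(500)
      .send({ reqId: req.id, statusCode: 500, error: "InternalServerError", message: "Something went wrong" });
  }
});
```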
@ -19,7 +19,7 @@ export const registerSecretScannerGhApp = async (server: FastifyZodProvider) =>

app.on("installation", async (context) => {
const { payload } = context;
logger.info({ repositories: payload.repositories }, "Installed secret scanner to");
logger.info("Installed secret scanner to:", { repositories: payload.repositories });
});

app.on("push", async (context) => {
@ -394,14 +394,13 @@ export const registerRoutes = async (
permissionService
});

const auditLogQueue = await auditLogQueueServiceFactory({
const auditLogQueue = auditLogQueueServiceFactory({
auditLogDAL,
queueService,
projectDAL,
licenseService,
auditLogStreamDAL
});

const auditLogService = auditLogServiceFactory({ auditLogDAL, permissionService, auditLogQueue });
const auditLogStreamService = auditLogStreamServiceFactory({
licenseService,
@ -30,39 +30,26 @@ export const integrationAuthPubSchema = IntegrationAuthsSchema.pick({

export const DefaultResponseErrorsSchema = {
400: z.object({
reqId: z.string(),
statusCode: z.literal(400),
message: z.string(),
error: z.string()
}),
404: z.object({
reqId: z.string(),
statusCode: z.literal(404),
message: z.string(),
error: z.string()
}),
401: z.object({
reqId: z.string(),
statusCode: z.literal(401),
message: z.string(),
error: z.string()
}),
403: z.object({
reqId: z.string(),
statusCode: z.literal(403),
message: z.string(),
details: z.any().optional(),
error: z.string()
}),
// Zod errors return a message of varying shapes and sizes, so z.any() is used here
422: z.object({
reqId: z.string(),
statusCode: z.literal(422),
message: z.any(),
error: z.string()
}),
403: z.object({
statusCode: z.literal(403),
message: z.string(),
error: z.string()
}),
500: z.object({
reqId: z.string(),
statusCode: z.literal(500),
message: z.string(),
error: z.string()

@ -219,7 +206,6 @@ export const SanitizedAuditLogStreamSchema = z.object({
export const SanitizedProjectSchema = ProjectsSchema.pick({
id: true,
name: true,
description: true,
slug: true,
autoCapitalization: true,
orgId: true,
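The schema map above exists so routes can spread one shared set of error shapes into their response definitions. A minimal sketch of that reuse, assuming the zod-based route schemas used throughout this codebase:

```ts
import { z } from "zod";

const DefaultResponseErrorsSchema = {
  400: z.object({ reqId: z.string(), statusCode: z.literal(400), message: z.string(), error: z.string() })
};

// Each route declares only its success shape and inherits the error shapes.
const routeResponseSchema = {
  200: z.object({ ok: z.boolean() }),
  ...DefaultResponseErrorsSchema
};

console.log(Object.keys(routeResponseSchema)); // [ '200', '400' ]
```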
@ -5,7 +5,6 @@ import { INTEGRATION_AUTH } from "@app/lib/api-docs";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
import { OctopusDeployScope } from "@app/services/integration-auth/integration-auth-types";

import { integrationAuthPubSchema } from "../sanitizedSchemas";

@ -1009,118 +1008,4 @@ export const registerIntegrationAuthRouter = async (server: FastifyZodProvider)
return { buildConfigs };
}
});

server.route({
method: "GET",
url: "/:integrationAuthId/octopus-deploy/scope-values",
config: {
rateLimit: readLimit
},
onRequest: verifyAuth([AuthMode.JWT]),
schema: {
params: z.object({
integrationAuthId: z.string().trim()
}),
querystring: z.object({
scope: z.nativeEnum(OctopusDeployScope),
spaceId: z.string().trim(),
resourceId: z.string().trim()
}),
response: {
200: z.object({
Environments: z
.object({
Name: z.string(),
Id: z.string()
})
.array(),
Machines: z
.object({
Name: z.string(),
Id: z.string()
})
.array(),
Actions: z
.object({
Name: z.string(),
Id: z.string()
})
.array(),
Roles: z
.object({
Name: z.string(),
Id: z.string()
})
.array(),
Channels: z
.object({
Name: z.string(),
Id: z.string()
})
.array(),
TenantTags: z
.object({
Name: z.string(),
Id: z.string()
})
.array(),
Processes: z
.object({
ProcessType: z.string(),
Name: z.string(),
Id: z.string()
})
.array()
})
}
},
handler: async (req) => {
const scopeValues = await server.services.integrationAuth.getOctopusDeployScopeValues({
actorId: req.permission.id,
actor: req.permission.type,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
id: req.params.integrationAuthId,
scope: req.query.scope,
spaceId: req.query.spaceId,
resourceId: req.query.resourceId
});
return scopeValues;
}
});

server.route({
method: "GET",
url: "/:integrationAuthId/octopus-deploy/spaces",
config: {
rateLimit: readLimit
},
onRequest: verifyAuth([AuthMode.JWT]),
schema: {
params: z.object({
integrationAuthId: z.string().trim()
}),
response: {
200: z.object({
spaces: z
.object({
Name: z.string(),
Id: z.string(),
IsDefault: z.boolean()
})
.array()
})
}
},
handler: async (req) => {
const spaces = await server.services.integrationAuth.getOctopusDeploySpaces({
actorId: req.permission.id,
actor: req.permission.type,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
id: req.params.integrationAuthId
});
return { spaces };
}
});
};
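One detail worth noting in the removed routes above is `z.nativeEnum`, which validates a query parameter against a TypeScript enum. A minimal sketch (the enum member is illustrative):

```ts
import { z } from "zod";

enum OctopusDeployScope {
  Project = "project" // illustrative member
}

const querystring = z.object({
  scope: z.nativeEnum(OctopusDeployScope),
  spaceId: z.string().trim(),
  resourceId: z.string().trim()
});

// Parses cleanly for a known enum value, throws for anything else.
console.log(querystring.parse({ scope: "project", spaceId: "Spaces-1", resourceId: "Projects-1" }));
```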
@ -9,7 +9,6 @@ import { getTelemetryDistinctId } from "@app/server/lib/telemetry";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
import { IntegrationMetadataSchema } from "@app/services/integration/integration-schema";
import { Integrations } from "@app/services/integration-auth/integration-list";
import { PostHogEventTypes, TIntegrationCreatedEvent } from "@app/services/telemetry/telemetry-types";

import {} from "../sanitizedSchemas";

@ -207,33 +206,6 @@ export const registerIntegrationRouter = async (server: FastifyZodProvider) => {
id: req.params.integrationId
});

if (integration.region) {
integration.metadata = {
...(integration.metadata || {}),
region: integration.region
};
}

if (
integration.integration === Integrations.AWS_SECRET_MANAGER ||
integration.integration === Integrations.AWS_PARAMETER_STORE
) {
const awsRoleDetails = await server.services.integration.getIntegrationAWSIamRole({
actorId: req.permission.id,
actor: req.permission.type,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
id: req.params.integrationId
});

if (awsRoleDetails) {
integration.metadata = {
...(integration.metadata || {}),
awsIamRole: awsRoleDetails.role
};
}
}

return { integration };
}
});
@ -296,12 +296,6 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
.max(64, { message: "Name must be 64 or fewer characters" })
.optional()
.describe(PROJECTS.UPDATE.name),
description: z
.string()
.trim()
.max(256, { message: "Description must be 256 or fewer characters" })
.optional()
.describe(PROJECTS.UPDATE.projectDescription),
autoCapitalization: z.boolean().optional().describe(PROJECTS.UPDATE.autoCapitalization)
}),
response: {

@ -319,7 +313,6 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
},
update: {
name: req.body.name,
description: req.body.description,
autoCapitalization: req.body.autoCapitalization
},
actorAuthMethod: req.permission.authMethod,
@ -8,38 +8,21 @@

import { Authenticator } from "@fastify/passport";
import fastifySession from "@fastify/session";
import RedisStore from "connect-redis";
import { Strategy as GitHubStrategy } from "passport-github";
import { Strategy as GitLabStrategy } from "passport-gitlab2";
import { Strategy as GoogleStrategy } from "passport-google-oauth20";
import { z } from "zod";

import { getConfig } from "@app/lib/config/env";
import { BadRequestError, NotFoundError } from "@app/lib/errors";
import { NotFoundError } from "@app/lib/errors";
import { logger } from "@app/lib/logger";
import { fetchGithubEmails } from "@app/lib/requests/github";
import { authRateLimit } from "@app/server/config/rateLimiter";
import { AuthMethod } from "@app/services/auth/auth-type";
import { OrgAuthMethod } from "@app/services/org/org-types";

export const registerSsoRouter = async (server: FastifyZodProvider) => {
const appCfg = getConfig();

const passport = new Authenticator({ key: "sso", userProperty: "passportUser" });
const redisStore = new RedisStore({
client: server.redis,
prefix: "oauth-session:",
ttl: 600 // 10 minutes
});

await server.register(fastifySession, {
secret: appCfg.COOKIE_SECRET_SIGN_KEY,
store: redisStore,
cookie: {
secure: appCfg.HTTPS_ENABLED,
sameSite: "lax" // we want cookies to be sent to Infisical in redirects originating from IDP server
}
});
await server.register(fastifySession, { secret: appCfg.COOKIE_SECRET_SIGN_KEY });
await server.register(passport.initialize());
await server.register(passport.secureSession());
// passport oauth strategy for Google

@ -52,15 +35,11 @@ export const registerSsoRouter = async (server: FastifyZodProvider) => {
clientID: appCfg.CLIENT_ID_GOOGLE_LOGIN as string,
clientSecret: appCfg.CLIENT_SECRET_GOOGLE_LOGIN as string,
callbackURL: `${appCfg.SITE_URL}/api/v1/sso/google`,
scope: ["profile", " email"],
state: true
scope: ["profile", " email"]
},
// eslint-disable-next-line
async (req, _accessToken, _refreshToken, profile, cb) => {
try {
// @ts-expect-error this is because this is express type and not fastify
const callbackPort = req.session.get("callbackPort");

const email = profile?.emails?.[0]?.value;
if (!email)
throw new NotFoundError({

@ -73,7 +52,7 @@ export const registerSsoRouter = async (server: FastifyZodProvider) => {
firstName: profile?.name?.givenName || "",
lastName: profile?.name?.familyName || "",
authMethod: AuthMethod.GOOGLE,
callbackPort
callbackPort: req.query.state as string
});
cb(null, { isUserCompleted, providerAuthToken });
} catch (error) {

@ -95,14 +74,10 @@ export const registerSsoRouter = async (server: FastifyZodProvider) => {
clientID: appCfg.CLIENT_ID_GITHUB_LOGIN as string,
clientSecret: appCfg.CLIENT_SECRET_GITHUB_LOGIN as string,
callbackURL: `${appCfg.SITE_URL}/api/v1/sso/github`,
scope: ["user:email"],
// akhilmhdh: because the ts type for this is outdated by the maintainer
state: true as unknown as string
scope: ["user:email"]
},
// eslint-disable-next-line
async (req, accessToken, _refreshToken, profile, cb) => {
// @ts-expect-error this is because this is express type and not fastify
const callbackPort = req.session.get("callbackPort");
try {
const ghEmails = await fetchGithubEmails(accessToken);
const { email } = ghEmails.filter((gitHubEmail) => gitHubEmail.primary)[0];

@ -111,7 +86,7 @@ export const registerSsoRouter = async (server: FastifyZodProvider) => {
firstName: profile.displayName,
lastName: "",
authMethod: AuthMethod.GITHUB,
callbackPort
callbackPort: req.query.state as string
});
return cb(null, { isUserCompleted, providerAuthToken });
} catch (error) {

@ -135,20 +110,17 @@ export const registerSsoRouter = async (server: FastifyZodProvider) => {
clientID: appCfg.CLIENT_ID_GITLAB_LOGIN,
clientSecret: appCfg.CLIENT_SECRET_GITLAB_LOGIN,
callbackURL: `${appCfg.SITE_URL}/api/v1/sso/gitlab`,
baseURL: appCfg.CLIENT_GITLAB_LOGIN_URL,
state: true
baseURL: appCfg.CLIENT_GITLAB_LOGIN_URL
},
async (req: any, _accessToken: string, _refreshToken: string, profile: any, cb: any) => {
try {
const callbackPort = req.session.get("callbackPort");

const email = profile.emails[0].value;
const { isUserCompleted, providerAuthToken } = await server.services.login.oauth2Login({
email,
firstName: profile.displayName,
lastName: "",
authMethod: AuthMethod.GITLAB,
callbackPort
callbackPort: req.query.state as string
});

return cb(null, { isUserCompleted, providerAuthToken });

@ -169,24 +141,17 @@ export const registerSsoRouter = async (server: FastifyZodProvider) => {
callback_port: z.string().optional()
})
},
preValidation: [
async (req, res) => {
const { callback_port: callbackPort } = req.query;
// ensure fresh session state per login attempt
await req.session.regenerate();
if (callbackPort) {
req.session.set("callbackPort", callbackPort);
}
return (
passport.authenticate("google", {
scope: ["profile", "email"],
authInfo: false
// this is due to zod type difference
// eslint-disable-next-line @typescript-eslint/no-explicit-any
}) as any
)(req, res);
}
],
preValidation: (req, res) =>
(
passport.authenticate("google", {
scope: ["profile", "email"],
session: false,
state: req.query.callback_port,
authInfo: false
// this is due to zod type difference
// eslint-disable-next-line @typescript-eslint/no-explicit-any
}) as any
)(req, res),
handler: () => {}
});

@ -199,8 +164,7 @@ export const registerSsoRouter = async (server: FastifyZodProvider) => {
authInfo: false
// this is due to zod type difference
}) as never,
handler: async (req, res) => {
await req.session.destroy();
handler: (req, res) => {
if (req.passportUser.isUserCompleted) {
return res.redirect(
`${appCfg.SITE_URL}/login/sso?token=${encodeURIComponent(req.passportUser.providerAuthToken)}`

@ -220,65 +184,18 @@ export const registerSsoRouter = async (server: FastifyZodProvider) => {
callback_port: z.string().optional()
})
},
preValidation: [
async (req, res) => {
const { callback_port: callbackPort } = req.query;
// ensure fresh session state per login attempt
await req.session.regenerate();
if (callbackPort) {
req.session.set("callbackPort", callbackPort);
}

return (
passport.authenticate("github", {
session: false,
authInfo: false
// this is due to zod type difference
}) as any
)(req, res);
}
],
preValidation: (req, res) =>
(
passport.authenticate("github", {
session: false,
state: req.query.callback_port,
authInfo: false
// this is due to zod type difference
}) as any
)(req, res),
handler: () => {}
});

server.route({
url: "/redirect/organizations/:orgSlug",
method: "GET",
config: {
rateLimit: authRateLimit
},
schema: {
params: z.object({
orgSlug: z.string().trim()
}),
querystring: z.object({
callback_port: z.string().optional()
})
},
handler: async (req, res) => {
const org = await server.services.org.findOrgBySlug(req.params.orgSlug);
if (org.orgAuthMethod === OrgAuthMethod.SAML) {
return res.redirect(
`${appCfg.SITE_URL}/api/v1/sso/redirect/saml2/organizations/${org.slug}?${
req.query.callback_port ? `callback_port=${req.query.callback_port}` : ""
}`
);
}

if (org.orgAuthMethod === OrgAuthMethod.OIDC) {
return res.redirect(
`${appCfg.SITE_URL}/api/v1/sso/oidc/login?orgSlug=${org.slug}${
req.query.callback_port ? `&callbackPort=${req.query.callback_port}` : ""
}`
);
}

throw new BadRequestError({
message: "The organization does not have any SSO configured."
});
}
});

server.route({
url: "/github",
method: "GET",

@ -288,8 +205,7 @@ export const registerSsoRouter = async (server: FastifyZodProvider) => {
authInfo: false
// this is due to zod type difference
}) as any,
handler: async (req, res) => {
await req.session.destroy();
handler: (req, res) => {
if (req.passportUser.isUserCompleted) {
return res.redirect(
`${appCfg.SITE_URL}/login/sso?token=${encodeURIComponent(req.passportUser.providerAuthToken)}`

@ -309,25 +225,16 @@ export const registerSsoRouter = async (server: FastifyZodProvider) => {
callback_port: z.string().optional()
})
},
preValidation: [
async (req, res) => {
const { callback_port: callbackPort } = req.query;
// ensure fresh session state per login attempt
await req.session.regenerate();
if (callbackPort) {
req.session.set("callbackPort", callbackPort);
}

return (
passport.authenticate("gitlab", {
session: false,
authInfo: false
// this is due to zod type difference
// eslint-disable-next-line @typescript-eslint/no-explicit-any
}) as any
)(req, res);
}
],
preValidation: (req, res) =>
(
passport.authenticate("gitlab", {
session: false,
state: req.query.callback_port,
authInfo: false
// this is due to zod type difference
// eslint-disable-next-line @typescript-eslint/no-explicit-any
}) as any
)(req, res),
handler: () => {}
});

@ -341,8 +248,7 @@ export const registerSsoRouter = async (server: FastifyZodProvider) => {
// this is due to zod type difference
// eslint-disable-next-line @typescript-eslint/no-explicit-any
}) as any,
handler: async (req, res) => {
await req.session.destroy();
handler: (req, res) => {
if (req.passportUser.isUserCompleted) {
return res.redirect(
`${appCfg.SITE_URL}/login/sso?token=${encodeURIComponent(req.passportUser.providerAuthToken)}`
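The recurring change in this router is the handoff of `callback_port`: the session-backed version (removed) regenerates the session and stashes the port before redirecting, while the restored version rides it on the OAuth `state` parameter. A minimal sketch of the session-backed side, assuming @fastify/session with a declared `callbackPort` session field:

```ts
import type { FastifyRequest } from "fastify";

type SsoRequest = FastifyRequest<{ Querystring: { callback_port?: string } }>;

// Before redirecting to the identity provider.
const stashCallbackPort = async (req: SsoRequest) => {
  await req.session.regenerate(); // fresh session state per login attempt
  if (req.query.callback_port) req.session.set("callbackPort", req.query.callback_port);
};

// Inside the provider callback.
const readCallbackPort = (req: FastifyRequest) => req.session.get("callbackPort");
```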
@ -161,7 +161,6 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
],
body: z.object({
projectName: z.string().trim().describe(PROJECTS.CREATE.projectName),
projectDescription: z.string().trim().optional().describe(PROJECTS.CREATE.projectDescription),
slug: z
.string()
.min(5)

@ -195,7 +194,6 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
actorOrgId: req.permission.orgId,
actorAuthMethod: req.permission.authMethod,
workspaceName: req.body.projectName,
workspaceDescription: req.body.projectDescription,
slug: req.body.slug,
kmsKeyId: req.body.kmsKeyId,
template: req.body.template

@ -314,9 +312,8 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
slug: slugSchema.describe("The slug of the project to update.")
}),
body: z.object({
name: z.string().trim().optional().describe(PROJECTS.UPDATE.name),
description: z.string().trim().optional().describe(PROJECTS.UPDATE.projectDescription),
autoCapitalization: z.boolean().optional().describe(PROJECTS.UPDATE.autoCapitalization)
name: z.string().trim().optional().describe("The new name of the project."),
autoCapitalization: z.boolean().optional().describe("The new auto-capitalization setting.")
}),
response: {
200: SanitizedProjectSchema

@ -333,7 +330,6 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
},
update: {
name: req.body.name,
description: req.body.description,
autoCapitalization: req.body.autoCapitalization
},
actorId: req.permission.id,
@ -119,6 +119,13 @@ export const registerSignupRouter = async (server: FastifyZodProvider) => {
if (!userAgent) throw new Error("user agent header is required");
const appCfg = getConfig();

const serverCfg = await getServerCfg();
if (!serverCfg.allowSignUp) {
throw new ForbiddenRequestError({
message: "Signup's are disabled"
});
}

const { user, accessToken, refreshToken, organizationId } =
await server.services.signup.completeEmailAccountSignup({
...req.body,
@ -9,7 +9,7 @@ import { isAuthMethodSaml } from "@app/ee/services/permission/permission-fns";
import { getConfig } from "@app/lib/config/env";
import { infisicalSymmetricDecrypt, infisicalSymmetricEncypt } from "@app/lib/crypto/encryption";
import { generateUserSrpKeys, getUserPrivateKey } from "@app/lib/crypto/srp";
import { ForbiddenRequestError, NotFoundError } from "@app/lib/errors";
import { NotFoundError } from "@app/lib/errors";
import { isDisposableEmail } from "@app/lib/validator";
import { TGroupProjectDALFactory } from "@app/services/group-project/group-project-dal";
import { TProjectDALFactory } from "@app/services/project/project-dal";

@ -23,7 +23,6 @@ import { TOrgServiceFactory } from "../org/org-service";
import { TProjectMembershipDALFactory } from "../project-membership/project-membership-dal";
import { TProjectUserMembershipRoleDALFactory } from "../project-membership/project-user-membership-role-dal";
import { SmtpTemplates, TSmtpService } from "../smtp/smtp-service";
import { getServerCfg } from "../super-admin/super-admin-service";
import { TUserDALFactory } from "../user/user-dal";
import { UserEncryption } from "../user/user-types";
import { TAuthDALFactory } from "./auth-dal";

@ -152,8 +151,6 @@ export const authSignupServiceFactory = ({
authorization
}: TCompleteAccountSignupDTO) => {
const appCfg = getConfig();
const serverCfg = await getServerCfg();

const user = await userDAL.findOne({ username: email });
if (!user || (user && user.isAccepted)) {
throw new Error("Failed to complete account for complete user");

@ -166,12 +163,6 @@ export const authSignupServiceFactory = ({
authMethod = userAuthMethod;
organizationId = orgId;
} else {
// disallow signup if disabled. we are not doing this for providerAuthToken because we allow signups via saml or sso
if (!serverCfg.allowSignUp) {
throw new ForbiddenRequestError({
message: "Signup's are disabled"
});
}
validateSignUpAuthorization(authorization, user.id);
}
@ -29,7 +29,7 @@ import {
} from "./identity-aws-auth-types";

type TIdentityAwsAuthServiceFactoryDep = {
identityAccessTokenDAL: Pick<TIdentityAccessTokenDALFactory, "create" | "delete">;
identityAccessTokenDAL: Pick<TIdentityAccessTokenDALFactory, "create">;
identityAwsAuthDAL: Pick<TIdentityAwsAuthDALFactory, "findOne" | "transaction" | "create" | "updateById" | "delete">;
identityOrgMembershipDAL: Pick<TIdentityOrgDALFactory, "findOne">;
licenseService: Pick<TLicenseServiceFactory, "getPlan">;
@ -346,8 +346,6 @@ export const identityAwsAuthServiceFactory = ({

const revokedIdentityAwsAuth = await identityAwsAuthDAL.transaction(async (tx) => {
const deletedAwsAuth = await identityAwsAuthDAL.delete({ identityId }, tx);
await identityAccessTokenDAL.delete({ identityId, authMethod: IdentityAuthMethod.AWS_AUTH }, tx);

return { ...deletedAwsAuth?.[0], orgId: identityMembershipOrg.orgId };
});
return revokedIdentityAwsAuth;
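Note: this and the following identity-auth files all widen or narrow the same dependency, `identityAccessTokenDAL`, via TypeScript's `Pick`. A self-contained example of what that buys (names are generic, not from the codebase):

```ts
// Pick<T, K> keeps only the listed members, so a service declares
// exactly which DAL methods it may call.
type TokenDAL = {
  create: (row: { identityId: string }) => Promise<void>;
  delete: (filter: { identityId: string }) => Promise<number>;
  findOne: (filter: { id: string }) => Promise<unknown>;
};

// Widening "create" to "create" | "delete" is what allows the revoke
// path to call tokenDAL.delete(...) without exposing findOne.
const revoke = async (tokenDAL: Pick<TokenDAL, "create" | "delete">, identityId: string) => {
  await tokenDAL.delete({ identityId }); // OK
  // tokenDAL.findOne(...) would be a compile-time error here.
};
```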
@ -30,7 +30,7 @@ type TIdentityAzureAuthServiceFactoryDep = {
"findOne" | "transaction" | "create" | "updateById" | "delete"
>;
identityOrgMembershipDAL: Pick<TIdentityOrgDALFactory, "findOne">;
identityAccessTokenDAL: Pick<TIdentityAccessTokenDALFactory, "create" | "delete">;
identityAccessTokenDAL: Pick<TIdentityAccessTokenDALFactory, "create">;
permissionService: Pick<TPermissionServiceFactory, "getOrgPermission">;
licenseService: Pick<TLicenseServiceFactory, "getPlan">;
};
@ -319,8 +319,6 @@ export const identityAzureAuthServiceFactory = ({

const revokedIdentityAzureAuth = await identityAzureAuthDAL.transaction(async (tx) => {
const deletedAzureAuth = await identityAzureAuthDAL.delete({ identityId }, tx);
await identityAccessTokenDAL.delete({ identityId, authMethod: IdentityAuthMethod.AZURE_AUTH }, tx);

return { ...deletedAzureAuth?.[0], orgId: identityMembershipOrg.orgId };
});
return revokedIdentityAzureAuth;

@ -28,7 +28,7 @@ import {
type TIdentityGcpAuthServiceFactoryDep = {
identityGcpAuthDAL: Pick<TIdentityGcpAuthDALFactory, "findOne" | "transaction" | "create" | "updateById" | "delete">;
identityOrgMembershipDAL: Pick<TIdentityOrgDALFactory, "findOne">;
identityAccessTokenDAL: Pick<TIdentityAccessTokenDALFactory, "create" | "delete">;
identityAccessTokenDAL: Pick<TIdentityAccessTokenDALFactory, "create">;
permissionService: Pick<TPermissionServiceFactory, "getOrgPermission">;
licenseService: Pick<TLicenseServiceFactory, "getPlan">;
};
@ -365,8 +365,6 @@ export const identityGcpAuthServiceFactory = ({

const revokedIdentityGcpAuth = await identityGcpAuthDAL.transaction(async (tx) => {
const deletedGcpAuth = await identityGcpAuthDAL.delete({ identityId }, tx);
await identityAccessTokenDAL.delete({ identityId, authMethod: IdentityAuthMethod.GCP_AUTH }, tx);

return { ...deletedGcpAuth?.[0], orgId: identityMembershipOrg.orgId };
});
return revokedIdentityGcpAuth;

@ -41,7 +41,7 @@ type TIdentityKubernetesAuthServiceFactoryDep = {
TIdentityKubernetesAuthDALFactory,
"create" | "findOne" | "transaction" | "updateById" | "delete"
>;
identityAccessTokenDAL: Pick<TIdentityAccessTokenDALFactory, "create" | "delete">;
identityAccessTokenDAL: Pick<TIdentityAccessTokenDALFactory, "create">;
identityOrgMembershipDAL: Pick<TIdentityOrgDALFactory, "findOne" | "findById">;
orgBotDAL: Pick<TOrgBotDALFactory, "findOne" | "transaction" | "create">;
permissionService: Pick<TPermissionServiceFactory, "getOrgPermission">;
@ -120,8 +120,7 @@ export const identityKubernetesAuthServiceFactory = ({
apiVersion: "authentication.k8s.io/v1",
kind: "TokenReview",
spec: {
token: serviceAccountJwt,
...(identityKubernetesAuth.allowedAudience ? { audiences: [identityKubernetesAuth.allowedAudience] } : {})
token: serviceAccountJwt
}
},
{
@ -623,7 +622,6 @@ export const identityKubernetesAuthServiceFactory = ({

const revokedIdentityKubernetesAuth = await identityKubernetesAuthDAL.transaction(async (tx) => {
const deletedKubernetesAuth = await identityKubernetesAuthDAL.delete({ identityId }, tx);
await identityAccessTokenDAL.delete({ identityId, authMethod: IdentityAuthMethod.KUBERNETES_AUTH }, tx);
return { ...deletedKubernetesAuth?.[0], orgId: identityMembershipOrg.orgId };
});
return revokedIdentityKubernetesAuth;

@ -39,7 +39,7 @@ import {
type TIdentityOidcAuthServiceFactoryDep = {
identityOidcAuthDAL: TIdentityOidcAuthDALFactory;
identityOrgMembershipDAL: Pick<TIdentityOrgDALFactory, "findOne">;
identityAccessTokenDAL: Pick<TIdentityAccessTokenDALFactory, "create" | "delete">;
identityAccessTokenDAL: Pick<TIdentityAccessTokenDALFactory, "create">;
permissionService: Pick<TPermissionServiceFactory, "getOrgPermission">;
licenseService: Pick<TLicenseServiceFactory, "getPlan">;
orgBotDAL: Pick<TOrgBotDALFactory, "findOne" | "transaction" | "create">;
@ -539,8 +539,6 @@ export const identityOidcAuthServiceFactory = ({

const revokedIdentityOidcAuth = await identityOidcAuthDAL.transaction(async (tx) => {
const deletedOidcAuth = await identityOidcAuthDAL.delete({ identityId }, tx);
await identityAccessTokenDAL.delete({ identityId, authMethod: IdentityAuthMethod.OIDC_AUTH }, tx);

return { ...deletedOidcAuth?.[0], orgId: identityMembershipOrg.orgId };
});

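Note: the revoke hunks above all follow one pattern on the base side: delete the auth-method configuration and the identity's issued access tokens inside a single DAL transaction. A minimal sketch of that pattern, with DAL shapes assumed rather than taken from the project's actual types:

```ts
// Both deletes receive the same trx handle, so they commit or roll back
// together; dropping the second delete (as this compare does) leaves
// previously issued tokens behind after revocation.
type Tx = unknown;
type AuthDAL = {
  transaction: <T>(cb: (tx: Tx) => Promise<T>) => Promise<T>;
  delete: (filter: object, tx: Tx) => Promise<object[]>;
};
type TokenDAL = { delete: (filter: object, tx: Tx) => Promise<number> };

const revokeAuthMethod = (authDAL: AuthDAL, tokenDAL: TokenDAL, identityId: string, authMethod: string) =>
  authDAL.transaction(async (tx) => {
    const [deleted] = await authDAL.delete({ identityId }, tx);
    await tokenDAL.delete({ identityId, authMethod }, tx);
    return deleted;
  });
```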
@ -1,4 +1,4 @@
import { ForbiddenError, subject } from "@casl/ability";
import { ForbiddenError } from "@casl/ability";
import ms from "ms";

import { ProjectMembershipRole } from "@app/db/schemas";
@ -61,12 +61,7 @@ export const identityProjectServiceFactory = ({
actorAuthMethod,
actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionActions.Create,
subject(ProjectPermissionSub.Identity, {
identityId
})
);
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Create, ProjectPermissionSub.Identity);

const existingIdentity = await identityProjectDAL.findOne({ identityId, projectId });
if (existingIdentity)
@ -166,10 +161,7 @@ export const identityProjectServiceFactory = ({
actorAuthMethod,
actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionActions.Edit,
subject(ProjectPermissionSub.Identity, { identityId })
);
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Edit, ProjectPermissionSub.Identity);

const projectIdentity = await identityProjectDAL.findOne({ identityId, projectId });
if (!projectIdentity)
@ -190,12 +182,7 @@ export const identityProjectServiceFactory = ({

// validate custom roles input
const customInputRoles = roles.filter(
({ role }) =>
!Object.values(ProjectMembershipRole)
// we don't want to include custom in this check;
// this unintentionally enables setting slug to custom which is reserved
.filter((r) => r !== ProjectMembershipRole.Custom)
.includes(role as ProjectMembershipRole)
({ role }) => !Object.values(ProjectMembershipRole).includes(role as ProjectMembershipRole)
);
const hasCustomRole = Boolean(customInputRoles.length);
const customRoles = hasCustomRole
@ -261,11 +248,7 @@ export const identityProjectServiceFactory = ({
actorAuthMethod,
actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionActions.Delete,
subject(ProjectPermissionSub.Identity, { identityId })
);

ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Delete, ProjectPermissionSub.Identity);
const { permission: identityRolePermission } = await permissionService.getProjectPermission(
ActorType.IDENTITY,
identityId,
@ -329,11 +312,7 @@ export const identityProjectServiceFactory = ({
actorAuthMethod,
actorOrgId
);

ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionActions.Read,
subject(ProjectPermissionSub.Identity, { identityId })
);
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.Identity);

const [identityMembership] = await identityProjectDAL.findByProjectId(projectId, { identityId });
if (!identityMembership)
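Note: every permission check in this file changes shape: the base branch passes a CASL `subject(...)` carrying the concrete `identityId`, while the compare target checks only the bare subject type. The difference matters once ability rules carry conditions; a small standalone example with @casl/ability:

```ts
import { defineAbility, subject } from "@casl/ability";

const identityId = "identity-123";

// Ability whose rule is conditional on one specific identity.
const ability = defineAbility((can) => {
  can("edit", "Identity", { identityId });
});

// Type-only check: conditions cannot be evaluated against a bare string.
ability.can("edit", "Identity");
// Record-level checks: conditions are matched against the fields given.
ability.can("edit", subject("Identity", { identityId })); // true
ability.can("edit", subject("Identity", { identityId: "other" })); // false
```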
@ -385,8 +385,8 @@ export const identityTokenAuthServiceFactory = ({
actorOrgId
}: TUpdateTokenAuthTokenDTO) => {
const foundToken = await identityAccessTokenDAL.findOne({
[`${TableName.IdentityAccessToken}.id` as "id"]: tokenId,
[`${TableName.IdentityAccessToken}.authMethod` as "authMethod"]: IdentityAuthMethod.TOKEN_AUTH
id: tokenId,
authMethod: IdentityAuthMethod.TOKEN_AUTH
});
if (!foundToken) throw new NotFoundError({ message: `Token with ID ${tokenId} not found` });

@ -444,8 +444,8 @@ export const identityTokenAuthServiceFactory = ({
}: TRevokeTokenAuthTokenDTO) => {
const identityAccessToken = await identityAccessTokenDAL.findOne({
[`${TableName.IdentityAccessToken}.id` as "id"]: tokenId,
[`${TableName.IdentityAccessToken}.isAccessTokenRevoked` as "isAccessTokenRevoked"]: false,
[`${TableName.IdentityAccessToken}.authMethod` as "authMethod"]: IdentityAuthMethod.TOKEN_AUTH
isAccessTokenRevoked: false,
authMethod: IdentityAuthMethod.TOKEN_AUTH
});
if (!identityAccessToken)
throw new NotFoundError({
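Note: the base branch qualifies the filter keys with the table name so the generated WHERE clause stays unambiguous if `findOne` joins another table that also has an `id` column; the `as "id"` assertion exists only to keep the object assignable to the filter's TypeScript type. A generic illustration (column names hypothetical):

```ts
const tokenId = "token-123";

// Runtime keys are the qualified column names; the assertions keep the
// object assignable to a filter typed as { id: string; authMethod: string }.
const filter: { id: string; authMethod: string } = {
  ["identity_access_token.id" as "id"]: tokenId,
  ["identity_access_token.authMethod" as "authMethod"]: "token-auth"
};
```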
@ -1,7 +1,6 @@
/* eslint-disable no-await-in-loop */
import { createAppAuth } from "@octokit/auth-app";
import { Octokit } from "@octokit/rest";
import { Client as OctopusDeployClient, ProjectRepository as OctopusDeployRepository } from "@octopusdeploy/api-client";

import { TIntegrationAuths } from "@app/db/schemas";
import { getConfig } from "@app/lib/config/env";
@ -1088,33 +1087,6 @@ const getAppsAzureDevOps = async ({ accessToken, orgName }: { accessToken: strin
return apps;
};

const getAppsOctopusDeploy = async ({
apiKey,
instanceURL,
spaceName = "Default"
}: {
apiKey: string;
instanceURL: string;
spaceName?: string;
}) => {
const client = await OctopusDeployClient.create({
instanceURL,
apiKey,
userAgentApp: "Infisical Integration"
});

const repository = new OctopusDeployRepository(client, spaceName);

const projects = await repository.list({
take: 1000
});

return projects.Items.map((project) => ({
name: project.Name,
appId: project.Id
}));
};

export const getApps = async ({
integration,
integrationAuth,
@ -1288,13 +1260,6 @@ export const getApps = async ({
orgName: azureDevOpsOrgName as string
});

case Integrations.OCTOPUS_DEPLOY:
return getAppsOctopusDeploy({
apiKey: accessToken,
instanceURL: url!,
spaceName: workspaceSlug
});

default:
throw new NotFoundError({ message: `Integration '${integration}' not found` });
}
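Note: the removed `getAppsOctopusDeploy` maps Octopus projects onto Infisical's generic app list. A usage sketch of the same client calls, with the instance URL and API key as placeholders:

```ts
import { Client, ProjectRepository } from "@octopusdeploy/api-client";

// Placeholder credentials for illustration only.
const instanceURL = "https://my-octopus.example.com";
const apiKey = "API-XXXXXXXXXXXXXXXX";

const listOctopusProjects = async (spaceName = "Default") => {
  const client = await Client.create({ instanceURL, apiKey, userAgentApp: "Infisical Integration" });
  const repository = new ProjectRepository(client, spaceName);
  const projects = await repository.list({ take: 1000 });
  // Normalize to the { name, appId } shape the integration picker expects.
  return projects.Items.map((project) => ({ name: project.Name, appId: project.Id }));
};
```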
@ -1,7 +1,6 @@
import { ForbiddenError } from "@casl/ability";
import { createAppAuth } from "@octokit/auth-app";
import { Octokit } from "@octokit/rest";
import { Client as OctopusClient, SpaceRepository as OctopusSpaceRepository } from "@octopusdeploy/api-client";
import AWS from "aws-sdk";

import { SecretEncryptionAlgo, SecretKeyEncoding, TIntegrationAuths, TIntegrationAuthsInsert } from "@app/db/schemas";
@ -10,7 +9,7 @@ import { ProjectPermissionActions, ProjectPermissionSub } from "@app/ee/services
import { getConfig } from "@app/lib/config/env";
import { request } from "@app/lib/config/request";
import { decryptSymmetric128BitHexKeyUTF8, encryptSymmetric128BitHexKeyUTF8 } from "@app/lib/crypto";
import { BadRequestError, InternalServerError, NotFoundError } from "@app/lib/errors";
import { BadRequestError, NotFoundError } from "@app/lib/errors";
import { TGenericPermission, TProjectPermission } from "@app/lib/types";

import { TIntegrationDALFactory } from "../integration/integration-dal";
@ -21,7 +20,6 @@ import { getApps } from "./integration-app-list";
import { TIntegrationAuthDALFactory } from "./integration-auth-dal";
import { IntegrationAuthMetadataSchema, TIntegrationAuthMetadata } from "./integration-auth-schema";
import {
OctopusDeployScope,
TBitbucketEnvironment,
TBitbucketWorkspace,
TChecklyGroups,
@ -40,8 +38,6 @@ import {
TIntegrationAuthGithubOrgsDTO,
TIntegrationAuthHerokuPipelinesDTO,
TIntegrationAuthNorthflankSecretGroupDTO,
TIntegrationAuthOctopusDeployProjectScopeValuesDTO,
TIntegrationAuthOctopusDeploySpacesDTO,
TIntegrationAuthQoveryEnvironmentsDTO,
TIntegrationAuthQoveryOrgsDTO,
TIntegrationAuthQoveryProjectDTO,
@ -52,7 +48,6 @@ import {
TIntegrationAuthVercelBranchesDTO,
TNorthflankSecretGroup,
TOauthExchangeDTO,
TOctopusDeployVariableSet,
TSaveIntegrationAccessTokenDTO,
TTeamCityBuildConfig,
TVercelBranches
@ -1526,88 +1521,6 @@ export const integrationAuthServiceFactory = ({
return integrationAuthDAL.create(newIntegrationAuth);
};

const getOctopusDeploySpaces = async ({
actorId,
actor,
actorOrgId,
actorAuthMethod,
id
}: TIntegrationAuthOctopusDeploySpacesDTO) => {
const integrationAuth = await integrationAuthDAL.findById(id);
if (!integrationAuth) throw new NotFoundError({ message: `Integration auth with ID '${id}' not found` });

const { permission } = await permissionService.getProjectPermission(
actor,
actorId,
integrationAuth.projectId,
actorAuthMethod,
actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.Integrations);
const { shouldUseSecretV2Bridge, botKey } = await projectBotService.getBotKey(integrationAuth.projectId);
const { accessToken } = await getIntegrationAccessToken(integrationAuth, shouldUseSecretV2Bridge, botKey);

const client = await OctopusClient.create({
apiKey: accessToken,
instanceURL: integrationAuth.url!,
userAgentApp: "Infisical Integration"
});

const spaceRepository = new OctopusSpaceRepository(client);

const spaces = await spaceRepository.list({
partialName: "", // throws error if no string is present...
take: 1000
});

return spaces.Items;
};

const getOctopusDeployScopeValues = async ({
actorId,
actor,
actorOrgId,
actorAuthMethod,
id,
scope,
spaceId,
resourceId
}: TIntegrationAuthOctopusDeployProjectScopeValuesDTO) => {
const integrationAuth = await integrationAuthDAL.findById(id);
if (!integrationAuth) throw new NotFoundError({ message: `Integration auth with ID '${id}' not found` });

const { permission } = await permissionService.getProjectPermission(
actor,
actorId,
integrationAuth.projectId,
actorAuthMethod,
actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.Integrations);
const { shouldUseSecretV2Bridge, botKey } = await projectBotService.getBotKey(integrationAuth.projectId);
const { accessToken } = await getIntegrationAccessToken(integrationAuth, shouldUseSecretV2Bridge, botKey);

let url: string;
switch (scope) {
case OctopusDeployScope.Project:
url = `${integrationAuth.url}/api/${spaceId}/projects/${resourceId}/variables`;
break;
// future support tenant, variable set etc.
default:
throw new InternalServerError({ message: `Unhandled Octopus Deploy scope` });
}

// SDK doesn't support variable set...
const { data: variableSet } = await request.get<TOctopusDeployVariableSet>(url, {
headers: {
"X-NuGet-ApiKey": accessToken,
Accept: "application/json"
}
});

return variableSet.ScopeValues;
};

return {
listIntegrationAuthByProjectId,
listOrgIntegrationAuth,
@ -1639,8 +1552,6 @@ export const integrationAuthServiceFactory = ({
getBitbucketWorkspaces,
getBitbucketEnvironments,
getIntegrationAccessToken,
duplicateIntegrationAuth,
getOctopusDeploySpaces,
getOctopusDeployScopeValues
duplicateIntegrationAuth
};
};
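Note: both removed helpers work around a gap in the Octopus SDK: it has no variable-set API, so scope values come straight from the REST endpoint `/api/{spaceId}/projects/{projectId}/variables` with the `X-NuGet-ApiKey` header. A standalone sketch of that call using plain fetch (the repo itself goes through its `request` wrapper); all URL parts are placeholders:

```ts
// Fetch a project's variable set directly from the Octopus REST API.
const getVariableSet = async (baseUrl: string, spaceId: string, projectId: string, apiKey: string) => {
  const res = await fetch(`${baseUrl}/api/${spaceId}/projects/${projectId}/variables`, {
    headers: { "X-NuGet-ApiKey": apiKey, Accept: "application/json" }
  });
  if (!res.ok) throw new Error(`Octopus Deploy request failed with status ${res.status}`);
  return res.json(); // shape documented by TOctopusDeployVariableSet in the next file
};
```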
@ -193,72 +193,3 @@ export type TIntegrationsWithEnvironment = TIntegrations & {
| null
| undefined;
};

export type TIntegrationAuthOctopusDeploySpacesDTO = {
id: string;
} & Omit<TProjectPermission, "projectId">;

export type TIntegrationAuthOctopusDeployProjectScopeValuesDTO = {
id: string;
spaceId: string;
resourceId: string;
scope: OctopusDeployScope;
} & Omit<TProjectPermission, "projectId">;

export enum OctopusDeployScope {
Project = "project"
// add tenant, variable set, etc.
}

export type TOctopusDeployVariableSet = {
Id: string;
OwnerId: string;
Version: number;
Variables: {
Id: string;
Name: string;
Value: string;
Description: string;
Scope: {
Environment?: string[];
Machine?: string[];
Role?: string[];
TargetRole?: string[];
Action?: string[];
User?: string[];
Trigger?: string[];
ParentDeployment?: string[];
Private?: string[];
Channel?: string[];
TenantTag?: string[];
Tenant?: string[];
ProcessOwner?: string[];
};
IsEditable: boolean;
Prompt: {
Description: string;
DisplaySettings: Record<string, string>;
Label: string;
Required: boolean;
} | null;
Type: "String";
IsSensitive: boolean;
}[];
ScopeValues: {
Environments: { Id: string; Name: string }[];
Machines: { Id: string; Name: string }[];
Actions: { Id: string; Name: string }[];
Roles: { Id: string; Name: string }[];
Channels: { Id: string; Name: string }[];
TenantTags: { Id: string; Name: string }[];
Processes: {
ProcessType: string;
Id: string;
Name: string;
}[];
};
SpaceId: string;
Links: {
Self: string;
};
};
@ -34,8 +34,7 @@ export enum Integrations {
HASURA_CLOUD = "hasura-cloud",
RUNDECK = "rundeck",
AZURE_DEVOPS = "azure-devops",
AZURE_APP_CONFIGURATION = "azure-app-configuration",
OCTOPUS_DEPLOY = "octopus-deploy"
AZURE_APP_CONFIGURATION = "azure-app-configuration"
}

export enum IntegrationType {
@ -414,15 +413,6 @@ export const getIntegrationOptions = async () => {
type: "pat",
clientId: "",
docsLink: ""
},
{
name: "Octopus Deploy",
slug: "octopus-deploy",
image: "Octopus Deploy.png",
isAvailable: true,
type: "sat",
clientId: "",
docsLink: ""
}
];

@ -32,14 +32,14 @@ import { z } from "zod";
import { SecretType, TIntegrationAuths, TIntegrations } from "@app/db/schemas";
import { getConfig } from "@app/lib/config/env";
import { request } from "@app/lib/config/request";
import { BadRequestError, InternalServerError } from "@app/lib/errors";
import { BadRequestError } from "@app/lib/errors";
import { logger } from "@app/lib/logger";
import { TCreateManySecretsRawFn, TUpdateManySecretsRawFn } from "@app/services/secret/secret-types";

import { TIntegrationDALFactory } from "../integration/integration-dal";
import { IntegrationMetadataSchema } from "../integration/integration-schema";
import { IntegrationAuthMetadataSchema } from "./integration-auth-schema";
import { OctopusDeployScope, TIntegrationsWithEnvironment, TOctopusDeployVariableSet } from "./integration-auth-types";
import { TIntegrationsWithEnvironment } from "./integration-auth-types";
import {
IntegrationInitialSyncBehavior,
IntegrationMappingBehavior,
@ -473,7 +473,7 @@ const syncSecretsAzureKeyVault = async ({
id: string; // secret URI
value: string;
attributes: {
enabled: boolean;
enabled: true;
created: number;
updated: number;
recoveryLevel: string;
@ -509,19 +509,10 @@ const syncSecretsAzureKeyVault = async ({

const getAzureKeyVaultSecrets = await paginateAzureKeyVaultSecrets(`${integration.app}/secrets?api-version=7.3`);

const enabledAzureKeyVaultSecrets = getAzureKeyVaultSecrets.filter((secret) => secret.attributes.enabled);

// disabled keys to skip sending updates to
const disabledAzureKeyVaultSecretKeys = getAzureKeyVaultSecrets
.filter(({ attributes }) => !attributes.enabled)
.map((getAzureKeyVaultSecret) => {
return getAzureKeyVaultSecret.id.substring(getAzureKeyVaultSecret.id.lastIndexOf("/") + 1);
});

let lastSlashIndex: number;
const res = (
await Promise.all(
enabledAzureKeyVaultSecrets.map(async (getAzureKeyVaultSecret) => {
getAzureKeyVaultSecrets.map(async (getAzureKeyVaultSecret) => {
if (!lastSlashIndex) {
lastSlashIndex = getAzureKeyVaultSecret.id.lastIndexOf("/");
}
@ -667,7 +658,6 @@ const syncSecretsAzureKeyVault = async ({
}) => {
let isSecretSet = false;
let maxTries = 6;
if (disabledAzureKeyVaultSecretKeys.includes(key)) return;

while (!isSecretSet && maxTries > 0) {
// try to set secret
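Note: the base side of the Azure Key Vault hunks partitions the vault listing into enabled secrets (synced normally) and disabled ones (skipped entirely, presumably so the sync never clobbers intentionally disabled entries). The partition logic, reduced to its essentials:

```ts
type AkvSecret = { id: string; attributes: { enabled: boolean } };

// Mirrors the removed logic: sync only enabled secrets, and collect the
// key names of disabled ones (the segment after the last "/" of the
// secret URI) so the write loop can skip them.
const partitionAkvSecrets = (secrets: AkvSecret[]) => ({
  enabled: secrets.filter((s) => s.attributes.enabled),
  disabledKeys: secrets
    .filter((s) => !s.attributes.enabled)
    .map((s) => s.id.substring(s.id.lastIndexOf("/") + 1))
});
```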
@ -3085,7 +3075,7 @@ const syncSecretsTerraformCloud = async ({
}) => {
// get secrets from Terraform Cloud
const terraformSecrets = (
await request.get<{ data: { attributes: { key: string; value: string; sensitive: boolean }; id: string }[] }>(
await request.get<{ data: { attributes: { key: string; value: string }; id: string }[] }>(
`${IntegrationUrls.TERRAFORM_CLOUD_API_URL}/api/v2/workspaces/${integration.appId}/vars`,
{
headers: {
@ -3099,7 +3089,7 @@ const syncSecretsTerraformCloud = async ({
...obj,
[secret.attributes.key]: secret
}),
{} as Record<string, { attributes: { key: string; value: string; sensitive: boolean }; id: string }>
{} as Record<string, { attributes: { key: string; value: string }; id: string }>
);

const secretsToAdd: { [key: string]: string } = {};
@ -3180,8 +3170,7 @@ const syncSecretsTerraformCloud = async ({
attributes: {
key,
value: secrets[key]?.value,
category: integration.targetService,
sensitive: true
category: integration.targetService
}
}
},
@ -3194,11 +3183,7 @@ const syncSecretsTerraformCloud = async ({
}
);
// case: secret exists in Terraform Cloud
} else if (
// we now set secrets to sensitive in Terraform Cloud, this checks if existing secrets are not sensitive and updates them accordingly
!terraformSecrets[key].attributes.sensitive ||
secrets[key]?.value !== terraformSecrets[key].attributes.value
) {
} else if (secrets[key]?.value !== terraformSecrets[key].attributes.value) {
// -> update secret
await request.patch(
`${IntegrationUrls.TERRAFORM_CLOUD_API_URL}/api/v2/workspaces/${integration.appId}/vars/${terraformSecrets[key].id}`,
@ -3208,8 +3193,7 @@ const syncSecretsTerraformCloud = async ({
id: terraformSecrets[key].id,
attributes: {
...terraformSecrets[key],
value: secrets[key]?.value,
sensitive: true
value: secrets[key]?.value
}
}
},
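Note: the Terraform Cloud hunks revert a hardening step: the base branch marks every pushed variable `sensitive: true` and also rewrites variables whose stored copy is not yet sensitive, even when the value is unchanged. The update predicate, isolated:

```ts
type TfcVar = { id: string; attributes: { key: string; value: string; sensitive: boolean } };

// Update when the value drifted OR when the existing variable is still
// plaintext, so previously synced vars get upgraded to sensitive in place.
const needsUpdate = (existing: TfcVar, desiredValue: string | undefined) =>
  !existing.attributes.sensitive || desiredValue !== existing.attributes.value;
```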
@ -4211,61 +4195,6 @@ const syncSecretsRundeck = async ({
}
};

const syncSecretsOctopusDeploy = async ({
integration,
integrationAuth,
secrets,
accessToken
}: {
integration: TIntegrations;
integrationAuth: TIntegrationAuths;
secrets: Record<string, { value: string; comment?: string }>;
accessToken: string;
}) => {
let url: string;
switch (integration.scope) {
case OctopusDeployScope.Project:
url = `${integrationAuth.url}/api/${integration.targetEnvironmentId}/projects/${integration.appId}/variables`;
break;
// future support tenant, variable set, etc.
default:
throw new InternalServerError({ message: `Unhandled Octopus Deploy scope: ${integration.scope}` });
}

// SDK doesn't support variable set...
const { data: variableSet } = await request.get<TOctopusDeployVariableSet>(url, {
headers: {
"X-NuGet-ApiKey": accessToken,
Accept: "application/json"
}
});

await request.put(
url,
{
...variableSet,
Variables: Object.entries(secrets).map(([key, value]) => ({
Name: key,
Value: value.value,
Description: value.comment ?? "",
Scope:
(integration.metadata as { octopusDeployScopeValues: TOctopusDeployVariableSet["ScopeValues"] })
?.octopusDeployScopeValues ?? {},
IsEditable: false,
Prompt: null,
Type: "String",
IsSensitive: true
}))
} as unknown as TOctopusDeployVariableSet,
{
headers: {
"X-NuGet-ApiKey": accessToken,
Accept: "application/json"
}
}
);
};

/**
* Sync/push [secrets] to [app] in integration named [integration]
*
@ -4578,14 +4507,6 @@ export const syncIntegrationSecrets = async ({
accessToken
});
break;
case Integrations.OCTOPUS_DEPLOY:
await syncSecretsOctopusDeploy({
integration,
integrationAuth,
secrets,
accessToken
});
break;
default:
throw new BadRequestError({ message: "Invalid integration" });
}
@ -46,18 +46,5 @@ export const IntegrationMetadataSchema = z.object({
shouldDisableDelete: z.boolean().optional().describe(INTEGRATION.CREATE.metadata.shouldDisableDelete),
shouldEnableDelete: z.boolean().optional().describe(INTEGRATION.CREATE.metadata.shouldEnableDelete),
shouldMaskSecrets: z.boolean().optional().describe(INTEGRATION.CREATE.metadata.shouldMaskSecrets),
shouldProtectSecrets: z.boolean().optional().describe(INTEGRATION.CREATE.metadata.shouldProtectSecrets),

octopusDeployScopeValues: z
.object({
// in Octopus Deploy Scope Value Format
Environment: z.string().array().optional(),
Action: z.string().array().optional(),
Channel: z.string().array().optional(),
Machine: z.string().array().optional(),
ProcessOwner: z.string().array().optional(),
Role: z.string().array().optional()
})
.optional()
.describe(INTEGRATION.CREATE.metadata.octopusDeployScopeValues)
shouldProtectSecrets: z.boolean().optional().describe(INTEGRATION.CREATE.metadata.shouldProtectSecrets)
});
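Note: the removed `octopusDeployScopeValues` metadata lets a synced variable be scoped the way Octopus expects, i.e. arrays of scope IDs keyed by scope kind. An example payload that would have validated against the removed schema branch (IDs hypothetical):

```ts
// Example integration metadata; the arrays hold Octopus Deploy scope
// IDs in Octopus's own "Scope Value" format, not display names.
const metadata = {
  shouldProtectSecrets: false,
  octopusDeployScopeValues: {
    Environment: ["Environments-1", "Environments-2"],
    Role: ["web-server"]
  }
};
```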
@ -9,7 +9,6 @@ import { TIntegrationAuthDALFactory } from "../integration-auth/integration-auth
import { TIntegrationAuthServiceFactory } from "../integration-auth/integration-auth-service";
import { deleteIntegrationSecrets } from "../integration-auth/integration-delete-secret";
import { TKmsServiceFactory } from "../kms/kms-service";
import { KmsDataKey } from "../kms/kms-types";
import { TProjectBotServiceFactory } from "../project-bot/project-bot-service";
import { TSecretDALFactory } from "../secret/secret-dal";
import { TSecretQueueFactory } from "../secret/secret-queue";
@ -238,46 +237,6 @@ export const integrationServiceFactory = ({
return { ...integration, envId: integration.environment.id };
};

const getIntegrationAWSIamRole = async ({ id, actor, actorAuthMethod, actorId, actorOrgId }: TGetIntegrationDTO) => {
const integration = await integrationDAL.findById(id);

if (!integration) {
throw new NotFoundError({
message: `Integration with ID '${id}' not found`
});
}

const { permission } = await permissionService.getProjectPermission(
actor,
actorId,
integration?.projectId || "",
actorAuthMethod,
actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.Integrations);

const integrationAuth = await integrationAuthDAL.findById(integration.integrationAuthId);

const { decryptor: secretManagerDecryptor } = await kmsService.createCipherPairWithDataKey({
type: KmsDataKey.SecretManager,
projectId: integration.projectId
});
let awsIamRole: string | null = null;
if (integrationAuth.encryptedAwsAssumeIamRoleArn) {
const awsAssumeRoleArn = secretManagerDecryptor({
cipherTextBlob: Buffer.from(integrationAuth.encryptedAwsAssumeIamRoleArn)
}).toString();
if (awsAssumeRoleArn) {
const [, role] = awsAssumeRoleArn.split(":role/");
awsIamRole = role;
}
}

return {
role: awsIamRole
};
};

const deleteIntegration = async ({
actorId,
id,
@ -370,7 +329,6 @@ export const integrationServiceFactory = ({
deleteIntegration,
listIntegrationByProject,
getIntegration,
getIntegrationAWSIamRole,
syncIntegration
};
};
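Note: the removed `getIntegrationAWSIamRole` recovers the role name from the decrypted assume-role ARN by splitting on `":role/"`. A worked example (account ID and role name hypothetical):

```ts
// "arn:aws:iam::123456789012:role/infisical-sync" -> "infisical-sync"
const roleNameFromArn = (arn: string): string | null => {
  const [, role] = arn.split(":role/");
  return role ?? null;
};

roleNameFromArn("arn:aws:iam::123456789012:role/infisical-sync"); // "infisical-sync"
roleNameFromArn("not-an-arn"); // null (no ":role/" segment to split on)
```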
@ -4,10 +4,8 @@ import { z } from "zod";

import { KmsKeysSchema, TKmsRootConfig } from "@app/db/schemas";
import { AwsKmsProviderFactory } from "@app/ee/services/external-kms/providers/aws-kms";
import { GcpKmsProviderFactory } from "@app/ee/services/external-kms/providers/gcp-kms";
import {
ExternalKmsAwsSchema,
ExternalKmsGcpSchema,
KmsProviders,
TExternalKmsProviderFns
} from "@app/ee/services/external-kms/providers/model";
@ -293,16 +291,6 @@ export const kmsServiceFactory = ({
});
break;
}
case KmsProviders.Gcp: {
const decryptedProviderInput = await ExternalKmsGcpSchema.parseAsync(
JSON.parse(decryptedProviderInputBlob.toString("utf8"))
);

externalKms = await GcpKmsProviderFactory({
inputs: decryptedProviderInput
});
break;
}
default:
throw new Error("Invalid KMS provider.");
}
@ -365,16 +353,6 @@ export const kmsServiceFactory = ({
});
break;
}
case KmsProviders.Gcp: {
const decryptedProviderInput = await ExternalKmsGcpSchema.parseAsync(
JSON.parse(decryptedProviderInputBlob.toString("utf8"))
);

externalKms = await GcpKmsProviderFactory({
inputs: decryptedProviderInput
});
break;
}
default:
throw new Error("Invalid KMS provider.");
}
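Note: the KMS hunks remove the GCP arm from an otherwise unchanged dispatch: decrypt the stored provider input, validate it with the provider's zod schema, then hand it to the matching factory. A condensed sketch of that switch (imports as named in the diff; the surrounding decryption is elided):

```ts
import { AwsKmsProviderFactory } from "@app/ee/services/external-kms/providers/aws-kms";
import { GcpKmsProviderFactory } from "@app/ee/services/external-kms/providers/gcp-kms";
import { ExternalKmsAwsSchema, ExternalKmsGcpSchema } from "@app/ee/services/external-kms/providers/model";

// Each provider validates its own input shape before the factory builds
// the encrypt/decrypt functions.
const buildExternalKms = async (provider: "aws" | "gcp", decryptedProviderInputBlob: Buffer) => {
  const parsed: unknown = JSON.parse(decryptedProviderInputBlob.toString("utf8"));
  switch (provider) {
    case "aws":
      return AwsKmsProviderFactory({ inputs: await ExternalKmsAwsSchema.parseAsync(parsed) });
    case "gcp":
      return GcpKmsProviderFactory({ inputs: await ExternalKmsGcpSchema.parseAsync(parsed) });
    default:
      throw new Error("Invalid KMS provider.");
  }
};
```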
@ -14,8 +14,6 @@ import { DatabaseError } from "@app/lib/errors";
import { buildFindFilter, ormify, selectAllTableCols, TFindFilter, TFindOpt, withTransaction } from "@app/lib/knex";
import { generateKnexQueryFromScim } from "@app/lib/knex/scim";

import { OrgAuthMethod } from "./org-types";

export type TOrgDALFactory = ReturnType<typeof orgDALFactory>;

export const orgDALFactory = (db: TDbClient) => {
@ -23,78 +21,13 @@ export const orgDALFactory = (db: TDbClient) => {

const findOrgById = async (orgId: string) => {
try {
const org = (await db
.replicaNode()(TableName.Organization)
.where({ [`${TableName.Organization}.id` as "id"]: orgId })
.leftJoin(TableName.SamlConfig, (qb) => {
qb.on(`${TableName.SamlConfig}.orgId`, "=", `${TableName.Organization}.id`).andOn(
`${TableName.SamlConfig}.isActive`,
"=",
db.raw("true")
);
})
.leftJoin(TableName.OidcConfig, (qb) => {
qb.on(`${TableName.OidcConfig}.orgId`, "=", `${TableName.Organization}.id`).andOn(
`${TableName.OidcConfig}.isActive`,
"=",
db.raw("true")
);
})
.select(selectAllTableCols(TableName.Organization))
.select(
db.raw(`
CASE
WHEN ${TableName.SamlConfig}."orgId" IS NOT NULL THEN '${OrgAuthMethod.SAML}'
WHEN ${TableName.OidcConfig}."orgId" IS NOT NULL THEN '${OrgAuthMethod.OIDC}'
ELSE ''
END as "orgAuthMethod"
`)
)
.first()) as TOrganizations & { orgAuthMethod?: string };

const org = await db.replicaNode()(TableName.Organization).where({ id: orgId }).first();
return org;
} catch (error) {
throw new DatabaseError({ error, name: "Find org by id" });
}
};

const findOrgBySlug = async (orgSlug: string) => {
try {
const org = (await db
.replicaNode()(TableName.Organization)
.where({ [`${TableName.Organization}.slug` as "slug"]: orgSlug })
.leftJoin(TableName.SamlConfig, (qb) => {
qb.on(`${TableName.SamlConfig}.orgId`, "=", `${TableName.Organization}.id`).andOn(
`${TableName.SamlConfig}.isActive`,
"=",
db.raw("true")
);
})
.leftJoin(TableName.OidcConfig, (qb) => {
qb.on(`${TableName.OidcConfig}.orgId`, "=", `${TableName.Organization}.id`).andOn(
`${TableName.OidcConfig}.isActive`,
"=",
db.raw("true")
);
})
.select(selectAllTableCols(TableName.Organization))
.select(
db.raw(`
CASE
WHEN ${TableName.SamlConfig}."orgId" IS NOT NULL THEN '${OrgAuthMethod.SAML}'
WHEN ${TableName.OidcConfig}."orgId" IS NOT NULL THEN '${OrgAuthMethod.OIDC}'
ELSE ''
END as "orgAuthMethod"
`)
)
.first()) as TOrganizations & { orgAuthMethod?: string };

return org;
} catch (error) {
throw new DatabaseError({ error, name: "Find org by slug" });
}
};

// special query
const findAllOrgsByUserId = async (userId: string): Promise<(TOrganizations & { orgAuthMethod: string })[]> => {
try {
@ -465,7 +398,6 @@ export const orgDALFactory = (db: TDbClient) => {
findAllOrgMembers,
countAllOrgMembers,
findOrgById,
findOrgBySlug,
findAllOrgsByUserId,
ghostUserExists,
findOrgMembersByUsername,
@ -187,15 +187,6 @@ export const orgServiceFactory = ({
return members;
};

const findOrgBySlug = async (slug: string) => {
const org = await orgDAL.findOrgBySlug(slug);
if (!org) {
throw new NotFoundError({ message: `Organization with slug '${slug}' not found` });
}

return org;
};

const findAllWorkspaces = async ({ actor, actorId, orgId }: TFindAllWorkspacesDTO) => {
const organizationWorkspaceIds = new Set((await projectDAL.find({ orgId })).map((workspace) => workspace.id));

@ -284,7 +275,6 @@ export const orgServiceFactory = ({
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Edit, OrgPermissionSubjects.Settings);

const plan = await licenseService.getPlan(orgId);
const currentOrg = await orgDAL.findOrgById(actorOrgId);

if (enforceMfa !== undefined) {
if (!plan.enforceMfa) {
@ -315,11 +305,6 @@ export const orgServiceFactory = ({
"Failed to enable/disable SCIM provisioning due to plan restriction. Upgrade plan to enable/disable SCIM provisioning."
});
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Edit, OrgPermissionSubjects.Scim);
if (scimEnabled && !currentOrg.orgAuthMethod) {
throw new BadRequestError({
message: "Cannot enable SCIM when neither SAML or OIDC is configured."
});
}
}

if (authEnforced) {
@ -1147,7 +1132,6 @@ export const orgServiceFactory = ({
createIncidentContact,
deleteIncidentContact,
getOrgGroups,
listProjectMembershipsByOrgMembershipId,
findOrgBySlug
listProjectMembershipsByOrgMembershipId
};
};
@ -74,8 +74,3 @@ export type TGetOrgGroupsDTO = TOrgPermission;
export type TListProjectMembershipsByOrgMembershipIdDTO = {
orgMembershipId: string;
} & TOrgPermission;

export enum OrgAuthMethod {
OIDC = "oidc",
SAML = "saml"
}
@ -280,12 +280,7 @@ export const projectMembershipServiceFactory = ({

// validate custom roles input
const customInputRoles = roles.filter(
({ role }) =>
!Object.values(ProjectMembershipRole)
// we don't want to include custom in this check;
// this unintentionally enables setting slug to custom which is reserved
.filter((r) => r !== ProjectMembershipRole.Custom)
.includes(role as ProjectMembershipRole)
({ role }) => !Object.values(ProjectMembershipRole).includes(role as ProjectMembershipRole)
);
const hasCustomRole = Boolean(customInputRoles.length);
if (hasCustomRole) {
@ -191,10 +191,6 @@ export const projectDALFactory = (db: TDbClient) => {

return project;
} catch (error) {
if (error instanceof NotFoundError) {
throw error;
}

throw new DatabaseError({ error, name: "Find all projects" });
}
};
@ -244,10 +240,6 @@ export const projectDALFactory = (db: TDbClient) => {

return project;
} catch (error) {
if (error instanceof NotFoundError || error instanceof UnauthorizedError) {
throw error;
}

throw new DatabaseError({ error, name: "Find project by slug" });
}
};
@ -268,7 +260,7 @@ export const projectDALFactory = (db: TDbClient) => {
}
throw new BadRequestError({ message: "Invalid filter type" });
} catch (error) {
if (error instanceof BadRequestError || error instanceof NotFoundError || error instanceof UnauthorizedError) {
if (error instanceof BadRequestError) {
throw error;
}
throw new DatabaseError({ error, name: `Failed to find project by ${filter.type}` });
@ -285,14 +285,11 @@ export const projectQueueFactory = ({

if (!orgMembership) {
// This can happen. Since we don't remove project memberships and project keys when a user is removed from an org, this is a valid case.
logger.info(
{
userId: key.receiverId,
orgId: project.orgId,
projectId: project.id
},
"User is not in organization"
);
logger.info("User is not in organization", {
userId: key.receiverId,
orgId: project.orgId,
projectId: project.id
});
// eslint-disable-next-line no-continue
continue;
}
@ -554,10 +551,10 @@ export const projectQueueFactory = ({
.catch(() => [null]);

if (!project) {
logger.error(data, "Failed to upgrade project, because no project was found");
logger.error("Failed to upgrade project, because no project was found", data);
} else {
await projectDAL.setProjectUpgradeStatus(data.projectId, ProjectUpgradeStatus.Failed);
logger.error(err, "Failed to upgrade project", {
logger.error("Failed to upgrade project", err, {
extra: {
project,
jobData: data
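Note for reviewers: the logger here looks pino-based (the base branch's `logger.info(mergeObject, message)` call order matches pino's signature). With pino, flipping the arguments is not cosmetic: in `logger.error("msg", obj)` the object is treated as a printf-style interpolation argument rather than a merge object, so it does not become structured fields on the log entry. Illustration:

```ts
import pino from "pino";

const logger = pino();

// Structured: err and projectId become fields of the JSON log line.
logger.error({ err: new Error("boom"), projectId: "p1" }, "Failed to upgrade project");

// Not structured: the object is consumed as an interpolation argument
// for %s/%o placeholders (of which there are none here), so it never
// lands as queryable fields on the entry.
logger.error("Failed to upgrade project", { projectId: "p1" });
```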
@ -1,7 +1,7 @@
import { ForbiddenError } from "@casl/ability";
import slugify from "@sindresorhus/slugify";

import { ProjectMembershipRole, ProjectVersion, TProjectEnvironments } from "@app/db/schemas";
import { OrgMembershipRole, ProjectMembershipRole, ProjectVersion, TProjectEnvironments } from "@app/db/schemas";
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
import { OrgPermissionActions, OrgPermissionSubjects } from "@app/ee/services/permission/org-permission";
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
@ -9,6 +9,7 @@ import { ProjectPermissionActions, ProjectPermissionSub } from "@app/ee/services
import { TProjectTemplateServiceFactory } from "@app/ee/services/project-template/project-template-service";
import { InfisicalProjectTemplate } from "@app/ee/services/project-template/project-template-types";
import { TKeyStoreFactory } from "@app/keystore/keystore";
import { isAtLeastAsPrivileged } from "@app/lib/casl";
import { infisicalSymmetricEncypt } from "@app/lib/crypto/encryption";
import { BadRequestError, ForbiddenRequestError, NotFoundError } from "@app/lib/errors";
import { groupBy } from "@app/lib/fn";
@ -148,7 +149,6 @@ export const projectServiceFactory = ({
actorOrgId,
actorAuthMethod,
workspaceName,
workspaceDescription,
slug: projectSlug,
kmsKeyId,
tx: trx,
@ -206,7 +206,6 @@ export const projectServiceFactory = ({
const project = await projectDAL.create(
{
name: workspaceName,
description: workspaceDescription,
orgId: organization.id,
slug: projectSlug || slugify(`${workspaceName}-${alphaNumericNanoId(4)}`),
kmsSecretManagerKeyId: kmsKeyId,
@ -369,6 +368,20 @@ export const projectServiceFactory = ({
});
}

// Get the role permission for the identity
const { permission: rolePermission, role: customRole } = await permissionService.getOrgPermissionByRole(
OrgMembershipRole.Member,
organization.id
);

// Identity has to be at least a member in order to create projects
const hasPrivilege = isAtLeastAsPrivileged(permission, rolePermission);
if (!hasPrivilege)
throw new ForbiddenRequestError({
message: "Failed to add identity to project with more privileged role"
});
const isCustomRole = Boolean(customRole);

const identityProjectMembership = await identityProjectDAL.create(
{
identityId: actorId,
@ -380,7 +393,8 @@ export const projectServiceFactory = ({
await identityProjectMembershipRoleDAL.create(
{
projectMembershipId: identityProjectMembership.id,
role: ProjectMembershipRole.Admin
role: isCustomRole ? ProjectMembershipRole.Custom : ProjectMembershipRole.Admin,
customRoleId: customRole?.id
},
tx
);
@ -482,7 +496,6 @@ export const projectServiceFactory = ({

const updatedProject = await projectDAL.updateById(project.id, {
name: update.name,
description: update.description,
autoCapitalization: update.autoCapitalization
});
return updatedProject;
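Note: the added privilege gate compares the requesting identity's permission set against the org Member role via `isAtLeastAsPrivileged` before creating the project membership. A simplified model of that comparison, treating permissions as plain rule sets (the real implementation works over CASL abilities):

```ts
// Simplified: `actor` is at least as privileged as `baseline` when it
// can do everything the baseline can (set inclusion over rules).
const isAtLeastAsPrivileged = (actor: Set<string>, baseline: Set<string>) =>
  [...baseline].every((rule) => actor.has(rule));

isAtLeastAsPrivileged(new Set(["project:create", "member:read"]), new Set(["member:read"])); // true
isAtLeastAsPrivileged(new Set(["member:read"]), new Set(["project:create"])); // false
```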
@ -29,7 +29,6 @@ export type TCreateProjectDTO = {
actorId: string;
actorOrgId?: string;
workspaceName: string;
workspaceDescription?: string;
slug?: string;
kmsKeyId?: string;
createDefaultEnvs?: boolean;
@ -70,7 +69,6 @@ export type TUpdateProjectDTO = {
filter: Filter;
update: {
name?: string;
description?: string;
autoCapitalization?: boolean;
};
} & Omit<TProjectPermission, "projectId">;
Some files were not shown because too many files have changed in this diff.