Compare commits

..

5 Commits

Author SHA1 Message Date
Daniel Hougaard
97c96acea5 Update secret-approval-policy-service.ts 2025-07-11 00:59:28 +04:00
Daniel Hougaard
5e24015f2a requested changes 2025-07-11 00:54:28 +04:00
Daniel Hougaard
e71b136859 requested changes 2025-07-10 16:14:40 +04:00
Daniel Hougaard
7d2d69fc7d requested changes 2025-07-05 01:56:35 +04:00
Daniel Hougaard
0569c7e692 fix(approval-policies): improve policies handling 2025-07-04 03:14:43 +04:00
309 changed files with 1009 additions and 6797 deletions

View File

@@ -23,7 +23,7 @@ REDIS_URL=redis://redis:6379
# Required
SITE_URL=http://localhost:8080
# Mail/SMTP
# Mail/SMTP
SMTP_HOST=
SMTP_PORT=
SMTP_FROM_ADDRESS=
@@ -132,6 +132,3 @@ DATADOG_PROFILING_ENABLED=
DATADOG_ENV=
DATADOG_SERVICE=
DATADOG_HOSTNAME=
# kubernetes
KUBERNETES_AUTO_FETCH_SERVICE_ACCOUNT_TOKEN=false

View File

@@ -19,7 +19,7 @@ WORKDIR /app
# Copy dependencies
COPY --from=frontend-dependencies /app/node_modules ./node_modules
# Copy all files
# Copy all files
COPY /frontend .
ENV NODE_ENV production
@@ -32,7 +32,7 @@ ENV VITE_INTERCOM_ID $INTERCOM_ID
ARG INFISICAL_PLATFORM_VERSION
ENV VITE_INFISICAL_PLATFORM_VERSION $INFISICAL_PLATFORM_VERSION
ARG CAPTCHA_SITE_KEY
ENV VITE_CAPTCHA_SITE_KEY $CAPTCHA_SITE_KEY
ENV VITE_CAPTCHA_SITE_KEY $CAPTCHA_SITE_KEY
# Build
RUN npm run build
@@ -134,7 +134,7 @@ RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/x86_64-li
# Install Infisical CLI
RUN curl -1sLf 'https://artifacts-cli.infisical.com/setup.deb.sh' | bash \
&& apt-get update && apt-get install -y infisical=0.41.89 \
&& apt-get update && apt-get install -y infisical=0.41.2 \
&& rm -rf /var/lib/apt/lists/*
RUN groupadd -r -g 1001 nodejs && useradd -r -u 1001 -g nodejs non-root-user
@@ -155,7 +155,7 @@ ENV INTERCOM_ID=$INTERCOM_ID
ARG CAPTCHA_SITE_KEY
ENV CAPTCHA_SITE_KEY=$CAPTCHA_SITE_KEY
WORKDIR /
WORKDIR /
COPY --from=backend-runner /app /backend
@@ -166,9 +166,9 @@ ENV INFISICAL_PLATFORM_VERSION $INFISICAL_PLATFORM_VERSION
ENV PORT 8080
ENV HOST=0.0.0.0
ENV HTTPS_ENABLED false
ENV HTTPS_ENABLED false
ENV NODE_ENV production
ENV STANDALONE_BUILD true
ENV STANDALONE_BUILD true
ENV STANDALONE_MODE true
ENV ChrystokiConfigurationPath=/usr/safenet/lunaclient/
ENV NODE_OPTIONS="--max-old-space-size=1024"

View File

@@ -20,7 +20,7 @@ WORKDIR /app
# Copy dependencies
COPY --from=frontend-dependencies /app/node_modules ./node_modules
# Copy all files
# Copy all files
COPY /frontend .
ENV NODE_ENV production
@@ -33,8 +33,7 @@ ENV VITE_INTERCOM_ID $INTERCOM_ID
ARG INFISICAL_PLATFORM_VERSION
ENV VITE_INFISICAL_PLATFORM_VERSION $INFISICAL_PLATFORM_VERSION
ARG CAPTCHA_SITE_KEY
ENV VITE_CAPTCHA_SITE_KEY $CAPTCHA_SITE_KEY
ENV NODE_OPTIONS="--max-old-space-size=8192"
ENV VITE_CAPTCHA_SITE_KEY $CAPTCHA_SITE_KEY
# Build
RUN npm run build
@@ -78,7 +77,6 @@ RUN npm ci --only-production
COPY /backend .
COPY --chown=non-root-user:nodejs standalone-entrypoint.sh standalone-entrypoint.sh
RUN npm i -D tsconfig-paths
ENV NODE_OPTIONS="--max-old-space-size=8192"
RUN npm run build
# Production stage
@@ -130,7 +128,7 @@ RUN apt-get update && apt-get install -y \
# Install Infisical CLI
RUN curl -1sLf 'https://artifacts-cli.infisical.com/setup.deb.sh' | bash \
&& apt-get update && apt-get install -y infisical=0.41.89 \
&& apt-get update && apt-get install -y infisical=0.41.2 \
&& rm -rf /var/lib/apt/lists/*
WORKDIR /
@@ -166,9 +164,9 @@ ENV INFISICAL_PLATFORM_VERSION $INFISICAL_PLATFORM_VERSION
ENV PORT 8080
ENV HOST=0.0.0.0
ENV HTTPS_ENABLED false
ENV HTTPS_ENABLED false
ENV NODE_ENV production
ENV STANDALONE_BUILD true
ENV STANDALONE_BUILD true
ENV STANDALONE_MODE true
ENV NODE_OPTIONS="--max-old-space-size=1024"

View File

@@ -9,7 +9,7 @@ RUN apt-get update && apt-get install -y \
make \
g++ \
openssh-client \
openssl
openssl
# Install dependencies for TDS driver (required for SAP ASE dynamic secrets)
RUN apt-get install -y \
@@ -55,10 +55,10 @@ COPY --from=build /app .
# Install Infisical CLI
RUN apt-get install -y curl bash && \
curl -1sLf 'https://artifacts-cli.infisical.com/setup.deb.sh' | bash && \
apt-get update && apt-get install -y infisical=0.41.89 git
apt-get update && apt-get install -y infisical=0.41.2 git
HEALTHCHECK --interval=10s --timeout=3s --start-period=10s \
CMD node healthcheck.js
HEALTHCHECK --interval=10s --timeout=3s --start-period=10s \
CMD node healthcheck.js
ENV HOST=0.0.0.0

View File

@@ -57,7 +57,7 @@ RUN mkdir -p /etc/softhsm2/tokens && \
# Install Infisical CLI
RUN curl -1sLf 'https://artifacts-cli.infisical.com/setup.deb.sh' | bash && \
apt-get update && \
apt-get install -y infisical=0.41.89
apt-get install -y infisical=0.41.2
WORKDIR /app

View File

@@ -52,7 +52,7 @@ RUN apt-get install -y opensc
RUN mkdir -p /etc/softhsm2/tokens && \
softhsm2-util --init-token --slot 0 --label "auth-app" --pin 1234 --so-pin 0000
WORKDIR /openssl-build
RUN wget https://www.openssl.org/source/openssl-3.1.2.tar.gz \
&& tar -xf openssl-3.1.2.tar.gz \
@@ -66,7 +66,7 @@ RUN wget https://www.openssl.org/source/openssl-3.1.2.tar.gz \
# Install Infisical CLI
RUN curl -1sLf 'https://artifacts-cli.infisical.com/setup.deb.sh' | bash && \
apt-get update && \
apt-get install -y infisical=0.41.89
apt-get install -y infisical=0.41.2
WORKDIR /app

View File

@@ -4,7 +4,6 @@ import "ts-node/register";
import dotenv from "dotenv";
import type { Knex } from "knex";
import path from "path";
import { initLogger } from "@app/lib/logger";
// Update with your config settings.
dotenv.config({
@@ -14,8 +13,6 @@ dotenv.config({
path: path.join(__dirname, "../../../.env")
});
initLogger();
export default {
development: {
client: "postgres",

View File

@@ -1,21 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
/**
 * Adds a nullable binary `encryptedEnvOverrides` column to the SuperAdmin table.
 * Guarded by a column-existence check so re-running the migration is a no-op
 * instead of failing on a duplicate column.
 */
export async function up(knex: Knex): Promise<void> {
const hasColumn = await knex.schema.hasColumn(TableName.SuperAdmin, "encryptedEnvOverrides");
if (!hasColumn) {
await knex.schema.alterTable(TableName.SuperAdmin, (t) => {
// binary/bytea column; nullable because existing rows have no override data
t.binary("encryptedEnvOverrides").nullable();
});
}
}
/**
 * Reverts the migration by dropping `encryptedEnvOverrides` from the SuperAdmin
 * table, if present. Any data stored in the column is lost on rollback.
 */
export async function down(knex: Knex): Promise<void> {
const hasColumn = await knex.schema.hasColumn(TableName.SuperAdmin, "encryptedEnvOverrides");
if (hasColumn) {
await knex.schema.alterTable(TableName.SuperAdmin, (t) => {
t.dropColumn("encryptedEnvOverrides");
});
}
}

View File

@@ -1,21 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
/**
 * Alters the existing `lastInvitedAt` column on OrgMembership to default to the
 * current timestamp (knex.fn.now()) for newly inserted rows. Only runs when the
 * column already exists; existing row values are not modified by `.alter()`.
 */
export async function up(knex: Knex): Promise<void> {
const hasColumn = await knex.schema.hasColumn(TableName.OrgMembership, "lastInvitedAt");
if (hasColumn) {
await knex.schema.alterTable(TableName.OrgMembership, (t) => {
t.datetime("lastInvitedAt").nullable().defaultTo(knex.fn.now()).alter();
});
}
}
/**
 * Reverts the column default: redefines `lastInvitedAt` as nullable with no
 * default value. Rows inserted while the default was active keep their values.
 */
export async function down(knex: Knex): Promise<void> {
const hasColumn = await knex.schema.hasColumn(TableName.OrgMembership, "lastInvitedAt");
if (hasColumn) {
await knex.schema.alterTable(TableName.OrgMembership, (t) => {
t.datetime("lastInvitedAt").nullable().alter();
});
}
}

View File

@@ -1,46 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
// Temporary Postgres statement_timeout (ms) used while building/dropping the
// indexes below, so long-running index operations are not killed by a shorter
// session/server default.
const MIGRATION_TIMEOUT = 30 * 60 * 1000; // 30 minutes
/**
 * Creates two indexes on the IdentityAccessToken table ("identityId" and
 * "identityUAClientSecretId") to speed up lookups by those foreign keys.
 * The session's statement_timeout is raised for the duration of the index
 * builds and restored in `finally`, even if index creation fails.
 */
export async function up(knex: Knex): Promise<void> {
// Capture the current timeout so it can be restored exactly as it was.
const result = await knex.raw("SHOW statement_timeout");
const originalTimeout = result.rows[0].statement_timeout;
try {
await knex.raw(`SET statement_timeout = ${MIGRATION_TIMEOUT}`);
// iat means IdentityAccessToken
// IF NOT EXISTS makes the migration safe to re-run.
await knex.raw(`
CREATE INDEX IF NOT EXISTS idx_iat_identity_id
ON ${TableName.IdentityAccessToken} ("identityId")
`);
await knex.raw(`
CREATE INDEX IF NOT EXISTS idx_iat_ua_client_secret_id
ON ${TableName.IdentityAccessToken} ("identityUAClientSecretId")
`);
} finally {
// Restore the saved value (quoted: SHOW returns a string like '30s').
await knex.raw(`SET statement_timeout = '${originalTimeout}'`);
}
}
/**
 * Drops the two IdentityAccessToken indexes created by `up`, using the same
 * raise-then-restore statement_timeout pattern. IF EXISTS makes the rollback
 * idempotent.
 */
export async function down(knex: Knex): Promise<void> {
const result = await knex.raw("SHOW statement_timeout");
const originalTimeout = result.rows[0].statement_timeout;
try {
await knex.raw(`SET statement_timeout = ${MIGRATION_TIMEOUT}`);
await knex.raw(`
DROP INDEX IF EXISTS idx_iat_identity_id
`);
await knex.raw(`
DROP INDEX IF EXISTS idx_iat_ua_client_secret_id
`);
} finally {
await knex.raw(`SET statement_timeout = '${originalTimeout}'`);
}
}

View File

@@ -0,0 +1,55 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
/**
 * Backfills missing `secretPath` values on SecretApprovalPolicy and
 * AccessApprovalPolicy, then tightens the column to NOT NULL on both tables.
 * Rows whose secretPath is NULL or the empty string are rewritten to "/**"
 * (the catch-all path) so the subsequent NOT NULL alter cannot fail.
 */
export async function up(knex: Knex): Promise<void> {
// Collect rows that would violate the upcoming NOT NULL constraint.
const existingSecretApprovalPolicies = await knex(TableName.SecretApprovalPolicy)
.whereNull("secretPath")
.orWhere("secretPath", "");
const existingAccessApprovalPolicies = await knex(TableName.AccessApprovalPolicy)
.whereNull("secretPath")
.orWhere("secretPath", "");
// update all the secret approval policies secretPath to be "/**"
if (existingSecretApprovalPolicies.length) {
await knex(TableName.SecretApprovalPolicy)
.whereIn(
"id",
existingSecretApprovalPolicies.map((el) => el.id)
)
.update({
secretPath: "/**"
});
}
// update all the access approval policies secretPath to be "/**"
if (existingAccessApprovalPolicies.length) {
await knex(TableName.AccessApprovalPolicy)
.whereIn(
"id",
existingAccessApprovalPolicies.map((el) => el.id)
)
.update({
secretPath: "/**"
});
}
// With no NULL/empty values left, enforce NOT NULL on both tables.
await knex.schema.alterTable(TableName.SecretApprovalPolicy, (table) => {
table.string("secretPath").notNullable().alter();
});
await knex.schema.alterTable(TableName.AccessApprovalPolicy, (table) => {
table.string("secretPath").notNullable().alter();
});
}
/**
 * Relaxes `secretPath` back to nullable on both policy tables.
 * NOTE(review): this does not undo the "/**" backfill performed in `up` —
 * rows that originally held NULL/"" keep the "/**" value after rollback.
 */
export async function down(knex: Knex): Promise<void> {
await knex.schema.alterTable(TableName.SecretApprovalPolicy, (table) => {
table.string("secretPath").nullable().alter();
});
await knex.schema.alterTable(TableName.AccessApprovalPolicy, (table) => {
table.string("secretPath").nullable().alter();
});
}

View File

@@ -11,7 +11,7 @@ export const AccessApprovalPoliciesSchema = z.object({
id: z.string().uuid(),
name: z.string(),
approvals: z.number().default(1),
secretPath: z.string().nullable().optional(),
secretPath: z.string(),
envId: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date(),

View File

@@ -10,7 +10,7 @@ import { TImmutableDBKeys } from "./models";
export const SecretApprovalPoliciesSchema = z.object({
id: z.string().uuid(),
name: z.string(),
secretPath: z.string().nullable().optional(),
secretPath: z.string(),
approvals: z.number().default(1),
envId: z.string().uuid(),
createdAt: z.date(),

View File

@@ -34,8 +34,7 @@ export const SuperAdminSchema = z.object({
encryptedGitHubAppConnectionClientSecret: zodBuffer.nullable().optional(),
encryptedGitHubAppConnectionSlug: zodBuffer.nullable().optional(),
encryptedGitHubAppConnectionId: zodBuffer.nullable().optional(),
encryptedGitHubAppConnectionPrivateKey: zodBuffer.nullable().optional(),
encryptedEnvOverrides: zodBuffer.nullable().optional()
encryptedGitHubAppConnectionPrivateKey: zodBuffer.nullable().optional()
});
export type TSuperAdmin = z.infer<typeof SuperAdminSchema>;

View File

@@ -2,6 +2,7 @@ import { nanoid } from "nanoid";
import { z } from "zod";
import { ApproverType, BypasserType } from "@app/ee/services/access-approval-policy/access-approval-policy-types";
import { removeTrailingSlash } from "@app/lib/fn";
import { EnforcementLevel } from "@app/lib/types";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
@@ -19,7 +20,7 @@ export const registerAccessApprovalPolicyRouter = async (server: FastifyZodProvi
body: z.object({
projectSlug: z.string().trim(),
name: z.string().optional(),
secretPath: z.string().trim().default("/"),
secretPath: z.string().trim().min(1, { message: "Secret path cannot be empty" }).transform(removeTrailingSlash),
environment: z.string(),
approvers: z
.discriminatedUnion("type", [
@@ -174,8 +175,9 @@ export const registerAccessApprovalPolicyRouter = async (server: FastifyZodProvi
secretPath: z
.string()
.trim()
.min(1, { message: "Secret path cannot be empty" })
.optional()
.transform((val) => (val === "" ? "/" : val)),
.transform((val) => (val ? removeTrailingSlash(val) : val)),
approvers: z
.discriminatedUnion("type", [
z.object({

View File

@@ -17,7 +17,6 @@ import { z } from "zod";
import { LdapGroupMapsSchema } from "@app/db/schemas";
import { TLDAPConfig } from "@app/ee/services/ldap-config/ldap-config-types";
import { isValidLdapFilter, searchGroups } from "@app/ee/services/ldap-config/ldap-fns";
import { ApiDocsTags, LdapSso } from "@app/lib/api-docs";
import { getConfig } from "@app/lib/config/env";
import { BadRequestError } from "@app/lib/errors";
import { logger } from "@app/lib/logger";
@@ -133,18 +132,10 @@ export const registerLdapRouter = async (server: FastifyZodProvider) => {
config: {
rateLimit: readLimit
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
onRequest: verifyAuth([AuthMode.JWT]),
schema: {
hide: false,
tags: [ApiDocsTags.LdapSso],
description: "Get LDAP config",
security: [
{
bearerAuth: []
}
],
querystring: z.object({
organizationId: z.string().trim().describe(LdapSso.GET_CONFIG.organizationId)
organizationId: z.string().trim()
}),
response: {
200: z.object({
@@ -181,32 +172,23 @@ export const registerLdapRouter = async (server: FastifyZodProvider) => {
config: {
rateLimit: writeLimit
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
onRequest: verifyAuth([AuthMode.JWT]),
schema: {
hide: false,
tags: [ApiDocsTags.LdapSso],
description: "Create LDAP config",
security: [
{
bearerAuth: []
}
],
body: z.object({
organizationId: z.string().trim().describe(LdapSso.CREATE_CONFIG.organizationId),
isActive: z.boolean().describe(LdapSso.CREATE_CONFIG.isActive),
url: z.string().trim().describe(LdapSso.CREATE_CONFIG.url),
bindDN: z.string().trim().describe(LdapSso.CREATE_CONFIG.bindDN),
bindPass: z.string().trim().describe(LdapSso.CREATE_CONFIG.bindPass),
uniqueUserAttribute: z.string().trim().default("uidNumber").describe(LdapSso.CREATE_CONFIG.uniqueUserAttribute),
searchBase: z.string().trim().describe(LdapSso.CREATE_CONFIG.searchBase),
searchFilter: z.string().trim().default("(uid={{username}})").describe(LdapSso.CREATE_CONFIG.searchFilter),
groupSearchBase: z.string().trim().describe(LdapSso.CREATE_CONFIG.groupSearchBase),
organizationId: z.string().trim(),
isActive: z.boolean(),
url: z.string().trim(),
bindDN: z.string().trim(),
bindPass: z.string().trim(),
uniqueUserAttribute: z.string().trim().default("uidNumber"),
searchBase: z.string().trim(),
searchFilter: z.string().trim().default("(uid={{username}})"),
groupSearchBase: z.string().trim(),
groupSearchFilter: z
.string()
.trim()
.default("(|(memberUid={{.Username}})(member={{.UserDN}})(uniqueMember={{.UserDN}}))")
.describe(LdapSso.CREATE_CONFIG.groupSearchFilter),
caCert: z.string().trim().default("").describe(LdapSso.CREATE_CONFIG.caCert)
.default("(|(memberUid={{.Username}})(member={{.UserDN}})(uniqueMember={{.UserDN}}))"),
caCert: z.string().trim().default("")
}),
response: {
200: SanitizedLdapConfigSchema
@@ -232,31 +214,23 @@ export const registerLdapRouter = async (server: FastifyZodProvider) => {
config: {
rateLimit: writeLimit
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
onRequest: verifyAuth([AuthMode.JWT]),
schema: {
hide: false,
tags: [ApiDocsTags.LdapSso],
description: "Update LDAP config",
security: [
{
bearerAuth: []
}
],
body: z
.object({
isActive: z.boolean().describe(LdapSso.UPDATE_CONFIG.isActive),
url: z.string().trim().describe(LdapSso.UPDATE_CONFIG.url),
bindDN: z.string().trim().describe(LdapSso.UPDATE_CONFIG.bindDN),
bindPass: z.string().trim().describe(LdapSso.UPDATE_CONFIG.bindPass),
uniqueUserAttribute: z.string().trim().describe(LdapSso.UPDATE_CONFIG.uniqueUserAttribute),
searchBase: z.string().trim().describe(LdapSso.UPDATE_CONFIG.searchBase),
searchFilter: z.string().trim().describe(LdapSso.UPDATE_CONFIG.searchFilter),
groupSearchBase: z.string().trim().describe(LdapSso.UPDATE_CONFIG.groupSearchBase),
groupSearchFilter: z.string().trim().describe(LdapSso.UPDATE_CONFIG.groupSearchFilter),
caCert: z.string().trim().describe(LdapSso.UPDATE_CONFIG.caCert)
isActive: z.boolean(),
url: z.string().trim(),
bindDN: z.string().trim(),
bindPass: z.string().trim(),
uniqueUserAttribute: z.string().trim(),
searchBase: z.string().trim(),
searchFilter: z.string().trim(),
groupSearchBase: z.string().trim(),
groupSearchFilter: z.string().trim(),
caCert: z.string().trim()
})
.partial()
.merge(z.object({ organizationId: z.string().trim().describe(LdapSso.UPDATE_CONFIG.organizationId) })),
.merge(z.object({ organizationId: z.string() })),
response: {
200: SanitizedLdapConfigSchema
}

View File

@@ -13,7 +13,6 @@ import { z } from "zod";
import { OidcConfigsSchema } from "@app/db/schemas";
import { OIDCConfigurationType, OIDCJWTSignatureAlgorithm } from "@app/ee/services/oidc/oidc-config-types";
import { ApiDocsTags, OidcSSo } from "@app/lib/api-docs";
import { getConfig } from "@app/lib/config/env";
import { authRateLimit, readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
@@ -154,18 +153,10 @@ export const registerOidcRouter = async (server: FastifyZodProvider) => {
config: {
rateLimit: readLimit
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
onRequest: verifyAuth([AuthMode.JWT]),
schema: {
hide: false,
tags: [ApiDocsTags.OidcSso],
description: "Get OIDC config",
security: [
{
bearerAuth: []
}
],
querystring: z.object({
organizationId: z.string().trim().describe(OidcSSo.GET_CONFIG.organizationId)
orgSlug: z.string().trim()
}),
response: {
200: SanitizedOidcConfigSchema.pick({
@@ -189,8 +180,9 @@ export const registerOidcRouter = async (server: FastifyZodProvider) => {
}
},
handler: async (req) => {
const { orgSlug } = req.query;
const oidc = await server.services.oidc.getOidc({
organizationId: req.query.organizationId,
orgSlug,
type: "external",
actor: req.permission.type,
actorId: req.permission.id,
@@ -208,16 +200,8 @@ export const registerOidcRouter = async (server: FastifyZodProvider) => {
config: {
rateLimit: writeLimit
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
onRequest: verifyAuth([AuthMode.JWT]),
schema: {
hide: false,
tags: [ApiDocsTags.OidcSso],
description: "Update OIDC config",
security: [
{
bearerAuth: []
}
],
body: z
.object({
allowedEmailDomains: z
@@ -232,26 +216,22 @@ export const registerOidcRouter = async (server: FastifyZodProvider) => {
.split(",")
.map((id) => id.trim())
.join(", ");
})
.describe(OidcSSo.UPDATE_CONFIG.allowedEmailDomains),
discoveryURL: z.string().trim().describe(OidcSSo.UPDATE_CONFIG.discoveryURL),
configurationType: z.nativeEnum(OIDCConfigurationType).describe(OidcSSo.UPDATE_CONFIG.configurationType),
issuer: z.string().trim().describe(OidcSSo.UPDATE_CONFIG.issuer),
authorizationEndpoint: z.string().trim().describe(OidcSSo.UPDATE_CONFIG.authorizationEndpoint),
jwksUri: z.string().trim().describe(OidcSSo.UPDATE_CONFIG.jwksUri),
tokenEndpoint: z.string().trim().describe(OidcSSo.UPDATE_CONFIG.tokenEndpoint),
userinfoEndpoint: z.string().trim().describe(OidcSSo.UPDATE_CONFIG.userinfoEndpoint),
clientId: z.string().trim().describe(OidcSSo.UPDATE_CONFIG.clientId),
clientSecret: z.string().trim().describe(OidcSSo.UPDATE_CONFIG.clientSecret),
isActive: z.boolean().describe(OidcSSo.UPDATE_CONFIG.isActive),
manageGroupMemberships: z.boolean().optional().describe(OidcSSo.UPDATE_CONFIG.manageGroupMemberships),
jwtSignatureAlgorithm: z
.nativeEnum(OIDCJWTSignatureAlgorithm)
.optional()
.describe(OidcSSo.UPDATE_CONFIG.jwtSignatureAlgorithm)
}),
discoveryURL: z.string().trim(),
configurationType: z.nativeEnum(OIDCConfigurationType),
issuer: z.string().trim(),
authorizationEndpoint: z.string().trim(),
jwksUri: z.string().trim(),
tokenEndpoint: z.string().trim(),
userinfoEndpoint: z.string().trim(),
clientId: z.string().trim(),
clientSecret: z.string().trim(),
isActive: z.boolean(),
manageGroupMemberships: z.boolean().optional(),
jwtSignatureAlgorithm: z.nativeEnum(OIDCJWTSignatureAlgorithm).optional()
})
.partial()
.merge(z.object({ organizationId: z.string().describe(OidcSSo.UPDATE_CONFIG.organizationId) })),
.merge(z.object({ orgSlug: z.string() })),
response: {
200: SanitizedOidcConfigSchema.pick({
id: true,
@@ -287,16 +267,8 @@ export const registerOidcRouter = async (server: FastifyZodProvider) => {
config: {
rateLimit: writeLimit
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
onRequest: verifyAuth([AuthMode.JWT]),
schema: {
hide: false,
tags: [ApiDocsTags.OidcSso],
description: "Create OIDC config",
security: [
{
bearerAuth: []
}
],
body: z
.object({
allowedEmailDomains: z
@@ -311,34 +283,23 @@ export const registerOidcRouter = async (server: FastifyZodProvider) => {
.split(",")
.map((id) => id.trim())
.join(", ");
})
.describe(OidcSSo.CREATE_CONFIG.allowedEmailDomains),
configurationType: z.nativeEnum(OIDCConfigurationType).describe(OidcSSo.CREATE_CONFIG.configurationType),
issuer: z.string().trim().optional().default("").describe(OidcSSo.CREATE_CONFIG.issuer),
discoveryURL: z.string().trim().optional().default("").describe(OidcSSo.CREATE_CONFIG.discoveryURL),
authorizationEndpoint: z
.string()
.trim()
.optional()
.default("")
.describe(OidcSSo.CREATE_CONFIG.authorizationEndpoint),
jwksUri: z.string().trim().optional().default("").describe(OidcSSo.CREATE_CONFIG.jwksUri),
tokenEndpoint: z.string().trim().optional().default("").describe(OidcSSo.CREATE_CONFIG.tokenEndpoint),
userinfoEndpoint: z.string().trim().optional().default("").describe(OidcSSo.CREATE_CONFIG.userinfoEndpoint),
clientId: z.string().trim().describe(OidcSSo.CREATE_CONFIG.clientId),
clientSecret: z.string().trim().describe(OidcSSo.CREATE_CONFIG.clientSecret),
isActive: z.boolean().describe(OidcSSo.CREATE_CONFIG.isActive),
organizationId: z.string().trim().describe(OidcSSo.CREATE_CONFIG.organizationId),
manageGroupMemberships: z
.boolean()
.optional()
.default(false)
.describe(OidcSSo.CREATE_CONFIG.manageGroupMemberships),
}),
configurationType: z.nativeEnum(OIDCConfigurationType),
issuer: z.string().trim().optional().default(""),
discoveryURL: z.string().trim().optional().default(""),
authorizationEndpoint: z.string().trim().optional().default(""),
jwksUri: z.string().trim().optional().default(""),
tokenEndpoint: z.string().trim().optional().default(""),
userinfoEndpoint: z.string().trim().optional().default(""),
clientId: z.string().trim(),
clientSecret: z.string().trim(),
isActive: z.boolean(),
orgSlug: z.string().trim(),
manageGroupMemberships: z.boolean().optional().default(false),
jwtSignatureAlgorithm: z
.nativeEnum(OIDCJWTSignatureAlgorithm)
.optional()
.default(OIDCJWTSignatureAlgorithm.RS256)
.describe(OidcSSo.CREATE_CONFIG.jwtSignatureAlgorithm)
})
.superRefine((data, ctx) => {
if (data.configurationType === OIDCConfigurationType.CUSTOM) {

View File

@@ -13,7 +13,6 @@ import { FastifyRequest } from "fastify";
import { z } from "zod";
import { SamlProviders, TGetSamlCfgDTO } from "@app/ee/services/saml-config/saml-config-types";
import { ApiDocsTags, SamlSso } from "@app/lib/api-docs";
import { getConfig } from "@app/lib/config/env";
import { BadRequestError } from "@app/lib/errors";
import { logger } from "@app/lib/logger";
@@ -150,8 +149,8 @@ export const registerSamlRouter = async (server: FastifyZodProvider) => {
firstName,
lastName: lastName as string,
relayState: (req.body as { RelayState?: string }).RelayState,
authProvider: (req as unknown as FastifyRequest).ssoConfig?.authProvider,
orgId: (req as unknown as FastifyRequest).ssoConfig?.orgId,
authProvider: (req as unknown as FastifyRequest).ssoConfig?.authProvider as string,
orgId: (req as unknown as FastifyRequest).ssoConfig?.orgId as string,
metadata: userMetadata
});
cb(null, { isUserCompleted, providerAuthToken });
@@ -263,31 +262,25 @@ export const registerSamlRouter = async (server: FastifyZodProvider) => {
config: {
rateLimit: readLimit
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
onRequest: verifyAuth([AuthMode.JWT]),
schema: {
hide: false,
tags: [ApiDocsTags.SamlSso],
description: "Get SAML config",
security: [
{
bearerAuth: []
}
],
querystring: z.object({
organizationId: z.string().trim().describe(SamlSso.GET_CONFIG.organizationId)
organizationId: z.string().trim()
}),
response: {
200: z.object({
id: z.string(),
organization: z.string(),
orgId: z.string(),
authProvider: z.string(),
isActive: z.boolean(),
entryPoint: z.string(),
issuer: z.string(),
cert: z.string(),
lastUsed: z.date().nullable().optional()
})
200: z
.object({
id: z.string(),
organization: z.string(),
orgId: z.string(),
authProvider: z.string(),
isActive: z.boolean(),
entryPoint: z.string(),
issuer: z.string(),
cert: z.string(),
lastUsed: z.date().nullable().optional()
})
.optional()
}
},
handler: async (req) => {
@@ -309,23 +302,15 @@ export const registerSamlRouter = async (server: FastifyZodProvider) => {
config: {
rateLimit: writeLimit
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
onRequest: verifyAuth([AuthMode.JWT]),
schema: {
hide: false,
tags: [ApiDocsTags.SamlSso],
description: "Create SAML config",
security: [
{
bearerAuth: []
}
],
body: z.object({
organizationId: z.string().trim().describe(SamlSso.CREATE_CONFIG.organizationId),
authProvider: z.nativeEnum(SamlProviders).describe(SamlSso.CREATE_CONFIG.authProvider),
isActive: z.boolean().describe(SamlSso.CREATE_CONFIG.isActive),
entryPoint: z.string().trim().describe(SamlSso.CREATE_CONFIG.entryPoint),
issuer: z.string().trim().describe(SamlSso.CREATE_CONFIG.issuer),
cert: z.string().trim().describe(SamlSso.CREATE_CONFIG.cert)
organizationId: z.string(),
authProvider: z.nativeEnum(SamlProviders),
isActive: z.boolean(),
entryPoint: z.string(),
issuer: z.string(),
cert: z.string()
}),
response: {
200: SanitizedSamlConfigSchema
@@ -356,26 +341,18 @@ export const registerSamlRouter = async (server: FastifyZodProvider) => {
config: {
rateLimit: writeLimit
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
onRequest: verifyAuth([AuthMode.JWT]),
schema: {
hide: false,
tags: [ApiDocsTags.SamlSso],
description: "Update SAML config",
security: [
{
bearerAuth: []
}
],
body: z
.object({
authProvider: z.nativeEnum(SamlProviders).describe(SamlSso.UPDATE_CONFIG.authProvider),
isActive: z.boolean().describe(SamlSso.UPDATE_CONFIG.isActive),
entryPoint: z.string().trim().describe(SamlSso.UPDATE_CONFIG.entryPoint),
issuer: z.string().trim().describe(SamlSso.UPDATE_CONFIG.issuer),
cert: z.string().trim().describe(SamlSso.UPDATE_CONFIG.cert)
authProvider: z.nativeEnum(SamlProviders),
isActive: z.boolean(),
entryPoint: z.string(),
issuer: z.string(),
cert: z.string()
})
.partial()
.merge(z.object({ organizationId: z.string().trim().describe(SamlSso.UPDATE_CONFIG.organizationId) })),
.merge(z.object({ organizationId: z.string() })),
response: {
200: SanitizedSamlConfigSchema
}

View File

@@ -23,10 +23,8 @@ export const registerSecretApprovalPolicyRouter = async (server: FastifyZodProvi
environment: z.string(),
secretPath: z
.string()
.optional()
.nullable()
.default("/")
.transform((val) => (val ? removeTrailingSlash(val) : val)),
.min(1, { message: "Secret path cannot be empty" })
.transform((val) => removeTrailingSlash(val)),
approvers: z
.discriminatedUnion("type", [
z.object({ type: z.literal(ApproverType.Group), id: z.string() }),
@@ -100,10 +98,10 @@ export const registerSecretApprovalPolicyRouter = async (server: FastifyZodProvi
approvals: z.number().min(1).default(1),
secretPath: z
.string()
.trim()
.min(1, { message: "Secret path cannot be empty" })
.optional()
.nullable()
.transform((val) => (val ? removeTrailingSlash(val) : val))
.transform((val) => (val === "" ? "/" : val)),
.transform((val) => (val ? removeTrailingSlash(val) : undefined)),
enforcementLevel: z.nativeEnum(EnforcementLevel).optional(),
allowedSelfApprovals: z.boolean().default(true)
}),

View File

@@ -141,39 +141,14 @@ export const registerSecretApprovalRequestRouter = async (server: FastifyZodProv
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const { approval, projectId, secretMutationEvents } =
await server.services.secretApprovalRequest.mergeSecretApprovalRequest({
actorId: req.permission.id,
actor: req.permission.type,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
approvalId: req.params.id,
bypassReason: req.body.bypassReason
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
orgId: req.permission.orgId,
projectId,
event: {
type: EventType.SECRET_APPROVAL_MERGED,
metadata: {
mergedBy: req.permission.id,
secretApprovalRequestSlug: approval.slug,
secretApprovalRequestId: approval.id
}
}
const { approval } = await server.services.secretApprovalRequest.mergeSecretApprovalRequest({
actorId: req.permission.id,
actor: req.permission.type,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
approvalId: req.params.id,
bypassReason: req.body.bypassReason
});
for await (const event of secretMutationEvents) {
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
orgId: req.permission.orgId,
projectId,
event
});
}
return { approval };
}
});

View File

@@ -1,16 +0,0 @@
import { registerSecretScanningEndpoints } from "@app/ee/routes/v2/secret-scanning-v2-routers/secret-scanning-v2-endpoints";
import {
BitbucketDataSourceSchema,
CreateBitbucketDataSourceSchema,
UpdateBitbucketDataSourceSchema
} from "@app/ee/services/secret-scanning-v2/bitbucket";
import { SecretScanningDataSource } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
/**
 * Registers the Bitbucket secret-scanning data-source routes on the given
 * Fastify server by delegating to the shared endpoint registrar with the
 * Bitbucket-specific schemas.
 */
export const registerBitbucketSecretScanningRouter = async (server: FastifyZodProvider) => {
  return registerSecretScanningEndpoints({
    type: SecretScanningDataSource.Bitbucket,
    server,
    responseSchema: BitbucketDataSourceSchema,
    createSchema: CreateBitbucketDataSourceSchema,
    updateSchema: UpdateBitbucketDataSourceSchema
  });
};

View File

@@ -1,6 +1,5 @@
import { SecretScanningDataSource } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
import { registerBitbucketSecretScanningRouter } from "./bitbucket-secret-scanning-router";
import { registerGitHubSecretScanningRouter } from "./github-secret-scanning-router";
export * from "./secret-scanning-v2-router";
@@ -9,6 +8,5 @@ export const SECRET_SCANNING_REGISTER_ROUTER_MAP: Record<
SecretScanningDataSource,
(server: FastifyZodProvider) => Promise<void>
> = {
[SecretScanningDataSource.GitHub]: registerGitHubSecretScanningRouter,
[SecretScanningDataSource.Bitbucket]: registerBitbucketSecretScanningRouter
[SecretScanningDataSource.GitHub]: registerGitHubSecretScanningRouter
};

View File

@@ -2,7 +2,6 @@ import { z } from "zod";
import { SecretScanningConfigsSchema } from "@app/db/schemas";
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import { BitbucketDataSourceListItemSchema } from "@app/ee/services/secret-scanning-v2/bitbucket";
import { GitHubDataSourceListItemSchema } from "@app/ee/services/secret-scanning-v2/github";
import {
SecretScanningFindingStatus,
@@ -22,10 +21,7 @@ import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
const SecretScanningDataSourceOptionsSchema = z.discriminatedUnion("type", [
GitHubDataSourceListItemSchema,
BitbucketDataSourceListItemSchema
]);
const SecretScanningDataSourceOptionsSchema = z.discriminatedUnion("type", [GitHubDataSourceListItemSchema]);
export const registerSecretScanningV2Router = async (server: FastifyZodProvider) => {
server.route({

View File

@@ -53,7 +53,7 @@ export interface TAccessApprovalPolicyDALFactory
envId: string;
enforcementLevel: string;
allowedSelfApprovals: boolean;
secretPath?: string | null | undefined;
secretPath: string;
deletedAt?: Date | null | undefined;
environment: {
id: string;
@@ -93,7 +93,7 @@ export interface TAccessApprovalPolicyDALFactory
envId: string;
enforcementLevel: string;
allowedSelfApprovals: boolean;
secretPath?: string | null | undefined;
secretPath: string;
deletedAt?: Date | null | undefined;
environment: {
id: string;
@@ -116,7 +116,7 @@ export interface TAccessApprovalPolicyDALFactory
envId: string;
enforcementLevel: string;
allowedSelfApprovals: boolean;
secretPath?: string | null | undefined;
secretPath: string;
deletedAt?: Date | null | undefined;
}>;
findLastValidPolicy: (
@@ -138,7 +138,7 @@ export interface TAccessApprovalPolicyDALFactory
envId: string;
enforcementLevel: string;
allowedSelfApprovals: boolean;
secretPath?: string | null | undefined;
secretPath: string;
deletedAt?: Date | null | undefined;
}
| undefined
@@ -190,7 +190,7 @@ export interface TAccessApprovalPolicyServiceFactory {
envId: string;
enforcementLevel: string;
allowedSelfApprovals: boolean;
secretPath?: string | null | undefined;
secretPath: string;
deletedAt?: Date | null | undefined;
}>;
deleteAccessApprovalPolicy: ({
@@ -214,7 +214,7 @@ export interface TAccessApprovalPolicyServiceFactory {
envId: string;
enforcementLevel: string;
allowedSelfApprovals: boolean;
secretPath?: string | null | undefined;
secretPath: string;
deletedAt?: Date | null | undefined;
environment: {
id: string;
@@ -252,7 +252,7 @@ export interface TAccessApprovalPolicyServiceFactory {
envId: string;
enforcementLevel: string;
allowedSelfApprovals: boolean;
secretPath?: string | null | undefined;
secretPath: string;
deletedAt?: Date | null | undefined;
}>;
getAccessApprovalPolicyByProjectSlug: ({
@@ -286,7 +286,7 @@ export interface TAccessApprovalPolicyServiceFactory {
envId: string;
enforcementLevel: string;
allowedSelfApprovals: boolean;
secretPath?: string | null | undefined;
secretPath: string;
deletedAt?: Date | null | undefined;
environment: {
id: string;
@@ -337,7 +337,7 @@ export interface TAccessApprovalPolicyServiceFactory {
envId: string;
enforcementLevel: string;
allowedSelfApprovals: boolean;
secretPath?: string | null | undefined;
secretPath: string;
deletedAt?: Date | null | undefined;
environment: {
id: string;

View File

@@ -60,6 +60,26 @@ export const accessApprovalPolicyServiceFactory = ({
accessApprovalRequestReviewerDAL,
orgMembershipDAL
}: TAccessApprovalPolicyServiceFactoryDep): TAccessApprovalPolicyServiceFactory => {
const $policyExists = async ({
envId,
secretPath,
policyId
}: {
envId: string;
secretPath: string;
policyId?: string;
}) => {
const policy = await accessApprovalPolicyDAL
.findOne({
envId,
secretPath,
deletedAt: null
})
.catch(() => null);
return policyId ? policy && policy.id !== policyId : Boolean(policy);
};
const createAccessApprovalPolicy: TAccessApprovalPolicyServiceFactory["createAccessApprovalPolicy"] = async ({
name,
actor,
@@ -106,6 +126,12 @@ export const accessApprovalPolicyServiceFactory = ({
const env = await projectEnvDAL.findOne({ slug: environment, projectId: project.id });
if (!env) throw new NotFoundError({ message: `Environment with slug '${environment}' not found` });
if (await $policyExists({ envId: env.id, secretPath })) {
throw new BadRequestError({
message: `A policy for secret path '${secretPath}' already exists in environment '${environment}'`
});
}
let approverUserIds = userApprovers;
if (userApproverNames.length) {
const approverUsersInDB = await userDAL.find({
@@ -279,7 +305,11 @@ export const accessApprovalPolicyServiceFactory = ({
) as { username: string; sequence?: number }[];
const accessApprovalPolicy = await accessApprovalPolicyDAL.findById(policyId);
if (!accessApprovalPolicy) throw new BadRequestError({ message: "Approval policy not found" });
if (!accessApprovalPolicy) {
throw new NotFoundError({
message: `Access approval policy with ID '${policyId}' not found`
});
}
const currentApprovals = approvals || accessApprovalPolicy.approvals;
if (
@@ -290,9 +320,18 @@ export const accessApprovalPolicyServiceFactory = ({
throw new BadRequestError({ message: "Approvals cannot be greater than approvers" });
}
if (!accessApprovalPolicy) {
throw new NotFoundError({ message: `Secret approval policy with ID '${policyId}' not found` });
if (
await $policyExists({
envId: accessApprovalPolicy.envId,
secretPath: secretPath || accessApprovalPolicy.secretPath,
policyId: accessApprovalPolicy.id
})
) {
throw new BadRequestError({
message: `A policy for secret path '${secretPath}' already exists in environment '${accessApprovalPolicy.environment.slug}'`
});
}
const { permission } = await permissionService.getProjectPermission({
actor,
actorId,

View File

@@ -122,7 +122,7 @@ export interface TAccessApprovalPolicyServiceFactory {
envId: string;
enforcementLevel: string;
allowedSelfApprovals: boolean;
secretPath?: string | null | undefined;
secretPath: string;
deletedAt?: Date | null | undefined;
}>;
deleteAccessApprovalPolicy: ({
@@ -146,7 +146,7 @@ export interface TAccessApprovalPolicyServiceFactory {
envId: string;
enforcementLevel: string;
allowedSelfApprovals: boolean;
secretPath?: string | null | undefined;
secretPath: string;
deletedAt?: Date | null | undefined;
environment: {
id: string;
@@ -218,7 +218,7 @@ export interface TAccessApprovalPolicyServiceFactory {
envId: string;
enforcementLevel: string;
allowedSelfApprovals: boolean;
secretPath?: string | null | undefined;
secretPath: string;
deletedAt?: Date | null | undefined;
environment: {
id: string;
@@ -269,7 +269,7 @@ export interface TAccessApprovalPolicyServiceFactory {
envId: string;
enforcementLevel: string;
allowedSelfApprovals: boolean;
secretPath?: string | null | undefined;
secretPath: string;
deletedAt?: Date | null | undefined;
environment: {
id: string;

View File

@@ -116,15 +116,6 @@ interface BaseAuthData {
userAgentType?: UserAgentType;
}
export enum SecretApprovalEvent {
Create = "create",
Update = "update",
Delete = "delete",
CreateMany = "create-many",
UpdateMany = "update-many",
DeleteMany = "delete-many"
}
export enum UserAgentType {
WEB = "web",
CLI = "cli",
@@ -1714,17 +1705,6 @@ interface SecretApprovalRequest {
committedBy: string;
secretApprovalRequestSlug: string;
secretApprovalRequestId: string;
eventType: SecretApprovalEvent;
secretKey?: string;
secretId?: string;
secrets?: {
secretKey?: string;
secretId?: string;
environment?: string;
secretPath?: string;
}[];
environment: string;
secretPath: string;
};
}

View File

@@ -21,7 +21,7 @@ import { randomUUID } from "crypto";
import { z } from "zod";
import { getConfig } from "@app/lib/config/env";
import { BadRequestError, UnauthorizedError } from "@app/lib/errors";
import { BadRequestError } from "@app/lib/errors";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { AwsIamAuthType, DynamicSecretAwsIamSchema, TDynamicProviderFns } from "./models";
@@ -81,21 +81,6 @@ export const AwsIamProvider = (): TDynamicProviderFns => {
return client;
}
if (providerInputs.method === AwsIamAuthType.IRSA) {
// Allow instances to disable automatic service account token fetching (e.g. for shared cloud)
if (!appCfg.KUBERNETES_AUTO_FETCH_SERVICE_ACCOUNT_TOKEN) {
throw new UnauthorizedError({
message: "Failed to get AWS credentials via IRSA: KUBERNETES_AUTO_FETCH_SERVICE_ACCOUNT_TOKEN is not enabled."
});
}
// The SDK will automatically pick up credentials from the environment
const client = new IAMClient({
region: providerInputs.region
});
return client;
}
const client = new IAMClient({
region: providerInputs.region,
credentials: {
@@ -116,7 +101,7 @@ export const AwsIamProvider = (): TDynamicProviderFns => {
.catch((err) => {
const message = (err as Error)?.message;
if (
(providerInputs.method === AwsIamAuthType.AssumeRole || providerInputs.method === AwsIamAuthType.IRSA) &&
providerInputs.method === AwsIamAuthType.AssumeRole &&
// assume role will throw an error asking to provider username, but if so this has access in aws correctly
message.includes("Must specify userName when calling with non-User credentials")
) {

View File

@@ -28,8 +28,7 @@ export enum SqlProviders {
export enum AwsIamAuthType {
AssumeRole = "assume-role",
AccessKey = "access-key",
IRSA = "irsa"
AccessKey = "access-key"
}
export enum ElasticSearchAuthTypes {
@@ -222,16 +221,6 @@ export const DynamicSecretAwsIamSchema = z.preprocess(
userGroups: z.string().trim().optional(),
policyArns: z.string().trim().optional(),
tags: ResourceMetadataSchema.optional()
}),
z.object({
method: z.literal(AwsIamAuthType.IRSA),
region: z.string().trim().min(1),
awsPath: z.string().trim().optional(),
permissionBoundaryPolicyArn: z.string().trim().optional(),
policyDocument: z.string().trim().optional(),
userGroups: z.string().trim().optional(),
policyArns: z.string().trim().optional(),
tags: ResourceMetadataSchema.optional()
})
])
);

View File

@@ -107,26 +107,34 @@ export const oidcConfigServiceFactory = ({
kmsService
}: TOidcConfigServiceFactoryDep) => {
const getOidc = async (dto: TGetOidcCfgDTO) => {
const oidcCfg = await oidcConfigDAL.findOne({
orgId: dto.organizationId
});
if (!oidcCfg) {
const org = await orgDAL.findOne({ slug: dto.orgSlug });
if (!org) {
throw new NotFoundError({
message: `OIDC configuration for organization with ID '${dto.organizationId}' not found`
message: `Organization with slug '${dto.orgSlug}' not found`,
name: "OrgNotFound"
});
}
if (dto.type === "external") {
const { permission } = await permissionService.getOrgPermission(
dto.actor,
dto.actorId,
dto.organizationId,
org.id,
dto.actorAuthMethod,
dto.actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Read, OrgPermissionSubjects.Sso);
}
const oidcCfg = await oidcConfigDAL.findOne({
orgId: org.id
});
if (!oidcCfg) {
throw new NotFoundError({
message: `OIDC configuration for organization with slug '${dto.orgSlug}' not found`
});
}
const { decryptor } = await kmsService.createCipherPairWithDataKey({
type: KmsDataKey.Organization,
orgId: oidcCfg.orgId
@@ -457,7 +465,7 @@ export const oidcConfigServiceFactory = ({
};
const updateOidcCfg = async ({
organizationId,
orgSlug,
allowedEmailDomains,
configurationType,
discoveryURL,
@@ -476,11 +484,13 @@ export const oidcConfigServiceFactory = ({
manageGroupMemberships,
jwtSignatureAlgorithm
}: TUpdateOidcCfgDTO) => {
const org = await orgDAL.findOne({ id: organizationId });
const org = await orgDAL.findOne({
slug: orgSlug
});
if (!org) {
throw new NotFoundError({
message: `Organization with ID '${organizationId}' not found`
message: `Organization with slug '${orgSlug}' not found`
});
}
@@ -545,7 +555,7 @@ export const oidcConfigServiceFactory = ({
};
const createOidcCfg = async ({
organizationId,
orgSlug,
allowedEmailDomains,
configurationType,
discoveryURL,
@@ -564,10 +574,12 @@ export const oidcConfigServiceFactory = ({
manageGroupMemberships,
jwtSignatureAlgorithm
}: TCreateOidcCfgDTO) => {
const org = await orgDAL.findOne({ id: organizationId });
const org = await orgDAL.findOne({
slug: orgSlug
});
if (!org) {
throw new NotFoundError({
message: `Organization with ID '${organizationId}' not found`
message: `Organization with slug '${orgSlug}' not found`
});
}
@@ -627,7 +639,7 @@ export const oidcConfigServiceFactory = ({
const oidcCfg = await getOidc({
type: "internal",
organizationId: org.id
orgSlug
});
if (!oidcCfg || !oidcCfg.isActive) {

View File

@@ -26,11 +26,11 @@ export type TOidcLoginDTO = {
export type TGetOidcCfgDTO =
| ({
type: "external";
organizationId: string;
orgSlug: string;
} & TGenericPermission)
| {
type: "internal";
organizationId: string;
orgSlug: string;
};
export type TCreateOidcCfgDTO = {
@@ -45,7 +45,7 @@ export type TCreateOidcCfgDTO = {
clientId: string;
clientSecret: string;
isActive: boolean;
organizationId: string;
orgSlug: string;
manageGroupMemberships: boolean;
jwtSignatureAlgorithm: OIDCJWTSignatureAlgorithm;
} & TGenericPermission;
@@ -62,7 +62,7 @@ export type TUpdateOidcCfgDTO = Partial<{
clientId: string;
clientSecret: string;
isActive: boolean;
organizationId: string;
orgSlug: string;
manageGroupMemberships: boolean;
jwtSignatureAlgorithm: OIDCJWTSignatureAlgorithm;
}> &

View File

@@ -148,18 +148,10 @@ export const samlConfigServiceFactory = ({
let samlConfig: TSamlConfigs | undefined;
if (dto.type === "org") {
samlConfig = await samlConfigDAL.findOne({ orgId: dto.orgId });
if (!samlConfig) {
throw new NotFoundError({
message: `SAML configuration for organization with ID '${dto.orgId}' not found`
});
}
if (!samlConfig) return;
} else if (dto.type === "orgSlug") {
const org = await orgDAL.findOne({ slug: dto.orgSlug });
if (!org) {
throw new NotFoundError({
message: `Organization with slug '${dto.orgSlug}' not found`
});
}
if (!org) return;
samlConfig = await samlConfigDAL.findOne({ orgId: org.id });
} else if (dto.type === "ssoId") {
// TODO:

View File

@@ -61,17 +61,20 @@ export type TSamlLoginDTO = {
export type TSamlConfigServiceFactory = {
createSamlCfg: (arg: TCreateSamlCfgDTO) => Promise<TSamlConfigs>;
updateSamlCfg: (arg: TUpdateSamlCfgDTO) => Promise<TSamlConfigs>;
getSaml: (arg: TGetSamlCfgDTO) => Promise<{
id: string;
organization: string;
orgId: string;
authProvider: string;
isActive: boolean;
entryPoint: string;
issuer: string;
cert: string;
lastUsed: Date | null | undefined;
}>;
getSaml: (arg: TGetSamlCfgDTO) => Promise<
| {
id: string;
organization: string;
orgId: string;
authProvider: string;
isActive: boolean;
entryPoint: string;
issuer: string;
cert: string;
lastUsed: Date | null | undefined;
}
| undefined
>;
samlLogin: (arg: TSamlLoginDTO) => Promise<{
isUserCompleted: boolean;
providerAuthToken: string;

View File

@@ -55,6 +55,26 @@ export const secretApprovalPolicyServiceFactory = ({
licenseService,
secretApprovalRequestDAL
}: TSecretApprovalPolicyServiceFactoryDep) => {
const $policyExists = async ({
envId,
secretPath,
policyId
}: {
envId: string;
secretPath: string;
policyId?: string;
}) => {
const policy = await secretApprovalPolicyDAL
.findOne({
envId,
secretPath,
deletedAt: null
})
.catch(() => null);
return policyId ? policy && policy.id !== policyId : Boolean(policy);
};
const createSecretApprovalPolicy = async ({
name,
actor,
@@ -106,10 +126,17 @@ export const secretApprovalPolicyServiceFactory = ({
}
const env = await projectEnvDAL.findOne({ slug: environment, projectId });
if (!env)
if (!env) {
throw new NotFoundError({
message: `Environment with slug '${environment}' not found in project with ID ${projectId}`
});
}
if (await $policyExists({ envId: env.id, secretPath })) {
throw new BadRequestError({
message: `A policy for secret path '${secretPath}' already exists in environment '${environment}'`
});
}
let groupBypassers: string[] = [];
let bypasserUserIds: string[] = [];
@@ -260,6 +287,18 @@ export const secretApprovalPolicyServiceFactory = ({
});
}
if (
await $policyExists({
envId: secretApprovalPolicy.envId,
secretPath: secretPath || secretApprovalPolicy.secretPath,
policyId: secretApprovalPolicy.id
})
) {
throw new BadRequestError({
message: `A policy for secret path '${secretPath}' already exists in environment '${secretApprovalPolicy.environment.slug}'`
});
}
const { permission } = await permissionService.getProjectPermission({
actor,
actorId,

View File

@@ -4,7 +4,7 @@ import { ApproverType, BypasserType } from "../access-approval-policy/access-app
export type TCreateSapDTO = {
approvals: number;
secretPath?: string | null;
secretPath: string;
environment: string;
approvers: ({ type: ApproverType.Group; id: string } | { type: ApproverType.User; id?: string; username?: string })[];
bypassers?: (
@@ -20,7 +20,7 @@ export type TCreateSapDTO = {
export type TUpdateSapDTO = {
secretPolicyId: string;
approvals?: number;
secretPath?: string | null;
secretPath?: string;
approvers: ({ type: ApproverType.Group; id: string } | { type: ApproverType.User; id?: string; username?: string })[];
bypassers?: (
| { type: BypasserType.Group; id: string }

View File

@@ -10,7 +10,6 @@ import {
TSecretApprovalRequestsSecretsInsert,
TSecretApprovalRequestsSecretsV2Insert
} from "@app/db/schemas";
import { Event, EventType } from "@app/ee/services/audit-log/audit-log-types";
import { getConfig } from "@app/lib/config/env";
import { decryptSymmetric128BitHexKeyUTF8 } from "@app/lib/crypto";
import { BadRequestError, ForbiddenRequestError, NotFoundError } from "@app/lib/errors";
@@ -524,7 +523,7 @@ export const secretApprovalRequestServiceFactory = ({
});
}
const { policy, folderId, projectId, bypassers, environment } = secretApprovalRequest;
const { policy, folderId, projectId, bypassers } = secretApprovalRequest;
if (policy.deletedAt) {
throw new BadRequestError({
message: "The policy associated with this secret approval request has been deleted."
@@ -958,112 +957,7 @@ export const secretApprovalRequestServiceFactory = ({
});
}
const { created, updated, deleted } = mergeStatus.secrets;
const secretMutationEvents: Event[] = [];
if (created.length) {
if (created.length > 1) {
secretMutationEvents.push({
type: EventType.CREATE_SECRETS,
metadata: {
environment,
secretPath: folder.path,
secrets: created.map((secret) => ({
secretId: secret.id,
secretVersion: 1,
// @ts-expect-error not present on v1 secrets
secretKey: secret.key as string,
// @ts-expect-error not present on v1 secrets
secretMetadata: secret.secretMetadata as ResourceMetadataDTO
}))
}
});
} else {
const [secret] = created;
secretMutationEvents.push({
type: EventType.CREATE_SECRET,
metadata: {
environment,
secretPath: folder.path,
secretId: secret.id,
secretVersion: 1,
// @ts-expect-error not present on v1 secrets
secretKey: secret.key as string,
// @ts-expect-error not present on v1 secrets
secretMetadata: secret.secretMetadata as ResourceMetadataDTO
}
});
}
}
if (updated.length) {
if (updated.length > 1) {
secretMutationEvents.push({
type: EventType.UPDATE_SECRETS,
metadata: {
environment,
secretPath: folder.path,
secrets: updated.map((secret) => ({
secretId: secret.id,
secretVersion: secret.version,
// @ts-expect-error not present on v1 secrets
secretKey: secret.key as string,
// @ts-expect-error not present on v1 secrets
secretMetadata: secret.secretMetadata as ResourceMetadataDTO
}))
}
});
} else {
const [secret] = updated;
secretMutationEvents.push({
type: EventType.UPDATE_SECRET,
metadata: {
environment,
secretPath: folder.path,
secretId: secret.id,
secretVersion: secret.version,
// @ts-expect-error not present on v1 secrets
secretKey: secret.key as string,
// @ts-expect-error not present on v1 secrets
secretMetadata: secret.secretMetadata as ResourceMetadataDTO
}
});
}
}
if (deleted.length) {
if (deleted.length > 1) {
secretMutationEvents.push({
type: EventType.DELETE_SECRETS,
metadata: {
environment,
secretPath: folder.path,
secrets: deleted.map((secret) => ({
secretId: secret.id,
secretVersion: secret.version,
// @ts-expect-error not present on v1 secrets
secretKey: secret.key as string
}))
}
});
} else {
const [secret] = deleted;
secretMutationEvents.push({
type: EventType.DELETE_SECRET,
metadata: {
environment,
secretPath: folder.path,
secretId: secret.id,
secretVersion: secret.version,
// @ts-expect-error not present on v1 secrets
secretKey: secret.key as string
}
});
}
}
return { ...mergeStatus, projectId, secretMutationEvents };
return mergeStatus;
};
// function to save secret change to secret approval

View File

@@ -1,9 +0,0 @@
import { SecretScanningDataSource } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
import { TSecretScanningDataSourceListItem } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-types";
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
export const BITBUCKET_SECRET_SCANNING_DATA_SOURCE_LIST_OPTION: TSecretScanningDataSourceListItem = {
name: "Bitbucket",
type: SecretScanningDataSource.Bitbucket,
connection: AppConnection.Bitbucket
};

View File

@@ -1,314 +0,0 @@
import { join } from "path";
import { scanContentAndGetFindings } from "@app/ee/services/secret-scanning/secret-scanning-queue/secret-scanning-fns";
import { SecretMatch } from "@app/ee/services/secret-scanning/secret-scanning-queue/secret-scanning-queue-types";
import {
SecretScanningFindingSeverity,
SecretScanningResource
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
import {
cloneRepository,
convertPatchLineToFileLineNumber,
replaceNonChangesWithNewlines
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-fns";
import {
TSecretScanningFactoryGetDiffScanFindingsPayload,
TSecretScanningFactoryGetDiffScanResourcePayload,
TSecretScanningFactoryGetFullScanPath,
TSecretScanningFactoryInitialize,
TSecretScanningFactoryListRawResources,
TSecretScanningFactoryPostInitialization,
TSecretScanningFactoryTeardown
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-types";
import { getConfig } from "@app/lib/config/env";
import { request } from "@app/lib/config/request";
import { titleCaseToCamelCase } from "@app/lib/fn";
import { logger } from "@app/lib/logger";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { BasicRepositoryRegex } from "@app/lib/regex";
import {
getBitbucketUser,
listBitbucketRepositories,
TBitbucketConnection
} from "@app/services/app-connection/bitbucket";
import { IntegrationUrls } from "@app/services/integration-auth/integration-list";
import {
TBitbucketDataSourceCredentials,
TBitbucketDataSourceInput,
TBitbucketDataSourceWithConnection,
TQueueBitbucketResourceDiffScan
} from "./bitbucket-secret-scanning-types";
export const BitbucketSecretScanningFactory = () => {
const initialize: TSecretScanningFactoryInitialize<
TBitbucketDataSourceInput,
TBitbucketConnection,
TBitbucketDataSourceCredentials
> = async ({ connection, payload }, callback) => {
const cfg = getConfig();
const { email, apiToken } = connection.credentials;
const authHeader = `Basic ${Buffer.from(`${email}:${apiToken}`).toString("base64")}`;
const { data } = await request.post<{ uuid: string }>(
`${IntegrationUrls.BITBUCKET_API_URL}/2.0/workspaces/${encodeURIComponent(payload.config.workspaceSlug)}/hooks`,
{
description: "Infisical webhook for push events",
url: `${cfg.SITE_URL}/secret-scanning/webhooks/bitbucket`,
active: false,
events: ["repo:push"]
},
{
headers: {
Authorization: authHeader,
Accept: "application/json"
}
}
);
return callback({
credentials: { webhookId: data.uuid, webhookSecret: alphaNumericNanoId(64) }
});
};
const postInitialization: TSecretScanningFactoryPostInitialization<
TBitbucketDataSourceInput,
TBitbucketConnection,
TBitbucketDataSourceCredentials
> = async ({ dataSourceId, credentials, connection, payload }) => {
const { email, apiToken } = connection.credentials;
const { webhookId, webhookSecret } = credentials;
const authHeader = `Basic ${Buffer.from(`${email}:${apiToken}`).toString("base64")}`;
const cfg = getConfig();
const newWebhookUrl = `${cfg.SITE_URL}/secret-scanning/webhooks/bitbucket?dataSourceId=${dataSourceId}`;
await request.put(
`${IntegrationUrls.BITBUCKET_API_URL}/2.0/workspaces/${encodeURIComponent(payload.config.workspaceSlug)}/hooks/${webhookId}`,
{
description: "Infisical webhook for push events",
url: newWebhookUrl,
active: true,
events: ["repo:push"],
secret: webhookSecret
},
{
headers: {
Authorization: authHeader,
Accept: "application/json"
}
}
);
};
const teardown: TSecretScanningFactoryTeardown<
TBitbucketDataSourceWithConnection,
TBitbucketDataSourceCredentials
> = async ({ credentials, dataSource }) => {
const {
connection: {
credentials: { email, apiToken }
},
config
} = dataSource;
const { webhookId } = credentials;
const authHeader = `Basic ${Buffer.from(`${email}:${apiToken}`).toString("base64")}`;
try {
await request.delete(
`${IntegrationUrls.BITBUCKET_API_URL}/2.0/workspaces/${config.workspaceSlug}/hooks/${webhookId}`,
{
headers: {
Authorization: authHeader,
Accept: "application/json"
}
}
);
} catch (err) {
logger.error(`teardown: Bitbucket - Failed to call delete on webhook [webhookId=${webhookId}]`);
}
};
const listRawResources: TSecretScanningFactoryListRawResources<TBitbucketDataSourceWithConnection> = async (
dataSource
) => {
const {
connection,
config: { includeRepos, workspaceSlug }
} = dataSource;
const repos = await listBitbucketRepositories(connection, workspaceSlug);
const filteredRepos: typeof repos = [];
if (includeRepos.includes("*")) {
filteredRepos.push(...repos);
} else {
filteredRepos.push(...repos.filter((repo) => includeRepos.includes(repo.full_name)));
}
return filteredRepos.map(({ full_name, uuid }) => ({
name: full_name,
externalId: uuid,
type: SecretScanningResource.Repository
}));
};
const getFullScanPath: TSecretScanningFactoryGetFullScanPath<TBitbucketDataSourceWithConnection> = async ({
dataSource,
resourceName,
tempFolder
}) => {
const {
connection: {
credentials: { apiToken, email }
}
} = dataSource;
const repoPath = join(tempFolder, "repo.git");
if (!BasicRepositoryRegex.test(resourceName)) {
throw new Error("Invalid Bitbucket repository name");
}
const { username } = await getBitbucketUser({ email, apiToken });
await cloneRepository({
cloneUrl: `https://${encodeURIComponent(username)}:${apiToken}@bitbucket.org/${resourceName}.git`,
repoPath
});
return repoPath;
};
const getDiffScanResourcePayload: TSecretScanningFactoryGetDiffScanResourcePayload<
TQueueBitbucketResourceDiffScan["payload"]
> = ({ repository }) => {
return {
name: repository.full_name,
externalId: repository.uuid,
type: SecretScanningResource.Repository
};
};
const getDiffScanFindingsPayload: TSecretScanningFactoryGetDiffScanFindingsPayload<
TBitbucketDataSourceWithConnection,
TQueueBitbucketResourceDiffScan["payload"]
> = async ({ dataSource, payload, resourceName, configPath }) => {
const {
connection: {
credentials: { apiToken, email }
}
} = dataSource;
const { push, repository } = payload;
const allFindings: SecretMatch[] = [];
const authHeader = `Basic ${Buffer.from(`${email}:${apiToken}`).toString("base64")}`;
for (const change of push.changes) {
for (const commit of change.commits) {
// eslint-disable-next-line no-await-in-loop
const { data: diffstat } = await request.get<{
values: {
status: "added" | "modified" | "removed" | "renamed";
new?: { path: string };
old?: { path: string };
}[];
}>(`${IntegrationUrls.BITBUCKET_API_URL}/2.0/repositories/${repository.full_name}/diffstat/${commit.hash}`, {
headers: {
Authorization: authHeader,
Accept: "application/json"
}
});
// eslint-disable-next-line no-continue
if (!diffstat.values) continue;
for (const file of diffstat.values) {
if ((file.status === "added" || file.status === "modified") && file.new?.path) {
const filePath = file.new.path;
// eslint-disable-next-line no-await-in-loop
const { data: patch } = await request.get<string>(
`https://api.bitbucket.org/2.0/repositories/${repository.full_name}/diff/${commit.hash}`,
{
params: {
path: filePath
},
headers: {
Authorization: authHeader
},
responseType: "text"
}
);
// eslint-disable-next-line no-continue
if (!patch) continue;
// eslint-disable-next-line no-await-in-loop
const findings = await scanContentAndGetFindings(replaceNonChangesWithNewlines(`\n${patch}`), configPath);
const adjustedFindings = findings.map((finding) => {
const startLine = convertPatchLineToFileLineNumber(patch, finding.StartLine);
const endLine =
finding.StartLine === finding.EndLine
? startLine
: convertPatchLineToFileLineNumber(patch, finding.EndLine);
const startColumn = finding.StartColumn - 1; // subtract 1 for +
const endColumn = finding.EndColumn - 1; // subtract 1 for +
const authorName = commit.author.user?.display_name || commit.author.raw.split(" <")[0];
const emailMatch = commit.author.raw.match(/<(.*)>/);
const authorEmail = emailMatch?.[1] ?? "";
return {
...finding,
StartLine: startLine,
EndLine: endLine,
StartColumn: startColumn,
EndColumn: endColumn,
File: filePath,
Commit: commit.hash,
Author: authorName,
Email: authorEmail,
Message: commit.message,
Fingerprint: `${commit.hash}:${filePath}:${finding.RuleID}:${startLine}:${startColumn}`,
Date: commit.date,
Link: `https://bitbucket.org/${resourceName}/src/${commit.hash}/${filePath}#lines-${startLine}`
};
});
allFindings.push(...adjustedFindings);
}
}
}
}
return allFindings.map(
({
// discard match and secret as we don't want to store
Match,
Secret,
...finding
}) => ({
details: titleCaseToCamelCase(finding),
fingerprint: finding.Fingerprint,
severity: SecretScanningFindingSeverity.High,
rule: finding.RuleID
})
);
};
return {
initialize,
postInitialization,
listRawResources,
getFullScanPath,
getDiffScanResourcePayload,
getDiffScanFindingsPayload,
teardown
};
};

View File

@@ -1,97 +0,0 @@
import { z } from "zod";
import {
SecretScanningDataSource,
SecretScanningResource
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
import {
BaseCreateSecretScanningDataSourceSchema,
BaseSecretScanningDataSourceSchema,
BaseSecretScanningFindingSchema,
BaseUpdateSecretScanningDataSourceSchema,
GitRepositoryScanFindingDetailsSchema
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-schemas";
import { SecretScanningDataSources } from "@app/lib/api-docs";
import { BasicRepositoryRegex } from "@app/lib/regex";
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
export const BitbucketDataSourceConfigSchema = z.object({
workspaceSlug: z
.string()
.min(1, "Workspace slug required")
.max(128)
.describe(SecretScanningDataSources.CONFIG.BITBUCKET.workspaceSlug),
includeRepos: z
.array(
z
.string()
.min(1)
.max(256)
.refine((value) => value === "*" || BasicRepositoryRegex.test(value), "Invalid repository name format")
)
.nonempty("One or more repositories required")
.max(100, "Cannot configure more than 100 repositories")
.default(["*"])
.describe(SecretScanningDataSources.CONFIG.BITBUCKET.includeRepos)
});
export const BitbucketDataSourceSchema = BaseSecretScanningDataSourceSchema({
type: SecretScanningDataSource.Bitbucket,
isConnectionRequired: true
})
.extend({
config: BitbucketDataSourceConfigSchema
})
.describe(
JSON.stringify({
title: "Bitbucket"
})
);
export const CreateBitbucketDataSourceSchema = BaseCreateSecretScanningDataSourceSchema({
type: SecretScanningDataSource.Bitbucket,
isConnectionRequired: true
})
.extend({
config: BitbucketDataSourceConfigSchema
})
.describe(
JSON.stringify({
title: "Bitbucket"
})
);
export const UpdateBitbucketDataSourceSchema = BaseUpdateSecretScanningDataSourceSchema(
SecretScanningDataSource.Bitbucket
)
.extend({
config: BitbucketDataSourceConfigSchema.optional()
})
.describe(
JSON.stringify({
title: "Bitbucket"
})
);
export const BitbucketDataSourceListItemSchema = z
.object({
name: z.literal("Bitbucket"),
connection: z.literal(AppConnection.Bitbucket),
type: z.literal(SecretScanningDataSource.Bitbucket)
})
.describe(
JSON.stringify({
title: "Bitbucket"
})
);
export const BitbucketFindingSchema = BaseSecretScanningFindingSchema.extend({
resourceType: z.literal(SecretScanningResource.Repository),
dataSourceType: z.literal(SecretScanningDataSource.Bitbucket),
details: GitRepositoryScanFindingDetailsSchema
});
export const BitbucketDataSourceCredentialsSchema = z.object({
webhookId: z.string(),
webhookSecret: z.string()
});

View File

@@ -1,104 +0,0 @@
import crypto from "crypto";
import { TSecretScanningV2DALFactory } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-dal";
import { SecretScanningDataSource } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
import { TSecretScanningV2QueueServiceFactory } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-queue";
import { logger } from "@app/lib/logger";
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
import { KmsDataKey } from "@app/services/kms/kms-types";
import {
TBitbucketDataSource,
TBitbucketDataSourceCredentials,
TBitbucketPushEvent
} from "./bitbucket-secret-scanning-types";
/**
 * Bitbucket-specific secret scanning service.
 *
 * Currently exposes a single handler for Bitbucket push webhooks: it validates
 * the payload, loads the matching data source, verifies the webhook's HMAC
 * signature against the stored webhook secret, and queues a resource diff scan
 * when auto-scan is enabled and the repository is included in the config.
 */
export const bitbucketSecretScanningService = (
  secretScanningV2DAL: TSecretScanningV2DALFactory,
  secretScanningV2Queue: Pick<TSecretScanningV2QueueServiceFactory, "queueResourceDiffScan">,
  kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">
) => {
  const handlePushEvent = async (
    payload: TBitbucketPushEvent & { dataSourceId: string; receivedSignature: string; bodyString: string }
  ) => {
    const { push, repository, bodyString, receivedSignature } = payload;

    // Ignore events that carry no changes or lack the workspace identifier.
    if (!push?.changes?.length || !repository?.workspace?.uuid) {
      logger.warn(
        `secretScanningV2PushEvent: Bitbucket - Insufficient data [changes=${
          push?.changes?.length ?? 0
        }] [repository=${repository?.name}] [workspaceUuid=${repository?.workspace?.uuid}]`
      );
      return;
    }

    const dataSource = (await secretScanningV2DAL.dataSources.findOne({
      id: payload.dataSourceId,
      type: SecretScanningDataSource.Bitbucket
    })) as TBitbucketDataSource | undefined;

    if (!dataSource) {
      logger.error(
        `secretScanningV2PushEvent: Bitbucket - Could not find data source [workspaceUuid=${repository.workspace.uuid}]`
      );
      return;
    }

    const {
      isAutoScanEnabled,
      config: { includeRepos },
      encryptedCredentials,
      projectId
    } = dataSource;

    // Without stored credentials we cannot verify the webhook signature.
    if (!encryptedCredentials) {
      logger.info(
        `secretScanningV2PushEvent: Bitbucket - Could not find encrypted credentials [dataSourceId=${dataSource.id}] [workspaceUuid=${repository.workspace.uuid}]`
      );
      return;
    }

    const { decryptor } = await kmsService.createCipherPairWithDataKey({
      type: KmsDataKey.SecretManager,
      projectId
    });

    const decryptedCredentials = decryptor({ cipherTextBlob: encryptedCredentials });

    const credentials = JSON.parse(decryptedCredentials.toString()) as TBitbucketDataSourceCredentials;

    // Recompute the HMAC-SHA256 of the raw request body with the stored secret.
    const hmac = crypto.createHmac("sha256", credentials.webhookSecret);
    hmac.update(bodyString);
    const calculatedSignature = hmac.digest();

    // Compare signatures in constant time (crypto.timingSafeEqual) so an
    // attacker probing the endpoint with forged signatures cannot learn the
    // expected value byte-by-byte via timing differences. timingSafeEqual
    // throws on length mismatch, so guard lengths explicitly first.
    const receivedSignatureBuffer = Buffer.from(receivedSignature, "hex");
    const isSignatureValid =
      calculatedSignature.length === receivedSignatureBuffer.length &&
      crypto.timingSafeEqual(calculatedSignature, receivedSignatureBuffer);

    if (!isSignatureValid) {
      logger.error(
        `secretScanningV2PushEvent: Bitbucket - Invalid signature for webhook [dataSourceId=${dataSource.id}] [workspaceUuid=${repository.workspace.uuid}]`
      );
      return;
    }

    if (!isAutoScanEnabled) {
      logger.info(
        `secretScanningV2PushEvent: Bitbucket - ignoring due to auto scan disabled [dataSourceId=${dataSource.id}] [workspaceUuid=${repository.workspace.uuid}]`
      );
      return;
    }

    // Queue a diff scan only when the repo matches the include list ("*" = all).
    if (includeRepos.includes("*") || includeRepos.includes(repository.full_name)) {
      await secretScanningV2Queue.queueResourceDiffScan({
        dataSourceType: SecretScanningDataSource.Bitbucket,
        payload,
        dataSourceId: dataSource.id
      });
    } else {
      logger.info(
        `secretScanningV2PushEvent: Bitbucket - ignoring due to repository not being present in config [workspaceUuid=${repository.workspace.uuid}] [dataSourceId=${dataSource.id}]`
      );
    }
  };

  return {
    handlePushEvent
  };
};

View File

@@ -1,85 +0,0 @@
import { z } from "zod";
import { SecretScanningDataSource } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
import { TBitbucketConnection } from "@app/services/app-connection/bitbucket";
import {
BitbucketDataSourceCredentialsSchema,
BitbucketDataSourceListItemSchema,
BitbucketDataSourceSchema,
BitbucketFindingSchema,
CreateBitbucketDataSourceSchema
} from "./bitbucket-secret-scanning-schemas";
// Concrete types inferred from the Bitbucket secret scanning zod schemas.
export type TBitbucketDataSource = z.infer<typeof BitbucketDataSourceSchema>;
export type TBitbucketDataSourceInput = z.infer<typeof CreateBitbucketDataSourceSchema>;
export type TBitbucketDataSourceListItem = z.infer<typeof BitbucketDataSourceListItemSchema>;
export type TBitbucketDataSourceCredentials = z.infer<typeof BitbucketDataSourceCredentialsSchema>;
export type TBitbucketFinding = z.infer<typeof BitbucketFindingSchema>;
// Data source joined with its Bitbucket app connection.
export type TBitbucketDataSourceWithConnection = TBitbucketDataSource & {
  connection: TBitbucketConnection;
};
// Repository object embedded in a Bitbucket push webhook payload.
// NOTE(review): field set mirrors Bitbucket Cloud's repository entity — confirm
// against the Bitbucket webhook payload docs.
export type TBitbucketPushEventRepository = {
  full_name: string; // "workspace/repo" — matched against the data source's includeRepos config
  name: string;
  workspace: {
    slug: string;
    uuid: string;
  };
  uuid: string;
};
// A single commit carried inside a push-event change.
export type TBitbucketPushEventCommit = {
  hash: string;
  message: string;
  author: {
    raw: string; // raw author string — presumably "Name <email>"; TODO confirm format
    user?: {
      // present only when the author maps to a Bitbucket account — TODO confirm
      display_name: string;
      uuid: string;
      nickname: string;
    };
  };
  date: string;
};
// A single ref change (e.g. branch update) within a push event.
export type TBitbucketPushEventChange = {
  new?: {
    // state after the push — presumably absent when the ref was deleted; TODO confirm
    name: string;
    type: string;
  };
  old?: {
    // state before the push — presumably absent when the ref was created; TODO confirm
    name: string;
    type: string;
  };
  created: boolean;
  closed: boolean;
  forced: boolean;
  commits: TBitbucketPushEventCommit[];
};
// Top-level Bitbucket push webhook payload consumed by handlePushEvent.
export type TBitbucketPushEvent = {
  push: {
    changes: TBitbucketPushEventChange[];
  };
  repository: TBitbucketPushEventRepository;
  actor: {
    // the Bitbucket user who performed the push
    display_name: string;
    uuid: string;
    nickname: string;
  };
};
// Job payload for queueing a resource diff scan triggered by a Bitbucket push event.
export type TQueueBitbucketResourceDiffScan = {
  dataSourceType: SecretScanningDataSource.Bitbucket;
  payload: TBitbucketPushEvent & { dataSourceId: string };
  dataSourceId: string;
  resourceId: string;
  scanId: string;
};

View File

@@ -1,3 +0,0 @@
// Barrel file: re-exports the public surface of the Bitbucket secret scanning module.
export * from "./bitbucket-secret-scanning-constants";
export * from "./bitbucket-secret-scanning-schemas";
export * from "./bitbucket-secret-scanning-types";

View File

@@ -19,23 +19,18 @@ import {
TSecretScanningFactoryGetFullScanPath,
TSecretScanningFactoryInitialize,
TSecretScanningFactoryListRawResources,
TSecretScanningFactoryPostInitialization,
TSecretScanningFactoryTeardown
TSecretScanningFactoryPostInitialization
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-types";
import { getConfig } from "@app/lib/config/env";
import { BadRequestError } from "@app/lib/errors";
import { titleCaseToCamelCase } from "@app/lib/fn";
import { BasicRepositoryRegex } from "@app/lib/regex";
import { GitHubRepositoryRegex } from "@app/lib/regex";
import { listGitHubRadarRepositories, TGitHubRadarConnection } from "@app/services/app-connection/github-radar";
import {
TGitHubDataSourceInput,
TGitHubDataSourceWithConnection,
TQueueGitHubResourceDiffScan
} from "./github-secret-scanning-types";
import { TGitHubDataSourceWithConnection, TQueueGitHubResourceDiffScan } from "./github-secret-scanning-types";
export const GitHubSecretScanningFactory = () => {
const initialize: TSecretScanningFactoryInitialize<TGitHubDataSourceInput, TGitHubRadarConnection> = async (
const initialize: TSecretScanningFactoryInitialize<TGitHubRadarConnection> = async (
{ connection, secretScanningV2DAL },
callback
) => {
@@ -56,17 +51,10 @@ export const GitHubSecretScanningFactory = () => {
});
};
const postInitialization: TSecretScanningFactoryPostInitialization<
TGitHubDataSourceInput,
TGitHubRadarConnection
> = async () => {
const postInitialization: TSecretScanningFactoryPostInitialization<TGitHubRadarConnection> = async () => {
// no post-initialization required
};
const teardown: TSecretScanningFactoryTeardown<TGitHubDataSourceWithConnection> = async () => {
// no termination required
};
const listRawResources: TSecretScanningFactoryListRawResources<TGitHubDataSourceWithConnection> = async (
dataSource
) => {
@@ -119,7 +107,7 @@ export const GitHubSecretScanningFactory = () => {
const repoPath = join(tempFolder, "repo.git");
if (!BasicRepositoryRegex.test(resourceName)) {
if (!GitHubRepositoryRegex.test(resourceName)) {
throw new Error("Invalid GitHub repository name");
}
@@ -237,7 +225,6 @@ export const GitHubSecretScanningFactory = () => {
listRawResources,
getFullScanPath,
getDiffScanResourcePayload,
getDiffScanFindingsPayload,
teardown
getDiffScanFindingsPayload
};
};

View File

@@ -12,7 +12,7 @@ import {
GitRepositoryScanFindingDetailsSchema
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-schemas";
import { SecretScanningDataSources } from "@app/lib/api-docs";
import { BasicRepositoryRegex } from "@app/lib/regex";
import { GitHubRepositoryRegex } from "@app/lib/regex";
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
export const GitHubDataSourceConfigSchema = z.object({
@@ -22,7 +22,7 @@ export const GitHubDataSourceConfigSchema = z.object({
.string()
.min(1)
.max(256)
.refine((value) => value === "*" || BasicRepositoryRegex.test(value), "Invalid repository name format")
.refine((value) => value === "*" || GitHubRepositoryRegex.test(value), "Invalid repository name format")
)
.nonempty("One or more repositories required")
.max(100, "Cannot configure more than 100 repositories")

View File

@@ -1,6 +1,5 @@
export enum SecretScanningDataSource {
GitHub = "github",
Bitbucket = "bitbucket"
GitHub = "github"
}
export enum SecretScanningScanStatus {

View File

@@ -1,23 +1,19 @@
import { BitbucketSecretScanningFactory } from "@app/ee/services/secret-scanning-v2/bitbucket/bitbucket-secret-scanning-factory";
import { GitHubSecretScanningFactory } from "@app/ee/services/secret-scanning-v2/github/github-secret-scanning-factory";
import { SecretScanningDataSource } from "./secret-scanning-v2-enums";
import {
TQueueSecretScanningResourceDiffScan,
TSecretScanningDataSourceCredentials,
TSecretScanningDataSourceInput,
TSecretScanningDataSourceWithConnection,
TSecretScanningFactory
} from "./secret-scanning-v2-types";
type TSecretScanningFactoryImplementation = TSecretScanningFactory<
TSecretScanningDataSourceWithConnection,
TQueueSecretScanningResourceDiffScan["payload"],
TSecretScanningDataSourceInput,
TSecretScanningDataSourceCredentials
TSecretScanningDataSourceCredentials,
TQueueSecretScanningResourceDiffScan["payload"]
>;
export const SECRET_SCANNING_FACTORY_MAP: Record<SecretScanningDataSource, TSecretScanningFactoryImplementation> = {
[SecretScanningDataSource.GitHub]: GitHubSecretScanningFactory as TSecretScanningFactoryImplementation,
[SecretScanningDataSource.Bitbucket]: BitbucketSecretScanningFactory as TSecretScanningFactoryImplementation
[SecretScanningDataSource.GitHub]: GitHubSecretScanningFactory as TSecretScanningFactoryImplementation
};

View File

@@ -4,7 +4,6 @@ import RE2 from "re2";
import { readFindingsFile } from "@app/ee/services/secret-scanning/secret-scanning-queue/secret-scanning-fns";
import { SecretMatch } from "@app/ee/services/secret-scanning/secret-scanning-queue/secret-scanning-queue-types";
import { BITBUCKET_SECRET_SCANNING_DATA_SOURCE_LIST_OPTION } from "@app/ee/services/secret-scanning-v2/bitbucket";
import { GITHUB_SECRET_SCANNING_DATA_SOURCE_LIST_OPTION } from "@app/ee/services/secret-scanning-v2/github";
import { titleCaseToCamelCase } from "@app/lib/fn";
@@ -12,8 +11,7 @@ import { SecretScanningDataSource, SecretScanningFindingSeverity } from "./secre
import { TCloneRepository, TGetFindingsPayload, TSecretScanningDataSourceListItem } from "./secret-scanning-v2-types";
const SECRET_SCANNING_SOURCE_LIST_OPTIONS: Record<SecretScanningDataSource, TSecretScanningDataSourceListItem> = {
[SecretScanningDataSource.GitHub]: GITHUB_SECRET_SCANNING_DATA_SOURCE_LIST_OPTION,
[SecretScanningDataSource.Bitbucket]: BITBUCKET_SECRET_SCANNING_DATA_SOURCE_LIST_OPTION
[SecretScanningDataSource.GitHub]: GITHUB_SECRET_SCANNING_DATA_SOURCE_LIST_OPTION
};
export const listSecretScanningDataSourceOptions = () => {

View File

@@ -2,16 +2,13 @@ import { SecretScanningDataSource } from "@app/ee/services/secret-scanning-v2/se
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
export const SECRET_SCANNING_DATA_SOURCE_NAME_MAP: Record<SecretScanningDataSource, string> = {
[SecretScanningDataSource.GitHub]: "GitHub",
[SecretScanningDataSource.Bitbucket]: "Bitbucket"
[SecretScanningDataSource.GitHub]: "GitHub"
};
export const SECRET_SCANNING_DATA_SOURCE_CONNECTION_MAP: Record<SecretScanningDataSource, AppConnection> = {
[SecretScanningDataSource.GitHub]: AppConnection.GitHubRadar,
[SecretScanningDataSource.Bitbucket]: AppConnection.Bitbucket
[SecretScanningDataSource.GitHub]: AppConnection.GitHubRadar
};
export const AUTO_SYNC_DESCRIPTION_HELPER: Record<SecretScanningDataSource, { verb: string; noun: string }> = {
[SecretScanningDataSource.GitHub]: { verb: "push", noun: "repositories" },
[SecretScanningDataSource.Bitbucket]: { verb: "push", noun: "repositories" }
[SecretScanningDataSource.GitHub]: { verb: "push", noun: "repositories" }
};

View File

@@ -37,8 +37,7 @@ import {
TQueueSecretScanningDataSourceFullScan,
TQueueSecretScanningResourceDiffScan,
TQueueSecretScanningSendNotification,
TSecretScanningDataSourceWithConnection,
TSecretScanningFinding
TSecretScanningDataSourceWithConnection
} from "./secret-scanning-v2-types";
type TSecretRotationV2QueueServiceFactoryDep = {
@@ -460,16 +459,13 @@ export const secretScanningV2QueueServiceFactory = async ({
const newFindings = allFindings.filter((finding) => finding.scanId === scanId);
if (newFindings.length) {
const finding = newFindings[0] as TSecretScanningFinding;
await queueService.queuePg(QueueJobs.SecretScanningV2SendNotification, {
status: SecretScanningScanStatus.Completed,
resourceName: resource.name,
isDiffScan: true,
dataSource,
numberOfSecrets: newFindings.length,
scanId,
authorName: finding?.details?.author,
authorEmail: finding?.details?.email
scanId
});
}
@@ -586,8 +582,8 @@ export const secretScanningV2QueueServiceFactory = async ({
substitutions:
payload.status === SecretScanningScanStatus.Completed
? {
authorName: payload.authorName,
authorEmail: payload.authorEmail,
authorName: "Jim",
authorEmail: "jim@infisical.com",
resourceName,
numberOfSecrets: payload.numberOfSecrets,
isDiffScan: payload.isDiffScan,

View File

@@ -19,7 +19,8 @@ export const BaseSecretScanningDataSourceSchema = ({
// unique to provider
type: true,
connectionId: true,
config: true
config: true,
encryptedCredentials: true
}).extend({
type: z.literal(type),
connectionId: isConnectionRequired ? z.string().uuid() : z.null(),

View File

@@ -30,8 +30,6 @@ import {
TFindSecretScanningDataSourceByNameDTO,
TListSecretScanningDataSourcesByProjectId,
TSecretScanningDataSource,
TSecretScanningDataSourceCredentials,
TSecretScanningDataSourceInput,
TSecretScanningDataSourceWithConnection,
TSecretScanningDataSourceWithDetails,
TSecretScanningFinding,
@@ -51,7 +49,6 @@ import { TAppConnection } from "@app/services/app-connection/app-connection-type
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
import { KmsDataKey } from "@app/services/kms/kms-types";
import { bitbucketSecretScanningService } from "./bitbucket/bitbucket-secret-scanning-service";
import { TSecretScanningV2DALFactory } from "./secret-scanning-v2-dal";
import { TSecretScanningV2QueueServiceFactory } from "./secret-scanning-v2-queue";
@@ -259,7 +256,7 @@ export const secretScanningV2ServiceFactory = ({
try {
const createdDataSource = await factory.initialize(
{
payload: payload as TSecretScanningDataSourceInput,
payload,
connection: connection as TSecretScanningDataSourceWithConnection["connection"],
secretScanningV2DAL
},
@@ -290,7 +287,7 @@ export const secretScanningV2ServiceFactory = ({
);
await factory.postInitialization({
payload: payload as TSecretScanningDataSourceInput,
payload,
connection: connection as TSecretScanningDataSourceWithConnection["connection"],
dataSourceId: dataSource.id,
credentials
@@ -401,6 +398,7 @@ export const secretScanningV2ServiceFactory = ({
actorId: actor.id,
actorAuthMethod: actor.authMethod,
actorOrgId: actor.orgId,
projectId: dataSource.projectId
});
@@ -414,36 +412,7 @@ export const secretScanningV2ServiceFactory = ({
message: `Secret Scanning Data Source with ID "${dataSourceId}" is not configured for ${SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type]}`
});
const factory = SECRET_SCANNING_FACTORY_MAP[type]();
let connection: TAppConnection | null = null;
if (dataSource.connection) {
connection = await decryptAppConnection(dataSource.connection, kmsService);
}
let credentials: TSecretScanningDataSourceCredentials | undefined;
if (dataSource.encryptedCredentials) {
const { decryptor } = await kmsService.createCipherPairWithDataKey({
type: KmsDataKey.SecretManager,
projectId: dataSource.projectId
});
credentials = JSON.parse(
decryptor({
cipherTextBlob: dataSource.encryptedCredentials
}).toString()
) as TSecretScanningDataSourceCredentials;
}
await factory.teardown({
dataSource: {
...dataSource,
// @ts-expect-error currently we don't have a null connection data source
connection
},
credentials
});
// TODO: clean up webhooks
await secretScanningV2DAL.dataSources.deleteById(dataSourceId);
@@ -900,7 +869,6 @@ export const secretScanningV2ServiceFactory = ({
updateSecretScanningFindingById,
findSecretScanningConfigByProjectId,
upsertSecretScanningConfig,
github: githubSecretScanningService(secretScanningV2DAL, secretScanningV2Queue),
bitbucket: bitbucketSecretScanningService(secretScanningV2DAL, secretScanningV2Queue, kmsService)
github: githubSecretScanningService(secretScanningV2DAL, secretScanningV2Queue)
};
};

View File

@@ -4,15 +4,6 @@ import {
TSecretScanningResources,
TSecretScanningScans
} from "@app/db/schemas";
import {
TBitbucketDataSource,
TBitbucketDataSourceCredentials,
TBitbucketDataSourceInput,
TBitbucketDataSourceListItem,
TBitbucketDataSourceWithConnection,
TBitbucketFinding,
TQueueBitbucketResourceDiffScan
} from "@app/ee/services/secret-scanning-v2/bitbucket";
import {
TGitHubDataSource,
TGitHubDataSourceInput,
@@ -28,7 +19,7 @@ import {
SecretScanningScanStatus
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
export type TSecretScanningDataSource = TGitHubDataSource | TBitbucketDataSource;
export type TSecretScanningDataSource = TGitHubDataSource;
export type TSecretScanningDataSourceWithDetails = TSecretScanningDataSource & {
lastScannedAt?: Date | null;
@@ -50,17 +41,13 @@ export type TSecretScanningScanWithDetails = TSecretScanningScans & {
resourceName: string;
};
export type TSecretScanningDataSourceWithConnection =
| TGitHubDataSourceWithConnection
| TBitbucketDataSourceWithConnection;
export type TSecretScanningDataSourceWithConnection = TGitHubDataSourceWithConnection;
export type TSecretScanningDataSourceInput = TGitHubDataSourceInput | TBitbucketDataSourceInput;
export type TSecretScanningDataSourceInput = TGitHubDataSourceInput;
export type TSecretScanningDataSourceListItem = TGitHubDataSourceListItem | TBitbucketDataSourceListItem;
export type TSecretScanningDataSourceListItem = TGitHubDataSourceListItem;
export type TSecretScanningDataSourceCredentials = TBitbucketDataSourceCredentials | undefined;
export type TSecretScanningFinding = TGitHubFinding | TBitbucketFinding;
export type TSecretScanningFinding = TGitHubFinding;
export type TListSecretScanningDataSourcesByProjectId = {
projectId: string;
@@ -112,21 +99,14 @@ export type TQueueSecretScanningDataSourceFullScan = {
scanId: string;
};
export type TQueueSecretScanningResourceDiffScan = TQueueGitHubResourceDiffScan | TQueueBitbucketResourceDiffScan;
export type TQueueSecretScanningResourceDiffScan = TQueueGitHubResourceDiffScan;
export type TQueueSecretScanningSendNotification = {
dataSource: TSecretScanningDataSources;
resourceName: string;
} & (
| { status: SecretScanningScanStatus.Failed; errorMessage: string }
| {
status: SecretScanningScanStatus.Completed;
numberOfSecrets: number;
scanId: string;
isDiffScan: boolean;
authorName?: string;
authorEmail?: string;
}
| { status: SecretScanningScanStatus.Completed; numberOfSecrets: number; scanId: string; isDiffScan: boolean }
);
export type TCloneRepository = {
@@ -158,12 +138,11 @@ export type TSecretScanningDataSourceRaw = NonNullable<
>;
export type TSecretScanningFactoryInitialize<
P extends TSecretScanningDataSourceInput,
T extends TSecretScanningDataSourceWithConnection["connection"] | undefined = undefined,
C extends TSecretScanningDataSourceCredentials = undefined
> = (
params: {
payload: P;
payload: TCreateSecretScanningDataSourceDTO;
connection: T;
secretScanningV2DAL: TSecretScanningV2DALFactory;
},
@@ -171,27 +150,24 @@ export type TSecretScanningFactoryInitialize<
) => Promise<TSecretScanningDataSourceRaw>;
export type TSecretScanningFactoryPostInitialization<
P extends TSecretScanningDataSourceInput,
T extends TSecretScanningDataSourceWithConnection["connection"] | undefined = undefined,
C extends TSecretScanningDataSourceCredentials = undefined
> = (params: { payload: P; connection: T; credentials: C; dataSourceId: string }) => Promise<void>;
export type TSecretScanningFactoryTeardown<
T extends TSecretScanningDataSourceWithConnection,
C extends TSecretScanningDataSourceCredentials = undefined
> = (params: { dataSource: T; credentials: C }) => Promise<void>;
> = (params: {
payload: TCreateSecretScanningDataSourceDTO;
connection: T;
credentials: C;
dataSourceId: string;
}) => Promise<void>;
export type TSecretScanningFactory<
T extends TSecretScanningDataSourceWithConnection,
P extends TQueueSecretScanningResourceDiffScan["payload"],
I extends TSecretScanningDataSourceInput,
C extends TSecretScanningDataSourceCredentials | undefined = undefined
C extends TSecretScanningDataSourceCredentials,
P extends TQueueSecretScanningResourceDiffScan["payload"]
> = () => {
listRawResources: TSecretScanningFactoryListRawResources<T>;
getFullScanPath: TSecretScanningFactoryGetFullScanPath<T>;
initialize: TSecretScanningFactoryInitialize<I, T["connection"] | undefined, C>;
postInitialization: TSecretScanningFactoryPostInitialization<I, T["connection"] | undefined, C>;
teardown: TSecretScanningFactoryTeardown<T, C>;
initialize: TSecretScanningFactoryInitialize<T["connection"] | undefined, C>;
postInitialization: TSecretScanningFactoryPostInitialization<T["connection"] | undefined, C>;
getDiffScanResourcePayload: TSecretScanningFactoryGetDiffScanResourcePayload<P>;
getDiffScanFindingsPayload: TSecretScanningFactoryGetDiffScanFindingsPayload<T, P>;
};
@@ -209,3 +185,5 @@ export type TUpsertSecretScanningConfigDTO = {
projectId: string;
content: string | null;
};
export type TSecretScanningDataSourceCredentials = undefined;

View File

@@ -1,22 +1,7 @@
import { z } from "zod";
import { BitbucketDataSourceSchema, BitbucketFindingSchema } from "@app/ee/services/secret-scanning-v2/bitbucket";
import { GitHubDataSourceSchema, GitHubFindingSchema } from "@app/ee/services/secret-scanning-v2/github";
export const SecretScanningDataSourceSchema = z.discriminatedUnion("type", [
GitHubDataSourceSchema,
BitbucketDataSourceSchema
]);
export const SecretScanningDataSourceSchema = z.discriminatedUnion("type", [GitHubDataSourceSchema]);
export const SecretScanningFindingSchema = z.discriminatedUnion("dataSourceType", [
GitHubFindingSchema.describe(
JSON.stringify({
title: "GitHub"
})
),
BitbucketFindingSchema.describe(
JSON.stringify({
title: "Bitbucket"
})
)
]);
export const SecretScanningFindingSchema = z.discriminatedUnion("resourceType", [GitHubFindingSchema]);

View File

@@ -66,10 +66,7 @@ export enum ApiDocsTags {
KmsKeys = "KMS Keys",
KmsEncryption = "KMS Encryption",
KmsSigning = "KMS Signing",
SecretScanning = "Secret Scanning",
OidcSso = "OIDC SSO",
SamlSso = "SAML SSO",
LdapSso = "LDAP SSO"
SecretScanning = "Secret Scanning"
}
export const GROUPS = {
@@ -2272,16 +2269,9 @@ export const AppConnections = {
code: "The OAuth code to use to connect with GitLab.",
accessTokenType: "The type of token used to connect with GitLab."
},
BITBUCKET: {
email: "The email used to access Bitbucket.",
apiToken: "The API token used to access Bitbucket."
},
ZABBIX: {
apiToken: "The API Token used to access Zabbix.",
instanceUrl: "The Zabbix instance URL to connect with."
},
RAILWAY: {
apiToken: "The API token used to authenticate with Railway."
}
}
};
@@ -2477,14 +2467,6 @@ export const SecretSyncs = {
hostId: "The ID of the Zabbix host to sync secrets to.",
hostName: "The name of the Zabbix host to sync secrets to.",
macroType: "The type of macro to sync secrets to. (0: Text, 1: Secret)"
},
RAILWAY: {
projectId: "The ID of the Railway project to sync secrets to.",
projectName: "The name of the Railway project to sync secrets to.",
environmentId: "The Railway environment to sync secrets to.",
environmentName: "The Railway environment to sync secrets to.",
serviceId: "The Railway service that secrets should be synced to.",
serviceName: "The Railway service that secrets should be synced to."
}
}
};
@@ -2605,9 +2587,7 @@ export const SecretRotations = {
export const SecretScanningDataSources = {
LIST: (type?: SecretScanningDataSource) => ({
projectId: `The ID of the project to list ${
type ? SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type] : "Scanning"
} Data Sources from.`
projectId: `The ID of the project to list ${type ? SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type] : "Scanning"} Data Sources from.`
}),
GET_BY_ID: (type: SecretScanningDataSource) => ({
dataSourceId: `The ID of the ${SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type]} Data Source to retrieve.`
@@ -2658,10 +2638,6 @@ export const SecretScanningDataSources = {
CONFIG: {
GITHUB: {
includeRepos: 'The repositories to include when scanning. Defaults to all repositories (["*"]).'
},
BITBUCKET: {
workspaceSlug: "The workspace to scan.",
includeRepos: 'The repositories to include when scanning. Defaults to all repositories (["*"]).'
}
}
};
@@ -2686,113 +2662,3 @@ export const SecretScanningConfigs = {
content: "The contents of the Secret Scanning Configuration file."
}
};
export const OidcSSo = {
GET_CONFIG: {
organizationId: "The ID of the organization to get the OIDC config for."
},
UPDATE_CONFIG: {
organizationId: "The ID of the organization to update the OIDC config for.",
allowedEmailDomains:
"A list of allowed email domains that users can use to authenticate with. This field is comma separated. Example: 'example.com,acme.com'",
discoveryURL: "The URL of the OIDC discovery endpoint.",
configurationType: "The configuration type to use for the OIDC configuration.",
issuer:
"The issuer for the OIDC configuration. This is only supported when the OIDC configuration type is set to 'custom'.",
authorizationEndpoint:
"The endpoint to use for OIDC authorization. This is only supported when the OIDC configuration type is set to 'custom'.",
jwksUri: "The URL of the OIDC JWKS endpoint.",
tokenEndpoint: "The token endpoint to use for OIDC token exchange.",
userinfoEndpoint: "The userinfo endpoint to get user information from the OIDC provider.",
clientId: "The client ID to use for OIDC authentication.",
clientSecret: "The client secret to use for OIDC authentication.",
isActive: "Whether to enable or disable this OIDC configuration.",
manageGroupMemberships:
"Whether to manage group memberships for the OIDC configuration. If enabled, users will automatically be assigned groups when they sign in, based on which groups they are a member of in the OIDC provider.",
jwtSignatureAlgorithm: "The algorithm to use for JWT signature verification."
},
CREATE_CONFIG: {
organizationId: "The ID of the organization to create the OIDC config for.",
allowedEmailDomains:
"A list of allowed email domains that users can use to authenticate with. This field is comma separated.",
discoveryURL: "The URL of the OIDC discovery endpoint.",
configurationType: "The configuration type to use for the OIDC configuration.",
issuer:
"The issuer for the OIDC configuration. This is only supported when the OIDC configuration type is set to 'custom'.",
authorizationEndpoint:
"The authorization endpoint to use for OIDC authorization. This is only supported when the OIDC configuration type is set to 'custom'.",
jwksUri: "The URL of the OIDC JWKS endpoint.",
tokenEndpoint: "The token endpoint to use for OIDC token exchange.",
userinfoEndpoint: "The userinfo endpoint to get user information from the OIDC provider.",
clientId: "The client ID to use for OIDC authentication.",
clientSecret: "The client secret to use for OIDC authentication.",
isActive: "Whether to enable or disable this OIDC configuration.",
manageGroupMemberships:
"Whether to manage group memberships for the OIDC configuration. If enabled, users will automatically be assigned groups when they sign in, based on which groups they are a member of in the OIDC provider.",
jwtSignatureAlgorithm: "The algorithm to use for JWT signature verification."
}
};
export const SamlSso = {
GET_CONFIG: {
organizationId: "The ID of the organization to get the SAML config for."
},
UPDATE_CONFIG: {
organizationId: "The ID of the organization to update the SAML config for.",
authProvider: "Authentication provider to use for SAML authentication.",
isActive: "Whether to enable or disable this SAML configuration.",
entryPoint:
"The entry point for the SAML authentication. This is the URL that the user will be redirected to after they have authenticated with the SAML provider.",
issuer: "The SAML provider issuer URL or entity ID.",
cert: "The certificate to use for SAML authentication."
},
CREATE_CONFIG: {
organizationId: "The ID of the organization to create the SAML config for.",
authProvider: "Authentication provider to use for SAML authentication.",
isActive: "Whether to enable or disable this SAML configuration.",
entryPoint:
"The entry point for the SAML authentication. This is the URL that the user will be redirected to after they have authenticated with the SAML provider.",
issuer: "The SAML provider issuer URL or entity ID.",
cert: "The certificate to use for SAML authentication."
}
};
export const LdapSso = {
GET_CONFIG: {
organizationId: "The ID of the organization to get the LDAP config for."
},
CREATE_CONFIG: {
organizationId: "The ID of the organization to create the LDAP config for.",
isActive: "Whether to enable or disable this LDAP configuration.",
url: "The LDAP server to connect to such as `ldap://ldap.your-org.com`, `ldaps://ldap.myorg.com:636` (for connection over SSL/TLS), etc.",
bindDN:
"The distinguished name of the object to bind when performing the user search such as `cn=infisical,ou=Users,dc=acme,dc=com`",
bindPass: "The password to use along with Bind DN when performing the user search.",
searchBase: "The base DN to use for the user search such as `ou=Users,dc=acme,dc=com`",
uniqueUserAttribute:
"The attribute to use as the unique identifier of LDAP users such as `sAMAccountName`, `cn`, `uid`, `objectGUID`. If left blank, defaults to uidNumber",
searchFilter:
"The template used to construct the LDAP user search filter such as `(uid={{username}})` uses literal `{{username}}` to have the given username used in the search. The default is `(uid={{username}})` which is compatible with several common directory schemas.",
groupSearchBase: "LDAP search base to use for group membership search such as `ou=Groups,dc=acme,dc=com`",
groupSearchFilter:
"The template used when constructing the group membership query such as `(&(objectClass=posixGroup)(memberUid={{.Username}}))`. The template can access the following context variables: `[UserDN, UserName]`. The default is `(|(memberUid={{.Username}})(member={{.UserDN}})(uniqueMember={{.UserDN}}))` which is compatible with several common directory schemas.",
caCert: "The CA certificate to use when verifying the LDAP server certificate."
},
UPDATE_CONFIG: {
organizationId: "The ID of the organization to update the LDAP config for.",
isActive: "Whether to enable or disable this LDAP configuration.",
url: "The LDAP server to connect to such as `ldap://ldap.your-org.com`, `ldaps://ldap.myorg.com:636` (for connection over SSL/TLS), etc.",
bindDN:
"The distinguished name of object to bind when performing the user search such as `cn=infisical,ou=Users,dc=acme,dc=com`",
bindPass: "The password to use along with Bind DN when performing the user search.",
uniqueUserAttribute:
"The attribute to use as the unique identifier of LDAP users such as `sAMAccountName`, `cn`, `uid`, `objectGUID`. If left blank, defaults to uidNumber",
searchFilter:
"The template used to construct the LDAP user search filter such as `(uid={{username}})` uses literal `{{username}}` to have the given username used in the search. The default is `(uid={{username}})` which is compatible with several common directory schemas.",
searchBase: "The base DN to use for the user search such as `ou=Users,dc=acme,dc=com`",
groupSearchBase: "LDAP search base to use for group membership search such as `ou=Groups,dc=acme,dc=com`",
groupSearchFilter:
"The template used when constructing the group membership query such as `(&(objectClass=posixGroup)(memberUid={{.Username}}))`. The template can access the following context variables: `[UserDN, UserName]`. The default is `(|(memberUid={{.Username}})(member={{.UserDN}})(uniqueMember={{.UserDN}}))` which is compatible with several common directory schemas.",
caCert: "The CA certificate to use when verifying the LDAP server certificate."
}
};

View File

@@ -2,7 +2,6 @@ import { z } from "zod";
import { QueueWorkerProfile } from "@app/lib/types";
import { BadRequestError } from "../errors";
import { removeTrailingSlash } from "../fn";
import { CustomLogger } from "../logger/logger";
import { zpStr } from "../zod";
@@ -28,7 +27,6 @@ const databaseReadReplicaSchema = z
const envSchema = z
.object({
INFISICAL_PLATFORM_VERSION: zpStr(z.string().optional()),
KUBERNETES_AUTO_FETCH_SERVICE_ACCOUNT_TOKEN: zodStrBool.default("false"),
PORT: z.coerce.number().default(IS_PACKAGED ? 8080 : 4000),
DISABLE_SECRET_SCANNING: z
.enum(["true", "false"])
@@ -343,11 +341,8 @@ const envSchema = z
export type TEnvConfig = Readonly<z.infer<typeof envSchema>>;
let envCfg: TEnvConfig;
let originalEnvConfig: TEnvConfig;
export const getConfig = () => envCfg;
export const getOriginalConfig = () => originalEnvConfig;
// cannot import singleton logger directly as it needs config to load various transport
export const initEnvConfig = (logger?: CustomLogger) => {
const parsedEnv = envSchema.safeParse(process.env);
@@ -357,217 +352,10 @@ export const initEnvConfig = (logger?: CustomLogger) => {
process.exit(-1);
}
const config = Object.freeze(parsedEnv.data);
envCfg = config;
if (!originalEnvConfig) {
originalEnvConfig = config;
}
envCfg = Object.freeze(parsedEnv.data);
return envCfg;
};
/**
 * Grouped catalogue of environment variables that can be overwritten at
 * runtime (e.g. via the server admin panel). Each group carries a display
 * name and the list of env keys it controls, with human-readable
 * descriptions surfaced in the UI.
 */
// A list of environment variables that can be overwritten
export const overwriteSchema: {
  [key: string]: {
    name: string;
    fields: { key: keyof TEnvConfig; description?: string }[];
  };
} = {
  aws: {
    name: "AWS",
    fields: [
      {
        key: "INF_APP_CONNECTION_AWS_ACCESS_KEY_ID",
        description: "The Access Key ID of your AWS account."
      },
      {
        key: "INF_APP_CONNECTION_AWS_SECRET_ACCESS_KEY",
        // Fixed copy-paste error: AWS credentials are an Access Key ID / Secret
        // Access Key pair, not an Azure-style application client secret.
        description: "The Secret Access Key of your AWS account."
      }
    ]
  },
  azure: {
    name: "Azure",
    fields: [
      {
        key: "INF_APP_CONNECTION_AZURE_CLIENT_ID",
        description: "The Application (Client) ID of your Azure application."
      },
      {
        key: "INF_APP_CONNECTION_AZURE_CLIENT_SECRET",
        description: "The Client Secret of your Azure application."
      }
    ]
  },
  gcp: {
    name: "GCP",
    fields: [
      {
        key: "INF_APP_CONNECTION_GCP_SERVICE_ACCOUNT_CREDENTIAL",
        description: "The GCP Service Account JSON credentials."
      }
    ]
  },
  github_app: {
    name: "GitHub App",
    fields: [
      {
        key: "INF_APP_CONNECTION_GITHUB_APP_CLIENT_ID",
        description: "The Client ID of your GitHub application."
      },
      {
        key: "INF_APP_CONNECTION_GITHUB_APP_CLIENT_SECRET",
        description: "The Client Secret of your GitHub application."
      },
      {
        key: "INF_APP_CONNECTION_GITHUB_APP_SLUG",
        description: "The Slug of your GitHub application. This is the one found in the URL."
      },
      {
        key: "INF_APP_CONNECTION_GITHUB_APP_ID",
        description: "The App ID of your GitHub application."
      },
      {
        key: "INF_APP_CONNECTION_GITHUB_APP_PRIVATE_KEY",
        description: "The Private Key of your GitHub application."
      }
    ]
  },
  github_oauth: {
    name: "GitHub OAuth",
    fields: [
      {
        key: "INF_APP_CONNECTION_GITHUB_OAUTH_CLIENT_ID",
        description: "The Client ID of your GitHub OAuth application."
      },
      {
        key: "INF_APP_CONNECTION_GITHUB_OAUTH_CLIENT_SECRET",
        description: "The Client Secret of your GitHub OAuth application."
      }
    ]
  },
  github_radar_app: {
    name: "GitHub Radar App",
    fields: [
      {
        key: "INF_APP_CONNECTION_GITHUB_RADAR_APP_CLIENT_ID",
        description: "The Client ID of your GitHub application."
      },
      {
        key: "INF_APP_CONNECTION_GITHUB_RADAR_APP_CLIENT_SECRET",
        description: "The Client Secret of your GitHub application."
      },
      {
        key: "INF_APP_CONNECTION_GITHUB_RADAR_APP_SLUG",
        description: "The Slug of your GitHub application. This is the one found in the URL."
      },
      {
        key: "INF_APP_CONNECTION_GITHUB_RADAR_APP_ID",
        description: "The App ID of your GitHub application."
      },
      {
        key: "INF_APP_CONNECTION_GITHUB_RADAR_APP_PRIVATE_KEY",
        description: "The Private Key of your GitHub application."
      },
      {
        key: "INF_APP_CONNECTION_GITHUB_RADAR_APP_WEBHOOK_SECRET",
        description: "The Webhook Secret of your GitHub application."
      }
    ]
  },
  github_sso: {
    name: "GitHub SSO",
    fields: [
      {
        key: "CLIENT_ID_GITHUB_LOGIN",
        description: "The Client ID of your GitHub OAuth application."
      },
      {
        key: "CLIENT_SECRET_GITHUB_LOGIN",
        description: "The Client Secret of your GitHub OAuth application."
      }
    ]
  },
  gitlab_oauth: {
    name: "GitLab OAuth",
    fields: [
      {
        key: "INF_APP_CONNECTION_GITLAB_OAUTH_CLIENT_ID",
        description: "The Client ID of your GitLab OAuth application."
      },
      {
        key: "INF_APP_CONNECTION_GITLAB_OAUTH_CLIENT_SECRET",
        description: "The Client Secret of your GitLab OAuth application."
      }
    ]
  },
  gitlab_sso: {
    name: "GitLab SSO",
    fields: [
      {
        key: "CLIENT_ID_GITLAB_LOGIN",
        description: "The Client ID of your GitLab application."
      },
      {
        key: "CLIENT_SECRET_GITLAB_LOGIN",
        description: "The Secret of your GitLab application."
      },
      {
        key: "CLIENT_GITLAB_LOGIN_URL",
        description:
          "The URL of your self-hosted instance of GitLab where the OAuth application is registered. If no URL is passed in, this will default to https://gitlab.com."
      }
    ]
  },
  google_sso: {
    name: "Google SSO",
    fields: [
      {
        key: "CLIENT_ID_GOOGLE_LOGIN",
        description: "The Client ID of your GCP OAuth2 application."
      },
      {
        key: "CLIENT_SECRET_GOOGLE_LOGIN",
        description: "The Client Secret of your GCP OAuth2 application."
      }
    ]
  }
};
// Flat allow-list of every env key that may be overridden at runtime,
// derived from the grouped overwriteSchema definition.
export const overridableKeys = new Set(
  Object.values(overwriteSchema).flatMap(({ fields }) => fields.map(({ key }) => key))
);
/**
 * Validates a set of proposed env-var overrides without applying them.
 *
 * Keys outside the overridable allow-list are silently ignored. The remaining
 * overrides are merged on top of the current process environment and re-run
 * through the full env schema.
 *
 * @throws BadRequestError listing every failing key/message pair when the
 *         merged environment does not validate.
 */
export const validateOverrides = (config: Record<string, string>) => {
  // Keep only keys that are explicitly allowed to be overridden.
  const permitted: Record<string, string> = {};
  for (const [key, value] of Object.entries(config)) {
    if (overridableKeys.has(key as keyof z.input<typeof envSchema>)) {
      permitted[key] = value;
    }
  }

  const candidateEnv: Record<string, unknown> = { ...process.env, ...permitted };
  const result = envSchema.safeParse(candidateEnv);
  if (result.success) return;

  // Aggregate all schema violations into a single human-readable message.
  const details = result.error.issues
    .map((issue) => `Key: "${issue.path.join(".")}", Error: ${issue.message}`)
    .join("\n");
  throw new BadRequestError({ message: details });
};
/**
 * Applies allow-listed env-var overrides to the live, in-memory config.
 *
 * The merged environment is revalidated against the env schema first; if
 * validation fails, the current config is left untouched and no error is
 * raised (best-effort apply).
 */
export const overrideEnvConfig = (config: Record<string, string>) => {
  // Keep only keys that are explicitly allowed to be overridden.
  const permitted: Record<string, string> = {};
  for (const [key, value] of Object.entries(config)) {
    if (overridableKeys.has(key as keyof z.input<typeof envSchema>)) {
      permitted[key] = value;
    }
  }

  const candidateEnv: Record<string, unknown> = { ...process.env, ...permitted };
  const result = envSchema.safeParse(candidateEnv);
  if (result.success) {
    // Swap in the new frozen config atomically.
    envCfg = Object.freeze(result.data);
  }
};
export const formatSmtpConfig = () => {
const tlsOptions: {
rejectUnauthorized: boolean;

View File

@@ -1,18 +1,11 @@
import axios, { AxiosInstance, CreateAxiosDefaults } from "axios";
import axiosRetry, { IAxiosRetryConfig } from "axios-retry";
import axios from "axios";
import axiosRetry from "axios-retry";
export function createRequestClient(defaults: CreateAxiosDefaults = {}, retry: IAxiosRetryConfig = {}): AxiosInstance {
const client = axios.create(defaults);
export const request = axios.create();
axiosRetry(client, {
retries: 3,
// eslint-disable-next-line
retryDelay: axiosRetry.exponentialDelay,
retryCondition: (err) => axiosRetry.isNetworkError(err) || axiosRetry.isRetryableError(err),
...retry
});
return client;
}
export const request = createRequestClient();
axiosRetry(request, {
retries: 3,
// eslint-disable-next-line
retryDelay: axiosRetry.exponentialDelay,
retryCondition: (err) => axiosRetry.isNetworkError(err) || axiosRetry.isRetryableError(err)
});

View File

@@ -10,4 +10,4 @@ export const UserPrincipalNameRegex = new RE2(/^[a-zA-Z0-9._%+-]+@[a-zA-Z0-9._-]
export const LdapUrlRegex = new RE2(/^ldaps?:\/\//);
export const BasicRepositoryRegex = new RE2(/^[a-zA-Z0-9._-]+\/[a-zA-Z0-9._-]+$/);
export const GitHubRepositoryRegex = new RE2(/^[a-zA-Z0-9._-]+\/[a-zA-Z0-9._-]+$/);

View File

@@ -1,9 +1,7 @@
import type { EmitterWebhookEventName } from "@octokit/webhooks/dist-types/types";
import { PushEvent } from "@octokit/webhooks-types";
import { Probot } from "probot";
import { z } from "zod";
import { TBitbucketPushEvent } from "@app/ee/services/secret-scanning-v2/bitbucket/bitbucket-secret-scanning-types";
import { getConfig } from "@app/lib/config/env";
import { logger } from "@app/lib/logger";
import { writeLimit } from "@app/server/config/rateLimiter";
@@ -65,52 +63,4 @@ export const registerSecretScanningV2Webhooks = async (server: FastifyZodProvide
return res.send("ok");
}
});
// bitbucket push event webhook
server.route({
method: "POST",
url: "/bitbucket",
schema: {
querystring: z.object({
dataSourceId: z.string().min(1, { message: "Data Source ID is required" })
}),
headers: z
.object({
"x-hub-signature": z.string().min(1, { message: "X-Hub-Signature header is required" })
})
.passthrough()
},
config: {
rateLimit: writeLimit
},
handler: async (req, res) => {
const { dataSourceId } = req.query;
// Verify signature
const signature = req.headers["x-hub-signature"];
if (!signature) {
logger.error("Missing X-Hub-Signature header for Bitbucket webhook");
return res.status(401).send({ message: "Unauthorized: Missing signature" });
}
const expectedSignaturePrefix = "sha256=";
if (!signature.startsWith(expectedSignaturePrefix)) {
logger.error({ signature }, "Invalid X-Hub-Signature format for Bitbucket webhook");
return res.status(401).send({ message: "Unauthorized: Invalid signature format" });
}
const receivedSignature = signature.substring(expectedSignaturePrefix.length);
if (!dataSourceId) return res.status(400).send({ message: "Data Source ID is required" });
await server.services.secretScanningV2.bitbucket.handlePushEvent({
...(req.body as TBitbucketPushEvent),
dataSourceId,
receivedSignature,
bodyString: JSON.stringify(req.body)
});
return res.send("ok");
}
});
};

View File

@@ -300,7 +300,6 @@ import { injectIdentity } from "../plugins/auth/inject-identity";
import { injectPermission } from "../plugins/auth/inject-permission";
import { injectRateLimits } from "../plugins/inject-rate-limits";
import { registerV1Routes } from "./v1";
import { initializeOauthConfigSync } from "./v1/sso-router";
import { registerV2Routes } from "./v2";
import { registerV3Routes } from "./v3";
@@ -1911,7 +1910,6 @@ export const registerRoutes = async (
await hsmService.startService();
await telemetryQueue.startTelemetryCheck();
await telemetryQueue.startAggregatedEventsJob();
await dailyResourceCleanUp.startCleanUp();
await dailyExpiringPkiItemAlert.startSendingAlerts();
await pkiSubscriberQueue.startDailyAutoRenewalJob();
@@ -2048,16 +2046,6 @@ export const registerRoutes = async (
}
}
const configSyncJob = await superAdminService.initializeEnvConfigSync();
if (configSyncJob) {
cronJobs.push(configSyncJob);
}
const oauthConfigSyncJob = await initializeOauthConfigSync();
if (oauthConfigSyncJob) {
cronJobs.push(oauthConfigSyncJob);
}
server.decorate<FastifyZodProvider["store"]>("store", {
user: userDAL,
kmipClient: kmipClientDAL

View File

@@ -8,7 +8,7 @@ import {
SuperAdminSchema,
UsersSchema
} from "@app/db/schemas";
import { getConfig, overridableKeys } from "@app/lib/config/env";
import { getConfig } from "@app/lib/config/env";
import { BadRequestError } from "@app/lib/errors";
import { invalidateCacheLimit, readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { getTelemetryDistinctId } from "@app/server/lib/telemetry";
@@ -42,15 +42,13 @@ export const registerAdminRouter = async (server: FastifyZodProvider) => {
encryptedGitHubAppConnectionClientSecret: true,
encryptedGitHubAppConnectionSlug: true,
encryptedGitHubAppConnectionId: true,
encryptedGitHubAppConnectionPrivateKey: true,
encryptedEnvOverrides: true
encryptedGitHubAppConnectionPrivateKey: true
}).extend({
isMigrationModeOn: z.boolean(),
defaultAuthOrgSlug: z.string().nullable(),
defaultAuthOrgAuthEnforced: z.boolean().nullish(),
defaultAuthOrgAuthMethod: z.string().nullish(),
isSecretScanningDisabled: z.boolean(),
kubernetesAutoFetchServiceAccountToken: z.boolean()
isSecretScanningDisabled: z.boolean()
})
})
}
@@ -62,8 +60,7 @@ export const registerAdminRouter = async (server: FastifyZodProvider) => {
config: {
...config,
isMigrationModeOn: serverEnvs.MAINTENANCE_MODE,
isSecretScanningDisabled: serverEnvs.DISABLE_SECRET_SCANNING,
kubernetesAutoFetchServiceAccountToken: serverEnvs.KUBERNETES_AUTO_FETCH_SERVICE_ACCOUNT_TOKEN
isSecretScanningDisabled: serverEnvs.DISABLE_SECRET_SCANNING
}
};
}
@@ -113,14 +110,11 @@ export const registerAdminRouter = async (server: FastifyZodProvider) => {
.refine((content) => DOMPurify.sanitize(content) === content, {
message: "Page frame content contains unsafe HTML."
})
.optional(),
envOverrides: z.record(z.enum(Array.from(overridableKeys) as [string, ...string[]]), z.string()).optional()
.optional()
}),
response: {
200: z.object({
config: SuperAdminSchema.omit({
encryptedEnvOverrides: true
}).extend({
config: SuperAdminSchema.extend({
defaultAuthOrgSlug: z.string().nullable()
})
})
@@ -387,41 +381,6 @@ export const registerAdminRouter = async (server: FastifyZodProvider) => {
}
});
server.route({
method: "GET",
url: "/env-overrides",
config: {
rateLimit: readLimit
},
schema: {
response: {
200: z.record(
z.string(),
z.object({
name: z.string(),
fields: z
.object({
key: z.string(),
value: z.string(),
hasEnvEntry: z.boolean(),
description: z.string().optional()
})
.array()
})
)
}
},
onRequest: (req, res, done) => {
verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN])(req, res, () => {
verifySuperAdmin(req, res, done);
});
},
handler: async () => {
const envOverrides = await server.services.superAdmin.getEnvOverridesOrganized();
return envOverrides;
}
});
server.route({
method: "DELETE",
url: "/user-management/users/:userId",

View File

@@ -31,10 +31,6 @@ import {
AzureKeyVaultConnectionListItemSchema,
SanitizedAzureKeyVaultConnectionSchema
} from "@app/services/app-connection/azure-key-vault";
import {
BitbucketConnectionListItemSchema,
SanitizedBitbucketConnectionSchema
} from "@app/services/app-connection/bitbucket";
import {
CamundaConnectionListItemSchema,
SanitizedCamundaConnectionSchema
@@ -71,10 +67,6 @@ import {
PostgresConnectionListItemSchema,
SanitizedPostgresConnectionSchema
} from "@app/services/app-connection/postgres";
import {
RailwayConnectionListItemSchema,
SanitizedRailwayConnectionSchema
} from "@app/services/app-connection/railway";
import {
RenderConnectionListItemSchema,
SanitizedRenderConnectionSchema
@@ -126,9 +118,7 @@ const SanitizedAppConnectionSchema = z.union([
...SanitizedFlyioConnectionSchema.options,
...SanitizedGitLabConnectionSchema.options,
...SanitizedCloudflareConnectionSchema.options,
...SanitizedBitbucketConnectionSchema.options,
...SanitizedZabbixConnectionSchema.options,
...SanitizedRailwayConnectionSchema.options
...SanitizedZabbixConnectionSchema.options
]);
const AppConnectionOptionsSchema = z.discriminatedUnion("app", [
@@ -161,9 +151,7 @@ const AppConnectionOptionsSchema = z.discriminatedUnion("app", [
FlyioConnectionListItemSchema,
GitLabConnectionListItemSchema,
CloudflareConnectionListItemSchema,
BitbucketConnectionListItemSchema,
ZabbixConnectionListItemSchema,
RailwayConnectionListItemSchema
ZabbixConnectionListItemSchema
]);
export const registerAppConnectionRouter = async (server: FastifyZodProvider) => {

View File

@@ -1,88 +0,0 @@
import { z } from "zod";
import { readLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
import {
CreateBitbucketConnectionSchema,
SanitizedBitbucketConnectionSchema,
UpdateBitbucketConnectionSchema
} from "@app/services/app-connection/bitbucket";
import { AuthMode } from "@app/services/auth/auth-type";
import { registerAppConnectionEndpoints } from "./app-connection-endpoints";
/**
 * Registers the Bitbucket app-connection API routes.
 *
 * Mounts the generic CRUD endpoints shared by all app connections, then adds
 * Bitbucket-specific listing endpoints (workspaces, repositories) consumed
 * internally by the Infisical App UI.
 */
export const registerBitbucketConnectionRouter = async (server: FastifyZodProvider) => {
  registerAppConnectionEndpoints({
    app: AppConnection.Bitbucket,
    server,
    sanitizedResponseSchema: SanitizedBitbucketConnectionSchema,
    createSchema: CreateBitbucketConnectionSchema,
    updateSchema: UpdateBitbucketConnectionSchema
  });

  // The below endpoints are not exposed and for Infisical App use

  // GET /:connectionId/workspaces — list Bitbucket workspaces reachable
  // through this connection. JWT auth only; read rate limit applies.
  server.route({
    method: "GET",
    url: `/:connectionId/workspaces`,
    config: {
      rateLimit: readLimit
    },
    schema: {
      params: z.object({
        connectionId: z.string().uuid()
      }),
      response: {
        200: z.object({
          workspaces: z.object({ slug: z.string() }).array()
        })
      }
    },
    onRequest: verifyAuth([AuthMode.JWT]),
    handler: async (req) => {
      const {
        params: { connectionId }
      } = req;

      const workspaces = await server.services.appConnection.bitbucket.listWorkspaces(connectionId, req.permission);

      return { workspaces };
    }
  });

  // GET /:connectionId/repositories?workspaceSlug=… — list repositories
  // within one workspace of this connection.
  server.route({
    method: "GET",
    url: `/:connectionId/repositories`,
    config: {
      rateLimit: readLimit
    },
    schema: {
      params: z.object({
        connectionId: z.string().uuid()
      }),
      querystring: z.object({
        workspaceSlug: z.string().min(1).max(255)
      }),
      response: {
        200: z.object({
          // full_name/uuid come from the Bitbucket API's repository shape.
          repositories: z.object({ slug: z.string(), full_name: z.string(), uuid: z.string() }).array()
        })
      }
    },
    onRequest: verifyAuth([AuthMode.JWT]),
    handler: async (req) => {
      const {
        params: { connectionId },
        query: { workspaceSlug }
      } = req;

      const repositories = await server.services.appConnection.bitbucket.listRepositories(
        { connectionId, workspaceSlug },
        req.permission
      );

      return { repositories };
    }
  });
};

View File

@@ -9,7 +9,6 @@ import { registerAzureAppConfigurationConnectionRouter } from "./azure-app-confi
import { registerAzureClientSecretsConnectionRouter } from "./azure-client-secrets-connection-router";
import { registerAzureDevOpsConnectionRouter } from "./azure-devops-connection-router";
import { registerAzureKeyVaultConnectionRouter } from "./azure-key-vault-connection-router";
import { registerBitbucketConnectionRouter } from "./bitbucket-connection-router";
import { registerCamundaConnectionRouter } from "./camunda-connection-router";
import { registerCloudflareConnectionRouter } from "./cloudflare-connection-router";
import { registerDatabricksConnectionRouter } from "./databricks-connection-router";
@@ -25,7 +24,6 @@ import { registerLdapConnectionRouter } from "./ldap-connection-router";
import { registerMsSqlConnectionRouter } from "./mssql-connection-router";
import { registerMySqlConnectionRouter } from "./mysql-connection-router";
import { registerPostgresConnectionRouter } from "./postgres-connection-router";
import { registerRailwayConnectionRouter } from "./railway-connection-router";
import { registerRenderConnectionRouter } from "./render-connection-router";
import { registerTeamCityConnectionRouter } from "./teamcity-connection-router";
import { registerTerraformCloudConnectionRouter } from "./terraform-cloud-router";
@@ -66,7 +64,5 @@ export const APP_CONNECTION_REGISTER_ROUTER_MAP: Record<AppConnection, (server:
[AppConnection.Flyio]: registerFlyioConnectionRouter,
[AppConnection.GitLab]: registerGitLabConnectionRouter,
[AppConnection.Cloudflare]: registerCloudflareConnectionRouter,
[AppConnection.Bitbucket]: registerBitbucketConnectionRouter,
[AppConnection.Zabbix]: registerZabbixConnectionRouter,
[AppConnection.Railway]: registerRailwayConnectionRouter
[AppConnection.Zabbix]: registerZabbixConnectionRouter
};

View File

@@ -1,67 +0,0 @@
import { z } from "zod";
import { readLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
import {
CreateRailwayConnectionSchema,
SanitizedRailwayConnectionSchema,
UpdateRailwayConnectionSchema
} from "@app/services/app-connection/railway";
import { AuthMode } from "@app/services/auth/auth-type";
import { registerAppConnectionEndpoints } from "./app-connection-endpoints";
/**
 * Registers the Railway app-connection API routes.
 *
 * Mounts the generic CRUD endpoints shared by all app connections, then adds
 * a Railway-specific project-listing endpoint consumed internally by the
 * Infisical App UI.
 */
export const registerRailwayConnectionRouter = async (server: FastifyZodProvider) => {
  registerAppConnectionEndpoints({
    app: AppConnection.Railway,
    server,
    sanitizedResponseSchema: SanitizedRailwayConnectionSchema,
    createSchema: CreateRailwayConnectionSchema,
    updateSchema: UpdateRailwayConnectionSchema
  });

  // The below endpoints are not exposed and for Infisical App use

  // GET /:connectionId/projects — list Railway projects for this connection,
  // each with its services and environments. JWT auth only; read rate limit.
  server.route({
    method: "GET",
    url: `/:connectionId/projects`,
    config: {
      rateLimit: readLimit
    },
    schema: {
      params: z.object({
        connectionId: z.string().uuid()
      }),
      response: {
        200: z.object({
          projects: z
            .object({
              name: z.string(),
              id: z.string(),
              services: z.array(
                z.object({
                  name: z.string(),
                  id: z.string()
                })
              ),
              environments: z.array(
                z.object({
                  name: z.string(),
                  id: z.string()
                })
              )
            })
            .array()
        })
      }
    },
    onRequest: verifyAuth([AuthMode.JWT]),
    handler: async (req) => {
      const { connectionId } = req.params;

      const projects = await server.services.appConnection.railway.listProjects(connectionId, req.permission);

      return { projects };
    }
  });
};

View File

@@ -732,8 +732,8 @@ export const registerDashboardRouter = async (server: FastifyZodProvider) => {
actorOrgId: req.permission.orgId,
projectId,
environment,
path: secretPath
// search scott: removing for now because this prevents searching imported secrets which are fetched separately client side
path: secretPath,
search
});
if (remainingLimit > 0 && totalImportCount > adjustedOffset) {
@@ -745,7 +745,7 @@ export const registerDashboardRouter = async (server: FastifyZodProvider) => {
projectId,
environment,
path: secretPath,
// search scott: removing for now because this prevents searching imported secrets which are fetched separately client side
search,
limit: remainingLimit,
offset: adjustedOffset
});

View File

@@ -17,7 +17,6 @@ import { registerGitLabSyncRouter } from "./gitlab-sync-router";
import { registerHCVaultSyncRouter } from "./hc-vault-sync-router";
import { registerHerokuSyncRouter } from "./heroku-sync-router";
import { registerHumanitecSyncRouter } from "./humanitec-sync-router";
import { registerRailwaySyncRouter } from "./railway-sync-router";
import { registerRenderSyncRouter } from "./render-sync-router";
import { registerTeamCitySyncRouter } from "./teamcity-sync-router";
import { registerTerraformCloudSyncRouter } from "./terraform-cloud-sync-router";
@@ -50,6 +49,5 @@ export const SECRET_SYNC_REGISTER_ROUTER_MAP: Record<SecretSync, (server: Fastif
[SecretSync.Flyio]: registerFlyioSyncRouter,
[SecretSync.GitLab]: registerGitLabSyncRouter,
[SecretSync.CloudflarePages]: registerCloudflarePagesSyncRouter,
[SecretSync.Zabbix]: registerZabbixSyncRouter,
[SecretSync.Railway]: registerRailwaySyncRouter
[SecretSync.Zabbix]: registerZabbixSyncRouter
};

View File

@@ -1,17 +0,0 @@
import {
CreateRailwaySyncSchema,
RailwaySyncSchema,
UpdateRailwaySyncSchema
} from "@app/services/secret-sync/railway/railway-sync-schemas";
import { SecretSync } from "@app/services/secret-sync/secret-sync-enums";
import { registerSyncSecretsEndpoints } from "./secret-sync-endpoints";
/**
 * Registers the Railway secret-sync API routes by delegating to the shared
 * sync-endpoints factory with Railway-specific request/response schemas.
 */
export const registerRailwaySyncRouter = async (server: FastifyZodProvider) =>
  registerSyncSecretsEndpoints({
    destination: SecretSync.Railway,
    server,
    responseSchema: RailwaySyncSchema,
    createSchema: CreateRailwaySyncSchema,
    updateSchema: UpdateRailwaySyncSchema
  });

View File

@@ -382,8 +382,7 @@ export const registerSyncSecretsEndpoints = <T extends TSecretSync, I extends TS
{
syncId,
destination,
importBehavior,
auditLogInfo: req.auditLogInfo
importBehavior
},
req.permission
)) as T;
@@ -416,8 +415,7 @@ export const registerSyncSecretsEndpoints = <T extends TSecretSync, I extends TS
const secretSync = (await server.services.secretSync.triggerSecretSyncRemoveSecretsById(
{
syncId,
destination,
auditLogInfo: req.auditLogInfo
destination
},
req.permission
)) as T;

View File

@@ -34,7 +34,6 @@ import { GitLabSyncListItemSchema, GitLabSyncSchema } from "@app/services/secret
import { HCVaultSyncListItemSchema, HCVaultSyncSchema } from "@app/services/secret-sync/hc-vault";
import { HerokuSyncListItemSchema, HerokuSyncSchema } from "@app/services/secret-sync/heroku";
import { HumanitecSyncListItemSchema, HumanitecSyncSchema } from "@app/services/secret-sync/humanitec";
import { RailwaySyncListItemSchema, RailwaySyncSchema } from "@app/services/secret-sync/railway/railway-sync-schemas";
import { RenderSyncListItemSchema, RenderSyncSchema } from "@app/services/secret-sync/render/render-sync-schemas";
import { TeamCitySyncListItemSchema, TeamCitySyncSchema } from "@app/services/secret-sync/teamcity";
import { TerraformCloudSyncListItemSchema, TerraformCloudSyncSchema } from "@app/services/secret-sync/terraform-cloud";
@@ -65,8 +64,7 @@ const SecretSyncSchema = z.discriminatedUnion("destination", [
FlyioSyncSchema,
GitLabSyncSchema,
CloudflarePagesSyncSchema,
ZabbixSyncSchema,
RailwaySyncSchema
ZabbixSyncSchema
]);
const SecretSyncOptionsSchema = z.discriminatedUnion("destination", [
@@ -92,8 +90,7 @@ const SecretSyncOptionsSchema = z.discriminatedUnion("destination", [
FlyioSyncListItemSchema,
GitLabSyncListItemSchema,
CloudflarePagesSyncListItemSchema,
ZabbixSyncListItemSchema,
RailwaySyncListItemSchema
ZabbixSyncListItemSchema
]);
export const registerSecretSyncRouter = async (server: FastifyZodProvider) => {

View File

@@ -9,7 +9,6 @@
import { Authenticator } from "@fastify/passport";
import fastifySession from "@fastify/session";
import RedisStore from "connect-redis";
import { CronJob } from "cron";
import { Strategy as GitLabStrategy } from "passport-gitlab2";
import { Strategy as GoogleStrategy } from "passport-google-oauth20";
import { Strategy as OAuth2Strategy } from "passport-oauth2";
@@ -26,14 +25,27 @@ import { AuthMethod } from "@app/services/auth/auth-type";
import { OrgAuthMethod } from "@app/services/org/org-types";
import { getServerCfg } from "@app/services/super-admin/super-admin-service";
const passport = new Authenticator({ key: "sso", userProperty: "passportUser" });
let serverInstance: FastifyZodProvider | null = null;
export const registerOauthMiddlewares = (server: FastifyZodProvider) => {
serverInstance = server;
export const registerSsoRouter = async (server: FastifyZodProvider) => {
const appCfg = getConfig();
const passport = new Authenticator({ key: "sso", userProperty: "passportUser" });
const redisStore = new RedisStore({
client: server.redis,
prefix: "oauth-session:",
ttl: 600 // 10 minutes
});
await server.register(fastifySession, {
secret: appCfg.COOKIE_SECRET_SIGN_KEY,
store: redisStore,
cookie: {
secure: appCfg.HTTPS_ENABLED,
sameSite: "lax" // we want cookies to be sent to Infisical in redirects originating from IDP server
}
});
await server.register(passport.initialize());
await server.register(passport.secureSession());
// passport oauth strategy for Google
const isGoogleOauthActive = Boolean(appCfg.CLIENT_ID_GOOGLE_LOGIN && appCfg.CLIENT_SECRET_GOOGLE_LOGIN);
if (isGoogleOauthActive) {
@@ -164,49 +176,6 @@ export const registerOauthMiddlewares = (server: FastifyZodProvider) => {
)
);
}
};
/**
 * Re-registers the OAuth passport strategies against the cached server
 * instance so that updated SSO configuration takes effect. No-ops with a
 * warning when no server instance has been captured yet.
 */
export const refreshOauthConfig = () => {
  if (serverInstance) {
    logger.info("Refreshing OAuth configuration...");
    registerOauthMiddlewares(serverInstance);
    return;
  }

  logger.warn("Cannot refresh OAuth config: server instance not available");
};
/**
 * Starts the background job that periodically refreshes the OAuth
 * configuration, and returns the started job so the caller can manage its
 * lifecycle (e.g. stop it on shutdown).
 */
export const initializeOauthConfigSync = async () => {
  logger.info("Setting up background sync process for oauth configuration");

  // sync every 5 minutes
  const refreshJob = new CronJob("*/5 * * * *", refreshOauthConfig);
  refreshJob.start();

  return refreshJob;
};
export const registerSsoRouter = async (server: FastifyZodProvider) => {
const appCfg = getConfig();
const redisStore = new RedisStore({
client: server.redis,
prefix: "oauth-session:",
ttl: 600 // 10 minutes
});
await server.register(fastifySession, {
secret: appCfg.COOKIE_SECRET_SIGN_KEY,
store: redisStore,
cookie: {
secure: appCfg.HTTPS_ENABLED,
sameSite: "lax" // we want cookies to be sent to Infisical in redirects originating from IDP server
}
});
await server.register(passport.initialize());
await server.register(passport.secureSession());
registerOauthMiddlewares(server);
server.route({
url: "/redirect/google",

View File

@@ -2,7 +2,7 @@ import picomatch from "picomatch";
import { z } from "zod";
import { SecretApprovalRequestsSchema, SecretsSchema, SecretType, ServiceTokenScopes } from "@app/db/schemas";
import { EventType, SecretApprovalEvent, UserAgentType } from "@app/ee/services/audit-log/audit-log-types";
import { EventType, UserAgentType } from "@app/ee/services/audit-log/audit-log-types";
import { ApiDocsTags, RAW_SECRETS, SECRETS } from "@app/lib/api-docs";
import { BadRequestError } from "@app/lib/errors";
import { removeTrailingSlash } from "@app/lib/fn";
@@ -594,23 +594,6 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
secretReminderRepeatDays: req.body.secretReminderRepeatDays
});
if (secretOperation.type === SecretProtectionType.Approval) {
await server.services.auditLog.createAuditLog({
projectId: req.body.workspaceId,
...req.auditLogInfo,
event: {
type: EventType.SECRET_APPROVAL_REQUEST,
metadata: {
committedBy: secretOperation.approval.committerUserId,
secretApprovalRequestId: secretOperation.approval.id,
secretApprovalRequestSlug: secretOperation.approval.slug,
secretPath: req.body.secretPath,
environment: req.body.environment,
secretKey: req.params.secretName,
eventType: SecretApprovalEvent.Create
}
}
});
return { approval: secretOperation.approval };
}
@@ -747,23 +730,6 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
});
if (secretOperation.type === SecretProtectionType.Approval) {
await server.services.auditLog.createAuditLog({
projectId: req.body.workspaceId,
...req.auditLogInfo,
event: {
type: EventType.SECRET_APPROVAL_REQUEST,
metadata: {
committedBy: secretOperation.approval.committerUserId,
secretApprovalRequestId: secretOperation.approval.id,
secretApprovalRequestSlug: secretOperation.approval.slug,
secretPath: req.body.secretPath,
environment: req.body.environment,
secretKey: req.params.secretName,
eventType: SecretApprovalEvent.Update
}
}
});
return { approval: secretOperation.approval };
}
const { secret } = secretOperation;
@@ -865,23 +831,6 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
type: req.body.type
});
if (secretOperation.type === SecretProtectionType.Approval) {
await server.services.auditLog.createAuditLog({
projectId: req.body.workspaceId,
...req.auditLogInfo,
event: {
type: EventType.SECRET_APPROVAL_REQUEST,
metadata: {
committedBy: secretOperation.approval.committerUserId,
secretApprovalRequestId: secretOperation.approval.id,
secretApprovalRequestSlug: secretOperation.approval.slug,
secretPath: req.body.secretPath,
environment: req.body.environment,
secretKey: req.params.secretName,
eventType: SecretApprovalEvent.Delete
}
}
});
return { approval: secretOperation.approval };
}
@@ -1216,10 +1165,7 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
metadata: {
committedBy: approval.committerUserId,
secretApprovalRequestId: approval.id,
secretApprovalRequestSlug: approval.slug,
secretPath,
environment,
eventType: SecretApprovalEvent.Create
secretApprovalRequestSlug: approval.slug
}
}
});
@@ -1405,11 +1351,7 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
metadata: {
committedBy: approval.committerUserId,
secretApprovalRequestId: approval.id,
secretApprovalRequestSlug: approval.slug,
secretPath,
environment,
secretKey: req.params.secretName,
eventType: SecretApprovalEvent.Update
secretApprovalRequestSlug: approval.slug
}
}
});
@@ -1547,11 +1489,7 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
metadata: {
committedBy: approval.committerUserId,
secretApprovalRequestId: approval.id,
secretApprovalRequestSlug: approval.slug,
secretPath,
environment,
secretKey: req.params.secretName,
eventType: SecretApprovalEvent.Delete
secretApprovalRequestSlug: approval.slug
}
}
});
@@ -1735,10 +1673,7 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
metadata: {
committedBy: approval.committerUserId,
secretApprovalRequestId: approval.id,
secretApprovalRequestSlug: approval.slug,
secretPath,
environment,
eventType: SecretApprovalEvent.CreateMany
secretApprovalRequestSlug: approval.slug
}
}
});
@@ -1866,13 +1801,7 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
metadata: {
committedBy: approval.committerUserId,
secretApprovalRequestId: approval.id,
secretApprovalRequestSlug: approval.slug,
secretPath,
environment,
eventType: SecretApprovalEvent.UpdateMany,
secrets: inputSecrets.map((secret) => ({
secretKey: secret.secretName
}))
secretApprovalRequestSlug: approval.slug
}
}
});
@@ -1991,13 +1920,7 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
metadata: {
committedBy: approval.committerUserId,
secretApprovalRequestId: approval.id,
secretApprovalRequestSlug: approval.slug,
secretPath,
environment,
secrets: inputSecrets.map((secret) => ({
secretKey: secret.secretName
})),
eventType: SecretApprovalEvent.DeleteMany
secretApprovalRequestSlug: approval.slug
}
}
});
@@ -2115,24 +2038,6 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
secrets: inputSecrets
});
if (secretOperation.type === SecretProtectionType.Approval) {
await server.services.auditLog.createAuditLog({
projectId: req.body.workspaceId,
...req.auditLogInfo,
event: {
type: EventType.SECRET_APPROVAL_REQUEST,
metadata: {
committedBy: secretOperation.approval.committerUserId,
secretApprovalRequestId: secretOperation.approval.id,
secretApprovalRequestSlug: secretOperation.approval.slug,
secretPath,
environment,
secrets: inputSecrets.map((secret) => ({
secretKey: secret.secretKey
})),
eventType: SecretApprovalEvent.CreateMany
}
}
});
return { approval: secretOperation.approval };
}
const { secrets } = secretOperation;
@@ -2265,25 +2170,6 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
mode: req.body.mode
});
if (secretOperation.type === SecretProtectionType.Approval) {
await server.services.auditLog.createAuditLog({
projectId: req.body.workspaceId,
...req.auditLogInfo,
event: {
type: EventType.SECRET_APPROVAL_REQUEST,
metadata: {
committedBy: secretOperation.approval.committerUserId,
secretApprovalRequestId: secretOperation.approval.id,
secretApprovalRequestSlug: secretOperation.approval.slug,
secretPath,
environment,
secrets: inputSecrets.map((secret) => ({
secretKey: secret.secretKey,
secretPath: secret.secretPath
})),
eventType: SecretApprovalEvent.UpdateMany
}
}
});
return { approval: secretOperation.approval };
}
const { secrets } = secretOperation;
@@ -2412,25 +2298,6 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
secrets: inputSecrets
});
if (secretOperation.type === SecretProtectionType.Approval) {
await server.services.auditLog.createAuditLog({
projectId: req.body.workspaceId,
...req.auditLogInfo,
event: {
type: EventType.SECRET_APPROVAL_REQUEST,
metadata: {
committedBy: secretOperation.approval.committerUserId,
secretApprovalRequestId: secretOperation.approval.id,
secretApprovalRequestSlug: secretOperation.approval.slug,
secretPath,
environment,
secrets: inputSecrets.map((secret) => ({
secretKey: secret.secretKey
})),
eventType: SecretApprovalEvent.DeleteMany
}
}
});
return { approval: secretOperation.approval };
}
const { secrets } = secretOperation;

View File

@@ -28,9 +28,7 @@ export enum AppConnection {
Flyio = "flyio",
GitLab = "gitlab",
Cloudflare = "cloudflare",
Zabbix = "zabbix",
Railway = "railway",
Bitbucket = "bitbucket"
Zabbix = "zabbix"
}
export enum AWSRegion {

View File

@@ -50,11 +50,6 @@ import {
getAzureKeyVaultConnectionListItem,
validateAzureKeyVaultConnectionCredentials
} from "./azure-key-vault";
import {
BitbucketConnectionMethod,
getBitbucketConnectionListItem,
validateBitbucketConnectionCredentials
} from "./bitbucket";
import { CamundaConnectionMethod, getCamundaConnectionListItem, validateCamundaConnectionCredentials } from "./camunda";
import { CloudflareConnectionMethod } from "./cloudflare/cloudflare-connection-enum";
import {
@@ -91,7 +86,6 @@ import { getMsSqlConnectionListItem, MsSqlConnectionMethod } from "./mssql";
import { MySqlConnectionMethod } from "./mysql/mysql-connection-enums";
import { getMySqlConnectionListItem } from "./mysql/mysql-connection-fns";
import { getPostgresConnectionListItem, PostgresConnectionMethod } from "./postgres";
import { getRailwayConnectionListItem, validateRailwayConnectionCredentials } from "./railway";
import { RenderConnectionMethod } from "./render/render-connection-enums";
import { getRenderConnectionListItem, validateRenderConnectionCredentials } from "./render/render-connection-fns";
import {
@@ -144,9 +138,7 @@ export const listAppConnectionOptions = () => {
getFlyioConnectionListItem(),
getGitLabConnectionListItem(),
getCloudflareConnectionListItem(),
getZabbixConnectionListItem(),
getRailwayConnectionListItem(),
getBitbucketConnectionListItem()
getZabbixConnectionListItem()
].sort((a, b) => a.name.localeCompare(b.name));
};
@@ -227,9 +219,7 @@ export const validateAppConnectionCredentials = async (
[AppConnection.Flyio]: validateFlyioConnectionCredentials as TAppConnectionCredentialsValidator,
[AppConnection.GitLab]: validateGitLabConnectionCredentials as TAppConnectionCredentialsValidator,
[AppConnection.Cloudflare]: validateCloudflareConnectionCredentials as TAppConnectionCredentialsValidator,
[AppConnection.Zabbix]: validateZabbixConnectionCredentials as TAppConnectionCredentialsValidator,
[AppConnection.Railway]: validateRailwayConnectionCredentials as TAppConnectionCredentialsValidator,
[AppConnection.Bitbucket]: validateBitbucketConnectionCredentials as TAppConnectionCredentialsValidator
[AppConnection.Zabbix]: validateZabbixConnectionCredentials as TAppConnectionCredentialsValidator
};
return VALIDATE_APP_CONNECTION_CREDENTIALS_MAP[appConnection.app](appConnection);
@@ -266,7 +256,6 @@ export const getAppConnectionMethodName = (method: TAppConnection["method"]) =>
case VercelConnectionMethod.ApiToken:
case OnePassConnectionMethod.ApiToken:
case CloudflareConnectionMethod.APIToken:
case BitbucketConnectionMethod.ApiToken:
case ZabbixConnectionMethod.ApiToken:
return "API Token";
case PostgresConnectionMethod.UsernameAndPassword:
@@ -348,9 +337,7 @@ export const TRANSITION_CONNECTION_CREDENTIALS_TO_PLATFORM: Record<
[AppConnection.Flyio]: platformManagedCredentialsNotSupported,
[AppConnection.GitLab]: platformManagedCredentialsNotSupported,
[AppConnection.Cloudflare]: platformManagedCredentialsNotSupported,
[AppConnection.Zabbix]: platformManagedCredentialsNotSupported,
[AppConnection.Railway]: platformManagedCredentialsNotSupported,
[AppConnection.Bitbucket]: platformManagedCredentialsNotSupported
[AppConnection.Zabbix]: platformManagedCredentialsNotSupported
};
export const enterpriseAppCheck = async (

View File

@@ -30,9 +30,7 @@ export const APP_CONNECTION_NAME_MAP: Record<AppConnection, string> = {
[AppConnection.Flyio]: "Fly.io",
[AppConnection.GitLab]: "GitLab",
[AppConnection.Cloudflare]: "Cloudflare",
[AppConnection.Zabbix]: "Zabbix",
[AppConnection.Railway]: "Railway",
[AppConnection.Bitbucket]: "Bitbucket"
[AppConnection.Zabbix]: "Zabbix"
};
export const APP_CONNECTION_PLAN_MAP: Record<AppConnection, AppConnectionPlanType> = {
@@ -65,7 +63,5 @@ export const APP_CONNECTION_PLAN_MAP: Record<AppConnection, AppConnectionPlanTyp
[AppConnection.Flyio]: AppConnectionPlanType.Regular,
[AppConnection.GitLab]: AppConnectionPlanType.Regular,
[AppConnection.Cloudflare]: AppConnectionPlanType.Regular,
[AppConnection.Zabbix]: AppConnectionPlanType.Regular,
[AppConnection.Railway]: AppConnectionPlanType.Regular,
[AppConnection.Bitbucket]: AppConnectionPlanType.Regular
[AppConnection.Zabbix]: AppConnectionPlanType.Regular
};

View File

@@ -45,8 +45,6 @@ import { azureClientSecretsConnectionService } from "./azure-client-secrets/azur
import { ValidateAzureDevOpsConnectionCredentialsSchema } from "./azure-devops/azure-devops-schemas";
import { azureDevOpsConnectionService } from "./azure-devops/azure-devops-service";
import { ValidateAzureKeyVaultConnectionCredentialsSchema } from "./azure-key-vault";
import { ValidateBitbucketConnectionCredentialsSchema } from "./bitbucket";
import { bitbucketConnectionService } from "./bitbucket/bitbucket-connection-service";
import { ValidateCamundaConnectionCredentialsSchema } from "./camunda";
import { camundaConnectionService } from "./camunda/camunda-connection-service";
import { ValidateCloudflareConnectionCredentialsSchema } from "./cloudflare/cloudflare-connection-schema";
@@ -72,8 +70,6 @@ import { ValidateLdapConnectionCredentialsSchema } from "./ldap";
import { ValidateMsSqlConnectionCredentialsSchema } from "./mssql";
import { ValidateMySqlConnectionCredentialsSchema } from "./mysql";
import { ValidatePostgresConnectionCredentialsSchema } from "./postgres";
import { ValidateRailwayConnectionCredentialsSchema } from "./railway";
import { railwayConnectionService } from "./railway/railway-connection-service";
import { ValidateRenderConnectionCredentialsSchema } from "./render/render-connection-schema";
import { renderConnectionService } from "./render/render-connection-service";
import { ValidateTeamCityConnectionCredentialsSchema } from "./teamcity";
@@ -126,9 +122,7 @@ const VALIDATE_APP_CONNECTION_CREDENTIALS_MAP: Record<AppConnection, TValidateAp
[AppConnection.Flyio]: ValidateFlyioConnectionCredentialsSchema,
[AppConnection.GitLab]: ValidateGitLabConnectionCredentialsSchema,
[AppConnection.Cloudflare]: ValidateCloudflareConnectionCredentialsSchema,
[AppConnection.Zabbix]: ValidateZabbixConnectionCredentialsSchema,
[AppConnection.Railway]: ValidateRailwayConnectionCredentialsSchema,
[AppConnection.Bitbucket]: ValidateBitbucketConnectionCredentialsSchema
[AppConnection.Zabbix]: ValidateZabbixConnectionCredentialsSchema
};
export const appConnectionServiceFactory = ({
@@ -539,8 +533,6 @@ export const appConnectionServiceFactory = ({
flyio: flyioConnectionService(connectAppConnectionById),
gitlab: gitlabConnectionService(connectAppConnectionById, appConnectionDAL, kmsService),
cloudflare: cloudflareConnectionService(connectAppConnectionById),
zabbix: zabbixConnectionService(connectAppConnectionById),
railway: railwayConnectionService(connectAppConnectionById),
bitbucket: bitbucketConnectionService(connectAppConnectionById)
zabbix: zabbixConnectionService(connectAppConnectionById)
};
};

View File

@@ -56,12 +56,6 @@ import {
TAzureKeyVaultConnectionInput,
TValidateAzureKeyVaultConnectionCredentialsSchema
} from "./azure-key-vault";
import {
TBitbucketConnection,
TBitbucketConnectionConfig,
TBitbucketConnectionInput,
TValidateBitbucketConnectionCredentialsSchema
} from "./bitbucket";
import {
TCamundaConnection,
TCamundaConnectionConfig,
@@ -141,12 +135,6 @@ import {
TPostgresConnectionInput,
TValidatePostgresConnectionCredentialsSchema
} from "./postgres";
import {
TRailwayConnection,
TRailwayConnectionConfig,
TRailwayConnectionInput,
TValidateRailwayConnectionCredentialsSchema
} from "./railway";
import {
TRenderConnection,
TRenderConnectionConfig,
@@ -214,9 +202,7 @@ export type TAppConnection = { id: string } & (
| TFlyioConnection
| TGitLabConnection
| TCloudflareConnection
| TBitbucketConnection
| TZabbixConnection
| TRailwayConnection
);
export type TAppConnectionRaw = NonNullable<Awaited<ReturnType<TAppConnectionDALFactory["findById"]>>>;
@@ -253,9 +239,7 @@ export type TAppConnectionInput = { id: string } & (
| TFlyioConnectionInput
| TGitLabConnectionInput
| TCloudflareConnectionInput
| TBitbucketConnectionInput
| TZabbixConnectionInput
| TRailwayConnectionInput
);
export type TSqlConnectionInput =
@@ -300,9 +284,7 @@ export type TAppConnectionConfig =
| TFlyioConnectionConfig
| TGitLabConnectionConfig
| TCloudflareConnectionConfig
| TBitbucketConnectionConfig
| TZabbixConnectionConfig
| TRailwayConnectionConfig;
| TZabbixConnectionConfig;
export type TValidateAppConnectionCredentialsSchema =
| TValidateAwsConnectionCredentialsSchema
@@ -334,9 +316,7 @@ export type TValidateAppConnectionCredentialsSchema =
| TValidateFlyioConnectionCredentialsSchema
| TValidateGitLabConnectionCredentialsSchema
| TValidateCloudflareConnectionCredentialsSchema
| TValidateBitbucketConnectionCredentialsSchema
| TValidateZabbixConnectionCredentialsSchema
| TValidateRailwayConnectionCredentialsSchema;
| TValidateZabbixConnectionCredentialsSchema;
export type TListAwsConnectionKmsKeys = {
connectionId: string;

View File

@@ -1,3 +0,0 @@
export enum BitbucketConnectionMethod {
ApiToken = "api-token"
}

View File

@@ -1,117 +0,0 @@
import { AxiosError } from "axios";
import { request } from "@app/lib/config/request";
import { BadRequestError } from "@app/lib/errors";
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
import { IntegrationUrls } from "@app/services/integration-auth/integration-list";
import { BitbucketConnectionMethod } from "./bitbucket-connection-enums";
import {
TBitbucketConnection,
TBitbucketConnectionConfig,
TBitbucketRepo,
TBitbucketWorkspace
} from "./bitbucket-connection-types";
export const getBitbucketConnectionListItem = () => {
return {
name: "Bitbucket" as const,
app: AppConnection.Bitbucket as const,
methods: Object.values(BitbucketConnectionMethod) as [BitbucketConnectionMethod.ApiToken]
};
};
export const getBitbucketUser = async ({ email, apiToken }: { email: string; apiToken: string }) => {
try {
const { data } = await request.get<{ username: string }>(`${IntegrationUrls.BITBUCKET_API_URL}/2.0/user`, {
headers: {
Authorization: `Basic ${Buffer.from(`${email}:${apiToken}`).toString("base64")}`,
Accept: "application/json"
}
});
return data;
} catch (error: unknown) {
if (error instanceof AxiosError) {
throw new BadRequestError({
message: `Failed to validate credentials: ${error.message || "Unknown error"}`
});
}
throw new BadRequestError({
message: "Unable to validate connection: verify credentials"
});
}
};
export const validateBitbucketConnectionCredentials = async (config: TBitbucketConnectionConfig) => {
await getBitbucketUser(config.credentials);
return config.credentials;
};
interface BitbucketWorkspacesResponse {
values: TBitbucketWorkspace[];
next?: string;
}
export const listBitbucketWorkspaces = async (appConnection: TBitbucketConnection) => {
const { email, apiToken } = appConnection.credentials;
const headers = {
Authorization: `Basic ${Buffer.from(`${email}:${apiToken}`).toString("base64")}`,
Accept: "application/json"
};
let allWorkspaces: TBitbucketWorkspace[] = [];
let nextUrl: string | undefined = `${IntegrationUrls.BITBUCKET_API_URL}/2.0/workspaces?pagelen=100`;
let iterationCount = 0;
// Limit to 10 iterations, fetching at most 10 * 100 = 1000 workspaces
while (nextUrl && iterationCount < 10) {
// eslint-disable-next-line no-await-in-loop
const { data }: { data: BitbucketWorkspacesResponse } = await request.get<BitbucketWorkspacesResponse>(nextUrl, {
headers
});
allWorkspaces = allWorkspaces.concat(data.values.map((workspace) => ({ slug: workspace.slug })));
nextUrl = data.next;
iterationCount += 1;
}
return allWorkspaces;
};
interface BitbucketRepositoriesResponse {
values: TBitbucketRepo[];
next?: string;
}
export const listBitbucketRepositories = async (appConnection: TBitbucketConnection, workspaceSlug: string) => {
const { email, apiToken } = appConnection.credentials;
const headers = {
Authorization: `Basic ${Buffer.from(`${email}:${apiToken}`).toString("base64")}`,
Accept: "application/json"
};
let allRepos: TBitbucketRepo[] = [];
let nextUrl: string | undefined =
`${IntegrationUrls.BITBUCKET_API_URL}/2.0/repositories/${encodeURIComponent(workspaceSlug)}?pagelen=100`;
let iterationCount = 0;
// Limit to 10 iterations, fetching at most 10 * 100 = 1000 repositories
while (nextUrl && iterationCount < 10) {
// eslint-disable-next-line no-await-in-loop
const { data }: { data: BitbucketRepositoriesResponse } = await request.get<BitbucketRepositoriesResponse>(
nextUrl,
{
headers
}
);
allRepos = allRepos.concat(data.values);
nextUrl = data.next;
iterationCount += 1;
}
return allRepos;
};

View File

@@ -1,72 +0,0 @@
import z from "zod";
import { AppConnections } from "@app/lib/api-docs";
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
import {
BaseAppConnectionSchema,
GenericCreateAppConnectionFieldsSchema,
GenericUpdateAppConnectionFieldsSchema
} from "@app/services/app-connection/app-connection-schemas";
import { BitbucketConnectionMethod } from "./bitbucket-connection-enums";
export const BitbucketConnectionAccessTokenCredentialsSchema = z.object({
apiToken: z
.string()
.trim()
.min(1, "API Token required")
.max(255)
.describe(AppConnections.CREDENTIALS.BITBUCKET.apiToken),
email: z
.string()
.email()
.trim()
.min(1, "Email required")
.max(255)
.describe(AppConnections.CREDENTIALS.BITBUCKET.email)
});
const BaseBitbucketConnectionSchema = BaseAppConnectionSchema.extend({ app: z.literal(AppConnection.Bitbucket) });
export const BitbucketConnectionSchema = BaseBitbucketConnectionSchema.extend({
method: z.literal(BitbucketConnectionMethod.ApiToken),
credentials: BitbucketConnectionAccessTokenCredentialsSchema
});
export const SanitizedBitbucketConnectionSchema = z.discriminatedUnion("method", [
BaseBitbucketConnectionSchema.extend({
method: z.literal(BitbucketConnectionMethod.ApiToken),
credentials: BitbucketConnectionAccessTokenCredentialsSchema.pick({
email: true
})
})
]);
export const ValidateBitbucketConnectionCredentialsSchema = z.discriminatedUnion("method", [
z.object({
method: z
.literal(BitbucketConnectionMethod.ApiToken)
.describe(AppConnections.CREATE(AppConnection.Bitbucket).method),
credentials: BitbucketConnectionAccessTokenCredentialsSchema.describe(
AppConnections.CREATE(AppConnection.Bitbucket).credentials
)
})
]);
export const CreateBitbucketConnectionSchema = ValidateBitbucketConnectionCredentialsSchema.and(
GenericCreateAppConnectionFieldsSchema(AppConnection.Bitbucket)
);
export const UpdateBitbucketConnectionSchema = z
.object({
credentials: BitbucketConnectionAccessTokenCredentialsSchema.optional().describe(
AppConnections.UPDATE(AppConnection.Bitbucket).credentials
)
})
.and(GenericUpdateAppConnectionFieldsSchema(AppConnection.Bitbucket));
export const BitbucketConnectionListItemSchema = z.object({
name: z.literal("Bitbucket"),
app: z.literal(AppConnection.Bitbucket),
methods: z.nativeEnum(BitbucketConnectionMethod).array()
});

View File

@@ -1,33 +0,0 @@
import { OrgServiceActor } from "@app/lib/types";
import { AppConnection } from "../app-connection-enums";
import { listBitbucketRepositories, listBitbucketWorkspaces } from "./bitbucket-connection-fns";
import { TBitbucketConnection, TGetBitbucketRepositoriesDTO } from "./bitbucket-connection-types";
type TGetAppConnectionFunc = (
app: AppConnection,
connectionId: string,
actor: OrgServiceActor
) => Promise<TBitbucketConnection>;
export const bitbucketConnectionService = (getAppConnection: TGetAppConnectionFunc) => {
const listWorkspaces = async (connectionId: string, actor: OrgServiceActor) => {
const appConnection = await getAppConnection(AppConnection.Bitbucket, connectionId, actor);
const workspaces = await listBitbucketWorkspaces(appConnection);
return workspaces;
};
const listRepositories = async (
{ connectionId, workspaceSlug }: TGetBitbucketRepositoriesDTO,
actor: OrgServiceActor
) => {
const appConnection = await getAppConnection(AppConnection.Bitbucket, connectionId, actor);
const repositories = await listBitbucketRepositories(appConnection, workspaceSlug);
return repositories;
};
return {
listWorkspaces,
listRepositories
};
};

View File

@@ -1,40 +0,0 @@
import z from "zod";
import { DiscriminativePick } from "@app/lib/types";
import { AppConnection } from "../app-connection-enums";
import {
BitbucketConnectionSchema,
CreateBitbucketConnectionSchema,
ValidateBitbucketConnectionCredentialsSchema
} from "./bitbucket-connection-schemas";
export type TBitbucketConnection = z.infer<typeof BitbucketConnectionSchema>;
export type TBitbucketConnectionInput = z.infer<typeof CreateBitbucketConnectionSchema> & {
app: AppConnection.Bitbucket;
};
export type TValidateBitbucketConnectionCredentialsSchema = typeof ValidateBitbucketConnectionCredentialsSchema;
export type TBitbucketConnectionConfig = DiscriminativePick<
TBitbucketConnectionInput,
"method" | "app" | "credentials"
> & {
orgId: string;
};
export type TGetBitbucketRepositoriesDTO = {
connectionId: string;
workspaceSlug: string;
};
export type TBitbucketWorkspace = {
slug: string;
};
export type TBitbucketRepo = {
uuid: string;
full_name: string; // workspace-slug/repo-slug
slug: string;
};

View File

@@ -1,4 +0,0 @@
export * from "./bitbucket-connection-enums";
export * from "./bitbucket-connection-fns";
export * from "./bitbucket-connection-schemas";
export * from "./bitbucket-connection-types";

View File

@@ -1,4 +0,0 @@
export * from "./railway-connection-constants";
export * from "./railway-connection-fns";
export * from "./railway-connection-schemas";
export * from "./railway-connection-types";

View File

@@ -1,5 +0,0 @@
export enum RailwayConnectionMethod {
AccountToken = "account-token",
ProjectToken = "project-token",
TeamToken = "team-token"
}

View File

@@ -1,66 +0,0 @@
/* eslint-disable no-await-in-loop */
import { AxiosError } from "axios";
import { BadRequestError } from "@app/lib/errors";
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
import { RailwayConnectionMethod } from "./railway-connection-constants";
import { RailwayPublicAPI } from "./railway-connection-public-client";
import { TRailwayConnection, TRailwayConnectionConfig } from "./railway-connection-types";
export const getRailwayConnectionListItem = () => {
return {
name: "Railway" as const,
app: AppConnection.Railway as const,
methods: Object.values(RailwayConnectionMethod)
};
};
export const validateRailwayConnectionCredentials = async (config: TRailwayConnectionConfig) => {
const { credentials, method } = config;
try {
await RailwayPublicAPI.healthcheck({
method,
credentials
});
} catch (error: unknown) {
if (error instanceof AxiosError) {
throw new BadRequestError({
message: `Failed to validate credentials: ${error.message || "Unknown error"}`
});
}
throw new BadRequestError({
message: "Unable to validate connection - verify credentials"
});
}
return credentials;
};
export const listProjects = async (appConnection: TRailwayConnection) => {
const { credentials, method } = appConnection;
try {
return await RailwayPublicAPI.listProjects({
method,
credentials
});
} catch (error: unknown) {
if (error instanceof AxiosError) {
throw new BadRequestError({
message: `Failed to list projects: ${error.message || "Unknown error"}`
});
}
if (error instanceof BadRequestError) {
throw error;
}
throw new BadRequestError({
message: "Unable to list projects",
error
});
}
};

View File

@@ -1,237 +0,0 @@
/* eslint-disable class-methods-use-this */
import { AxiosError, AxiosInstance, AxiosResponse } from "axios";
import { createRequestClient } from "@app/lib/config/request";
import { BadRequestError } from "@app/lib/errors";
import { IntegrationUrls } from "@app/services/integration-auth/integration-list";
import { RailwayConnectionMethod } from "./railway-connection-constants";
import {
RailwayAccountWorkspaceListSchema,
RailwayGetProjectsByProjectTokenSchema,
RailwayGetSubscriptionTypeSchema,
RailwayProjectsListSchema
} from "./railway-connection-schemas";
import { RailwayProject, TRailwayConnectionConfig, TRailwayResponse } from "./railway-connection-types";
type RailwaySendReqOptions = Pick<TRailwayConnectionConfig, "credentials" | "method">;
export function getRailwayAuthHeaders(method: RailwayConnectionMethod, token: string): Record<string, string> {
switch (method) {
case RailwayConnectionMethod.AccountToken:
case RailwayConnectionMethod.TeamToken:
return {
Authorization: token
};
case RailwayConnectionMethod.ProjectToken:
return {
"Project-Access-Token": token
};
default:
throw new Error(`Unsupported Railway connection method`);
}
}
export function getRailwayRatelimiter(headers: AxiosResponse["headers"]): {
isRatelimited: boolean;
maxAttempts: number;
wait: () => Promise<void>;
} {
const retryAfter: number | undefined = headers["Retry-After"] as number | undefined;
const requestsLeft = parseInt(headers["X-RateLimit-Remaining"] as string, 10);
const limitResetAt = headers["X-RateLimit-Reset"] as string;
const now = +new Date();
const nextReset = +new Date(limitResetAt);
const remaining = Math.min(0, nextReset - now);
const wait = () => {
return new Promise<void>((res) => {
setTimeout(res, remaining);
});
};
return {
isRatelimited: Boolean(retryAfter || requestsLeft === 0),
wait,
maxAttempts: 3
};
}
class RailwayPublicClient {
private client: AxiosInstance;
constructor() {
this.client = createRequestClient({
method: "POST",
baseURL: IntegrationUrls.RAILWAY_API_URL,
headers: {
"Content-Type": "application/json"
}
});
}
async send<T extends TRailwayResponse>(
query: string,
options: RailwaySendReqOptions,
variables: Record<string, string | Record<string, string>> = {},
retryAttempt: number = 0
): Promise<T["data"] | undefined> {
const body = {
query,
variables
};
const response = await this.client.request<T>({
data: body,
headers: getRailwayAuthHeaders(options.method, options.credentials.apiToken)
});
const { errors } = response.data;
if (Array.isArray(errors) && errors.length > 0) {
throw new AxiosError(errors[0].message);
}
const limiter = getRailwayRatelimiter(response.headers);
if (limiter.isRatelimited && retryAttempt <= limiter.maxAttempts) {
await limiter.wait();
return this.send(query, options, variables, retryAttempt + 1);
}
return response.data.data;
}
healthcheck(config: RailwaySendReqOptions) {
switch (config.method) {
case RailwayConnectionMethod.AccountToken:
return this.send(`{ me { teams { edges { node { id } } } } }`, config);
case RailwayConnectionMethod.ProjectToken:
return this.send(`{ projectToken { projectId environmentId project { id } } }`, config);
case RailwayConnectionMethod.TeamToken:
return this.send(`{ projects { edges { node { id name team { id } } } } }`, config);
default:
throw new Error(`Unsupported Railway connection method`);
}
}
async getSubscriptionType(config: RailwaySendReqOptions & { projectId: string }) {
const res = await this.send(
`query project($projectId: String!) { project(id: $projectId) { subscriptionType }}`,
config,
{
projectId: config.projectId
}
);
const data = await RailwayGetSubscriptionTypeSchema.parseAsync(res);
return data.project.subscriptionType;
}
async listProjects(config: RailwaySendReqOptions): Promise<RailwayProject[]> {
switch (config.method) {
case RailwayConnectionMethod.TeamToken: {
const res = await this.send(
`{ projects { edges { node { id, name, services{ edges{ node { id, name } } } environments { edges { node { name, id } } } } } } }`,
config
);
const data = await RailwayProjectsListSchema.parseAsync(res);
return data.projects.edges.map((p) => ({
id: p.node.id,
name: p.node.name,
environments: p.node.environments.edges.map((e) => e.node),
services: p.node.services.edges.map((s) => s.node)
}));
}
case RailwayConnectionMethod.AccountToken: {
const res = await this.send(
`{ me { workspaces { id, name, team{ projects{ edges{ node{ id, name, services{ edges { node { name, id } } } environments { edges { node { name, id } } } } } } } } } }`,
config
);
const data = await RailwayAccountWorkspaceListSchema.parseAsync(res);
return data.me.workspaces.flatMap((w) =>
w.team.projects.edges.map((p) => ({
id: p.node.id,
name: p.node.name,
environments: p.node.environments.edges.map((e) => e.node),
services: p.node.services.edges.map((s) => s.node)
}))
);
}
case RailwayConnectionMethod.ProjectToken: {
const res = await this.send(
`query { projectToken { project { id, name, services { edges { node { name, id } } } environments { edges { node { name, id } } } } } }`,
config
);
const data = await RailwayGetProjectsByProjectTokenSchema.parseAsync(res);
const p = data.projectToken.project;
return [
{
id: p.id,
name: p.name,
environments: p.environments.edges.map((e) => e.node),
services: p.services.edges.map((s) => s.node)
}
];
}
default:
throw new Error(`Unsupported Railway connection method`);
}
}
async getVariables(
config: RailwaySendReqOptions,
variables: { projectId: string; environmentId: string; serviceId?: string }
) {
const res = await this.send<TRailwayResponse<{ variables: Record<string, string> }>>(
`query variables($environmentId: String!, $projectId: String!, $serviceId: String) { variables( projectId: $projectId, environmentId: $environmentId, serviceId: $serviceId ) }`,
config,
variables
);
if (!res?.variables) {
throw new BadRequestError({
message: "Failed to get railway variables - empty response"
});
}
return res.variables;
}
async deleteVariable(
config: RailwaySendReqOptions,
variables: { input: { projectId: string; environmentId: string; name: string; serviceId?: string } }
) {
await this.send<TRailwayResponse<{ variables: Record<string, string> }>>(
`mutation variableDelete($input: VariableDeleteInput!) { variableDelete(input: $input) }`,
config,
variables
);
}
async upsertVariable(
config: RailwaySendReqOptions,
variables: { input: { projectId: string; environmentId: string; name: string; value: string; serviceId?: string } }
) {
await this.send<TRailwayResponse<{ variables: Record<string, string> }>>(
`mutation variableUpsert($input: VariableUpsertInput!) { variableUpsert(input: $input) }`,
config,
variables
);
}
}
export const RailwayPublicAPI = new RailwayPublicClient();

View File

@@ -1,117 +0,0 @@
import z from "zod";
import { AppConnections } from "@app/lib/api-docs";
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
import {
BaseAppConnectionSchema,
GenericCreateAppConnectionFieldsSchema,
GenericUpdateAppConnectionFieldsSchema
} from "@app/services/app-connection/app-connection-schemas";
import { RailwayConnectionMethod } from "./railway-connection-constants";
export const RailwayConnectionMethodSchema = z
.nativeEnum(RailwayConnectionMethod)
.describe(AppConnections.CREATE(AppConnection.Railway).method);
export const RailwayConnectionAccessTokenCredentialsSchema = z.object({
apiToken: z
.string()
.trim()
.min(1, "API Token required")
.max(255)
.describe(AppConnections.CREDENTIALS.RAILWAY.apiToken)
});
const BaseRailwayConnectionSchema = BaseAppConnectionSchema.extend({
app: z.literal(AppConnection.Railway)
});
export const RailwayConnectionSchema = BaseRailwayConnectionSchema.extend({
method: RailwayConnectionMethodSchema,
credentials: RailwayConnectionAccessTokenCredentialsSchema
});
export const SanitizedRailwayConnectionSchema = z.discriminatedUnion("method", [
BaseRailwayConnectionSchema.extend({
method: RailwayConnectionMethodSchema,
credentials: RailwayConnectionAccessTokenCredentialsSchema.pick({})
})
]);
export const ValidateRailwayConnectionCredentialsSchema = z.discriminatedUnion("method", [
z.object({
method: RailwayConnectionMethodSchema,
credentials: RailwayConnectionAccessTokenCredentialsSchema.describe(
AppConnections.CREATE(AppConnection.Railway).credentials
)
})
]);
export const CreateRailwayConnectionSchema = ValidateRailwayConnectionCredentialsSchema.and(
GenericCreateAppConnectionFieldsSchema(AppConnection.Railway)
);
export const UpdateRailwayConnectionSchema = z
.object({
credentials: RailwayConnectionAccessTokenCredentialsSchema.optional().describe(
AppConnections.UPDATE(AppConnection.Railway).credentials
)
})
.and(GenericUpdateAppConnectionFieldsSchema(AppConnection.Railway));
export const RailwayConnectionListItemSchema = z.object({
name: z.literal("Railway"),
app: z.literal(AppConnection.Railway),
methods: z.nativeEnum(RailwayConnectionMethod).array()
});
export const RailwayResourceSchema = z.object({
node: z.object({
id: z.string(),
name: z.string()
})
});
export const RailwayProjectEdgeSchema = z.object({
node: z.object({
id: z.string(),
name: z.string(),
services: z.object({
edges: z.array(RailwayResourceSchema)
}),
environments: z.object({
edges: z.array(RailwayResourceSchema)
})
})
});
export const RailwayProjectsListSchema = z.object({
projects: z.object({
edges: z.array(RailwayProjectEdgeSchema)
})
});
export const RailwayAccountWorkspaceListSchema = z.object({
me: z.object({
workspaces: z.array(
z.object({
id: z.string(),
name: z.string(),
team: RailwayProjectsListSchema
})
)
})
});
export const RailwayGetProjectsByProjectTokenSchema = z.object({
projectToken: z.object({
project: RailwayProjectEdgeSchema.shape.node
})
});
// Response shape for querying a project's billing plan tier.
export const RailwayGetSubscriptionTypeSchema = z.object({
  project: z.object({
    subscriptionType: z.enum(["free", "hobby", "pro", "trial"])
  })
});

View File

@@ -1,30 +0,0 @@
import { logger } from "@app/lib/logger";
import { OrgServiceActor } from "@app/lib/types";
import { AppConnection } from "../app-connection-enums";
import { listProjects as getRailwayProjects } from "./railway-connection-fns";
import { TRailwayConnection } from "./railway-connection-types";
type TGetAppConnectionFunc = (
app: AppConnection,
connectionId: string,
actor: OrgServiceActor
) => Promise<TRailwayConnection>;
export const railwayConnectionService = (getAppConnection: TGetAppConnectionFunc) => {
const listProjects = async (connectionId: string, actor: OrgServiceActor) => {
const appConnection = await getAppConnection(AppConnection.Railway, connectionId, actor);
try {
const projects = await getRailwayProjects(appConnection);
return projects;
} catch (error) {
logger.error(error, "Failed to establish connection with Railway");
return [];
}
};
return {
listProjects
};
};

View File

@@ -1,79 +0,0 @@
import z from "zod";
import { DiscriminativePick } from "@app/lib/types";
import { AppConnection } from "../app-connection-enums";
import {
CreateRailwayConnectionSchema,
RailwayConnectionSchema,
ValidateRailwayConnectionCredentialsSchema
} from "./railway-connection-schemas";
// Fully hydrated Railway connection record as stored/returned by the service.
export type TRailwayConnection = z.infer<typeof RailwayConnectionSchema>;
// Create-connection input, tagged with the Railway app discriminant.
export type TRailwayConnectionInput = z.infer<typeof CreateRailwayConnectionSchema> & {
  app: AppConnection.Railway;
};
// Schema type re-exported so validators can be passed around without zod noise.
export type TValidateRailwayConnectionCredentialsSchema = typeof ValidateRailwayConnectionCredentialsSchema;
// Minimal slice of a connection needed to talk to the Railway API.
export type TRailwayConnectionConfig = DiscriminativePick<TRailwayConnection, "method" | "app" | "credentials"> & {
  orgId: string;
};
// A Railway service (deployable unit) reduced to id + display name.
export type TRailwayService = {
  id: string;
  name: string;
};
// A Railway environment (e.g. production/staging) reduced to id + display name.
export type TRailwayEnvironment = {
  id: string;
  name: string;
};
// Flattened project view exposed to consumers (GraphQL edges unwrapped).
export type RailwayProject = {
  id: string;
  name: string;
  services: TRailwayService[];
  environments: TRailwayEnvironment[];
};
// Generic Railway GraphQL envelope: either data or a list of error messages.
export type TRailwayResponse<T = unknown> = {
  data?: T;
  errors?: {
    message: string;
  }[];
};
// Envelope for the account-level projects listing query.
export type TAccountProjectListResponse = TRailwayResponse<{
  projects: {
    edges: TProjectEdge[];
  };
}>;
// One project edge with nested service/environment connections.
// NOTE(review): declared as `interface` while sibling types use `type`
// aliases — consider unifying for consistency.
export interface TProjectEdge {
  node: {
    id: string;
    name: string;
    services: {
      edges: TServiceEdge[];
    };
    environments: {
      edges: TEnvironmentEdge[];
    };
  };
}
// Edge wrapper around a service node (internal to this module).
type TServiceEdge = {
  node: {
    id: string;
    name: string;
  };
};

// Edge wrapper around an environment node (internal to this module).
type TEnvironmentEdge = {
  node: {
    id: string;
    name: string;
  };
};

View File

@@ -93,25 +93,23 @@ export const identityProjectServiceFactory = ({
projectId
);
if (requestedRoleChange !== ProjectMembershipRole.NoAccess) {
const permissionBoundary = validatePrivilegeChangeOperation(
membership.shouldUseNewPrivilegeSystem,
ProjectPermissionIdentityActions.GrantPrivileges,
ProjectPermissionSub.Identity,
permission,
rolePermission
);
if (!permissionBoundary.isValid)
throw new PermissionBoundaryError({
message: constructPermissionErrorMessage(
"Failed to assign to role",
membership.shouldUseNewPrivilegeSystem,
ProjectPermissionIdentityActions.GrantPrivileges,
ProjectPermissionSub.Identity
),
details: { missingPermissions: permissionBoundary.missingPermissions }
});
}
const permissionBoundary = validatePrivilegeChangeOperation(
membership.shouldUseNewPrivilegeSystem,
ProjectPermissionIdentityActions.GrantPrivileges,
ProjectPermissionSub.Identity,
permission,
rolePermission
);
if (!permissionBoundary.isValid)
throw new PermissionBoundaryError({
message: constructPermissionErrorMessage(
"Failed to assign to role",
membership.shouldUseNewPrivilegeSystem,
ProjectPermissionIdentityActions.GrantPrivileges,
ProjectPermissionSub.Identity
),
details: { missingPermissions: permissionBoundary.missingPermissions }
});
}
// validate custom roles input

View File

@@ -69,25 +69,23 @@ export const identityServiceFactory = ({
orgId
);
const isCustomRole = Boolean(customRole);
if (role !== OrgMembershipRole.NoAccess) {
const permissionBoundary = validatePrivilegeChangeOperation(
membership.shouldUseNewPrivilegeSystem,
OrgPermissionIdentityActions.GrantPrivileges,
OrgPermissionSubjects.Identity,
permission,
rolePermission
);
if (!permissionBoundary.isValid)
throw new PermissionBoundaryError({
message: constructPermissionErrorMessage(
"Failed to create identity",
membership.shouldUseNewPrivilegeSystem,
OrgPermissionIdentityActions.GrantPrivileges,
OrgPermissionSubjects.Identity
),
details: { missingPermissions: permissionBoundary.missingPermissions }
});
}
const permissionBoundary = validatePrivilegeChangeOperation(
membership.shouldUseNewPrivilegeSystem,
OrgPermissionIdentityActions.GrantPrivileges,
OrgPermissionSubjects.Identity,
permission,
rolePermission
);
if (!permissionBoundary.isValid)
throw new PermissionBoundaryError({
message: constructPermissionErrorMessage(
"Failed to create identity",
membership.shouldUseNewPrivilegeSystem,
OrgPermissionIdentityActions.GrantPrivileges,
OrgPermissionSubjects.Identity
),
details: { missingPermissions: permissionBoundary.missingPermissions }
});
const plan = await licenseService.getPlan(orgId);
@@ -189,7 +187,6 @@ export const identityServiceFactory = ({
),
details: { missingPermissions: appliedRolePermissionBoundary.missingPermissions }
});
if (isCustomRole) customRole = customOrgRole;
}

View File

@@ -814,9 +814,9 @@ const getAppsCloudflareWorkers = async ({ accessToken, accountId }: { accessToke
};
/**
* Return list of repositories for the Bitbucket integration based on provided Bitbucket workspace
* Return list of repositories for the BitBucket integration based on provided BitBucket workspace
*/
const getAppsBitbucket = async ({ accessToken, workspaceSlug }: { accessToken: string; workspaceSlug?: string }) => {
const getAppsBitBucket = async ({ accessToken, workspaceSlug }: { accessToken: string; workspaceSlug?: string }) => {
interface RepositoriesResponse {
size: number;
page: number;
@@ -1302,7 +1302,7 @@ export const getApps = async ({
});
case Integrations.BITBUCKET:
return getAppsBitbucket({
return getAppsBitBucket({
accessToken,
workspaceSlug
});

View File

@@ -342,7 +342,7 @@ export const getIntegrationOptions = async () => {
{
name: "Bitbucket",
slug: "bitbucket",
image: "Bitbucket.png",
image: "BitBucket.png",
isAvailable: true,
type: "oauth",
clientId: appCfg.CLIENT_ID_BITBUCKET,

View File

@@ -3921,9 +3921,9 @@ const syncSecretsCloudflareWorkers = async ({
};
/**
* Sync/push [secrets] to Bitbucket repo with name [integration.app]
* Sync/push [secrets] to BitBucket repo with name [integration.app]
*/
const syncSecretsBitbucket = async ({
const syncSecretsBitBucket = async ({
integration,
secrets,
accessToken
@@ -4832,7 +4832,7 @@ export const syncIntegrationSecrets = async ({
});
break;
case Integrations.BITBUCKET:
await syncSecretsBitbucket({
await syncSecretsBitBucket({
integration,
secrets,
accessToken

View File

@@ -64,7 +64,7 @@ type ExchangeCodeGitlabResponse = {
created_at: number;
};
type ExchangeCodeBitbucketResponse = {
type ExchangeCodeBitBucketResponse = {
access_token: string;
token_type: string;
expires_in: number;
@@ -392,10 +392,10 @@ const exchangeCodeGitlab = async ({ code, url }: { code: string; url?: string })
};
/**
* Return [accessToken], [accessExpiresAt], and [refreshToken] for Bitbucket
* Return [accessToken], [accessExpiresAt], and [refreshToken] for BitBucket
* code-token exchange
*/
const exchangeCodeBitbucket = async ({ code }: { code: string }) => {
const exchangeCodeBitBucket = async ({ code }: { code: string }) => {
const accessExpiresAt = new Date();
const appCfg = getConfig();
if (!appCfg.CLIENT_SECRET_BITBUCKET || !appCfg.CLIENT_ID_BITBUCKET) {
@@ -403,7 +403,7 @@ const exchangeCodeBitbucket = async ({ code }: { code: string }) => {
}
const res = (
await request.post<ExchangeCodeBitbucketResponse>(
await request.post<ExchangeCodeBitBucketResponse>(
IntegrationUrls.BITBUCKET_TOKEN_URL,
new URLSearchParams({
grant_type: "authorization_code",
@@ -490,7 +490,7 @@ export const exchangeCode = async ({
url
});
case Integrations.BITBUCKET:
return exchangeCodeBitbucket({
return exchangeCodeBitBucket({
code
});
default:
@@ -524,7 +524,7 @@ type RefreshTokenGitLabResponse = {
created_at: number;
};
type RefreshTokenBitbucketResponse = {
type RefreshTokenBitBucketResponse = {
access_token: string;
token_type: string;
expires_in: number;
@@ -653,9 +653,9 @@ const exchangeRefreshGitLab = async ({ refreshToken, url }: { url?: string | nul
/**
* Return new access token by exchanging refresh token [refreshToken] for the
* Bitbucket integration
* BitBucket integration
*/
const exchangeRefreshBitbucket = async ({ refreshToken }: { refreshToken: string }) => {
const exchangeRefreshBitBucket = async ({ refreshToken }: { refreshToken: string }) => {
const accessExpiresAt = new Date();
const appCfg = getConfig();
if (!appCfg.CLIENT_SECRET_BITBUCKET || !appCfg.CLIENT_ID_BITBUCKET) {
@@ -664,7 +664,7 @@ const exchangeRefreshBitbucket = async ({ refreshToken }: { refreshToken: string
const {
data
}: {
data: RefreshTokenBitbucketResponse;
data: RefreshTokenBitBucketResponse;
} = await request.post(
IntegrationUrls.BITBUCKET_TOKEN_URL,
new URLSearchParams({
@@ -794,7 +794,7 @@ export const exchangeRefresh = async (
url
});
case Integrations.BITBUCKET:
return exchangeRefreshBitbucket({
return exchangeRefreshBitBucket({
refreshToken
});
case Integrations.GCP_SECRET_MANAGER:

View File

@@ -108,22 +108,22 @@ export const orgMembershipDALFactory = (db: TDbClient) => {
const now = new Date();
const oneWeekAgo = new Date(now.getTime() - 7 * 24 * 60 * 60 * 1000);
const oneMonthAgo = new Date(now.getTime() - 30 * 24 * 60 * 60 * 1000);
const twelveMonthsAgo = new Date(now.getTime() - 360 * 24 * 60 * 60 * 1000);
const threeMonthsAgo = new Date(now.getTime() - 90 * 24 * 60 * 60 * 1000);
const memberships = await db
.replicaNode()(TableName.OrgMembership)
.where("status", "invited")
.where((qb) => {
// lastInvitedAt is null AND createdAt is between 1 week and 12 months ago
// lastInvitedAt is null AND createdAt is between 1 week and 3 months ago
void qb
.whereNull(`${TableName.OrgMembership}.lastInvitedAt`)
.whereBetween(`${TableName.OrgMembership}.createdAt`, [twelveMonthsAgo, oneWeekAgo]);
.whereBetween(`${TableName.OrgMembership}.createdAt`, [threeMonthsAgo, oneWeekAgo]);
})
.orWhere((qb) => {
// lastInvitedAt is older than 1 week ago AND createdAt is younger than 1 month ago
void qb
.where(`${TableName.OrgMembership}.lastInvitedAt`, "<", oneWeekAgo)
.where(`${TableName.OrgMembership}.createdAt`, ">", oneMonthAgo);
.where(`${TableName.OrgMembership}.lastInvitedAt`, "<", oneMonthAgo)
.where(`${TableName.OrgMembership}.createdAt`, ">", oneWeekAgo);
});
return memberships;
@@ -135,22 +135,9 @@ export const orgMembershipDALFactory = (db: TDbClient) => {
}
};
const updateLastInvitedAtByIds = async (membershipIds: string[]) => {
try {
if (membershipIds.length === 0) return;
await db(TableName.OrgMembership).whereIn("id", membershipIds).update({ lastInvitedAt: new Date() });
} catch (error) {
throw new DatabaseError({
error,
name: "Update last invited at by ids"
});
}
};
return {
...orgMembershipOrm,
findOrgMembershipById,
findRecentInvitedMemberships,
updateLastInvitedAtByIds
findRecentInvitedMemberships
};
};

View File

@@ -36,8 +36,6 @@ import { getConfig } from "@app/lib/config/env";
import { generateAsymmetricKeyPair } from "@app/lib/crypto";
import { generateSymmetricKey, infisicalSymmetricDecrypt, infisicalSymmetricEncypt } from "@app/lib/crypto/encryption";
import { generateUserSrpKeys } from "@app/lib/crypto/srp";
import { applyJitter } from "@app/lib/dates";
import { delay as delayMs } from "@app/lib/delay";
import {
BadRequestError,
ForbiddenRequestError,
@@ -46,10 +44,9 @@ import {
UnauthorizedError
} from "@app/lib/errors";
import { groupBy } from "@app/lib/fn";
import { logger } from "@app/lib/logger";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { isDisposableEmail } from "@app/lib/validator";
import { QueueName, TQueueServiceFactory } from "@app/queue";
import { TQueueServiceFactory } from "@app/queue";
import { getDefaultOrgMembershipRoleForUpdateOrg } from "@app/services/org/org-role-fns";
import { TOrgMembershipDALFactory } from "@app/services/org-membership/org-membership-dal";
import { TUserAliasDALFactory } from "@app/services/user-alias/user-alias-dal";
@@ -112,12 +109,7 @@ type TOrgServiceFactoryDep = {
projectKeyDAL: Pick<TProjectKeyDALFactory, "find" | "delete" | "insertMany" | "findLatestProjectKey" | "create">;
orgMembershipDAL: Pick<
TOrgMembershipDALFactory,
| "findOrgMembershipById"
| "findOne"
| "findById"
| "findRecentInvitedMemberships"
| "updateById"
| "updateLastInvitedAtByIds"
"findOrgMembershipById" | "findOne" | "findById" | "findRecentInvitedMemberships" | "updateById"
>;
incidentContactDAL: TIncidentContactsDALFactory;
samlConfigDAL: Pick<TSamlConfigDALFactory, "findOne">;
@@ -771,10 +763,6 @@ export const orgServiceFactory = ({
}
});
await orgMembershipDAL.updateById(inviteeOrgMembership.id, {
lastInvitedAt: new Date()
});
return { signupToken: undefined };
};
@@ -1441,13 +1429,10 @@ export const orgServiceFactory = ({
* Re-send emails to users who haven't accepted an invite yet
*/
const notifyInvitedUsers = async () => {
logger.info(`${QueueName.DailyResourceCleanUp}: notify invited users started`);
const invitedUsers = await orgMembershipDAL.findRecentInvitedMemberships();
const appCfg = getConfig();
const orgCache: Record<string, { name: string; id: string } | undefined> = {};
const notifiedUsers: string[] = [];
await Promise.all(
invitedUsers.map(async (invitedUser) => {
@@ -1466,32 +1451,25 @@ export const orgServiceFactory = ({
});
if (invitedUser.inviteEmail) {
await delayMs(Math.max(0, applyJitter(0, 2000)));
try {
await smtpService.sendMail({
template: SmtpTemplates.OrgInvite,
subjectLine: `Reminder: You have been invited to ${org.name} on Infisical`,
recipients: [invitedUser.inviteEmail],
substitutions: {
organizationName: org.name,
email: invitedUser.inviteEmail,
organizationId: org.id.toString(),
token,
callback_url: `${appCfg.SITE_URL}/signupinvite`
}
});
notifiedUsers.push(invitedUser.id);
} catch (err) {
logger.error(err, `${QueueName.DailyResourceCleanUp}: notify invited users failed to send email`);
}
await smtpService.sendMail({
template: SmtpTemplates.OrgInvite,
subjectLine: `Reminder: You have been invited to ${org.name} on Infisical`,
recipients: [invitedUser.inviteEmail],
substitutions: {
organizationName: org.name,
email: invitedUser.inviteEmail,
organizationId: org.id.toString(),
token,
callback_url: `${appCfg.SITE_URL}/signupinvite`
}
});
}
await orgMembershipDAL.updateById(invitedUser.id, {
lastInvitedAt: new Date()
});
})
);
await orgMembershipDAL.updateLastInvitedAtByIds(notifiedUsers);
logger.info(`${QueueName.DailyResourceCleanUp}: notify invited users completed`);
};
return {

View File

@@ -214,7 +214,7 @@ export const secretFolderServiceFactory = ({
}
},
message: "Folder created",
folderId: parentFolder.id,
folderId: doc.id,
changes: [
{
type: CommitType.ADD,

View File

@@ -1,10 +0,0 @@
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
import { SecretSync } from "@app/services/secret-sync/secret-sync-enums";
import { TSecretSyncListItem } from "@app/services/secret-sync/secret-sync-types";
// Catalog entry registering Railway as a secret-sync destination;
// canImportSecrets enables pulling existing Railway variables into Infisical.
export const RAILWAY_SYNC_LIST_OPTION: TSecretSyncListItem = {
  name: "Railway",
  destination: SecretSync.Railway,
  connection: AppConnection.Railway,
  canImportSecrets: true
};

View File

@@ -1,124 +0,0 @@
/* eslint-disable @typescript-eslint/no-unsafe-member-access */
/* eslint-disable @typescript-eslint/no-unsafe-assignment */
import { RailwayPublicAPI } from "@app/services/app-connection/railway/railway-connection-public-client";
import { matchesSchema } from "@app/services/secret-sync/secret-sync-fns";
import { SecretSyncError } from "../secret-sync-errors";
import { TSecretMap } from "../secret-sync-types";
import { TRailwaySyncWithCredentials } from "./railway-sync-types";
export const RailwaySyncFns = {
  /**
   * Imports the variables of the configured Railway project/environment
   * (optionally scoped to a service) as a secret map.
   *
   * Railway-managed variables (keys prefixed with "RAILWAY_") are injected
   * by the platform and are never imported.
   *
   * @throws SecretSyncError when the Railway API call fails
   */
  async getSecrets(secretSync: TRailwaySyncWithCredentials): Promise<TSecretMap> {
    try {
      const config = secretSync.destinationConfig;

      const variables = await RailwayPublicAPI.getVariables(secretSync.connection, {
        projectId: config.projectId,
        environmentId: config.environmentId,
        serviceId: config.serviceId || undefined
      });

      const entries = {} as TSecretMap;

      for (const [key, value] of Object.entries(variables)) {
        // Skip importing private railway variables
        if (!key.startsWith("RAILWAY_")) {
          entries[key] = {
            value
          };
        }
      }

      return entries;
    } catch (error) {
      throw new SecretSyncError({
        error,
        message: "Failed to import secrets from Railway"
      });
    }
  },

  /**
   * Pushes the secret map to Railway, then (unless deletion is disabled)
   * removes Railway variables that match the key schema but are no longer
   * present in the map. API calls are made sequentially on purpose — a plain
   * for…of with await (the previous `for await…of` over a synchronous array
   * was an idiom misuse with identical semantics).
   *
   * @throws SecretSyncError tagged with the offending secretKey
   */
  async syncSecrets(secretSync: TRailwaySyncWithCredentials, secretMap: TSecretMap) {
    const {
      environment,
      syncOptions: { disableSecretDeletion, keySchema }
    } = secretSync;
    const railwaySecrets = await this.getSecrets(secretSync);
    const config = secretSync.destinationConfig;

    for (const key of Object.keys(secretMap)) {
      try {
        const existing = railwaySecrets[key];

        // Only write when the value is new or has actually changed.
        if (existing === undefined || existing.value !== secretMap[key].value) {
          // eslint-disable-next-line no-await-in-loop -- sequential on purpose (API rate limits)
          await RailwayPublicAPI.upsertVariable(secretSync.connection, {
            input: {
              projectId: config.projectId,
              environmentId: config.environmentId,
              serviceId: config.serviceId || undefined,
              name: key,
              value: secretMap[key].value ?? ""
            }
          });
        }
      } catch (error) {
        throw new SecretSyncError({
          error,
          secretKey: key
        });
      }
    }

    if (disableSecretDeletion) return;

    for (const key of Object.keys(railwaySecrets)) {
      try {
        // Only delete keys this sync owns (schema match) that were removed from the map.
        const isManagedKey = matchesSchema(key, environment?.slug || "", keySchema);

        if (isManagedKey && !secretMap[key]) {
          // eslint-disable-next-line no-await-in-loop -- sequential on purpose (API rate limits)
          await RailwayPublicAPI.deleteVariable(secretSync.connection, {
            input: {
              projectId: config.projectId,
              environmentId: config.environmentId,
              serviceId: config.serviceId || undefined,
              name: key
            }
          });
        }
      } catch (error) {
        throw new SecretSyncError({
          error,
          secretKey: key
        });
      }
    }
  },

  /**
   * Deletes from Railway every variable whose key appears in the given
   * secret map (used when a sync destination is removed).
   *
   * @throws SecretSyncError tagged with the offending secretKey
   */
  async removeSecrets(secretSync: TRailwaySyncWithCredentials, secretMap: TSecretMap) {
    const existing = await this.getSecrets(secretSync);
    const config = secretSync.destinationConfig;

    for (const secret of Object.keys(existing)) {
      try {
        if (secret in secretMap) {
          // eslint-disable-next-line no-await-in-loop -- sequential on purpose (API rate limits)
          await RailwayPublicAPI.deleteVariable(secretSync.connection, {
            input: {
              projectId: config.projectId,
              environmentId: config.environmentId,
              serviceId: config.serviceId || undefined,
              name: secret
            }
          });
        }
      } catch (error) {
        throw new SecretSyncError({
          error,
          secretKey: secret
        });
      }
    }
  }
};

Some files were not shown because too many files have changed in this diff Show More