mirror of
https://github.com/Infisical/infisical.git
synced 2025-07-11 12:11:38 +00:00
Compare commits
97 Commits
infisical/
...
server-adm
Author | SHA1 | Date | |
---|---|---|---|
7690d5852b | |||
b163c74a05 | |||
46a4c6b119 | |||
b03e9b70a2 | |||
f6e1808187 | |||
648cb20eb7 | |||
fedffea8d5 | |||
8917629b96 | |||
7de45ad220 | |||
5eb52edc52 | |||
d3d1fb7190 | |||
6531e5b942 | |||
4164b2f32a | |||
0ec56c9928 | |||
35520cfe99 | |||
ba0f6e60e2 | |||
579c68b2a3 | |||
f4ea3e1c75 | |||
7d37ea318f | |||
5cb7ecc354 | |||
5e85de3937 | |||
8719e3e75e | |||
69ece1f3e3 | |||
d5cd6f79f9 | |||
19c0731166 | |||
f636cc678b | |||
ff8ad14e1b | |||
d683d3adb3 | |||
d9b8cd1204 | |||
27b5e2aa68 | |||
692121445d | |||
d2098fda5f | |||
09d72d6da1 | |||
e33a3c281c | |||
a614b81a7a | |||
9a940dce64 | |||
7e523546b3 | |||
814d6e2709 | |||
c0b296ccd5 | |||
da82cfdf6b | |||
92147b5398 | |||
526e184bd9 | |||
9943312063 | |||
c2cefb2b0c | |||
7571c9b426 | |||
bf707667b5 | |||
d2e6743f22 | |||
9e896563ed | |||
64744d042d | |||
2648ac1c90 | |||
22ae1aeee4 | |||
cd13733621 | |||
0191eb48f3 | |||
9d39910152 | |||
c5a8786d1c | |||
9137fa4ca5 | |||
84687c0558 | |||
78da7ec343 | |||
a678ebb4ac | |||
83dd38db49 | |||
a0e8496256 | |||
00d4ae9fbd | |||
218338e5d2 | |||
456107fbf3 | |||
2003f5b671 | |||
d2c6bcc7a7 | |||
06bd593b60 | |||
aea43c0a8e | |||
06f5af1200 | |||
f903e5b3d4 | |||
c6f8915d3f | |||
65b1354ef1 | |||
cda8579ca4 | |||
5badb811e1 | |||
7f8b489724 | |||
8723a16913 | |||
b4593a2e11 | |||
1b1acdcb0b | |||
1bbf78e295 | |||
a8f08730a1 | |||
3b767a4deb | |||
18f5f5d04e | |||
6a6f08fc4d | |||
42648a134c | |||
8c6b903204 | |||
23b20ebdab | |||
37d490ede3 | |||
edecfb1f62 | |||
ae35a863bc | |||
62ad82f7b1 | |||
7ab67db84d | |||
3a17281e37 | |||
abfe185a5b | |||
ba57899a56 | |||
13d2cbd8b0 | |||
abfc5736fd | |||
aef3a7436f |
@ -23,7 +23,7 @@ REDIS_URL=redis://redis:6379
|
|||||||
# Required
|
# Required
|
||||||
SITE_URL=http://localhost:8080
|
SITE_URL=http://localhost:8080
|
||||||
|
|
||||||
# Mail/SMTP
|
# Mail/SMTP
|
||||||
SMTP_HOST=
|
SMTP_HOST=
|
||||||
SMTP_PORT=
|
SMTP_PORT=
|
||||||
SMTP_FROM_ADDRESS=
|
SMTP_FROM_ADDRESS=
|
||||||
@ -132,3 +132,6 @@ DATADOG_PROFILING_ENABLED=
|
|||||||
DATADOG_ENV=
|
DATADOG_ENV=
|
||||||
DATADOG_SERVICE=
|
DATADOG_SERVICE=
|
||||||
DATADOG_HOSTNAME=
|
DATADOG_HOSTNAME=
|
||||||
|
|
||||||
|
# kubernetes
|
||||||
|
KUBERNETES_AUTO_FETCH_SERVICE_ACCOUNT_TOKEN=false
|
||||||
|
@ -19,7 +19,7 @@ WORKDIR /app
|
|||||||
|
|
||||||
# Copy dependencies
|
# Copy dependencies
|
||||||
COPY --from=frontend-dependencies /app/node_modules ./node_modules
|
COPY --from=frontend-dependencies /app/node_modules ./node_modules
|
||||||
# Copy all files
|
# Copy all files
|
||||||
COPY /frontend .
|
COPY /frontend .
|
||||||
|
|
||||||
ENV NODE_ENV production
|
ENV NODE_ENV production
|
||||||
@ -32,7 +32,7 @@ ENV VITE_INTERCOM_ID $INTERCOM_ID
|
|||||||
ARG INFISICAL_PLATFORM_VERSION
|
ARG INFISICAL_PLATFORM_VERSION
|
||||||
ENV VITE_INFISICAL_PLATFORM_VERSION $INFISICAL_PLATFORM_VERSION
|
ENV VITE_INFISICAL_PLATFORM_VERSION $INFISICAL_PLATFORM_VERSION
|
||||||
ARG CAPTCHA_SITE_KEY
|
ARG CAPTCHA_SITE_KEY
|
||||||
ENV VITE_CAPTCHA_SITE_KEY $CAPTCHA_SITE_KEY
|
ENV VITE_CAPTCHA_SITE_KEY $CAPTCHA_SITE_KEY
|
||||||
|
|
||||||
# Build
|
# Build
|
||||||
RUN npm run build
|
RUN npm run build
|
||||||
@ -134,7 +134,7 @@ RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/x86_64-li
|
|||||||
|
|
||||||
# Install Infisical CLI
|
# Install Infisical CLI
|
||||||
RUN curl -1sLf 'https://artifacts-cli.infisical.com/setup.deb.sh' | bash \
|
RUN curl -1sLf 'https://artifacts-cli.infisical.com/setup.deb.sh' | bash \
|
||||||
&& apt-get update && apt-get install -y infisical=0.41.2 \
|
&& apt-get update && apt-get install -y infisical=0.41.89 \
|
||||||
&& rm -rf /var/lib/apt/lists/*
|
&& rm -rf /var/lib/apt/lists/*
|
||||||
|
|
||||||
RUN groupadd -r -g 1001 nodejs && useradd -r -u 1001 -g nodejs non-root-user
|
RUN groupadd -r -g 1001 nodejs && useradd -r -u 1001 -g nodejs non-root-user
|
||||||
@ -155,7 +155,7 @@ ENV INTERCOM_ID=$INTERCOM_ID
|
|||||||
ARG CAPTCHA_SITE_KEY
|
ARG CAPTCHA_SITE_KEY
|
||||||
ENV CAPTCHA_SITE_KEY=$CAPTCHA_SITE_KEY
|
ENV CAPTCHA_SITE_KEY=$CAPTCHA_SITE_KEY
|
||||||
|
|
||||||
WORKDIR /
|
WORKDIR /
|
||||||
|
|
||||||
COPY --from=backend-runner /app /backend
|
COPY --from=backend-runner /app /backend
|
||||||
|
|
||||||
@ -166,9 +166,9 @@ ENV INFISICAL_PLATFORM_VERSION $INFISICAL_PLATFORM_VERSION
|
|||||||
|
|
||||||
ENV PORT 8080
|
ENV PORT 8080
|
||||||
ENV HOST=0.0.0.0
|
ENV HOST=0.0.0.0
|
||||||
ENV HTTPS_ENABLED false
|
ENV HTTPS_ENABLED false
|
||||||
ENV NODE_ENV production
|
ENV NODE_ENV production
|
||||||
ENV STANDALONE_BUILD true
|
ENV STANDALONE_BUILD true
|
||||||
ENV STANDALONE_MODE true
|
ENV STANDALONE_MODE true
|
||||||
ENV ChrystokiConfigurationPath=/usr/safenet/lunaclient/
|
ENV ChrystokiConfigurationPath=/usr/safenet/lunaclient/
|
||||||
ENV NODE_OPTIONS="--max-old-space-size=1024"
|
ENV NODE_OPTIONS="--max-old-space-size=1024"
|
||||||
|
@ -20,7 +20,7 @@ WORKDIR /app
|
|||||||
|
|
||||||
# Copy dependencies
|
# Copy dependencies
|
||||||
COPY --from=frontend-dependencies /app/node_modules ./node_modules
|
COPY --from=frontend-dependencies /app/node_modules ./node_modules
|
||||||
# Copy all files
|
# Copy all files
|
||||||
COPY /frontend .
|
COPY /frontend .
|
||||||
|
|
||||||
ENV NODE_ENV production
|
ENV NODE_ENV production
|
||||||
@ -33,7 +33,8 @@ ENV VITE_INTERCOM_ID $INTERCOM_ID
|
|||||||
ARG INFISICAL_PLATFORM_VERSION
|
ARG INFISICAL_PLATFORM_VERSION
|
||||||
ENV VITE_INFISICAL_PLATFORM_VERSION $INFISICAL_PLATFORM_VERSION
|
ENV VITE_INFISICAL_PLATFORM_VERSION $INFISICAL_PLATFORM_VERSION
|
||||||
ARG CAPTCHA_SITE_KEY
|
ARG CAPTCHA_SITE_KEY
|
||||||
ENV VITE_CAPTCHA_SITE_KEY $CAPTCHA_SITE_KEY
|
ENV VITE_CAPTCHA_SITE_KEY $CAPTCHA_SITE_KEY
|
||||||
|
ENV NODE_OPTIONS="--max-old-space-size=8192"
|
||||||
|
|
||||||
# Build
|
# Build
|
||||||
RUN npm run build
|
RUN npm run build
|
||||||
@ -77,6 +78,7 @@ RUN npm ci --only-production
|
|||||||
COPY /backend .
|
COPY /backend .
|
||||||
COPY --chown=non-root-user:nodejs standalone-entrypoint.sh standalone-entrypoint.sh
|
COPY --chown=non-root-user:nodejs standalone-entrypoint.sh standalone-entrypoint.sh
|
||||||
RUN npm i -D tsconfig-paths
|
RUN npm i -D tsconfig-paths
|
||||||
|
ENV NODE_OPTIONS="--max-old-space-size=8192"
|
||||||
RUN npm run build
|
RUN npm run build
|
||||||
|
|
||||||
# Production stage
|
# Production stage
|
||||||
@ -128,7 +130,7 @@ RUN apt-get update && apt-get install -y \
|
|||||||
|
|
||||||
# Install Infisical CLI
|
# Install Infisical CLI
|
||||||
RUN curl -1sLf 'https://artifacts-cli.infisical.com/setup.deb.sh' | bash \
|
RUN curl -1sLf 'https://artifacts-cli.infisical.com/setup.deb.sh' | bash \
|
||||||
&& apt-get update && apt-get install -y infisical=0.41.2 \
|
&& apt-get update && apt-get install -y infisical=0.41.89 \
|
||||||
&& rm -rf /var/lib/apt/lists/*
|
&& rm -rf /var/lib/apt/lists/*
|
||||||
|
|
||||||
WORKDIR /
|
WORKDIR /
|
||||||
@ -164,9 +166,9 @@ ENV INFISICAL_PLATFORM_VERSION $INFISICAL_PLATFORM_VERSION
|
|||||||
|
|
||||||
ENV PORT 8080
|
ENV PORT 8080
|
||||||
ENV HOST=0.0.0.0
|
ENV HOST=0.0.0.0
|
||||||
ENV HTTPS_ENABLED false
|
ENV HTTPS_ENABLED false
|
||||||
ENV NODE_ENV production
|
ENV NODE_ENV production
|
||||||
ENV STANDALONE_BUILD true
|
ENV STANDALONE_BUILD true
|
||||||
ENV STANDALONE_MODE true
|
ENV STANDALONE_MODE true
|
||||||
ENV NODE_OPTIONS="--max-old-space-size=1024"
|
ENV NODE_OPTIONS="--max-old-space-size=1024"
|
||||||
|
|
||||||
|
@ -9,7 +9,7 @@ RUN apt-get update && apt-get install -y \
|
|||||||
make \
|
make \
|
||||||
g++ \
|
g++ \
|
||||||
openssh-client \
|
openssh-client \
|
||||||
openssl
|
openssl
|
||||||
|
|
||||||
# Install dependencies for TDS driver (required for SAP ASE dynamic secrets)
|
# Install dependencies for TDS driver (required for SAP ASE dynamic secrets)
|
||||||
RUN apt-get install -y \
|
RUN apt-get install -y \
|
||||||
@ -55,10 +55,10 @@ COPY --from=build /app .
|
|||||||
# Install Infisical CLI
|
# Install Infisical CLI
|
||||||
RUN apt-get install -y curl bash && \
|
RUN apt-get install -y curl bash && \
|
||||||
curl -1sLf 'https://artifacts-cli.infisical.com/setup.deb.sh' | bash && \
|
curl -1sLf 'https://artifacts-cli.infisical.com/setup.deb.sh' | bash && \
|
||||||
apt-get update && apt-get install -y infisical=0.41.2 git
|
apt-get update && apt-get install -y infisical=0.41.89 git
|
||||||
|
|
||||||
HEALTHCHECK --interval=10s --timeout=3s --start-period=10s \
|
HEALTHCHECK --interval=10s --timeout=3s --start-period=10s \
|
||||||
CMD node healthcheck.js
|
CMD node healthcheck.js
|
||||||
|
|
||||||
ENV HOST=0.0.0.0
|
ENV HOST=0.0.0.0
|
||||||
|
|
||||||
|
@ -57,7 +57,7 @@ RUN mkdir -p /etc/softhsm2/tokens && \
|
|||||||
# Install Infisical CLI
|
# Install Infisical CLI
|
||||||
RUN curl -1sLf 'https://artifacts-cli.infisical.com/setup.deb.sh' | bash && \
|
RUN curl -1sLf 'https://artifacts-cli.infisical.com/setup.deb.sh' | bash && \
|
||||||
apt-get update && \
|
apt-get update && \
|
||||||
apt-get install -y infisical=0.41.2
|
apt-get install -y infisical=0.41.89
|
||||||
|
|
||||||
WORKDIR /app
|
WORKDIR /app
|
||||||
|
|
||||||
|
@ -52,7 +52,7 @@ RUN apt-get install -y opensc
|
|||||||
|
|
||||||
RUN mkdir -p /etc/softhsm2/tokens && \
|
RUN mkdir -p /etc/softhsm2/tokens && \
|
||||||
softhsm2-util --init-token --slot 0 --label "auth-app" --pin 1234 --so-pin 0000
|
softhsm2-util --init-token --slot 0 --label "auth-app" --pin 1234 --so-pin 0000
|
||||||
|
|
||||||
WORKDIR /openssl-build
|
WORKDIR /openssl-build
|
||||||
RUN wget https://www.openssl.org/source/openssl-3.1.2.tar.gz \
|
RUN wget https://www.openssl.org/source/openssl-3.1.2.tar.gz \
|
||||||
&& tar -xf openssl-3.1.2.tar.gz \
|
&& tar -xf openssl-3.1.2.tar.gz \
|
||||||
@ -66,7 +66,7 @@ RUN wget https://www.openssl.org/source/openssl-3.1.2.tar.gz \
|
|||||||
# Install Infisical CLI
|
# Install Infisical CLI
|
||||||
RUN curl -1sLf 'https://artifacts-cli.infisical.com/setup.deb.sh' | bash && \
|
RUN curl -1sLf 'https://artifacts-cli.infisical.com/setup.deb.sh' | bash && \
|
||||||
apt-get update && \
|
apt-get update && \
|
||||||
apt-get install -y infisical=0.41.2
|
apt-get install -y infisical=0.41.89
|
||||||
|
|
||||||
WORKDIR /app
|
WORKDIR /app
|
||||||
|
|
||||||
|
@ -4,6 +4,7 @@ import "ts-node/register";
|
|||||||
import dotenv from "dotenv";
|
import dotenv from "dotenv";
|
||||||
import type { Knex } from "knex";
|
import type { Knex } from "knex";
|
||||||
import path from "path";
|
import path from "path";
|
||||||
|
import { initLogger } from "@app/lib/logger";
|
||||||
|
|
||||||
// Update with your config settings. .
|
// Update with your config settings. .
|
||||||
dotenv.config({
|
dotenv.config({
|
||||||
@ -13,6 +14,8 @@ dotenv.config({
|
|||||||
path: path.join(__dirname, "../../../.env")
|
path: path.join(__dirname, "../../../.env")
|
||||||
});
|
});
|
||||||
|
|
||||||
|
initLogger();
|
||||||
|
|
||||||
export default {
|
export default {
|
||||||
development: {
|
development: {
|
||||||
client: "postgres",
|
client: "postgres",
|
||||||
|
@ -0,0 +1,21 @@
|
|||||||
|
import { Knex } from "knex";
|
||||||
|
|
||||||
|
import { TableName } from "../schemas";
|
||||||
|
|
||||||
|
export async function up(knex: Knex): Promise<void> {
|
||||||
|
const hasColumn = await knex.schema.hasColumn(TableName.OrgMembership, "lastInvitedAt");
|
||||||
|
if (hasColumn) {
|
||||||
|
await knex.schema.alterTable(TableName.OrgMembership, (t) => {
|
||||||
|
t.datetime("lastInvitedAt").nullable().defaultTo(knex.fn.now()).alter();
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function down(knex: Knex): Promise<void> {
|
||||||
|
const hasColumn = await knex.schema.hasColumn(TableName.OrgMembership, "lastInvitedAt");
|
||||||
|
if (hasColumn) {
|
||||||
|
await knex.schema.alterTable(TableName.OrgMembership, (t) => {
|
||||||
|
t.datetime("lastInvitedAt").nullable().alter();
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
@ -0,0 +1,46 @@
|
|||||||
|
import { Knex } from "knex";
|
||||||
|
|
||||||
|
import { TableName } from "../schemas";
|
||||||
|
|
||||||
|
const MIGRATION_TIMEOUT = 30 * 60 * 1000; // 30 minutes
|
||||||
|
|
||||||
|
export async function up(knex: Knex): Promise<void> {
|
||||||
|
const result = await knex.raw("SHOW statement_timeout");
|
||||||
|
const originalTimeout = result.rows[0].statement_timeout;
|
||||||
|
|
||||||
|
try {
|
||||||
|
await knex.raw(`SET statement_timeout = ${MIGRATION_TIMEOUT}`);
|
||||||
|
|
||||||
|
// iat means IdentityAccessToken
|
||||||
|
await knex.raw(`
|
||||||
|
CREATE INDEX IF NOT EXISTS idx_iat_identity_id
|
||||||
|
ON ${TableName.IdentityAccessToken} ("identityId")
|
||||||
|
`);
|
||||||
|
|
||||||
|
await knex.raw(`
|
||||||
|
CREATE INDEX IF NOT EXISTS idx_iat_ua_client_secret_id
|
||||||
|
ON ${TableName.IdentityAccessToken} ("identityUAClientSecretId")
|
||||||
|
`);
|
||||||
|
} finally {
|
||||||
|
await knex.raw(`SET statement_timeout = '${originalTimeout}'`);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function down(knex: Knex): Promise<void> {
|
||||||
|
const result = await knex.raw("SHOW statement_timeout");
|
||||||
|
const originalTimeout = result.rows[0].statement_timeout;
|
||||||
|
|
||||||
|
try {
|
||||||
|
await knex.raw(`SET statement_timeout = ${MIGRATION_TIMEOUT}`);
|
||||||
|
|
||||||
|
await knex.raw(`
|
||||||
|
DROP INDEX IF EXISTS idx_iat_identity_id
|
||||||
|
`);
|
||||||
|
|
||||||
|
await knex.raw(`
|
||||||
|
DROP INDEX IF EXISTS idx_iat_ua_client_secret_id
|
||||||
|
`);
|
||||||
|
} finally {
|
||||||
|
await knex.raw(`SET statement_timeout = '${originalTimeout}'`);
|
||||||
|
}
|
||||||
|
}
|
@ -17,6 +17,7 @@ import { z } from "zod";
|
|||||||
import { LdapGroupMapsSchema } from "@app/db/schemas";
|
import { LdapGroupMapsSchema } from "@app/db/schemas";
|
||||||
import { TLDAPConfig } from "@app/ee/services/ldap-config/ldap-config-types";
|
import { TLDAPConfig } from "@app/ee/services/ldap-config/ldap-config-types";
|
||||||
import { isValidLdapFilter, searchGroups } from "@app/ee/services/ldap-config/ldap-fns";
|
import { isValidLdapFilter, searchGroups } from "@app/ee/services/ldap-config/ldap-fns";
|
||||||
|
import { ApiDocsTags, LdapSso } from "@app/lib/api-docs";
|
||||||
import { getConfig } from "@app/lib/config/env";
|
import { getConfig } from "@app/lib/config/env";
|
||||||
import { BadRequestError } from "@app/lib/errors";
|
import { BadRequestError } from "@app/lib/errors";
|
||||||
import { logger } from "@app/lib/logger";
|
import { logger } from "@app/lib/logger";
|
||||||
@ -132,10 +133,18 @@ export const registerLdapRouter = async (server: FastifyZodProvider) => {
|
|||||||
config: {
|
config: {
|
||||||
rateLimit: readLimit
|
rateLimit: readLimit
|
||||||
},
|
},
|
||||||
onRequest: verifyAuth([AuthMode.JWT]),
|
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||||
schema: {
|
schema: {
|
||||||
|
hide: false,
|
||||||
|
tags: [ApiDocsTags.LdapSso],
|
||||||
|
description: "Get LDAP config",
|
||||||
|
security: [
|
||||||
|
{
|
||||||
|
bearerAuth: []
|
||||||
|
}
|
||||||
|
],
|
||||||
querystring: z.object({
|
querystring: z.object({
|
||||||
organizationId: z.string().trim()
|
organizationId: z.string().trim().describe(LdapSso.GET_CONFIG.organizationId)
|
||||||
}),
|
}),
|
||||||
response: {
|
response: {
|
||||||
200: z.object({
|
200: z.object({
|
||||||
@ -172,23 +181,32 @@ export const registerLdapRouter = async (server: FastifyZodProvider) => {
|
|||||||
config: {
|
config: {
|
||||||
rateLimit: writeLimit
|
rateLimit: writeLimit
|
||||||
},
|
},
|
||||||
onRequest: verifyAuth([AuthMode.JWT]),
|
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||||
schema: {
|
schema: {
|
||||||
|
hide: false,
|
||||||
|
tags: [ApiDocsTags.LdapSso],
|
||||||
|
description: "Create LDAP config",
|
||||||
|
security: [
|
||||||
|
{
|
||||||
|
bearerAuth: []
|
||||||
|
}
|
||||||
|
],
|
||||||
body: z.object({
|
body: z.object({
|
||||||
organizationId: z.string().trim(),
|
organizationId: z.string().trim().describe(LdapSso.CREATE_CONFIG.organizationId),
|
||||||
isActive: z.boolean(),
|
isActive: z.boolean().describe(LdapSso.CREATE_CONFIG.isActive),
|
||||||
url: z.string().trim(),
|
url: z.string().trim().describe(LdapSso.CREATE_CONFIG.url),
|
||||||
bindDN: z.string().trim(),
|
bindDN: z.string().trim().describe(LdapSso.CREATE_CONFIG.bindDN),
|
||||||
bindPass: z.string().trim(),
|
bindPass: z.string().trim().describe(LdapSso.CREATE_CONFIG.bindPass),
|
||||||
uniqueUserAttribute: z.string().trim().default("uidNumber"),
|
uniqueUserAttribute: z.string().trim().default("uidNumber").describe(LdapSso.CREATE_CONFIG.uniqueUserAttribute),
|
||||||
searchBase: z.string().trim(),
|
searchBase: z.string().trim().describe(LdapSso.CREATE_CONFIG.searchBase),
|
||||||
searchFilter: z.string().trim().default("(uid={{username}})"),
|
searchFilter: z.string().trim().default("(uid={{username}})").describe(LdapSso.CREATE_CONFIG.searchFilter),
|
||||||
groupSearchBase: z.string().trim(),
|
groupSearchBase: z.string().trim().describe(LdapSso.CREATE_CONFIG.groupSearchBase),
|
||||||
groupSearchFilter: z
|
groupSearchFilter: z
|
||||||
.string()
|
.string()
|
||||||
.trim()
|
.trim()
|
||||||
.default("(|(memberUid={{.Username}})(member={{.UserDN}})(uniqueMember={{.UserDN}}))"),
|
.default("(|(memberUid={{.Username}})(member={{.UserDN}})(uniqueMember={{.UserDN}}))")
|
||||||
caCert: z.string().trim().default("")
|
.describe(LdapSso.CREATE_CONFIG.groupSearchFilter),
|
||||||
|
caCert: z.string().trim().default("").describe(LdapSso.CREATE_CONFIG.caCert)
|
||||||
}),
|
}),
|
||||||
response: {
|
response: {
|
||||||
200: SanitizedLdapConfigSchema
|
200: SanitizedLdapConfigSchema
|
||||||
@ -214,23 +232,31 @@ export const registerLdapRouter = async (server: FastifyZodProvider) => {
|
|||||||
config: {
|
config: {
|
||||||
rateLimit: writeLimit
|
rateLimit: writeLimit
|
||||||
},
|
},
|
||||||
onRequest: verifyAuth([AuthMode.JWT]),
|
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||||
schema: {
|
schema: {
|
||||||
|
hide: false,
|
||||||
|
tags: [ApiDocsTags.LdapSso],
|
||||||
|
description: "Update LDAP config",
|
||||||
|
security: [
|
||||||
|
{
|
||||||
|
bearerAuth: []
|
||||||
|
}
|
||||||
|
],
|
||||||
body: z
|
body: z
|
||||||
.object({
|
.object({
|
||||||
isActive: z.boolean(),
|
isActive: z.boolean().describe(LdapSso.UPDATE_CONFIG.isActive),
|
||||||
url: z.string().trim(),
|
url: z.string().trim().describe(LdapSso.UPDATE_CONFIG.url),
|
||||||
bindDN: z.string().trim(),
|
bindDN: z.string().trim().describe(LdapSso.UPDATE_CONFIG.bindDN),
|
||||||
bindPass: z.string().trim(),
|
bindPass: z.string().trim().describe(LdapSso.UPDATE_CONFIG.bindPass),
|
||||||
uniqueUserAttribute: z.string().trim(),
|
uniqueUserAttribute: z.string().trim().describe(LdapSso.UPDATE_CONFIG.uniqueUserAttribute),
|
||||||
searchBase: z.string().trim(),
|
searchBase: z.string().trim().describe(LdapSso.UPDATE_CONFIG.searchBase),
|
||||||
searchFilter: z.string().trim(),
|
searchFilter: z.string().trim().describe(LdapSso.UPDATE_CONFIG.searchFilter),
|
||||||
groupSearchBase: z.string().trim(),
|
groupSearchBase: z.string().trim().describe(LdapSso.UPDATE_CONFIG.groupSearchBase),
|
||||||
groupSearchFilter: z.string().trim(),
|
groupSearchFilter: z.string().trim().describe(LdapSso.UPDATE_CONFIG.groupSearchFilter),
|
||||||
caCert: z.string().trim()
|
caCert: z.string().trim().describe(LdapSso.UPDATE_CONFIG.caCert)
|
||||||
})
|
})
|
||||||
.partial()
|
.partial()
|
||||||
.merge(z.object({ organizationId: z.string() })),
|
.merge(z.object({ organizationId: z.string().trim().describe(LdapSso.UPDATE_CONFIG.organizationId) })),
|
||||||
response: {
|
response: {
|
||||||
200: SanitizedLdapConfigSchema
|
200: SanitizedLdapConfigSchema
|
||||||
}
|
}
|
||||||
|
@ -13,6 +13,7 @@ import { z } from "zod";
|
|||||||
|
|
||||||
import { OidcConfigsSchema } from "@app/db/schemas";
|
import { OidcConfigsSchema } from "@app/db/schemas";
|
||||||
import { OIDCConfigurationType, OIDCJWTSignatureAlgorithm } from "@app/ee/services/oidc/oidc-config-types";
|
import { OIDCConfigurationType, OIDCJWTSignatureAlgorithm } from "@app/ee/services/oidc/oidc-config-types";
|
||||||
|
import { ApiDocsTags, OidcSSo } from "@app/lib/api-docs";
|
||||||
import { getConfig } from "@app/lib/config/env";
|
import { getConfig } from "@app/lib/config/env";
|
||||||
import { authRateLimit, readLimit, writeLimit } from "@app/server/config/rateLimiter";
|
import { authRateLimit, readLimit, writeLimit } from "@app/server/config/rateLimiter";
|
||||||
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
|
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
|
||||||
@ -153,10 +154,18 @@ export const registerOidcRouter = async (server: FastifyZodProvider) => {
|
|||||||
config: {
|
config: {
|
||||||
rateLimit: readLimit
|
rateLimit: readLimit
|
||||||
},
|
},
|
||||||
onRequest: verifyAuth([AuthMode.JWT]),
|
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||||
schema: {
|
schema: {
|
||||||
|
hide: false,
|
||||||
|
tags: [ApiDocsTags.OidcSso],
|
||||||
|
description: "Get OIDC config",
|
||||||
|
security: [
|
||||||
|
{
|
||||||
|
bearerAuth: []
|
||||||
|
}
|
||||||
|
],
|
||||||
querystring: z.object({
|
querystring: z.object({
|
||||||
orgSlug: z.string().trim()
|
organizationId: z.string().trim().describe(OidcSSo.GET_CONFIG.organizationId)
|
||||||
}),
|
}),
|
||||||
response: {
|
response: {
|
||||||
200: SanitizedOidcConfigSchema.pick({
|
200: SanitizedOidcConfigSchema.pick({
|
||||||
@ -180,9 +189,8 @@ export const registerOidcRouter = async (server: FastifyZodProvider) => {
|
|||||||
}
|
}
|
||||||
},
|
},
|
||||||
handler: async (req) => {
|
handler: async (req) => {
|
||||||
const { orgSlug } = req.query;
|
|
||||||
const oidc = await server.services.oidc.getOidc({
|
const oidc = await server.services.oidc.getOidc({
|
||||||
orgSlug,
|
organizationId: req.query.organizationId,
|
||||||
type: "external",
|
type: "external",
|
||||||
actor: req.permission.type,
|
actor: req.permission.type,
|
||||||
actorId: req.permission.id,
|
actorId: req.permission.id,
|
||||||
@ -200,8 +208,16 @@ export const registerOidcRouter = async (server: FastifyZodProvider) => {
|
|||||||
config: {
|
config: {
|
||||||
rateLimit: writeLimit
|
rateLimit: writeLimit
|
||||||
},
|
},
|
||||||
onRequest: verifyAuth([AuthMode.JWT]),
|
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||||
schema: {
|
schema: {
|
||||||
|
hide: false,
|
||||||
|
tags: [ApiDocsTags.OidcSso],
|
||||||
|
description: "Update OIDC config",
|
||||||
|
security: [
|
||||||
|
{
|
||||||
|
bearerAuth: []
|
||||||
|
}
|
||||||
|
],
|
||||||
body: z
|
body: z
|
||||||
.object({
|
.object({
|
||||||
allowedEmailDomains: z
|
allowedEmailDomains: z
|
||||||
@ -216,22 +232,26 @@ export const registerOidcRouter = async (server: FastifyZodProvider) => {
|
|||||||
.split(",")
|
.split(",")
|
||||||
.map((id) => id.trim())
|
.map((id) => id.trim())
|
||||||
.join(", ");
|
.join(", ");
|
||||||
}),
|
})
|
||||||
discoveryURL: z.string().trim(),
|
.describe(OidcSSo.UPDATE_CONFIG.allowedEmailDomains),
|
||||||
configurationType: z.nativeEnum(OIDCConfigurationType),
|
discoveryURL: z.string().trim().describe(OidcSSo.UPDATE_CONFIG.discoveryURL),
|
||||||
issuer: z.string().trim(),
|
configurationType: z.nativeEnum(OIDCConfigurationType).describe(OidcSSo.UPDATE_CONFIG.configurationType),
|
||||||
authorizationEndpoint: z.string().trim(),
|
issuer: z.string().trim().describe(OidcSSo.UPDATE_CONFIG.issuer),
|
||||||
jwksUri: z.string().trim(),
|
authorizationEndpoint: z.string().trim().describe(OidcSSo.UPDATE_CONFIG.authorizationEndpoint),
|
||||||
tokenEndpoint: z.string().trim(),
|
jwksUri: z.string().trim().describe(OidcSSo.UPDATE_CONFIG.jwksUri),
|
||||||
userinfoEndpoint: z.string().trim(),
|
tokenEndpoint: z.string().trim().describe(OidcSSo.UPDATE_CONFIG.tokenEndpoint),
|
||||||
clientId: z.string().trim(),
|
userinfoEndpoint: z.string().trim().describe(OidcSSo.UPDATE_CONFIG.userinfoEndpoint),
|
||||||
clientSecret: z.string().trim(),
|
clientId: z.string().trim().describe(OidcSSo.UPDATE_CONFIG.clientId),
|
||||||
isActive: z.boolean(),
|
clientSecret: z.string().trim().describe(OidcSSo.UPDATE_CONFIG.clientSecret),
|
||||||
manageGroupMemberships: z.boolean().optional(),
|
isActive: z.boolean().describe(OidcSSo.UPDATE_CONFIG.isActive),
|
||||||
jwtSignatureAlgorithm: z.nativeEnum(OIDCJWTSignatureAlgorithm).optional()
|
manageGroupMemberships: z.boolean().optional().describe(OidcSSo.UPDATE_CONFIG.manageGroupMemberships),
|
||||||
|
jwtSignatureAlgorithm: z
|
||||||
|
.nativeEnum(OIDCJWTSignatureAlgorithm)
|
||||||
|
.optional()
|
||||||
|
.describe(OidcSSo.UPDATE_CONFIG.jwtSignatureAlgorithm)
|
||||||
})
|
})
|
||||||
.partial()
|
.partial()
|
||||||
.merge(z.object({ orgSlug: z.string() })),
|
.merge(z.object({ organizationId: z.string().describe(OidcSSo.UPDATE_CONFIG.organizationId) })),
|
||||||
response: {
|
response: {
|
||||||
200: SanitizedOidcConfigSchema.pick({
|
200: SanitizedOidcConfigSchema.pick({
|
||||||
id: true,
|
id: true,
|
||||||
@ -267,8 +287,16 @@ export const registerOidcRouter = async (server: FastifyZodProvider) => {
|
|||||||
config: {
|
config: {
|
||||||
rateLimit: writeLimit
|
rateLimit: writeLimit
|
||||||
},
|
},
|
||||||
onRequest: verifyAuth([AuthMode.JWT]),
|
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||||
schema: {
|
schema: {
|
||||||
|
hide: false,
|
||||||
|
tags: [ApiDocsTags.OidcSso],
|
||||||
|
description: "Create OIDC config",
|
||||||
|
security: [
|
||||||
|
{
|
||||||
|
bearerAuth: []
|
||||||
|
}
|
||||||
|
],
|
||||||
body: z
|
body: z
|
||||||
.object({
|
.object({
|
||||||
allowedEmailDomains: z
|
allowedEmailDomains: z
|
||||||
@ -283,23 +311,34 @@ export const registerOidcRouter = async (server: FastifyZodProvider) => {
|
|||||||
.split(",")
|
.split(",")
|
||||||
.map((id) => id.trim())
|
.map((id) => id.trim())
|
||||||
.join(", ");
|
.join(", ");
|
||||||
}),
|
})
|
||||||
configurationType: z.nativeEnum(OIDCConfigurationType),
|
.describe(OidcSSo.CREATE_CONFIG.allowedEmailDomains),
|
||||||
issuer: z.string().trim().optional().default(""),
|
configurationType: z.nativeEnum(OIDCConfigurationType).describe(OidcSSo.CREATE_CONFIG.configurationType),
|
||||||
discoveryURL: z.string().trim().optional().default(""),
|
issuer: z.string().trim().optional().default("").describe(OidcSSo.CREATE_CONFIG.issuer),
|
||||||
authorizationEndpoint: z.string().trim().optional().default(""),
|
discoveryURL: z.string().trim().optional().default("").describe(OidcSSo.CREATE_CONFIG.discoveryURL),
|
||||||
jwksUri: z.string().trim().optional().default(""),
|
authorizationEndpoint: z
|
||||||
tokenEndpoint: z.string().trim().optional().default(""),
|
.string()
|
||||||
userinfoEndpoint: z.string().trim().optional().default(""),
|
.trim()
|
||||||
clientId: z.string().trim(),
|
.optional()
|
||||||
clientSecret: z.string().trim(),
|
.default("")
|
||||||
isActive: z.boolean(),
|
.describe(OidcSSo.CREATE_CONFIG.authorizationEndpoint),
|
||||||
orgSlug: z.string().trim(),
|
jwksUri: z.string().trim().optional().default("").describe(OidcSSo.CREATE_CONFIG.jwksUri),
|
||||||
manageGroupMemberships: z.boolean().optional().default(false),
|
tokenEndpoint: z.string().trim().optional().default("").describe(OidcSSo.CREATE_CONFIG.tokenEndpoint),
|
||||||
|
userinfoEndpoint: z.string().trim().optional().default("").describe(OidcSSo.CREATE_CONFIG.userinfoEndpoint),
|
||||||
|
clientId: z.string().trim().describe(OidcSSo.CREATE_CONFIG.clientId),
|
||||||
|
clientSecret: z.string().trim().describe(OidcSSo.CREATE_CONFIG.clientSecret),
|
||||||
|
isActive: z.boolean().describe(OidcSSo.CREATE_CONFIG.isActive),
|
||||||
|
organizationId: z.string().trim().describe(OidcSSo.CREATE_CONFIG.organizationId),
|
||||||
|
manageGroupMemberships: z
|
||||||
|
.boolean()
|
||||||
|
.optional()
|
||||||
|
.default(false)
|
||||||
|
.describe(OidcSSo.CREATE_CONFIG.manageGroupMemberships),
|
||||||
jwtSignatureAlgorithm: z
|
jwtSignatureAlgorithm: z
|
||||||
.nativeEnum(OIDCJWTSignatureAlgorithm)
|
.nativeEnum(OIDCJWTSignatureAlgorithm)
|
||||||
.optional()
|
.optional()
|
||||||
.default(OIDCJWTSignatureAlgorithm.RS256)
|
.default(OIDCJWTSignatureAlgorithm.RS256)
|
||||||
|
.describe(OidcSSo.CREATE_CONFIG.jwtSignatureAlgorithm)
|
||||||
})
|
})
|
||||||
.superRefine((data, ctx) => {
|
.superRefine((data, ctx) => {
|
||||||
if (data.configurationType === OIDCConfigurationType.CUSTOM) {
|
if (data.configurationType === OIDCConfigurationType.CUSTOM) {
|
||||||
|
@ -13,6 +13,7 @@ import { FastifyRequest } from "fastify";
|
|||||||
import { z } from "zod";
|
import { z } from "zod";
|
||||||
|
|
||||||
import { SamlProviders, TGetSamlCfgDTO } from "@app/ee/services/saml-config/saml-config-types";
|
import { SamlProviders, TGetSamlCfgDTO } from "@app/ee/services/saml-config/saml-config-types";
|
||||||
|
import { ApiDocsTags, SamlSso } from "@app/lib/api-docs";
|
||||||
import { getConfig } from "@app/lib/config/env";
|
import { getConfig } from "@app/lib/config/env";
|
||||||
import { BadRequestError } from "@app/lib/errors";
|
import { BadRequestError } from "@app/lib/errors";
|
||||||
import { logger } from "@app/lib/logger";
|
import { logger } from "@app/lib/logger";
|
||||||
@ -149,8 +150,8 @@ export const registerSamlRouter = async (server: FastifyZodProvider) => {
|
|||||||
firstName,
|
firstName,
|
||||||
lastName: lastName as string,
|
lastName: lastName as string,
|
||||||
relayState: (req.body as { RelayState?: string }).RelayState,
|
relayState: (req.body as { RelayState?: string }).RelayState,
|
||||||
authProvider: (req as unknown as FastifyRequest).ssoConfig?.authProvider as string,
|
authProvider: (req as unknown as FastifyRequest).ssoConfig?.authProvider,
|
||||||
orgId: (req as unknown as FastifyRequest).ssoConfig?.orgId as string,
|
orgId: (req as unknown as FastifyRequest).ssoConfig?.orgId,
|
||||||
metadata: userMetadata
|
metadata: userMetadata
|
||||||
});
|
});
|
||||||
cb(null, { isUserCompleted, providerAuthToken });
|
cb(null, { isUserCompleted, providerAuthToken });
|
||||||
@ -262,25 +263,31 @@ export const registerSamlRouter = async (server: FastifyZodProvider) => {
|
|||||||
config: {
|
config: {
|
||||||
rateLimit: readLimit
|
rateLimit: readLimit
|
||||||
},
|
},
|
||||||
onRequest: verifyAuth([AuthMode.JWT]),
|
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||||
schema: {
|
schema: {
|
||||||
|
hide: false,
|
||||||
|
tags: [ApiDocsTags.SamlSso],
|
||||||
|
description: "Get SAML config",
|
||||||
|
security: [
|
||||||
|
{
|
||||||
|
bearerAuth: []
|
||||||
|
}
|
||||||
|
],
|
||||||
querystring: z.object({
|
querystring: z.object({
|
||||||
organizationId: z.string().trim()
|
organizationId: z.string().trim().describe(SamlSso.GET_CONFIG.organizationId)
|
||||||
}),
|
}),
|
||||||
response: {
|
response: {
|
||||||
200: z
|
200: z.object({
|
||||||
.object({
|
id: z.string(),
|
||||||
id: z.string(),
|
organization: z.string(),
|
||||||
organization: z.string(),
|
orgId: z.string(),
|
||||||
orgId: z.string(),
|
authProvider: z.string(),
|
||||||
authProvider: z.string(),
|
isActive: z.boolean(),
|
||||||
isActive: z.boolean(),
|
entryPoint: z.string(),
|
||||||
entryPoint: z.string(),
|
issuer: z.string(),
|
||||||
issuer: z.string(),
|
cert: z.string(),
|
||||||
cert: z.string(),
|
lastUsed: z.date().nullable().optional()
|
||||||
lastUsed: z.date().nullable().optional()
|
})
|
||||||
})
|
|
||||||
.optional()
|
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
handler: async (req) => {
|
handler: async (req) => {
|
||||||
@ -302,15 +309,23 @@ export const registerSamlRouter = async (server: FastifyZodProvider) => {
|
|||||||
config: {
|
config: {
|
||||||
rateLimit: writeLimit
|
rateLimit: writeLimit
|
||||||
},
|
},
|
||||||
onRequest: verifyAuth([AuthMode.JWT]),
|
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||||
schema: {
|
schema: {
|
||||||
|
hide: false,
|
||||||
|
tags: [ApiDocsTags.SamlSso],
|
||||||
|
description: "Create SAML config",
|
||||||
|
security: [
|
||||||
|
{
|
||||||
|
bearerAuth: []
|
||||||
|
}
|
||||||
|
],
|
||||||
body: z.object({
|
body: z.object({
|
||||||
organizationId: z.string(),
|
organizationId: z.string().trim().describe(SamlSso.CREATE_CONFIG.organizationId),
|
||||||
authProvider: z.nativeEnum(SamlProviders),
|
authProvider: z.nativeEnum(SamlProviders).describe(SamlSso.CREATE_CONFIG.authProvider),
|
||||||
isActive: z.boolean(),
|
isActive: z.boolean().describe(SamlSso.CREATE_CONFIG.isActive),
|
||||||
entryPoint: z.string(),
|
entryPoint: z.string().trim().describe(SamlSso.CREATE_CONFIG.entryPoint),
|
||||||
issuer: z.string(),
|
issuer: z.string().trim().describe(SamlSso.CREATE_CONFIG.issuer),
|
||||||
cert: z.string()
|
cert: z.string().trim().describe(SamlSso.CREATE_CONFIG.cert)
|
||||||
}),
|
}),
|
||||||
response: {
|
response: {
|
||||||
200: SanitizedSamlConfigSchema
|
200: SanitizedSamlConfigSchema
|
||||||
@ -341,18 +356,26 @@ export const registerSamlRouter = async (server: FastifyZodProvider) => {
|
|||||||
config: {
|
config: {
|
||||||
rateLimit: writeLimit
|
rateLimit: writeLimit
|
||||||
},
|
},
|
||||||
onRequest: verifyAuth([AuthMode.JWT]),
|
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||||
schema: {
|
schema: {
|
||||||
|
hide: false,
|
||||||
|
tags: [ApiDocsTags.SamlSso],
|
||||||
|
description: "Update SAML config",
|
||||||
|
security: [
|
||||||
|
{
|
||||||
|
bearerAuth: []
|
||||||
|
}
|
||||||
|
],
|
||||||
body: z
|
body: z
|
||||||
.object({
|
.object({
|
||||||
authProvider: z.nativeEnum(SamlProviders),
|
authProvider: z.nativeEnum(SamlProviders).describe(SamlSso.UPDATE_CONFIG.authProvider),
|
||||||
isActive: z.boolean(),
|
isActive: z.boolean().describe(SamlSso.UPDATE_CONFIG.isActive),
|
||||||
entryPoint: z.string(),
|
entryPoint: z.string().trim().describe(SamlSso.UPDATE_CONFIG.entryPoint),
|
||||||
issuer: z.string(),
|
issuer: z.string().trim().describe(SamlSso.UPDATE_CONFIG.issuer),
|
||||||
cert: z.string()
|
cert: z.string().trim().describe(SamlSso.UPDATE_CONFIG.cert)
|
||||||
})
|
})
|
||||||
.partial()
|
.partial()
|
||||||
.merge(z.object({ organizationId: z.string() })),
|
.merge(z.object({ organizationId: z.string().trim().describe(SamlSso.UPDATE_CONFIG.organizationId) })),
|
||||||
response: {
|
response: {
|
||||||
200: SanitizedSamlConfigSchema
|
200: SanitizedSamlConfigSchema
|
||||||
}
|
}
|
||||||
|
@ -141,14 +141,39 @@ export const registerSecretApprovalRequestRouter = async (server: FastifyZodProv
|
|||||||
},
|
},
|
||||||
onRequest: verifyAuth([AuthMode.JWT]),
|
onRequest: verifyAuth([AuthMode.JWT]),
|
||||||
handler: async (req) => {
|
handler: async (req) => {
|
||||||
const { approval } = await server.services.secretApprovalRequest.mergeSecretApprovalRequest({
|
const { approval, projectId, secretMutationEvents } =
|
||||||
actorId: req.permission.id,
|
await server.services.secretApprovalRequest.mergeSecretApprovalRequest({
|
||||||
actor: req.permission.type,
|
actorId: req.permission.id,
|
||||||
actorAuthMethod: req.permission.authMethod,
|
actor: req.permission.type,
|
||||||
actorOrgId: req.permission.orgId,
|
actorAuthMethod: req.permission.authMethod,
|
||||||
approvalId: req.params.id,
|
actorOrgId: req.permission.orgId,
|
||||||
bypassReason: req.body.bypassReason
|
approvalId: req.params.id,
|
||||||
|
bypassReason: req.body.bypassReason
|
||||||
|
});
|
||||||
|
|
||||||
|
await server.services.auditLog.createAuditLog({
|
||||||
|
...req.auditLogInfo,
|
||||||
|
orgId: req.permission.orgId,
|
||||||
|
projectId,
|
||||||
|
event: {
|
||||||
|
type: EventType.SECRET_APPROVAL_MERGED,
|
||||||
|
metadata: {
|
||||||
|
mergedBy: req.permission.id,
|
||||||
|
secretApprovalRequestSlug: approval.slug,
|
||||||
|
secretApprovalRequestId: approval.id
|
||||||
|
}
|
||||||
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
|
for await (const event of secretMutationEvents) {
|
||||||
|
await server.services.auditLog.createAuditLog({
|
||||||
|
...req.auditLogInfo,
|
||||||
|
orgId: req.permission.orgId,
|
||||||
|
projectId,
|
||||||
|
event
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
return { approval };
|
return { approval };
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
@ -0,0 +1,16 @@
|
|||||||
|
import { registerSecretScanningEndpoints } from "@app/ee/routes/v2/secret-scanning-v2-routers/secret-scanning-v2-endpoints";
|
||||||
|
import {
|
||||||
|
BitbucketDataSourceSchema,
|
||||||
|
CreateBitbucketDataSourceSchema,
|
||||||
|
UpdateBitbucketDataSourceSchema
|
||||||
|
} from "@app/ee/services/secret-scanning-v2/bitbucket";
|
||||||
|
import { SecretScanningDataSource } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
|
||||||
|
|
||||||
|
export const registerBitbucketSecretScanningRouter = async (server: FastifyZodProvider) =>
|
||||||
|
registerSecretScanningEndpoints({
|
||||||
|
type: SecretScanningDataSource.Bitbucket,
|
||||||
|
server,
|
||||||
|
responseSchema: BitbucketDataSourceSchema,
|
||||||
|
createSchema: CreateBitbucketDataSourceSchema,
|
||||||
|
updateSchema: UpdateBitbucketDataSourceSchema
|
||||||
|
});
|
@ -1,5 +1,6 @@
|
|||||||
import { SecretScanningDataSource } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
|
import { SecretScanningDataSource } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
|
||||||
|
|
||||||
|
import { registerBitbucketSecretScanningRouter } from "./bitbucket-secret-scanning-router";
|
||||||
import { registerGitHubSecretScanningRouter } from "./github-secret-scanning-router";
|
import { registerGitHubSecretScanningRouter } from "./github-secret-scanning-router";
|
||||||
|
|
||||||
export * from "./secret-scanning-v2-router";
|
export * from "./secret-scanning-v2-router";
|
||||||
@ -8,5 +9,6 @@ export const SECRET_SCANNING_REGISTER_ROUTER_MAP: Record<
|
|||||||
SecretScanningDataSource,
|
SecretScanningDataSource,
|
||||||
(server: FastifyZodProvider) => Promise<void>
|
(server: FastifyZodProvider) => Promise<void>
|
||||||
> = {
|
> = {
|
||||||
[SecretScanningDataSource.GitHub]: registerGitHubSecretScanningRouter
|
[SecretScanningDataSource.GitHub]: registerGitHubSecretScanningRouter,
|
||||||
|
[SecretScanningDataSource.Bitbucket]: registerBitbucketSecretScanningRouter
|
||||||
};
|
};
|
||||||
|
@ -2,6 +2,7 @@ import { z } from "zod";
|
|||||||
|
|
||||||
import { SecretScanningConfigsSchema } from "@app/db/schemas";
|
import { SecretScanningConfigsSchema } from "@app/db/schemas";
|
||||||
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
|
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
|
||||||
|
import { BitbucketDataSourceListItemSchema } from "@app/ee/services/secret-scanning-v2/bitbucket";
|
||||||
import { GitHubDataSourceListItemSchema } from "@app/ee/services/secret-scanning-v2/github";
|
import { GitHubDataSourceListItemSchema } from "@app/ee/services/secret-scanning-v2/github";
|
||||||
import {
|
import {
|
||||||
SecretScanningFindingStatus,
|
SecretScanningFindingStatus,
|
||||||
@ -21,7 +22,10 @@ import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
|
|||||||
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
|
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
|
||||||
import { AuthMode } from "@app/services/auth/auth-type";
|
import { AuthMode } from "@app/services/auth/auth-type";
|
||||||
|
|
||||||
const SecretScanningDataSourceOptionsSchema = z.discriminatedUnion("type", [GitHubDataSourceListItemSchema]);
|
const SecretScanningDataSourceOptionsSchema = z.discriminatedUnion("type", [
|
||||||
|
GitHubDataSourceListItemSchema,
|
||||||
|
BitbucketDataSourceListItemSchema
|
||||||
|
]);
|
||||||
|
|
||||||
export const registerSecretScanningV2Router = async (server: FastifyZodProvider) => {
|
export const registerSecretScanningV2Router = async (server: FastifyZodProvider) => {
|
||||||
server.route({
|
server.route({
|
||||||
|
@ -116,6 +116,15 @@ interface BaseAuthData {
|
|||||||
userAgentType?: UserAgentType;
|
userAgentType?: UserAgentType;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
export enum SecretApprovalEvent {
|
||||||
|
Create = "create",
|
||||||
|
Update = "update",
|
||||||
|
Delete = "delete",
|
||||||
|
CreateMany = "create-many",
|
||||||
|
UpdateMany = "update-many",
|
||||||
|
DeleteMany = "delete-many"
|
||||||
|
}
|
||||||
|
|
||||||
export enum UserAgentType {
|
export enum UserAgentType {
|
||||||
WEB = "web",
|
WEB = "web",
|
||||||
CLI = "cli",
|
CLI = "cli",
|
||||||
@ -1705,6 +1714,17 @@ interface SecretApprovalRequest {
|
|||||||
committedBy: string;
|
committedBy: string;
|
||||||
secretApprovalRequestSlug: string;
|
secretApprovalRequestSlug: string;
|
||||||
secretApprovalRequestId: string;
|
secretApprovalRequestId: string;
|
||||||
|
eventType: SecretApprovalEvent;
|
||||||
|
secretKey?: string;
|
||||||
|
secretId?: string;
|
||||||
|
secrets?: {
|
||||||
|
secretKey?: string;
|
||||||
|
secretId?: string;
|
||||||
|
environment?: string;
|
||||||
|
secretPath?: string;
|
||||||
|
}[];
|
||||||
|
environment: string;
|
||||||
|
secretPath: string;
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -21,7 +21,7 @@ import { randomUUID } from "crypto";
|
|||||||
import { z } from "zod";
|
import { z } from "zod";
|
||||||
|
|
||||||
import { getConfig } from "@app/lib/config/env";
|
import { getConfig } from "@app/lib/config/env";
|
||||||
import { BadRequestError } from "@app/lib/errors";
|
import { BadRequestError, UnauthorizedError } from "@app/lib/errors";
|
||||||
import { alphaNumericNanoId } from "@app/lib/nanoid";
|
import { alphaNumericNanoId } from "@app/lib/nanoid";
|
||||||
|
|
||||||
import { AwsIamAuthType, DynamicSecretAwsIamSchema, TDynamicProviderFns } from "./models";
|
import { AwsIamAuthType, DynamicSecretAwsIamSchema, TDynamicProviderFns } from "./models";
|
||||||
@ -81,6 +81,21 @@ export const AwsIamProvider = (): TDynamicProviderFns => {
|
|||||||
return client;
|
return client;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if (providerInputs.method === AwsIamAuthType.IRSA) {
|
||||||
|
// Allow instances to disable automatic service account token fetching (e.g. for shared cloud)
|
||||||
|
if (!appCfg.KUBERNETES_AUTO_FETCH_SERVICE_ACCOUNT_TOKEN) {
|
||||||
|
throw new UnauthorizedError({
|
||||||
|
message: "Failed to get AWS credentials via IRSA: KUBERNETES_AUTO_FETCH_SERVICE_ACCOUNT_TOKEN is not enabled."
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
// The SDK will automatically pick up credentials from the environment
|
||||||
|
const client = new IAMClient({
|
||||||
|
region: providerInputs.region
|
||||||
|
});
|
||||||
|
return client;
|
||||||
|
}
|
||||||
|
|
||||||
const client = new IAMClient({
|
const client = new IAMClient({
|
||||||
region: providerInputs.region,
|
region: providerInputs.region,
|
||||||
credentials: {
|
credentials: {
|
||||||
@ -101,7 +116,7 @@ export const AwsIamProvider = (): TDynamicProviderFns => {
|
|||||||
.catch((err) => {
|
.catch((err) => {
|
||||||
const message = (err as Error)?.message;
|
const message = (err as Error)?.message;
|
||||||
if (
|
if (
|
||||||
providerInputs.method === AwsIamAuthType.AssumeRole &&
|
(providerInputs.method === AwsIamAuthType.AssumeRole || providerInputs.method === AwsIamAuthType.IRSA) &&
|
||||||
// assume role will throw an error asking to provider username, but if so this has access in aws correctly
|
// assume role will throw an error asking to provider username, but if so this has access in aws correctly
|
||||||
message.includes("Must specify userName when calling with non-User credentials")
|
message.includes("Must specify userName when calling with non-User credentials")
|
||||||
) {
|
) {
|
||||||
|
@ -28,7 +28,8 @@ export enum SqlProviders {
|
|||||||
|
|
||||||
export enum AwsIamAuthType {
|
export enum AwsIamAuthType {
|
||||||
AssumeRole = "assume-role",
|
AssumeRole = "assume-role",
|
||||||
AccessKey = "access-key"
|
AccessKey = "access-key",
|
||||||
|
IRSA = "irsa"
|
||||||
}
|
}
|
||||||
|
|
||||||
export enum ElasticSearchAuthTypes {
|
export enum ElasticSearchAuthTypes {
|
||||||
@ -221,6 +222,16 @@ export const DynamicSecretAwsIamSchema = z.preprocess(
|
|||||||
userGroups: z.string().trim().optional(),
|
userGroups: z.string().trim().optional(),
|
||||||
policyArns: z.string().trim().optional(),
|
policyArns: z.string().trim().optional(),
|
||||||
tags: ResourceMetadataSchema.optional()
|
tags: ResourceMetadataSchema.optional()
|
||||||
|
}),
|
||||||
|
z.object({
|
||||||
|
method: z.literal(AwsIamAuthType.IRSA),
|
||||||
|
region: z.string().trim().min(1),
|
||||||
|
awsPath: z.string().trim().optional(),
|
||||||
|
permissionBoundaryPolicyArn: z.string().trim().optional(),
|
||||||
|
policyDocument: z.string().trim().optional(),
|
||||||
|
userGroups: z.string().trim().optional(),
|
||||||
|
policyArns: z.string().trim().optional(),
|
||||||
|
tags: ResourceMetadataSchema.optional()
|
||||||
})
|
})
|
||||||
])
|
])
|
||||||
);
|
);
|
||||||
|
@ -107,34 +107,26 @@ export const oidcConfigServiceFactory = ({
|
|||||||
kmsService
|
kmsService
|
||||||
}: TOidcConfigServiceFactoryDep) => {
|
}: TOidcConfigServiceFactoryDep) => {
|
||||||
const getOidc = async (dto: TGetOidcCfgDTO) => {
|
const getOidc = async (dto: TGetOidcCfgDTO) => {
|
||||||
const org = await orgDAL.findOne({ slug: dto.orgSlug });
|
const oidcCfg = await oidcConfigDAL.findOne({
|
||||||
if (!org) {
|
orgId: dto.organizationId
|
||||||
|
});
|
||||||
|
if (!oidcCfg) {
|
||||||
throw new NotFoundError({
|
throw new NotFoundError({
|
||||||
message: `Organization with slug '${dto.orgSlug}' not found`,
|
message: `OIDC configuration for organization with ID '${dto.organizationId}' not found`
|
||||||
name: "OrgNotFound"
|
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
if (dto.type === "external") {
|
if (dto.type === "external") {
|
||||||
const { permission } = await permissionService.getOrgPermission(
|
const { permission } = await permissionService.getOrgPermission(
|
||||||
dto.actor,
|
dto.actor,
|
||||||
dto.actorId,
|
dto.actorId,
|
||||||
org.id,
|
dto.organizationId,
|
||||||
dto.actorAuthMethod,
|
dto.actorAuthMethod,
|
||||||
dto.actorOrgId
|
dto.actorOrgId
|
||||||
);
|
);
|
||||||
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Read, OrgPermissionSubjects.Sso);
|
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Read, OrgPermissionSubjects.Sso);
|
||||||
}
|
}
|
||||||
|
|
||||||
const oidcCfg = await oidcConfigDAL.findOne({
|
|
||||||
orgId: org.id
|
|
||||||
});
|
|
||||||
|
|
||||||
if (!oidcCfg) {
|
|
||||||
throw new NotFoundError({
|
|
||||||
message: `OIDC configuration for organization with slug '${dto.orgSlug}' not found`
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
const { decryptor } = await kmsService.createCipherPairWithDataKey({
|
const { decryptor } = await kmsService.createCipherPairWithDataKey({
|
||||||
type: KmsDataKey.Organization,
|
type: KmsDataKey.Organization,
|
||||||
orgId: oidcCfg.orgId
|
orgId: oidcCfg.orgId
|
||||||
@ -465,7 +457,7 @@ export const oidcConfigServiceFactory = ({
|
|||||||
};
|
};
|
||||||
|
|
||||||
const updateOidcCfg = async ({
|
const updateOidcCfg = async ({
|
||||||
orgSlug,
|
organizationId,
|
||||||
allowedEmailDomains,
|
allowedEmailDomains,
|
||||||
configurationType,
|
configurationType,
|
||||||
discoveryURL,
|
discoveryURL,
|
||||||
@ -484,13 +476,11 @@ export const oidcConfigServiceFactory = ({
|
|||||||
manageGroupMemberships,
|
manageGroupMemberships,
|
||||||
jwtSignatureAlgorithm
|
jwtSignatureAlgorithm
|
||||||
}: TUpdateOidcCfgDTO) => {
|
}: TUpdateOidcCfgDTO) => {
|
||||||
const org = await orgDAL.findOne({
|
const org = await orgDAL.findOne({ id: organizationId });
|
||||||
slug: orgSlug
|
|
||||||
});
|
|
||||||
|
|
||||||
if (!org) {
|
if (!org) {
|
||||||
throw new NotFoundError({
|
throw new NotFoundError({
|
||||||
message: `Organization with slug '${orgSlug}' not found`
|
message: `Organization with ID '${organizationId}' not found`
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -555,7 +545,7 @@ export const oidcConfigServiceFactory = ({
|
|||||||
};
|
};
|
||||||
|
|
||||||
const createOidcCfg = async ({
|
const createOidcCfg = async ({
|
||||||
orgSlug,
|
organizationId,
|
||||||
allowedEmailDomains,
|
allowedEmailDomains,
|
||||||
configurationType,
|
configurationType,
|
||||||
discoveryURL,
|
discoveryURL,
|
||||||
@ -574,12 +564,10 @@ export const oidcConfigServiceFactory = ({
|
|||||||
manageGroupMemberships,
|
manageGroupMemberships,
|
||||||
jwtSignatureAlgorithm
|
jwtSignatureAlgorithm
|
||||||
}: TCreateOidcCfgDTO) => {
|
}: TCreateOidcCfgDTO) => {
|
||||||
const org = await orgDAL.findOne({
|
const org = await orgDAL.findOne({ id: organizationId });
|
||||||
slug: orgSlug
|
|
||||||
});
|
|
||||||
if (!org) {
|
if (!org) {
|
||||||
throw new NotFoundError({
|
throw new NotFoundError({
|
||||||
message: `Organization with slug '${orgSlug}' not found`
|
message: `Organization with ID '${organizationId}' not found`
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -639,7 +627,7 @@ export const oidcConfigServiceFactory = ({
|
|||||||
|
|
||||||
const oidcCfg = await getOidc({
|
const oidcCfg = await getOidc({
|
||||||
type: "internal",
|
type: "internal",
|
||||||
orgSlug
|
organizationId: org.id
|
||||||
});
|
});
|
||||||
|
|
||||||
if (!oidcCfg || !oidcCfg.isActive) {
|
if (!oidcCfg || !oidcCfg.isActive) {
|
||||||
|
@ -26,11 +26,11 @@ export type TOidcLoginDTO = {
|
|||||||
export type TGetOidcCfgDTO =
|
export type TGetOidcCfgDTO =
|
||||||
| ({
|
| ({
|
||||||
type: "external";
|
type: "external";
|
||||||
orgSlug: string;
|
organizationId: string;
|
||||||
} & TGenericPermission)
|
} & TGenericPermission)
|
||||||
| {
|
| {
|
||||||
type: "internal";
|
type: "internal";
|
||||||
orgSlug: string;
|
organizationId: string;
|
||||||
};
|
};
|
||||||
|
|
||||||
export type TCreateOidcCfgDTO = {
|
export type TCreateOidcCfgDTO = {
|
||||||
@ -45,7 +45,7 @@ export type TCreateOidcCfgDTO = {
|
|||||||
clientId: string;
|
clientId: string;
|
||||||
clientSecret: string;
|
clientSecret: string;
|
||||||
isActive: boolean;
|
isActive: boolean;
|
||||||
orgSlug: string;
|
organizationId: string;
|
||||||
manageGroupMemberships: boolean;
|
manageGroupMemberships: boolean;
|
||||||
jwtSignatureAlgorithm: OIDCJWTSignatureAlgorithm;
|
jwtSignatureAlgorithm: OIDCJWTSignatureAlgorithm;
|
||||||
} & TGenericPermission;
|
} & TGenericPermission;
|
||||||
@ -62,7 +62,7 @@ export type TUpdateOidcCfgDTO = Partial<{
|
|||||||
clientId: string;
|
clientId: string;
|
||||||
clientSecret: string;
|
clientSecret: string;
|
||||||
isActive: boolean;
|
isActive: boolean;
|
||||||
orgSlug: string;
|
organizationId: string;
|
||||||
manageGroupMemberships: boolean;
|
manageGroupMemberships: boolean;
|
||||||
jwtSignatureAlgorithm: OIDCJWTSignatureAlgorithm;
|
jwtSignatureAlgorithm: OIDCJWTSignatureAlgorithm;
|
||||||
}> &
|
}> &
|
||||||
|
@ -148,10 +148,18 @@ export const samlConfigServiceFactory = ({
|
|||||||
let samlConfig: TSamlConfigs | undefined;
|
let samlConfig: TSamlConfigs | undefined;
|
||||||
if (dto.type === "org") {
|
if (dto.type === "org") {
|
||||||
samlConfig = await samlConfigDAL.findOne({ orgId: dto.orgId });
|
samlConfig = await samlConfigDAL.findOne({ orgId: dto.orgId });
|
||||||
if (!samlConfig) return;
|
if (!samlConfig) {
|
||||||
|
throw new NotFoundError({
|
||||||
|
message: `SAML configuration for organization with ID '${dto.orgId}' not found`
|
||||||
|
});
|
||||||
|
}
|
||||||
} else if (dto.type === "orgSlug") {
|
} else if (dto.type === "orgSlug") {
|
||||||
const org = await orgDAL.findOne({ slug: dto.orgSlug });
|
const org = await orgDAL.findOne({ slug: dto.orgSlug });
|
||||||
if (!org) return;
|
if (!org) {
|
||||||
|
throw new NotFoundError({
|
||||||
|
message: `Organization with slug '${dto.orgSlug}' not found`
|
||||||
|
});
|
||||||
|
}
|
||||||
samlConfig = await samlConfigDAL.findOne({ orgId: org.id });
|
samlConfig = await samlConfigDAL.findOne({ orgId: org.id });
|
||||||
} else if (dto.type === "ssoId") {
|
} else if (dto.type === "ssoId") {
|
||||||
// TODO:
|
// TODO:
|
||||||
|
@ -61,20 +61,17 @@ export type TSamlLoginDTO = {
|
|||||||
export type TSamlConfigServiceFactory = {
|
export type TSamlConfigServiceFactory = {
|
||||||
createSamlCfg: (arg: TCreateSamlCfgDTO) => Promise<TSamlConfigs>;
|
createSamlCfg: (arg: TCreateSamlCfgDTO) => Promise<TSamlConfigs>;
|
||||||
updateSamlCfg: (arg: TUpdateSamlCfgDTO) => Promise<TSamlConfigs>;
|
updateSamlCfg: (arg: TUpdateSamlCfgDTO) => Promise<TSamlConfigs>;
|
||||||
getSaml: (arg: TGetSamlCfgDTO) => Promise<
|
getSaml: (arg: TGetSamlCfgDTO) => Promise<{
|
||||||
| {
|
id: string;
|
||||||
id: string;
|
organization: string;
|
||||||
organization: string;
|
orgId: string;
|
||||||
orgId: string;
|
authProvider: string;
|
||||||
authProvider: string;
|
isActive: boolean;
|
||||||
isActive: boolean;
|
entryPoint: string;
|
||||||
entryPoint: string;
|
issuer: string;
|
||||||
issuer: string;
|
cert: string;
|
||||||
cert: string;
|
lastUsed: Date | null | undefined;
|
||||||
lastUsed: Date | null | undefined;
|
}>;
|
||||||
}
|
|
||||||
| undefined
|
|
||||||
>;
|
|
||||||
samlLogin: (arg: TSamlLoginDTO) => Promise<{
|
samlLogin: (arg: TSamlLoginDTO) => Promise<{
|
||||||
isUserCompleted: boolean;
|
isUserCompleted: boolean;
|
||||||
providerAuthToken: string;
|
providerAuthToken: string;
|
||||||
|
@ -10,6 +10,7 @@ import {
|
|||||||
TSecretApprovalRequestsSecretsInsert,
|
TSecretApprovalRequestsSecretsInsert,
|
||||||
TSecretApprovalRequestsSecretsV2Insert
|
TSecretApprovalRequestsSecretsV2Insert
|
||||||
} from "@app/db/schemas";
|
} from "@app/db/schemas";
|
||||||
|
import { Event, EventType } from "@app/ee/services/audit-log/audit-log-types";
|
||||||
import { getConfig } from "@app/lib/config/env";
|
import { getConfig } from "@app/lib/config/env";
|
||||||
import { decryptSymmetric128BitHexKeyUTF8 } from "@app/lib/crypto";
|
import { decryptSymmetric128BitHexKeyUTF8 } from "@app/lib/crypto";
|
||||||
import { BadRequestError, ForbiddenRequestError, NotFoundError } from "@app/lib/errors";
|
import { BadRequestError, ForbiddenRequestError, NotFoundError } from "@app/lib/errors";
|
||||||
@ -523,7 +524,7 @@ export const secretApprovalRequestServiceFactory = ({
|
|||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
const { policy, folderId, projectId, bypassers } = secretApprovalRequest;
|
const { policy, folderId, projectId, bypassers, environment } = secretApprovalRequest;
|
||||||
if (policy.deletedAt) {
|
if (policy.deletedAt) {
|
||||||
throw new BadRequestError({
|
throw new BadRequestError({
|
||||||
message: "The policy associated with this secret approval request has been deleted."
|
message: "The policy associated with this secret approval request has been deleted."
|
||||||
@ -957,7 +958,112 @@ export const secretApprovalRequestServiceFactory = ({
|
|||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
return mergeStatus;
|
const { created, updated, deleted } = mergeStatus.secrets;
|
||||||
|
|
||||||
|
const secretMutationEvents: Event[] = [];
|
||||||
|
|
||||||
|
if (created.length) {
|
||||||
|
if (created.length > 1) {
|
||||||
|
secretMutationEvents.push({
|
||||||
|
type: EventType.CREATE_SECRETS,
|
||||||
|
metadata: {
|
||||||
|
environment,
|
||||||
|
secretPath: folder.path,
|
||||||
|
secrets: created.map((secret) => ({
|
||||||
|
secretId: secret.id,
|
||||||
|
secretVersion: 1,
|
||||||
|
// @ts-expect-error not present on v1 secrets
|
||||||
|
secretKey: secret.key as string,
|
||||||
|
// @ts-expect-error not present on v1 secrets
|
||||||
|
secretMetadata: secret.secretMetadata as ResourceMetadataDTO
|
||||||
|
}))
|
||||||
|
}
|
||||||
|
});
|
||||||
|
} else {
|
||||||
|
const [secret] = created;
|
||||||
|
secretMutationEvents.push({
|
||||||
|
type: EventType.CREATE_SECRET,
|
||||||
|
metadata: {
|
||||||
|
environment,
|
||||||
|
secretPath: folder.path,
|
||||||
|
secretId: secret.id,
|
||||||
|
secretVersion: 1,
|
||||||
|
// @ts-expect-error not present on v1 secrets
|
||||||
|
secretKey: secret.key as string,
|
||||||
|
// @ts-expect-error not present on v1 secrets
|
||||||
|
secretMetadata: secret.secretMetadata as ResourceMetadataDTO
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (updated.length) {
|
||||||
|
if (updated.length > 1) {
|
||||||
|
secretMutationEvents.push({
|
||||||
|
type: EventType.UPDATE_SECRETS,
|
||||||
|
metadata: {
|
||||||
|
environment,
|
||||||
|
secretPath: folder.path,
|
||||||
|
secrets: updated.map((secret) => ({
|
||||||
|
secretId: secret.id,
|
||||||
|
secretVersion: secret.version,
|
||||||
|
// @ts-expect-error not present on v1 secrets
|
||||||
|
secretKey: secret.key as string,
|
||||||
|
// @ts-expect-error not present on v1 secrets
|
||||||
|
secretMetadata: secret.secretMetadata as ResourceMetadataDTO
|
||||||
|
}))
|
||||||
|
}
|
||||||
|
});
|
||||||
|
} else {
|
||||||
|
const [secret] = updated;
|
||||||
|
secretMutationEvents.push({
|
||||||
|
type: EventType.UPDATE_SECRET,
|
||||||
|
metadata: {
|
||||||
|
environment,
|
||||||
|
secretPath: folder.path,
|
||||||
|
secretId: secret.id,
|
||||||
|
secretVersion: secret.version,
|
||||||
|
// @ts-expect-error not present on v1 secrets
|
||||||
|
secretKey: secret.key as string,
|
||||||
|
// @ts-expect-error not present on v1 secrets
|
||||||
|
secretMetadata: secret.secretMetadata as ResourceMetadataDTO
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (deleted.length) {
|
||||||
|
if (deleted.length > 1) {
|
||||||
|
secretMutationEvents.push({
|
||||||
|
type: EventType.DELETE_SECRETS,
|
||||||
|
metadata: {
|
||||||
|
environment,
|
||||||
|
secretPath: folder.path,
|
||||||
|
secrets: deleted.map((secret) => ({
|
||||||
|
secretId: secret.id,
|
||||||
|
secretVersion: secret.version,
|
||||||
|
// @ts-expect-error not present on v1 secrets
|
||||||
|
secretKey: secret.key as string
|
||||||
|
}))
|
||||||
|
}
|
||||||
|
});
|
||||||
|
} else {
|
||||||
|
const [secret] = deleted;
|
||||||
|
secretMutationEvents.push({
|
||||||
|
type: EventType.DELETE_SECRET,
|
||||||
|
metadata: {
|
||||||
|
environment,
|
||||||
|
secretPath: folder.path,
|
||||||
|
secretId: secret.id,
|
||||||
|
secretVersion: secret.version,
|
||||||
|
// @ts-expect-error not present on v1 secrets
|
||||||
|
secretKey: secret.key as string
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return { ...mergeStatus, projectId, secretMutationEvents };
|
||||||
};
|
};
|
||||||
|
|
||||||
// function to save secret change to secret approval
|
// function to save secret change to secret approval
|
||||||
|
@ -0,0 +1,9 @@
|
|||||||
|
import { SecretScanningDataSource } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
|
||||||
|
import { TSecretScanningDataSourceListItem } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-types";
|
||||||
|
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
|
||||||
|
|
||||||
|
export const BITBUCKET_SECRET_SCANNING_DATA_SOURCE_LIST_OPTION: TSecretScanningDataSourceListItem = {
|
||||||
|
name: "Bitbucket",
|
||||||
|
type: SecretScanningDataSource.Bitbucket,
|
||||||
|
connection: AppConnection.Bitbucket
|
||||||
|
};
|
@ -0,0 +1,314 @@
|
|||||||
|
import { join } from "path";
|
||||||
|
|
||||||
|
import { scanContentAndGetFindings } from "@app/ee/services/secret-scanning/secret-scanning-queue/secret-scanning-fns";
|
||||||
|
import { SecretMatch } from "@app/ee/services/secret-scanning/secret-scanning-queue/secret-scanning-queue-types";
|
||||||
|
import {
|
||||||
|
SecretScanningFindingSeverity,
|
||||||
|
SecretScanningResource
|
||||||
|
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
|
||||||
|
import {
|
||||||
|
cloneRepository,
|
||||||
|
convertPatchLineToFileLineNumber,
|
||||||
|
replaceNonChangesWithNewlines
|
||||||
|
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-fns";
|
||||||
|
import {
|
||||||
|
TSecretScanningFactoryGetDiffScanFindingsPayload,
|
||||||
|
TSecretScanningFactoryGetDiffScanResourcePayload,
|
||||||
|
TSecretScanningFactoryGetFullScanPath,
|
||||||
|
TSecretScanningFactoryInitialize,
|
||||||
|
TSecretScanningFactoryListRawResources,
|
||||||
|
TSecretScanningFactoryPostInitialization,
|
||||||
|
TSecretScanningFactoryTeardown
|
||||||
|
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-types";
|
||||||
|
import { getConfig } from "@app/lib/config/env";
|
||||||
|
import { request } from "@app/lib/config/request";
|
||||||
|
import { titleCaseToCamelCase } from "@app/lib/fn";
|
||||||
|
import { logger } from "@app/lib/logger";
|
||||||
|
import { alphaNumericNanoId } from "@app/lib/nanoid";
|
||||||
|
import { BasicRepositoryRegex } from "@app/lib/regex";
|
||||||
|
import {
|
||||||
|
getBitbucketUser,
|
||||||
|
listBitbucketRepositories,
|
||||||
|
TBitbucketConnection
|
||||||
|
} from "@app/services/app-connection/bitbucket";
|
||||||
|
import { IntegrationUrls } from "@app/services/integration-auth/integration-list";
|
||||||
|
|
||||||
|
import {
|
||||||
|
TBitbucketDataSourceCredentials,
|
||||||
|
TBitbucketDataSourceInput,
|
||||||
|
TBitbucketDataSourceWithConnection,
|
||||||
|
TQueueBitbucketResourceDiffScan
|
||||||
|
} from "./bitbucket-secret-scanning-types";
|
||||||
|
|
||||||
|
export const BitbucketSecretScanningFactory = () => {
|
||||||
|
const initialize: TSecretScanningFactoryInitialize<
|
||||||
|
TBitbucketDataSourceInput,
|
||||||
|
TBitbucketConnection,
|
||||||
|
TBitbucketDataSourceCredentials
|
||||||
|
> = async ({ connection, payload }, callback) => {
|
||||||
|
const cfg = getConfig();
|
||||||
|
|
||||||
|
const { email, apiToken } = connection.credentials;
|
||||||
|
const authHeader = `Basic ${Buffer.from(`${email}:${apiToken}`).toString("base64")}`;
|
||||||
|
|
||||||
|
const { data } = await request.post<{ uuid: string }>(
|
||||||
|
`${IntegrationUrls.BITBUCKET_API_URL}/2.0/workspaces/${encodeURIComponent(payload.config.workspaceSlug)}/hooks`,
|
||||||
|
{
|
||||||
|
description: "Infisical webhook for push events",
|
||||||
|
url: `${cfg.SITE_URL}/secret-scanning/webhooks/bitbucket`,
|
||||||
|
active: false,
|
||||||
|
events: ["repo:push"]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
headers: {
|
||||||
|
Authorization: authHeader,
|
||||||
|
Accept: "application/json"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
);
|
||||||
|
|
||||||
|
return callback({
|
||||||
|
credentials: { webhookId: data.uuid, webhookSecret: alphaNumericNanoId(64) }
|
||||||
|
});
|
||||||
|
};
|
||||||
|
|
||||||
|
const postInitialization: TSecretScanningFactoryPostInitialization<
|
||||||
|
TBitbucketDataSourceInput,
|
||||||
|
TBitbucketConnection,
|
||||||
|
TBitbucketDataSourceCredentials
|
||||||
|
> = async ({ dataSourceId, credentials, connection, payload }) => {
|
||||||
|
const { email, apiToken } = connection.credentials;
|
||||||
|
const { webhookId, webhookSecret } = credentials;
|
||||||
|
|
||||||
|
const authHeader = `Basic ${Buffer.from(`${email}:${apiToken}`).toString("base64")}`;
|
||||||
|
|
||||||
|
const cfg = getConfig();
|
||||||
|
const newWebhookUrl = `${cfg.SITE_URL}/secret-scanning/webhooks/bitbucket?dataSourceId=${dataSourceId}`;
|
||||||
|
|
||||||
|
await request.put(
|
||||||
|
`${IntegrationUrls.BITBUCKET_API_URL}/2.0/workspaces/${encodeURIComponent(payload.config.workspaceSlug)}/hooks/${webhookId}`,
|
||||||
|
{
|
||||||
|
description: "Infisical webhook for push events",
|
||||||
|
url: newWebhookUrl,
|
||||||
|
active: true,
|
||||||
|
events: ["repo:push"],
|
||||||
|
secret: webhookSecret
|
||||||
|
},
|
||||||
|
{
|
||||||
|
headers: {
|
||||||
|
Authorization: authHeader,
|
||||||
|
Accept: "application/json"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
);
|
||||||
|
};
|
||||||
|
|
||||||
|
const teardown: TSecretScanningFactoryTeardown<
|
||||||
|
TBitbucketDataSourceWithConnection,
|
||||||
|
TBitbucketDataSourceCredentials
|
||||||
|
> = async ({ credentials, dataSource }) => {
|
||||||
|
const {
|
||||||
|
connection: {
|
||||||
|
credentials: { email, apiToken }
|
||||||
|
},
|
||||||
|
config
|
||||||
|
} = dataSource;
|
||||||
|
const { webhookId } = credentials;
|
||||||
|
|
||||||
|
const authHeader = `Basic ${Buffer.from(`${email}:${apiToken}`).toString("base64")}`;
|
||||||
|
|
||||||
|
try {
|
||||||
|
await request.delete(
|
||||||
|
`${IntegrationUrls.BITBUCKET_API_URL}/2.0/workspaces/${config.workspaceSlug}/hooks/${webhookId}`,
|
||||||
|
{
|
||||||
|
headers: {
|
||||||
|
Authorization: authHeader,
|
||||||
|
Accept: "application/json"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
);
|
||||||
|
} catch (err) {
|
||||||
|
logger.error(`teardown: Bitbucket - Failed to call delete on webhook [webhookId=${webhookId}]`);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
const listRawResources: TSecretScanningFactoryListRawResources<TBitbucketDataSourceWithConnection> = async (
|
||||||
|
dataSource
|
||||||
|
) => {
|
||||||
|
const {
|
||||||
|
connection,
|
||||||
|
config: { includeRepos, workspaceSlug }
|
||||||
|
} = dataSource;
|
||||||
|
|
||||||
|
const repos = await listBitbucketRepositories(connection, workspaceSlug);
|
||||||
|
|
||||||
|
const filteredRepos: typeof repos = [];
|
||||||
|
if (includeRepos.includes("*")) {
|
||||||
|
filteredRepos.push(...repos);
|
||||||
|
} else {
|
||||||
|
filteredRepos.push(...repos.filter((repo) => includeRepos.includes(repo.full_name)));
|
||||||
|
}
|
||||||
|
|
||||||
|
return filteredRepos.map(({ full_name, uuid }) => ({
|
||||||
|
name: full_name,
|
||||||
|
externalId: uuid,
|
||||||
|
type: SecretScanningResource.Repository
|
||||||
|
}));
|
||||||
|
};
|
||||||
|
|
||||||
|
const getFullScanPath: TSecretScanningFactoryGetFullScanPath<TBitbucketDataSourceWithConnection> = async ({
|
||||||
|
dataSource,
|
||||||
|
resourceName,
|
||||||
|
tempFolder
|
||||||
|
}) => {
|
||||||
|
const {
|
||||||
|
connection: {
|
||||||
|
credentials: { apiToken, email }
|
||||||
|
}
|
||||||
|
} = dataSource;
|
||||||
|
|
||||||
|
const repoPath = join(tempFolder, "repo.git");
|
||||||
|
|
||||||
|
if (!BasicRepositoryRegex.test(resourceName)) {
|
||||||
|
throw new Error("Invalid Bitbucket repository name");
|
||||||
|
}
|
||||||
|
|
||||||
|
const { username } = await getBitbucketUser({ email, apiToken });
|
||||||
|
|
||||||
|
await cloneRepository({
|
||||||
|
cloneUrl: `https://${encodeURIComponent(username)}:${apiToken}@bitbucket.org/${resourceName}.git`,
|
||||||
|
repoPath
|
||||||
|
});
|
||||||
|
|
||||||
|
return repoPath;
|
||||||
|
};
|
||||||
|
|
||||||
|
const getDiffScanResourcePayload: TSecretScanningFactoryGetDiffScanResourcePayload<
|
||||||
|
TQueueBitbucketResourceDiffScan["payload"]
|
||||||
|
> = ({ repository }) => {
|
||||||
|
return {
|
||||||
|
name: repository.full_name,
|
||||||
|
externalId: repository.uuid,
|
||||||
|
type: SecretScanningResource.Repository
|
||||||
|
};
|
||||||
|
};
|
||||||
|
|
||||||
|
const getDiffScanFindingsPayload: TSecretScanningFactoryGetDiffScanFindingsPayload<
|
||||||
|
TBitbucketDataSourceWithConnection,
|
||||||
|
TQueueBitbucketResourceDiffScan["payload"]
|
||||||
|
> = async ({ dataSource, payload, resourceName, configPath }) => {
|
||||||
|
const {
|
||||||
|
connection: {
|
||||||
|
credentials: { apiToken, email }
|
||||||
|
}
|
||||||
|
} = dataSource;
|
||||||
|
|
||||||
|
const { push, repository } = payload;
|
||||||
|
|
||||||
|
const allFindings: SecretMatch[] = [];
|
||||||
|
|
||||||
|
const authHeader = `Basic ${Buffer.from(`${email}:${apiToken}`).toString("base64")}`;
|
||||||
|
|
||||||
|
for (const change of push.changes) {
|
||||||
|
for (const commit of change.commits) {
|
||||||
|
// eslint-disable-next-line no-await-in-loop
|
||||||
|
const { data: diffstat } = await request.get<{
|
||||||
|
values: {
|
||||||
|
status: "added" | "modified" | "removed" | "renamed";
|
||||||
|
new?: { path: string };
|
||||||
|
old?: { path: string };
|
||||||
|
}[];
|
||||||
|
}>(`${IntegrationUrls.BITBUCKET_API_URL}/2.0/repositories/${repository.full_name}/diffstat/${commit.hash}`, {
|
||||||
|
headers: {
|
||||||
|
Authorization: authHeader,
|
||||||
|
Accept: "application/json"
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
// eslint-disable-next-line no-continue
|
||||||
|
if (!diffstat.values) continue;
|
||||||
|
|
||||||
|
for (const file of diffstat.values) {
|
||||||
|
if ((file.status === "added" || file.status === "modified") && file.new?.path) {
|
||||||
|
const filePath = file.new.path;
|
||||||
|
|
||||||
|
// eslint-disable-next-line no-await-in-loop
|
||||||
|
const { data: patch } = await request.get<string>(
|
||||||
|
`https://api.bitbucket.org/2.0/repositories/${repository.full_name}/diff/${commit.hash}`,
|
||||||
|
{
|
||||||
|
params: {
|
||||||
|
path: filePath
|
||||||
|
},
|
||||||
|
headers: {
|
||||||
|
Authorization: authHeader
|
||||||
|
},
|
||||||
|
responseType: "text"
|
||||||
|
}
|
||||||
|
);
|
||||||
|
|
||||||
|
// eslint-disable-next-line no-continue
|
||||||
|
if (!patch) continue;
|
||||||
|
|
||||||
|
// eslint-disable-next-line no-await-in-loop
|
||||||
|
const findings = await scanContentAndGetFindings(replaceNonChangesWithNewlines(`\n${patch}`), configPath);
|
||||||
|
|
||||||
|
const adjustedFindings = findings.map((finding) => {
|
||||||
|
const startLine = convertPatchLineToFileLineNumber(patch, finding.StartLine);
|
||||||
|
const endLine =
|
||||||
|
finding.StartLine === finding.EndLine
|
||||||
|
? startLine
|
||||||
|
: convertPatchLineToFileLineNumber(patch, finding.EndLine);
|
||||||
|
const startColumn = finding.StartColumn - 1; // subtract 1 for +
|
||||||
|
const endColumn = finding.EndColumn - 1; // subtract 1 for +
|
||||||
|
const authorName = commit.author.user?.display_name || commit.author.raw.split(" <")[0];
|
||||||
|
const emailMatch = commit.author.raw.match(/<(.*)>/);
|
||||||
|
const authorEmail = emailMatch?.[1] ?? "";
|
||||||
|
|
||||||
|
return {
|
||||||
|
...finding,
|
||||||
|
StartLine: startLine,
|
||||||
|
EndLine: endLine,
|
||||||
|
StartColumn: startColumn,
|
||||||
|
EndColumn: endColumn,
|
||||||
|
File: filePath,
|
||||||
|
Commit: commit.hash,
|
||||||
|
Author: authorName,
|
||||||
|
Email: authorEmail,
|
||||||
|
Message: commit.message,
|
||||||
|
Fingerprint: `${commit.hash}:${filePath}:${finding.RuleID}:${startLine}:${startColumn}`,
|
||||||
|
Date: commit.date,
|
||||||
|
Link: `https://bitbucket.org/${resourceName}/src/${commit.hash}/${filePath}#lines-${startLine}`
|
||||||
|
};
|
||||||
|
});
|
||||||
|
|
||||||
|
allFindings.push(...adjustedFindings);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return allFindings.map(
|
||||||
|
({
|
||||||
|
// discard match and secret as we don't want to store
|
||||||
|
Match,
|
||||||
|
Secret,
|
||||||
|
...finding
|
||||||
|
}) => ({
|
||||||
|
details: titleCaseToCamelCase(finding),
|
||||||
|
fingerprint: finding.Fingerprint,
|
||||||
|
severity: SecretScanningFindingSeverity.High,
|
||||||
|
rule: finding.RuleID
|
||||||
|
})
|
||||||
|
);
|
||||||
|
};
|
||||||
|
|
||||||
|
return {
|
||||||
|
initialize,
|
||||||
|
postInitialization,
|
||||||
|
listRawResources,
|
||||||
|
getFullScanPath,
|
||||||
|
getDiffScanResourcePayload,
|
||||||
|
getDiffScanFindingsPayload,
|
||||||
|
teardown
|
||||||
|
};
|
||||||
|
};
|
@ -0,0 +1,97 @@
|
|||||||
|
import { z } from "zod";

import {
  SecretScanningDataSource,
  SecretScanningResource
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
import {
  BaseCreateSecretScanningDataSourceSchema,
  BaseSecretScanningDataSourceSchema,
  BaseSecretScanningFindingSchema,
  BaseUpdateSecretScanningDataSourceSchema,
  GitRepositoryScanFindingDetailsSchema
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-schemas";
import { SecretScanningDataSources } from "@app/lib/api-docs";
import { BasicRepositoryRegex } from "@app/lib/regex";
import { AppConnection } from "@app/services/app-connection/app-connection-enums";

// Provider-specific configuration for a Bitbucket secret scanning data source:
// which workspace to target and which repositories within it to include.
export const BitbucketDataSourceConfigSchema = z.object({
  workspaceSlug: z
    .string()
    .min(1, "Workspace slug required")
    .max(128)
    .describe(SecretScanningDataSources.CONFIG.BITBUCKET.workspaceSlug),
  // "*" selects every repository in the workspace; any other entry must match
  // the workspace/repo format enforced by BasicRepositoryRegex
  includeRepos: z
    .array(
      z
        .string()
        .min(1)
        .max(256)
        .refine((value) => value === "*" || BasicRepositoryRegex.test(value), "Invalid repository name format")
    )
    .nonempty("One or more repositories required")
    .max(100, "Cannot configure more than 100 repositories")
    .default(["*"])
    .describe(SecretScanningDataSources.CONFIG.BITBUCKET.includeRepos)
});

// Full persisted data source shape (base fields + Bitbucket config).
export const BitbucketDataSourceSchema = BaseSecretScanningDataSourceSchema({
  type: SecretScanningDataSource.Bitbucket,
  isConnectionRequired: true
})
  .extend({
    config: BitbucketDataSourceConfigSchema
  })
  .describe(
    JSON.stringify({
      title: "Bitbucket"
    })
  );

// Request schema for creating a Bitbucket data source.
export const CreateBitbucketDataSourceSchema = BaseCreateSecretScanningDataSourceSchema({
  type: SecretScanningDataSource.Bitbucket,
  isConnectionRequired: true
})
  .extend({
    config: BitbucketDataSourceConfigSchema
  })
  .describe(
    JSON.stringify({
      title: "Bitbucket"
    })
  );

// Request schema for updating a Bitbucket data source; config is optional on update.
export const UpdateBitbucketDataSourceSchema = BaseUpdateSecretScanningDataSourceSchema(
  SecretScanningDataSource.Bitbucket
)
  .extend({
    config: BitbucketDataSourceConfigSchema.optional()
  })
  .describe(
    JSON.stringify({
      title: "Bitbucket"
    })
  );

// Catalog entry describing the Bitbucket data source option in list endpoints.
export const BitbucketDataSourceListItemSchema = z
  .object({
    name: z.literal("Bitbucket"),
    connection: z.literal(AppConnection.Bitbucket),
    type: z.literal(SecretScanningDataSource.Bitbucket)
  })
  .describe(
    JSON.stringify({
      title: "Bitbucket"
    })
  );

// A secret scanning finding originating from a Bitbucket repository scan.
export const BitbucketFindingSchema = BaseSecretScanningFindingSchema.extend({
  resourceType: z.literal(SecretScanningResource.Repository),
  dataSourceType: z.literal(SecretScanningDataSource.Bitbucket),
  details: GitRepositoryScanFindingDetailsSchema
});

// Credentials stored (encrypted) per data source: the Bitbucket webhook ID and
// the secret used to verify webhook payload signatures.
export const BitbucketDataSourceCredentialsSchema = z.object({
  webhookId: z.string(),
  webhookSecret: z.string()
});
|
@ -0,0 +1,104 @@
|
|||||||
|
import crypto from "crypto";
|
||||||
|
|
||||||
|
import { TSecretScanningV2DALFactory } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-dal";
|
||||||
|
import { SecretScanningDataSource } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
|
||||||
|
import { TSecretScanningV2QueueServiceFactory } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-queue";
|
||||||
|
import { logger } from "@app/lib/logger";
|
||||||
|
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
|
||||||
|
import { KmsDataKey } from "@app/services/kms/kms-types";
|
||||||
|
|
||||||
|
import {
|
||||||
|
TBitbucketDataSource,
|
||||||
|
TBitbucketDataSourceCredentials,
|
||||||
|
TBitbucketPushEvent
|
||||||
|
} from "./bitbucket-secret-scanning-types";
|
||||||
|
|
||||||
|
export const bitbucketSecretScanningService = (
|
||||||
|
secretScanningV2DAL: TSecretScanningV2DALFactory,
|
||||||
|
secretScanningV2Queue: Pick<TSecretScanningV2QueueServiceFactory, "queueResourceDiffScan">,
|
||||||
|
kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">
|
||||||
|
) => {
|
||||||
|
const handlePushEvent = async (
|
||||||
|
payload: TBitbucketPushEvent & { dataSourceId: string; receivedSignature: string; bodyString: string }
|
||||||
|
) => {
|
||||||
|
const { push, repository, bodyString, receivedSignature } = payload;
|
||||||
|
|
||||||
|
if (!push?.changes?.length || !repository?.workspace?.uuid) {
|
||||||
|
logger.warn(
|
||||||
|
`secretScanningV2PushEvent: Bitbucket - Insufficient data [changes=${
|
||||||
|
push?.changes?.length ?? 0
|
||||||
|
}] [repository=${repository?.name}] [workspaceUuid=${repository?.workspace?.uuid}]`
|
||||||
|
);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
const dataSource = (await secretScanningV2DAL.dataSources.findOne({
|
||||||
|
id: payload.dataSourceId,
|
||||||
|
type: SecretScanningDataSource.Bitbucket
|
||||||
|
})) as TBitbucketDataSource | undefined;
|
||||||
|
|
||||||
|
if (!dataSource) {
|
||||||
|
logger.error(
|
||||||
|
`secretScanningV2PushEvent: Bitbucket - Could not find data source [workspaceUuid=${repository.workspace.uuid}]`
|
||||||
|
);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
const {
|
||||||
|
isAutoScanEnabled,
|
||||||
|
config: { includeRepos },
|
||||||
|
encryptedCredentials,
|
||||||
|
projectId
|
||||||
|
} = dataSource;
|
||||||
|
|
||||||
|
if (!encryptedCredentials) {
|
||||||
|
logger.info(
|
||||||
|
`secretScanningV2PushEvent: Bitbucket - Could not find encrypted credentials [dataSourceId=${dataSource.id}] [workspaceUuid=${repository.workspace.uuid}]`
|
||||||
|
);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
const { decryptor } = await kmsService.createCipherPairWithDataKey({
|
||||||
|
type: KmsDataKey.SecretManager,
|
||||||
|
projectId
|
||||||
|
});
|
||||||
|
|
||||||
|
const decryptedCredentials = decryptor({ cipherTextBlob: encryptedCredentials });
|
||||||
|
|
||||||
|
const credentials = JSON.parse(decryptedCredentials.toString()) as TBitbucketDataSourceCredentials;
|
||||||
|
|
||||||
|
const hmac = crypto.createHmac("sha256", credentials.webhookSecret);
|
||||||
|
hmac.update(bodyString);
|
||||||
|
const calculatedSignature = hmac.digest("hex");
|
||||||
|
|
||||||
|
if (calculatedSignature !== receivedSignature) {
|
||||||
|
logger.error(
|
||||||
|
`secretScanningV2PushEvent: Bitbucket - Invalid signature for webhook [dataSourceId=${dataSource.id}] [workspaceUuid=${repository.workspace.uuid}]`
|
||||||
|
);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!isAutoScanEnabled) {
|
||||||
|
logger.info(
|
||||||
|
`secretScanningV2PushEvent: Bitbucket - ignoring due to auto scan disabled [dataSourceId=${dataSource.id}] [workspaceUuid=${repository.workspace.uuid}]`
|
||||||
|
);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (includeRepos.includes("*") || includeRepos.includes(repository.full_name)) {
|
||||||
|
await secretScanningV2Queue.queueResourceDiffScan({
|
||||||
|
dataSourceType: SecretScanningDataSource.Bitbucket,
|
||||||
|
payload,
|
||||||
|
dataSourceId: dataSource.id
|
||||||
|
});
|
||||||
|
} else {
|
||||||
|
logger.info(
|
||||||
|
`secretScanningV2PushEvent: Bitbucket - ignoring due to repository not being present in config [workspaceUuid=${repository.workspace.uuid}] [dataSourceId=${dataSource.id}]`
|
||||||
|
);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
return {
|
||||||
|
handlePushEvent
|
||||||
|
};
|
||||||
|
};
|
@ -0,0 +1,85 @@
|
|||||||
|
import { z } from "zod";

import { SecretScanningDataSource } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
import { TBitbucketConnection } from "@app/services/app-connection/bitbucket";

import {
  BitbucketDataSourceCredentialsSchema,
  BitbucketDataSourceListItemSchema,
  BitbucketDataSourceSchema,
  BitbucketFindingSchema,
  CreateBitbucketDataSourceSchema
} from "./bitbucket-secret-scanning-schemas";

// Persisted Bitbucket data source record.
export type TBitbucketDataSource = z.infer<typeof BitbucketDataSourceSchema>;

// Input accepted when creating a Bitbucket data source.
export type TBitbucketDataSourceInput = z.infer<typeof CreateBitbucketDataSourceSchema>;

// Catalog list-item entry for the Bitbucket data source option.
export type TBitbucketDataSourceListItem = z.infer<typeof BitbucketDataSourceListItemSchema>;

// Decrypted webhook credentials (webhook ID + signing secret).
export type TBitbucketDataSourceCredentials = z.infer<typeof BitbucketDataSourceCredentialsSchema>;

// A finding produced by scanning a Bitbucket repository.
export type TBitbucketFinding = z.infer<typeof BitbucketFindingSchema>;

// Data source joined with its resolved app connection.
export type TBitbucketDataSourceWithConnection = TBitbucketDataSource & {
  connection: TBitbucketConnection;
};

// Repository descriptor as delivered in Bitbucket push webhook payloads.
export type TBitbucketPushEventRepository = {
  full_name: string;
  name: string;
  workspace: {
    slug: string;
    uuid: string;
  };
  uuid: string;
};

// Commit descriptor within a push webhook payload. `author.raw` is the
// freeform "Name <email>" string; `author.user` is present only for commits
// linked to a Bitbucket account.
export type TBitbucketPushEventCommit = {
  hash: string;
  message: string;
  author: {
    raw: string;
    user?: {
      display_name: string;
      uuid: string;
      nickname: string;
    };
  };
  date: string;
};

// A single ref change (branch/tag update) within a push webhook payload.
export type TBitbucketPushEventChange = {
  new?: {
    name: string;
    type: string;
  };
  old?: {
    name: string;
    type: string;
  };
  created: boolean;
  closed: boolean;
  forced: boolean;
  commits: TBitbucketPushEventCommit[];
};

// Top-level shape of a Bitbucket "repo:push" webhook payload.
export type TBitbucketPushEvent = {
  push: {
    changes: TBitbucketPushEventChange[];
  };
  repository: TBitbucketPushEventRepository;
  actor: {
    display_name: string;
    uuid: string;
    nickname: string;
  };
};

// Queue job payload for scanning the diff introduced by a push event.
export type TQueueBitbucketResourceDiffScan = {
  dataSourceType: SecretScanningDataSource.Bitbucket;
  payload: TBitbucketPushEvent & { dataSourceId: string };
  dataSourceId: string;
  resourceId: string;
  scanId: string;
};
|
@ -0,0 +1,3 @@
|
|||||||
|
// Barrel file re-exporting the Bitbucket secret scanning module surface.
export * from "./bitbucket-secret-scanning-constants";
export * from "./bitbucket-secret-scanning-schemas";
export * from "./bitbucket-secret-scanning-types";
|
@ -19,18 +19,23 @@ import {
|
|||||||
TSecretScanningFactoryGetFullScanPath,
|
TSecretScanningFactoryGetFullScanPath,
|
||||||
TSecretScanningFactoryInitialize,
|
TSecretScanningFactoryInitialize,
|
||||||
TSecretScanningFactoryListRawResources,
|
TSecretScanningFactoryListRawResources,
|
||||||
TSecretScanningFactoryPostInitialization
|
TSecretScanningFactoryPostInitialization,
|
||||||
|
TSecretScanningFactoryTeardown
|
||||||
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-types";
|
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-types";
|
||||||
import { getConfig } from "@app/lib/config/env";
|
import { getConfig } from "@app/lib/config/env";
|
||||||
import { BadRequestError } from "@app/lib/errors";
|
import { BadRequestError } from "@app/lib/errors";
|
||||||
import { titleCaseToCamelCase } from "@app/lib/fn";
|
import { titleCaseToCamelCase } from "@app/lib/fn";
|
||||||
import { GitHubRepositoryRegex } from "@app/lib/regex";
|
import { BasicRepositoryRegex } from "@app/lib/regex";
|
||||||
import { listGitHubRadarRepositories, TGitHubRadarConnection } from "@app/services/app-connection/github-radar";
|
import { listGitHubRadarRepositories, TGitHubRadarConnection } from "@app/services/app-connection/github-radar";
|
||||||
|
|
||||||
import { TGitHubDataSourceWithConnection, TQueueGitHubResourceDiffScan } from "./github-secret-scanning-types";
|
import {
|
||||||
|
TGitHubDataSourceInput,
|
||||||
|
TGitHubDataSourceWithConnection,
|
||||||
|
TQueueGitHubResourceDiffScan
|
||||||
|
} from "./github-secret-scanning-types";
|
||||||
|
|
||||||
export const GitHubSecretScanningFactory = () => {
|
export const GitHubSecretScanningFactory = () => {
|
||||||
const initialize: TSecretScanningFactoryInitialize<TGitHubRadarConnection> = async (
|
const initialize: TSecretScanningFactoryInitialize<TGitHubDataSourceInput, TGitHubRadarConnection> = async (
|
||||||
{ connection, secretScanningV2DAL },
|
{ connection, secretScanningV2DAL },
|
||||||
callback
|
callback
|
||||||
) => {
|
) => {
|
||||||
@ -51,10 +56,17 @@ export const GitHubSecretScanningFactory = () => {
|
|||||||
});
|
});
|
||||||
};
|
};
|
||||||
|
|
||||||
const postInitialization: TSecretScanningFactoryPostInitialization<TGitHubRadarConnection> = async () => {
|
const postInitialization: TSecretScanningFactoryPostInitialization<
|
||||||
|
TGitHubDataSourceInput,
|
||||||
|
TGitHubRadarConnection
|
||||||
|
> = async () => {
|
||||||
// no post-initialization required
|
// no post-initialization required
|
||||||
};
|
};
|
||||||
|
|
||||||
|
const teardown: TSecretScanningFactoryTeardown<TGitHubDataSourceWithConnection> = async () => {
|
||||||
|
// no termination required
|
||||||
|
};
|
||||||
|
|
||||||
const listRawResources: TSecretScanningFactoryListRawResources<TGitHubDataSourceWithConnection> = async (
|
const listRawResources: TSecretScanningFactoryListRawResources<TGitHubDataSourceWithConnection> = async (
|
||||||
dataSource
|
dataSource
|
||||||
) => {
|
) => {
|
||||||
@ -107,7 +119,7 @@ export const GitHubSecretScanningFactory = () => {
|
|||||||
|
|
||||||
const repoPath = join(tempFolder, "repo.git");
|
const repoPath = join(tempFolder, "repo.git");
|
||||||
|
|
||||||
if (!GitHubRepositoryRegex.test(resourceName)) {
|
if (!BasicRepositoryRegex.test(resourceName)) {
|
||||||
throw new Error("Invalid GitHub repository name");
|
throw new Error("Invalid GitHub repository name");
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -225,6 +237,7 @@ export const GitHubSecretScanningFactory = () => {
|
|||||||
listRawResources,
|
listRawResources,
|
||||||
getFullScanPath,
|
getFullScanPath,
|
||||||
getDiffScanResourcePayload,
|
getDiffScanResourcePayload,
|
||||||
getDiffScanFindingsPayload
|
getDiffScanFindingsPayload,
|
||||||
|
teardown
|
||||||
};
|
};
|
||||||
};
|
};
|
||||||
|
@ -12,7 +12,7 @@ import {
|
|||||||
GitRepositoryScanFindingDetailsSchema
|
GitRepositoryScanFindingDetailsSchema
|
||||||
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-schemas";
|
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-schemas";
|
||||||
import { SecretScanningDataSources } from "@app/lib/api-docs";
|
import { SecretScanningDataSources } from "@app/lib/api-docs";
|
||||||
import { GitHubRepositoryRegex } from "@app/lib/regex";
|
import { BasicRepositoryRegex } from "@app/lib/regex";
|
||||||
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
|
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
|
||||||
|
|
||||||
export const GitHubDataSourceConfigSchema = z.object({
|
export const GitHubDataSourceConfigSchema = z.object({
|
||||||
@ -22,7 +22,7 @@ export const GitHubDataSourceConfigSchema = z.object({
|
|||||||
.string()
|
.string()
|
||||||
.min(1)
|
.min(1)
|
||||||
.max(256)
|
.max(256)
|
||||||
.refine((value) => value === "*" || GitHubRepositoryRegex.test(value), "Invalid repository name format")
|
.refine((value) => value === "*" || BasicRepositoryRegex.test(value), "Invalid repository name format")
|
||||||
)
|
)
|
||||||
.nonempty("One or more repositories required")
|
.nonempty("One or more repositories required")
|
||||||
.max(100, "Cannot configure more than 100 repositories")
|
.max(100, "Cannot configure more than 100 repositories")
|
||||||
|
@ -1,5 +1,6 @@
|
|||||||
export enum SecretScanningDataSource {
|
export enum SecretScanningDataSource {
|
||||||
GitHub = "github"
|
GitHub = "github",
|
||||||
|
Bitbucket = "bitbucket"
|
||||||
}
|
}
|
||||||
|
|
||||||
export enum SecretScanningScanStatus {
|
export enum SecretScanningScanStatus {
|
||||||
|
@ -1,19 +1,23 @@
|
|||||||
|
import { BitbucketSecretScanningFactory } from "@app/ee/services/secret-scanning-v2/bitbucket/bitbucket-secret-scanning-factory";
|
||||||
import { GitHubSecretScanningFactory } from "@app/ee/services/secret-scanning-v2/github/github-secret-scanning-factory";
|
import { GitHubSecretScanningFactory } from "@app/ee/services/secret-scanning-v2/github/github-secret-scanning-factory";
|
||||||
|
|
||||||
import { SecretScanningDataSource } from "./secret-scanning-v2-enums";
|
import { SecretScanningDataSource } from "./secret-scanning-v2-enums";
|
||||||
import {
|
import {
|
||||||
TQueueSecretScanningResourceDiffScan,
|
TQueueSecretScanningResourceDiffScan,
|
||||||
TSecretScanningDataSourceCredentials,
|
TSecretScanningDataSourceCredentials,
|
||||||
|
TSecretScanningDataSourceInput,
|
||||||
TSecretScanningDataSourceWithConnection,
|
TSecretScanningDataSourceWithConnection,
|
||||||
TSecretScanningFactory
|
TSecretScanningFactory
|
||||||
} from "./secret-scanning-v2-types";
|
} from "./secret-scanning-v2-types";
|
||||||
|
|
||||||
type TSecretScanningFactoryImplementation = TSecretScanningFactory<
|
type TSecretScanningFactoryImplementation = TSecretScanningFactory<
|
||||||
TSecretScanningDataSourceWithConnection,
|
TSecretScanningDataSourceWithConnection,
|
||||||
TSecretScanningDataSourceCredentials,
|
TQueueSecretScanningResourceDiffScan["payload"],
|
||||||
TQueueSecretScanningResourceDiffScan["payload"]
|
TSecretScanningDataSourceInput,
|
||||||
|
TSecretScanningDataSourceCredentials
|
||||||
>;
|
>;
|
||||||
|
|
||||||
export const SECRET_SCANNING_FACTORY_MAP: Record<SecretScanningDataSource, TSecretScanningFactoryImplementation> = {
|
export const SECRET_SCANNING_FACTORY_MAP: Record<SecretScanningDataSource, TSecretScanningFactoryImplementation> = {
|
||||||
[SecretScanningDataSource.GitHub]: GitHubSecretScanningFactory as TSecretScanningFactoryImplementation
|
[SecretScanningDataSource.GitHub]: GitHubSecretScanningFactory as TSecretScanningFactoryImplementation,
|
||||||
|
[SecretScanningDataSource.Bitbucket]: BitbucketSecretScanningFactory as TSecretScanningFactoryImplementation
|
||||||
};
|
};
|
||||||
|
@ -4,6 +4,7 @@ import RE2 from "re2";
|
|||||||
|
|
||||||
import { readFindingsFile } from "@app/ee/services/secret-scanning/secret-scanning-queue/secret-scanning-fns";
|
import { readFindingsFile } from "@app/ee/services/secret-scanning/secret-scanning-queue/secret-scanning-fns";
|
||||||
import { SecretMatch } from "@app/ee/services/secret-scanning/secret-scanning-queue/secret-scanning-queue-types";
|
import { SecretMatch } from "@app/ee/services/secret-scanning/secret-scanning-queue/secret-scanning-queue-types";
|
||||||
|
import { BITBUCKET_SECRET_SCANNING_DATA_SOURCE_LIST_OPTION } from "@app/ee/services/secret-scanning-v2/bitbucket";
|
||||||
import { GITHUB_SECRET_SCANNING_DATA_SOURCE_LIST_OPTION } from "@app/ee/services/secret-scanning-v2/github";
|
import { GITHUB_SECRET_SCANNING_DATA_SOURCE_LIST_OPTION } from "@app/ee/services/secret-scanning-v2/github";
|
||||||
import { titleCaseToCamelCase } from "@app/lib/fn";
|
import { titleCaseToCamelCase } from "@app/lib/fn";
|
||||||
|
|
||||||
@ -11,7 +12,8 @@ import { SecretScanningDataSource, SecretScanningFindingSeverity } from "./secre
|
|||||||
import { TCloneRepository, TGetFindingsPayload, TSecretScanningDataSourceListItem } from "./secret-scanning-v2-types";
|
import { TCloneRepository, TGetFindingsPayload, TSecretScanningDataSourceListItem } from "./secret-scanning-v2-types";
|
||||||
|
|
||||||
const SECRET_SCANNING_SOURCE_LIST_OPTIONS: Record<SecretScanningDataSource, TSecretScanningDataSourceListItem> = {
|
const SECRET_SCANNING_SOURCE_LIST_OPTIONS: Record<SecretScanningDataSource, TSecretScanningDataSourceListItem> = {
|
||||||
[SecretScanningDataSource.GitHub]: GITHUB_SECRET_SCANNING_DATA_SOURCE_LIST_OPTION
|
[SecretScanningDataSource.GitHub]: GITHUB_SECRET_SCANNING_DATA_SOURCE_LIST_OPTION,
|
||||||
|
[SecretScanningDataSource.Bitbucket]: BITBUCKET_SECRET_SCANNING_DATA_SOURCE_LIST_OPTION
|
||||||
};
|
};
|
||||||
|
|
||||||
export const listSecretScanningDataSourceOptions = () => {
|
export const listSecretScanningDataSourceOptions = () => {
|
||||||
|
@ -2,13 +2,16 @@ import { SecretScanningDataSource } from "@app/ee/services/secret-scanning-v2/se
|
|||||||
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
|
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
|
||||||
|
|
||||||
export const SECRET_SCANNING_DATA_SOURCE_NAME_MAP: Record<SecretScanningDataSource, string> = {
|
export const SECRET_SCANNING_DATA_SOURCE_NAME_MAP: Record<SecretScanningDataSource, string> = {
|
||||||
[SecretScanningDataSource.GitHub]: "GitHub"
|
[SecretScanningDataSource.GitHub]: "GitHub",
|
||||||
|
[SecretScanningDataSource.Bitbucket]: "Bitbucket"
|
||||||
};
|
};
|
||||||
|
|
||||||
export const SECRET_SCANNING_DATA_SOURCE_CONNECTION_MAP: Record<SecretScanningDataSource, AppConnection> = {
|
export const SECRET_SCANNING_DATA_SOURCE_CONNECTION_MAP: Record<SecretScanningDataSource, AppConnection> = {
|
||||||
[SecretScanningDataSource.GitHub]: AppConnection.GitHubRadar
|
[SecretScanningDataSource.GitHub]: AppConnection.GitHubRadar,
|
||||||
|
[SecretScanningDataSource.Bitbucket]: AppConnection.Bitbucket
|
||||||
};
|
};
|
||||||
|
|
||||||
export const AUTO_SYNC_DESCRIPTION_HELPER: Record<SecretScanningDataSource, { verb: string; noun: string }> = {
|
export const AUTO_SYNC_DESCRIPTION_HELPER: Record<SecretScanningDataSource, { verb: string; noun: string }> = {
|
||||||
[SecretScanningDataSource.GitHub]: { verb: "push", noun: "repositories" }
|
[SecretScanningDataSource.GitHub]: { verb: "push", noun: "repositories" },
|
||||||
|
[SecretScanningDataSource.Bitbucket]: { verb: "push", noun: "repositories" }
|
||||||
};
|
};
|
||||||
|
@ -19,8 +19,7 @@ export const BaseSecretScanningDataSourceSchema = ({
|
|||||||
// unique to provider
|
// unique to provider
|
||||||
type: true,
|
type: true,
|
||||||
connectionId: true,
|
connectionId: true,
|
||||||
config: true,
|
config: true
|
||||||
encryptedCredentials: true
|
|
||||||
}).extend({
|
}).extend({
|
||||||
type: z.literal(type),
|
type: z.literal(type),
|
||||||
connectionId: isConnectionRequired ? z.string().uuid() : z.null(),
|
connectionId: isConnectionRequired ? z.string().uuid() : z.null(),
|
||||||
|
@ -30,6 +30,8 @@ import {
|
|||||||
TFindSecretScanningDataSourceByNameDTO,
|
TFindSecretScanningDataSourceByNameDTO,
|
||||||
TListSecretScanningDataSourcesByProjectId,
|
TListSecretScanningDataSourcesByProjectId,
|
||||||
TSecretScanningDataSource,
|
TSecretScanningDataSource,
|
||||||
|
TSecretScanningDataSourceCredentials,
|
||||||
|
TSecretScanningDataSourceInput,
|
||||||
TSecretScanningDataSourceWithConnection,
|
TSecretScanningDataSourceWithConnection,
|
||||||
TSecretScanningDataSourceWithDetails,
|
TSecretScanningDataSourceWithDetails,
|
||||||
TSecretScanningFinding,
|
TSecretScanningFinding,
|
||||||
@ -49,6 +51,7 @@ import { TAppConnection } from "@app/services/app-connection/app-connection-type
|
|||||||
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
|
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
|
||||||
import { KmsDataKey } from "@app/services/kms/kms-types";
|
import { KmsDataKey } from "@app/services/kms/kms-types";
|
||||||
|
|
||||||
|
import { bitbucketSecretScanningService } from "./bitbucket/bitbucket-secret-scanning-service";
|
||||||
import { TSecretScanningV2DALFactory } from "./secret-scanning-v2-dal";
|
import { TSecretScanningV2DALFactory } from "./secret-scanning-v2-dal";
|
||||||
import { TSecretScanningV2QueueServiceFactory } from "./secret-scanning-v2-queue";
|
import { TSecretScanningV2QueueServiceFactory } from "./secret-scanning-v2-queue";
|
||||||
|
|
||||||
@ -256,7 +259,7 @@ export const secretScanningV2ServiceFactory = ({
|
|||||||
try {
|
try {
|
||||||
const createdDataSource = await factory.initialize(
|
const createdDataSource = await factory.initialize(
|
||||||
{
|
{
|
||||||
payload,
|
payload: payload as TSecretScanningDataSourceInput,
|
||||||
connection: connection as TSecretScanningDataSourceWithConnection["connection"],
|
connection: connection as TSecretScanningDataSourceWithConnection["connection"],
|
||||||
secretScanningV2DAL
|
secretScanningV2DAL
|
||||||
},
|
},
|
||||||
@ -287,7 +290,7 @@ export const secretScanningV2ServiceFactory = ({
|
|||||||
);
|
);
|
||||||
|
|
||||||
await factory.postInitialization({
|
await factory.postInitialization({
|
||||||
payload,
|
payload: payload as TSecretScanningDataSourceInput,
|
||||||
connection: connection as TSecretScanningDataSourceWithConnection["connection"],
|
connection: connection as TSecretScanningDataSourceWithConnection["connection"],
|
||||||
dataSourceId: dataSource.id,
|
dataSourceId: dataSource.id,
|
||||||
credentials
|
credentials
|
||||||
@ -398,7 +401,6 @@ export const secretScanningV2ServiceFactory = ({
|
|||||||
actorId: actor.id,
|
actorId: actor.id,
|
||||||
actorAuthMethod: actor.authMethod,
|
actorAuthMethod: actor.authMethod,
|
||||||
actorOrgId: actor.orgId,
|
actorOrgId: actor.orgId,
|
||||||
|
|
||||||
projectId: dataSource.projectId
|
projectId: dataSource.projectId
|
||||||
});
|
});
|
||||||
|
|
||||||
@ -412,7 +414,36 @@ export const secretScanningV2ServiceFactory = ({
|
|||||||
message: `Secret Scanning Data Source with ID "${dataSourceId}" is not configured for ${SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type]}`
|
message: `Secret Scanning Data Source with ID "${dataSourceId}" is not configured for ${SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type]}`
|
||||||
});
|
});
|
||||||
|
|
||||||
// TODO: clean up webhooks
|
const factory = SECRET_SCANNING_FACTORY_MAP[type]();
|
||||||
|
|
||||||
|
let connection: TAppConnection | null = null;
|
||||||
|
if (dataSource.connection) {
|
||||||
|
connection = await decryptAppConnection(dataSource.connection, kmsService);
|
||||||
|
}
|
||||||
|
|
||||||
|
let credentials: TSecretScanningDataSourceCredentials | undefined;
|
||||||
|
|
||||||
|
if (dataSource.encryptedCredentials) {
|
||||||
|
const { decryptor } = await kmsService.createCipherPairWithDataKey({
|
||||||
|
type: KmsDataKey.SecretManager,
|
||||||
|
projectId: dataSource.projectId
|
||||||
|
});
|
||||||
|
|
||||||
|
credentials = JSON.parse(
|
||||||
|
decryptor({
|
||||||
|
cipherTextBlob: dataSource.encryptedCredentials
|
||||||
|
}).toString()
|
||||||
|
) as TSecretScanningDataSourceCredentials;
|
||||||
|
}
|
||||||
|
|
||||||
|
await factory.teardown({
|
||||||
|
dataSource: {
|
||||||
|
...dataSource,
|
||||||
|
// @ts-expect-error currently we don't have a null connection data source
|
||||||
|
connection
|
||||||
|
},
|
||||||
|
credentials
|
||||||
|
});
|
||||||
|
|
||||||
await secretScanningV2DAL.dataSources.deleteById(dataSourceId);
|
await secretScanningV2DAL.dataSources.deleteById(dataSourceId);
|
||||||
|
|
||||||
@ -869,6 +900,7 @@ export const secretScanningV2ServiceFactory = ({
|
|||||||
updateSecretScanningFindingById,
|
updateSecretScanningFindingById,
|
||||||
findSecretScanningConfigByProjectId,
|
findSecretScanningConfigByProjectId,
|
||||||
upsertSecretScanningConfig,
|
upsertSecretScanningConfig,
|
||||||
github: githubSecretScanningService(secretScanningV2DAL, secretScanningV2Queue)
|
github: githubSecretScanningService(secretScanningV2DAL, secretScanningV2Queue),
|
||||||
|
bitbucket: bitbucketSecretScanningService(secretScanningV2DAL, secretScanningV2Queue, kmsService)
|
||||||
};
|
};
|
||||||
};
|
};
|
||||||
|
@ -4,6 +4,15 @@ import {
|
|||||||
TSecretScanningResources,
|
TSecretScanningResources,
|
||||||
TSecretScanningScans
|
TSecretScanningScans
|
||||||
} from "@app/db/schemas";
|
} from "@app/db/schemas";
|
||||||
|
import {
|
||||||
|
TBitbucketDataSource,
|
||||||
|
TBitbucketDataSourceCredentials,
|
||||||
|
TBitbucketDataSourceInput,
|
||||||
|
TBitbucketDataSourceListItem,
|
||||||
|
TBitbucketDataSourceWithConnection,
|
||||||
|
TBitbucketFinding,
|
||||||
|
TQueueBitbucketResourceDiffScan
|
||||||
|
} from "@app/ee/services/secret-scanning-v2/bitbucket";
|
||||||
import {
|
import {
|
||||||
TGitHubDataSource,
|
TGitHubDataSource,
|
||||||
TGitHubDataSourceInput,
|
TGitHubDataSourceInput,
|
||||||
@ -19,7 +28,7 @@ import {
|
|||||||
SecretScanningScanStatus
|
SecretScanningScanStatus
|
||||||
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
|
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
|
||||||
|
|
||||||
export type TSecretScanningDataSource = TGitHubDataSource;
|
export type TSecretScanningDataSource = TGitHubDataSource | TBitbucketDataSource;
|
||||||
|
|
||||||
export type TSecretScanningDataSourceWithDetails = TSecretScanningDataSource & {
|
export type TSecretScanningDataSourceWithDetails = TSecretScanningDataSource & {
|
||||||
lastScannedAt?: Date | null;
|
lastScannedAt?: Date | null;
|
||||||
@ -41,13 +50,17 @@ export type TSecretScanningScanWithDetails = TSecretScanningScans & {
|
|||||||
resourceName: string;
|
resourceName: string;
|
||||||
};
|
};
|
||||||
|
|
||||||
export type TSecretScanningDataSourceWithConnection = TGitHubDataSourceWithConnection;
|
export type TSecretScanningDataSourceWithConnection =
|
||||||
|
| TGitHubDataSourceWithConnection
|
||||||
|
| TBitbucketDataSourceWithConnection;
|
||||||
|
|
||||||
export type TSecretScanningDataSourceInput = TGitHubDataSourceInput;
|
export type TSecretScanningDataSourceInput = TGitHubDataSourceInput | TBitbucketDataSourceInput;
|
||||||
|
|
||||||
export type TSecretScanningDataSourceListItem = TGitHubDataSourceListItem;
|
export type TSecretScanningDataSourceListItem = TGitHubDataSourceListItem | TBitbucketDataSourceListItem;
|
||||||
|
|
||||||
export type TSecretScanningFinding = TGitHubFinding;
|
export type TSecretScanningDataSourceCredentials = TBitbucketDataSourceCredentials | undefined;
|
||||||
|
|
||||||
|
export type TSecretScanningFinding = TGitHubFinding | TBitbucketFinding;
|
||||||
|
|
||||||
export type TListSecretScanningDataSourcesByProjectId = {
|
export type TListSecretScanningDataSourcesByProjectId = {
|
||||||
projectId: string;
|
projectId: string;
|
||||||
@ -99,7 +112,7 @@ export type TQueueSecretScanningDataSourceFullScan = {
|
|||||||
scanId: string;
|
scanId: string;
|
||||||
};
|
};
|
||||||
|
|
||||||
export type TQueueSecretScanningResourceDiffScan = TQueueGitHubResourceDiffScan;
|
export type TQueueSecretScanningResourceDiffScan = TQueueGitHubResourceDiffScan | TQueueBitbucketResourceDiffScan;
|
||||||
|
|
||||||
export type TQueueSecretScanningSendNotification = {
|
export type TQueueSecretScanningSendNotification = {
|
||||||
dataSource: TSecretScanningDataSources;
|
dataSource: TSecretScanningDataSources;
|
||||||
@ -138,11 +151,12 @@ export type TSecretScanningDataSourceRaw = NonNullable<
|
|||||||
>;
|
>;
|
||||||
|
|
||||||
export type TSecretScanningFactoryInitialize<
|
export type TSecretScanningFactoryInitialize<
|
||||||
|
P extends TSecretScanningDataSourceInput,
|
||||||
T extends TSecretScanningDataSourceWithConnection["connection"] | undefined = undefined,
|
T extends TSecretScanningDataSourceWithConnection["connection"] | undefined = undefined,
|
||||||
C extends TSecretScanningDataSourceCredentials = undefined
|
C extends TSecretScanningDataSourceCredentials = undefined
|
||||||
> = (
|
> = (
|
||||||
params: {
|
params: {
|
||||||
payload: TCreateSecretScanningDataSourceDTO;
|
payload: P;
|
||||||
connection: T;
|
connection: T;
|
||||||
secretScanningV2DAL: TSecretScanningV2DALFactory;
|
secretScanningV2DAL: TSecretScanningV2DALFactory;
|
||||||
},
|
},
|
||||||
@ -150,24 +164,27 @@ export type TSecretScanningFactoryInitialize<
|
|||||||
) => Promise<TSecretScanningDataSourceRaw>;
|
) => Promise<TSecretScanningDataSourceRaw>;
|
||||||
|
|
||||||
export type TSecretScanningFactoryPostInitialization<
|
export type TSecretScanningFactoryPostInitialization<
|
||||||
|
P extends TSecretScanningDataSourceInput,
|
||||||
T extends TSecretScanningDataSourceWithConnection["connection"] | undefined = undefined,
|
T extends TSecretScanningDataSourceWithConnection["connection"] | undefined = undefined,
|
||||||
C extends TSecretScanningDataSourceCredentials = undefined
|
C extends TSecretScanningDataSourceCredentials = undefined
|
||||||
> = (params: {
|
> = (params: { payload: P; connection: T; credentials: C; dataSourceId: string }) => Promise<void>;
|
||||||
payload: TCreateSecretScanningDataSourceDTO;
|
|
||||||
connection: T;
|
export type TSecretScanningFactoryTeardown<
|
||||||
credentials: C;
|
T extends TSecretScanningDataSourceWithConnection,
|
||||||
dataSourceId: string;
|
C extends TSecretScanningDataSourceCredentials = undefined
|
||||||
}) => Promise<void>;
|
> = (params: { dataSource: T; credentials: C }) => Promise<void>;
|
||||||
|
|
||||||
export type TSecretScanningFactory<
|
export type TSecretScanningFactory<
|
||||||
T extends TSecretScanningDataSourceWithConnection,
|
T extends TSecretScanningDataSourceWithConnection,
|
||||||
C extends TSecretScanningDataSourceCredentials,
|
P extends TQueueSecretScanningResourceDiffScan["payload"],
|
||||||
P extends TQueueSecretScanningResourceDiffScan["payload"]
|
I extends TSecretScanningDataSourceInput,
|
||||||
|
C extends TSecretScanningDataSourceCredentials | undefined = undefined
|
||||||
> = () => {
|
> = () => {
|
||||||
listRawResources: TSecretScanningFactoryListRawResources<T>;
|
listRawResources: TSecretScanningFactoryListRawResources<T>;
|
||||||
getFullScanPath: TSecretScanningFactoryGetFullScanPath<T>;
|
getFullScanPath: TSecretScanningFactoryGetFullScanPath<T>;
|
||||||
initialize: TSecretScanningFactoryInitialize<T["connection"] | undefined, C>;
|
initialize: TSecretScanningFactoryInitialize<I, T["connection"] | undefined, C>;
|
||||||
postInitialization: TSecretScanningFactoryPostInitialization<T["connection"] | undefined, C>;
|
postInitialization: TSecretScanningFactoryPostInitialization<I, T["connection"] | undefined, C>;
|
||||||
|
teardown: TSecretScanningFactoryTeardown<T, C>;
|
||||||
getDiffScanResourcePayload: TSecretScanningFactoryGetDiffScanResourcePayload<P>;
|
getDiffScanResourcePayload: TSecretScanningFactoryGetDiffScanResourcePayload<P>;
|
||||||
getDiffScanFindingsPayload: TSecretScanningFactoryGetDiffScanFindingsPayload<T, P>;
|
getDiffScanFindingsPayload: TSecretScanningFactoryGetDiffScanFindingsPayload<T, P>;
|
||||||
};
|
};
|
||||||
@ -185,5 +202,3 @@ export type TUpsertSecretScanningConfigDTO = {
|
|||||||
projectId: string;
|
projectId: string;
|
||||||
content: string | null;
|
content: string | null;
|
||||||
};
|
};
|
||||||
|
|
||||||
export type TSecretScanningDataSourceCredentials = undefined;
|
|
||||||
|
@ -1,7 +1,22 @@
|
|||||||
import { z } from "zod";
|
import { z } from "zod";
|
||||||
|
|
||||||
|
import { BitbucketDataSourceSchema, BitbucketFindingSchema } from "@app/ee/services/secret-scanning-v2/bitbucket";
|
||||||
import { GitHubDataSourceSchema, GitHubFindingSchema } from "@app/ee/services/secret-scanning-v2/github";
|
import { GitHubDataSourceSchema, GitHubFindingSchema } from "@app/ee/services/secret-scanning-v2/github";
|
||||||
|
|
||||||
export const SecretScanningDataSourceSchema = z.discriminatedUnion("type", [GitHubDataSourceSchema]);
|
export const SecretScanningDataSourceSchema = z.discriminatedUnion("type", [
|
||||||
|
GitHubDataSourceSchema,
|
||||||
|
BitbucketDataSourceSchema
|
||||||
|
]);
|
||||||
|
|
||||||
export const SecretScanningFindingSchema = z.discriminatedUnion("resourceType", [GitHubFindingSchema]);
|
export const SecretScanningFindingSchema = z.discriminatedUnion("dataSourceType", [
|
||||||
|
GitHubFindingSchema.describe(
|
||||||
|
JSON.stringify({
|
||||||
|
title: "GitHub"
|
||||||
|
})
|
||||||
|
),
|
||||||
|
BitbucketFindingSchema.describe(
|
||||||
|
JSON.stringify({
|
||||||
|
title: "Bitbucket"
|
||||||
|
})
|
||||||
|
)
|
||||||
|
]);
|
||||||
|
@ -66,7 +66,10 @@ export enum ApiDocsTags {
|
|||||||
KmsKeys = "KMS Keys",
|
KmsKeys = "KMS Keys",
|
||||||
KmsEncryption = "KMS Encryption",
|
KmsEncryption = "KMS Encryption",
|
||||||
KmsSigning = "KMS Signing",
|
KmsSigning = "KMS Signing",
|
||||||
SecretScanning = "Secret Scanning"
|
SecretScanning = "Secret Scanning",
|
||||||
|
OidcSso = "OIDC SSO",
|
||||||
|
SamlSso = "SAML SSO",
|
||||||
|
LdapSso = "LDAP SSO"
|
||||||
}
|
}
|
||||||
|
|
||||||
export const GROUPS = {
|
export const GROUPS = {
|
||||||
@ -2269,9 +2272,16 @@ export const AppConnections = {
|
|||||||
code: "The OAuth code to use to connect with GitLab.",
|
code: "The OAuth code to use to connect with GitLab.",
|
||||||
accessTokenType: "The type of token used to connect with GitLab."
|
accessTokenType: "The type of token used to connect with GitLab."
|
||||||
},
|
},
|
||||||
|
BITBUCKET: {
|
||||||
|
email: "The email used to access Bitbucket.",
|
||||||
|
apiToken: "The API token used to access Bitbucket."
|
||||||
|
},
|
||||||
ZABBIX: {
|
ZABBIX: {
|
||||||
apiToken: "The API Token used to access Zabbix.",
|
apiToken: "The API Token used to access Zabbix.",
|
||||||
instanceUrl: "The Zabbix instance URL to connect with."
|
instanceUrl: "The Zabbix instance URL to connect with."
|
||||||
|
},
|
||||||
|
RAILWAY: {
|
||||||
|
apiToken: "The API token used to authenticate with Railway."
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
@ -2467,6 +2477,14 @@ export const SecretSyncs = {
|
|||||||
hostId: "The ID of the Zabbix host to sync secrets to.",
|
hostId: "The ID of the Zabbix host to sync secrets to.",
|
||||||
hostName: "The name of the Zabbix host to sync secrets to.",
|
hostName: "The name of the Zabbix host to sync secrets to.",
|
||||||
macroType: "The type of macro to sync secrets to. (0: Text, 1: Secret)"
|
macroType: "The type of macro to sync secrets to. (0: Text, 1: Secret)"
|
||||||
|
},
|
||||||
|
RAILWAY: {
|
||||||
|
projectId: "The ID of the Railway project to sync secrets to.",
|
||||||
|
projectName: "The name of the Railway project to sync secrets to.",
|
||||||
|
environmentId: "The Railway environment to sync secrets to.",
|
||||||
|
environmentName: "The Railway environment to sync secrets to.",
|
||||||
|
serviceId: "The Railway service that secrets should be synced to.",
|
||||||
|
serviceName: "The Railway service that secrets should be synced to."
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
@ -2587,7 +2605,9 @@ export const SecretRotations = {
|
|||||||
|
|
||||||
export const SecretScanningDataSources = {
|
export const SecretScanningDataSources = {
|
||||||
LIST: (type?: SecretScanningDataSource) => ({
|
LIST: (type?: SecretScanningDataSource) => ({
|
||||||
projectId: `The ID of the project to list ${type ? SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type] : "Scanning"} Data Sources from.`
|
projectId: `The ID of the project to list ${
|
||||||
|
type ? SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type] : "Scanning"
|
||||||
|
} Data Sources from.`
|
||||||
}),
|
}),
|
||||||
GET_BY_ID: (type: SecretScanningDataSource) => ({
|
GET_BY_ID: (type: SecretScanningDataSource) => ({
|
||||||
dataSourceId: `The ID of the ${SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type]} Data Source to retrieve.`
|
dataSourceId: `The ID of the ${SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type]} Data Source to retrieve.`
|
||||||
@ -2638,6 +2658,10 @@ export const SecretScanningDataSources = {
|
|||||||
CONFIG: {
|
CONFIG: {
|
||||||
GITHUB: {
|
GITHUB: {
|
||||||
includeRepos: 'The repositories to include when scanning. Defaults to all repositories (["*"]).'
|
includeRepos: 'The repositories to include when scanning. Defaults to all repositories (["*"]).'
|
||||||
|
},
|
||||||
|
BITBUCKET: {
|
||||||
|
workspaceSlug: "The workspace to scan.",
|
||||||
|
includeRepos: 'The repositories to include when scanning. Defaults to all repositories (["*"]).'
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
@ -2662,3 +2686,113 @@ export const SecretScanningConfigs = {
|
|||||||
content: "The contents of the Secret Scanning Configuration file."
|
content: "The contents of the Secret Scanning Configuration file."
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
|
export const OidcSSo = {
|
||||||
|
GET_CONFIG: {
|
||||||
|
organizationId: "The ID of the organization to get the OIDC config for."
|
||||||
|
},
|
||||||
|
UPDATE_CONFIG: {
|
||||||
|
organizationId: "The ID of the organization to update the OIDC config for.",
|
||||||
|
allowedEmailDomains:
|
||||||
|
"A list of allowed email domains that users can use to authenticate with. This field is comma separated. Example: 'example.com,acme.com'",
|
||||||
|
discoveryURL: "The URL of the OIDC discovery endpoint.",
|
||||||
|
configurationType: "The configuration type to use for the OIDC configuration.",
|
||||||
|
issuer:
|
||||||
|
"The issuer for the OIDC configuration. This is only supported when the OIDC configuration type is set to 'custom'.",
|
||||||
|
authorizationEndpoint:
|
||||||
|
"The endpoint to use for OIDC authorization. This is only supported when the OIDC configuration type is set to 'custom'.",
|
||||||
|
jwksUri: "The URL of the OIDC JWKS endpoint.",
|
||||||
|
tokenEndpoint: "The token endpoint to use for OIDC token exchange.",
|
||||||
|
userinfoEndpoint: "The userinfo endpoint to get user information from the OIDC provider.",
|
||||||
|
clientId: "The client ID to use for OIDC authentication.",
|
||||||
|
clientSecret: "The client secret to use for OIDC authentication.",
|
||||||
|
isActive: "Whether to enable or disable this OIDC configuration.",
|
||||||
|
manageGroupMemberships:
|
||||||
|
"Whether to manage group memberships for the OIDC configuration. If enabled, users will automatically be assigned groups when they sign in, based on which groups they are a member of in the OIDC provider.",
|
||||||
|
jwtSignatureAlgorithm: "The algorithm to use for JWT signature verification."
|
||||||
|
},
|
||||||
|
CREATE_CONFIG: {
|
||||||
|
organizationId: "The ID of the organization to create the OIDC config for.",
|
||||||
|
allowedEmailDomains:
|
||||||
|
"A list of allowed email domains that users can use to authenticate with. This field is comma separated.",
|
||||||
|
discoveryURL: "The URL of the OIDC discovery endpoint.",
|
||||||
|
configurationType: "The configuration type to use for the OIDC configuration.",
|
||||||
|
issuer:
|
||||||
|
"The issuer for the OIDC configuration. This is only supported when the OIDC configuration type is set to 'custom'.",
|
||||||
|
authorizationEndpoint:
|
||||||
|
"The authorization endpoint to use for OIDC authorization. This is only supported when the OIDC configuration type is set to 'custom'.",
|
||||||
|
jwksUri: "The URL of the OIDC JWKS endpoint.",
|
||||||
|
tokenEndpoint: "The token endpoint to use for OIDC token exchange.",
|
||||||
|
userinfoEndpoint: "The userinfo endpoint to get user information from the OIDC provider.",
|
||||||
|
clientId: "The client ID to use for OIDC authentication.",
|
||||||
|
clientSecret: "The client secret to use for OIDC authentication.",
|
||||||
|
isActive: "Whether to enable or disable this OIDC configuration.",
|
||||||
|
manageGroupMemberships:
|
||||||
|
"Whether to manage group memberships for the OIDC configuration. If enabled, users will automatically be assigned groups when they sign in, based on which groups they are a member of in the OIDC provider.",
|
||||||
|
jwtSignatureAlgorithm: "The algorithm to use for JWT signature verification."
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
export const SamlSso = {
|
||||||
|
GET_CONFIG: {
|
||||||
|
organizationId: "The ID of the organization to get the SAML config for."
|
||||||
|
},
|
||||||
|
UPDATE_CONFIG: {
|
||||||
|
organizationId: "The ID of the organization to update the SAML config for.",
|
||||||
|
authProvider: "Authentication provider to use for SAML authentication.",
|
||||||
|
isActive: "Whether to enable or disable this SAML configuration.",
|
||||||
|
entryPoint:
|
||||||
|
"The entry point for the SAML authentication. This is the URL that the user will be redirected to after they have authenticated with the SAML provider.",
|
||||||
|
issuer: "The SAML provider issuer URL or entity ID.",
|
||||||
|
cert: "The certificate to use for SAML authentication."
|
||||||
|
},
|
||||||
|
CREATE_CONFIG: {
|
||||||
|
organizationId: "The ID of the organization to create the SAML config for.",
|
||||||
|
authProvider: "Authentication provider to use for SAML authentication.",
|
||||||
|
isActive: "Whether to enable or disable this SAML configuration.",
|
||||||
|
entryPoint:
|
||||||
|
"The entry point for the SAML authentication. This is the URL that the user will be redirected to after they have authenticated with the SAML provider.",
|
||||||
|
issuer: "The SAML provider issuer URL or entity ID.",
|
||||||
|
cert: "The certificate to use for SAML authentication."
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
export const LdapSso = {
|
||||||
|
GET_CONFIG: {
|
||||||
|
organizationId: "The ID of the organization to get the LDAP config for."
|
||||||
|
},
|
||||||
|
CREATE_CONFIG: {
|
||||||
|
organizationId: "The ID of the organization to create the LDAP config for.",
|
||||||
|
isActive: "Whether to enable or disable this LDAP configuration.",
|
||||||
|
url: "The LDAP server to connect to such as `ldap://ldap.your-org.com`, `ldaps://ldap.myorg.com:636` (for connection over SSL/TLS), etc.",
|
||||||
|
bindDN:
|
||||||
|
"The distinguished name of the object to bind when performing the user search such as `cn=infisical,ou=Users,dc=acme,dc=com`",
|
||||||
|
bindPass: "The password to use along with Bind DN when performing the user search.",
|
||||||
|
searchBase: "The base DN to use for the user search such as `ou=Users,dc=acme,dc=com`",
|
||||||
|
uniqueUserAttribute:
|
||||||
|
"The attribute to use as the unique identifier of LDAP users such as `sAMAccountName`, `cn`, `uid`, `objectGUID`. If left blank, defaults to uidNumber",
|
||||||
|
searchFilter:
|
||||||
|
"The template used to construct the LDAP user search filter such as `(uid={{username}})` uses literal `{{username}}` to have the given username used in the search. The default is `(uid={{username}})` which is compatible with several common directory schemas.",
|
||||||
|
groupSearchBase: "LDAP search base to use for group membership search such as `ou=Groups,dc=acme,dc=com`",
|
||||||
|
groupSearchFilter:
|
||||||
|
"The template used when constructing the group membership query such as `(&(objectClass=posixGroup)(memberUid={{.Username}}))`. The template can access the following context variables: `[UserDN, UserName]`. The default is `(|(memberUid={{.Username}})(member={{.UserDN}})(uniqueMember={{.UserDN}}))` which is compatible with several common directory schemas.",
|
||||||
|
caCert: "The CA certificate to use when verifying the LDAP server certificate."
|
||||||
|
},
|
||||||
|
UPDATE_CONFIG: {
|
||||||
|
organizationId: "The ID of the organization to update the LDAP config for.",
|
||||||
|
isActive: "Whether to enable or disable this LDAP configuration.",
|
||||||
|
url: "The LDAP server to connect to such as `ldap://ldap.your-org.com`, `ldaps://ldap.myorg.com:636` (for connection over SSL/TLS), etc.",
|
||||||
|
bindDN:
|
||||||
|
"The distinguished name of object to bind when performing the user search such as `cn=infisical,ou=Users,dc=acme,dc=com`",
|
||||||
|
bindPass: "The password to use along with Bind DN when performing the user search.",
|
||||||
|
uniqueUserAttribute:
|
||||||
|
"The attribute to use as the unique identifier of LDAP users such as `sAMAccountName`, `cn`, `uid`, `objectGUID`. If left blank, defaults to uidNumber",
|
||||||
|
searchFilter:
|
||||||
|
"The template used to construct the LDAP user search filter such as `(uid={{username}})` uses literal `{{username}}` to have the given username used in the search. The default is `(uid={{username}})` which is compatible with several common directory schemas.",
|
||||||
|
searchBase: "The base DN to use for the user search such as `ou=Users,dc=acme,dc=com`",
|
||||||
|
groupSearchBase: "LDAP search base to use for group membership search such as `ou=Groups,dc=acme,dc=com`",
|
||||||
|
groupSearchFilter:
|
||||||
|
"The template used when constructing the group membership query such as `(&(objectClass=posixGroup)(memberUid={{.Username}}))`. The template can access the following context variables: `[UserDN, UserName]`. The default is `(|(memberUid={{.Username}})(member={{.UserDN}})(uniqueMember={{.UserDN}}))` which is compatible with several common directory schemas.",
|
||||||
|
caCert: "The CA certificate to use when verifying the LDAP server certificate."
|
||||||
|
}
|
||||||
|
};
|
||||||
|
@ -28,6 +28,7 @@ const databaseReadReplicaSchema = z
|
|||||||
const envSchema = z
|
const envSchema = z
|
||||||
.object({
|
.object({
|
||||||
INFISICAL_PLATFORM_VERSION: zpStr(z.string().optional()),
|
INFISICAL_PLATFORM_VERSION: zpStr(z.string().optional()),
|
||||||
|
KUBERNETES_AUTO_FETCH_SERVICE_ACCOUNT_TOKEN: zodStrBool.default("false"),
|
||||||
PORT: z.coerce.number().default(IS_PACKAGED ? 8080 : 4000),
|
PORT: z.coerce.number().default(IS_PACKAGED ? 8080 : 4000),
|
||||||
DISABLE_SECRET_SCANNING: z
|
DISABLE_SECRET_SCANNING: z
|
||||||
.enum(["true", "false"])
|
.enum(["true", "false"])
|
||||||
@ -373,6 +374,19 @@ export const overwriteSchema: {
|
|||||||
fields: { key: keyof TEnvConfig; description?: string }[];
|
fields: { key: keyof TEnvConfig; description?: string }[];
|
||||||
};
|
};
|
||||||
} = {
|
} = {
|
||||||
|
aws: {
|
||||||
|
name: "AWS",
|
||||||
|
fields: [
|
||||||
|
{
|
||||||
|
key: "INF_APP_CONNECTION_AWS_ACCESS_KEY_ID",
|
||||||
|
description: "The Access Key ID of your AWS account."
|
||||||
|
},
|
||||||
|
{
|
||||||
|
key: "INF_APP_CONNECTION_AWS_SECRET_ACCESS_KEY",
|
||||||
|
description: "The Client Secret of your AWS application."
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
azure: {
|
azure: {
|
||||||
name: "Azure",
|
name: "Azure",
|
||||||
fields: [
|
fields: [
|
||||||
@ -386,16 +400,79 @@ export const overwriteSchema: {
|
|||||||
}
|
}
|
||||||
]
|
]
|
||||||
},
|
},
|
||||||
google_sso: {
|
gcp: {
|
||||||
name: "Google SSO",
|
name: "GCP",
|
||||||
fields: [
|
fields: [
|
||||||
{
|
{
|
||||||
key: "CLIENT_ID_GOOGLE_LOGIN",
|
key: "INF_APP_CONNECTION_GCP_SERVICE_ACCOUNT_CREDENTIAL",
|
||||||
description: "The Client ID of your GCP OAuth2 application."
|
description: "The GCP Service Account JSON credentials."
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
github_app: {
|
||||||
|
name: "GitHub App",
|
||||||
|
fields: [
|
||||||
|
{
|
||||||
|
key: "INF_APP_CONNECTION_GITHUB_APP_CLIENT_ID",
|
||||||
|
description: "The Client ID of your GitHub application."
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
key: "CLIENT_SECRET_GOOGLE_LOGIN",
|
key: "INF_APP_CONNECTION_GITHUB_APP_CLIENT_SECRET",
|
||||||
description: "The Client Secret of your GCP OAuth2 application."
|
description: "The Client Secret of your GitHub application."
|
||||||
|
},
|
||||||
|
{
|
||||||
|
key: "INF_APP_CONNECTION_GITHUB_APP_SLUG",
|
||||||
|
description: "The Slug of your GitHub application. This is the one found in the URL."
|
||||||
|
},
|
||||||
|
{
|
||||||
|
key: "INF_APP_CONNECTION_GITHUB_APP_ID",
|
||||||
|
description: "The App ID of your GitHub application."
|
||||||
|
},
|
||||||
|
{
|
||||||
|
key: "INF_APP_CONNECTION_GITHUB_APP_PRIVATE_KEY",
|
||||||
|
description: "The Private Key of your GitHub application."
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
github_oauth: {
|
||||||
|
name: "GitHub OAuth",
|
||||||
|
fields: [
|
||||||
|
{
|
||||||
|
key: "INF_APP_CONNECTION_GITHUB_OAUTH_CLIENT_ID",
|
||||||
|
description: "The Client ID of your GitHub OAuth application."
|
||||||
|
},
|
||||||
|
{
|
||||||
|
key: "INF_APP_CONNECTION_GITHUB_OAUTH_CLIENT_SECRET",
|
||||||
|
description: "The Client Secret of your GitHub OAuth application."
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
github_radar_app: {
|
||||||
|
name: "GitHub Radar App",
|
||||||
|
fields: [
|
||||||
|
{
|
||||||
|
key: "INF_APP_CONNECTION_GITHUB_RADAR_APP_CLIENT_ID",
|
||||||
|
description: "The Client ID of your GitHub application."
|
||||||
|
},
|
||||||
|
{
|
||||||
|
key: "INF_APP_CONNECTION_GITHUB_RADAR_APP_CLIENT_SECRET",
|
||||||
|
description: "The Client Secret of your GitHub application."
|
||||||
|
},
|
||||||
|
{
|
||||||
|
key: "INF_APP_CONNECTION_GITHUB_RADAR_APP_SLUG",
|
||||||
|
description: "The Slug of your GitHub application. This is the one found in the URL."
|
||||||
|
},
|
||||||
|
{
|
||||||
|
key: "INF_APP_CONNECTION_GITHUB_RADAR_APP_ID",
|
||||||
|
description: "The App ID of your GitHub application."
|
||||||
|
},
|
||||||
|
{
|
||||||
|
key: "INF_APP_CONNECTION_GITHUB_RADAR_APP_PRIVATE_KEY",
|
||||||
|
description: "The Private Key of your GitHub application."
|
||||||
|
},
|
||||||
|
{
|
||||||
|
key: "INF_APP_CONNECTION_GITHUB_RADAR_APP_WEBHOOK_SECRET",
|
||||||
|
description: "The Webhook Secret of your GitHub application."
|
||||||
}
|
}
|
||||||
]
|
]
|
||||||
},
|
},
|
||||||
@ -412,6 +489,19 @@ export const overwriteSchema: {
|
|||||||
}
|
}
|
||||||
]
|
]
|
||||||
},
|
},
|
||||||
|
gitlab_oauth: {
|
||||||
|
name: "GitLab OAuth",
|
||||||
|
fields: [
|
||||||
|
{
|
||||||
|
key: "INF_APP_CONNECTION_GITLAB_OAUTH_CLIENT_ID",
|
||||||
|
description: "The Client ID of your GitLab OAuth application."
|
||||||
|
},
|
||||||
|
{
|
||||||
|
key: "INF_APP_CONNECTION_GITLAB_OAUTH_CLIENT_SECRET",
|
||||||
|
description: "The Client Secret of your GitLab OAuth application."
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
gitlab_sso: {
|
gitlab_sso: {
|
||||||
name: "GitLab SSO",
|
name: "GitLab SSO",
|
||||||
fields: [
|
fields: [
|
||||||
@ -429,6 +519,19 @@ export const overwriteSchema: {
|
|||||||
"The URL of your self-hosted instance of GitLab where the OAuth application is registered. If no URL is passed in, this will default to https://gitlab.com."
|
"The URL of your self-hosted instance of GitLab where the OAuth application is registered. If no URL is passed in, this will default to https://gitlab.com."
|
||||||
}
|
}
|
||||||
]
|
]
|
||||||
|
},
|
||||||
|
google_sso: {
|
||||||
|
name: "Google SSO",
|
||||||
|
fields: [
|
||||||
|
{
|
||||||
|
key: "CLIENT_ID_GOOGLE_LOGIN",
|
||||||
|
description: "The Client ID of your GCP OAuth2 application."
|
||||||
|
},
|
||||||
|
{
|
||||||
|
key: "CLIENT_SECRET_GOOGLE_LOGIN",
|
||||||
|
description: "The Client Secret of your GCP OAuth2 application."
|
||||||
|
}
|
||||||
|
]
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
|
@ -1,11 +1,18 @@
|
|||||||
import axios from "axios";
|
import axios, { AxiosInstance, CreateAxiosDefaults } from "axios";
|
||||||
import axiosRetry from "axios-retry";
|
import axiosRetry, { IAxiosRetryConfig } from "axios-retry";
|
||||||
|
|
||||||
export const request = axios.create();
|
export function createRequestClient(defaults: CreateAxiosDefaults = {}, retry: IAxiosRetryConfig = {}): AxiosInstance {
|
||||||
|
const client = axios.create(defaults);
|
||||||
|
|
||||||
axiosRetry(request, {
|
axiosRetry(client, {
|
||||||
retries: 3,
|
retries: 3,
|
||||||
// eslint-disable-next-line
|
// eslint-disable-next-line
|
||||||
retryDelay: axiosRetry.exponentialDelay,
|
retryDelay: axiosRetry.exponentialDelay,
|
||||||
retryCondition: (err) => axiosRetry.isNetworkError(err) || axiosRetry.isRetryableError(err)
|
retryCondition: (err) => axiosRetry.isNetworkError(err) || axiosRetry.isRetryableError(err),
|
||||||
});
|
...retry
|
||||||
|
});
|
||||||
|
|
||||||
|
return client;
|
||||||
|
}
|
||||||
|
|
||||||
|
export const request = createRequestClient();
|
||||||
|
@ -10,4 +10,4 @@ export const UserPrincipalNameRegex = new RE2(/^[a-zA-Z0-9._%+-]+@[a-zA-Z0-9._-]
|
|||||||
|
|
||||||
export const LdapUrlRegex = new RE2(/^ldaps?:\/\//);
|
export const LdapUrlRegex = new RE2(/^ldaps?:\/\//);
|
||||||
|
|
||||||
export const GitHubRepositoryRegex = new RE2(/^[a-zA-Z0-9._-]+\/[a-zA-Z0-9._-]+$/);
|
export const BasicRepositoryRegex = new RE2(/^[a-zA-Z0-9._-]+\/[a-zA-Z0-9._-]+$/);
|
||||||
|
@ -1,7 +1,9 @@
|
|||||||
import type { EmitterWebhookEventName } from "@octokit/webhooks/dist-types/types";
|
import type { EmitterWebhookEventName } from "@octokit/webhooks/dist-types/types";
|
||||||
import { PushEvent } from "@octokit/webhooks-types";
|
import { PushEvent } from "@octokit/webhooks-types";
|
||||||
import { Probot } from "probot";
|
import { Probot } from "probot";
|
||||||
|
import { z } from "zod";
|
||||||
|
|
||||||
|
import { TBitbucketPushEvent } from "@app/ee/services/secret-scanning-v2/bitbucket/bitbucket-secret-scanning-types";
|
||||||
import { getConfig } from "@app/lib/config/env";
|
import { getConfig } from "@app/lib/config/env";
|
||||||
import { logger } from "@app/lib/logger";
|
import { logger } from "@app/lib/logger";
|
||||||
import { writeLimit } from "@app/server/config/rateLimiter";
|
import { writeLimit } from "@app/server/config/rateLimiter";
|
||||||
@ -63,4 +65,52 @@ export const registerSecretScanningV2Webhooks = async (server: FastifyZodProvide
|
|||||||
return res.send("ok");
|
return res.send("ok");
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
|
// bitbucket push event webhook
|
||||||
|
server.route({
|
||||||
|
method: "POST",
|
||||||
|
url: "/bitbucket",
|
||||||
|
schema: {
|
||||||
|
querystring: z.object({
|
||||||
|
dataSourceId: z.string().min(1, { message: "Data Source ID is required" })
|
||||||
|
}),
|
||||||
|
headers: z
|
||||||
|
.object({
|
||||||
|
"x-hub-signature": z.string().min(1, { message: "X-Hub-Signature header is required" })
|
||||||
|
})
|
||||||
|
.passthrough()
|
||||||
|
},
|
||||||
|
config: {
|
||||||
|
rateLimit: writeLimit
|
||||||
|
},
|
||||||
|
handler: async (req, res) => {
|
||||||
|
const { dataSourceId } = req.query;
|
||||||
|
|
||||||
|
// Verify signature
|
||||||
|
const signature = req.headers["x-hub-signature"];
|
||||||
|
if (!signature) {
|
||||||
|
logger.error("Missing X-Hub-Signature header for Bitbucket webhook");
|
||||||
|
return res.status(401).send({ message: "Unauthorized: Missing signature" });
|
||||||
|
}
|
||||||
|
|
||||||
|
const expectedSignaturePrefix = "sha256=";
|
||||||
|
if (!signature.startsWith(expectedSignaturePrefix)) {
|
||||||
|
logger.error({ signature }, "Invalid X-Hub-Signature format for Bitbucket webhook");
|
||||||
|
return res.status(401).send({ message: "Unauthorized: Invalid signature format" });
|
||||||
|
}
|
||||||
|
|
||||||
|
const receivedSignature = signature.substring(expectedSignaturePrefix.length);
|
||||||
|
|
||||||
|
if (!dataSourceId) return res.status(400).send({ message: "Data Source ID is required" });
|
||||||
|
|
||||||
|
await server.services.secretScanningV2.bitbucket.handlePushEvent({
|
||||||
|
...(req.body as TBitbucketPushEvent),
|
||||||
|
dataSourceId,
|
||||||
|
receivedSignature,
|
||||||
|
bodyString: JSON.stringify(req.body)
|
||||||
|
});
|
||||||
|
|
||||||
|
return res.send("ok");
|
||||||
|
}
|
||||||
|
});
|
||||||
};
|
};
|
||||||
|
@ -1911,6 +1911,7 @@ export const registerRoutes = async (
|
|||||||
await hsmService.startService();
|
await hsmService.startService();
|
||||||
|
|
||||||
await telemetryQueue.startTelemetryCheck();
|
await telemetryQueue.startTelemetryCheck();
|
||||||
|
await telemetryQueue.startAggregatedEventsJob();
|
||||||
await dailyResourceCleanUp.startCleanUp();
|
await dailyResourceCleanUp.startCleanUp();
|
||||||
await dailyExpiringPkiItemAlert.startSendingAlerts();
|
await dailyExpiringPkiItemAlert.startSendingAlerts();
|
||||||
await pkiSubscriberQueue.startDailyAutoRenewalJob();
|
await pkiSubscriberQueue.startDailyAutoRenewalJob();
|
||||||
|
@ -49,7 +49,8 @@ export const registerAdminRouter = async (server: FastifyZodProvider) => {
|
|||||||
defaultAuthOrgSlug: z.string().nullable(),
|
defaultAuthOrgSlug: z.string().nullable(),
|
||||||
defaultAuthOrgAuthEnforced: z.boolean().nullish(),
|
defaultAuthOrgAuthEnforced: z.boolean().nullish(),
|
||||||
defaultAuthOrgAuthMethod: z.string().nullish(),
|
defaultAuthOrgAuthMethod: z.string().nullish(),
|
||||||
isSecretScanningDisabled: z.boolean()
|
isSecretScanningDisabled: z.boolean(),
|
||||||
|
kubernetesAutoFetchServiceAccountToken: z.boolean()
|
||||||
})
|
})
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
@ -61,7 +62,8 @@ export const registerAdminRouter = async (server: FastifyZodProvider) => {
|
|||||||
config: {
|
config: {
|
||||||
...config,
|
...config,
|
||||||
isMigrationModeOn: serverEnvs.MAINTENANCE_MODE,
|
isMigrationModeOn: serverEnvs.MAINTENANCE_MODE,
|
||||||
isSecretScanningDisabled: serverEnvs.DISABLE_SECRET_SCANNING
|
isSecretScanningDisabled: serverEnvs.DISABLE_SECRET_SCANNING,
|
||||||
|
kubernetesAutoFetchServiceAccountToken: serverEnvs.KUBERNETES_AUTO_FETCH_SERVICE_ACCOUNT_TOKEN
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
@ -31,6 +31,10 @@ import {
|
|||||||
AzureKeyVaultConnectionListItemSchema,
|
AzureKeyVaultConnectionListItemSchema,
|
||||||
SanitizedAzureKeyVaultConnectionSchema
|
SanitizedAzureKeyVaultConnectionSchema
|
||||||
} from "@app/services/app-connection/azure-key-vault";
|
} from "@app/services/app-connection/azure-key-vault";
|
||||||
|
import {
|
||||||
|
BitbucketConnectionListItemSchema,
|
||||||
|
SanitizedBitbucketConnectionSchema
|
||||||
|
} from "@app/services/app-connection/bitbucket";
|
||||||
import {
|
import {
|
||||||
CamundaConnectionListItemSchema,
|
CamundaConnectionListItemSchema,
|
||||||
SanitizedCamundaConnectionSchema
|
SanitizedCamundaConnectionSchema
|
||||||
@ -67,6 +71,10 @@ import {
|
|||||||
PostgresConnectionListItemSchema,
|
PostgresConnectionListItemSchema,
|
||||||
SanitizedPostgresConnectionSchema
|
SanitizedPostgresConnectionSchema
|
||||||
} from "@app/services/app-connection/postgres";
|
} from "@app/services/app-connection/postgres";
|
||||||
|
import {
|
||||||
|
RailwayConnectionListItemSchema,
|
||||||
|
SanitizedRailwayConnectionSchema
|
||||||
|
} from "@app/services/app-connection/railway";
|
||||||
import {
|
import {
|
||||||
RenderConnectionListItemSchema,
|
RenderConnectionListItemSchema,
|
||||||
SanitizedRenderConnectionSchema
|
SanitizedRenderConnectionSchema
|
||||||
@ -118,7 +126,9 @@ const SanitizedAppConnectionSchema = z.union([
|
|||||||
...SanitizedFlyioConnectionSchema.options,
|
...SanitizedFlyioConnectionSchema.options,
|
||||||
...SanitizedGitLabConnectionSchema.options,
|
...SanitizedGitLabConnectionSchema.options,
|
||||||
...SanitizedCloudflareConnectionSchema.options,
|
...SanitizedCloudflareConnectionSchema.options,
|
||||||
...SanitizedZabbixConnectionSchema.options
|
...SanitizedBitbucketConnectionSchema.options,
|
||||||
|
...SanitizedZabbixConnectionSchema.options,
|
||||||
|
...SanitizedRailwayConnectionSchema.options
|
||||||
]);
|
]);
|
||||||
|
|
||||||
const AppConnectionOptionsSchema = z.discriminatedUnion("app", [
|
const AppConnectionOptionsSchema = z.discriminatedUnion("app", [
|
||||||
@ -151,7 +161,9 @@ const AppConnectionOptionsSchema = z.discriminatedUnion("app", [
|
|||||||
FlyioConnectionListItemSchema,
|
FlyioConnectionListItemSchema,
|
||||||
GitLabConnectionListItemSchema,
|
GitLabConnectionListItemSchema,
|
||||||
CloudflareConnectionListItemSchema,
|
CloudflareConnectionListItemSchema,
|
||||||
ZabbixConnectionListItemSchema
|
BitbucketConnectionListItemSchema,
|
||||||
|
ZabbixConnectionListItemSchema,
|
||||||
|
RailwayConnectionListItemSchema
|
||||||
]);
|
]);
|
||||||
|
|
||||||
export const registerAppConnectionRouter = async (server: FastifyZodProvider) => {
|
export const registerAppConnectionRouter = async (server: FastifyZodProvider) => {
|
||||||
|
@ -0,0 +1,88 @@
|
|||||||
|
import { z } from "zod";
|
||||||
|
|
||||||
|
import { readLimit } from "@app/server/config/rateLimiter";
|
||||||
|
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
|
||||||
|
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
|
||||||
|
import {
|
||||||
|
CreateBitbucketConnectionSchema,
|
||||||
|
SanitizedBitbucketConnectionSchema,
|
||||||
|
UpdateBitbucketConnectionSchema
|
||||||
|
} from "@app/services/app-connection/bitbucket";
|
||||||
|
import { AuthMode } from "@app/services/auth/auth-type";
|
||||||
|
|
||||||
|
import { registerAppConnectionEndpoints } from "./app-connection-endpoints";
|
||||||
|
|
||||||
|
export const registerBitbucketConnectionRouter = async (server: FastifyZodProvider) => {
|
||||||
|
registerAppConnectionEndpoints({
|
||||||
|
app: AppConnection.Bitbucket,
|
||||||
|
server,
|
||||||
|
sanitizedResponseSchema: SanitizedBitbucketConnectionSchema,
|
||||||
|
createSchema: CreateBitbucketConnectionSchema,
|
||||||
|
updateSchema: UpdateBitbucketConnectionSchema
|
||||||
|
});
|
||||||
|
|
||||||
|
// The below endpoints are not exposed and for Infisical App use
|
||||||
|
|
||||||
|
server.route({
|
||||||
|
method: "GET",
|
||||||
|
url: `/:connectionId/workspaces`,
|
||||||
|
config: {
|
||||||
|
rateLimit: readLimit
|
||||||
|
},
|
||||||
|
schema: {
|
||||||
|
params: z.object({
|
||||||
|
connectionId: z.string().uuid()
|
||||||
|
}),
|
||||||
|
response: {
|
||||||
|
200: z.object({
|
||||||
|
workspaces: z.object({ slug: z.string() }).array()
|
||||||
|
})
|
||||||
|
}
|
||||||
|
},
|
||||||
|
onRequest: verifyAuth([AuthMode.JWT]),
|
||||||
|
handler: async (req) => {
|
||||||
|
const {
|
||||||
|
params: { connectionId }
|
||||||
|
} = req;
|
||||||
|
|
||||||
|
const workspaces = await server.services.appConnection.bitbucket.listWorkspaces(connectionId, req.permission);
|
||||||
|
|
||||||
|
return { workspaces };
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
server.route({
|
||||||
|
method: "GET",
|
||||||
|
url: `/:connectionId/repositories`,
|
||||||
|
config: {
|
||||||
|
rateLimit: readLimit
|
||||||
|
},
|
||||||
|
schema: {
|
||||||
|
params: z.object({
|
||||||
|
connectionId: z.string().uuid()
|
||||||
|
}),
|
||||||
|
querystring: z.object({
|
||||||
|
workspaceSlug: z.string().min(1).max(255)
|
||||||
|
}),
|
||||||
|
response: {
|
||||||
|
200: z.object({
|
||||||
|
repositories: z.object({ slug: z.string(), full_name: z.string(), uuid: z.string() }).array()
|
||||||
|
})
|
||||||
|
}
|
||||||
|
},
|
||||||
|
onRequest: verifyAuth([AuthMode.JWT]),
|
||||||
|
handler: async (req) => {
|
||||||
|
const {
|
||||||
|
params: { connectionId },
|
||||||
|
query: { workspaceSlug }
|
||||||
|
} = req;
|
||||||
|
|
||||||
|
const repositories = await server.services.appConnection.bitbucket.listRepositories(
|
||||||
|
{ connectionId, workspaceSlug },
|
||||||
|
req.permission
|
||||||
|
);
|
||||||
|
|
||||||
|
return { repositories };
|
||||||
|
}
|
||||||
|
});
|
||||||
|
};
|
@ -9,6 +9,7 @@ import { registerAzureAppConfigurationConnectionRouter } from "./azure-app-confi
|
|||||||
import { registerAzureClientSecretsConnectionRouter } from "./azure-client-secrets-connection-router";
|
import { registerAzureClientSecretsConnectionRouter } from "./azure-client-secrets-connection-router";
|
||||||
import { registerAzureDevOpsConnectionRouter } from "./azure-devops-connection-router";
|
import { registerAzureDevOpsConnectionRouter } from "./azure-devops-connection-router";
|
||||||
import { registerAzureKeyVaultConnectionRouter } from "./azure-key-vault-connection-router";
|
import { registerAzureKeyVaultConnectionRouter } from "./azure-key-vault-connection-router";
|
||||||
|
import { registerBitbucketConnectionRouter } from "./bitbucket-connection-router";
|
||||||
import { registerCamundaConnectionRouter } from "./camunda-connection-router";
|
import { registerCamundaConnectionRouter } from "./camunda-connection-router";
|
||||||
import { registerCloudflareConnectionRouter } from "./cloudflare-connection-router";
|
import { registerCloudflareConnectionRouter } from "./cloudflare-connection-router";
|
||||||
import { registerDatabricksConnectionRouter } from "./databricks-connection-router";
|
import { registerDatabricksConnectionRouter } from "./databricks-connection-router";
|
||||||
@ -24,6 +25,7 @@ import { registerLdapConnectionRouter } from "./ldap-connection-router";
|
|||||||
import { registerMsSqlConnectionRouter } from "./mssql-connection-router";
|
import { registerMsSqlConnectionRouter } from "./mssql-connection-router";
|
||||||
import { registerMySqlConnectionRouter } from "./mysql-connection-router";
|
import { registerMySqlConnectionRouter } from "./mysql-connection-router";
|
||||||
import { registerPostgresConnectionRouter } from "./postgres-connection-router";
|
import { registerPostgresConnectionRouter } from "./postgres-connection-router";
|
||||||
|
import { registerRailwayConnectionRouter } from "./railway-connection-router";
|
||||||
import { registerRenderConnectionRouter } from "./render-connection-router";
|
import { registerRenderConnectionRouter } from "./render-connection-router";
|
||||||
import { registerTeamCityConnectionRouter } from "./teamcity-connection-router";
|
import { registerTeamCityConnectionRouter } from "./teamcity-connection-router";
|
||||||
import { registerTerraformCloudConnectionRouter } from "./terraform-cloud-router";
|
import { registerTerraformCloudConnectionRouter } from "./terraform-cloud-router";
|
||||||
@ -64,5 +66,7 @@ export const APP_CONNECTION_REGISTER_ROUTER_MAP: Record<AppConnection, (server:
|
|||||||
[AppConnection.Flyio]: registerFlyioConnectionRouter,
|
[AppConnection.Flyio]: registerFlyioConnectionRouter,
|
||||||
[AppConnection.GitLab]: registerGitLabConnectionRouter,
|
[AppConnection.GitLab]: registerGitLabConnectionRouter,
|
||||||
[AppConnection.Cloudflare]: registerCloudflareConnectionRouter,
|
[AppConnection.Cloudflare]: registerCloudflareConnectionRouter,
|
||||||
[AppConnection.Zabbix]: registerZabbixConnectionRouter
|
[AppConnection.Bitbucket]: registerBitbucketConnectionRouter,
|
||||||
|
[AppConnection.Zabbix]: registerZabbixConnectionRouter,
|
||||||
|
[AppConnection.Railway]: registerRailwayConnectionRouter
|
||||||
};
|
};
|
||||||
|
@ -0,0 +1,67 @@
|
|||||||
|
import { z } from "zod";
|
||||||
|
|
||||||
|
import { readLimit } from "@app/server/config/rateLimiter";
|
||||||
|
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
|
||||||
|
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
|
||||||
|
import {
|
||||||
|
CreateRailwayConnectionSchema,
|
||||||
|
SanitizedRailwayConnectionSchema,
|
||||||
|
UpdateRailwayConnectionSchema
|
||||||
|
} from "@app/services/app-connection/railway";
|
||||||
|
import { AuthMode } from "@app/services/auth/auth-type";
|
||||||
|
|
||||||
|
import { registerAppConnectionEndpoints } from "./app-connection-endpoints";
|
||||||
|
|
||||||
|
export const registerRailwayConnectionRouter = async (server: FastifyZodProvider) => {
|
||||||
|
registerAppConnectionEndpoints({
|
||||||
|
app: AppConnection.Railway,
|
||||||
|
server,
|
||||||
|
sanitizedResponseSchema: SanitizedRailwayConnectionSchema,
|
||||||
|
createSchema: CreateRailwayConnectionSchema,
|
||||||
|
updateSchema: UpdateRailwayConnectionSchema
|
||||||
|
});
|
||||||
|
|
||||||
|
// The below endpoints are not exposed and for Infisical App use
|
||||||
|
server.route({
|
||||||
|
method: "GET",
|
||||||
|
url: `/:connectionId/projects`,
|
||||||
|
config: {
|
||||||
|
rateLimit: readLimit
|
||||||
|
},
|
||||||
|
schema: {
|
||||||
|
params: z.object({
|
||||||
|
connectionId: z.string().uuid()
|
||||||
|
}),
|
||||||
|
response: {
|
||||||
|
200: z.object({
|
||||||
|
projects: z
|
||||||
|
.object({
|
||||||
|
name: z.string(),
|
||||||
|
id: z.string(),
|
||||||
|
services: z.array(
|
||||||
|
z.object({
|
||||||
|
name: z.string(),
|
||||||
|
id: z.string()
|
||||||
|
})
|
||||||
|
),
|
||||||
|
environments: z.array(
|
||||||
|
z.object({
|
||||||
|
name: z.string(),
|
||||||
|
id: z.string()
|
||||||
|
})
|
||||||
|
)
|
||||||
|
})
|
||||||
|
.array()
|
||||||
|
})
|
||||||
|
}
|
||||||
|
},
|
||||||
|
onRequest: verifyAuth([AuthMode.JWT]),
|
||||||
|
handler: async (req) => {
|
||||||
|
const { connectionId } = req.params;
|
||||||
|
|
||||||
|
const projects = await server.services.appConnection.railway.listProjects(connectionId, req.permission);
|
||||||
|
|
||||||
|
return { projects };
|
||||||
|
}
|
||||||
|
});
|
||||||
|
};
|
@ -732,8 +732,8 @@ export const registerDashboardRouter = async (server: FastifyZodProvider) => {
|
|||||||
actorOrgId: req.permission.orgId,
|
actorOrgId: req.permission.orgId,
|
||||||
projectId,
|
projectId,
|
||||||
environment,
|
environment,
|
||||||
path: secretPath,
|
path: secretPath
|
||||||
search
|
// search scott: removing for now because this prevents searching imported secrets which are fetched separately client side
|
||||||
});
|
});
|
||||||
|
|
||||||
if (remainingLimit > 0 && totalImportCount > adjustedOffset) {
|
if (remainingLimit > 0 && totalImportCount > adjustedOffset) {
|
||||||
@ -745,7 +745,7 @@ export const registerDashboardRouter = async (server: FastifyZodProvider) => {
|
|||||||
projectId,
|
projectId,
|
||||||
environment,
|
environment,
|
||||||
path: secretPath,
|
path: secretPath,
|
||||||
search,
|
// search scott: removing for now because this prevents searching imported secrets which are fetched separately client side
|
||||||
limit: remainingLimit,
|
limit: remainingLimit,
|
||||||
offset: adjustedOffset
|
offset: adjustedOffset
|
||||||
});
|
});
|
||||||
|
@ -17,6 +17,7 @@ import { registerGitLabSyncRouter } from "./gitlab-sync-router";
|
|||||||
import { registerHCVaultSyncRouter } from "./hc-vault-sync-router";
|
import { registerHCVaultSyncRouter } from "./hc-vault-sync-router";
|
||||||
import { registerHerokuSyncRouter } from "./heroku-sync-router";
|
import { registerHerokuSyncRouter } from "./heroku-sync-router";
|
||||||
import { registerHumanitecSyncRouter } from "./humanitec-sync-router";
|
import { registerHumanitecSyncRouter } from "./humanitec-sync-router";
|
||||||
|
import { registerRailwaySyncRouter } from "./railway-sync-router";
|
||||||
import { registerRenderSyncRouter } from "./render-sync-router";
|
import { registerRenderSyncRouter } from "./render-sync-router";
|
||||||
import { registerTeamCitySyncRouter } from "./teamcity-sync-router";
|
import { registerTeamCitySyncRouter } from "./teamcity-sync-router";
|
||||||
import { registerTerraformCloudSyncRouter } from "./terraform-cloud-sync-router";
|
import { registerTerraformCloudSyncRouter } from "./terraform-cloud-sync-router";
|
||||||
@ -49,5 +50,6 @@ export const SECRET_SYNC_REGISTER_ROUTER_MAP: Record<SecretSync, (server: Fastif
|
|||||||
[SecretSync.Flyio]: registerFlyioSyncRouter,
|
[SecretSync.Flyio]: registerFlyioSyncRouter,
|
||||||
[SecretSync.GitLab]: registerGitLabSyncRouter,
|
[SecretSync.GitLab]: registerGitLabSyncRouter,
|
||||||
[SecretSync.CloudflarePages]: registerCloudflarePagesSyncRouter,
|
[SecretSync.CloudflarePages]: registerCloudflarePagesSyncRouter,
|
||||||
[SecretSync.Zabbix]: registerZabbixSyncRouter
|
[SecretSync.Zabbix]: registerZabbixSyncRouter,
|
||||||
|
[SecretSync.Railway]: registerRailwaySyncRouter
|
||||||
};
|
};
|
||||||
|
@ -0,0 +1,17 @@
|
|||||||
|
import {
|
||||||
|
CreateRailwaySyncSchema,
|
||||||
|
RailwaySyncSchema,
|
||||||
|
UpdateRailwaySyncSchema
|
||||||
|
} from "@app/services/secret-sync/railway/railway-sync-schemas";
|
||||||
|
import { SecretSync } from "@app/services/secret-sync/secret-sync-enums";
|
||||||
|
|
||||||
|
import { registerSyncSecretsEndpoints } from "./secret-sync-endpoints";
|
||||||
|
|
||||||
|
export const registerRailwaySyncRouter = async (server: FastifyZodProvider) =>
|
||||||
|
registerSyncSecretsEndpoints({
|
||||||
|
destination: SecretSync.Railway,
|
||||||
|
server,
|
||||||
|
responseSchema: RailwaySyncSchema,
|
||||||
|
createSchema: CreateRailwaySyncSchema,
|
||||||
|
updateSchema: UpdateRailwaySyncSchema
|
||||||
|
});
|
@ -34,6 +34,7 @@ import { GitLabSyncListItemSchema, GitLabSyncSchema } from "@app/services/secret
|
|||||||
import { HCVaultSyncListItemSchema, HCVaultSyncSchema } from "@app/services/secret-sync/hc-vault";
|
import { HCVaultSyncListItemSchema, HCVaultSyncSchema } from "@app/services/secret-sync/hc-vault";
|
||||||
import { HerokuSyncListItemSchema, HerokuSyncSchema } from "@app/services/secret-sync/heroku";
|
import { HerokuSyncListItemSchema, HerokuSyncSchema } from "@app/services/secret-sync/heroku";
|
||||||
import { HumanitecSyncListItemSchema, HumanitecSyncSchema } from "@app/services/secret-sync/humanitec";
|
import { HumanitecSyncListItemSchema, HumanitecSyncSchema } from "@app/services/secret-sync/humanitec";
|
||||||
|
import { RailwaySyncListItemSchema, RailwaySyncSchema } from "@app/services/secret-sync/railway/railway-sync-schemas";
|
||||||
import { RenderSyncListItemSchema, RenderSyncSchema } from "@app/services/secret-sync/render/render-sync-schemas";
|
import { RenderSyncListItemSchema, RenderSyncSchema } from "@app/services/secret-sync/render/render-sync-schemas";
|
||||||
import { TeamCitySyncListItemSchema, TeamCitySyncSchema } from "@app/services/secret-sync/teamcity";
|
import { TeamCitySyncListItemSchema, TeamCitySyncSchema } from "@app/services/secret-sync/teamcity";
|
||||||
import { TerraformCloudSyncListItemSchema, TerraformCloudSyncSchema } from "@app/services/secret-sync/terraform-cloud";
|
import { TerraformCloudSyncListItemSchema, TerraformCloudSyncSchema } from "@app/services/secret-sync/terraform-cloud";
|
||||||
@ -64,7 +65,8 @@ const SecretSyncSchema = z.discriminatedUnion("destination", [
|
|||||||
FlyioSyncSchema,
|
FlyioSyncSchema,
|
||||||
GitLabSyncSchema,
|
GitLabSyncSchema,
|
||||||
CloudflarePagesSyncSchema,
|
CloudflarePagesSyncSchema,
|
||||||
ZabbixSyncSchema
|
ZabbixSyncSchema,
|
||||||
|
RailwaySyncSchema
|
||||||
]);
|
]);
|
||||||
|
|
||||||
const SecretSyncOptionsSchema = z.discriminatedUnion("destination", [
|
const SecretSyncOptionsSchema = z.discriminatedUnion("destination", [
|
||||||
@ -90,7 +92,8 @@ const SecretSyncOptionsSchema = z.discriminatedUnion("destination", [
|
|||||||
FlyioSyncListItemSchema,
|
FlyioSyncListItemSchema,
|
||||||
GitLabSyncListItemSchema,
|
GitLabSyncListItemSchema,
|
||||||
CloudflarePagesSyncListItemSchema,
|
CloudflarePagesSyncListItemSchema,
|
||||||
ZabbixSyncListItemSchema
|
ZabbixSyncListItemSchema,
|
||||||
|
RailwaySyncListItemSchema
|
||||||
]);
|
]);
|
||||||
|
|
||||||
export const registerSecretSyncRouter = async (server: FastifyZodProvider) => {
|
export const registerSecretSyncRouter = async (server: FastifyZodProvider) => {
|
||||||
|
@ -2,7 +2,7 @@ import picomatch from "picomatch";
|
|||||||
import { z } from "zod";
|
import { z } from "zod";
|
||||||
|
|
||||||
import { SecretApprovalRequestsSchema, SecretsSchema, SecretType, ServiceTokenScopes } from "@app/db/schemas";
|
import { SecretApprovalRequestsSchema, SecretsSchema, SecretType, ServiceTokenScopes } from "@app/db/schemas";
|
||||||
import { EventType, UserAgentType } from "@app/ee/services/audit-log/audit-log-types";
|
import { EventType, SecretApprovalEvent, UserAgentType } from "@app/ee/services/audit-log/audit-log-types";
|
||||||
import { ApiDocsTags, RAW_SECRETS, SECRETS } from "@app/lib/api-docs";
|
import { ApiDocsTags, RAW_SECRETS, SECRETS } from "@app/lib/api-docs";
|
||||||
import { BadRequestError } from "@app/lib/errors";
|
import { BadRequestError } from "@app/lib/errors";
|
||||||
import { removeTrailingSlash } from "@app/lib/fn";
|
import { removeTrailingSlash } from "@app/lib/fn";
|
||||||
@ -594,6 +594,23 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
|
|||||||
secretReminderRepeatDays: req.body.secretReminderRepeatDays
|
secretReminderRepeatDays: req.body.secretReminderRepeatDays
|
||||||
});
|
});
|
||||||
if (secretOperation.type === SecretProtectionType.Approval) {
|
if (secretOperation.type === SecretProtectionType.Approval) {
|
||||||
|
await server.services.auditLog.createAuditLog({
|
||||||
|
projectId: req.body.workspaceId,
|
||||||
|
...req.auditLogInfo,
|
||||||
|
event: {
|
||||||
|
type: EventType.SECRET_APPROVAL_REQUEST,
|
||||||
|
metadata: {
|
||||||
|
committedBy: secretOperation.approval.committerUserId,
|
||||||
|
secretApprovalRequestId: secretOperation.approval.id,
|
||||||
|
secretApprovalRequestSlug: secretOperation.approval.slug,
|
||||||
|
secretPath: req.body.secretPath,
|
||||||
|
environment: req.body.environment,
|
||||||
|
secretKey: req.params.secretName,
|
||||||
|
eventType: SecretApprovalEvent.Create
|
||||||
|
}
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
return { approval: secretOperation.approval };
|
return { approval: secretOperation.approval };
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -730,6 +747,23 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
|
|||||||
});
|
});
|
||||||
|
|
||||||
if (secretOperation.type === SecretProtectionType.Approval) {
|
if (secretOperation.type === SecretProtectionType.Approval) {
|
||||||
|
await server.services.auditLog.createAuditLog({
|
||||||
|
projectId: req.body.workspaceId,
|
||||||
|
...req.auditLogInfo,
|
||||||
|
event: {
|
||||||
|
type: EventType.SECRET_APPROVAL_REQUEST,
|
||||||
|
metadata: {
|
||||||
|
committedBy: secretOperation.approval.committerUserId,
|
||||||
|
secretApprovalRequestId: secretOperation.approval.id,
|
||||||
|
secretApprovalRequestSlug: secretOperation.approval.slug,
|
||||||
|
secretPath: req.body.secretPath,
|
||||||
|
environment: req.body.environment,
|
||||||
|
secretKey: req.params.secretName,
|
||||||
|
eventType: SecretApprovalEvent.Update
|
||||||
|
}
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
return { approval: secretOperation.approval };
|
return { approval: secretOperation.approval };
|
||||||
}
|
}
|
||||||
const { secret } = secretOperation;
|
const { secret } = secretOperation;
|
||||||
@ -831,6 +865,23 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
|
|||||||
type: req.body.type
|
type: req.body.type
|
||||||
});
|
});
|
||||||
if (secretOperation.type === SecretProtectionType.Approval) {
|
if (secretOperation.type === SecretProtectionType.Approval) {
|
||||||
|
await server.services.auditLog.createAuditLog({
|
||||||
|
projectId: req.body.workspaceId,
|
||||||
|
...req.auditLogInfo,
|
||||||
|
event: {
|
||||||
|
type: EventType.SECRET_APPROVAL_REQUEST,
|
||||||
|
metadata: {
|
||||||
|
committedBy: secretOperation.approval.committerUserId,
|
||||||
|
secretApprovalRequestId: secretOperation.approval.id,
|
||||||
|
secretApprovalRequestSlug: secretOperation.approval.slug,
|
||||||
|
secretPath: req.body.secretPath,
|
||||||
|
environment: req.body.environment,
|
||||||
|
secretKey: req.params.secretName,
|
||||||
|
eventType: SecretApprovalEvent.Delete
|
||||||
|
}
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
return { approval: secretOperation.approval };
|
return { approval: secretOperation.approval };
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -1165,7 +1216,10 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
|
|||||||
metadata: {
|
metadata: {
|
||||||
committedBy: approval.committerUserId,
|
committedBy: approval.committerUserId,
|
||||||
secretApprovalRequestId: approval.id,
|
secretApprovalRequestId: approval.id,
|
||||||
secretApprovalRequestSlug: approval.slug
|
secretApprovalRequestSlug: approval.slug,
|
||||||
|
secretPath,
|
||||||
|
environment,
|
||||||
|
eventType: SecretApprovalEvent.Create
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
@ -1351,7 +1405,11 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
|
|||||||
metadata: {
|
metadata: {
|
||||||
committedBy: approval.committerUserId,
|
committedBy: approval.committerUserId,
|
||||||
secretApprovalRequestId: approval.id,
|
secretApprovalRequestId: approval.id,
|
||||||
secretApprovalRequestSlug: approval.slug
|
secretApprovalRequestSlug: approval.slug,
|
||||||
|
secretPath,
|
||||||
|
environment,
|
||||||
|
secretKey: req.params.secretName,
|
||||||
|
eventType: SecretApprovalEvent.Update
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
@ -1489,7 +1547,11 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
|
|||||||
metadata: {
|
metadata: {
|
||||||
committedBy: approval.committerUserId,
|
committedBy: approval.committerUserId,
|
||||||
secretApprovalRequestId: approval.id,
|
secretApprovalRequestId: approval.id,
|
||||||
secretApprovalRequestSlug: approval.slug
|
secretApprovalRequestSlug: approval.slug,
|
||||||
|
secretPath,
|
||||||
|
environment,
|
||||||
|
secretKey: req.params.secretName,
|
||||||
|
eventType: SecretApprovalEvent.Delete
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
@ -1673,7 +1735,10 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
|
|||||||
metadata: {
|
metadata: {
|
||||||
committedBy: approval.committerUserId,
|
committedBy: approval.committerUserId,
|
||||||
secretApprovalRequestId: approval.id,
|
secretApprovalRequestId: approval.id,
|
||||||
secretApprovalRequestSlug: approval.slug
|
secretApprovalRequestSlug: approval.slug,
|
||||||
|
secretPath,
|
||||||
|
environment,
|
||||||
|
eventType: SecretApprovalEvent.CreateMany
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
@ -1801,7 +1866,13 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
|
|||||||
metadata: {
|
metadata: {
|
||||||
committedBy: approval.committerUserId,
|
committedBy: approval.committerUserId,
|
||||||
secretApprovalRequestId: approval.id,
|
secretApprovalRequestId: approval.id,
|
||||||
secretApprovalRequestSlug: approval.slug
|
secretApprovalRequestSlug: approval.slug,
|
||||||
|
secretPath,
|
||||||
|
environment,
|
||||||
|
eventType: SecretApprovalEvent.UpdateMany,
|
||||||
|
secrets: inputSecrets.map((secret) => ({
|
||||||
|
secretKey: secret.secretName
|
||||||
|
}))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
@ -1920,7 +1991,13 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
|
|||||||
metadata: {
|
metadata: {
|
||||||
committedBy: approval.committerUserId,
|
committedBy: approval.committerUserId,
|
||||||
secretApprovalRequestId: approval.id,
|
secretApprovalRequestId: approval.id,
|
||||||
secretApprovalRequestSlug: approval.slug
|
secretApprovalRequestSlug: approval.slug,
|
||||||
|
secretPath,
|
||||||
|
environment,
|
||||||
|
secrets: inputSecrets.map((secret) => ({
|
||||||
|
secretKey: secret.secretName
|
||||||
|
})),
|
||||||
|
eventType: SecretApprovalEvent.DeleteMany
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
@ -2038,6 +2115,24 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
|
|||||||
secrets: inputSecrets
|
secrets: inputSecrets
|
||||||
});
|
});
|
||||||
if (secretOperation.type === SecretProtectionType.Approval) {
|
if (secretOperation.type === SecretProtectionType.Approval) {
|
||||||
|
await server.services.auditLog.createAuditLog({
|
||||||
|
projectId: req.body.workspaceId,
|
||||||
|
...req.auditLogInfo,
|
||||||
|
event: {
|
||||||
|
type: EventType.SECRET_APPROVAL_REQUEST,
|
||||||
|
metadata: {
|
||||||
|
committedBy: secretOperation.approval.committerUserId,
|
||||||
|
secretApprovalRequestId: secretOperation.approval.id,
|
||||||
|
secretApprovalRequestSlug: secretOperation.approval.slug,
|
||||||
|
secretPath,
|
||||||
|
environment,
|
||||||
|
secrets: inputSecrets.map((secret) => ({
|
||||||
|
secretKey: secret.secretKey
|
||||||
|
})),
|
||||||
|
eventType: SecretApprovalEvent.CreateMany
|
||||||
|
}
|
||||||
|
}
|
||||||
|
});
|
||||||
return { approval: secretOperation.approval };
|
return { approval: secretOperation.approval };
|
||||||
}
|
}
|
||||||
const { secrets } = secretOperation;
|
const { secrets } = secretOperation;
|
||||||
@ -2170,6 +2265,25 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
|
|||||||
mode: req.body.mode
|
mode: req.body.mode
|
||||||
});
|
});
|
||||||
if (secretOperation.type === SecretProtectionType.Approval) {
|
if (secretOperation.type === SecretProtectionType.Approval) {
|
||||||
|
await server.services.auditLog.createAuditLog({
|
||||||
|
projectId: req.body.workspaceId,
|
||||||
|
...req.auditLogInfo,
|
||||||
|
event: {
|
||||||
|
type: EventType.SECRET_APPROVAL_REQUEST,
|
||||||
|
metadata: {
|
||||||
|
committedBy: secretOperation.approval.committerUserId,
|
||||||
|
secretApprovalRequestId: secretOperation.approval.id,
|
||||||
|
secretApprovalRequestSlug: secretOperation.approval.slug,
|
||||||
|
secretPath,
|
||||||
|
environment,
|
||||||
|
secrets: inputSecrets.map((secret) => ({
|
||||||
|
secretKey: secret.secretKey,
|
||||||
|
secretPath: secret.secretPath
|
||||||
|
})),
|
||||||
|
eventType: SecretApprovalEvent.UpdateMany
|
||||||
|
}
|
||||||
|
}
|
||||||
|
});
|
||||||
return { approval: secretOperation.approval };
|
return { approval: secretOperation.approval };
|
||||||
}
|
}
|
||||||
const { secrets } = secretOperation;
|
const { secrets } = secretOperation;
|
||||||
@ -2298,6 +2412,25 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
|
|||||||
secrets: inputSecrets
|
secrets: inputSecrets
|
||||||
});
|
});
|
||||||
if (secretOperation.type === SecretProtectionType.Approval) {
|
if (secretOperation.type === SecretProtectionType.Approval) {
|
||||||
|
await server.services.auditLog.createAuditLog({
|
||||||
|
projectId: req.body.workspaceId,
|
||||||
|
...req.auditLogInfo,
|
||||||
|
event: {
|
||||||
|
type: EventType.SECRET_APPROVAL_REQUEST,
|
||||||
|
metadata: {
|
||||||
|
committedBy: secretOperation.approval.committerUserId,
|
||||||
|
secretApprovalRequestId: secretOperation.approval.id,
|
||||||
|
secretApprovalRequestSlug: secretOperation.approval.slug,
|
||||||
|
secretPath,
|
||||||
|
environment,
|
||||||
|
secrets: inputSecrets.map((secret) => ({
|
||||||
|
secretKey: secret.secretKey
|
||||||
|
})),
|
||||||
|
eventType: SecretApprovalEvent.DeleteMany
|
||||||
|
}
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
return { approval: secretOperation.approval };
|
return { approval: secretOperation.approval };
|
||||||
}
|
}
|
||||||
const { secrets } = secretOperation;
|
const { secrets } = secretOperation;
|
||||||
|
@ -28,7 +28,9 @@ export enum AppConnection {
|
|||||||
Flyio = "flyio",
|
Flyio = "flyio",
|
||||||
GitLab = "gitlab",
|
GitLab = "gitlab",
|
||||||
Cloudflare = "cloudflare",
|
Cloudflare = "cloudflare",
|
||||||
Zabbix = "zabbix"
|
Zabbix = "zabbix",
|
||||||
|
Railway = "railway",
|
||||||
|
Bitbucket = "bitbucket"
|
||||||
}
|
}
|
||||||
|
|
||||||
export enum AWSRegion {
|
export enum AWSRegion {
|
||||||
|
@ -50,6 +50,11 @@ import {
|
|||||||
getAzureKeyVaultConnectionListItem,
|
getAzureKeyVaultConnectionListItem,
|
||||||
validateAzureKeyVaultConnectionCredentials
|
validateAzureKeyVaultConnectionCredentials
|
||||||
} from "./azure-key-vault";
|
} from "./azure-key-vault";
|
||||||
|
import {
|
||||||
|
BitbucketConnectionMethod,
|
||||||
|
getBitbucketConnectionListItem,
|
||||||
|
validateBitbucketConnectionCredentials
|
||||||
|
} from "./bitbucket";
|
||||||
import { CamundaConnectionMethod, getCamundaConnectionListItem, validateCamundaConnectionCredentials } from "./camunda";
|
import { CamundaConnectionMethod, getCamundaConnectionListItem, validateCamundaConnectionCredentials } from "./camunda";
|
||||||
import { CloudflareConnectionMethod } from "./cloudflare/cloudflare-connection-enum";
|
import { CloudflareConnectionMethod } from "./cloudflare/cloudflare-connection-enum";
|
||||||
import {
|
import {
|
||||||
@ -86,6 +91,7 @@ import { getMsSqlConnectionListItem, MsSqlConnectionMethod } from "./mssql";
|
|||||||
import { MySqlConnectionMethod } from "./mysql/mysql-connection-enums";
|
import { MySqlConnectionMethod } from "./mysql/mysql-connection-enums";
|
||||||
import { getMySqlConnectionListItem } from "./mysql/mysql-connection-fns";
|
import { getMySqlConnectionListItem } from "./mysql/mysql-connection-fns";
|
||||||
import { getPostgresConnectionListItem, PostgresConnectionMethod } from "./postgres";
|
import { getPostgresConnectionListItem, PostgresConnectionMethod } from "./postgres";
|
||||||
|
import { getRailwayConnectionListItem, validateRailwayConnectionCredentials } from "./railway";
|
||||||
import { RenderConnectionMethod } from "./render/render-connection-enums";
|
import { RenderConnectionMethod } from "./render/render-connection-enums";
|
||||||
import { getRenderConnectionListItem, validateRenderConnectionCredentials } from "./render/render-connection-fns";
|
import { getRenderConnectionListItem, validateRenderConnectionCredentials } from "./render/render-connection-fns";
|
||||||
import {
|
import {
|
||||||
@ -138,7 +144,9 @@ export const listAppConnectionOptions = () => {
|
|||||||
getFlyioConnectionListItem(),
|
getFlyioConnectionListItem(),
|
||||||
getGitLabConnectionListItem(),
|
getGitLabConnectionListItem(),
|
||||||
getCloudflareConnectionListItem(),
|
getCloudflareConnectionListItem(),
|
||||||
getZabbixConnectionListItem()
|
getZabbixConnectionListItem(),
|
||||||
|
getRailwayConnectionListItem(),
|
||||||
|
getBitbucketConnectionListItem()
|
||||||
].sort((a, b) => a.name.localeCompare(b.name));
|
].sort((a, b) => a.name.localeCompare(b.name));
|
||||||
};
|
};
|
||||||
|
|
||||||
@ -219,7 +227,9 @@ export const validateAppConnectionCredentials = async (
|
|||||||
[AppConnection.Flyio]: validateFlyioConnectionCredentials as TAppConnectionCredentialsValidator,
|
[AppConnection.Flyio]: validateFlyioConnectionCredentials as TAppConnectionCredentialsValidator,
|
||||||
[AppConnection.GitLab]: validateGitLabConnectionCredentials as TAppConnectionCredentialsValidator,
|
[AppConnection.GitLab]: validateGitLabConnectionCredentials as TAppConnectionCredentialsValidator,
|
||||||
[AppConnection.Cloudflare]: validateCloudflareConnectionCredentials as TAppConnectionCredentialsValidator,
|
[AppConnection.Cloudflare]: validateCloudflareConnectionCredentials as TAppConnectionCredentialsValidator,
|
||||||
[AppConnection.Zabbix]: validateZabbixConnectionCredentials as TAppConnectionCredentialsValidator
|
[AppConnection.Zabbix]: validateZabbixConnectionCredentials as TAppConnectionCredentialsValidator,
|
||||||
|
[AppConnection.Railway]: validateRailwayConnectionCredentials as TAppConnectionCredentialsValidator,
|
||||||
|
[AppConnection.Bitbucket]: validateBitbucketConnectionCredentials as TAppConnectionCredentialsValidator
|
||||||
};
|
};
|
||||||
|
|
||||||
return VALIDATE_APP_CONNECTION_CREDENTIALS_MAP[appConnection.app](appConnection);
|
return VALIDATE_APP_CONNECTION_CREDENTIALS_MAP[appConnection.app](appConnection);
|
||||||
@ -256,6 +266,7 @@ export const getAppConnectionMethodName = (method: TAppConnection["method"]) =>
|
|||||||
case VercelConnectionMethod.ApiToken:
|
case VercelConnectionMethod.ApiToken:
|
||||||
case OnePassConnectionMethod.ApiToken:
|
case OnePassConnectionMethod.ApiToken:
|
||||||
case CloudflareConnectionMethod.APIToken:
|
case CloudflareConnectionMethod.APIToken:
|
||||||
|
case BitbucketConnectionMethod.ApiToken:
|
||||||
case ZabbixConnectionMethod.ApiToken:
|
case ZabbixConnectionMethod.ApiToken:
|
||||||
return "API Token";
|
return "API Token";
|
||||||
case PostgresConnectionMethod.UsernameAndPassword:
|
case PostgresConnectionMethod.UsernameAndPassword:
|
||||||
@ -337,7 +348,9 @@ export const TRANSITION_CONNECTION_CREDENTIALS_TO_PLATFORM: Record<
|
|||||||
[AppConnection.Flyio]: platformManagedCredentialsNotSupported,
|
[AppConnection.Flyio]: platformManagedCredentialsNotSupported,
|
||||||
[AppConnection.GitLab]: platformManagedCredentialsNotSupported,
|
[AppConnection.GitLab]: platformManagedCredentialsNotSupported,
|
||||||
[AppConnection.Cloudflare]: platformManagedCredentialsNotSupported,
|
[AppConnection.Cloudflare]: platformManagedCredentialsNotSupported,
|
||||||
[AppConnection.Zabbix]: platformManagedCredentialsNotSupported
|
[AppConnection.Zabbix]: platformManagedCredentialsNotSupported,
|
||||||
|
[AppConnection.Railway]: platformManagedCredentialsNotSupported,
|
||||||
|
[AppConnection.Bitbucket]: platformManagedCredentialsNotSupported
|
||||||
};
|
};
|
||||||
|
|
||||||
export const enterpriseAppCheck = async (
|
export const enterpriseAppCheck = async (
|
||||||
|
@ -30,7 +30,9 @@ export const APP_CONNECTION_NAME_MAP: Record<AppConnection, string> = {
|
|||||||
[AppConnection.Flyio]: "Fly.io",
|
[AppConnection.Flyio]: "Fly.io",
|
||||||
[AppConnection.GitLab]: "GitLab",
|
[AppConnection.GitLab]: "GitLab",
|
||||||
[AppConnection.Cloudflare]: "Cloudflare",
|
[AppConnection.Cloudflare]: "Cloudflare",
|
||||||
[AppConnection.Zabbix]: "Zabbix"
|
[AppConnection.Zabbix]: "Zabbix",
|
||||||
|
[AppConnection.Railway]: "Railway",
|
||||||
|
[AppConnection.Bitbucket]: "Bitbucket"
|
||||||
};
|
};
|
||||||
|
|
||||||
export const APP_CONNECTION_PLAN_MAP: Record<AppConnection, AppConnectionPlanType> = {
|
export const APP_CONNECTION_PLAN_MAP: Record<AppConnection, AppConnectionPlanType> = {
|
||||||
@ -63,5 +65,7 @@ export const APP_CONNECTION_PLAN_MAP: Record<AppConnection, AppConnectionPlanTyp
|
|||||||
[AppConnection.Flyio]: AppConnectionPlanType.Regular,
|
[AppConnection.Flyio]: AppConnectionPlanType.Regular,
|
||||||
[AppConnection.GitLab]: AppConnectionPlanType.Regular,
|
[AppConnection.GitLab]: AppConnectionPlanType.Regular,
|
||||||
[AppConnection.Cloudflare]: AppConnectionPlanType.Regular,
|
[AppConnection.Cloudflare]: AppConnectionPlanType.Regular,
|
||||||
[AppConnection.Zabbix]: AppConnectionPlanType.Regular
|
[AppConnection.Zabbix]: AppConnectionPlanType.Regular,
|
||||||
|
[AppConnection.Railway]: AppConnectionPlanType.Regular,
|
||||||
|
[AppConnection.Bitbucket]: AppConnectionPlanType.Regular
|
||||||
};
|
};
|
||||||
|
@ -45,6 +45,8 @@ import { azureClientSecretsConnectionService } from "./azure-client-secrets/azur
|
|||||||
import { ValidateAzureDevOpsConnectionCredentialsSchema } from "./azure-devops/azure-devops-schemas";
|
import { ValidateAzureDevOpsConnectionCredentialsSchema } from "./azure-devops/azure-devops-schemas";
|
||||||
import { azureDevOpsConnectionService } from "./azure-devops/azure-devops-service";
|
import { azureDevOpsConnectionService } from "./azure-devops/azure-devops-service";
|
||||||
import { ValidateAzureKeyVaultConnectionCredentialsSchema } from "./azure-key-vault";
|
import { ValidateAzureKeyVaultConnectionCredentialsSchema } from "./azure-key-vault";
|
||||||
|
import { ValidateBitbucketConnectionCredentialsSchema } from "./bitbucket";
|
||||||
|
import { bitbucketConnectionService } from "./bitbucket/bitbucket-connection-service";
|
||||||
import { ValidateCamundaConnectionCredentialsSchema } from "./camunda";
|
import { ValidateCamundaConnectionCredentialsSchema } from "./camunda";
|
||||||
import { camundaConnectionService } from "./camunda/camunda-connection-service";
|
import { camundaConnectionService } from "./camunda/camunda-connection-service";
|
||||||
import { ValidateCloudflareConnectionCredentialsSchema } from "./cloudflare/cloudflare-connection-schema";
|
import { ValidateCloudflareConnectionCredentialsSchema } from "./cloudflare/cloudflare-connection-schema";
|
||||||
@ -70,6 +72,8 @@ import { ValidateLdapConnectionCredentialsSchema } from "./ldap";
|
|||||||
import { ValidateMsSqlConnectionCredentialsSchema } from "./mssql";
|
import { ValidateMsSqlConnectionCredentialsSchema } from "./mssql";
|
||||||
import { ValidateMySqlConnectionCredentialsSchema } from "./mysql";
|
import { ValidateMySqlConnectionCredentialsSchema } from "./mysql";
|
||||||
import { ValidatePostgresConnectionCredentialsSchema } from "./postgres";
|
import { ValidatePostgresConnectionCredentialsSchema } from "./postgres";
|
||||||
|
import { ValidateRailwayConnectionCredentialsSchema } from "./railway";
|
||||||
|
import { railwayConnectionService } from "./railway/railway-connection-service";
|
||||||
import { ValidateRenderConnectionCredentialsSchema } from "./render/render-connection-schema";
|
import { ValidateRenderConnectionCredentialsSchema } from "./render/render-connection-schema";
|
||||||
import { renderConnectionService } from "./render/render-connection-service";
|
import { renderConnectionService } from "./render/render-connection-service";
|
||||||
import { ValidateTeamCityConnectionCredentialsSchema } from "./teamcity";
|
import { ValidateTeamCityConnectionCredentialsSchema } from "./teamcity";
|
||||||
@ -122,7 +126,9 @@ const VALIDATE_APP_CONNECTION_CREDENTIALS_MAP: Record<AppConnection, TValidateAp
|
|||||||
[AppConnection.Flyio]: ValidateFlyioConnectionCredentialsSchema,
|
[AppConnection.Flyio]: ValidateFlyioConnectionCredentialsSchema,
|
||||||
[AppConnection.GitLab]: ValidateGitLabConnectionCredentialsSchema,
|
[AppConnection.GitLab]: ValidateGitLabConnectionCredentialsSchema,
|
||||||
[AppConnection.Cloudflare]: ValidateCloudflareConnectionCredentialsSchema,
|
[AppConnection.Cloudflare]: ValidateCloudflareConnectionCredentialsSchema,
|
||||||
[AppConnection.Zabbix]: ValidateZabbixConnectionCredentialsSchema
|
[AppConnection.Zabbix]: ValidateZabbixConnectionCredentialsSchema,
|
||||||
|
[AppConnection.Railway]: ValidateRailwayConnectionCredentialsSchema,
|
||||||
|
[AppConnection.Bitbucket]: ValidateBitbucketConnectionCredentialsSchema
|
||||||
};
|
};
|
||||||
|
|
||||||
export const appConnectionServiceFactory = ({
|
export const appConnectionServiceFactory = ({
|
||||||
@ -533,6 +539,8 @@ export const appConnectionServiceFactory = ({
|
|||||||
flyio: flyioConnectionService(connectAppConnectionById),
|
flyio: flyioConnectionService(connectAppConnectionById),
|
||||||
gitlab: gitlabConnectionService(connectAppConnectionById, appConnectionDAL, kmsService),
|
gitlab: gitlabConnectionService(connectAppConnectionById, appConnectionDAL, kmsService),
|
||||||
cloudflare: cloudflareConnectionService(connectAppConnectionById),
|
cloudflare: cloudflareConnectionService(connectAppConnectionById),
|
||||||
zabbix: zabbixConnectionService(connectAppConnectionById)
|
zabbix: zabbixConnectionService(connectAppConnectionById),
|
||||||
|
railway: railwayConnectionService(connectAppConnectionById),
|
||||||
|
bitbucket: bitbucketConnectionService(connectAppConnectionById)
|
||||||
};
|
};
|
||||||
};
|
};
|
||||||
|
@ -56,6 +56,12 @@ import {
|
|||||||
TAzureKeyVaultConnectionInput,
|
TAzureKeyVaultConnectionInput,
|
||||||
TValidateAzureKeyVaultConnectionCredentialsSchema
|
TValidateAzureKeyVaultConnectionCredentialsSchema
|
||||||
} from "./azure-key-vault";
|
} from "./azure-key-vault";
|
||||||
|
import {
|
||||||
|
TBitbucketConnection,
|
||||||
|
TBitbucketConnectionConfig,
|
||||||
|
TBitbucketConnectionInput,
|
||||||
|
TValidateBitbucketConnectionCredentialsSchema
|
||||||
|
} from "./bitbucket";
|
||||||
import {
|
import {
|
||||||
TCamundaConnection,
|
TCamundaConnection,
|
||||||
TCamundaConnectionConfig,
|
TCamundaConnectionConfig,
|
||||||
@ -135,6 +141,12 @@ import {
|
|||||||
TPostgresConnectionInput,
|
TPostgresConnectionInput,
|
||||||
TValidatePostgresConnectionCredentialsSchema
|
TValidatePostgresConnectionCredentialsSchema
|
||||||
} from "./postgres";
|
} from "./postgres";
|
||||||
|
import {
|
||||||
|
TRailwayConnection,
|
||||||
|
TRailwayConnectionConfig,
|
||||||
|
TRailwayConnectionInput,
|
||||||
|
TValidateRailwayConnectionCredentialsSchema
|
||||||
|
} from "./railway";
|
||||||
import {
|
import {
|
||||||
TRenderConnection,
|
TRenderConnection,
|
||||||
TRenderConnectionConfig,
|
TRenderConnectionConfig,
|
||||||
@ -202,7 +214,9 @@ export type TAppConnection = { id: string } & (
|
|||||||
| TFlyioConnection
|
| TFlyioConnection
|
||||||
| TGitLabConnection
|
| TGitLabConnection
|
||||||
| TCloudflareConnection
|
| TCloudflareConnection
|
||||||
|
| TBitbucketConnection
|
||||||
| TZabbixConnection
|
| TZabbixConnection
|
||||||
|
| TRailwayConnection
|
||||||
);
|
);
|
||||||
|
|
||||||
export type TAppConnectionRaw = NonNullable<Awaited<ReturnType<TAppConnectionDALFactory["findById"]>>>;
|
export type TAppConnectionRaw = NonNullable<Awaited<ReturnType<TAppConnectionDALFactory["findById"]>>>;
|
||||||
@ -239,7 +253,9 @@ export type TAppConnectionInput = { id: string } & (
|
|||||||
| TFlyioConnectionInput
|
| TFlyioConnectionInput
|
||||||
| TGitLabConnectionInput
|
| TGitLabConnectionInput
|
||||||
| TCloudflareConnectionInput
|
| TCloudflareConnectionInput
|
||||||
|
| TBitbucketConnectionInput
|
||||||
| TZabbixConnectionInput
|
| TZabbixConnectionInput
|
||||||
|
| TRailwayConnectionInput
|
||||||
);
|
);
|
||||||
|
|
||||||
export type TSqlConnectionInput =
|
export type TSqlConnectionInput =
|
||||||
@ -284,7 +300,9 @@ export type TAppConnectionConfig =
|
|||||||
| TFlyioConnectionConfig
|
| TFlyioConnectionConfig
|
||||||
| TGitLabConnectionConfig
|
| TGitLabConnectionConfig
|
||||||
| TCloudflareConnectionConfig
|
| TCloudflareConnectionConfig
|
||||||
| TZabbixConnectionConfig;
|
| TBitbucketConnectionConfig
|
||||||
|
| TZabbixConnectionConfig
|
||||||
|
| TRailwayConnectionConfig;
|
||||||
|
|
||||||
export type TValidateAppConnectionCredentialsSchema =
|
export type TValidateAppConnectionCredentialsSchema =
|
||||||
| TValidateAwsConnectionCredentialsSchema
|
| TValidateAwsConnectionCredentialsSchema
|
||||||
@ -316,7 +334,9 @@ export type TValidateAppConnectionCredentialsSchema =
|
|||||||
| TValidateFlyioConnectionCredentialsSchema
|
| TValidateFlyioConnectionCredentialsSchema
|
||||||
| TValidateGitLabConnectionCredentialsSchema
|
| TValidateGitLabConnectionCredentialsSchema
|
||||||
| TValidateCloudflareConnectionCredentialsSchema
|
| TValidateCloudflareConnectionCredentialsSchema
|
||||||
| TValidateZabbixConnectionCredentialsSchema;
|
| TValidateBitbucketConnectionCredentialsSchema
|
||||||
|
| TValidateZabbixConnectionCredentialsSchema
|
||||||
|
| TValidateRailwayConnectionCredentialsSchema;
|
||||||
|
|
||||||
export type TListAwsConnectionKmsKeys = {
|
export type TListAwsConnectionKmsKeys = {
|
||||||
connectionId: string;
|
connectionId: string;
|
||||||
|
@ -0,0 +1,3 @@
|
|||||||
|
export enum BitbucketConnectionMethod {
|
||||||
|
ApiToken = "api-token"
|
||||||
|
}
|
@ -0,0 +1,117 @@
|
|||||||
|
import { AxiosError } from "axios";
|
||||||
|
|
||||||
|
import { request } from "@app/lib/config/request";
|
||||||
|
import { BadRequestError } from "@app/lib/errors";
|
||||||
|
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
|
||||||
|
import { IntegrationUrls } from "@app/services/integration-auth/integration-list";
|
||||||
|
|
||||||
|
import { BitbucketConnectionMethod } from "./bitbucket-connection-enums";
|
||||||
|
import {
|
||||||
|
TBitbucketConnection,
|
||||||
|
TBitbucketConnectionConfig,
|
||||||
|
TBitbucketRepo,
|
||||||
|
TBitbucketWorkspace
|
||||||
|
} from "./bitbucket-connection-types";
|
||||||
|
|
||||||
|
export const getBitbucketConnectionListItem = () => {
|
||||||
|
return {
|
||||||
|
name: "Bitbucket" as const,
|
||||||
|
app: AppConnection.Bitbucket as const,
|
||||||
|
methods: Object.values(BitbucketConnectionMethod) as [BitbucketConnectionMethod.ApiToken]
|
||||||
|
};
|
||||||
|
};
|
||||||
|
|
||||||
|
export const getBitbucketUser = async ({ email, apiToken }: { email: string; apiToken: string }) => {
|
||||||
|
try {
|
||||||
|
const { data } = await request.get<{ username: string }>(`${IntegrationUrls.BITBUCKET_API_URL}/2.0/user`, {
|
||||||
|
headers: {
|
||||||
|
Authorization: `Basic ${Buffer.from(`${email}:${apiToken}`).toString("base64")}`,
|
||||||
|
Accept: "application/json"
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
return data;
|
||||||
|
} catch (error: unknown) {
|
||||||
|
if (error instanceof AxiosError) {
|
||||||
|
throw new BadRequestError({
|
||||||
|
message: `Failed to validate credentials: ${error.message || "Unknown error"}`
|
||||||
|
});
|
||||||
|
}
|
||||||
|
throw new BadRequestError({
|
||||||
|
message: "Unable to validate connection: verify credentials"
|
||||||
|
});
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
export const validateBitbucketConnectionCredentials = async (config: TBitbucketConnectionConfig) => {
|
||||||
|
await getBitbucketUser(config.credentials);
|
||||||
|
return config.credentials;
|
||||||
|
};
|
||||||
|
|
||||||
|
interface BitbucketWorkspacesResponse {
|
||||||
|
values: TBitbucketWorkspace[];
|
||||||
|
next?: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
export const listBitbucketWorkspaces = async (appConnection: TBitbucketConnection) => {
|
||||||
|
const { email, apiToken } = appConnection.credentials;
|
||||||
|
|
||||||
|
const headers = {
|
||||||
|
Authorization: `Basic ${Buffer.from(`${email}:${apiToken}`).toString("base64")}`,
|
||||||
|
Accept: "application/json"
|
||||||
|
};
|
||||||
|
|
||||||
|
let allWorkspaces: TBitbucketWorkspace[] = [];
|
||||||
|
let nextUrl: string | undefined = `${IntegrationUrls.BITBUCKET_API_URL}/2.0/workspaces?pagelen=100`;
|
||||||
|
let iterationCount = 0;
|
||||||
|
|
||||||
|
// Limit to 10 iterations, fetching at most 10 * 100 = 1000 workspaces
|
||||||
|
while (nextUrl && iterationCount < 10) {
|
||||||
|
// eslint-disable-next-line no-await-in-loop
|
||||||
|
const { data }: { data: BitbucketWorkspacesResponse } = await request.get<BitbucketWorkspacesResponse>(nextUrl, {
|
||||||
|
headers
|
||||||
|
});
|
||||||
|
|
||||||
|
allWorkspaces = allWorkspaces.concat(data.values.map((workspace) => ({ slug: workspace.slug })));
|
||||||
|
nextUrl = data.next;
|
||||||
|
iterationCount += 1;
|
||||||
|
}
|
||||||
|
|
||||||
|
return allWorkspaces;
|
||||||
|
};
|
||||||
|
|
||||||
|
interface BitbucketRepositoriesResponse {
|
||||||
|
values: TBitbucketRepo[];
|
||||||
|
next?: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
export const listBitbucketRepositories = async (appConnection: TBitbucketConnection, workspaceSlug: string) => {
|
||||||
|
const { email, apiToken } = appConnection.credentials;
|
||||||
|
|
||||||
|
const headers = {
|
||||||
|
Authorization: `Basic ${Buffer.from(`${email}:${apiToken}`).toString("base64")}`,
|
||||||
|
Accept: "application/json"
|
||||||
|
};
|
||||||
|
|
||||||
|
let allRepos: TBitbucketRepo[] = [];
|
||||||
|
let nextUrl: string | undefined =
|
||||||
|
`${IntegrationUrls.BITBUCKET_API_URL}/2.0/repositories/${encodeURIComponent(workspaceSlug)}?pagelen=100`;
|
||||||
|
let iterationCount = 0;
|
||||||
|
|
||||||
|
// Limit to 10 iterations, fetching at most 10 * 100 = 1000 repositories
|
||||||
|
while (nextUrl && iterationCount < 10) {
|
||||||
|
// eslint-disable-next-line no-await-in-loop
|
||||||
|
const { data }: { data: BitbucketRepositoriesResponse } = await request.get<BitbucketRepositoriesResponse>(
|
||||||
|
nextUrl,
|
||||||
|
{
|
||||||
|
headers
|
||||||
|
}
|
||||||
|
);
|
||||||
|
|
||||||
|
allRepos = allRepos.concat(data.values);
|
||||||
|
nextUrl = data.next;
|
||||||
|
iterationCount += 1;
|
||||||
|
}
|
||||||
|
|
||||||
|
return allRepos;
|
||||||
|
};
|
@ -0,0 +1,72 @@
|
|||||||
|
import z from "zod";
|
||||||
|
|
||||||
|
import { AppConnections } from "@app/lib/api-docs";
|
||||||
|
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
|
||||||
|
import {
|
||||||
|
BaseAppConnectionSchema,
|
||||||
|
GenericCreateAppConnectionFieldsSchema,
|
||||||
|
GenericUpdateAppConnectionFieldsSchema
|
||||||
|
} from "@app/services/app-connection/app-connection-schemas";
|
||||||
|
|
||||||
|
import { BitbucketConnectionMethod } from "./bitbucket-connection-enums";
|
||||||
|
|
||||||
|
export const BitbucketConnectionAccessTokenCredentialsSchema = z.object({
|
||||||
|
apiToken: z
|
||||||
|
.string()
|
||||||
|
.trim()
|
||||||
|
.min(1, "API Token required")
|
||||||
|
.max(255)
|
||||||
|
.describe(AppConnections.CREDENTIALS.BITBUCKET.apiToken),
|
||||||
|
email: z
|
||||||
|
.string()
|
||||||
|
.email()
|
||||||
|
.trim()
|
||||||
|
.min(1, "Email required")
|
||||||
|
.max(255)
|
||||||
|
.describe(AppConnections.CREDENTIALS.BITBUCKET.email)
|
||||||
|
});
|
||||||
|
|
||||||
|
const BaseBitbucketConnectionSchema = BaseAppConnectionSchema.extend({ app: z.literal(AppConnection.Bitbucket) });
|
||||||
|
|
||||||
|
export const BitbucketConnectionSchema = BaseBitbucketConnectionSchema.extend({
|
||||||
|
method: z.literal(BitbucketConnectionMethod.ApiToken),
|
||||||
|
credentials: BitbucketConnectionAccessTokenCredentialsSchema
|
||||||
|
});
|
||||||
|
|
||||||
|
export const SanitizedBitbucketConnectionSchema = z.discriminatedUnion("method", [
|
||||||
|
BaseBitbucketConnectionSchema.extend({
|
||||||
|
method: z.literal(BitbucketConnectionMethod.ApiToken),
|
||||||
|
credentials: BitbucketConnectionAccessTokenCredentialsSchema.pick({
|
||||||
|
email: true
|
||||||
|
})
|
||||||
|
})
|
||||||
|
]);
|
||||||
|
|
||||||
|
export const ValidateBitbucketConnectionCredentialsSchema = z.discriminatedUnion("method", [
|
||||||
|
z.object({
|
||||||
|
method: z
|
||||||
|
.literal(BitbucketConnectionMethod.ApiToken)
|
||||||
|
.describe(AppConnections.CREATE(AppConnection.Bitbucket).method),
|
||||||
|
credentials: BitbucketConnectionAccessTokenCredentialsSchema.describe(
|
||||||
|
AppConnections.CREATE(AppConnection.Bitbucket).credentials
|
||||||
|
)
|
||||||
|
})
|
||||||
|
]);
|
||||||
|
|
||||||
|
export const CreateBitbucketConnectionSchema = ValidateBitbucketConnectionCredentialsSchema.and(
|
||||||
|
GenericCreateAppConnectionFieldsSchema(AppConnection.Bitbucket)
|
||||||
|
);
|
||||||
|
|
||||||
|
export const UpdateBitbucketConnectionSchema = z
|
||||||
|
.object({
|
||||||
|
credentials: BitbucketConnectionAccessTokenCredentialsSchema.optional().describe(
|
||||||
|
AppConnections.UPDATE(AppConnection.Bitbucket).credentials
|
||||||
|
)
|
||||||
|
})
|
||||||
|
.and(GenericUpdateAppConnectionFieldsSchema(AppConnection.Bitbucket));
|
||||||
|
|
||||||
|
export const BitbucketConnectionListItemSchema = z.object({
|
||||||
|
name: z.literal("Bitbucket"),
|
||||||
|
app: z.literal(AppConnection.Bitbucket),
|
||||||
|
methods: z.nativeEnum(BitbucketConnectionMethod).array()
|
||||||
|
});
|
@ -0,0 +1,33 @@
|
|||||||
|
import { OrgServiceActor } from "@app/lib/types";
|
||||||
|
|
||||||
|
import { AppConnection } from "../app-connection-enums";
|
||||||
|
import { listBitbucketRepositories, listBitbucketWorkspaces } from "./bitbucket-connection-fns";
|
||||||
|
import { TBitbucketConnection, TGetBitbucketRepositoriesDTO } from "./bitbucket-connection-types";
|
||||||
|
|
||||||
|
type TGetAppConnectionFunc = (
|
||||||
|
app: AppConnection,
|
||||||
|
connectionId: string,
|
||||||
|
actor: OrgServiceActor
|
||||||
|
) => Promise<TBitbucketConnection>;
|
||||||
|
|
||||||
|
export const bitbucketConnectionService = (getAppConnection: TGetAppConnectionFunc) => {
|
||||||
|
const listWorkspaces = async (connectionId: string, actor: OrgServiceActor) => {
|
||||||
|
const appConnection = await getAppConnection(AppConnection.Bitbucket, connectionId, actor);
|
||||||
|
const workspaces = await listBitbucketWorkspaces(appConnection);
|
||||||
|
return workspaces;
|
||||||
|
};
|
||||||
|
|
||||||
|
const listRepositories = async (
|
||||||
|
{ connectionId, workspaceSlug }: TGetBitbucketRepositoriesDTO,
|
||||||
|
actor: OrgServiceActor
|
||||||
|
) => {
|
||||||
|
const appConnection = await getAppConnection(AppConnection.Bitbucket, connectionId, actor);
|
||||||
|
const repositories = await listBitbucketRepositories(appConnection, workspaceSlug);
|
||||||
|
return repositories;
|
||||||
|
};
|
||||||
|
|
||||||
|
return {
|
||||||
|
listWorkspaces,
|
||||||
|
listRepositories
|
||||||
|
};
|
||||||
|
};
|
@ -0,0 +1,40 @@
|
|||||||
|
import z from "zod";
|
||||||
|
|
||||||
|
import { DiscriminativePick } from "@app/lib/types";
|
||||||
|
|
||||||
|
import { AppConnection } from "../app-connection-enums";
|
||||||
|
import {
|
||||||
|
BitbucketConnectionSchema,
|
||||||
|
CreateBitbucketConnectionSchema,
|
||||||
|
ValidateBitbucketConnectionCredentialsSchema
|
||||||
|
} from "./bitbucket-connection-schemas";
|
||||||
|
|
||||||
|
export type TBitbucketConnection = z.infer<typeof BitbucketConnectionSchema>;
|
||||||
|
|
||||||
|
export type TBitbucketConnectionInput = z.infer<typeof CreateBitbucketConnectionSchema> & {
|
||||||
|
app: AppConnection.Bitbucket;
|
||||||
|
};
|
||||||
|
|
||||||
|
export type TValidateBitbucketConnectionCredentialsSchema = typeof ValidateBitbucketConnectionCredentialsSchema;
|
||||||
|
|
||||||
|
export type TBitbucketConnectionConfig = DiscriminativePick<
|
||||||
|
TBitbucketConnectionInput,
|
||||||
|
"method" | "app" | "credentials"
|
||||||
|
> & {
|
||||||
|
orgId: string;
|
||||||
|
};
|
||||||
|
|
||||||
|
export type TGetBitbucketRepositoriesDTO = {
|
||||||
|
connectionId: string;
|
||||||
|
workspaceSlug: string;
|
||||||
|
};
|
||||||
|
|
||||||
|
export type TBitbucketWorkspace = {
|
||||||
|
slug: string;
|
||||||
|
};
|
||||||
|
|
||||||
|
export type TBitbucketRepo = {
|
||||||
|
uuid: string;
|
||||||
|
full_name: string; // workspace-slug/repo-slug
|
||||||
|
slug: string;
|
||||||
|
};
|
4
backend/src/services/app-connection/bitbucket/index.ts
Normal file
4
backend/src/services/app-connection/bitbucket/index.ts
Normal file
@ -0,0 +1,4 @@
|
|||||||
|
export * from "./bitbucket-connection-enums";
|
||||||
|
export * from "./bitbucket-connection-fns";
|
||||||
|
export * from "./bitbucket-connection-schemas";
|
||||||
|
export * from "./bitbucket-connection-types";
|
4
backend/src/services/app-connection/railway/index.ts
Normal file
4
backend/src/services/app-connection/railway/index.ts
Normal file
@ -0,0 +1,4 @@
|
|||||||
|
export * from "./railway-connection-constants";
|
||||||
|
export * from "./railway-connection-fns";
|
||||||
|
export * from "./railway-connection-schemas";
|
||||||
|
export * from "./railway-connection-types";
|
@ -0,0 +1,5 @@
|
|||||||
|
export enum RailwayConnectionMethod {
|
||||||
|
AccountToken = "account-token",
|
||||||
|
ProjectToken = "project-token",
|
||||||
|
TeamToken = "team-token"
|
||||||
|
}
|
@ -0,0 +1,66 @@
|
|||||||
|
/* eslint-disable no-await-in-loop */
|
||||||
|
import { AxiosError } from "axios";
|
||||||
|
|
||||||
|
import { BadRequestError } from "@app/lib/errors";
|
||||||
|
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
|
||||||
|
|
||||||
|
import { RailwayConnectionMethod } from "./railway-connection-constants";
|
||||||
|
import { RailwayPublicAPI } from "./railway-connection-public-client";
|
||||||
|
import { TRailwayConnection, TRailwayConnectionConfig } from "./railway-connection-types";
|
||||||
|
|
||||||
|
export const getRailwayConnectionListItem = () => {
|
||||||
|
return {
|
||||||
|
name: "Railway" as const,
|
||||||
|
app: AppConnection.Railway as const,
|
||||||
|
methods: Object.values(RailwayConnectionMethod)
|
||||||
|
};
|
||||||
|
};
|
||||||
|
|
||||||
|
export const validateRailwayConnectionCredentials = async (config: TRailwayConnectionConfig) => {
|
||||||
|
const { credentials, method } = config;
|
||||||
|
|
||||||
|
try {
|
||||||
|
await RailwayPublicAPI.healthcheck({
|
||||||
|
method,
|
||||||
|
credentials
|
||||||
|
});
|
||||||
|
} catch (error: unknown) {
|
||||||
|
if (error instanceof AxiosError) {
|
||||||
|
throw new BadRequestError({
|
||||||
|
message: `Failed to validate credentials: ${error.message || "Unknown error"}`
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
throw new BadRequestError({
|
||||||
|
message: "Unable to validate connection - verify credentials"
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
return credentials;
|
||||||
|
};
|
||||||
|
|
||||||
|
export const listProjects = async (appConnection: TRailwayConnection) => {
|
||||||
|
const { credentials, method } = appConnection;
|
||||||
|
|
||||||
|
try {
|
||||||
|
return await RailwayPublicAPI.listProjects({
|
||||||
|
method,
|
||||||
|
credentials
|
||||||
|
});
|
||||||
|
} catch (error: unknown) {
|
||||||
|
if (error instanceof AxiosError) {
|
||||||
|
throw new BadRequestError({
|
||||||
|
message: `Failed to list projects: ${error.message || "Unknown error"}`
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
if (error instanceof BadRequestError) {
|
||||||
|
throw error;
|
||||||
|
}
|
||||||
|
|
||||||
|
throw new BadRequestError({
|
||||||
|
message: "Unable to list projects",
|
||||||
|
error
|
||||||
|
});
|
||||||
|
}
|
||||||
|
};
|
@ -0,0 +1,237 @@
|
|||||||
|
/* eslint-disable class-methods-use-this */
|
||||||
|
import { AxiosError, AxiosInstance, AxiosResponse } from "axios";
|
||||||
|
|
||||||
|
import { createRequestClient } from "@app/lib/config/request";
|
||||||
|
import { BadRequestError } from "@app/lib/errors";
|
||||||
|
import { IntegrationUrls } from "@app/services/integration-auth/integration-list";
|
||||||
|
|
||||||
|
import { RailwayConnectionMethod } from "./railway-connection-constants";
|
||||||
|
import {
|
||||||
|
RailwayAccountWorkspaceListSchema,
|
||||||
|
RailwayGetProjectsByProjectTokenSchema,
|
||||||
|
RailwayGetSubscriptionTypeSchema,
|
||||||
|
RailwayProjectsListSchema
|
||||||
|
} from "./railway-connection-schemas";
|
||||||
|
import { RailwayProject, TRailwayConnectionConfig, TRailwayResponse } from "./railway-connection-types";
|
||||||
|
|
||||||
|
type RailwaySendReqOptions = Pick<TRailwayConnectionConfig, "credentials" | "method">;
|
||||||
|
|
||||||
|
export function getRailwayAuthHeaders(method: RailwayConnectionMethod, token: string): Record<string, string> {
|
||||||
|
switch (method) {
|
||||||
|
case RailwayConnectionMethod.AccountToken:
|
||||||
|
case RailwayConnectionMethod.TeamToken:
|
||||||
|
return {
|
||||||
|
Authorization: token
|
||||||
|
};
|
||||||
|
case RailwayConnectionMethod.ProjectToken:
|
||||||
|
return {
|
||||||
|
"Project-Access-Token": token
|
||||||
|
};
|
||||||
|
default:
|
||||||
|
throw new Error(`Unsupported Railway connection method`);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export function getRailwayRatelimiter(headers: AxiosResponse["headers"]): {
|
||||||
|
isRatelimited: boolean;
|
||||||
|
maxAttempts: number;
|
||||||
|
wait: () => Promise<void>;
|
||||||
|
} {
|
||||||
|
const retryAfter: number | undefined = headers["Retry-After"] as number | undefined;
|
||||||
|
const requestsLeft = parseInt(headers["X-RateLimit-Remaining"] as string, 10);
|
||||||
|
const limitResetAt = headers["X-RateLimit-Reset"] as string;
|
||||||
|
|
||||||
|
const now = +new Date();
|
||||||
|
const nextReset = +new Date(limitResetAt);
|
||||||
|
|
||||||
|
const remaining = Math.min(0, nextReset - now);
|
||||||
|
|
||||||
|
const wait = () => {
|
||||||
|
return new Promise<void>((res) => {
|
||||||
|
setTimeout(res, remaining);
|
||||||
|
});
|
||||||
|
};
|
||||||
|
|
||||||
|
return {
|
||||||
|
isRatelimited: Boolean(retryAfter || requestsLeft === 0),
|
||||||
|
wait,
|
||||||
|
maxAttempts: 3
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
class RailwayPublicClient {
|
||||||
|
private client: AxiosInstance;
|
||||||
|
|
||||||
|
constructor() {
|
||||||
|
this.client = createRequestClient({
|
||||||
|
method: "POST",
|
||||||
|
baseURL: IntegrationUrls.RAILWAY_API_URL,
|
||||||
|
headers: {
|
||||||
|
"Content-Type": "application/json"
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
async send<T extends TRailwayResponse>(
|
||||||
|
query: string,
|
||||||
|
options: RailwaySendReqOptions,
|
||||||
|
variables: Record<string, string | Record<string, string>> = {},
|
||||||
|
retryAttempt: number = 0
|
||||||
|
): Promise<T["data"] | undefined> {
|
||||||
|
const body = {
|
||||||
|
query,
|
||||||
|
variables
|
||||||
|
};
|
||||||
|
|
||||||
|
const response = await this.client.request<T>({
|
||||||
|
data: body,
|
||||||
|
headers: getRailwayAuthHeaders(options.method, options.credentials.apiToken)
|
||||||
|
});
|
||||||
|
|
||||||
|
const { errors } = response.data;
|
||||||
|
|
||||||
|
if (Array.isArray(errors) && errors.length > 0) {
|
||||||
|
throw new AxiosError(errors[0].message);
|
||||||
|
}
|
||||||
|
|
||||||
|
const limiter = getRailwayRatelimiter(response.headers);
|
||||||
|
|
||||||
|
if (limiter.isRatelimited && retryAttempt <= limiter.maxAttempts) {
|
||||||
|
await limiter.wait();
|
||||||
|
return this.send(query, options, variables, retryAttempt + 1);
|
||||||
|
}
|
||||||
|
|
||||||
|
return response.data.data;
|
||||||
|
}
|
||||||
|
|
||||||
|
healthcheck(config: RailwaySendReqOptions) {
|
||||||
|
switch (config.method) {
|
||||||
|
case RailwayConnectionMethod.AccountToken:
|
||||||
|
return this.send(`{ me { teams { edges { node { id } } } } }`, config);
|
||||||
|
case RailwayConnectionMethod.ProjectToken:
|
||||||
|
return this.send(`{ projectToken { projectId environmentId project { id } } }`, config);
|
||||||
|
case RailwayConnectionMethod.TeamToken:
|
||||||
|
return this.send(`{ projects { edges { node { id name team { id } } } } }`, config);
|
||||||
|
default:
|
||||||
|
throw new Error(`Unsupported Railway connection method`);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async getSubscriptionType(config: RailwaySendReqOptions & { projectId: string }) {
|
||||||
|
const res = await this.send(
|
||||||
|
`query project($projectId: String!) { project(id: $projectId) { subscriptionType }}`,
|
||||||
|
config,
|
||||||
|
{
|
||||||
|
projectId: config.projectId
|
||||||
|
}
|
||||||
|
);
|
||||||
|
|
||||||
|
const data = await RailwayGetSubscriptionTypeSchema.parseAsync(res);
|
||||||
|
|
||||||
|
return data.project.subscriptionType;
|
||||||
|
}
|
||||||
|
|
||||||
|
async listProjects(config: RailwaySendReqOptions): Promise<RailwayProject[]> {
|
||||||
|
switch (config.method) {
|
||||||
|
case RailwayConnectionMethod.TeamToken: {
|
||||||
|
const res = await this.send(
|
||||||
|
`{ projects { edges { node { id, name, services{ edges{ node { id, name } } } environments { edges { node { name, id } } } } } } }`,
|
||||||
|
config
|
||||||
|
);
|
||||||
|
|
||||||
|
const data = await RailwayProjectsListSchema.parseAsync(res);
|
||||||
|
|
||||||
|
return data.projects.edges.map((p) => ({
|
||||||
|
id: p.node.id,
|
||||||
|
name: p.node.name,
|
||||||
|
environments: p.node.environments.edges.map((e) => e.node),
|
||||||
|
services: p.node.services.edges.map((s) => s.node)
|
||||||
|
}));
|
||||||
|
}
|
||||||
|
|
||||||
|
case RailwayConnectionMethod.AccountToken: {
|
||||||
|
const res = await this.send(
|
||||||
|
`{ me { workspaces { id, name, team{ projects{ edges{ node{ id, name, services{ edges { node { name, id } } } environments { edges { node { name, id } } } } } } } } } }`,
|
||||||
|
config
|
||||||
|
);
|
||||||
|
|
||||||
|
const data = await RailwayAccountWorkspaceListSchema.parseAsync(res);
|
||||||
|
|
||||||
|
return data.me.workspaces.flatMap((w) =>
|
||||||
|
w.team.projects.edges.map((p) => ({
|
||||||
|
id: p.node.id,
|
||||||
|
name: p.node.name,
|
||||||
|
environments: p.node.environments.edges.map((e) => e.node),
|
||||||
|
services: p.node.services.edges.map((s) => s.node)
|
||||||
|
}))
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
case RailwayConnectionMethod.ProjectToken: {
|
||||||
|
const res = await this.send(
|
||||||
|
`query { projectToken { project { id, name, services { edges { node { name, id } } } environments { edges { node { name, id } } } } } }`,
|
||||||
|
config
|
||||||
|
);
|
||||||
|
|
||||||
|
const data = await RailwayGetProjectsByProjectTokenSchema.parseAsync(res);
|
||||||
|
|
||||||
|
const p = data.projectToken.project;
|
||||||
|
|
||||||
|
return [
|
||||||
|
{
|
||||||
|
id: p.id,
|
||||||
|
name: p.name,
|
||||||
|
environments: p.environments.edges.map((e) => e.node),
|
||||||
|
services: p.services.edges.map((s) => s.node)
|
||||||
|
}
|
||||||
|
];
|
||||||
|
}
|
||||||
|
|
||||||
|
default:
|
||||||
|
throw new Error(`Unsupported Railway connection method`);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async getVariables(
|
||||||
|
config: RailwaySendReqOptions,
|
||||||
|
variables: { projectId: string; environmentId: string; serviceId?: string }
|
||||||
|
) {
|
||||||
|
const res = await this.send<TRailwayResponse<{ variables: Record<string, string> }>>(
|
||||||
|
`query variables($environmentId: String!, $projectId: String!, $serviceId: String) { variables( projectId: $projectId, environmentId: $environmentId, serviceId: $serviceId ) }`,
|
||||||
|
config,
|
||||||
|
variables
|
||||||
|
);
|
||||||
|
|
||||||
|
if (!res?.variables) {
|
||||||
|
throw new BadRequestError({
|
||||||
|
message: "Failed to get railway variables - empty response"
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
return res.variables;
|
||||||
|
}
|
||||||
|
|
||||||
|
async deleteVariable(
|
||||||
|
config: RailwaySendReqOptions,
|
||||||
|
variables: { input: { projectId: string; environmentId: string; name: string; serviceId?: string } }
|
||||||
|
) {
|
||||||
|
await this.send<TRailwayResponse<{ variables: Record<string, string> }>>(
|
||||||
|
`mutation variableDelete($input: VariableDeleteInput!) { variableDelete(input: $input) }`,
|
||||||
|
config,
|
||||||
|
variables
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
async upsertVariable(
|
||||||
|
config: RailwaySendReqOptions,
|
||||||
|
variables: { input: { projectId: string; environmentId: string; name: string; value: string; serviceId?: string } }
|
||||||
|
) {
|
||||||
|
await this.send<TRailwayResponse<{ variables: Record<string, string> }>>(
|
||||||
|
`mutation variableUpsert($input: VariableUpsertInput!) { variableUpsert(input: $input) }`,
|
||||||
|
config,
|
||||||
|
variables
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export const RailwayPublicAPI = new RailwayPublicClient();
|
@ -0,0 +1,117 @@
|
|||||||
|
import z from "zod";
|
||||||
|
|
||||||
|
import { AppConnections } from "@app/lib/api-docs";
|
||||||
|
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
|
||||||
|
import {
|
||||||
|
BaseAppConnectionSchema,
|
||||||
|
GenericCreateAppConnectionFieldsSchema,
|
||||||
|
GenericUpdateAppConnectionFieldsSchema
|
||||||
|
} from "@app/services/app-connection/app-connection-schemas";
|
||||||
|
|
||||||
|
import { RailwayConnectionMethod } from "./railway-connection-constants";
|
||||||
|
|
||||||
|
export const RailwayConnectionMethodSchema = z
|
||||||
|
.nativeEnum(RailwayConnectionMethod)
|
||||||
|
.describe(AppConnections.CREATE(AppConnection.Railway).method);
|
||||||
|
|
||||||
|
export const RailwayConnectionAccessTokenCredentialsSchema = z.object({
|
||||||
|
apiToken: z
|
||||||
|
.string()
|
||||||
|
.trim()
|
||||||
|
.min(1, "API Token required")
|
||||||
|
.max(255)
|
||||||
|
.describe(AppConnections.CREDENTIALS.RAILWAY.apiToken)
|
||||||
|
});
|
||||||
|
|
||||||
|
const BaseRailwayConnectionSchema = BaseAppConnectionSchema.extend({
|
||||||
|
app: z.literal(AppConnection.Railway)
|
||||||
|
});
|
||||||
|
|
||||||
|
export const RailwayConnectionSchema = BaseRailwayConnectionSchema.extend({
|
||||||
|
method: RailwayConnectionMethodSchema,
|
||||||
|
credentials: RailwayConnectionAccessTokenCredentialsSchema
|
||||||
|
});
|
||||||
|
|
||||||
|
export const SanitizedRailwayConnectionSchema = z.discriminatedUnion("method", [
|
||||||
|
BaseRailwayConnectionSchema.extend({
|
||||||
|
method: RailwayConnectionMethodSchema,
|
||||||
|
credentials: RailwayConnectionAccessTokenCredentialsSchema.pick({})
|
||||||
|
})
|
||||||
|
]);
|
||||||
|
|
||||||
|
export const ValidateRailwayConnectionCredentialsSchema = z.discriminatedUnion("method", [
|
||||||
|
z.object({
|
||||||
|
method: RailwayConnectionMethodSchema,
|
||||||
|
credentials: RailwayConnectionAccessTokenCredentialsSchema.describe(
|
||||||
|
AppConnections.CREATE(AppConnection.Railway).credentials
|
||||||
|
)
|
||||||
|
})
|
||||||
|
]);
|
||||||
|
|
||||||
|
export const CreateRailwayConnectionSchema = ValidateRailwayConnectionCredentialsSchema.and(
|
||||||
|
GenericCreateAppConnectionFieldsSchema(AppConnection.Railway)
|
||||||
|
);
|
||||||
|
|
||||||
|
export const UpdateRailwayConnectionSchema = z
|
||||||
|
.object({
|
||||||
|
credentials: RailwayConnectionAccessTokenCredentialsSchema.optional().describe(
|
||||||
|
AppConnections.UPDATE(AppConnection.Railway).credentials
|
||||||
|
)
|
||||||
|
})
|
||||||
|
.and(GenericUpdateAppConnectionFieldsSchema(AppConnection.Railway));
|
||||||
|
|
||||||
|
export const RailwayConnectionListItemSchema = z.object({
|
||||||
|
name: z.literal("Railway"),
|
||||||
|
app: z.literal(AppConnection.Railway),
|
||||||
|
methods: z.nativeEnum(RailwayConnectionMethod).array()
|
||||||
|
});
|
||||||
|
|
||||||
|
export const RailwayResourceSchema = z.object({
|
||||||
|
node: z.object({
|
||||||
|
id: z.string(),
|
||||||
|
name: z.string()
|
||||||
|
})
|
||||||
|
});
|
||||||
|
|
||||||
|
export const RailwayProjectEdgeSchema = z.object({
|
||||||
|
node: z.object({
|
||||||
|
id: z.string(),
|
||||||
|
name: z.string(),
|
||||||
|
services: z.object({
|
||||||
|
edges: z.array(RailwayResourceSchema)
|
||||||
|
}),
|
||||||
|
environments: z.object({
|
||||||
|
edges: z.array(RailwayResourceSchema)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
});
|
||||||
|
|
||||||
|
export const RailwayProjectsListSchema = z.object({
|
||||||
|
projects: z.object({
|
||||||
|
edges: z.array(RailwayProjectEdgeSchema)
|
||||||
|
})
|
||||||
|
});
|
||||||
|
|
||||||
|
export const RailwayAccountWorkspaceListSchema = z.object({
|
||||||
|
me: z.object({
|
||||||
|
workspaces: z.array(
|
||||||
|
z.object({
|
||||||
|
id: z.string(),
|
||||||
|
name: z.string(),
|
||||||
|
team: RailwayProjectsListSchema
|
||||||
|
})
|
||||||
|
)
|
||||||
|
})
|
||||||
|
});
|
||||||
|
|
||||||
|
export const RailwayGetProjectsByProjectTokenSchema = z.object({
|
||||||
|
projectToken: z.object({
|
||||||
|
project: RailwayProjectEdgeSchema.shape.node
|
||||||
|
})
|
||||||
|
});
|
||||||
|
|
||||||
|
export const RailwayGetSubscriptionTypeSchema = z.object({
|
||||||
|
project: z.object({
|
||||||
|
subscriptionType: z.enum(["free", "hobby", "pro", "trial"])
|
||||||
|
})
|
||||||
|
});
|
@ -0,0 +1,30 @@
|
|||||||
|
import { logger } from "@app/lib/logger";
|
||||||
|
import { OrgServiceActor } from "@app/lib/types";
|
||||||
|
|
||||||
|
import { AppConnection } from "../app-connection-enums";
|
||||||
|
import { listProjects as getRailwayProjects } from "./railway-connection-fns";
|
||||||
|
import { TRailwayConnection } from "./railway-connection-types";
|
||||||
|
|
||||||
|
type TGetAppConnectionFunc = (
|
||||||
|
app: AppConnection,
|
||||||
|
connectionId: string,
|
||||||
|
actor: OrgServiceActor
|
||||||
|
) => Promise<TRailwayConnection>;
|
||||||
|
|
||||||
|
export const railwayConnectionService = (getAppConnection: TGetAppConnectionFunc) => {
|
||||||
|
const listProjects = async (connectionId: string, actor: OrgServiceActor) => {
|
||||||
|
const appConnection = await getAppConnection(AppConnection.Railway, connectionId, actor);
|
||||||
|
try {
|
||||||
|
const projects = await getRailwayProjects(appConnection);
|
||||||
|
|
||||||
|
return projects;
|
||||||
|
} catch (error) {
|
||||||
|
logger.error(error, "Failed to establish connection with Railway");
|
||||||
|
return [];
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
return {
|
||||||
|
listProjects
|
||||||
|
};
|
||||||
|
};
|
@ -0,0 +1,79 @@
|
|||||||
|
import z from "zod";
|
||||||
|
|
||||||
|
import { DiscriminativePick } from "@app/lib/types";
|
||||||
|
|
||||||
|
import { AppConnection } from "../app-connection-enums";
|
||||||
|
import {
|
||||||
|
CreateRailwayConnectionSchema,
|
||||||
|
RailwayConnectionSchema,
|
||||||
|
ValidateRailwayConnectionCredentialsSchema
|
||||||
|
} from "./railway-connection-schemas";
|
||||||
|
|
||||||
|
export type TRailwayConnection = z.infer<typeof RailwayConnectionSchema>;
|
||||||
|
|
||||||
|
export type TRailwayConnectionInput = z.infer<typeof CreateRailwayConnectionSchema> & {
|
||||||
|
app: AppConnection.Railway;
|
||||||
|
};
|
||||||
|
|
||||||
|
export type TValidateRailwayConnectionCredentialsSchema = typeof ValidateRailwayConnectionCredentialsSchema;
|
||||||
|
|
||||||
|
export type TRailwayConnectionConfig = DiscriminativePick<TRailwayConnection, "method" | "app" | "credentials"> & {
|
||||||
|
orgId: string;
|
||||||
|
};
|
||||||
|
|
||||||
|
export type TRailwayService = {
|
||||||
|
id: string;
|
||||||
|
name: string;
|
||||||
|
};
|
||||||
|
|
||||||
|
export type TRailwayEnvironment = {
|
||||||
|
id: string;
|
||||||
|
name: string;
|
||||||
|
};
|
||||||
|
|
||||||
|
export type RailwayProject = {
|
||||||
|
id: string;
|
||||||
|
name: string;
|
||||||
|
services: TRailwayService[];
|
||||||
|
environments: TRailwayEnvironment[];
|
||||||
|
};
|
||||||
|
|
||||||
|
export type TRailwayResponse<T = unknown> = {
|
||||||
|
data?: T;
|
||||||
|
errors?: {
|
||||||
|
message: string;
|
||||||
|
}[];
|
||||||
|
};
|
||||||
|
|
||||||
|
export type TAccountProjectListResponse = TRailwayResponse<{
|
||||||
|
projects: {
|
||||||
|
edges: TProjectEdge[];
|
||||||
|
};
|
||||||
|
}>;
|
||||||
|
|
||||||
|
export interface TProjectEdge {
|
||||||
|
node: {
|
||||||
|
id: string;
|
||||||
|
name: string;
|
||||||
|
services: {
|
||||||
|
edges: TServiceEdge[];
|
||||||
|
};
|
||||||
|
environments: {
|
||||||
|
edges: TEnvironmentEdge[];
|
||||||
|
};
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
type TServiceEdge = {
|
||||||
|
node: {
|
||||||
|
id: string;
|
||||||
|
name: string;
|
||||||
|
};
|
||||||
|
};
|
||||||
|
|
||||||
|
type TEnvironmentEdge = {
|
||||||
|
node: {
|
||||||
|
id: string;
|
||||||
|
name: string;
|
||||||
|
};
|
||||||
|
};
|
@ -814,9 +814,9 @@ const getAppsCloudflareWorkers = async ({ accessToken, accountId }: { accessToke
|
|||||||
};
|
};
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Return list of repositories for the BitBucket integration based on provided BitBucket workspace
|
* Return list of repositories for the Bitbucket integration based on provided Bitbucket workspace
|
||||||
*/
|
*/
|
||||||
const getAppsBitBucket = async ({ accessToken, workspaceSlug }: { accessToken: string; workspaceSlug?: string }) => {
|
const getAppsBitbucket = async ({ accessToken, workspaceSlug }: { accessToken: string; workspaceSlug?: string }) => {
|
||||||
interface RepositoriesResponse {
|
interface RepositoriesResponse {
|
||||||
size: number;
|
size: number;
|
||||||
page: number;
|
page: number;
|
||||||
@ -1302,7 +1302,7 @@ export const getApps = async ({
|
|||||||
});
|
});
|
||||||
|
|
||||||
case Integrations.BITBUCKET:
|
case Integrations.BITBUCKET:
|
||||||
return getAppsBitBucket({
|
return getAppsBitbucket({
|
||||||
accessToken,
|
accessToken,
|
||||||
workspaceSlug
|
workspaceSlug
|
||||||
});
|
});
|
||||||
|
@ -342,7 +342,7 @@ export const getIntegrationOptions = async () => {
|
|||||||
{
|
{
|
||||||
name: "Bitbucket",
|
name: "Bitbucket",
|
||||||
slug: "bitbucket",
|
slug: "bitbucket",
|
||||||
image: "BitBucket.png",
|
image: "Bitbucket.png",
|
||||||
isAvailable: true,
|
isAvailable: true,
|
||||||
type: "oauth",
|
type: "oauth",
|
||||||
clientId: appCfg.CLIENT_ID_BITBUCKET,
|
clientId: appCfg.CLIENT_ID_BITBUCKET,
|
||||||
|
@ -3921,9 +3921,9 @@ const syncSecretsCloudflareWorkers = async ({
|
|||||||
};
|
};
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Sync/push [secrets] to BitBucket repo with name [integration.app]
|
* Sync/push [secrets] to Bitbucket repo with name [integration.app]
|
||||||
*/
|
*/
|
||||||
const syncSecretsBitBucket = async ({
|
const syncSecretsBitbucket = async ({
|
||||||
integration,
|
integration,
|
||||||
secrets,
|
secrets,
|
||||||
accessToken
|
accessToken
|
||||||
@ -4832,7 +4832,7 @@ export const syncIntegrationSecrets = async ({
|
|||||||
});
|
});
|
||||||
break;
|
break;
|
||||||
case Integrations.BITBUCKET:
|
case Integrations.BITBUCKET:
|
||||||
await syncSecretsBitBucket({
|
await syncSecretsBitbucket({
|
||||||
integration,
|
integration,
|
||||||
secrets,
|
secrets,
|
||||||
accessToken
|
accessToken
|
||||||
|
@ -64,7 +64,7 @@ type ExchangeCodeGitlabResponse = {
|
|||||||
created_at: number;
|
created_at: number;
|
||||||
};
|
};
|
||||||
|
|
||||||
type ExchangeCodeBitBucketResponse = {
|
type ExchangeCodeBitbucketResponse = {
|
||||||
access_token: string;
|
access_token: string;
|
||||||
token_type: string;
|
token_type: string;
|
||||||
expires_in: number;
|
expires_in: number;
|
||||||
@ -392,10 +392,10 @@ const exchangeCodeGitlab = async ({ code, url }: { code: string; url?: string })
|
|||||||
};
|
};
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Return [accessToken], [accessExpiresAt], and [refreshToken] for BitBucket
|
* Return [accessToken], [accessExpiresAt], and [refreshToken] for Bitbucket
|
||||||
* code-token exchange
|
* code-token exchange
|
||||||
*/
|
*/
|
||||||
const exchangeCodeBitBucket = async ({ code }: { code: string }) => {
|
const exchangeCodeBitbucket = async ({ code }: { code: string }) => {
|
||||||
const accessExpiresAt = new Date();
|
const accessExpiresAt = new Date();
|
||||||
const appCfg = getConfig();
|
const appCfg = getConfig();
|
||||||
if (!appCfg.CLIENT_SECRET_BITBUCKET || !appCfg.CLIENT_ID_BITBUCKET) {
|
if (!appCfg.CLIENT_SECRET_BITBUCKET || !appCfg.CLIENT_ID_BITBUCKET) {
|
||||||
@ -403,7 +403,7 @@ const exchangeCodeBitBucket = async ({ code }: { code: string }) => {
|
|||||||
}
|
}
|
||||||
|
|
||||||
const res = (
|
const res = (
|
||||||
await request.post<ExchangeCodeBitBucketResponse>(
|
await request.post<ExchangeCodeBitbucketResponse>(
|
||||||
IntegrationUrls.BITBUCKET_TOKEN_URL,
|
IntegrationUrls.BITBUCKET_TOKEN_URL,
|
||||||
new URLSearchParams({
|
new URLSearchParams({
|
||||||
grant_type: "authorization_code",
|
grant_type: "authorization_code",
|
||||||
@ -490,7 +490,7 @@ export const exchangeCode = async ({
|
|||||||
url
|
url
|
||||||
});
|
});
|
||||||
case Integrations.BITBUCKET:
|
case Integrations.BITBUCKET:
|
||||||
return exchangeCodeBitBucket({
|
return exchangeCodeBitbucket({
|
||||||
code
|
code
|
||||||
});
|
});
|
||||||
default:
|
default:
|
||||||
@ -524,7 +524,7 @@ type RefreshTokenGitLabResponse = {
|
|||||||
created_at: number;
|
created_at: number;
|
||||||
};
|
};
|
||||||
|
|
||||||
type RefreshTokenBitBucketResponse = {
|
type RefreshTokenBitbucketResponse = {
|
||||||
access_token: string;
|
access_token: string;
|
||||||
token_type: string;
|
token_type: string;
|
||||||
expires_in: number;
|
expires_in: number;
|
||||||
@ -653,9 +653,9 @@ const exchangeRefreshGitLab = async ({ refreshToken, url }: { url?: string | nul
|
|||||||
|
|
||||||
/**
|
/**
|
||||||
* Return new access token by exchanging refresh token [refreshToken] for the
|
* Return new access token by exchanging refresh token [refreshToken] for the
|
||||||
* BitBucket integration
|
* Bitbucket integration
|
||||||
*/
|
*/
|
||||||
const exchangeRefreshBitBucket = async ({ refreshToken }: { refreshToken: string }) => {
|
const exchangeRefreshBitbucket = async ({ refreshToken }: { refreshToken: string }) => {
|
||||||
const accessExpiresAt = new Date();
|
const accessExpiresAt = new Date();
|
||||||
const appCfg = getConfig();
|
const appCfg = getConfig();
|
||||||
if (!appCfg.CLIENT_SECRET_BITBUCKET || !appCfg.CLIENT_ID_BITBUCKET) {
|
if (!appCfg.CLIENT_SECRET_BITBUCKET || !appCfg.CLIENT_ID_BITBUCKET) {
|
||||||
@ -664,7 +664,7 @@ const exchangeRefreshBitBucket = async ({ refreshToken }: { refreshToken: string
|
|||||||
const {
|
const {
|
||||||
data
|
data
|
||||||
}: {
|
}: {
|
||||||
data: RefreshTokenBitBucketResponse;
|
data: RefreshTokenBitbucketResponse;
|
||||||
} = await request.post(
|
} = await request.post(
|
||||||
IntegrationUrls.BITBUCKET_TOKEN_URL,
|
IntegrationUrls.BITBUCKET_TOKEN_URL,
|
||||||
new URLSearchParams({
|
new URLSearchParams({
|
||||||
@ -794,7 +794,7 @@ export const exchangeRefresh = async (
|
|||||||
url
|
url
|
||||||
});
|
});
|
||||||
case Integrations.BITBUCKET:
|
case Integrations.BITBUCKET:
|
||||||
return exchangeRefreshBitBucket({
|
return exchangeRefreshBitbucket({
|
||||||
refreshToken
|
refreshToken
|
||||||
});
|
});
|
||||||
case Integrations.GCP_SECRET_MANAGER:
|
case Integrations.GCP_SECRET_MANAGER:
|
||||||
|
@ -108,22 +108,22 @@ export const orgMembershipDALFactory = (db: TDbClient) => {
|
|||||||
const now = new Date();
|
const now = new Date();
|
||||||
const oneWeekAgo = new Date(now.getTime() - 7 * 24 * 60 * 60 * 1000);
|
const oneWeekAgo = new Date(now.getTime() - 7 * 24 * 60 * 60 * 1000);
|
||||||
const oneMonthAgo = new Date(now.getTime() - 30 * 24 * 60 * 60 * 1000);
|
const oneMonthAgo = new Date(now.getTime() - 30 * 24 * 60 * 60 * 1000);
|
||||||
const threeMonthsAgo = new Date(now.getTime() - 90 * 24 * 60 * 60 * 1000);
|
const twelveMonthsAgo = new Date(now.getTime() - 360 * 24 * 60 * 60 * 1000);
|
||||||
|
|
||||||
const memberships = await db
|
const memberships = await db
|
||||||
.replicaNode()(TableName.OrgMembership)
|
.replicaNode()(TableName.OrgMembership)
|
||||||
.where("status", "invited")
|
.where("status", "invited")
|
||||||
.where((qb) => {
|
.where((qb) => {
|
||||||
// lastInvitedAt is null AND createdAt is between 1 week and 3 months ago
|
// lastInvitedAt is null AND createdAt is between 1 week and 12 months ago
|
||||||
void qb
|
void qb
|
||||||
.whereNull(`${TableName.OrgMembership}.lastInvitedAt`)
|
.whereNull(`${TableName.OrgMembership}.lastInvitedAt`)
|
||||||
.whereBetween(`${TableName.OrgMembership}.createdAt`, [threeMonthsAgo, oneWeekAgo]);
|
.whereBetween(`${TableName.OrgMembership}.createdAt`, [twelveMonthsAgo, oneWeekAgo]);
|
||||||
})
|
})
|
||||||
.orWhere((qb) => {
|
.orWhere((qb) => {
|
||||||
// lastInvitedAt is older than 1 week ago AND createdAt is younger than 1 month ago
|
// lastInvitedAt is older than 1 week ago AND createdAt is younger than 1 month ago
|
||||||
void qb
|
void qb
|
||||||
.where(`${TableName.OrgMembership}.lastInvitedAt`, "<", oneMonthAgo)
|
.where(`${TableName.OrgMembership}.lastInvitedAt`, "<", oneWeekAgo)
|
||||||
.where(`${TableName.OrgMembership}.createdAt`, ">", oneWeekAgo);
|
.where(`${TableName.OrgMembership}.createdAt`, ">", oneMonthAgo);
|
||||||
});
|
});
|
||||||
|
|
||||||
return memberships;
|
return memberships;
|
||||||
@ -135,9 +135,22 @@ export const orgMembershipDALFactory = (db: TDbClient) => {
|
|||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
|
const updateLastInvitedAtByIds = async (membershipIds: string[]) => {
|
||||||
|
try {
|
||||||
|
if (membershipIds.length === 0) return;
|
||||||
|
await db(TableName.OrgMembership).whereIn("id", membershipIds).update({ lastInvitedAt: new Date() });
|
||||||
|
} catch (error) {
|
||||||
|
throw new DatabaseError({
|
||||||
|
error,
|
||||||
|
name: "Update last invited at by ids"
|
||||||
|
});
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
return {
|
return {
|
||||||
...orgMembershipOrm,
|
...orgMembershipOrm,
|
||||||
findOrgMembershipById,
|
findOrgMembershipById,
|
||||||
findRecentInvitedMemberships
|
findRecentInvitedMemberships,
|
||||||
|
updateLastInvitedAtByIds
|
||||||
};
|
};
|
||||||
};
|
};
|
||||||
|
@ -36,6 +36,8 @@ import { getConfig } from "@app/lib/config/env";
|
|||||||
import { generateAsymmetricKeyPair } from "@app/lib/crypto";
|
import { generateAsymmetricKeyPair } from "@app/lib/crypto";
|
||||||
import { generateSymmetricKey, infisicalSymmetricDecrypt, infisicalSymmetricEncypt } from "@app/lib/crypto/encryption";
|
import { generateSymmetricKey, infisicalSymmetricDecrypt, infisicalSymmetricEncypt } from "@app/lib/crypto/encryption";
|
||||||
import { generateUserSrpKeys } from "@app/lib/crypto/srp";
|
import { generateUserSrpKeys } from "@app/lib/crypto/srp";
|
||||||
|
import { applyJitter } from "@app/lib/dates";
|
||||||
|
import { delay as delayMs } from "@app/lib/delay";
|
||||||
import {
|
import {
|
||||||
BadRequestError,
|
BadRequestError,
|
||||||
ForbiddenRequestError,
|
ForbiddenRequestError,
|
||||||
@ -44,9 +46,10 @@ import {
|
|||||||
UnauthorizedError
|
UnauthorizedError
|
||||||
} from "@app/lib/errors";
|
} from "@app/lib/errors";
|
||||||
import { groupBy } from "@app/lib/fn";
|
import { groupBy } from "@app/lib/fn";
|
||||||
|
import { logger } from "@app/lib/logger";
|
||||||
import { alphaNumericNanoId } from "@app/lib/nanoid";
|
import { alphaNumericNanoId } from "@app/lib/nanoid";
|
||||||
import { isDisposableEmail } from "@app/lib/validator";
|
import { isDisposableEmail } from "@app/lib/validator";
|
||||||
import { TQueueServiceFactory } from "@app/queue";
|
import { QueueName, TQueueServiceFactory } from "@app/queue";
|
||||||
import { getDefaultOrgMembershipRoleForUpdateOrg } from "@app/services/org/org-role-fns";
|
import { getDefaultOrgMembershipRoleForUpdateOrg } from "@app/services/org/org-role-fns";
|
||||||
import { TOrgMembershipDALFactory } from "@app/services/org-membership/org-membership-dal";
|
import { TOrgMembershipDALFactory } from "@app/services/org-membership/org-membership-dal";
|
||||||
import { TUserAliasDALFactory } from "@app/services/user-alias/user-alias-dal";
|
import { TUserAliasDALFactory } from "@app/services/user-alias/user-alias-dal";
|
||||||
@ -109,7 +112,12 @@ type TOrgServiceFactoryDep = {
|
|||||||
projectKeyDAL: Pick<TProjectKeyDALFactory, "find" | "delete" | "insertMany" | "findLatestProjectKey" | "create">;
|
projectKeyDAL: Pick<TProjectKeyDALFactory, "find" | "delete" | "insertMany" | "findLatestProjectKey" | "create">;
|
||||||
orgMembershipDAL: Pick<
|
orgMembershipDAL: Pick<
|
||||||
TOrgMembershipDALFactory,
|
TOrgMembershipDALFactory,
|
||||||
"findOrgMembershipById" | "findOne" | "findById" | "findRecentInvitedMemberships" | "updateById"
|
| "findOrgMembershipById"
|
||||||
|
| "findOne"
|
||||||
|
| "findById"
|
||||||
|
| "findRecentInvitedMemberships"
|
||||||
|
| "updateById"
|
||||||
|
| "updateLastInvitedAtByIds"
|
||||||
>;
|
>;
|
||||||
incidentContactDAL: TIncidentContactsDALFactory;
|
incidentContactDAL: TIncidentContactsDALFactory;
|
||||||
samlConfigDAL: Pick<TSamlConfigDALFactory, "findOne">;
|
samlConfigDAL: Pick<TSamlConfigDALFactory, "findOne">;
|
||||||
@ -763,6 +771,10 @@ export const orgServiceFactory = ({
|
|||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
|
await orgMembershipDAL.updateById(inviteeOrgMembership.id, {
|
||||||
|
lastInvitedAt: new Date()
|
||||||
|
});
|
||||||
|
|
||||||
return { signupToken: undefined };
|
return { signupToken: undefined };
|
||||||
};
|
};
|
||||||
|
|
||||||
@ -1429,10 +1441,13 @@ export const orgServiceFactory = ({
|
|||||||
* Re-send emails to users who haven't accepted an invite yet
|
* Re-send emails to users who haven't accepted an invite yet
|
||||||
*/
|
*/
|
||||||
const notifyInvitedUsers = async () => {
|
const notifyInvitedUsers = async () => {
|
||||||
|
logger.info(`${QueueName.DailyResourceCleanUp}: notify invited users started`);
|
||||||
|
|
||||||
const invitedUsers = await orgMembershipDAL.findRecentInvitedMemberships();
|
const invitedUsers = await orgMembershipDAL.findRecentInvitedMemberships();
|
||||||
const appCfg = getConfig();
|
const appCfg = getConfig();
|
||||||
|
|
||||||
const orgCache: Record<string, { name: string; id: string } | undefined> = {};
|
const orgCache: Record<string, { name: string; id: string } | undefined> = {};
|
||||||
|
const notifiedUsers: string[] = [];
|
||||||
|
|
||||||
await Promise.all(
|
await Promise.all(
|
||||||
invitedUsers.map(async (invitedUser) => {
|
invitedUsers.map(async (invitedUser) => {
|
||||||
@ -1451,25 +1466,32 @@ export const orgServiceFactory = ({
|
|||||||
});
|
});
|
||||||
|
|
||||||
if (invitedUser.inviteEmail) {
|
if (invitedUser.inviteEmail) {
|
||||||
await smtpService.sendMail({
|
await delayMs(Math.max(0, applyJitter(0, 2000)));
|
||||||
template: SmtpTemplates.OrgInvite,
|
|
||||||
subjectLine: `Reminder: You have been invited to ${org.name} on Infisical`,
|
|
||||||
recipients: [invitedUser.inviteEmail],
|
|
||||||
substitutions: {
|
|
||||||
organizationName: org.name,
|
|
||||||
email: invitedUser.inviteEmail,
|
|
||||||
organizationId: org.id.toString(),
|
|
||||||
token,
|
|
||||||
callback_url: `${appCfg.SITE_URL}/signupinvite`
|
|
||||||
}
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
await orgMembershipDAL.updateById(invitedUser.id, {
|
try {
|
||||||
lastInvitedAt: new Date()
|
await smtpService.sendMail({
|
||||||
});
|
template: SmtpTemplates.OrgInvite,
|
||||||
|
subjectLine: `Reminder: You have been invited to ${org.name} on Infisical`,
|
||||||
|
recipients: [invitedUser.inviteEmail],
|
||||||
|
substitutions: {
|
||||||
|
organizationName: org.name,
|
||||||
|
email: invitedUser.inviteEmail,
|
||||||
|
organizationId: org.id.toString(),
|
||||||
|
token,
|
||||||
|
callback_url: `${appCfg.SITE_URL}/signupinvite`
|
||||||
|
}
|
||||||
|
});
|
||||||
|
notifiedUsers.push(invitedUser.id);
|
||||||
|
} catch (err) {
|
||||||
|
logger.error(err, `${QueueName.DailyResourceCleanUp}: notify invited users failed to send email`);
|
||||||
|
}
|
||||||
|
}
|
||||||
})
|
})
|
||||||
);
|
);
|
||||||
|
|
||||||
|
await orgMembershipDAL.updateLastInvitedAtByIds(notifiedUsers);
|
||||||
|
|
||||||
|
logger.info(`${QueueName.DailyResourceCleanUp}: notify invited users completed`);
|
||||||
};
|
};
|
||||||
|
|
||||||
return {
|
return {
|
||||||
|
@ -214,7 +214,7 @@ export const secretFolderServiceFactory = ({
|
|||||||
}
|
}
|
||||||
},
|
},
|
||||||
message: "Folder created",
|
message: "Folder created",
|
||||||
folderId: doc.id,
|
folderId: parentFolder.id,
|
||||||
changes: [
|
changes: [
|
||||||
{
|
{
|
||||||
type: CommitType.ADD,
|
type: CommitType.ADD,
|
||||||
|
@ -0,0 +1,10 @@
|
|||||||
|
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
|
||||||
|
import { SecretSync } from "@app/services/secret-sync/secret-sync-enums";
|
||||||
|
import { TSecretSyncListItem } from "@app/services/secret-sync/secret-sync-types";
|
||||||
|
|
||||||
|
export const RAILWAY_SYNC_LIST_OPTION: TSecretSyncListItem = {
|
||||||
|
name: "Railway",
|
||||||
|
destination: SecretSync.Railway,
|
||||||
|
connection: AppConnection.Railway,
|
||||||
|
canImportSecrets: true
|
||||||
|
};
|
124
backend/src/services/secret-sync/railway/railway-sync-fns.ts
Normal file
124
backend/src/services/secret-sync/railway/railway-sync-fns.ts
Normal file
@ -0,0 +1,124 @@
|
|||||||
|
/* eslint-disable @typescript-eslint/no-unsafe-member-access */
|
||||||
|
/* eslint-disable @typescript-eslint/no-unsafe-assignment */
|
||||||
|
|
||||||
|
import { RailwayPublicAPI } from "@app/services/app-connection/railway/railway-connection-public-client";
|
||||||
|
import { matchesSchema } from "@app/services/secret-sync/secret-sync-fns";
|
||||||
|
|
||||||
|
import { SecretSyncError } from "../secret-sync-errors";
|
||||||
|
import { TSecretMap } from "../secret-sync-types";
|
||||||
|
import { TRailwaySyncWithCredentials } from "./railway-sync-types";
|
||||||
|
|
||||||
|
export const RailwaySyncFns = {
|
||||||
|
async getSecrets(secretSync: TRailwaySyncWithCredentials): Promise<TSecretMap> {
|
||||||
|
try {
|
||||||
|
const config = secretSync.destinationConfig;
|
||||||
|
|
||||||
|
const variables = await RailwayPublicAPI.getVariables(secretSync.connection, {
|
||||||
|
projectId: config.projectId,
|
||||||
|
environmentId: config.environmentId,
|
||||||
|
serviceId: config.serviceId || undefined
|
||||||
|
});
|
||||||
|
|
||||||
|
const entries = {} as TSecretMap;
|
||||||
|
|
||||||
|
for (const [key, value] of Object.entries(variables)) {
|
||||||
|
// Skip importing private railway variables
|
||||||
|
// eslint-disable-next-line no-continue
|
||||||
|
if (key.startsWith("RAILWAY_")) continue;
|
||||||
|
|
||||||
|
entries[key] = {
|
||||||
|
value
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
return entries;
|
||||||
|
} catch (error) {
|
||||||
|
throw new SecretSyncError({
|
||||||
|
error,
|
||||||
|
message: "Failed to import secrets from Railway"
|
||||||
|
});
|
||||||
|
}
|
||||||
|
},
|
||||||
|
|
||||||
|
async syncSecrets(secretSync: TRailwaySyncWithCredentials, secretMap: TSecretMap) {
|
||||||
|
const {
|
||||||
|
environment,
|
||||||
|
syncOptions: { disableSecretDeletion, keySchema }
|
||||||
|
} = secretSync;
|
||||||
|
const railwaySecrets = await this.getSecrets(secretSync);
|
||||||
|
const config = secretSync.destinationConfig;
|
||||||
|
|
||||||
|
for await (const key of Object.keys(secretMap)) {
|
||||||
|
try {
|
||||||
|
const existing = railwaySecrets[key];
|
||||||
|
|
||||||
|
if (existing === undefined || existing.value !== secretMap[key].value) {
|
||||||
|
await RailwayPublicAPI.upsertVariable(secretSync.connection, {
|
||||||
|
input: {
|
||||||
|
projectId: config.projectId,
|
||||||
|
environmentId: config.environmentId,
|
||||||
|
serviceId: config.serviceId || undefined,
|
||||||
|
name: key,
|
||||||
|
value: secretMap[key].value ?? ""
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
throw new SecretSyncError({
|
||||||
|
error,
|
||||||
|
secretKey: key
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (disableSecretDeletion) return;
|
||||||
|
|
||||||
|
for await (const key of Object.keys(railwaySecrets)) {
|
||||||
|
try {
|
||||||
|
// eslint-disable-next-line no-continue
|
||||||
|
if (!matchesSchema(key, environment?.slug || "", keySchema)) continue;
|
||||||
|
|
||||||
|
if (!secretMap[key]) {
|
||||||
|
await RailwayPublicAPI.deleteVariable(secretSync.connection, {
|
||||||
|
input: {
|
||||||
|
projectId: config.projectId,
|
||||||
|
environmentId: config.environmentId,
|
||||||
|
serviceId: config.serviceId || undefined,
|
||||||
|
name: key
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
throw new SecretSyncError({
|
||||||
|
error,
|
||||||
|
secretKey: key
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
|
||||||
|
async removeSecrets(secretSync: TRailwaySyncWithCredentials, secretMap: TSecretMap) {
|
||||||
|
const existing = await this.getSecrets(secretSync);
|
||||||
|
const config = secretSync.destinationConfig;
|
||||||
|
|
||||||
|
for await (const secret of Object.keys(existing)) {
|
||||||
|
try {
|
||||||
|
if (secret in secretMap) {
|
||||||
|
await RailwayPublicAPI.deleteVariable(secretSync.connection, {
|
||||||
|
input: {
|
||||||
|
projectId: config.projectId,
|
||||||
|
environmentId: config.environmentId,
|
||||||
|
serviceId: config.serviceId || undefined,
|
||||||
|
name: secret
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
throw new SecretSyncError({
|
||||||
|
error,
|
||||||
|
secretKey: secret
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
};
|
@ -0,0 +1,56 @@
|
|||||||
|
import { z } from "zod";
|
||||||
|
|
||||||
|
import { SecretSyncs } from "@app/lib/api-docs";
|
||||||
|
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
|
||||||
|
import { SecretSync } from "@app/services/secret-sync/secret-sync-enums";
|
||||||
|
import {
|
||||||
|
BaseSecretSyncSchema,
|
||||||
|
GenericCreateSecretSyncFieldsSchema,
|
||||||
|
GenericUpdateSecretSyncFieldsSchema
|
||||||
|
} from "@app/services/secret-sync/secret-sync-schemas";
|
||||||
|
import { TSyncOptionsConfig } from "@app/services/secret-sync/secret-sync-types";
|
||||||
|
|
||||||
|
const RailwaySyncDestinationConfigSchema = z.object({
|
||||||
|
projectId: z
|
||||||
|
.string()
|
||||||
|
.trim()
|
||||||
|
.min(1, "Railway project ID required")
|
||||||
|
.describe(SecretSyncs.DESTINATION_CONFIG.RAILWAY.projectId),
|
||||||
|
projectName: z.string().trim().describe(SecretSyncs.DESTINATION_CONFIG.RAILWAY.projectName),
|
||||||
|
environmentId: z
|
||||||
|
.string()
|
||||||
|
.trim()
|
||||||
|
.min(1, "Railway environment ID required")
|
||||||
|
.describe(SecretSyncs.DESTINATION_CONFIG.RAILWAY.environmentId),
|
||||||
|
environmentName: z.string().trim().describe(SecretSyncs.DESTINATION_CONFIG.RAILWAY.environmentName),
|
||||||
|
serviceId: z.string().optional().describe(SecretSyncs.DESTINATION_CONFIG.RAILWAY.serviceId),
|
||||||
|
serviceName: z.string().optional().describe(SecretSyncs.DESTINATION_CONFIG.RAILWAY.serviceName)
|
||||||
|
});
|
||||||
|
|
||||||
|
const RailwaySyncOptionsConfig: TSyncOptionsConfig = { canImportSecrets: true };
|
||||||
|
|
||||||
|
export const RailwaySyncSchema = BaseSecretSyncSchema(SecretSync.Railway, RailwaySyncOptionsConfig).extend({
|
||||||
|
destination: z.literal(SecretSync.Railway),
|
||||||
|
destinationConfig: RailwaySyncDestinationConfigSchema
|
||||||
|
});
|
||||||
|
|
||||||
|
export const CreateRailwaySyncSchema = GenericCreateSecretSyncFieldsSchema(
|
||||||
|
SecretSync.Railway,
|
||||||
|
RailwaySyncOptionsConfig
|
||||||
|
).extend({
|
||||||
|
destinationConfig: RailwaySyncDestinationConfigSchema
|
||||||
|
});
|
||||||
|
|
||||||
|
export const UpdateRailwaySyncSchema = GenericUpdateSecretSyncFieldsSchema(
|
||||||
|
SecretSync.Railway,
|
||||||
|
RailwaySyncOptionsConfig
|
||||||
|
).extend({
|
||||||
|
destinationConfig: RailwaySyncDestinationConfigSchema.optional()
|
||||||
|
});
|
||||||
|
|
||||||
|
export const RailwaySyncListItemSchema = z.object({
|
||||||
|
name: z.literal("Railway"),
|
||||||
|
connection: z.literal(AppConnection.Railway),
|
||||||
|
destination: z.literal(SecretSync.Railway),
|
||||||
|
canImportSecrets: z.literal(true)
|
||||||
|
});
|
@ -0,0 +1,31 @@
|
|||||||
|
import z from "zod";
|
||||||
|
|
||||||
|
import { TRailwayConnection } from "@app/services/app-connection/railway";
|
||||||
|
|
||||||
|
import { CreateRailwaySyncSchema, RailwaySyncListItemSchema, RailwaySyncSchema } from "./railway-sync-schemas";
|
||||||
|
|
||||||
|
export type TRailwaySyncListItem = z.infer<typeof RailwaySyncListItemSchema>;
|
||||||
|
|
||||||
|
export type TRailwaySync = z.infer<typeof RailwaySyncSchema>;
|
||||||
|
|
||||||
|
export type TRailwaySyncInput = z.infer<typeof CreateRailwaySyncSchema>;
|
||||||
|
|
||||||
|
export type TRailwaySyncWithCredentials = TRailwaySync & {
|
||||||
|
connection: TRailwayConnection;
|
||||||
|
};
|
||||||
|
|
||||||
|
export type TRailwaySecret = {
|
||||||
|
createdAt: string;
|
||||||
|
environmentId?: string | null;
|
||||||
|
id: string;
|
||||||
|
isSealed: boolean;
|
||||||
|
name: string;
|
||||||
|
serviceId?: string | null;
|
||||||
|
updatedAt: string;
|
||||||
|
};
|
||||||
|
|
||||||
|
export type TRailwayVariablesGraphResponse = {
|
||||||
|
data: {
|
||||||
|
variables: Record<string, string>;
|
||||||
|
};
|
||||||
|
};
|
@ -21,7 +21,8 @@ export enum SecretSync {
|
|||||||
Flyio = "flyio",
|
Flyio = "flyio",
|
||||||
GitLab = "gitlab",
|
GitLab = "gitlab",
|
||||||
CloudflarePages = "cloudflare-pages",
|
CloudflarePages = "cloudflare-pages",
|
||||||
Zabbix = "zabbix"
|
Zabbix = "zabbix",
|
||||||
|
Railway = "railway"
|
||||||
}
|
}
|
||||||
|
|
||||||
export enum SecretSyncInitialSyncBehavior {
|
export enum SecretSyncInitialSyncBehavior {
|
||||||
|
@ -39,6 +39,8 @@ import { HC_VAULT_SYNC_LIST_OPTION, HCVaultSyncFns } from "./hc-vault";
|
|||||||
import { HEROKU_SYNC_LIST_OPTION, HerokuSyncFns } from "./heroku";
|
import { HEROKU_SYNC_LIST_OPTION, HerokuSyncFns } from "./heroku";
|
||||||
import { HUMANITEC_SYNC_LIST_OPTION } from "./humanitec";
|
import { HUMANITEC_SYNC_LIST_OPTION } from "./humanitec";
|
||||||
import { HumanitecSyncFns } from "./humanitec/humanitec-sync-fns";
|
import { HumanitecSyncFns } from "./humanitec/humanitec-sync-fns";
|
||||||
|
import { RAILWAY_SYNC_LIST_OPTION } from "./railway/railway-sync-constants";
|
||||||
|
import { RailwaySyncFns } from "./railway/railway-sync-fns";
|
||||||
import { RENDER_SYNC_LIST_OPTION, RenderSyncFns } from "./render";
|
import { RENDER_SYNC_LIST_OPTION, RenderSyncFns } from "./render";
|
||||||
import { SECRET_SYNC_PLAN_MAP } from "./secret-sync-maps";
|
import { SECRET_SYNC_PLAN_MAP } from "./secret-sync-maps";
|
||||||
import { TEAMCITY_SYNC_LIST_OPTION, TeamCitySyncFns } from "./teamcity";
|
import { TEAMCITY_SYNC_LIST_OPTION, TeamCitySyncFns } from "./teamcity";
|
||||||
@ -70,7 +72,8 @@ const SECRET_SYNC_LIST_OPTIONS: Record<SecretSync, TSecretSyncListItem> = {
|
|||||||
[SecretSync.Flyio]: FLYIO_SYNC_LIST_OPTION,
|
[SecretSync.Flyio]: FLYIO_SYNC_LIST_OPTION,
|
||||||
[SecretSync.GitLab]: GITLAB_SYNC_LIST_OPTION,
|
[SecretSync.GitLab]: GITLAB_SYNC_LIST_OPTION,
|
||||||
[SecretSync.CloudflarePages]: CLOUDFLARE_PAGES_SYNC_LIST_OPTION,
|
[SecretSync.CloudflarePages]: CLOUDFLARE_PAGES_SYNC_LIST_OPTION,
|
||||||
[SecretSync.Zabbix]: ZABBIX_SYNC_LIST_OPTION
|
[SecretSync.Zabbix]: ZABBIX_SYNC_LIST_OPTION,
|
||||||
|
[SecretSync.Railway]: RAILWAY_SYNC_LIST_OPTION
|
||||||
};
|
};
|
||||||
|
|
||||||
export const listSecretSyncOptions = () => {
|
export const listSecretSyncOptions = () => {
|
||||||
@ -240,6 +243,8 @@ export const SecretSyncFns = {
|
|||||||
return CloudflarePagesSyncFns.syncSecrets(secretSync, schemaSecretMap);
|
return CloudflarePagesSyncFns.syncSecrets(secretSync, schemaSecretMap);
|
||||||
case SecretSync.Zabbix:
|
case SecretSync.Zabbix:
|
||||||
return ZabbixSyncFns.syncSecrets(secretSync, schemaSecretMap);
|
return ZabbixSyncFns.syncSecrets(secretSync, schemaSecretMap);
|
||||||
|
case SecretSync.Railway:
|
||||||
|
return RailwaySyncFns.syncSecrets(secretSync, schemaSecretMap);
|
||||||
default:
|
default:
|
||||||
throw new Error(
|
throw new Error(
|
||||||
`Unhandled sync destination for sync secrets fns: ${(secretSync as TSecretSyncWithCredentials).destination}`
|
`Unhandled sync destination for sync secrets fns: ${(secretSync as TSecretSyncWithCredentials).destination}`
|
||||||
@ -335,6 +340,9 @@ export const SecretSyncFns = {
|
|||||||
case SecretSync.Zabbix:
|
case SecretSync.Zabbix:
|
||||||
secretMap = await ZabbixSyncFns.getSecrets(secretSync);
|
secretMap = await ZabbixSyncFns.getSecrets(secretSync);
|
||||||
break;
|
break;
|
||||||
|
case SecretSync.Railway:
|
||||||
|
secretMap = await RailwaySyncFns.getSecrets(secretSync);
|
||||||
|
break;
|
||||||
default:
|
default:
|
||||||
throw new Error(
|
throw new Error(
|
||||||
`Unhandled sync destination for get secrets fns: ${(secretSync as TSecretSyncWithCredentials).destination}`
|
`Unhandled sync destination for get secrets fns: ${(secretSync as TSecretSyncWithCredentials).destination}`
|
||||||
@ -414,6 +422,8 @@ export const SecretSyncFns = {
|
|||||||
return CloudflarePagesSyncFns.removeSecrets(secretSync, schemaSecretMap);
|
return CloudflarePagesSyncFns.removeSecrets(secretSync, schemaSecretMap);
|
||||||
case SecretSync.Zabbix:
|
case SecretSync.Zabbix:
|
||||||
return ZabbixSyncFns.removeSecrets(secretSync, schemaSecretMap);
|
return ZabbixSyncFns.removeSecrets(secretSync, schemaSecretMap);
|
||||||
|
case SecretSync.Railway:
|
||||||
|
return RailwaySyncFns.removeSecrets(secretSync, schemaSecretMap);
|
||||||
default:
|
default:
|
||||||
throw new Error(
|
throw new Error(
|
||||||
`Unhandled sync destination for remove secrets fns: ${(secretSync as TSecretSyncWithCredentials).destination}`
|
`Unhandled sync destination for remove secrets fns: ${(secretSync as TSecretSyncWithCredentials).destination}`
|
||||||
|
@ -24,7 +24,8 @@ export const SECRET_SYNC_NAME_MAP: Record<SecretSync, string> = {
|
|||||||
[SecretSync.Flyio]: "Fly.io",
|
[SecretSync.Flyio]: "Fly.io",
|
||||||
[SecretSync.GitLab]: "GitLab",
|
[SecretSync.GitLab]: "GitLab",
|
||||||
[SecretSync.CloudflarePages]: "Cloudflare Pages",
|
[SecretSync.CloudflarePages]: "Cloudflare Pages",
|
||||||
[SecretSync.Zabbix]: "Zabbix"
|
[SecretSync.Zabbix]: "Zabbix",
|
||||||
|
[SecretSync.Railway]: "Railway"
|
||||||
};
|
};
|
||||||
|
|
||||||
export const SECRET_SYNC_CONNECTION_MAP: Record<SecretSync, AppConnection> = {
|
export const SECRET_SYNC_CONNECTION_MAP: Record<SecretSync, AppConnection> = {
|
||||||
@ -50,7 +51,8 @@ export const SECRET_SYNC_CONNECTION_MAP: Record<SecretSync, AppConnection> = {
|
|||||||
[SecretSync.Flyio]: AppConnection.Flyio,
|
[SecretSync.Flyio]: AppConnection.Flyio,
|
||||||
[SecretSync.GitLab]: AppConnection.GitLab,
|
[SecretSync.GitLab]: AppConnection.GitLab,
|
||||||
[SecretSync.CloudflarePages]: AppConnection.Cloudflare,
|
[SecretSync.CloudflarePages]: AppConnection.Cloudflare,
|
||||||
[SecretSync.Zabbix]: AppConnection.Zabbix
|
[SecretSync.Zabbix]: AppConnection.Zabbix,
|
||||||
|
[SecretSync.Railway]: AppConnection.Railway
|
||||||
};
|
};
|
||||||
|
|
||||||
export const SECRET_SYNC_PLAN_MAP: Record<SecretSync, SecretSyncPlanType> = {
|
export const SECRET_SYNC_PLAN_MAP: Record<SecretSync, SecretSyncPlanType> = {
|
||||||
@ -76,5 +78,6 @@ export const SECRET_SYNC_PLAN_MAP: Record<SecretSync, SecretSyncPlanType> = {
|
|||||||
[SecretSync.Flyio]: SecretSyncPlanType.Regular,
|
[SecretSync.Flyio]: SecretSyncPlanType.Regular,
|
||||||
[SecretSync.GitLab]: SecretSyncPlanType.Regular,
|
[SecretSync.GitLab]: SecretSyncPlanType.Regular,
|
||||||
[SecretSync.CloudflarePages]: SecretSyncPlanType.Regular,
|
[SecretSync.CloudflarePages]: SecretSyncPlanType.Regular,
|
||||||
[SecretSync.Zabbix]: SecretSyncPlanType.Regular
|
[SecretSync.Zabbix]: SecretSyncPlanType.Regular,
|
||||||
|
[SecretSync.Railway]: SecretSyncPlanType.Regular
|
||||||
};
|
};
|
||||||
|
@ -94,6 +94,12 @@ import {
|
|||||||
THumanitecSyncListItem,
|
THumanitecSyncListItem,
|
||||||
THumanitecSyncWithCredentials
|
THumanitecSyncWithCredentials
|
||||||
} from "./humanitec";
|
} from "./humanitec";
|
||||||
|
import {
|
||||||
|
TRailwaySync,
|
||||||
|
TRailwaySyncInput,
|
||||||
|
TRailwaySyncListItem,
|
||||||
|
TRailwaySyncWithCredentials
|
||||||
|
} from "./railway/railway-sync-types";
|
||||||
import {
|
import {
|
||||||
TRenderSync,
|
TRenderSync,
|
||||||
TRenderSyncInput,
|
TRenderSyncInput,
|
||||||
@ -138,7 +144,8 @@ export type TSecretSync =
|
|||||||
| TFlyioSync
|
| TFlyioSync
|
||||||
| TGitLabSync
|
| TGitLabSync
|
||||||
| TCloudflarePagesSync
|
| TCloudflarePagesSync
|
||||||
| TZabbixSync;
|
| TZabbixSync
|
||||||
|
| TRailwaySync;
|
||||||
|
|
||||||
export type TSecretSyncWithCredentials =
|
export type TSecretSyncWithCredentials =
|
||||||
| TAwsParameterStoreSyncWithCredentials
|
| TAwsParameterStoreSyncWithCredentials
|
||||||
@ -163,7 +170,8 @@ export type TSecretSyncWithCredentials =
|
|||||||
| TFlyioSyncWithCredentials
|
| TFlyioSyncWithCredentials
|
||||||
| TGitLabSyncWithCredentials
|
| TGitLabSyncWithCredentials
|
||||||
| TCloudflarePagesSyncWithCredentials
|
| TCloudflarePagesSyncWithCredentials
|
||||||
| TZabbixSyncWithCredentials;
|
| TZabbixSyncWithCredentials
|
||||||
|
| TRailwaySyncWithCredentials;
|
||||||
|
|
||||||
export type TSecretSyncInput =
|
export type TSecretSyncInput =
|
||||||
| TAwsParameterStoreSyncInput
|
| TAwsParameterStoreSyncInput
|
||||||
@ -188,7 +196,8 @@ export type TSecretSyncInput =
|
|||||||
| TFlyioSyncInput
|
| TFlyioSyncInput
|
||||||
| TGitLabSyncInput
|
| TGitLabSyncInput
|
||||||
| TCloudflarePagesSyncInput
|
| TCloudflarePagesSyncInput
|
||||||
| TZabbixSyncInput;
|
| TZabbixSyncInput
|
||||||
|
| TRailwaySyncInput;
|
||||||
|
|
||||||
export type TSecretSyncListItem =
|
export type TSecretSyncListItem =
|
||||||
| TAwsParameterStoreSyncListItem
|
| TAwsParameterStoreSyncListItem
|
||||||
@ -213,7 +222,8 @@ export type TSecretSyncListItem =
|
|||||||
| TFlyioSyncListItem
|
| TFlyioSyncListItem
|
||||||
| TGitLabSyncListItem
|
| TGitLabSyncListItem
|
||||||
| TCloudflarePagesSyncListItem
|
| TCloudflarePagesSyncListItem
|
||||||
| TZabbixSyncListItem;
|
| TZabbixSyncListItem
|
||||||
|
| TRailwaySyncListItem;
|
||||||
|
|
||||||
export type TSyncOptionsConfig = {
|
export type TSyncOptionsConfig = {
|
||||||
canImportSecrets: boolean;
|
canImportSecrets: boolean;
|
||||||
|
@ -71,6 +71,15 @@ export const telemetryQueueServiceFactory = ({
|
|||||||
QueueName.TelemetryInstanceStats // just a job id
|
QueueName.TelemetryInstanceStats // just a job id
|
||||||
);
|
);
|
||||||
|
|
||||||
|
if (postHog) {
|
||||||
|
await queueService.queue(QueueName.TelemetryInstanceStats, QueueJobs.TelemetryInstanceStats, undefined, {
|
||||||
|
jobId: QueueName.TelemetryInstanceStats,
|
||||||
|
repeat: { pattern: "0 0 * * *", utc: true }
|
||||||
|
});
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
const startAggregatedEventsJob = async () => {
|
||||||
// clear previous aggregated events job
|
// clear previous aggregated events job
|
||||||
await queueService.stopRepeatableJob(
|
await queueService.stopRepeatableJob(
|
||||||
QueueName.TelemetryAggregatedEvents,
|
QueueName.TelemetryAggregatedEvents,
|
||||||
@ -80,11 +89,6 @@ export const telemetryQueueServiceFactory = ({
|
|||||||
);
|
);
|
||||||
|
|
||||||
if (postHog) {
|
if (postHog) {
|
||||||
await queueService.queue(QueueName.TelemetryInstanceStats, QueueJobs.TelemetryInstanceStats, undefined, {
|
|
||||||
jobId: QueueName.TelemetryInstanceStats,
|
|
||||||
repeat: { pattern: "0 0 * * *", utc: true }
|
|
||||||
});
|
|
||||||
|
|
||||||
// Start aggregated events job (runs every five minutes)
|
// Start aggregated events job (runs every five minutes)
|
||||||
await queueService.queue(QueueName.TelemetryAggregatedEvents, QueueJobs.TelemetryAggregatedEvents, undefined, {
|
await queueService.queue(QueueName.TelemetryAggregatedEvents, QueueJobs.TelemetryAggregatedEvents, undefined, {
|
||||||
jobId: QueueName.TelemetryAggregatedEvents,
|
jobId: QueueName.TelemetryAggregatedEvents,
|
||||||
@ -102,6 +106,7 @@ export const telemetryQueueServiceFactory = ({
|
|||||||
});
|
});
|
||||||
|
|
||||||
return {
|
return {
|
||||||
startTelemetryCheck
|
startTelemetryCheck,
|
||||||
|
startAggregatedEventsJob
|
||||||
};
|
};
|
||||||
};
|
};
|
||||||
|
@ -14,7 +14,7 @@ export const TELEMETRY_SECRET_PROCESSED_KEY = "telemetry-secret-processed";
|
|||||||
export const TELEMETRY_SECRET_OPERATIONS_KEY = "telemetry-secret-operations";
|
export const TELEMETRY_SECRET_OPERATIONS_KEY = "telemetry-secret-operations";
|
||||||
|
|
||||||
export const POSTHOG_AGGREGATED_EVENTS = [PostHogEventTypes.SecretPulled];
|
export const POSTHOG_AGGREGATED_EVENTS = [PostHogEventTypes.SecretPulled];
|
||||||
const TELEMETRY_AGGREGATED_KEY_EXP = 900; // 15mins
|
const TELEMETRY_AGGREGATED_KEY_EXP = 600; // 10mins
|
||||||
|
|
||||||
// Bucket configuration
|
// Bucket configuration
|
||||||
const TELEMETRY_BUCKET_COUNT = 30;
|
const TELEMETRY_BUCKET_COUNT = 30;
|
||||||
@ -102,13 +102,6 @@ To opt into telemetry, you can set "TELEMETRY_ENABLED=true" within the environme
|
|||||||
const instanceType = licenseService.getInstanceType();
|
const instanceType = licenseService.getInstanceType();
|
||||||
// capture posthog only when its cloud or signup event happens in self-hosted
|
// capture posthog only when its cloud or signup event happens in self-hosted
|
||||||
if (instanceType === InstanceType.Cloud || event.event === PostHogEventTypes.UserSignedUp) {
|
if (instanceType === InstanceType.Cloud || event.event === PostHogEventTypes.UserSignedUp) {
|
||||||
if (event.organizationId) {
|
|
||||||
try {
|
|
||||||
postHog.groupIdentify({ groupType: "organization", groupKey: event.organizationId });
|
|
||||||
} catch (error) {
|
|
||||||
logger.error(error, "Failed to identify PostHog organization");
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if (POSTHOG_AGGREGATED_EVENTS.includes(event.event)) {
|
if (POSTHOG_AGGREGATED_EVENTS.includes(event.event)) {
|
||||||
const eventKey = createTelemetryEventKey(event.event, event.distinctId);
|
const eventKey = createTelemetryEventKey(event.event, event.distinctId);
|
||||||
await keyStore.setItemWithExpiry(
|
await keyStore.setItemWithExpiry(
|
||||||
@ -122,6 +115,13 @@ To opt into telemetry, you can set "TELEMETRY_ENABLED=true" within the environme
|
|||||||
})
|
})
|
||||||
);
|
);
|
||||||
} else {
|
} else {
|
||||||
|
if (event.organizationId) {
|
||||||
|
try {
|
||||||
|
postHog.groupIdentify({ groupType: "organization", groupKey: event.organizationId });
|
||||||
|
} catch (error) {
|
||||||
|
logger.error(error, "Failed to identify PostHog organization");
|
||||||
|
}
|
||||||
|
}
|
||||||
postHog.capture({
|
postHog.capture({
|
||||||
event: event.event,
|
event: event.event,
|
||||||
distinctId: event.distinctId,
|
distinctId: event.distinctId,
|
||||||
|
@ -35,6 +35,7 @@ const (
|
|||||||
GitHubPlatform
|
GitHubPlatform
|
||||||
GitLabPlatform
|
GitLabPlatform
|
||||||
AzureDevOpsPlatform
|
AzureDevOpsPlatform
|
||||||
|
BitBucketPlatform
|
||||||
// TODO: Add others.
|
// TODO: Add others.
|
||||||
)
|
)
|
||||||
|
|
||||||
@ -45,6 +46,7 @@ func (p Platform) String() string {
|
|||||||
"github",
|
"github",
|
||||||
"gitlab",
|
"gitlab",
|
||||||
"azuredevops",
|
"azuredevops",
|
||||||
|
"bitbucket",
|
||||||
}[p]
|
}[p]
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -60,6 +62,8 @@ func PlatformFromString(s string) (Platform, error) {
|
|||||||
return GitLabPlatform, nil
|
return GitLabPlatform, nil
|
||||||
case "azuredevops":
|
case "azuredevops":
|
||||||
return AzureDevOpsPlatform, nil
|
return AzureDevOpsPlatform, nil
|
||||||
|
case "bitbucket":
|
||||||
|
return BitBucketPlatform, nil
|
||||||
default:
|
default:
|
||||||
return UnknownPlatform, fmt.Errorf("invalid scm platform value: %s", s)
|
return UnknownPlatform, fmt.Errorf("invalid scm platform value: %s", s)
|
||||||
}
|
}
|
||||||
|
@ -208,6 +208,8 @@ func platformFromHost(u *url.URL) scm.Platform {
|
|||||||
return scm.GitLabPlatform
|
return scm.GitLabPlatform
|
||||||
case "dev.azure.com", "visualstudio.com":
|
case "dev.azure.com", "visualstudio.com":
|
||||||
return scm.AzureDevOpsPlatform
|
return scm.AzureDevOpsPlatform
|
||||||
|
case "bitbucket.org":
|
||||||
|
return scm.BitBucketPlatform
|
||||||
default:
|
default:
|
||||||
return scm.UnknownPlatform
|
return scm.UnknownPlatform
|
||||||
}
|
}
|
||||||
|
@ -112,6 +112,15 @@ func createScmLink(scmPlatform scm.Platform, remoteUrl string, finding report.Fi
|
|||||||
// This is a bit dirty, but Azure DevOps does not highlight the line when the lineStartColumn and lineEndColumn are not provided
|
// This is a bit dirty, but Azure DevOps does not highlight the line when the lineStartColumn and lineEndColumn are not provided
|
||||||
link += "&lineStartColumn=1&lineEndColumn=10000000&type=2&lineStyle=plain&_a=files"
|
link += "&lineStartColumn=1&lineEndColumn=10000000&type=2&lineStyle=plain&_a=files"
|
||||||
return link
|
return link
|
||||||
|
case scm.BitBucketPlatform:
|
||||||
|
link := fmt.Sprintf("%s/src/%s/%s", remoteUrl, finding.Commit, filePath)
|
||||||
|
if finding.StartLine != 0 {
|
||||||
|
link += fmt.Sprintf("#lines-%d", finding.StartLine)
|
||||||
|
}
|
||||||
|
if finding.EndLine != finding.StartLine {
|
||||||
|
link += fmt.Sprintf(":%d", finding.EndLine)
|
||||||
|
}
|
||||||
|
return link
|
||||||
default:
|
default:
|
||||||
// This should never happen.
|
// This should never happen.
|
||||||
return ""
|
return ""
|
||||||
|
@ -337,9 +337,7 @@ var scanCmd = &cobra.Command{
|
|||||||
if gitCmd, err = sources.NewGitLogCmd(source, logOpts); err != nil {
|
if gitCmd, err = sources.NewGitLogCmd(source, logOpts); err != nil {
|
||||||
logging.Fatal().Err(err).Msg("could not create Git cmd")
|
logging.Fatal().Err(err).Msg("could not create Git cmd")
|
||||||
}
|
}
|
||||||
if scmPlatform, err = scm.PlatformFromString("github"); err != nil {
|
scmPlatform = scm.UnknownPlatform
|
||||||
logging.Fatal().Err(err).Send()
|
|
||||||
}
|
|
||||||
remote = detect.NewRemoteInfo(scmPlatform, source)
|
remote = detect.NewRemoteInfo(scmPlatform, source)
|
||||||
|
|
||||||
if findings, err = detector.DetectGit(gitCmd, remote); err != nil {
|
if findings, err = detector.DetectGit(gitCmd, remote); err != nil {
|
||||||
|
6
docs/Dockerfile
Normal file
6
docs/Dockerfile
Normal file
@ -0,0 +1,6 @@
|
|||||||
|
FROM node:20-alpine
|
||||||
|
WORKDIR /app
|
||||||
|
RUN npm install -g mint
|
||||||
|
COPY . .
|
||||||
|
EXPOSE 3000
|
||||||
|
CMD ["mint", "dev"]
|
@ -0,0 +1,4 @@
|
|||||||
|
---
|
||||||
|
title: "Available"
|
||||||
|
openapi: "GET /api/v1/app-connections/bitbucket/available"
|
||||||
|
---
|
@ -0,0 +1,8 @@
|
|||||||
|
---
|
||||||
|
title: "Create"
|
||||||
|
openapi: "POST /api/v1/app-connections/bitbucket"
|
||||||
|
---
|
||||||
|
|
||||||
|
<Note>
|
||||||
|
Check out the configuration docs for [Bitbucket Connections](/integrations/app-connections/bitbucket) to learn how to obtain the required credentials.
|
||||||
|
</Note>
|
@ -0,0 +1,4 @@
|
|||||||
|
---
|
||||||
|
title: "Delete"
|
||||||
|
openapi: "DELETE /api/v1/app-connections/bitbucket/{connectionId}"
|
||||||
|
---
|
@ -0,0 +1,4 @@
|
|||||||
|
---
|
||||||
|
title: "Get by ID"
|
||||||
|
openapi: "GET /api/v1/app-connections/bitbucket/{connectionId}"
|
||||||
|
---
|
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user