Mirror of https://github.com/Infisical/infisical.git (synced 2025-07-09 01:45:33 +00:00)

Compare commits: 113 commits, misc/updat... ... audit-log-...
Commits (SHA1):
a614b81a7a, c0b296ccd5, da82cfdf6b, 92147b5398, 526e184bd9, 9943312063, c2cefb2b0c, 7571c9b426,
bf707667b5, d2e6743f22, 9e896563ed, 64744d042d, 2648ac1c90, 22ae1aeee4, cd13733621, 0191eb48f3,
9d39910152, c5a8786d1c, 9137fa4ca5, 84687c0558, 78da7ec343, a678ebb4ac, 83dd38db49, 00d4ae9fbd,
218338e5d2, 456107fbf3, 2003f5b671, d2c6bcc7a7, 06bd593b60, aea43c0a8e, 06f5af1200, f903e5b3d4,
c6f8915d3f, 65b1354ef1, cda8579ca4, 5badb811e1, 7f8b489724, 8723a16913, b4593a2e11, 1b1acdcb0b,
1bbf78e295, a8f08730a1, 9af9050aa2, 3b767a4deb, 18f5f5d04e, 6a6f08fc4d, cc564119e0, 189b0dd5ee,
9cbef2c07b, 9a960a85cd, 2a9e31d305, fb2f1731dd, 42648a134c, defb66ce65, a3d06fdf1b, 9049c441d6,
51ecc9dfa0, 13c9879fb6, 8c6b903204, 23b20ebdab, 37d490ede3, edecfb1f62, ae35a863bc, 73025f5094,
82634983ce, af2f3017b7, a8f0eceeb9, 36ff5e054b, eff73f1810, 68357b5669, 03c2e93bea, 8c1f3837e7,
7b47d91cc1, c37afaa050, 811920f8bb, 7b295c5a21, 527a727c1c, 0139064aaa, a3859170fe, 62ad82f7b1,
02b97cbf5b, 8a65343f79, cf6181eb73, 984ffd2a53, a1c44bd7a2, d7860e2491, db33349f49, 7ab67db84d,
e14bb6b901, 3a17281e37, 91d6d5d07b, ac7b23da45, 3daae6f965, 833963af0c, aa560b8199, abfe185a5b,
9163da291e, f6c10683a5, 307e6900ee, 69157cb912, 44eb761d5b, 1a7b810bad, abbf541c9f, fcdd121a58,
5bfd92bf8d, 45af2c0b49, 13d2cbd8b0, abfc5736fd, 68abd0f044, f3c11a0a17, f4779de051, defe7b8f0b,
cf3113ac89
@@ -134,7 +134,7 @@ RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/x86_64-li
 
 # Install Infisical CLI
 RUN curl -1sLf 'https://artifacts-cli.infisical.com/setup.deb.sh' | bash \
-    && apt-get update && apt-get install -y infisical=0.41.2 \
+    && apt-get update && apt-get install -y infisical=0.41.89 \
     && rm -rf /var/lib/apt/lists/*
 
 RUN groupadd -r -g 1001 nodejs && useradd -r -u 1001 -g nodejs non-root-user

@@ -128,7 +128,7 @@ RUN apt-get update && apt-get install -y \
 
 # Install Infisical CLI
 RUN curl -1sLf 'https://artifacts-cli.infisical.com/setup.deb.sh' | bash \
-    && apt-get update && apt-get install -y infisical=0.41.2 \
+    && apt-get update && apt-get install -y infisical=0.41.89 \
     && rm -rf /var/lib/apt/lists/*
 
 WORKDIR /

@@ -55,7 +55,7 @@ COPY --from=build /app .
 
 # Install Infisical CLI
 RUN apt-get install -y curl bash && \
     curl -1sLf 'https://artifacts-cli.infisical.com/setup.deb.sh' | bash && \
-    apt-get update && apt-get install -y infisical=0.41.2 git
+    apt-get update && apt-get install -y infisical=0.41.89 git
 
 HEALTHCHECK --interval=10s --timeout=3s --start-period=10s \
     CMD node healthcheck.js

@@ -57,7 +57,7 @@ RUN mkdir -p /etc/softhsm2/tokens && \
 
 # Install Infisical CLI
 RUN curl -1sLf 'https://artifacts-cli.infisical.com/setup.deb.sh' | bash && \
     apt-get update && \
-    apt-get install -y infisical=0.41.2
+    apt-get install -y infisical=0.41.89
 
 WORKDIR /app

@@ -66,7 +66,7 @@ RUN wget https://www.openssl.org/source/openssl-3.1.2.tar.gz \
 
 # Install Infisical CLI
 RUN curl -1sLf 'https://artifacts-cli.infisical.com/setup.deb.sh' | bash && \
     apt-get update && \
-    apt-get install -y infisical=0.41.2
+    apt-get install -y infisical=0.41.89
 
 WORKDIR /app
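All five images install the CLI the same way, so this release is a one-line change per Dockerfile: the apt version pin moves from infisical=0.41.2 to infisical=0.41.89.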
backend/src/db/migrations/20250627010508_env-overrides.ts (new file, 21 lines)
@@ -0,0 +1,21 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  const hasColumn = await knex.schema.hasColumn(TableName.SuperAdmin, "encryptedEnvOverrides");
  if (!hasColumn) {
    await knex.schema.alterTable(TableName.SuperAdmin, (t) => {
      t.binary("encryptedEnvOverrides").nullable();
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  const hasColumn = await knex.schema.hasColumn(TableName.SuperAdmin, "encryptedEnvOverrides");
  if (hasColumn) {
    await knex.schema.alterTable(TableName.SuperAdmin, (t) => {
      t.dropColumn("encryptedEnvOverrides");
    });
  }
}
@@ -0,0 +1,21 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  const hasColumn = await knex.schema.hasColumn(TableName.OrgMembership, "lastInvitedAt");
  if (hasColumn) {
    await knex.schema.alterTable(TableName.OrgMembership, (t) => {
      t.datetime("lastInvitedAt").nullable().defaultTo(knex.fn.now()).alter();
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  const hasColumn = await knex.schema.hasColumn(TableName.OrgMembership, "lastInvitedAt");
  if (hasColumn) {
    await knex.schema.alterTable(TableName.OrgMembership, (t) => {
      t.datetime("lastInvitedAt").nullable().alter();
    });
  }
}
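Both migrations check hasColumn before altering the table, so re-running them is a no-op. A minimal sketch of applying them programmatically with Knex, assuming a Postgres connection string in the environment (the directory path and config shape below are illustrative, not the app's actual bootstrap):

import knex from "knex";

// Illustrative config; the real connection settings come from the app's env handling.
const db = knex({
  client: "pg",
  connection: process.env.DB_CONNECTION_URI,
  migrations: { directory: "./backend/src/db/migrations", extension: "ts" }
});

// Applies every pending migration; the hasColumn guards above make reruns no-ops.
await db.migrate.latest();
await db.destroy();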
@@ -34,7 +34,8 @@ export const SuperAdminSchema = z.object({
   encryptedGitHubAppConnectionClientSecret: zodBuffer.nullable().optional(),
   encryptedGitHubAppConnectionSlug: zodBuffer.nullable().optional(),
   encryptedGitHubAppConnectionId: zodBuffer.nullable().optional(),
-  encryptedGitHubAppConnectionPrivateKey: zodBuffer.nullable().optional()
+  encryptedGitHubAppConnectionPrivateKey: zodBuffer.nullable().optional(),
+  encryptedEnvOverrides: zodBuffer.nullable().optional()
 });
 
 export type TSuperAdmin = z.infer<typeof SuperAdminSchema>;
@@ -17,6 +17,7 @@ import { z } from "zod";
 import { LdapGroupMapsSchema } from "@app/db/schemas";
 import { TLDAPConfig } from "@app/ee/services/ldap-config/ldap-config-types";
 import { isValidLdapFilter, searchGroups } from "@app/ee/services/ldap-config/ldap-fns";
+import { ApiDocsTags, LdapSso } from "@app/lib/api-docs";
 import { getConfig } from "@app/lib/config/env";
 import { BadRequestError } from "@app/lib/errors";
 import { logger } from "@app/lib/logger";

@@ -132,10 +133,18 @@ export const registerLdapRouter = async (server: FastifyZodProvider) => {
     config: {
       rateLimit: readLimit
     },
-    onRequest: verifyAuth([AuthMode.JWT]),
+    onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
     schema: {
+      hide: false,
+      tags: [ApiDocsTags.LdapSso],
+      description: "Get LDAP config",
+      security: [
+        {
+          bearerAuth: []
+        }
+      ],
       querystring: z.object({
-        organizationId: z.string().trim()
+        organizationId: z.string().trim().describe(LdapSso.GET_CONFIG.organizationId)
       }),
       response: {
         200: z.object({

@@ -172,23 +181,32 @@ export const registerLdapRouter = async (server: FastifyZodProvider) => {
     config: {
       rateLimit: writeLimit
     },
-    onRequest: verifyAuth([AuthMode.JWT]),
+    onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
     schema: {
+      hide: false,
+      tags: [ApiDocsTags.LdapSso],
+      description: "Create LDAP config",
+      security: [
+        {
+          bearerAuth: []
+        }
+      ],
       body: z.object({
-        organizationId: z.string().trim(),
-        isActive: z.boolean(),
-        url: z.string().trim(),
-        bindDN: z.string().trim(),
-        bindPass: z.string().trim(),
-        uniqueUserAttribute: z.string().trim().default("uidNumber"),
-        searchBase: z.string().trim(),
-        searchFilter: z.string().trim().default("(uid={{username}})"),
-        groupSearchBase: z.string().trim(),
+        organizationId: z.string().trim().describe(LdapSso.CREATE_CONFIG.organizationId),
+        isActive: z.boolean().describe(LdapSso.CREATE_CONFIG.isActive),
+        url: z.string().trim().describe(LdapSso.CREATE_CONFIG.url),
+        bindDN: z.string().trim().describe(LdapSso.CREATE_CONFIG.bindDN),
+        bindPass: z.string().trim().describe(LdapSso.CREATE_CONFIG.bindPass),
+        uniqueUserAttribute: z.string().trim().default("uidNumber").describe(LdapSso.CREATE_CONFIG.uniqueUserAttribute),
+        searchBase: z.string().trim().describe(LdapSso.CREATE_CONFIG.searchBase),
+        searchFilter: z.string().trim().default("(uid={{username}})").describe(LdapSso.CREATE_CONFIG.searchFilter),
+        groupSearchBase: z.string().trim().describe(LdapSso.CREATE_CONFIG.groupSearchBase),
         groupSearchFilter: z
           .string()
           .trim()
-          .default("(|(memberUid={{.Username}})(member={{.UserDN}})(uniqueMember={{.UserDN}}))"),
-        caCert: z.string().trim().default("")
+          .default("(|(memberUid={{.Username}})(member={{.UserDN}})(uniqueMember={{.UserDN}}))")
+          .describe(LdapSso.CREATE_CONFIG.groupSearchFilter),
+        caCert: z.string().trim().default("").describe(LdapSso.CREATE_CONFIG.caCert)
       }),
       response: {
         200: SanitizedLdapConfigSchema

@@ -214,23 +232,31 @@ export const registerLdapRouter = async (server: FastifyZodProvider) => {
     config: {
       rateLimit: writeLimit
     },
-    onRequest: verifyAuth([AuthMode.JWT]),
+    onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
     schema: {
+      hide: false,
+      tags: [ApiDocsTags.LdapSso],
+      description: "Update LDAP config",
+      security: [
+        {
+          bearerAuth: []
+        }
+      ],
       body: z
         .object({
-          isActive: z.boolean(),
-          url: z.string().trim(),
-          bindDN: z.string().trim(),
-          bindPass: z.string().trim(),
-          uniqueUserAttribute: z.string().trim(),
-          searchBase: z.string().trim(),
-          searchFilter: z.string().trim(),
-          groupSearchBase: z.string().trim(),
-          groupSearchFilter: z.string().trim(),
-          caCert: z.string().trim()
+          isActive: z.boolean().describe(LdapSso.UPDATE_CONFIG.isActive),
+          url: z.string().trim().describe(LdapSso.UPDATE_CONFIG.url),
+          bindDN: z.string().trim().describe(LdapSso.UPDATE_CONFIG.bindDN),
+          bindPass: z.string().trim().describe(LdapSso.UPDATE_CONFIG.bindPass),
+          uniqueUserAttribute: z.string().trim().describe(LdapSso.UPDATE_CONFIG.uniqueUserAttribute),
+          searchBase: z.string().trim().describe(LdapSso.UPDATE_CONFIG.searchBase),
+          searchFilter: z.string().trim().describe(LdapSso.UPDATE_CONFIG.searchFilter),
+          groupSearchBase: z.string().trim().describe(LdapSso.UPDATE_CONFIG.groupSearchBase),
+          groupSearchFilter: z.string().trim().describe(LdapSso.UPDATE_CONFIG.groupSearchFilter),
+          caCert: z.string().trim().describe(LdapSso.UPDATE_CONFIG.caCert)
         })
         .partial()
-        .merge(z.object({ organizationId: z.string() })),
+        .merge(z.object({ organizationId: z.string().trim().describe(LdapSso.UPDATE_CONFIG.organizationId) })),
       response: {
         200: SanitizedLdapConfigSchema
       }
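The change is mechanical across all three routes: each zod field gains a .describe() call whose string comes from the shared api-docs constants, and each schema gains tags and security metadata for the generated OpenAPI spec. A minimal sketch of the pattern, with a hypothetical constant standing in for LdapSso.GET_CONFIG.organizationId:

import { z } from "zod";

// Hypothetical doc-string constant; the real values live in @app/lib/api-docs.
const GET_CONFIG = { organizationId: "The ID of the organization to get the LDAP config for." };

const querystring = z.object({
  organizationId: z.string().trim().describe(GET_CONFIG.organizationId)
});

// Schema-to-OpenAPI generators read the description back off the zod schema:
console.log(querystring.shape.organizationId.description);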
@@ -13,6 +13,7 @@ import { z } from "zod";
 
 import { OidcConfigsSchema } from "@app/db/schemas";
 import { OIDCConfigurationType, OIDCJWTSignatureAlgorithm } from "@app/ee/services/oidc/oidc-config-types";
+import { ApiDocsTags, OidcSSo } from "@app/lib/api-docs";
 import { getConfig } from "@app/lib/config/env";
 import { authRateLimit, readLimit, writeLimit } from "@app/server/config/rateLimiter";
 import { verifyAuth } from "@app/server/plugins/auth/verify-auth";

@@ -153,10 +154,18 @@ export const registerOidcRouter = async (server: FastifyZodProvider) => {
     config: {
       rateLimit: readLimit
     },
-    onRequest: verifyAuth([AuthMode.JWT]),
+    onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
     schema: {
+      hide: false,
+      tags: [ApiDocsTags.OidcSso],
+      description: "Get OIDC config",
+      security: [
+        {
+          bearerAuth: []
+        }
+      ],
       querystring: z.object({
-        orgSlug: z.string().trim()
+        organizationId: z.string().trim().describe(OidcSSo.GET_CONFIG.organizationId)
       }),
       response: {
         200: SanitizedOidcConfigSchema.pick({

@@ -180,9 +189,8 @@ export const registerOidcRouter = async (server: FastifyZodProvider) => {
       }
     },
     handler: async (req) => {
-      const { orgSlug } = req.query;
       const oidc = await server.services.oidc.getOidc({
-        orgSlug,
+        organizationId: req.query.organizationId,
         type: "external",
         actor: req.permission.type,
         actorId: req.permission.id,

@@ -200,8 +208,16 @@ export const registerOidcRouter = async (server: FastifyZodProvider) => {
     config: {
       rateLimit: writeLimit
     },
-    onRequest: verifyAuth([AuthMode.JWT]),
+    onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
     schema: {
+      hide: false,
+      tags: [ApiDocsTags.OidcSso],
+      description: "Update OIDC config",
+      security: [
+        {
+          bearerAuth: []
+        }
+      ],
       body: z
         .object({
           allowedEmailDomains: z

@@ -216,22 +232,26 @@ export const registerOidcRouter = async (server: FastifyZodProvider) => {
                 .split(",")
                 .map((id) => id.trim())
                 .join(", ");
-            }),
-          discoveryURL: z.string().trim(),
-          configurationType: z.nativeEnum(OIDCConfigurationType),
-          issuer: z.string().trim(),
-          authorizationEndpoint: z.string().trim(),
-          jwksUri: z.string().trim(),
-          tokenEndpoint: z.string().trim(),
-          userinfoEndpoint: z.string().trim(),
-          clientId: z.string().trim(),
-          clientSecret: z.string().trim(),
-          isActive: z.boolean(),
-          manageGroupMemberships: z.boolean().optional(),
-          jwtSignatureAlgorithm: z.nativeEnum(OIDCJWTSignatureAlgorithm).optional()
+            })
+            .describe(OidcSSo.UPDATE_CONFIG.allowedEmailDomains),
+          discoveryURL: z.string().trim().describe(OidcSSo.UPDATE_CONFIG.discoveryURL),
+          configurationType: z.nativeEnum(OIDCConfigurationType).describe(OidcSSo.UPDATE_CONFIG.configurationType),
+          issuer: z.string().trim().describe(OidcSSo.UPDATE_CONFIG.issuer),
+          authorizationEndpoint: z.string().trim().describe(OidcSSo.UPDATE_CONFIG.authorizationEndpoint),
+          jwksUri: z.string().trim().describe(OidcSSo.UPDATE_CONFIG.jwksUri),
+          tokenEndpoint: z.string().trim().describe(OidcSSo.UPDATE_CONFIG.tokenEndpoint),
+          userinfoEndpoint: z.string().trim().describe(OidcSSo.UPDATE_CONFIG.userinfoEndpoint),
+          clientId: z.string().trim().describe(OidcSSo.UPDATE_CONFIG.clientId),
+          clientSecret: z.string().trim().describe(OidcSSo.UPDATE_CONFIG.clientSecret),
+          isActive: z.boolean().describe(OidcSSo.UPDATE_CONFIG.isActive),
+          manageGroupMemberships: z.boolean().optional().describe(OidcSSo.UPDATE_CONFIG.manageGroupMemberships),
+          jwtSignatureAlgorithm: z
+            .nativeEnum(OIDCJWTSignatureAlgorithm)
+            .optional()
+            .describe(OidcSSo.UPDATE_CONFIG.jwtSignatureAlgorithm)
         })
         .partial()
-        .merge(z.object({ orgSlug: z.string() })),
+        .merge(z.object({ organizationId: z.string().describe(OidcSSo.UPDATE_CONFIG.organizationId) })),
       response: {
         200: SanitizedOidcConfigSchema.pick({
           id: true,

@@ -267,8 +287,16 @@ export const registerOidcRouter = async (server: FastifyZodProvider) => {
     config: {
       rateLimit: writeLimit
     },
-    onRequest: verifyAuth([AuthMode.JWT]),
+    onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
     schema: {
+      hide: false,
+      tags: [ApiDocsTags.OidcSso],
+      description: "Create OIDC config",
+      security: [
+        {
+          bearerAuth: []
+        }
+      ],
       body: z
         .object({
           allowedEmailDomains: z

@@ -283,23 +311,34 @@ export const registerOidcRouter = async (server: FastifyZodProvider) => {
                 .split(",")
                 .map((id) => id.trim())
                 .join(", ");
-            }),
-          configurationType: z.nativeEnum(OIDCConfigurationType),
-          issuer: z.string().trim().optional().default(""),
-          discoveryURL: z.string().trim().optional().default(""),
-          authorizationEndpoint: z.string().trim().optional().default(""),
-          jwksUri: z.string().trim().optional().default(""),
-          tokenEndpoint: z.string().trim().optional().default(""),
-          userinfoEndpoint: z.string().trim().optional().default(""),
-          clientId: z.string().trim(),
-          clientSecret: z.string().trim(),
-          isActive: z.boolean(),
-          orgSlug: z.string().trim(),
-          manageGroupMemberships: z.boolean().optional().default(false),
+            })
+            .describe(OidcSSo.CREATE_CONFIG.allowedEmailDomains),
+          configurationType: z.nativeEnum(OIDCConfigurationType).describe(OidcSSo.CREATE_CONFIG.configurationType),
+          issuer: z.string().trim().optional().default("").describe(OidcSSo.CREATE_CONFIG.issuer),
+          discoveryURL: z.string().trim().optional().default("").describe(OidcSSo.CREATE_CONFIG.discoveryURL),
+          authorizationEndpoint: z
+            .string()
+            .trim()
+            .optional()
+            .default("")
+            .describe(OidcSSo.CREATE_CONFIG.authorizationEndpoint),
+          jwksUri: z.string().trim().optional().default("").describe(OidcSSo.CREATE_CONFIG.jwksUri),
+          tokenEndpoint: z.string().trim().optional().default("").describe(OidcSSo.CREATE_CONFIG.tokenEndpoint),
+          userinfoEndpoint: z.string().trim().optional().default("").describe(OidcSSo.CREATE_CONFIG.userinfoEndpoint),
+          clientId: z.string().trim().describe(OidcSSo.CREATE_CONFIG.clientId),
+          clientSecret: z.string().trim().describe(OidcSSo.CREATE_CONFIG.clientSecret),
+          isActive: z.boolean().describe(OidcSSo.CREATE_CONFIG.isActive),
+          organizationId: z.string().trim().describe(OidcSSo.CREATE_CONFIG.organizationId),
+          manageGroupMemberships: z
+            .boolean()
+            .optional()
+            .default(false)
+            .describe(OidcSSo.CREATE_CONFIG.manageGroupMemberships),
+          jwtSignatureAlgorithm: z
+            .nativeEnum(OIDCJWTSignatureAlgorithm)
+            .optional()
+            .default(OIDCJWTSignatureAlgorithm.RS256)
+            .describe(OidcSSo.CREATE_CONFIG.jwtSignatureAlgorithm)
         })
         .superRefine((data, ctx) => {
           if (data.configurationType === OIDCConfigurationType.CUSTOM) {
@@ -13,6 +13,7 @@ import { FastifyRequest } from "fastify";
 import { z } from "zod";
 
 import { SamlProviders, TGetSamlCfgDTO } from "@app/ee/services/saml-config/saml-config-types";
+import { ApiDocsTags, SamlSso } from "@app/lib/api-docs";
 import { getConfig } from "@app/lib/config/env";
 import { BadRequestError } from "@app/lib/errors";
 import { logger } from "@app/lib/logger";

@@ -149,8 +150,8 @@ export const registerSamlRouter = async (server: FastifyZodProvider) => {
         firstName,
         lastName: lastName as string,
         relayState: (req.body as { RelayState?: string }).RelayState,
-        authProvider: (req as unknown as FastifyRequest).ssoConfig?.authProvider as string,
-        orgId: (req as unknown as FastifyRequest).ssoConfig?.orgId as string,
+        authProvider: (req as unknown as FastifyRequest).ssoConfig?.authProvider,
+        orgId: (req as unknown as FastifyRequest).ssoConfig?.orgId,
         metadata: userMetadata
       });
       cb(null, { isUserCompleted, providerAuthToken });

@@ -262,14 +263,21 @@ export const registerSamlRouter = async (server: FastifyZodProvider) => {
     config: {
       rateLimit: readLimit
     },
-    onRequest: verifyAuth([AuthMode.JWT]),
+    onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
     schema: {
+      hide: false,
+      tags: [ApiDocsTags.SamlSso],
+      description: "Get SAML config",
+      security: [
+        {
+          bearerAuth: []
+        }
+      ],
       querystring: z.object({
-        organizationId: z.string().trim()
+        organizationId: z.string().trim().describe(SamlSso.GET_CONFIG.organizationId)
       }),
       response: {
-        200: z
-          .object({
+        200: z.object({
           id: z.string(),
           organization: z.string(),
           orgId: z.string(),

@@ -280,7 +288,6 @@ export const registerSamlRouter = async (server: FastifyZodProvider) => {
           cert: z.string(),
           lastUsed: z.date().nullable().optional()
         })
-          .optional()
       }
     },
     handler: async (req) => {

@@ -302,15 +309,23 @@ export const registerSamlRouter = async (server: FastifyZodProvider) => {
     config: {
       rateLimit: writeLimit
     },
-    onRequest: verifyAuth([AuthMode.JWT]),
+    onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
     schema: {
+      hide: false,
+      tags: [ApiDocsTags.SamlSso],
+      description: "Create SAML config",
+      security: [
+        {
+          bearerAuth: []
+        }
+      ],
       body: z.object({
-        organizationId: z.string(),
-        authProvider: z.nativeEnum(SamlProviders),
-        isActive: z.boolean(),
-        entryPoint: z.string(),
-        issuer: z.string(),
-        cert: z.string()
+        organizationId: z.string().trim().describe(SamlSso.CREATE_CONFIG.organizationId),
+        authProvider: z.nativeEnum(SamlProviders).describe(SamlSso.CREATE_CONFIG.authProvider),
+        isActive: z.boolean().describe(SamlSso.CREATE_CONFIG.isActive),
+        entryPoint: z.string().trim().describe(SamlSso.CREATE_CONFIG.entryPoint),
+        issuer: z.string().trim().describe(SamlSso.CREATE_CONFIG.issuer),
+        cert: z.string().trim().describe(SamlSso.CREATE_CONFIG.cert)
       }),
       response: {
         200: SanitizedSamlConfigSchema

@@ -341,18 +356,26 @@ export const registerSamlRouter = async (server: FastifyZodProvider) => {
     config: {
       rateLimit: writeLimit
     },
-    onRequest: verifyAuth([AuthMode.JWT]),
+    onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
     schema: {
+      hide: false,
+      tags: [ApiDocsTags.SamlSso],
+      description: "Update SAML config",
+      security: [
+        {
+          bearerAuth: []
+        }
+      ],
       body: z
         .object({
-          authProvider: z.nativeEnum(SamlProviders),
-          isActive: z.boolean(),
-          entryPoint: z.string(),
-          issuer: z.string(),
-          cert: z.string()
+          authProvider: z.nativeEnum(SamlProviders).describe(SamlSso.UPDATE_CONFIG.authProvider),
+          isActive: z.boolean().describe(SamlSso.UPDATE_CONFIG.isActive),
+          entryPoint: z.string().trim().describe(SamlSso.UPDATE_CONFIG.entryPoint),
+          issuer: z.string().trim().describe(SamlSso.UPDATE_CONFIG.issuer),
+          cert: z.string().trim().describe(SamlSso.UPDATE_CONFIG.cert)
        })
        .partial()
-        .merge(z.object({ organizationId: z.string() })),
+        .merge(z.object({ organizationId: z.string().trim().describe(SamlSso.UPDATE_CONFIG.organizationId) })),
       response: {
         200: SanitizedSamlConfigSchema
       }
@@ -141,7 +141,8 @@ export const registerSecretApprovalRequestRouter = async (server: FastifyZodProvider) => {
     },
     onRequest: verifyAuth([AuthMode.JWT]),
     handler: async (req) => {
-      const { approval } = await server.services.secretApprovalRequest.mergeSecretApprovalRequest({
+      const { approval, projectId, secretMutationEvents } =
+        await server.services.secretApprovalRequest.mergeSecretApprovalRequest({
           actorId: req.permission.id,
           actor: req.permission.type,
           actorAuthMethod: req.permission.authMethod,
@@ -149,6 +150,30 @@ export const registerSecretApprovalRequestRouter = async (server: FastifyZodProvider) => {
           approvalId: req.params.id,
           bypassReason: req.body.bypassReason
         });
+
+      await server.services.auditLog.createAuditLog({
+        ...req.auditLogInfo,
+        orgId: req.permission.orgId,
+        projectId,
+        event: {
+          type: EventType.SECRET_APPROVAL_MERGED,
+          metadata: {
+            mergedBy: req.permission.id,
+            secretApprovalRequestSlug: approval.slug,
+            secretApprovalRequestId: approval.id
+          }
+        }
+      });
+
+      for await (const event of secretMutationEvents) {
+        await server.services.auditLog.createAuditLog({
+          ...req.auditLogInfo,
+          orgId: req.permission.orgId,
+          projectId,
+          event
+        });
+      }
+
       return { approval };
     }
   });
@@ -0,0 +1,16 @@
import { registerSecretScanningEndpoints } from "@app/ee/routes/v2/secret-scanning-v2-routers/secret-scanning-v2-endpoints";
import {
  BitbucketDataSourceSchema,
  CreateBitbucketDataSourceSchema,
  UpdateBitbucketDataSourceSchema
} from "@app/ee/services/secret-scanning-v2/bitbucket";
import { SecretScanningDataSource } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";

export const registerBitbucketSecretScanningRouter = async (server: FastifyZodProvider) =>
  registerSecretScanningEndpoints({
    type: SecretScanningDataSource.Bitbucket,
    server,
    responseSchema: BitbucketDataSourceSchema,
    createSchema: CreateBitbucketDataSourceSchema,
    updateSchema: UpdateBitbucketDataSourceSchema
  });
@@ -1,5 +1,6 @@
 import { SecretScanningDataSource } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
 
+import { registerBitbucketSecretScanningRouter } from "./bitbucket-secret-scanning-router";
 import { registerGitHubSecretScanningRouter } from "./github-secret-scanning-router";
 
 export * from "./secret-scanning-v2-router";
@@ -8,5 +9,6 @@ export const SECRET_SCANNING_REGISTER_ROUTER_MAP: Record<
   SecretScanningDataSource,
   (server: FastifyZodProvider) => Promise<void>
 > = {
-  [SecretScanningDataSource.GitHub]: registerGitHubSecretScanningRouter
+  [SecretScanningDataSource.GitHub]: registerGitHubSecretScanningRouter,
+  [SecretScanningDataSource.Bitbucket]: registerBitbucketSecretScanningRouter
 };
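Because the router map is keyed by SecretScanningDataSource, adding Bitbucket is a one-entry change here. A sketch of how such a map might be consumed at server startup; the consumption site is not part of this diff, so the loop below is an assumption:

// Assumed registration loop; mirrors the Record type above.
const registerAllSecretScanningRouters = async (server: FastifyZodProvider) => {
  for (const register of Object.values(SECRET_SCANNING_REGISTER_ROUTER_MAP)) {
    // eslint-disable-next-line no-await-in-loop
    await register(server);
  }
};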
@@ -2,6 +2,7 @@ import { z } from "zod";
 
 import { SecretScanningConfigsSchema } from "@app/db/schemas";
 import { EventType } from "@app/ee/services/audit-log/audit-log-types";
+import { BitbucketDataSourceListItemSchema } from "@app/ee/services/secret-scanning-v2/bitbucket";
 import { GitHubDataSourceListItemSchema } from "@app/ee/services/secret-scanning-v2/github";
 import {
   SecretScanningFindingStatus,
@@ -21,7 +22,10 @@ import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
 import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
 import { AuthMode } from "@app/services/auth/auth-type";
 
-const SecretScanningDataSourceOptionsSchema = z.discriminatedUnion("type", [GitHubDataSourceListItemSchema]);
+const SecretScanningDataSourceOptionsSchema = z.discriminatedUnion("type", [
+  GitHubDataSourceListItemSchema,
+  BitbucketDataSourceListItemSchema
+]);
 
 export const registerSecretScanningV2Router = async (server: FastifyZodProvider) => {
   server.route({
@@ -116,6 +116,15 @@ interface BaseAuthData {
   userAgentType?: UserAgentType;
 }
 
+export enum SecretApprovalEvent {
+  Create = "create",
+  Update = "update",
+  Delete = "delete",
+  CreateMany = "create-many",
+  UpdateMany = "update-many",
+  DeleteMany = "delete-many"
+}
+
 export enum UserAgentType {
   WEB = "web",
   CLI = "cli",

@@ -1705,6 +1714,17 @@ interface SecretApprovalRequest {
     committedBy: string;
     secretApprovalRequestSlug: string;
     secretApprovalRequestId: string;
+    eventType: SecretApprovalEvent;
+    secretKey?: string;
+    secretId?: string;
+    secrets?: {
+      secretKey?: string;
+      secretId?: string;
+      environment?: string;
+      secretPath?: string;
+    }[];
+    environment: string;
+    secretPath: string;
   };
 }
@@ -107,34 +107,26 @@ export const oidcConfigServiceFactory = ({
   kmsService
 }: TOidcConfigServiceFactoryDep) => {
   const getOidc = async (dto: TGetOidcCfgDTO) => {
-    const org = await orgDAL.findOne({ slug: dto.orgSlug });
-    if (!org) {
+    const oidcCfg = await oidcConfigDAL.findOne({
+      orgId: dto.organizationId
+    });
+    if (!oidcCfg) {
       throw new NotFoundError({
-        message: `Organization with slug '${dto.orgSlug}' not found`,
-        name: "OrgNotFound"
+        message: `OIDC configuration for organization with ID '${dto.organizationId}' not found`
       });
     }
 
     if (dto.type === "external") {
       const { permission } = await permissionService.getOrgPermission(
         dto.actor,
         dto.actorId,
-        org.id,
+        dto.organizationId,
         dto.actorAuthMethod,
         dto.actorOrgId
       );
       ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Read, OrgPermissionSubjects.Sso);
     }
 
-    const oidcCfg = await oidcConfigDAL.findOne({
-      orgId: org.id
-    });
-
-    if (!oidcCfg) {
-      throw new NotFoundError({
-        message: `OIDC configuration for organization with slug '${dto.orgSlug}' not found`
-      });
-    }
-
     const { decryptor } = await kmsService.createCipherPairWithDataKey({
       type: KmsDataKey.Organization,
       orgId: oidcCfg.orgId

@@ -465,7 +457,7 @@ export const oidcConfigServiceFactory = ({
   };
 
   const updateOidcCfg = async ({
-    orgSlug,
+    organizationId,
     allowedEmailDomains,
     configurationType,
     discoveryURL,

@@ -484,13 +476,11 @@ export const oidcConfigServiceFactory = ({
     manageGroupMemberships,
     jwtSignatureAlgorithm
   }: TUpdateOidcCfgDTO) => {
-    const org = await orgDAL.findOne({
-      slug: orgSlug
-    });
+    const org = await orgDAL.findOne({ id: organizationId });
 
     if (!org) {
       throw new NotFoundError({
-        message: `Organization with slug '${orgSlug}' not found`
+        message: `Organization with ID '${organizationId}' not found`
       });
     }

@@ -555,7 +545,7 @@ export const oidcConfigServiceFactory = ({
   };
 
   const createOidcCfg = async ({
-    orgSlug,
+    organizationId,
     allowedEmailDomains,
     configurationType,
     discoveryURL,

@@ -574,12 +564,10 @@ export const oidcConfigServiceFactory = ({
     manageGroupMemberships,
     jwtSignatureAlgorithm
   }: TCreateOidcCfgDTO) => {
-    const org = await orgDAL.findOne({
-      slug: orgSlug
-    });
+    const org = await orgDAL.findOne({ id: organizationId });
     if (!org) {
       throw new NotFoundError({
-        message: `Organization with slug '${orgSlug}' not found`
+        message: `Organization with ID '${organizationId}' not found`
       });
     }

@@ -639,7 +627,7 @@ export const oidcConfigServiceFactory = ({
 
     const oidcCfg = await getOidc({
       type: "internal",
-      orgSlug
+      organizationId: org.id
     });
 
     if (!oidcCfg || !oidcCfg.isActive) {
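Taken together, these hunks move the OIDC service from slug-based to ID-based lookups: getOidc now queries the OIDC config directly by orgId, and createOidcCfg and updateOidcCfg resolve the organization with findOne({ id: organizationId }), with the error messages updated to match.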
@@ -26,11 +26,11 @@ export type TOidcLoginDTO = {
 export type TGetOidcCfgDTO =
   | ({
       type: "external";
-      orgSlug: string;
+      organizationId: string;
     } & TGenericPermission)
   | {
       type: "internal";
-      orgSlug: string;
+      organizationId: string;
     };
 
 export type TCreateOidcCfgDTO = {

@@ -45,7 +45,7 @@ export type TCreateOidcCfgDTO = {
   clientId: string;
   clientSecret: string;
   isActive: boolean;
-  orgSlug: string;
+  organizationId: string;
   manageGroupMemberships: boolean;
   jwtSignatureAlgorithm: OIDCJWTSignatureAlgorithm;
 } & TGenericPermission;

@@ -62,7 +62,7 @@ export type TUpdateOidcCfgDTO = Partial<{
   clientId: string;
   clientSecret: string;
   isActive: boolean;
-  orgSlug: string;
+  organizationId: string;
   manageGroupMemberships: boolean;
   jwtSignatureAlgorithm: OIDCJWTSignatureAlgorithm;
 }> &
@@ -148,10 +148,18 @@ export const samlConfigServiceFactory = ({
     let samlConfig: TSamlConfigs | undefined;
     if (dto.type === "org") {
       samlConfig = await samlConfigDAL.findOne({ orgId: dto.orgId });
-      if (!samlConfig) return;
+      if (!samlConfig) {
+        throw new NotFoundError({
+          message: `SAML configuration for organization with ID '${dto.orgId}' not found`
+        });
+      }
     } else if (dto.type === "orgSlug") {
       const org = await orgDAL.findOne({ slug: dto.orgSlug });
-      if (!org) return;
+      if (!org) {
+        throw new NotFoundError({
+          message: `Organization with slug '${dto.orgSlug}' not found`
+        });
+      }
       samlConfig = await samlConfigDAL.findOne({ orgId: org.id });
     } else if (dto.type === "ssoId") {
       // TODO:
@@ -61,8 +61,7 @@ export type TSamlLoginDTO = {
 export type TSamlConfigServiceFactory = {
   createSamlCfg: (arg: TCreateSamlCfgDTO) => Promise<TSamlConfigs>;
   updateSamlCfg: (arg: TUpdateSamlCfgDTO) => Promise<TSamlConfigs>;
-  getSaml: (arg: TGetSamlCfgDTO) => Promise<
-    | {
+  getSaml: (arg: TGetSamlCfgDTO) => Promise<{
     id: string;
     organization: string;
     orgId: string;

@@ -72,9 +71,7 @@ export type TSamlConfigServiceFactory = {
     issuer: string;
     cert: string;
     lastUsed: Date | null | undefined;
-      }
-    | undefined
-  >;
+  }>;
   samlLogin: (arg: TSamlLoginDTO) => Promise<{
     isUserCompleted: boolean;
     providerAuthToken: string;
@@ -10,6 +10,7 @@ import {
   TSecretApprovalRequestsSecretsInsert,
   TSecretApprovalRequestsSecretsV2Insert
 } from "@app/db/schemas";
+import { Event, EventType } from "@app/ee/services/audit-log/audit-log-types";
 import { getConfig } from "@app/lib/config/env";
 import { decryptSymmetric128BitHexKeyUTF8 } from "@app/lib/crypto";
 import { BadRequestError, ForbiddenRequestError, NotFoundError } from "@app/lib/errors";

@@ -523,7 +524,7 @@ export const secretApprovalRequestServiceFactory = ({
       });
     }
 
-    const { policy, folderId, projectId, bypassers } = secretApprovalRequest;
+    const { policy, folderId, projectId, bypassers, environment } = secretApprovalRequest;
     if (policy.deletedAt) {
       throw new BadRequestError({
         message: "The policy associated with this secret approval request has been deleted."

@@ -957,7 +958,112 @@ export const secretApprovalRequestServiceFactory = ({
       });
     }
 
-    return mergeStatus;
+    const { created, updated, deleted } = mergeStatus.secrets;
+
+    const secretMutationEvents: Event[] = [];
+
+    if (created.length) {
+      if (created.length > 1) {
+        secretMutationEvents.push({
+          type: EventType.CREATE_SECRETS,
+          metadata: {
+            environment,
+            secretPath: folder.path,
+            secrets: created.map((secret) => ({
+              secretId: secret.id,
+              secretVersion: 1,
+              // @ts-expect-error not present on v1 secrets
+              secretKey: secret.key as string,
+              // @ts-expect-error not present on v1 secrets
+              secretMetadata: secret.secretMetadata as ResourceMetadataDTO
+            }))
+          }
+        });
+      } else {
+        const [secret] = created;
+        secretMutationEvents.push({
+          type: EventType.CREATE_SECRET,
+          metadata: {
+            environment,
+            secretPath: folder.path,
+            secretId: secret.id,
+            secretVersion: 1,
+            // @ts-expect-error not present on v1 secrets
+            secretKey: secret.key as string,
+            // @ts-expect-error not present on v1 secrets
+            secretMetadata: secret.secretMetadata as ResourceMetadataDTO
+          }
+        });
+      }
+    }
+
+    if (updated.length) {
+      if (updated.length > 1) {
+        secretMutationEvents.push({
+          type: EventType.UPDATE_SECRETS,
+          metadata: {
+            environment,
+            secretPath: folder.path,
+            secrets: updated.map((secret) => ({
+              secretId: secret.id,
+              secretVersion: secret.version,
+              // @ts-expect-error not present on v1 secrets
+              secretKey: secret.key as string,
+              // @ts-expect-error not present on v1 secrets
+              secretMetadata: secret.secretMetadata as ResourceMetadataDTO
+            }))
+          }
+        });
+      } else {
+        const [secret] = updated;
+        secretMutationEvents.push({
+          type: EventType.UPDATE_SECRET,
+          metadata: {
+            environment,
+            secretPath: folder.path,
+            secretId: secret.id,
+            secretVersion: secret.version,
+            // @ts-expect-error not present on v1 secrets
+            secretKey: secret.key as string,
+            // @ts-expect-error not present on v1 secrets
+            secretMetadata: secret.secretMetadata as ResourceMetadataDTO
+          }
+        });
+      }
+    }
+
+    if (deleted.length) {
+      if (deleted.length > 1) {
+        secretMutationEvents.push({
+          type: EventType.DELETE_SECRETS,
+          metadata: {
+            environment,
+            secretPath: folder.path,
+            secrets: deleted.map((secret) => ({
+              secretId: secret.id,
+              secretVersion: secret.version,
+              // @ts-expect-error not present on v1 secrets
+              secretKey: secret.key as string
+            }))
+          }
+        });
+      } else {
+        const [secret] = deleted;
+        secretMutationEvents.push({
+          type: EventType.DELETE_SECRET,
+          metadata: {
+            environment,
+            secretPath: folder.path,
+            secretId: secret.id,
+            secretVersion: secret.version,
+            // @ts-expect-error not present on v1 secrets
+            secretKey: secret.key as string
+          }
+        });
+      }
+    }
+
+    return { ...mergeStatus, projectId, secretMutationEvents };
   };
 
   // function to save secret change to secret approval
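The merge now returns projectId and a secretMutationEvents array alongside the merge status: a singular CREATE_SECRET, UPDATE_SECRET, or DELETE_SECRET event when exactly one secret changed, and the batched *_SECRETS variant otherwise. The router hunk earlier in this diff replays those events into the audit log after logging SECRET_APPROVAL_MERGED.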
@@ -0,0 +1,9 @@
import { SecretScanningDataSource } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
import { TSecretScanningDataSourceListItem } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-types";
import { AppConnection } from "@app/services/app-connection/app-connection-enums";

export const BITBUCKET_SECRET_SCANNING_DATA_SOURCE_LIST_OPTION: TSecretScanningDataSourceListItem = {
  name: "Bitbucket",
  type: SecretScanningDataSource.Bitbucket,
  connection: AppConnection.Bitbucket
};
@@ -0,0 +1,314 @@
import { join } from "path";

import { scanContentAndGetFindings } from "@app/ee/services/secret-scanning/secret-scanning-queue/secret-scanning-fns";
import { SecretMatch } from "@app/ee/services/secret-scanning/secret-scanning-queue/secret-scanning-queue-types";
import {
  SecretScanningFindingSeverity,
  SecretScanningResource
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
import {
  cloneRepository,
  convertPatchLineToFileLineNumber,
  replaceNonChangesWithNewlines
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-fns";
import {
  TSecretScanningFactoryGetDiffScanFindingsPayload,
  TSecretScanningFactoryGetDiffScanResourcePayload,
  TSecretScanningFactoryGetFullScanPath,
  TSecretScanningFactoryInitialize,
  TSecretScanningFactoryListRawResources,
  TSecretScanningFactoryPostInitialization,
  TSecretScanningFactoryTeardown
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-types";
import { getConfig } from "@app/lib/config/env";
import { request } from "@app/lib/config/request";
import { titleCaseToCamelCase } from "@app/lib/fn";
import { logger } from "@app/lib/logger";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { BasicRepositoryRegex } from "@app/lib/regex";
import {
  getBitbucketUser,
  listBitbucketRepositories,
  TBitbucketConnection
} from "@app/services/app-connection/bitbucket";
import { IntegrationUrls } from "@app/services/integration-auth/integration-list";

import {
  TBitbucketDataSourceCredentials,
  TBitbucketDataSourceInput,
  TBitbucketDataSourceWithConnection,
  TQueueBitbucketResourceDiffScan
} from "./bitbucket-secret-scanning-types";

export const BitbucketSecretScanningFactory = () => {
  const initialize: TSecretScanningFactoryInitialize<
    TBitbucketDataSourceInput,
    TBitbucketConnection,
    TBitbucketDataSourceCredentials
  > = async ({ connection, payload }, callback) => {
    const cfg = getConfig();

    const { email, apiToken } = connection.credentials;
    const authHeader = `Basic ${Buffer.from(`${email}:${apiToken}`).toString("base64")}`;

    const { data } = await request.post<{ uuid: string }>(
      `${IntegrationUrls.BITBUCKET_API_URL}/2.0/workspaces/${encodeURIComponent(payload.config.workspaceSlug)}/hooks`,
      {
        description: "Infisical webhook for push events",
        url: `${cfg.SITE_URL}/secret-scanning/webhooks/bitbucket`,
        active: false,
        events: ["repo:push"]
      },
      {
        headers: {
          Authorization: authHeader,
          Accept: "application/json"
        }
      }
    );

    return callback({
      credentials: { webhookId: data.uuid, webhookSecret: alphaNumericNanoId(64) }
    });
  };

  const postInitialization: TSecretScanningFactoryPostInitialization<
    TBitbucketDataSourceInput,
    TBitbucketConnection,
    TBitbucketDataSourceCredentials
  > = async ({ dataSourceId, credentials, connection, payload }) => {
    const { email, apiToken } = connection.credentials;
    const { webhookId, webhookSecret } = credentials;

    const authHeader = `Basic ${Buffer.from(`${email}:${apiToken}`).toString("base64")}`;

    const cfg = getConfig();
    const newWebhookUrl = `${cfg.SITE_URL}/secret-scanning/webhooks/bitbucket?dataSourceId=${dataSourceId}`;

    await request.put(
      `${IntegrationUrls.BITBUCKET_API_URL}/2.0/workspaces/${encodeURIComponent(payload.config.workspaceSlug)}/hooks/${webhookId}`,
      {
        description: "Infisical webhook for push events",
        url: newWebhookUrl,
        active: true,
        events: ["repo:push"],
        secret: webhookSecret
      },
      {
        headers: {
          Authorization: authHeader,
          Accept: "application/json"
        }
      }
    );
  };

  const teardown: TSecretScanningFactoryTeardown<
    TBitbucketDataSourceWithConnection,
    TBitbucketDataSourceCredentials
  > = async ({ credentials, dataSource }) => {
    const {
      connection: {
        credentials: { email, apiToken }
      },
      config
    } = dataSource;
    const { webhookId } = credentials;

    const authHeader = `Basic ${Buffer.from(`${email}:${apiToken}`).toString("base64")}`;

    try {
      await request.delete(
        `${IntegrationUrls.BITBUCKET_API_URL}/2.0/workspaces/${config.workspaceSlug}/hooks/${webhookId}`,
        {
          headers: {
            Authorization: authHeader,
            Accept: "application/json"
          }
        }
      );
    } catch (err) {
      logger.error(`teardown: Bitbucket - Failed to call delete on webhook [webhookId=${webhookId}]`);
    }
  };

  const listRawResources: TSecretScanningFactoryListRawResources<TBitbucketDataSourceWithConnection> = async (
    dataSource
  ) => {
    const {
      connection,
      config: { includeRepos, workspaceSlug }
    } = dataSource;

    const repos = await listBitbucketRepositories(connection, workspaceSlug);

    const filteredRepos: typeof repos = [];
    if (includeRepos.includes("*")) {
      filteredRepos.push(...repos);
    } else {
      filteredRepos.push(...repos.filter((repo) => includeRepos.includes(repo.full_name)));
    }

    return filteredRepos.map(({ full_name, uuid }) => ({
      name: full_name,
      externalId: uuid,
      type: SecretScanningResource.Repository
    }));
  };

  const getFullScanPath: TSecretScanningFactoryGetFullScanPath<TBitbucketDataSourceWithConnection> = async ({
    dataSource,
    resourceName,
    tempFolder
  }) => {
    const {
      connection: {
        credentials: { apiToken, email }
      }
    } = dataSource;

    const repoPath = join(tempFolder, "repo.git");

    if (!BasicRepositoryRegex.test(resourceName)) {
      throw new Error("Invalid Bitbucket repository name");
    }

    const { username } = await getBitbucketUser({ email, apiToken });

    await cloneRepository({
      cloneUrl: `https://${encodeURIComponent(username)}:${apiToken}@bitbucket.org/${resourceName}.git`,
      repoPath
    });

    return repoPath;
  };

  const getDiffScanResourcePayload: TSecretScanningFactoryGetDiffScanResourcePayload<
    TQueueBitbucketResourceDiffScan["payload"]
  > = ({ repository }) => {
    return {
      name: repository.full_name,
      externalId: repository.uuid,
      type: SecretScanningResource.Repository
    };
  };

  const getDiffScanFindingsPayload: TSecretScanningFactoryGetDiffScanFindingsPayload<
    TBitbucketDataSourceWithConnection,
    TQueueBitbucketResourceDiffScan["payload"]
  > = async ({ dataSource, payload, resourceName, configPath }) => {
    const {
      connection: {
        credentials: { apiToken, email }
      }
    } = dataSource;

    const { push, repository } = payload;

    const allFindings: SecretMatch[] = [];

    const authHeader = `Basic ${Buffer.from(`${email}:${apiToken}`).toString("base64")}`;

    for (const change of push.changes) {
      for (const commit of change.commits) {
        // eslint-disable-next-line no-await-in-loop
        const { data: diffstat } = await request.get<{
          values: {
            status: "added" | "modified" | "removed" | "renamed";
            new?: { path: string };
            old?: { path: string };
          }[];
        }>(`${IntegrationUrls.BITBUCKET_API_URL}/2.0/repositories/${repository.full_name}/diffstat/${commit.hash}`, {
          headers: {
            Authorization: authHeader,
            Accept: "application/json"
          }
        });

        // eslint-disable-next-line no-continue
        if (!diffstat.values) continue;

        for (const file of diffstat.values) {
          if ((file.status === "added" || file.status === "modified") && file.new?.path) {
            const filePath = file.new.path;

            // eslint-disable-next-line no-await-in-loop
            const { data: patch } = await request.get<string>(
              `https://api.bitbucket.org/2.0/repositories/${repository.full_name}/diff/${commit.hash}`,
              {
                params: {
                  path: filePath
                },
                headers: {
                  Authorization: authHeader
                },
                responseType: "text"
              }
            );

            // eslint-disable-next-line no-continue
            if (!patch) continue;

            // eslint-disable-next-line no-await-in-loop
            const findings = await scanContentAndGetFindings(replaceNonChangesWithNewlines(`\n${patch}`), configPath);

            const adjustedFindings = findings.map((finding) => {
              const startLine = convertPatchLineToFileLineNumber(patch, finding.StartLine);
              const endLine =
                finding.StartLine === finding.EndLine
                  ? startLine
                  : convertPatchLineToFileLineNumber(patch, finding.EndLine);
              const startColumn = finding.StartColumn - 1; // subtract 1 for +
              const endColumn = finding.EndColumn - 1; // subtract 1 for +
              const authorName = commit.author.user?.display_name || commit.author.raw.split(" <")[0];
              const emailMatch = commit.author.raw.match(/<(.*)>/);
              const authorEmail = emailMatch?.[1] ?? "";

              return {
                ...finding,
                StartLine: startLine,
                EndLine: endLine,
                StartColumn: startColumn,
                EndColumn: endColumn,
                File: filePath,
                Commit: commit.hash,
                Author: authorName,
                Email: authorEmail,
                Message: commit.message,
                Fingerprint: `${commit.hash}:${filePath}:${finding.RuleID}:${startLine}:${startColumn}`,
                Date: commit.date,
                Link: `https://bitbucket.org/${resourceName}/src/${commit.hash}/${filePath}#lines-${startLine}`
              };
            });

            allFindings.push(...adjustedFindings);
          }
        }
      }
    }

    return allFindings.map(
      ({
        // discard match and secret as we don't want to store
        Match,
        Secret,
        ...finding
      }) => ({
        details: titleCaseToCamelCase(finding),
        fingerprint: finding.Fingerprint,
        severity: SecretScanningFindingSeverity.High,
        rule: finding.RuleID
      })
    );
  };

  return {
    initialize,
    postInitialization,
    listRawResources,
    getFullScanPath,
    getDiffScanResourcePayload,
    getDiffScanFindingsPayload,
    teardown
  };
};
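Every Bitbucket API call in the factory uses the same HTTP Basic credential derived from the connection's email and API token. A self-contained sketch of that header against the diffstat endpoint, using Node's built-in fetch rather than the app's request wrapper; the workspace, repo, and commit values are placeholders:

const email = "ci-bot@example.com"; // placeholder
const apiToken = "api-token-value"; // placeholder
const authHeader = `Basic ${Buffer.from(`${email}:${apiToken}`).toString("base64")}`;

const res = await fetch(
  "https://api.bitbucket.org/2.0/repositories/my-workspace/my-repo/diffstat/abc123",
  { headers: { Authorization: authHeader, Accept: "application/json" } }
);
console.log(res.status, await res.json());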
@@ -0,0 +1,97 @@
import { z } from "zod";

import {
  SecretScanningDataSource,
  SecretScanningResource
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
import {
  BaseCreateSecretScanningDataSourceSchema,
  BaseSecretScanningDataSourceSchema,
  BaseSecretScanningFindingSchema,
  BaseUpdateSecretScanningDataSourceSchema,
  GitRepositoryScanFindingDetailsSchema
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-schemas";
import { SecretScanningDataSources } from "@app/lib/api-docs";
import { BasicRepositoryRegex } from "@app/lib/regex";
import { AppConnection } from "@app/services/app-connection/app-connection-enums";

export const BitbucketDataSourceConfigSchema = z.object({
  workspaceSlug: z
    .string()
    .min(1, "Workspace slug required")
    .max(128)
    .describe(SecretScanningDataSources.CONFIG.BITBUCKET.workspaceSlug),
  includeRepos: z
    .array(
      z
        .string()
        .min(1)
        .max(256)
        .refine((value) => value === "*" || BasicRepositoryRegex.test(value), "Invalid repository name format")
    )
    .nonempty("One or more repositories required")
    .max(100, "Cannot configure more than 100 repositories")
    .default(["*"])
    .describe(SecretScanningDataSources.CONFIG.BITBUCKET.includeRepos)
});

export const BitbucketDataSourceSchema = BaseSecretScanningDataSourceSchema({
  type: SecretScanningDataSource.Bitbucket,
  isConnectionRequired: true
})
  .extend({
    config: BitbucketDataSourceConfigSchema
  })
  .describe(
    JSON.stringify({
      title: "Bitbucket"
    })
  );

export const CreateBitbucketDataSourceSchema = BaseCreateSecretScanningDataSourceSchema({
  type: SecretScanningDataSource.Bitbucket,
  isConnectionRequired: true
})
  .extend({
    config: BitbucketDataSourceConfigSchema
  })
  .describe(
    JSON.stringify({
      title: "Bitbucket"
    })
  );

export const UpdateBitbucketDataSourceSchema = BaseUpdateSecretScanningDataSourceSchema(
  SecretScanningDataSource.Bitbucket
)
  .extend({
    config: BitbucketDataSourceConfigSchema.optional()
  })
  .describe(
    JSON.stringify({
      title: "Bitbucket"
    })
  );

export const BitbucketDataSourceListItemSchema = z
  .object({
    name: z.literal("Bitbucket"),
    connection: z.literal(AppConnection.Bitbucket),
    type: z.literal(SecretScanningDataSource.Bitbucket)
  })
  .describe(
    JSON.stringify({
      title: "Bitbucket"
    })
  );

export const BitbucketFindingSchema = BaseSecretScanningFindingSchema.extend({
  resourceType: z.literal(SecretScanningResource.Repository),
  dataSourceType: z.literal(SecretScanningDataSource.Bitbucket),
  details: GitRepositoryScanFindingDetailsSchema
});

export const BitbucketDataSourceCredentialsSchema = z.object({
  webhookId: z.string(),
  webhookSecret: z.string()
});
@ -0,0 +1,104 @@
import crypto from "crypto";

import { TSecretScanningV2DALFactory } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-dal";
import { SecretScanningDataSource } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
import { TSecretScanningV2QueueServiceFactory } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-queue";
import { logger } from "@app/lib/logger";
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
import { KmsDataKey } from "@app/services/kms/kms-types";

import {
  TBitbucketDataSource,
  TBitbucketDataSourceCredentials,
  TBitbucketPushEvent
} from "./bitbucket-secret-scanning-types";

export const bitbucketSecretScanningService = (
  secretScanningV2DAL: TSecretScanningV2DALFactory,
  secretScanningV2Queue: Pick<TSecretScanningV2QueueServiceFactory, "queueResourceDiffScan">,
  kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">
) => {
  const handlePushEvent = async (
    payload: TBitbucketPushEvent & { dataSourceId: string; receivedSignature: string; bodyString: string }
  ) => {
    const { push, repository, bodyString, receivedSignature } = payload;

    if (!push?.changes?.length || !repository?.workspace?.uuid) {
      logger.warn(
        `secretScanningV2PushEvent: Bitbucket - Insufficient data [changes=${
          push?.changes?.length ?? 0
        }] [repository=${repository?.name}] [workspaceUuid=${repository?.workspace?.uuid}]`
      );
      return;
    }

    const dataSource = (await secretScanningV2DAL.dataSources.findOne({
      id: payload.dataSourceId,
      type: SecretScanningDataSource.Bitbucket
    })) as TBitbucketDataSource | undefined;

    if (!dataSource) {
      logger.error(
        `secretScanningV2PushEvent: Bitbucket - Could not find data source [workspaceUuid=${repository.workspace.uuid}]`
      );
      return;
    }

    const {
      isAutoScanEnabled,
      config: { includeRepos },
      encryptedCredentials,
      projectId
    } = dataSource;

    if (!encryptedCredentials) {
      logger.info(
        `secretScanningV2PushEvent: Bitbucket - Could not find encrypted credentials [dataSourceId=${dataSource.id}] [workspaceUuid=${repository.workspace.uuid}]`
      );
      return;
    }

    const { decryptor } = await kmsService.createCipherPairWithDataKey({
      type: KmsDataKey.SecretManager,
      projectId
    });

    const decryptedCredentials = decryptor({ cipherTextBlob: encryptedCredentials });

    const credentials = JSON.parse(decryptedCredentials.toString()) as TBitbucketDataSourceCredentials;

    const hmac = crypto.createHmac("sha256", credentials.webhookSecret);
    hmac.update(bodyString);
    const calculatedSignature = hmac.digest("hex");

    if (calculatedSignature !== receivedSignature) {
      logger.error(
        `secretScanningV2PushEvent: Bitbucket - Invalid signature for webhook [dataSourceId=${dataSource.id}] [workspaceUuid=${repository.workspace.uuid}]`
      );
      return;
    }

    if (!isAutoScanEnabled) {
      logger.info(
        `secretScanningV2PushEvent: Bitbucket - ignoring due to auto scan disabled [dataSourceId=${dataSource.id}] [workspaceUuid=${repository.workspace.uuid}]`
      );
      return;
    }

    if (includeRepos.includes("*") || includeRepos.includes(repository.full_name)) {
      await secretScanningV2Queue.queueResourceDiffScan({
        dataSourceType: SecretScanningDataSource.Bitbucket,
        payload,
        dataSourceId: dataSource.id
      });
    } else {
      logger.info(
        `secretScanningV2PushEvent: Bitbucket - ignoring due to repository not being present in config [workspaceUuid=${repository.workspace.uuid}] [dataSourceId=${dataSource.id}]`
      );
    }
  };

  return {
    handlePushEvent
  };
};
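The signature check above is standard webhook HMAC verification. A minimal standalone sketch of the same computation (the helper name is invented; unlike the service's plain string comparison, this version uses a timing-safe compare):

import crypto from "crypto";

// Recompute the HMAC-SHA256 of the raw body with the stored webhook secret and
// compare it to the hex signature Bitbucket sent. The length guard is required
// because crypto.timingSafeEqual throws on buffers of different lengths.
const verifyBitbucketSignature = (bodyString: string, webhookSecret: string, receivedSignature: string) => {
  const calculated = crypto.createHmac("sha256", webhookSecret).update(bodyString).digest("hex");
  const a = Buffer.from(calculated, "hex");
  const b = Buffer.from(receivedSignature, "hex");
  return a.length === b.length && crypto.timingSafeEqual(a, b);
};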
@ -0,0 +1,85 @@
import { z } from "zod";

import { SecretScanningDataSource } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
import { TBitbucketConnection } from "@app/services/app-connection/bitbucket";

import {
  BitbucketDataSourceCredentialsSchema,
  BitbucketDataSourceListItemSchema,
  BitbucketDataSourceSchema,
  BitbucketFindingSchema,
  CreateBitbucketDataSourceSchema
} from "./bitbucket-secret-scanning-schemas";

export type TBitbucketDataSource = z.infer<typeof BitbucketDataSourceSchema>;

export type TBitbucketDataSourceInput = z.infer<typeof CreateBitbucketDataSourceSchema>;

export type TBitbucketDataSourceListItem = z.infer<typeof BitbucketDataSourceListItemSchema>;

export type TBitbucketDataSourceCredentials = z.infer<typeof BitbucketDataSourceCredentialsSchema>;

export type TBitbucketFinding = z.infer<typeof BitbucketFindingSchema>;

export type TBitbucketDataSourceWithConnection = TBitbucketDataSource & {
  connection: TBitbucketConnection;
};

export type TBitbucketPushEventRepository = {
  full_name: string;
  name: string;
  workspace: {
    slug: string;
    uuid: string;
  };
  uuid: string;
};

export type TBitbucketPushEventCommit = {
  hash: string;
  message: string;
  author: {
    raw: string;
    user?: {
      display_name: string;
      uuid: string;
      nickname: string;
    };
  };
  date: string;
};

export type TBitbucketPushEventChange = {
  new?: {
    name: string;
    type: string;
  };
  old?: {
    name: string;
    type: string;
  };
  created: boolean;
  closed: boolean;
  forced: boolean;
  commits: TBitbucketPushEventCommit[];
};

export type TBitbucketPushEvent = {
  push: {
    changes: TBitbucketPushEventChange[];
  };
  repository: TBitbucketPushEventRepository;
  actor: {
    display_name: string;
    uuid: string;
    nickname: string;
  };
};

export type TQueueBitbucketResourceDiffScan = {
  dataSourceType: SecretScanningDataSource.Bitbucket;
  payload: TBitbucketPushEvent & { dataSourceId: string };
  dataSourceId: string;
  resourceId: string;
  scanId: string;
};
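For orientation, a minimal object satisfying TBitbucketPushEvent — every value below is invented:

const examplePushEvent: TBitbucketPushEvent = {
  push: {
    changes: [
      {
        new: { name: "main", type: "branch" },
        created: false,
        closed: false,
        forced: false,
        commits: [
          {
            hash: "a1b2c3d",
            message: "update config",
            author: { raw: "Jane Doe <jane@example.com>" },
            date: "2025-01-01T00:00:00Z"
          }
        ]
      }
    ]
  },
  repository: {
    full_name: "acme/widgets",
    name: "widgets",
    workspace: { slug: "acme", uuid: "{workspace-uuid}" },
    uuid: "{repo-uuid}"
  },
  actor: { display_name: "Jane Doe", uuid: "{actor-uuid}", nickname: "jane" }
};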
@ -0,0 +1,3 @@
export * from "./bitbucket-secret-scanning-constants";
export * from "./bitbucket-secret-scanning-schemas";
export * from "./bitbucket-secret-scanning-types";
@ -19,18 +19,23 @@ import {
  TSecretScanningFactoryGetFullScanPath,
  TSecretScanningFactoryInitialize,
  TSecretScanningFactoryListRawResources,
  TSecretScanningFactoryPostInitialization
  TSecretScanningFactoryPostInitialization,
  TSecretScanningFactoryTeardown
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-types";
import { getConfig } from "@app/lib/config/env";
import { BadRequestError } from "@app/lib/errors";
import { titleCaseToCamelCase } from "@app/lib/fn";
import { GitHubRepositoryRegex } from "@app/lib/regex";
import { BasicRepositoryRegex } from "@app/lib/regex";
import { listGitHubRadarRepositories, TGitHubRadarConnection } from "@app/services/app-connection/github-radar";

import { TGitHubDataSourceWithConnection, TQueueGitHubResourceDiffScan } from "./github-secret-scanning-types";
import {
  TGitHubDataSourceInput,
  TGitHubDataSourceWithConnection,
  TQueueGitHubResourceDiffScan
} from "./github-secret-scanning-types";

export const GitHubSecretScanningFactory = () => {
  const initialize: TSecretScanningFactoryInitialize<TGitHubRadarConnection> = async (
  const initialize: TSecretScanningFactoryInitialize<TGitHubDataSourceInput, TGitHubRadarConnection> = async (
    { connection, secretScanningV2DAL },
    callback
  ) => {
@ -51,10 +56,17 @@ export const GitHubSecretScanningFactory = () => {
    });
  };

  const postInitialization: TSecretScanningFactoryPostInitialization<TGitHubRadarConnection> = async () => {
  const postInitialization: TSecretScanningFactoryPostInitialization<
    TGitHubDataSourceInput,
    TGitHubRadarConnection
  > = async () => {
    // no post-initialization required
  };

  const teardown: TSecretScanningFactoryTeardown<TGitHubDataSourceWithConnection> = async () => {
    // no termination required
  };

  const listRawResources: TSecretScanningFactoryListRawResources<TGitHubDataSourceWithConnection> = async (
    dataSource
  ) => {
@ -107,7 +119,7 @@ export const GitHubSecretScanningFactory = () => {

    const repoPath = join(tempFolder, "repo.git");

    if (!GitHubRepositoryRegex.test(resourceName)) {
    if (!BasicRepositoryRegex.test(resourceName)) {
      throw new Error("Invalid GitHub repository name");
    }

@ -225,6 +237,7 @@ export const GitHubSecretScanningFactory = () => {
    listRawResources,
    getFullScanPath,
    getDiffScanResourcePayload,
    getDiffScanFindingsPayload
    getDiffScanFindingsPayload,
    teardown
  };
};
@ -12,7 +12,7 @@ import {
  GitRepositoryScanFindingDetailsSchema
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-schemas";
import { SecretScanningDataSources } from "@app/lib/api-docs";
import { GitHubRepositoryRegex } from "@app/lib/regex";
import { BasicRepositoryRegex } from "@app/lib/regex";
import { AppConnection } from "@app/services/app-connection/app-connection-enums";

export const GitHubDataSourceConfigSchema = z.object({
@ -22,7 +22,7 @@ export const GitHubDataSourceConfigSchema = z.object({
      .string()
      .min(1)
      .max(256)
      .refine((value) => value === "*" || GitHubRepositoryRegex.test(value), "Invalid repository name format")
      .refine((value) => value === "*" || BasicRepositoryRegex.test(value), "Invalid repository name format")
    )
    .nonempty("One or more repositories required")
    .max(100, "Cannot configure more than 100 repositories")
@ -1,5 +1,6 @@
export enum SecretScanningDataSource {
  GitHub = "github"
  GitHub = "github",
  Bitbucket = "bitbucket"
}

export enum SecretScanningScanStatus {
@ -1,19 +1,23 @@
import { BitbucketSecretScanningFactory } from "@app/ee/services/secret-scanning-v2/bitbucket/bitbucket-secret-scanning-factory";
import { GitHubSecretScanningFactory } from "@app/ee/services/secret-scanning-v2/github/github-secret-scanning-factory";

import { SecretScanningDataSource } from "./secret-scanning-v2-enums";
import {
  TQueueSecretScanningResourceDiffScan,
  TSecretScanningDataSourceCredentials,
  TSecretScanningDataSourceInput,
  TSecretScanningDataSourceWithConnection,
  TSecretScanningFactory
} from "./secret-scanning-v2-types";

type TSecretScanningFactoryImplementation = TSecretScanningFactory<
  TSecretScanningDataSourceWithConnection,
  TSecretScanningDataSourceCredentials,
  TQueueSecretScanningResourceDiffScan["payload"]
  TQueueSecretScanningResourceDiffScan["payload"],
  TSecretScanningDataSourceInput,
  TSecretScanningDataSourceCredentials
>;

export const SECRET_SCANNING_FACTORY_MAP: Record<SecretScanningDataSource, TSecretScanningFactoryImplementation> = {
  [SecretScanningDataSource.GitHub]: GitHubSecretScanningFactory as TSecretScanningFactoryImplementation
  [SecretScanningDataSource.GitHub]: GitHubSecretScanningFactory as TSecretScanningFactoryImplementation,
  [SecretScanningDataSource.Bitbucket]: BitbucketSecretScanningFactory as TSecretScanningFactoryImplementation
};
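A hypothetical dispatch sketch showing how this map is meant to be consumed — resolve the zero-argument factory for a data source's type, then drive it through the shared interface (listResourcesFor and dataSource are illustrative, not part of the diff):

// Sketch only: resolve the implementation by enum key and invoke it; the
// result conforms to TSecretScanningFactoryImplementation regardless of type.
const listResourcesFor = async (dataSource: TSecretScanningDataSourceWithConnection) => {
  const factory = SECRET_SCANNING_FACTORY_MAP[dataSource.type]();
  return factory.listRawResources(dataSource);
};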
@ -4,6 +4,7 @@ import RE2 from "re2";

import { readFindingsFile } from "@app/ee/services/secret-scanning/secret-scanning-queue/secret-scanning-fns";
import { SecretMatch } from "@app/ee/services/secret-scanning/secret-scanning-queue/secret-scanning-queue-types";
import { BITBUCKET_SECRET_SCANNING_DATA_SOURCE_LIST_OPTION } from "@app/ee/services/secret-scanning-v2/bitbucket";
import { GITHUB_SECRET_SCANNING_DATA_SOURCE_LIST_OPTION } from "@app/ee/services/secret-scanning-v2/github";
import { titleCaseToCamelCase } from "@app/lib/fn";

@ -11,7 +12,8 @@ import { SecretScanningDataSource, SecretScanningFindingSeverity } from "./secre
import { TCloneRepository, TGetFindingsPayload, TSecretScanningDataSourceListItem } from "./secret-scanning-v2-types";

const SECRET_SCANNING_SOURCE_LIST_OPTIONS: Record<SecretScanningDataSource, TSecretScanningDataSourceListItem> = {
  [SecretScanningDataSource.GitHub]: GITHUB_SECRET_SCANNING_DATA_SOURCE_LIST_OPTION
  [SecretScanningDataSource.GitHub]: GITHUB_SECRET_SCANNING_DATA_SOURCE_LIST_OPTION,
  [SecretScanningDataSource.Bitbucket]: BITBUCKET_SECRET_SCANNING_DATA_SOURCE_LIST_OPTION
};

export const listSecretScanningDataSourceOptions = () => {
@ -2,13 +2,16 @@ import { SecretScanningDataSource } from "@app/ee/services/secret-scanning-v2/se
import { AppConnection } from "@app/services/app-connection/app-connection-enums";

export const SECRET_SCANNING_DATA_SOURCE_NAME_MAP: Record<SecretScanningDataSource, string> = {
  [SecretScanningDataSource.GitHub]: "GitHub"
  [SecretScanningDataSource.GitHub]: "GitHub",
  [SecretScanningDataSource.Bitbucket]: "Bitbucket"
};

export const SECRET_SCANNING_DATA_SOURCE_CONNECTION_MAP: Record<SecretScanningDataSource, AppConnection> = {
  [SecretScanningDataSource.GitHub]: AppConnection.GitHubRadar
  [SecretScanningDataSource.GitHub]: AppConnection.GitHubRadar,
  [SecretScanningDataSource.Bitbucket]: AppConnection.Bitbucket
};

export const AUTO_SYNC_DESCRIPTION_HELPER: Record<SecretScanningDataSource, { verb: string; noun: string }> = {
  [SecretScanningDataSource.GitHub]: { verb: "push", noun: "repositories" }
  [SecretScanningDataSource.GitHub]: { verb: "push", noun: "repositories" },
  [SecretScanningDataSource.Bitbucket]: { verb: "push", noun: "repositories" }
};
@ -318,7 +318,7 @@ export const secretScanningV2QueueServiceFactory = async ({
      },
      {
        batchSize: 1,
        workerCount: 20,
        workerCount: 2,
        pollingIntervalSeconds: 1
      }
    );
@ -539,7 +539,7 @@ export const secretScanningV2QueueServiceFactory = async ({
      },
      {
        batchSize: 1,
        workerCount: 20,
        workerCount: 2,
        pollingIntervalSeconds: 1
      }
    );
@ -613,7 +613,7 @@ export const secretScanningV2QueueServiceFactory = async ({
      },
      {
        batchSize: 1,
        workerCount: 5,
        workerCount: 2,
        pollingIntervalSeconds: 1
      }
    );
@ -19,8 +19,7 @@ export const BaseSecretScanningDataSourceSchema = ({
    // unique to provider
    type: true,
    connectionId: true,
    config: true,
    encryptedCredentials: true
    config: true
  }).extend({
    type: z.literal(type),
    connectionId: isConnectionRequired ? z.string().uuid() : z.null(),
@ -30,6 +30,8 @@ import {
  TFindSecretScanningDataSourceByNameDTO,
  TListSecretScanningDataSourcesByProjectId,
  TSecretScanningDataSource,
  TSecretScanningDataSourceCredentials,
  TSecretScanningDataSourceInput,
  TSecretScanningDataSourceWithConnection,
  TSecretScanningDataSourceWithDetails,
  TSecretScanningFinding,
@ -49,6 +51,7 @@ import { TAppConnection } from "@app/services/app-connection/app-connection-type
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
import { KmsDataKey } from "@app/services/kms/kms-types";

import { bitbucketSecretScanningService } from "./bitbucket/bitbucket-secret-scanning-service";
import { TSecretScanningV2DALFactory } from "./secret-scanning-v2-dal";
import { TSecretScanningV2QueueServiceFactory } from "./secret-scanning-v2-queue";

@ -256,7 +259,7 @@ export const secretScanningV2ServiceFactory = ({
    try {
      const createdDataSource = await factory.initialize(
        {
          payload,
          payload: payload as TSecretScanningDataSourceInput,
          connection: connection as TSecretScanningDataSourceWithConnection["connection"],
          secretScanningV2DAL
        },
@ -287,7 +290,7 @@ export const secretScanningV2ServiceFactory = ({
      );

      await factory.postInitialization({
        payload,
        payload: payload as TSecretScanningDataSourceInput,
        connection: connection as TSecretScanningDataSourceWithConnection["connection"],
        dataSourceId: dataSource.id,
        credentials
@ -398,7 +401,6 @@ export const secretScanningV2ServiceFactory = ({
      actorId: actor.id,
      actorAuthMethod: actor.authMethod,
      actorOrgId: actor.orgId,

      projectId: dataSource.projectId
    });

@ -412,7 +414,36 @@ export const secretScanningV2ServiceFactory = ({
        message: `Secret Scanning Data Source with ID "${dataSourceId}" is not configured for ${SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type]}`
      });

    // TODO: clean up webhooks
    const factory = SECRET_SCANNING_FACTORY_MAP[type]();

    let connection: TAppConnection | null = null;
    if (dataSource.connection) {
      connection = await decryptAppConnection(dataSource.connection, kmsService);
    }

    let credentials: TSecretScanningDataSourceCredentials | undefined;

    if (dataSource.encryptedCredentials) {
      const { decryptor } = await kmsService.createCipherPairWithDataKey({
        type: KmsDataKey.SecretManager,
        projectId: dataSource.projectId
      });

      credentials = JSON.parse(
        decryptor({
          cipherTextBlob: dataSource.encryptedCredentials
        }).toString()
      ) as TSecretScanningDataSourceCredentials;
    }

    await factory.teardown({
      dataSource: {
        ...dataSource,
        // @ts-expect-error currently we don't have a null connection data source
        connection
      },
      credentials
    });

    await secretScanningV2DAL.dataSources.deleteById(dataSourceId);

@ -869,6 +900,7 @@ export const secretScanningV2ServiceFactory = ({
    updateSecretScanningFindingById,
    findSecretScanningConfigByProjectId,
    upsertSecretScanningConfig,
    github: githubSecretScanningService(secretScanningV2DAL, secretScanningV2Queue)
    github: githubSecretScanningService(secretScanningV2DAL, secretScanningV2Queue),
    bitbucket: bitbucketSecretScanningService(secretScanningV2DAL, secretScanningV2Queue, kmsService)
  };
};
@ -4,6 +4,15 @@
  TSecretScanningResources,
  TSecretScanningScans
} from "@app/db/schemas";
import {
  TBitbucketDataSource,
  TBitbucketDataSourceCredentials,
  TBitbucketDataSourceInput,
  TBitbucketDataSourceListItem,
  TBitbucketDataSourceWithConnection,
  TBitbucketFinding,
  TQueueBitbucketResourceDiffScan
} from "@app/ee/services/secret-scanning-v2/bitbucket";
import {
  TGitHubDataSource,
  TGitHubDataSourceInput,
@ -19,7 +28,7 @@ import {
  SecretScanningScanStatus
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";

export type TSecretScanningDataSource = TGitHubDataSource;
export type TSecretScanningDataSource = TGitHubDataSource | TBitbucketDataSource;

export type TSecretScanningDataSourceWithDetails = TSecretScanningDataSource & {
  lastScannedAt?: Date | null;
@ -41,13 +50,17 @@ export type TSecretScanningScanWithDetails = TSecretScanningScans & {
  resourceName: string;
};

export type TSecretScanningDataSourceWithConnection = TGitHubDataSourceWithConnection;
export type TSecretScanningDataSourceWithConnection =
  | TGitHubDataSourceWithConnection
  | TBitbucketDataSourceWithConnection;

export type TSecretScanningDataSourceInput = TGitHubDataSourceInput;
export type TSecretScanningDataSourceInput = TGitHubDataSourceInput | TBitbucketDataSourceInput;

export type TSecretScanningDataSourceListItem = TGitHubDataSourceListItem;
export type TSecretScanningDataSourceListItem = TGitHubDataSourceListItem | TBitbucketDataSourceListItem;

export type TSecretScanningFinding = TGitHubFinding;
export type TSecretScanningDataSourceCredentials = TBitbucketDataSourceCredentials | undefined;

export type TSecretScanningFinding = TGitHubFinding | TBitbucketFinding;

export type TListSecretScanningDataSourcesByProjectId = {
  projectId: string;
@ -99,7 +112,7 @@ export type TQueueSecretScanningDataSourceFullScan = {
  scanId: string;
};

export type TQueueSecretScanningResourceDiffScan = TQueueGitHubResourceDiffScan;
export type TQueueSecretScanningResourceDiffScan = TQueueGitHubResourceDiffScan | TQueueBitbucketResourceDiffScan;

export type TQueueSecretScanningSendNotification = {
  dataSource: TSecretScanningDataSources;
@ -138,11 +151,12 @@ export type TSecretScanningDataSourceRaw = NonNullable<
>;

export type TSecretScanningFactoryInitialize<
  P extends TSecretScanningDataSourceInput,
  T extends TSecretScanningDataSourceWithConnection["connection"] | undefined = undefined,
  C extends TSecretScanningDataSourceCredentials = undefined
> = (
  params: {
    payload: TCreateSecretScanningDataSourceDTO;
    payload: P;
    connection: T;
    secretScanningV2DAL: TSecretScanningV2DALFactory;
  },
@ -150,24 +164,27 @@ export type TSecretScanningFactoryInitialize<
) => Promise<TSecretScanningDataSourceRaw>;

export type TSecretScanningFactoryPostInitialization<
  P extends TSecretScanningDataSourceInput,
  T extends TSecretScanningDataSourceWithConnection["connection"] | undefined = undefined,
  C extends TSecretScanningDataSourceCredentials = undefined
> = (params: {
  payload: TCreateSecretScanningDataSourceDTO;
  connection: T;
  credentials: C;
  dataSourceId: string;
}) => Promise<void>;
> = (params: { payload: P; connection: T; credentials: C; dataSourceId: string }) => Promise<void>;

export type TSecretScanningFactoryTeardown<
  T extends TSecretScanningDataSourceWithConnection,
  C extends TSecretScanningDataSourceCredentials = undefined
> = (params: { dataSource: T; credentials: C }) => Promise<void>;

export type TSecretScanningFactory<
  T extends TSecretScanningDataSourceWithConnection,
  C extends TSecretScanningDataSourceCredentials,
  P extends TQueueSecretScanningResourceDiffScan["payload"]
  P extends TQueueSecretScanningResourceDiffScan["payload"],
  I extends TSecretScanningDataSourceInput,
  C extends TSecretScanningDataSourceCredentials | undefined = undefined
> = () => {
  listRawResources: TSecretScanningFactoryListRawResources<T>;
  getFullScanPath: TSecretScanningFactoryGetFullScanPath<T>;
  initialize: TSecretScanningFactoryInitialize<T["connection"] | undefined, C>;
  postInitialization: TSecretScanningFactoryPostInitialization<T["connection"] | undefined, C>;
  initialize: TSecretScanningFactoryInitialize<I, T["connection"] | undefined, C>;
  postInitialization: TSecretScanningFactoryPostInitialization<I, T["connection"] | undefined, C>;
  teardown: TSecretScanningFactoryTeardown<T, C>;
  getDiffScanResourcePayload: TSecretScanningFactoryGetDiffScanResourcePayload<P>;
  getDiffScanFindingsPayload: TSecretScanningFactoryGetDiffScanFindingsPayload<T, P>;
};
@ -185,5 +202,3 @@ export type TUpsertSecretScanningConfigDTO = {
  projectId: string;
  content: string | null;
};

export type TSecretScanningDataSourceCredentials = undefined;
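The net effect of this refactor is that each factory is now parameterized over its input type I and credentials type C in addition to the connection. A self-contained analogue (all names invented for illustration, not the repo's actual types) of how those parameters thread through a factory:

// Self-contained analogue of the factory generics above: each method is
// parameterized over the provider's input (I) and credentials (C) types, so a
// single Record<Source, Factory> map can hold heterogeneous providers.
type Initialize<I, C> = (input: I) => Promise<{ credentials: C }>;
type Teardown<C> = (credentials: C) => Promise<void>;

type Factory<I, C> = () => {
  initialize: Initialize<I, C>;
  teardown: Teardown<C>;
};

type BitbucketInput = { workspaceSlug: string };
type BitbucketCreds = { webhookId: string; webhookSecret: string };

const bitbucketFactory: Factory<BitbucketInput, BitbucketCreds> = () => ({
  initialize: async ({ workspaceSlug }) => ({
    credentials: { webhookId: `wh-${workspaceSlug}`, webhookSecret: "generated-secret" }
  }),
  teardown: async ({ webhookId }) => {
    // e.g. delete the webhook identified by webhookId
  }
});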
@ -1,7 +1,22 @@
import { z } from "zod";

import { BitbucketDataSourceSchema, BitbucketFindingSchema } from "@app/ee/services/secret-scanning-v2/bitbucket";
import { GitHubDataSourceSchema, GitHubFindingSchema } from "@app/ee/services/secret-scanning-v2/github";

export const SecretScanningDataSourceSchema = z.discriminatedUnion("type", [GitHubDataSourceSchema]);
export const SecretScanningDataSourceSchema = z.discriminatedUnion("type", [
  GitHubDataSourceSchema,
  BitbucketDataSourceSchema
]);

export const SecretScanningFindingSchema = z.discriminatedUnion("resourceType", [GitHubFindingSchema]);
export const SecretScanningFindingSchema = z.discriminatedUnion("dataSourceType", [
  GitHubFindingSchema.describe(
    JSON.stringify({
      title: "GitHub"
    })
  ),
  BitbucketFindingSchema.describe(
    JSON.stringify({
      title: "Bitbucket"
    })
  )
]);
@ -66,7 +66,10 @@ export enum ApiDocsTags
  KmsKeys = "KMS Keys",
  KmsEncryption = "KMS Encryption",
  KmsSigning = "KMS Signing",
  SecretScanning = "Secret Scanning"
  SecretScanning = "Secret Scanning",
  OidcSso = "OIDC SSO",
  SamlSso = "SAML SSO",
  LdapSso = "LDAP SSO"
}

export const GROUPS = {
@ -2268,6 +2271,14 @@ export const AppConnections = {
    accessToken: "The Access Token used to access GitLab.",
    code: "The OAuth code to use to connect with GitLab.",
    accessTokenType: "The type of token used to connect with GitLab."
  },
  BITBUCKET: {
    email: "The email used to access Bitbucket.",
    apiToken: "The API token used to access Bitbucket."
  },
  ZABBIX: {
    apiToken: "The API Token used to access Zabbix.",
    instanceUrl: "The Zabbix instance URL to connect with."
  }
}
};
@ -2457,6 +2468,12 @@ export const SecretSyncs = {
  CLOUDFLARE_PAGES: {
    projectName: "The name of the Cloudflare Pages project to sync secrets to.",
    environment: "The environment of the Cloudflare Pages project to sync secrets to."
  },
  ZABBIX: {
    scope: "The Zabbix scope that secrets should be synced to.",
    hostId: "The ID of the Zabbix host to sync secrets to.",
    hostName: "The name of the Zabbix host to sync secrets to.",
    macroType: "The type of macro to sync secrets to. (0: Text, 1: Secret)"
  }
}
};
@ -2628,6 +2645,10 @@ export const SecretScanningDataSources = {
  CONFIG: {
    GITHUB: {
      includeRepos: 'The repositories to include when scanning. Defaults to all repositories (["*"]).'
    },
    BITBUCKET: {
      workspaceSlug: "The workspace to scan.",
      includeRepos: 'The repositories to include when scanning. Defaults to all repositories (["*"]).'
    }
  }
};
@ -2652,3 +2673,113 @@ export const SecretScanningConfigs = {
    content: "The contents of the Secret Scanning Configuration file."
  }
};

export const OidcSSo = {
  GET_CONFIG: {
    organizationId: "The ID of the organization to get the OIDC config for."
  },
  UPDATE_CONFIG: {
    organizationId: "The ID of the organization to update the OIDC config for.",
    allowedEmailDomains:
      "A list of allowed email domains that users can use to authenticate with. This field is comma separated. Example: 'example.com,acme.com'",
    discoveryURL: "The URL of the OIDC discovery endpoint.",
    configurationType: "The configuration type to use for the OIDC configuration.",
    issuer:
      "The issuer for the OIDC configuration. This is only supported when the OIDC configuration type is set to 'custom'.",
    authorizationEndpoint:
      "The endpoint to use for OIDC authorization. This is only supported when the OIDC configuration type is set to 'custom'.",
    jwksUri: "The URL of the OIDC JWKS endpoint.",
    tokenEndpoint: "The token endpoint to use for OIDC token exchange.",
    userinfoEndpoint: "The userinfo endpoint to get user information from the OIDC provider.",
    clientId: "The client ID to use for OIDC authentication.",
    clientSecret: "The client secret to use for OIDC authentication.",
    isActive: "Whether to enable or disable this OIDC configuration.",
    manageGroupMemberships:
      "Whether to manage group memberships for the OIDC configuration. If enabled, users will automatically be assigned groups when they sign in, based on which groups they are a member of in the OIDC provider.",
    jwtSignatureAlgorithm: "The algorithm to use for JWT signature verification."
  },
  CREATE_CONFIG: {
    organizationId: "The ID of the organization to create the OIDC config for.",
    allowedEmailDomains:
      "A list of allowed email domains that users can use to authenticate with. This field is comma separated.",
    discoveryURL: "The URL of the OIDC discovery endpoint.",
    configurationType: "The configuration type to use for the OIDC configuration.",
    issuer:
      "The issuer for the OIDC configuration. This is only supported when the OIDC configuration type is set to 'custom'.",
    authorizationEndpoint:
      "The authorization endpoint to use for OIDC authorization. This is only supported when the OIDC configuration type is set to 'custom'.",
    jwksUri: "The URL of the OIDC JWKS endpoint.",
    tokenEndpoint: "The token endpoint to use for OIDC token exchange.",
    userinfoEndpoint: "The userinfo endpoint to get user information from the OIDC provider.",
    clientId: "The client ID to use for OIDC authentication.",
    clientSecret: "The client secret to use for OIDC authentication.",
    isActive: "Whether to enable or disable this OIDC configuration.",
    manageGroupMemberships:
      "Whether to manage group memberships for the OIDC configuration. If enabled, users will automatically be assigned groups when they sign in, based on which groups they are a member of in the OIDC provider.",
    jwtSignatureAlgorithm: "The algorithm to use for JWT signature verification."
  }
};

export const SamlSso = {
  GET_CONFIG: {
    organizationId: "The ID of the organization to get the SAML config for."
  },
  UPDATE_CONFIG: {
    organizationId: "The ID of the organization to update the SAML config for.",
    authProvider: "Authentication provider to use for SAML authentication.",
    isActive: "Whether to enable or disable this SAML configuration.",
    entryPoint:
      "The entry point for the SAML authentication. This is the URL that the user will be redirected to after they have authenticated with the SAML provider.",
    issuer: "The SAML provider issuer URL or entity ID.",
    cert: "The certificate to use for SAML authentication."
  },
  CREATE_CONFIG: {
    organizationId: "The ID of the organization to create the SAML config for.",
    authProvider: "Authentication provider to use for SAML authentication.",
    isActive: "Whether to enable or disable this SAML configuration.",
    entryPoint:
      "The entry point for the SAML authentication. This is the URL that the user will be redirected to after they have authenticated with the SAML provider.",
    issuer: "The SAML provider issuer URL or entity ID.",
    cert: "The certificate to use for SAML authentication."
  }
};

export const LdapSso = {
  GET_CONFIG: {
    organizationId: "The ID of the organization to get the LDAP config for."
  },
  CREATE_CONFIG: {
    organizationId: "The ID of the organization to create the LDAP config for.",
    isActive: "Whether to enable or disable this LDAP configuration.",
    url: "The LDAP server to connect to such as `ldap://ldap.your-org.com`, `ldaps://ldap.myorg.com:636` (for connection over SSL/TLS), etc.",
    bindDN:
      "The distinguished name of the object to bind when performing the user search such as `cn=infisical,ou=Users,dc=acme,dc=com`",
    bindPass: "The password to use along with Bind DN when performing the user search.",
    searchBase: "The base DN to use for the user search such as `ou=Users,dc=acme,dc=com`",
    uniqueUserAttribute:
      "The attribute to use as the unique identifier of LDAP users such as `sAMAccountName`, `cn`, `uid`, `objectGUID`. If left blank, defaults to uidNumber",
    searchFilter:
      "The template used to construct the LDAP user search filter such as `(uid={{username}})` uses literal `{{username}}` to have the given username used in the search. The default is `(uid={{username}})` which is compatible with several common directory schemas.",
    groupSearchBase: "LDAP search base to use for group membership search such as `ou=Groups,dc=acme,dc=com`",
    groupSearchFilter:
      "The template used when constructing the group membership query such as `(&(objectClass=posixGroup)(memberUid={{.Username}}))`. The template can access the following context variables: `[UserDN, UserName]`. The default is `(|(memberUid={{.Username}})(member={{.UserDN}})(uniqueMember={{.UserDN}}))` which is compatible with several common directory schemas.",
    caCert: "The CA certificate to use when verifying the LDAP server certificate."
  },
  UPDATE_CONFIG: {
    organizationId: "The ID of the organization to update the LDAP config for.",
    isActive: "Whether to enable or disable this LDAP configuration.",
    url: "The LDAP server to connect to such as `ldap://ldap.your-org.com`, `ldaps://ldap.myorg.com:636` (for connection over SSL/TLS), etc.",
    bindDN:
      "The distinguished name of the object to bind when performing the user search such as `cn=infisical,ou=Users,dc=acme,dc=com`",
    bindPass: "The password to use along with Bind DN when performing the user search.",
    uniqueUserAttribute:
      "The attribute to use as the unique identifier of LDAP users such as `sAMAccountName`, `cn`, `uid`, `objectGUID`. If left blank, defaults to uidNumber",
    searchFilter:
      "The template used to construct the LDAP user search filter such as `(uid={{username}})` uses literal `{{username}}` to have the given username used in the search. The default is `(uid={{username}})` which is compatible with several common directory schemas.",
    searchBase: "The base DN to use for the user search such as `ou=Users,dc=acme,dc=com`",
    groupSearchBase: "LDAP search base to use for group membership search such as `ou=Groups,dc=acme,dc=com`",
    groupSearchFilter:
      "The template used when constructing the group membership query such as `(&(objectClass=posixGroup)(memberUid={{.Username}}))`. The template can access the following context variables: `[UserDN, UserName]`. The default is `(|(memberUid={{.Username}})(member={{.UserDN}})(uniqueMember={{.UserDN}}))` which is compatible with several common directory schemas.",
    caCert: "The CA certificate to use when verifying the LDAP server certificate."
  }
};
@ -2,6 +2,7 @@ import { z } from "zod";

import { QueueWorkerProfile } from "@app/lib/types";

import { BadRequestError } from "../errors";
import { removeTrailingSlash } from "../fn";
import { CustomLogger } from "../logger/logger";
import { zpStr } from "../zod";
@ -341,8 +342,11 @@ const envSchema = z

export type TEnvConfig = Readonly<z.infer<typeof envSchema>>;
let envCfg: TEnvConfig;
let originalEnvConfig: TEnvConfig;

export const getConfig = () => envCfg;
export const getOriginalConfig = () => originalEnvConfig;

// cannot import singleton logger directly as it needs config to load various transport
export const initEnvConfig = (logger?: CustomLogger) => {
  const parsedEnv = envSchema.safeParse(process.env);
@ -352,10 +356,115 @@ export const initEnvConfig = (logger?: CustomLogger) => {
    process.exit(-1);
  }

  envCfg = Object.freeze(parsedEnv.data);
  const config = Object.freeze(parsedEnv.data);
  envCfg = config;

  if (!originalEnvConfig) {
    originalEnvConfig = config;
  }

  return envCfg;
};

// A list of environment variables that can be overwritten
export const overwriteSchema: {
  [key: string]: {
    name: string;
    fields: { key: keyof TEnvConfig; description?: string }[];
  };
} = {
  azure: {
    name: "Azure",
    fields: [
      {
        key: "INF_APP_CONNECTION_AZURE_CLIENT_ID",
        description: "The Application (Client) ID of your Azure application."
      },
      {
        key: "INF_APP_CONNECTION_AZURE_CLIENT_SECRET",
        description: "The Client Secret of your Azure application."
      }
    ]
  },
  google_sso: {
    name: "Google SSO",
    fields: [
      {
        key: "CLIENT_ID_GOOGLE_LOGIN",
        description: "The Client ID of your GCP OAuth2 application."
      },
      {
        key: "CLIENT_SECRET_GOOGLE_LOGIN",
        description: "The Client Secret of your GCP OAuth2 application."
      }
    ]
  },
  github_sso: {
    name: "GitHub SSO",
    fields: [
      {
        key: "CLIENT_ID_GITHUB_LOGIN",
        description: "The Client ID of your GitHub OAuth application."
      },
      {
        key: "CLIENT_SECRET_GITHUB_LOGIN",
        description: "The Client Secret of your GitHub OAuth application."
      }
    ]
  },
  gitlab_sso: {
    name: "GitLab SSO",
    fields: [
      {
        key: "CLIENT_ID_GITLAB_LOGIN",
        description: "The Client ID of your GitLab application."
      },
      {
        key: "CLIENT_SECRET_GITLAB_LOGIN",
        description: "The Secret of your GitLab application."
      },
      {
        key: "CLIENT_GITLAB_LOGIN_URL",
        description:
          "The URL of your self-hosted instance of GitLab where the OAuth application is registered. If no URL is passed in, this will default to https://gitlab.com."
      }
    ]
  }
};

export const overridableKeys = new Set(
  Object.values(overwriteSchema).flatMap(({ fields }) => fields.map(({ key }) => key))
);

export const validateOverrides = (config: Record<string, string>) => {
  const allowedOverrides = Object.fromEntries(
    Object.entries(config).filter(([key]) => overridableKeys.has(key as keyof z.input<typeof envSchema>))
  );

  const tempEnv: Record<string, unknown> = { ...process.env, ...allowedOverrides };
  const parsedResult = envSchema.safeParse(tempEnv);

  if (!parsedResult.success) {
    const errorDetails = parsedResult.error.issues
      .map((issue) => `Key: "${issue.path.join(".")}", Error: ${issue.message}`)
      .join("\n");
    throw new BadRequestError({ message: errorDetails });
  }
};

export const overrideEnvConfig = (config: Record<string, string>) => {
  const allowedOverrides = Object.fromEntries(
    Object.entries(config).filter(([key]) => overridableKeys.has(key as keyof z.input<typeof envSchema>))
  );

  const tempEnv: Record<string, unknown> = { ...process.env, ...allowedOverrides };
  const parsedResult = envSchema.safeParse(tempEnv);

  if (parsedResult.success) {
    envCfg = Object.freeze(parsedResult.data);
  }
};
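A short usage sketch of the two override helpers defined above — validate first so a bad value surfaces as a BadRequestError, then apply (the override value is invented):

// Hypothetical admin-panel flow: the caller validates the proposed overrides
// against the full env schema before swapping the frozen config in place.
const overrides = { CLIENT_ID_GOOGLE_LOGIN: "new-client-id.apps.googleusercontent.com" };

validateOverrides(overrides); // throws BadRequestError if the merged env fails schema parsing
overrideEnvConfig(overrides); // getConfig() now reflects the override; getOriginalConfig() does not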

export const formatSmtpConfig = () => {
  const tlsOptions: {
    rejectUnauthorized: boolean;
@ -10,4 +10,4 @@ export const UserPrincipalNameRegex = new RE2(/^[a-zA-Z0-9._%+-]+@[a-zA-Z0-9._-]

export const LdapUrlRegex = new RE2(/^ldaps?:\/\//);

export const GitHubRepositoryRegex = new RE2(/^[a-zA-Z0-9._-]+\/[a-zA-Z0-9._-]+$/);
export const BasicRepositoryRegex = new RE2(/^[a-zA-Z0-9._-]+\/[a-zA-Z0-9._-]+$/);
@ -1,7 +1,9 @@
import type { EmitterWebhookEventName } from "@octokit/webhooks/dist-types/types";
import { PushEvent } from "@octokit/webhooks-types";
import { Probot } from "probot";
import { z } from "zod";

import { TBitbucketPushEvent } from "@app/ee/services/secret-scanning-v2/bitbucket/bitbucket-secret-scanning-types";
import { getConfig } from "@app/lib/config/env";
import { logger } from "@app/lib/logger";
import { writeLimit } from "@app/server/config/rateLimiter";
@ -63,4 +65,52 @@ export const registerSecretScanningV2Webhooks = async (server: FastifyZodProvide
      return res.send("ok");
    }
  });

  // bitbucket push event webhook
  server.route({
    method: "POST",
    url: "/bitbucket",
    schema: {
      querystring: z.object({
        dataSourceId: z.string().min(1, { message: "Data Source ID is required" })
      }),
      headers: z
        .object({
          "x-hub-signature": z.string().min(1, { message: "X-Hub-Signature header is required" })
        })
        .passthrough()
    },
    config: {
      rateLimit: writeLimit
    },
    handler: async (req, res) => {
      const { dataSourceId } = req.query;

      // Verify signature
      const signature = req.headers["x-hub-signature"];
      if (!signature) {
        logger.error("Missing X-Hub-Signature header for Bitbucket webhook");
        return res.status(401).send({ message: "Unauthorized: Missing signature" });
      }

      const expectedSignaturePrefix = "sha256=";
      if (!signature.startsWith(expectedSignaturePrefix)) {
        logger.error({ signature }, "Invalid X-Hub-Signature format for Bitbucket webhook");
        return res.status(401).send({ message: "Unauthorized: Invalid signature format" });
      }

      const receivedSignature = signature.substring(expectedSignaturePrefix.length);

      if (!dataSourceId) return res.status(400).send({ message: "Data Source ID is required" });

      await server.services.secretScanningV2.bitbucket.handlePushEvent({
        ...(req.body as TBitbucketPushEvent),
        dataSourceId,
        receivedSignature,
        bodyString: JSON.stringify(req.body)
      });

      return res.send("ok");
    }
  });
};
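A hypothetical client for exercising this route locally: the signature has to be computed over exactly the bytes the server re-serializes, and the endpoint URL below is invented for the sketch:

import crypto from "crypto";

// Sign the serialized body with the data source's webhook secret, then send it
// with the sha256= prefixed header the route expects. Note the service verifies
// against JSON.stringify(req.body), so the body must round-trip through JSON
// with identical key order and whitespace for the signatures to match.
const sendTestWebhook = async (payload: object, webhookSecret: string, dataSourceId: string) => {
  const body = JSON.stringify(payload);
  const signature = crypto.createHmac("sha256", webhookSecret).update(body).digest("hex");

  return fetch(`https://app.example.com/secret-scanning/webhooks/bitbucket?dataSourceId=${dataSourceId}`, {
    method: "POST",
    headers: {
      "content-type": "application/json",
      "x-hub-signature": `sha256=${signature}`
    },
    body
  });
};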
@ -300,6 +300,7 @@ import { injectIdentity } from "../plugins/auth/inject-identity";
import { injectPermission } from "../plugins/auth/inject-permission";
import { injectRateLimits } from "../plugins/inject-rate-limits";
import { registerV1Routes } from "./v1";
import { initializeOauthConfigSync } from "./v1/sso-router";
import { registerV2Routes } from "./v2";
import { registerV3Routes } from "./v3";

@ -1910,6 +1911,7 @@ export const registerRoutes = async (
  await hsmService.startService();

  await telemetryQueue.startTelemetryCheck();
  await telemetryQueue.startAggregatedEventsJob();
  await dailyResourceCleanUp.startCleanUp();
  await dailyExpiringPkiItemAlert.startSendingAlerts();
  await pkiSubscriberQueue.startDailyAutoRenewalJob();
@ -2046,6 +2048,16 @@ export const registerRoutes = async (
    }
  }

  const configSyncJob = await superAdminService.initializeEnvConfigSync();
  if (configSyncJob) {
    cronJobs.push(configSyncJob);
  }

  const oauthConfigSyncJob = await initializeOauthConfigSync();
  if (oauthConfigSyncJob) {
    cronJobs.push(oauthConfigSyncJob);
  }

  server.decorate<FastifyZodProvider["store"]>("store", {
    user: userDAL,
    kmipClient: kmipClientDAL
@ -8,7 +8,7 @@ import {
  SuperAdminSchema,
  UsersSchema
} from "@app/db/schemas";
import { getConfig } from "@app/lib/config/env";
import { getConfig, overridableKeys } from "@app/lib/config/env";
import { BadRequestError } from "@app/lib/errors";
import { invalidateCacheLimit, readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { getTelemetryDistinctId } from "@app/server/lib/telemetry";
@ -42,7 +42,8 @@ export const registerAdminRouter = async (server: FastifyZodProvider) => {
        encryptedGitHubAppConnectionClientSecret: true,
        encryptedGitHubAppConnectionSlug: true,
        encryptedGitHubAppConnectionId: true,
        encryptedGitHubAppConnectionPrivateKey: true
        encryptedGitHubAppConnectionPrivateKey: true,
        encryptedEnvOverrides: true
      }).extend({
        isMigrationModeOn: z.boolean(),
        defaultAuthOrgSlug: z.string().nullable(),
@ -110,11 +111,14 @@ export const registerAdminRouter = async (server: FastifyZodProvider) => {
          .refine((content) => DOMPurify.sanitize(content) === content, {
            message: "Page frame content contains unsafe HTML."
          })
          .optional()
          .optional(),
        envOverrides: z.record(z.enum(Array.from(overridableKeys) as [string, ...string[]]), z.string()).optional()
      }),
      response: {
        200: z.object({
          config: SuperAdminSchema.extend({
          config: SuperAdminSchema.omit({
            encryptedEnvOverrides: true
          }).extend({
            defaultAuthOrgSlug: z.string().nullable()
          })
        })
@ -381,6 +385,41 @@ export const registerAdminRouter = async (server: FastifyZodProvider) => {
    }
  });

  server.route({
    method: "GET",
    url: "/env-overrides",
    config: {
      rateLimit: readLimit
    },
    schema: {
      response: {
        200: z.record(
          z.string(),
          z.object({
            name: z.string(),
            fields: z
              .object({
                key: z.string(),
                value: z.string(),
                hasEnvEntry: z.boolean(),
                description: z.string().optional()
              })
              .array()
          })
        )
      }
    },
    onRequest: (req, res, done) => {
      verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN])(req, res, () => {
        verifySuperAdmin(req, res, done);
      });
    },
    handler: async () => {
      const envOverrides = await server.services.superAdmin.getEnvOverridesOrganized();
      return envOverrides;
    }
  });

  server.route({
    method: "DELETE",
    url: "/user-management/users/:userId",
@ -31,6 +31,10 @@ import {
  AzureKeyVaultConnectionListItemSchema,
  SanitizedAzureKeyVaultConnectionSchema
} from "@app/services/app-connection/azure-key-vault";
import {
  BitbucketConnectionListItemSchema,
  SanitizedBitbucketConnectionSchema
} from "@app/services/app-connection/bitbucket";
import {
  CamundaConnectionListItemSchema,
  SanitizedCamundaConnectionSchema
@ -84,6 +88,7 @@ import {
  SanitizedWindmillConnectionSchema,
  WindmillConnectionListItemSchema
} from "@app/services/app-connection/windmill";
import { SanitizedZabbixConnectionSchema, ZabbixConnectionListItemSchema } from "@app/services/app-connection/zabbix";
import { AuthMode } from "@app/services/auth/auth-type";

// can't use discriminated due to multiple schemas for certain apps
@ -116,7 +121,9 @@ const SanitizedAppConnectionSchema = z.union([
  ...SanitizedRenderConnectionSchema.options,
  ...SanitizedFlyioConnectionSchema.options,
  ...SanitizedGitLabConnectionSchema.options,
  ...SanitizedCloudflareConnectionSchema.options
  ...SanitizedCloudflareConnectionSchema.options,
  ...SanitizedBitbucketConnectionSchema.options,
  ...SanitizedZabbixConnectionSchema.options
]);

const AppConnectionOptionsSchema = z.discriminatedUnion("app", [
@ -148,7 +155,9 @@ const AppConnectionOptionsSchema = z.discriminatedUnion("app", [
  RenderConnectionListItemSchema,
  FlyioConnectionListItemSchema,
  GitLabConnectionListItemSchema,
  CloudflareConnectionListItemSchema
  CloudflareConnectionListItemSchema,
  BitbucketConnectionListItemSchema,
  ZabbixConnectionListItemSchema
]);

export const registerAppConnectionRouter = async (server: FastifyZodProvider) => {
@ -0,0 +1,88 @@
import { z } from "zod";

import { readLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
import {
  CreateBitbucketConnectionSchema,
  SanitizedBitbucketConnectionSchema,
  UpdateBitbucketConnectionSchema
} from "@app/services/app-connection/bitbucket";
import { AuthMode } from "@app/services/auth/auth-type";

import { registerAppConnectionEndpoints } from "./app-connection-endpoints";

export const registerBitbucketConnectionRouter = async (server: FastifyZodProvider) => {
  registerAppConnectionEndpoints({
    app: AppConnection.Bitbucket,
    server,
    sanitizedResponseSchema: SanitizedBitbucketConnectionSchema,
    createSchema: CreateBitbucketConnectionSchema,
    updateSchema: UpdateBitbucketConnectionSchema
  });

  // The below endpoints are not exposed and are for Infisical App use only

  server.route({
    method: "GET",
    url: `/:connectionId/workspaces`,
    config: {
      rateLimit: readLimit
    },
    schema: {
      params: z.object({
        connectionId: z.string().uuid()
      }),
      response: {
        200: z.object({
          workspaces: z.object({ slug: z.string() }).array()
        })
      }
    },
    onRequest: verifyAuth([AuthMode.JWT]),
    handler: async (req) => {
      const {
        params: { connectionId }
      } = req;

      const workspaces = await server.services.appConnection.bitbucket.listWorkspaces(connectionId, req.permission);

      return { workspaces };
    }
  });

  server.route({
    method: "GET",
    url: `/:connectionId/repositories`,
    config: {
      rateLimit: readLimit
    },
    schema: {
      params: z.object({
        connectionId: z.string().uuid()
      }),
      querystring: z.object({
        workspaceSlug: z.string().min(1).max(255)
      }),
      response: {
        200: z.object({
          repositories: z.object({ slug: z.string(), full_name: z.string(), uuid: z.string() }).array()
        })
      }
    },
    onRequest: verifyAuth([AuthMode.JWT]),
    handler: async (req) => {
      const {
        params: { connectionId },
        query: { workspaceSlug }
      } = req;

      const repositories = await server.services.appConnection.bitbucket.listRepositories(
        { connectionId, workspaceSlug },
        req.permission
      );

      return { repositories };
    }
  });
};
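A hypothetical call against the internal workspaces endpoint (the URL prefix and token are invented; the actual mount path comes from how the app-connection router is registered, which is outside this diff):

// Sketch only: these routes are JWT-authenticated per verifyAuth above.
const res = await fetch(`https://app.example.com/api/v1/app-connections/bitbucket/${connectionId}/workspaces`, {
  headers: { authorization: `Bearer ${jwtToken}` }
});
const { workspaces } = (await res.json()) as { workspaces: { slug: string }[] };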
@ -9,6 +9,7 @@ import { registerAzureAppConfigurationConnectionRouter } from "./azure-app-confi
import { registerAzureClientSecretsConnectionRouter } from "./azure-client-secrets-connection-router";
import { registerAzureDevOpsConnectionRouter } from "./azure-devops-connection-router";
import { registerAzureKeyVaultConnectionRouter } from "./azure-key-vault-connection-router";
import { registerBitbucketConnectionRouter } from "./bitbucket-connection-router";
import { registerCamundaConnectionRouter } from "./camunda-connection-router";
import { registerCloudflareConnectionRouter } from "./cloudflare-connection-router";
import { registerDatabricksConnectionRouter } from "./databricks-connection-router";
@ -29,6 +30,7 @@ import { registerTeamCityConnectionRouter } from "./teamcity-connection-router";
import { registerTerraformCloudConnectionRouter } from "./terraform-cloud-router";
import { registerVercelConnectionRouter } from "./vercel-connection-router";
import { registerWindmillConnectionRouter } from "./windmill-connection-router";
import { registerZabbixConnectionRouter } from "./zabbix-connection-router";

export * from "./app-connection-router";

@ -62,5 +64,7 @@ export const APP_CONNECTION_REGISTER_ROUTER_MAP: Record<AppConnection, (server:
  [AppConnection.Render]: registerRenderConnectionRouter,
  [AppConnection.Flyio]: registerFlyioConnectionRouter,
  [AppConnection.GitLab]: registerGitLabConnectionRouter,
  [AppConnection.Cloudflare]: registerCloudflareConnectionRouter
  [AppConnection.Cloudflare]: registerCloudflareConnectionRouter,
  [AppConnection.Bitbucket]: registerBitbucketConnectionRouter,
  [AppConnection.Zabbix]: registerZabbixConnectionRouter
};
@ -0,0 +1,51 @@
import z from "zod";

import { readLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
import {
  CreateZabbixConnectionSchema,
  SanitizedZabbixConnectionSchema,
  UpdateZabbixConnectionSchema
} from "@app/services/app-connection/zabbix";
import { AuthMode } from "@app/services/auth/auth-type";

import { registerAppConnectionEndpoints } from "./app-connection-endpoints";

export const registerZabbixConnectionRouter = async (server: FastifyZodProvider) => {
  registerAppConnectionEndpoints({
    app: AppConnection.Zabbix,
    server,
    sanitizedResponseSchema: SanitizedZabbixConnectionSchema,
    createSchema: CreateZabbixConnectionSchema,
    updateSchema: UpdateZabbixConnectionSchema
  });

  // The following endpoints are for internal Infisical App use only and not part of the public API
  server.route({
    method: "GET",
    url: `/:connectionId/hosts`,
    config: {
      rateLimit: readLimit
    },
    schema: {
      params: z.object({
        connectionId: z.string().uuid()
      }),
      response: {
        200: z
          .object({
            hostId: z.string(),
            host: z.string()
          })
          .array()
      }
    },
    onRequest: verifyAuth([AuthMode.JWT]),
    handler: async (req) => {
      const { connectionId } = req.params;
      const hosts = await server.services.appConnection.zabbix.listHosts(connectionId, req.permission);
      return hosts;
    }
  });
};
@ -732,8 +732,8 @@ export const registerDashboardRouter = async (server: FastifyZodProvider) => {
        actorOrgId: req.permission.orgId,
        projectId,
        environment,
        path: secretPath,
        search
        path: secretPath
        // search scott: removing for now because this prevents searching imported secrets which are fetched separately client side
      });

      if (remainingLimit > 0 && totalImportCount > adjustedOffset) {
@ -745,7 +745,7 @@ export const registerDashboardRouter = async (server: FastifyZodProvider) => {
          projectId,
          environment,
          path: secretPath,
          search,
          // search scott: removing for now because this prevents searching imported secrets which are fetched separately client side
          limit: remainingLimit,
          offset: adjustedOffset
        });
@ -22,6 +22,7 @@ import { registerTeamCitySyncRouter } from "./teamcity-sync-router";
import { registerTerraformCloudSyncRouter } from "./terraform-cloud-sync-router";
import { registerVercelSyncRouter } from "./vercel-sync-router";
import { registerWindmillSyncRouter } from "./windmill-sync-router";
import { registerZabbixSyncRouter } from "./zabbix-sync-router";

export * from "./secret-sync-router";

@ -47,5 +48,6 @@ export const SECRET_SYNC_REGISTER_ROUTER_MAP: Record<SecretSync, (server: Fastif
  [SecretSync.Render]: registerRenderSyncRouter,
  [SecretSync.Flyio]: registerFlyioSyncRouter,
  [SecretSync.GitLab]: registerGitLabSyncRouter,
  [SecretSync.CloudflarePages]: registerCloudflarePagesSyncRouter
  [SecretSync.CloudflarePages]: registerCloudflarePagesSyncRouter,
  [SecretSync.Zabbix]: registerZabbixSyncRouter
};
@ -382,7 +382,8 @@ export const registerSyncSecretsEndpoints = <T extends TSecretSync, I extends TS
        {
          syncId,
          destination,
          importBehavior
          importBehavior,
          auditLogInfo: req.auditLogInfo
        },
        req.permission
      )) as T;
@ -415,7 +416,8 @@ export const registerSyncSecretsEndpoints = <T extends TSecretSync, I extends TS
      const secretSync = (await server.services.secretSync.triggerSecretSyncRemoveSecretsById(
        {
          syncId,
          destination
          destination,
          auditLogInfo: req.auditLogInfo
        },
        req.permission
      )) as T;
@ -39,6 +39,7 @@ import { TeamCitySyncListItemSchema, TeamCitySyncSchema } from "@app/services/se
import { TerraformCloudSyncListItemSchema, TerraformCloudSyncSchema } from "@app/services/secret-sync/terraform-cloud";
import { VercelSyncListItemSchema, VercelSyncSchema } from "@app/services/secret-sync/vercel";
import { WindmillSyncListItemSchema, WindmillSyncSchema } from "@app/services/secret-sync/windmill";
import { ZabbixSyncListItemSchema, ZabbixSyncSchema } from "@app/services/secret-sync/zabbix";

const SecretSyncSchema = z.discriminatedUnion("destination", [
  AwsParameterStoreSyncSchema,
@ -62,7 +63,8 @@ const SecretSyncSchema = z.discriminatedUnion("destination", [
  RenderSyncSchema,
  FlyioSyncSchema,
  GitLabSyncSchema,
  CloudflarePagesSyncSchema
  CloudflarePagesSyncSchema,
  ZabbixSyncSchema
]);

const SecretSyncOptionsSchema = z.discriminatedUnion("destination", [
@ -87,7 +89,8 @@ const SecretSyncOptionsSchema = z.discriminatedUnion("destination", [
  RenderSyncListItemSchema,
  FlyioSyncListItemSchema,
  GitLabSyncListItemSchema,
  CloudflarePagesSyncListItemSchema
  CloudflarePagesSyncListItemSchema,
  ZabbixSyncListItemSchema
]);

export const registerSecretSyncRouter = async (server: FastifyZodProvider) => {
@ -0,0 +1,13 @@
import { SecretSync } from "@app/services/secret-sync/secret-sync-enums";
import { CreateZabbixSyncSchema, UpdateZabbixSyncSchema, ZabbixSyncSchema } from "@app/services/secret-sync/zabbix";

import { registerSyncSecretsEndpoints } from "./secret-sync-endpoints";

export const registerZabbixSyncRouter = async (server: FastifyZodProvider) =>
  registerSyncSecretsEndpoints({
    destination: SecretSync.Zabbix,
    server,
    responseSchema: ZabbixSyncSchema,
    createSchema: CreateZabbixSyncSchema,
    updateSchema: UpdateZabbixSyncSchema
  });
|
@@ -9,6 +9,7 @@
 import { Authenticator } from "@fastify/passport";
 import fastifySession from "@fastify/session";
 import RedisStore from "connect-redis";
+import { CronJob } from "cron";
 import { Strategy as GitLabStrategy } from "passport-gitlab2";
 import { Strategy as GoogleStrategy } from "passport-google-oauth20";
 import { Strategy as OAuth2Strategy } from "passport-oauth2";
@@ -25,26 +26,13 @@ import { AuthMethod } from "@app/services/auth/auth-type";
 import { OrgAuthMethod } from "@app/services/org/org-types";
 import { getServerCfg } from "@app/services/super-admin/super-admin-service";
 
-export const registerSsoRouter = async (server: FastifyZodProvider) => {
-  const appCfg = getConfig();
-
 const passport = new Authenticator({ key: "sso", userProperty: "passportUser" });
-  const redisStore = new RedisStore({
-    client: server.redis,
-    prefix: "oauth-session:",
-    ttl: 600 // 10 minutes
-  });
-
-  await server.register(fastifySession, {
-    secret: appCfg.COOKIE_SECRET_SIGN_KEY,
-    store: redisStore,
-    cookie: {
-      secure: appCfg.HTTPS_ENABLED,
-      sameSite: "lax" // we want cookies to be sent to Infisical in redirects originating from IDP server
-    }
-  });
-  await server.register(passport.initialize());
-  await server.register(passport.secureSession());
+
+let serverInstance: FastifyZodProvider | null = null;
+
+export const registerOauthMiddlewares = (server: FastifyZodProvider) => {
+  serverInstance = server;
+  const appCfg = getConfig();
 
   // passport oauth strategy for Google
   const isGoogleOauthActive = Boolean(appCfg.CLIENT_ID_GOOGLE_LOGIN && appCfg.CLIENT_SECRET_GOOGLE_LOGIN);
@@ -176,6 +164,49 @@ export const registerSsoRouter = async (server: FastifyZodProvider) => {
       )
     );
   }
 };
 
+export const refreshOauthConfig = () => {
+  if (!serverInstance) {
+    logger.warn("Cannot refresh OAuth config: server instance not available");
+    return;
+  }
+
+  logger.info("Refreshing OAuth configuration...");
+  registerOauthMiddlewares(serverInstance);
+};
+
+export const initializeOauthConfigSync = async () => {
+  logger.info("Setting up background sync process for oauth configuration");
+
+  // sync every 5 minutes
+  const job = new CronJob("*/5 * * * *", refreshOauthConfig);
+  job.start();
+
+  return job;
+};
+
+export const registerSsoRouter = async (server: FastifyZodProvider) => {
+  const appCfg = getConfig();
+
+  const redisStore = new RedisStore({
+    client: server.redis,
+    prefix: "oauth-session:",
+    ttl: 600 // 10 minutes
+  });
+
+  await server.register(fastifySession, {
+    secret: appCfg.COOKIE_SECRET_SIGN_KEY,
+    store: redisStore,
+    cookie: {
+      secure: appCfg.HTTPS_ENABLED,
+      sameSite: "lax" // we want cookies to be sent to Infisical in redirects originating from IDP server
+    }
+  });
+  await server.register(passport.initialize());
+  await server.register(passport.secureSession());
+
+  registerOauthMiddlewares(server);
 
   server.route({
     url: "/redirect/google",
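The cron-driven refresh above re-registers the passport strategies every five minutes, so OAuth client settings changed at runtime take effect without a restart. A hypothetical bootstrap wiring (the actual call site is not part of this diff, and the import path is an assumption):

// Hypothetical wiring sketch — import path and bootstrap shape are assumptions.
import { initializeOauthConfigSync } from "@app/server/routes/sso-router";

const bootstrap = async () => {
  // ...register Fastify plugins and routes first...
  const job = await initializeOauthConfigSync(); // CronJob firing every 5 minutes

  // Stop the job on shutdown so the event loop can drain and the process exit.
  process.on("SIGTERM", () => job.stop());
};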
@@ -2,7 +2,7 @@ import picomatch from "picomatch";
 import { z } from "zod";
 
 import { SecretApprovalRequestsSchema, SecretsSchema, SecretType, ServiceTokenScopes } from "@app/db/schemas";
-import { EventType, UserAgentType } from "@app/ee/services/audit-log/audit-log-types";
+import { EventType, SecretApprovalEvent, UserAgentType } from "@app/ee/services/audit-log/audit-log-types";
 import { ApiDocsTags, RAW_SECRETS, SECRETS } from "@app/lib/api-docs";
 import { BadRequestError } from "@app/lib/errors";
 import { removeTrailingSlash } from "@app/lib/fn";
@@ -594,6 +594,23 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
         secretReminderRepeatDays: req.body.secretReminderRepeatDays
       });
       if (secretOperation.type === SecretProtectionType.Approval) {
+        await server.services.auditLog.createAuditLog({
+          projectId: req.body.workspaceId,
+          ...req.auditLogInfo,
+          event: {
+            type: EventType.SECRET_APPROVAL_REQUEST,
+            metadata: {
+              committedBy: secretOperation.approval.committerUserId,
+              secretApprovalRequestId: secretOperation.approval.id,
+              secretApprovalRequestSlug: secretOperation.approval.slug,
+              secretPath: req.body.secretPath,
+              environment: req.body.environment,
+              secretKey: req.params.secretName,
+              eventType: SecretApprovalEvent.Create
+            }
+          }
+        });
+
         return { approval: secretOperation.approval };
       }
 
@@ -730,6 +747,23 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
       });
 
       if (secretOperation.type === SecretProtectionType.Approval) {
+        await server.services.auditLog.createAuditLog({
+          projectId: req.body.workspaceId,
+          ...req.auditLogInfo,
+          event: {
+            type: EventType.SECRET_APPROVAL_REQUEST,
+            metadata: {
+              committedBy: secretOperation.approval.committerUserId,
+              secretApprovalRequestId: secretOperation.approval.id,
+              secretApprovalRequestSlug: secretOperation.approval.slug,
+              secretPath: req.body.secretPath,
+              environment: req.body.environment,
+              secretKey: req.params.secretName,
+              eventType: SecretApprovalEvent.Update
+            }
+          }
+        });
+
         return { approval: secretOperation.approval };
       }
       const { secret } = secretOperation;
@@ -831,6 +865,23 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
         type: req.body.type
       });
       if (secretOperation.type === SecretProtectionType.Approval) {
+        await server.services.auditLog.createAuditLog({
+          projectId: req.body.workspaceId,
+          ...req.auditLogInfo,
+          event: {
+            type: EventType.SECRET_APPROVAL_REQUEST,
+            metadata: {
+              committedBy: secretOperation.approval.committerUserId,
+              secretApprovalRequestId: secretOperation.approval.id,
+              secretApprovalRequestSlug: secretOperation.approval.slug,
+              secretPath: req.body.secretPath,
+              environment: req.body.environment,
+              secretKey: req.params.secretName,
+              eventType: SecretApprovalEvent.Delete
+            }
+          }
+        });
+
         return { approval: secretOperation.approval };
       }
 
@@ -1165,7 +1216,10 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
           metadata: {
             committedBy: approval.committerUserId,
             secretApprovalRequestId: approval.id,
-            secretApprovalRequestSlug: approval.slug
+            secretApprovalRequestSlug: approval.slug,
+            secretPath,
+            environment,
+            eventType: SecretApprovalEvent.Create
           }
         }
       });
@@ -1351,7 +1405,11 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
           metadata: {
             committedBy: approval.committerUserId,
             secretApprovalRequestId: approval.id,
-            secretApprovalRequestSlug: approval.slug
+            secretApprovalRequestSlug: approval.slug,
+            secretPath,
+            environment,
+            secretKey: req.params.secretName,
+            eventType: SecretApprovalEvent.Update
           }
         }
       });
@@ -1489,7 +1547,11 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
           metadata: {
             committedBy: approval.committerUserId,
             secretApprovalRequestId: approval.id,
-            secretApprovalRequestSlug: approval.slug
+            secretApprovalRequestSlug: approval.slug,
+            secretPath,
+            environment,
+            secretKey: req.params.secretName,
+            eventType: SecretApprovalEvent.Delete
           }
         }
       });
@@ -1673,7 +1735,10 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
           metadata: {
             committedBy: approval.committerUserId,
             secretApprovalRequestId: approval.id,
-            secretApprovalRequestSlug: approval.slug
+            secretApprovalRequestSlug: approval.slug,
+            secretPath,
+            environment,
+            eventType: SecretApprovalEvent.CreateMany
           }
         }
       });
@@ -1801,7 +1866,13 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
           metadata: {
             committedBy: approval.committerUserId,
             secretApprovalRequestId: approval.id,
-            secretApprovalRequestSlug: approval.slug
+            secretApprovalRequestSlug: approval.slug,
+            secretPath,
+            environment,
+            eventType: SecretApprovalEvent.UpdateMany,
+            secrets: inputSecrets.map((secret) => ({
+              secretKey: secret.secretName
+            }))
           }
         }
       });
@@ -1920,7 +1991,13 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
           metadata: {
             committedBy: approval.committerUserId,
             secretApprovalRequestId: approval.id,
-            secretApprovalRequestSlug: approval.slug
+            secretApprovalRequestSlug: approval.slug,
+            secretPath,
+            environment,
+            secrets: inputSecrets.map((secret) => ({
+              secretKey: secret.secretName
+            })),
+            eventType: SecretApprovalEvent.DeleteMany
          }
        }
      });
@@ -2038,6 +2115,24 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
         secrets: inputSecrets
       });
       if (secretOperation.type === SecretProtectionType.Approval) {
+        await server.services.auditLog.createAuditLog({
+          projectId: req.body.workspaceId,
+          ...req.auditLogInfo,
+          event: {
+            type: EventType.SECRET_APPROVAL_REQUEST,
+            metadata: {
+              committedBy: secretOperation.approval.committerUserId,
+              secretApprovalRequestId: secretOperation.approval.id,
+              secretApprovalRequestSlug: secretOperation.approval.slug,
+              secretPath,
+              environment,
+              secrets: inputSecrets.map((secret) => ({
+                secretKey: secret.secretKey
+              })),
+              eventType: SecretApprovalEvent.CreateMany
+            }
+          }
+        });
         return { approval: secretOperation.approval };
       }
       const { secrets } = secretOperation;
@@ -2170,6 +2265,25 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
         mode: req.body.mode
       });
       if (secretOperation.type === SecretProtectionType.Approval) {
+        await server.services.auditLog.createAuditLog({
+          projectId: req.body.workspaceId,
+          ...req.auditLogInfo,
+          event: {
+            type: EventType.SECRET_APPROVAL_REQUEST,
+            metadata: {
+              committedBy: secretOperation.approval.committerUserId,
+              secretApprovalRequestId: secretOperation.approval.id,
+              secretApprovalRequestSlug: secretOperation.approval.slug,
+              secretPath,
+              environment,
+              secrets: inputSecrets.map((secret) => ({
+                secretKey: secret.secretKey,
+                secretPath: secret.secretPath
+              })),
+              eventType: SecretApprovalEvent.UpdateMany
+            }
+          }
+        });
+        return { approval: secretOperation.approval };
       }
       const { secrets } = secretOperation;
@@ -2298,6 +2412,25 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
         secrets: inputSecrets
       });
       if (secretOperation.type === SecretProtectionType.Approval) {
+        await server.services.auditLog.createAuditLog({
+          projectId: req.body.workspaceId,
+          ...req.auditLogInfo,
+          event: {
+            type: EventType.SECRET_APPROVAL_REQUEST,
+            metadata: {
+              committedBy: secretOperation.approval.committerUserId,
+              secretApprovalRequestId: secretOperation.approval.id,
+              secretApprovalRequestSlug: secretOperation.approval.slug,
+              secretPath,
+              environment,
+              secrets: inputSecrets.map((secret) => ({
+                secretKey: secret.secretKey
+              })),
+              eventType: SecretApprovalEvent.DeleteMany
+            }
+          }
+        });
+
         return { approval: secretOperation.approval };
       }
       const { secrets } = secretOperation;
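The same SECRET_APPROVAL_REQUEST metadata block now appears in every create/update/delete handler, differing only in eventType and the optional secretKey/secrets fields. As an illustration only (not part of this change), a small builder could centralize it; the field names mirror the diff, while the helper itself is hypothetical:

// Illustrative refactor sketch — not in the codebase.
const buildApprovalAuditEvent = (
  approval: { committerUserId: string; id: string; slug: string },
  ctx: { secretPath: string; environment: string; eventType: SecretApprovalEvent; secretKey?: string }
) => ({
  type: EventType.SECRET_APPROVAL_REQUEST,
  metadata: {
    committedBy: approval.committerUserId,
    secretApprovalRequestId: approval.id,
    secretApprovalRequestSlug: approval.slug,
    ...ctx
  }
});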
@@ -27,7 +27,9 @@ export enum AppConnection {
   Render = "render",
   Flyio = "flyio",
   GitLab = "gitlab",
-  Cloudflare = "cloudflare"
+  Cloudflare = "cloudflare",
+  Bitbucket = "bitbucket",
+  Zabbix = "zabbix"
 }
 
 export enum AWSRegion {
@@ -50,6 +50,11 @@ import {
   getAzureKeyVaultConnectionListItem,
   validateAzureKeyVaultConnectionCredentials
 } from "./azure-key-vault";
+import {
+  BitbucketConnectionMethod,
+  getBitbucketConnectionListItem,
+  validateBitbucketConnectionCredentials
+} from "./bitbucket";
 import { CamundaConnectionMethod, getCamundaConnectionListItem, validateCamundaConnectionCredentials } from "./camunda";
 import { CloudflareConnectionMethod } from "./cloudflare/cloudflare-connection-enum";
 import {
@@ -105,6 +110,7 @@ import {
   validateWindmillConnectionCredentials,
   WindmillConnectionMethod
 } from "./windmill";
+import { getZabbixConnectionListItem, validateZabbixConnectionCredentials, ZabbixConnectionMethod } from "./zabbix";
 
 export const listAppConnectionOptions = () => {
   return [
@@ -136,7 +142,9 @@ export const listAppConnectionOptions = () => {
     getRenderConnectionListItem(),
     getFlyioConnectionListItem(),
     getGitLabConnectionListItem(),
-    getCloudflareConnectionListItem()
+    getCloudflareConnectionListItem(),
+    getBitbucketConnectionListItem(),
+    getZabbixConnectionListItem()
   ].sort((a, b) => a.name.localeCompare(b.name));
 };
 
@@ -216,7 +224,9 @@ export const validateAppConnectionCredentials = async (
     [AppConnection.Render]: validateRenderConnectionCredentials as TAppConnectionCredentialsValidator,
     [AppConnection.Flyio]: validateFlyioConnectionCredentials as TAppConnectionCredentialsValidator,
     [AppConnection.GitLab]: validateGitLabConnectionCredentials as TAppConnectionCredentialsValidator,
-    [AppConnection.Cloudflare]: validateCloudflareConnectionCredentials as TAppConnectionCredentialsValidator
+    [AppConnection.Cloudflare]: validateCloudflareConnectionCredentials as TAppConnectionCredentialsValidator,
+    [AppConnection.Bitbucket]: validateBitbucketConnectionCredentials as TAppConnectionCredentialsValidator,
+    [AppConnection.Zabbix]: validateZabbixConnectionCredentials as TAppConnectionCredentialsValidator
   };
 
   return VALIDATE_APP_CONNECTION_CREDENTIALS_MAP[appConnection.app](appConnection);
@@ -253,6 +263,8 @@ export const getAppConnectionMethodName = (method: TAppConnection["method"]) =>
     case VercelConnectionMethod.ApiToken:
     case OnePassConnectionMethod.ApiToken:
     case CloudflareConnectionMethod.APIToken:
+    case BitbucketConnectionMethod.ApiToken:
+    case ZabbixConnectionMethod.ApiToken:
       return "API Token";
     case PostgresConnectionMethod.UsernameAndPassword:
     case MsSqlConnectionMethod.UsernameAndPassword:
@@ -332,7 +344,9 @@ export const TRANSITION_CONNECTION_CREDENTIALS_TO_PLATFORM: Record<
   [AppConnection.Render]: platformManagedCredentialsNotSupported,
   [AppConnection.Flyio]: platformManagedCredentialsNotSupported,
   [AppConnection.GitLab]: platformManagedCredentialsNotSupported,
-  [AppConnection.Cloudflare]: platformManagedCredentialsNotSupported
+  [AppConnection.Cloudflare]: platformManagedCredentialsNotSupported,
+  [AppConnection.Bitbucket]: platformManagedCredentialsNotSupported,
+  [AppConnection.Zabbix]: platformManagedCredentialsNotSupported
 };
 
 export const enterpriseAppCheck = async (
@@ -29,7 +29,9 @@ export const APP_CONNECTION_NAME_MAP: Record<AppConnection, string> = {
   [AppConnection.Render]: "Render",
   [AppConnection.Flyio]: "Fly.io",
   [AppConnection.GitLab]: "GitLab",
-  [AppConnection.Cloudflare]: "Cloudflare"
+  [AppConnection.Cloudflare]: "Cloudflare",
+  [AppConnection.Bitbucket]: "Bitbucket",
+  [AppConnection.Zabbix]: "Zabbix"
 };
 
 export const APP_CONNECTION_PLAN_MAP: Record<AppConnection, AppConnectionPlanType> = {
@@ -61,5 +63,7 @@ export const APP_CONNECTION_PLAN_MAP: Record<AppConnection, AppConnectionPlanTyp
   [AppConnection.Render]: AppConnectionPlanType.Regular,
   [AppConnection.Flyio]: AppConnectionPlanType.Regular,
   [AppConnection.GitLab]: AppConnectionPlanType.Regular,
-  [AppConnection.Cloudflare]: AppConnectionPlanType.Regular
+  [AppConnection.Cloudflare]: AppConnectionPlanType.Regular,
+  [AppConnection.Bitbucket]: AppConnectionPlanType.Regular,
+  [AppConnection.Zabbix]: AppConnectionPlanType.Regular
 };
@@ -45,6 +45,8 @@ import { azureClientSecretsConnectionService } from "./azure-client-secrets/azur
 import { ValidateAzureDevOpsConnectionCredentialsSchema } from "./azure-devops/azure-devops-schemas";
 import { azureDevOpsConnectionService } from "./azure-devops/azure-devops-service";
 import { ValidateAzureKeyVaultConnectionCredentialsSchema } from "./azure-key-vault";
+import { ValidateBitbucketConnectionCredentialsSchema } from "./bitbucket";
+import { bitbucketConnectionService } from "./bitbucket/bitbucket-connection-service";
 import { ValidateCamundaConnectionCredentialsSchema } from "./camunda";
 import { camundaConnectionService } from "./camunda/camunda-connection-service";
 import { ValidateCloudflareConnectionCredentialsSchema } from "./cloudflare/cloudflare-connection-schema";
@@ -80,6 +82,8 @@ import { ValidateVercelConnectionCredentialsSchema } from "./vercel";
 import { vercelConnectionService } from "./vercel/vercel-connection-service";
 import { ValidateWindmillConnectionCredentialsSchema } from "./windmill";
 import { windmillConnectionService } from "./windmill/windmill-connection-service";
+import { ValidateZabbixConnectionCredentialsSchema } from "./zabbix";
+import { zabbixConnectionService } from "./zabbix/zabbix-connection-service";
 
 export type TAppConnectionServiceFactoryDep = {
   appConnectionDAL: TAppConnectionDALFactory;
@@ -119,7 +123,9 @@ const VALIDATE_APP_CONNECTION_CREDENTIALS_MAP: Record<AppConnection, TValidateAp
   [AppConnection.Render]: ValidateRenderConnectionCredentialsSchema,
   [AppConnection.Flyio]: ValidateFlyioConnectionCredentialsSchema,
   [AppConnection.GitLab]: ValidateGitLabConnectionCredentialsSchema,
-  [AppConnection.Cloudflare]: ValidateCloudflareConnectionCredentialsSchema
+  [AppConnection.Cloudflare]: ValidateCloudflareConnectionCredentialsSchema,
+  [AppConnection.Bitbucket]: ValidateBitbucketConnectionCredentialsSchema,
+  [AppConnection.Zabbix]: ValidateZabbixConnectionCredentialsSchema
 };
 
 export const appConnectionServiceFactory = ({
@@ -529,6 +535,8 @@ export const appConnectionServiceFactory = ({
     render: renderConnectionService(connectAppConnectionById),
     flyio: flyioConnectionService(connectAppConnectionById),
     gitlab: gitlabConnectionService(connectAppConnectionById, appConnectionDAL, kmsService),
-    cloudflare: cloudflareConnectionService(connectAppConnectionById)
+    cloudflare: cloudflareConnectionService(connectAppConnectionById),
+    bitbucket: bitbucketConnectionService(connectAppConnectionById),
+    zabbix: zabbixConnectionService(connectAppConnectionById)
   };
 };
@@ -56,6 +56,12 @@ import {
   TAzureKeyVaultConnectionInput,
   TValidateAzureKeyVaultConnectionCredentialsSchema
 } from "./azure-key-vault";
+import {
+  TBitbucketConnection,
+  TBitbucketConnectionConfig,
+  TBitbucketConnectionInput,
+  TValidateBitbucketConnectionCredentialsSchema
+} from "./bitbucket";
 import {
   TCamundaConnection,
   TCamundaConnectionConfig,
@@ -165,6 +171,12 @@ import {
   TWindmillConnectionConfig,
   TWindmillConnectionInput
 } from "./windmill";
+import {
+  TValidateZabbixConnectionCredentialsSchema,
+  TZabbixConnection,
+  TZabbixConnectionConfig,
+  TZabbixConnectionInput
+} from "./zabbix";
 
 export type TAppConnection = { id: string } & (
   | TAwsConnection
@@ -196,6 +208,8 @@ export type TAppConnection = { id: string } & (
   | TFlyioConnection
   | TGitLabConnection
   | TCloudflareConnection
+  | TBitbucketConnection
+  | TZabbixConnection
 );
 
 export type TAppConnectionRaw = NonNullable<Awaited<ReturnType<TAppConnectionDALFactory["findById"]>>>;
@@ -232,6 +246,8 @@ export type TAppConnectionInput = { id: string } & (
   | TFlyioConnectionInput
   | TGitLabConnectionInput
   | TCloudflareConnectionInput
+  | TBitbucketConnectionInput
+  | TZabbixConnectionInput
 );
 
 export type TSqlConnectionInput =
@@ -275,7 +291,9 @@ export type TAppConnectionConfig =
   | TRenderConnectionConfig
   | TFlyioConnectionConfig
   | TGitLabConnectionConfig
-  | TCloudflareConnectionConfig;
+  | TCloudflareConnectionConfig
+  | TBitbucketConnectionConfig
+  | TZabbixConnectionConfig;
 
 export type TValidateAppConnectionCredentialsSchema =
   | TValidateAwsConnectionCredentialsSchema
@@ -306,7 +324,9 @@ export type TValidateAppConnectionCredentialsSchema =
   | TValidateRenderConnectionCredentialsSchema
   | TValidateFlyioConnectionCredentialsSchema
   | TValidateGitLabConnectionCredentialsSchema
-  | TValidateCloudflareConnectionCredentialsSchema;
+  | TValidateCloudflareConnectionCredentialsSchema
+  | TValidateBitbucketConnectionCredentialsSchema
+  | TValidateZabbixConnectionCredentialsSchema;
 
 export type TListAwsConnectionKmsKeys = {
   connectionId: string;
@@ -0,0 +1,3 @@
+export enum BitbucketConnectionMethod {
+  ApiToken = "api-token"
+}
@@ -0,0 +1,117 @@
+import { AxiosError } from "axios";
+
+import { request } from "@app/lib/config/request";
+import { BadRequestError } from "@app/lib/errors";
+import { AppConnection } from "@app/services/app-connection/app-connection-enums";
+import { IntegrationUrls } from "@app/services/integration-auth/integration-list";
+
+import { BitbucketConnectionMethod } from "./bitbucket-connection-enums";
+import {
+  TBitbucketConnection,
+  TBitbucketConnectionConfig,
+  TBitbucketRepo,
+  TBitbucketWorkspace
+} from "./bitbucket-connection-types";
+
+export const getBitbucketConnectionListItem = () => {
+  return {
+    name: "Bitbucket" as const,
+    app: AppConnection.Bitbucket as const,
+    methods: Object.values(BitbucketConnectionMethod) as [BitbucketConnectionMethod.ApiToken]
+  };
+};
+
+export const getBitbucketUser = async ({ email, apiToken }: { email: string; apiToken: string }) => {
+  try {
+    const { data } = await request.get<{ username: string }>(`${IntegrationUrls.BITBUCKET_API_URL}/2.0/user`, {
+      headers: {
+        Authorization: `Basic ${Buffer.from(`${email}:${apiToken}`).toString("base64")}`,
+        Accept: "application/json"
+      }
+    });
+
+    return data;
+  } catch (error: unknown) {
+    if (error instanceof AxiosError) {
+      throw new BadRequestError({
+        message: `Failed to validate credentials: ${error.message || "Unknown error"}`
+      });
+    }
+    throw new BadRequestError({
+      message: "Unable to validate connection: verify credentials"
+    });
+  }
+};
+
+export const validateBitbucketConnectionCredentials = async (config: TBitbucketConnectionConfig) => {
+  await getBitbucketUser(config.credentials);
+  return config.credentials;
+};
+
+interface BitbucketWorkspacesResponse {
+  values: TBitbucketWorkspace[];
+  next?: string;
+}
+
+export const listBitbucketWorkspaces = async (appConnection: TBitbucketConnection) => {
+  const { email, apiToken } = appConnection.credentials;
+
+  const headers = {
+    Authorization: `Basic ${Buffer.from(`${email}:${apiToken}`).toString("base64")}`,
+    Accept: "application/json"
+  };
+
+  let allWorkspaces: TBitbucketWorkspace[] = [];
+  let nextUrl: string | undefined = `${IntegrationUrls.BITBUCKET_API_URL}/2.0/workspaces?pagelen=100`;
+  let iterationCount = 0;
+
+  // Limit to 10 iterations, fetching at most 10 * 100 = 1000 workspaces
+  while (nextUrl && iterationCount < 10) {
+    // eslint-disable-next-line no-await-in-loop
+    const { data }: { data: BitbucketWorkspacesResponse } = await request.get<BitbucketWorkspacesResponse>(nextUrl, {
+      headers
+    });
+
+    allWorkspaces = allWorkspaces.concat(data.values.map((workspace) => ({ slug: workspace.slug })));
+    nextUrl = data.next;
+    iterationCount += 1;
+  }
+
+  return allWorkspaces;
+};
+
+interface BitbucketRepositoriesResponse {
+  values: TBitbucketRepo[];
+  next?: string;
+}
+
+export const listBitbucketRepositories = async (appConnection: TBitbucketConnection, workspaceSlug: string) => {
+  const { email, apiToken } = appConnection.credentials;
+
+  const headers = {
+    Authorization: `Basic ${Buffer.from(`${email}:${apiToken}`).toString("base64")}`,
+    Accept: "application/json"
+  };
+
+  let allRepos: TBitbucketRepo[] = [];
+  let nextUrl: string | undefined =
+    `${IntegrationUrls.BITBUCKET_API_URL}/2.0/repositories/${encodeURIComponent(workspaceSlug)}?pagelen=100`;
+  let iterationCount = 0;
+
+  // Limit to 10 iterations, fetching at most 10 * 100 = 1000 repositories
+  while (nextUrl && iterationCount < 10) {
+    // eslint-disable-next-line no-await-in-loop
+    const { data }: { data: BitbucketRepositoriesResponse } = await request.get<BitbucketRepositoriesResponse>(
+      nextUrl,
+      {
+        headers
+      }
+    );
+
+    allRepos = allRepos.concat(data.values);
+    nextUrl = data.next;
+    iterationCount += 1;
+  }
+
+  return allRepos;
+};
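Both listing helpers repeat the same capped cursor-follow loop over Bitbucket's paginated API. As an illustration only (not part of this change), the pattern generalizes to one small helper; `request` and the `{ values, next }` response shape are the same as in the file above:

// Illustrative sketch: follow Bitbucket's `next` cursor, capped at maxPages.
const fetchAllPages = async <T>(
  firstUrl: string,
  headers: Record<string, string>,
  maxPages = 10
): Promise<T[]> => {
  const results: T[] = [];
  let url: string | undefined = firstUrl;
  let pages = 0;
  while (url && pages < maxPages) {
    // eslint-disable-next-line no-await-in-loop
    const { data } = await request.get<{ values: T[]; next?: string }>(url, { headers });
    results.push(...data.values);
    url = data.next; // absolute URL of the next page, per Bitbucket's pagination
    pages += 1;
  }
  return results;
};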
@@ -0,0 +1,72 @@
+import z from "zod";
+
+import { AppConnections } from "@app/lib/api-docs";
+import { AppConnection } from "@app/services/app-connection/app-connection-enums";
+import {
+  BaseAppConnectionSchema,
+  GenericCreateAppConnectionFieldsSchema,
+  GenericUpdateAppConnectionFieldsSchema
+} from "@app/services/app-connection/app-connection-schemas";
+
+import { BitbucketConnectionMethod } from "./bitbucket-connection-enums";
+
+export const BitbucketConnectionAccessTokenCredentialsSchema = z.object({
+  apiToken: z
+    .string()
+    .trim()
+    .min(1, "API Token required")
+    .max(255)
+    .describe(AppConnections.CREDENTIALS.BITBUCKET.apiToken),
+  email: z
+    .string()
+    .email()
+    .trim()
+    .min(1, "Email required")
+    .max(255)
+    .describe(AppConnections.CREDENTIALS.BITBUCKET.email)
+});
+
+const BaseBitbucketConnectionSchema = BaseAppConnectionSchema.extend({ app: z.literal(AppConnection.Bitbucket) });
+
+export const BitbucketConnectionSchema = BaseBitbucketConnectionSchema.extend({
+  method: z.literal(BitbucketConnectionMethod.ApiToken),
+  credentials: BitbucketConnectionAccessTokenCredentialsSchema
+});
+
+export const SanitizedBitbucketConnectionSchema = z.discriminatedUnion("method", [
+  BaseBitbucketConnectionSchema.extend({
+    method: z.literal(BitbucketConnectionMethod.ApiToken),
+    credentials: BitbucketConnectionAccessTokenCredentialsSchema.pick({
+      email: true
+    })
+  })
+]);
+
+export const ValidateBitbucketConnectionCredentialsSchema = z.discriminatedUnion("method", [
+  z.object({
+    method: z
+      .literal(BitbucketConnectionMethod.ApiToken)
+      .describe(AppConnections.CREATE(AppConnection.Bitbucket).method),
+    credentials: BitbucketConnectionAccessTokenCredentialsSchema.describe(
+      AppConnections.CREATE(AppConnection.Bitbucket).credentials
+    )
+  })
+]);
+
+export const CreateBitbucketConnectionSchema = ValidateBitbucketConnectionCredentialsSchema.and(
+  GenericCreateAppConnectionFieldsSchema(AppConnection.Bitbucket)
+);
+
+export const UpdateBitbucketConnectionSchema = z
+  .object({
+    credentials: BitbucketConnectionAccessTokenCredentialsSchema.optional().describe(
+      AppConnections.UPDATE(AppConnection.Bitbucket).credentials
+    )
+  })
+  .and(GenericUpdateAppConnectionFieldsSchema(AppConnection.Bitbucket));
+
+export const BitbucketConnectionListItemSchema = z.object({
+  name: z.literal("Bitbucket"),
+  app: z.literal(AppConnection.Bitbucket),
+  methods: z.nativeEnum(BitbucketConnectionMethod).array()
+});
@@ -0,0 +1,33 @@
+import { OrgServiceActor } from "@app/lib/types";
+
+import { AppConnection } from "../app-connection-enums";
+import { listBitbucketRepositories, listBitbucketWorkspaces } from "./bitbucket-connection-fns";
+import { TBitbucketConnection, TGetBitbucketRepositoriesDTO } from "./bitbucket-connection-types";
+
+type TGetAppConnectionFunc = (
+  app: AppConnection,
+  connectionId: string,
+  actor: OrgServiceActor
+) => Promise<TBitbucketConnection>;
+
+export const bitbucketConnectionService = (getAppConnection: TGetAppConnectionFunc) => {
+  const listWorkspaces = async (connectionId: string, actor: OrgServiceActor) => {
+    const appConnection = await getAppConnection(AppConnection.Bitbucket, connectionId, actor);
+    const workspaces = await listBitbucketWorkspaces(appConnection);
+    return workspaces;
+  };
+
+  const listRepositories = async (
+    { connectionId, workspaceSlug }: TGetBitbucketRepositoriesDTO,
+    actor: OrgServiceActor
+  ) => {
+    const appConnection = await getAppConnection(AppConnection.Bitbucket, connectionId, actor);
+    const repositories = await listBitbucketRepositories(appConnection, workspaceSlug);
+    return repositories;
+  };
+
+  return {
+    listWorkspaces,
+    listRepositories
+  };
+};
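A usage sketch of the service factory above; `getAppConnection`, `connectionId`, and `actor` are stand-ins for whatever the calling route handler already has in scope:

// Hypothetical caller — names are illustrative, not from the codebase.
const service = bitbucketConnectionService(getAppConnection);
const workspaces = await service.listWorkspaces(connectionId, actor);
const repos = await service.listRepositories(
  { connectionId, workspaceSlug: workspaces[0].slug },
  actor
);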
@@ -0,0 +1,40 @@
+import z from "zod";
+
+import { DiscriminativePick } from "@app/lib/types";
+
+import { AppConnection } from "../app-connection-enums";
+import {
+  BitbucketConnectionSchema,
+  CreateBitbucketConnectionSchema,
+  ValidateBitbucketConnectionCredentialsSchema
+} from "./bitbucket-connection-schemas";
+
+export type TBitbucketConnection = z.infer<typeof BitbucketConnectionSchema>;
+
+export type TBitbucketConnectionInput = z.infer<typeof CreateBitbucketConnectionSchema> & {
+  app: AppConnection.Bitbucket;
+};
+
+export type TValidateBitbucketConnectionCredentialsSchema = typeof ValidateBitbucketConnectionCredentialsSchema;
+
+export type TBitbucketConnectionConfig = DiscriminativePick<
+  TBitbucketConnectionInput,
+  "method" | "app" | "credentials"
+> & {
+  orgId: string;
+};
+
+export type TGetBitbucketRepositoriesDTO = {
+  connectionId: string;
+  workspaceSlug: string;
+};
+
+export type TBitbucketWorkspace = {
+  slug: string;
+};
+
+export type TBitbucketRepo = {
+  uuid: string;
+  full_name: string; // workspace-slug/repo-slug
+  slug: string;
+};
backend/src/services/app-connection/bitbucket/index.ts (new file, +4)
@@ -0,0 +1,4 @@
+export * from "./bitbucket-connection-enums";
+export * from "./bitbucket-connection-fns";
+export * from "./bitbucket-connection-schemas";
+export * from "./bitbucket-connection-types";
backend/src/services/app-connection/zabbix/index.ts (new file, +4)
@@ -0,0 +1,4 @@
+export * from "./zabbix-connection-enums";
+export * from "./zabbix-connection-fns";
+export * from "./zabbix-connection-schemas";
+export * from "./zabbix-connection-types";
@@ -0,0 +1,3 @@
+export enum ZabbixConnectionMethod {
+  ApiToken = "api-token"
+}
@@ -0,0 +1,108 @@
+import { AxiosError } from "axios";
+import RE2 from "re2";
+
+import { request } from "@app/lib/config/request";
+import { BadRequestError } from "@app/lib/errors";
+import { blockLocalAndPrivateIpAddresses } from "@app/lib/validator";
+import { AppConnection } from "@app/services/app-connection/app-connection-enums";
+
+import { ZabbixConnectionMethod } from "./zabbix-connection-enums";
+import {
+  TZabbixConnection,
+  TZabbixConnectionConfig,
+  TZabbixHost,
+  TZabbixHostListResponse
+} from "./zabbix-connection-types";
+
+const TRAILING_SLASH_REGEX = new RE2("/+$");
+
+export const getZabbixConnectionListItem = () => {
+  return {
+    name: "Zabbix" as const,
+    app: AppConnection.Zabbix as const,
+    methods: Object.values(ZabbixConnectionMethod) as [ZabbixConnectionMethod.ApiToken]
+  };
+};
+
+export const validateZabbixConnectionCredentials = async (config: TZabbixConnectionConfig) => {
+  const { apiToken, instanceUrl } = config.credentials;
+  await blockLocalAndPrivateIpAddresses(instanceUrl);
+
+  try {
+    const apiUrl = `${instanceUrl.replace(TRAILING_SLASH_REGEX, "")}/api_jsonrpc.php`;
+
+    const payload = {
+      jsonrpc: "2.0",
+      method: "authentication.get",
+      params: {
+        output: "extend"
+      },
+      id: 1
+    };
+
+    const response: { data: { error?: { message: string }; result?: string } } = await request.post(apiUrl, payload, {
+      headers: {
+        "Content-Type": "application/json",
+        Authorization: `Bearer ${apiToken}`
+      }
+    });
+
+    if (response.data.error) {
+      throw new BadRequestError({
+        message: response.data.error.message
+      });
+    }
+
+    return config.credentials;
+  } catch (error) {
+    if (error instanceof AxiosError) {
+      throw new BadRequestError({
+        message: `Failed to connect to Zabbix instance: ${error.message}`
+      });
+    }
+    throw error;
+  }
+};
+
+export const listZabbixHosts = async (appConnection: TZabbixConnection): Promise<TZabbixHost[]> => {
+  const { apiToken, instanceUrl } = appConnection.credentials;
+  await blockLocalAndPrivateIpAddresses(instanceUrl);
+
+  try {
+    const apiUrl = `${instanceUrl.replace(TRAILING_SLASH_REGEX, "")}/api_jsonrpc.php`;
+
+    const payload = {
+      jsonrpc: "2.0",
+      method: "host.get",
+      params: {
+        output: ["hostid", "host"],
+        sortfield: "host",
+        sortorder: "ASC"
+      },
+      id: 1
+    };
+
+    const response: { data: TZabbixHostListResponse } = await request.post(apiUrl, payload, {
+      headers: {
+        "Content-Type": "application/json",
+        Authorization: `Bearer ${apiToken}`
+      }
+    });
+
+    return response.data.result
+      ? response.data.result.map((host) => ({
+          hostId: host.hostid,
+          host: host.host
+        }))
+      : [];
+  } catch (error: unknown) {
+    if (error instanceof AxiosError) {
+      throw new BadRequestError({
+        message: `Failed to validate credentials: ${error.message || "Unknown error"}`
+      });
+    }
+    throw new BadRequestError({
+      message: "Unable to validate connection: verify credentials"
+    });
+  }
+};
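Both functions above speak Zabbix's JSON-RPC 2.0 envelope with a bearer token; the request/response framing could be factored into one helper. A sketch under the same assumptions (the file's own `request` and `BadRequestError`; note the real code uses an RE2 regex for the trailing slash, while this sketch uses a native one):

// Illustrative helper — wraps a Zabbix JSON-RPC call and surfaces API errors.
const zabbixRpc = async <T>(
  instanceUrl: string,
  apiToken: string,
  method: string,
  params: Record<string, unknown>
): Promise<T | undefined> => {
  const apiUrl = `${instanceUrl.replace(/\/+$/, "")}/api_jsonrpc.php`;
  const { data } = await request.post<{ result?: T; error?: { message: string } }>(
    apiUrl,
    { jsonrpc: "2.0", method, params, id: 1 },
    { headers: { "Content-Type": "application/json", Authorization: `Bearer ${apiToken}` } }
  );
  // Zabbix reports failures in-band via the `error` member, not HTTP status.
  if (data.error) throw new BadRequestError({ message: data.error.message });
  return data.result;
};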
@@ -0,0 +1,62 @@
+import z from "zod";
+
+import { AppConnections } from "@app/lib/api-docs";
+import { AppConnection } from "@app/services/app-connection/app-connection-enums";
+import {
+  BaseAppConnectionSchema,
+  GenericCreateAppConnectionFieldsSchema,
+  GenericUpdateAppConnectionFieldsSchema
+} from "@app/services/app-connection/app-connection-schemas";
+
+import { ZabbixConnectionMethod } from "./zabbix-connection-enums";
+
+export const ZabbixConnectionApiTokenCredentialsSchema = z.object({
+  apiToken: z
+    .string()
+    .trim()
+    .min(1, "API Token required")
+    .max(1000)
+    .describe(AppConnections.CREDENTIALS.ZABBIX.apiToken),
+  instanceUrl: z.string().trim().url("Invalid Instance URL").describe(AppConnections.CREDENTIALS.ZABBIX.instanceUrl)
+});
+
+const BaseZabbixConnectionSchema = BaseAppConnectionSchema.extend({ app: z.literal(AppConnection.Zabbix) });
+
+export const ZabbixConnectionSchema = BaseZabbixConnectionSchema.extend({
+  method: z.literal(ZabbixConnectionMethod.ApiToken),
+  credentials: ZabbixConnectionApiTokenCredentialsSchema
+});
+
+export const SanitizedZabbixConnectionSchema = z.discriminatedUnion("method", [
+  BaseZabbixConnectionSchema.extend({
+    method: z.literal(ZabbixConnectionMethod.ApiToken),
+    credentials: ZabbixConnectionApiTokenCredentialsSchema.pick({ instanceUrl: true })
+  })
+]);
+
+export const ValidateZabbixConnectionCredentialsSchema = z.discriminatedUnion("method", [
+  z.object({
+    method: z.literal(ZabbixConnectionMethod.ApiToken).describe(AppConnections.CREATE(AppConnection.Zabbix).method),
+    credentials: ZabbixConnectionApiTokenCredentialsSchema.describe(
+      AppConnections.CREATE(AppConnection.Zabbix).credentials
+    )
+  })
+]);
+
+export const CreateZabbixConnectionSchema = ValidateZabbixConnectionCredentialsSchema.and(
+  GenericCreateAppConnectionFieldsSchema(AppConnection.Zabbix)
+);
+
+export const UpdateZabbixConnectionSchema = z
+  .object({
+    credentials: ZabbixConnectionApiTokenCredentialsSchema.optional().describe(
+      AppConnections.UPDATE(AppConnection.Zabbix).credentials
+    )
+  })
+  .and(GenericUpdateAppConnectionFieldsSchema(AppConnection.Zabbix));
+
+export const ZabbixConnectionListItemSchema = z.object({
+  name: z.literal("Zabbix"),
+  app: z.literal(AppConnection.Zabbix),
+  methods: z.nativeEnum(ZabbixConnectionMethod).array()
+});
@@ -0,0 +1,30 @@
+import { logger } from "@app/lib/logger";
+import { OrgServiceActor } from "@app/lib/types";
+
+import { AppConnection } from "../app-connection-enums";
+import { listZabbixHosts } from "./zabbix-connection-fns";
+import { TZabbixConnection } from "./zabbix-connection-types";
+
+type TGetAppConnectionFunc = (
+  app: AppConnection,
+  connectionId: string,
+  actor: OrgServiceActor
+) => Promise<TZabbixConnection>;
+
+export const zabbixConnectionService = (getAppConnection: TGetAppConnectionFunc) => {
+  const listHosts = async (connectionId: string, actor: OrgServiceActor) => {
+    const appConnection = await getAppConnection(AppConnection.Zabbix, connectionId, actor);
+
+    try {
+      const hosts = await listZabbixHosts(appConnection);
+      return hosts;
+    } catch (error) {
+      logger.error(error, "Failed to establish connection with zabbix");
+      return [];
+    }
+  };
+
+  return {
+    listHosts
+  };
+};
@@ -0,0 +1,33 @@
+import z from "zod";
+
+import { DiscriminativePick } from "@app/lib/types";
+
+import { AppConnection } from "../app-connection-enums";
+import {
+  CreateZabbixConnectionSchema,
+  ValidateZabbixConnectionCredentialsSchema,
+  ZabbixConnectionSchema
+} from "./zabbix-connection-schemas";
+
+export type TZabbixConnection = z.infer<typeof ZabbixConnectionSchema>;
+
+export type TZabbixConnectionInput = z.infer<typeof CreateZabbixConnectionSchema> & {
+  app: AppConnection.Zabbix;
+};
+
+export type TValidateZabbixConnectionCredentialsSchema = typeof ValidateZabbixConnectionCredentialsSchema;
+
+export type TZabbixConnectionConfig = DiscriminativePick<TZabbixConnectionInput, "method" | "app" | "credentials"> & {
+  orgId: string;
+};
+
+export type TZabbixHost = {
+  hostId: string;
+  host: string;
+};
+
+export type TZabbixHostListResponse = {
+  jsonrpc: string;
+  result: { hostid: string; host: string }[];
+  error?: { message: string };
+};
@@ -93,6 +93,7 @@ export const identityProjectServiceFactory = ({
       projectId
     );
 
+    if (requestedRoleChange !== ProjectMembershipRole.NoAccess) {
       const permissionBoundary = validatePrivilegeChangeOperation(
         membership.shouldUseNewPrivilegeSystem,
         ProjectPermissionIdentityActions.GrantPrivileges,
@@ -111,6 +112,7 @@ export const identityProjectServiceFactory = ({
           details: { missingPermissions: permissionBoundary.missingPermissions }
         });
       }
+    }
 
     // validate custom roles input
     const customInputRoles = roles.filter(
@@ -69,6 +69,7 @@ export const identityServiceFactory = ({
       orgId
     );
     const isCustomRole = Boolean(customRole);
+    if (role !== OrgMembershipRole.NoAccess) {
       const permissionBoundary = validatePrivilegeChangeOperation(
         membership.shouldUseNewPrivilegeSystem,
         OrgPermissionIdentityActions.GrantPrivileges,
@@ -86,6 +87,7 @@ export const identityServiceFactory = ({
         ),
         details: { missingPermissions: permissionBoundary.missingPermissions }
       });
     }
 
     const plan = await licenseService.getPlan(orgId);
 
@@ -187,6 +189,7 @@ export const identityServiceFactory = ({
         ),
         details: { missingPermissions: appliedRolePermissionBoundary.missingPermissions }
       });
 
       if (isCustomRole) customRole = customOrgRole;
     }
@@ -814,9 +814,9 @@ const getAppsCloudflareWorkers = async ({ accessToken, accountId }: { accessToke
 };
 
 /**
- * Return list of repositories for the BitBucket integration based on provided BitBucket workspace
+ * Return list of repositories for the Bitbucket integration based on provided Bitbucket workspace
 */
-const getAppsBitBucket = async ({ accessToken, workspaceSlug }: { accessToken: string; workspaceSlug?: string }) => {
+const getAppsBitbucket = async ({ accessToken, workspaceSlug }: { accessToken: string; workspaceSlug?: string }) => {
   interface RepositoriesResponse {
     size: number;
     page: number;
@@ -1302,7 +1302,7 @@ export const getApps = async ({
       });
 
     case Integrations.BITBUCKET:
-      return getAppsBitBucket({
+      return getAppsBitbucket({
        accessToken,
        workspaceSlug
      });
@@ -342,7 +342,7 @@ export const getIntegrationOptions = async () => {
   {
     name: "Bitbucket",
     slug: "bitbucket",
-    image: "BitBucket.png",
+    image: "Bitbucket.png",
     isAvailable: true,
     type: "oauth",
     clientId: appCfg.CLIENT_ID_BITBUCKET,
@@ -3921,9 +3921,9 @@ const syncSecretsCloudflareWorkers = async ({
 };
 
 /**
- * Sync/push [secrets] to BitBucket repo with name [integration.app]
+ * Sync/push [secrets] to Bitbucket repo with name [integration.app]
 */
-const syncSecretsBitBucket = async ({
+const syncSecretsBitbucket = async ({
   integration,
   secrets,
   accessToken
@@ -4832,7 +4832,7 @@ export const syncIntegrationSecrets = async ({
       });
       break;
     case Integrations.BITBUCKET:
-      await syncSecretsBitBucket({
+      await syncSecretsBitbucket({
        integration,
        secrets,
        accessToken
@@ -64,7 +64,7 @@ type ExchangeCodeGitlabResponse = {
   created_at: number;
 };
 
-type ExchangeCodeBitBucketResponse = {
+type ExchangeCodeBitbucketResponse = {
   access_token: string;
   token_type: string;
   expires_in: number;
@@ -392,10 +392,10 @@ const exchangeCodeGitlab = async ({ code, url }: { code: string; url?: string })
 };
 
 /**
- * Return [accessToken], [accessExpiresAt], and [refreshToken] for BitBucket
+ * Return [accessToken], [accessExpiresAt], and [refreshToken] for Bitbucket
  * code-token exchange
  */
-const exchangeCodeBitBucket = async ({ code }: { code: string }) => {
+const exchangeCodeBitbucket = async ({ code }: { code: string }) => {
   const accessExpiresAt = new Date();
   const appCfg = getConfig();
   if (!appCfg.CLIENT_SECRET_BITBUCKET || !appCfg.CLIENT_ID_BITBUCKET) {
@@ -403,7 +403,7 @@ const exchangeCodeBitBucket = async ({ code }: { code: string }) => {
   }
 
   const res = (
-    await request.post<ExchangeCodeBitBucketResponse>(
+    await request.post<ExchangeCodeBitbucketResponse>(
       IntegrationUrls.BITBUCKET_TOKEN_URL,
       new URLSearchParams({
         grant_type: "authorization_code",
@@ -490,7 +490,7 @@ export const exchangeCode = async ({
         url
       });
     case Integrations.BITBUCKET:
-      return exchangeCodeBitBucket({
+      return exchangeCodeBitbucket({
        code
      });
    default:
@@ -524,7 +524,7 @@ type RefreshTokenGitLabResponse = {
   created_at: number;
 };
 
-type RefreshTokenBitBucketResponse = {
+type RefreshTokenBitbucketResponse = {
   access_token: string;
   token_type: string;
   expires_in: number;
@@ -653,9 +653,9 @@ const exchangeRefreshGitLab = async ({ refreshToken, url }: { url?: string | nul
 
 /**
  * Return new access token by exchanging refresh token [refreshToken] for the
- * BitBucket integration
+ * Bitbucket integration
 */
-const exchangeRefreshBitBucket = async ({ refreshToken }: { refreshToken: string }) => {
+const exchangeRefreshBitbucket = async ({ refreshToken }: { refreshToken: string }) => {
   const accessExpiresAt = new Date();
   const appCfg = getConfig();
   if (!appCfg.CLIENT_SECRET_BITBUCKET || !appCfg.CLIENT_ID_BITBUCKET) {
@@ -664,7 +664,7 @@ const exchangeRefreshBitBucket = async ({ refreshToken }: { refreshToken: string
   const {
     data
   }: {
-    data: RefreshTokenBitBucketResponse;
+    data: RefreshTokenBitbucketResponse;
   } = await request.post(
     IntegrationUrls.BITBUCKET_TOKEN_URL,
     new URLSearchParams({
@@ -794,7 +794,7 @@ export const exchangeRefresh = async (
         url
       });
     case Integrations.BITBUCKET:
-      return exchangeRefreshBitBucket({
+      return exchangeRefreshBitbucket({
        refreshToken
      });
    case Integrations.GCP_SECRET_MANAGER:
@@ -122,8 +122,8 @@ export const orgMembershipDALFactory = (db: TDbClient) => {
       .orWhere((qb) => {
         // lastInvitedAt is older than 1 week ago AND createdAt is younger than 1 month ago
         void qb
-          .where(`${TableName.OrgMembership}.lastInvitedAt`, "<", oneMonthAgo)
-          .where(`${TableName.OrgMembership}.createdAt`, ">", oneWeekAgo);
+          .where(`${TableName.OrgMembership}.lastInvitedAt`, "<", oneWeekAgo)
+          .where(`${TableName.OrgMembership}.createdAt`, ">", oneMonthAgo);
       });
 
     return memberships;
@@ -135,9 +135,22 @@ export const orgMembershipDALFactory = (db: TDbClient) => {
     }
   };
 
+  const updateLastInvitedAtByIds = async (membershipIds: string[]) => {
+    try {
+      if (membershipIds.length === 0) return;
+      await db(TableName.OrgMembership).whereIn("id", membershipIds).update({ lastInvitedAt: new Date() });
+    } catch (error) {
+      throw new DatabaseError({
+        error,
+        name: "Update last invited at by ids"
+      });
+    }
+  };
+
   return {
     ...orgMembershipOrm,
     findOrgMembershipById,
-    findRecentInvitedMemberships
+    findRecentInvitedMemberships,
+    updateLastInvitedAtByIds
   };
 };
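The new DAL method replaces per-membership updates with a single `whereIn` update; the org-service change below collects the notified membership IDs and flushes them in one statement. A rough before/after sketch (the "before" shape is an assumption based on the removed lines):

// Before (hypothetical shape): one UPDATE per membership, issued inside Promise.all
// await Promise.all(ids.map((id) => orgMembershipDAL.updateById(id, { lastInvitedAt: new Date() })));

// After: a single UPDATE ... WHERE id IN (...) covering every notified user
await orgMembershipDAL.updateLastInvitedAtByIds(notifiedUsers);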
@@ -109,7 +109,12 @@ type TOrgServiceFactoryDep = {
   projectKeyDAL: Pick<TProjectKeyDALFactory, "find" | "delete" | "insertMany" | "findLatestProjectKey" | "create">;
   orgMembershipDAL: Pick<
     TOrgMembershipDALFactory,
-    "findOrgMembershipById" | "findOne" | "findById" | "findRecentInvitedMemberships" | "updateById"
+    | "findOrgMembershipById"
+    | "findOne"
+    | "findById"
+    | "findRecentInvitedMemberships"
+    | "updateById"
+    | "updateLastInvitedAtByIds"
   >;
   incidentContactDAL: TIncidentContactsDALFactory;
   samlConfigDAL: Pick<TSamlConfigDALFactory, "findOne">;
@@ -763,6 +768,10 @@ export const orgServiceFactory = ({
     }
   });
 
+  await orgMembershipDAL.updateById(inviteeOrgMembership.id, {
+    lastInvitedAt: new Date()
+  });
+
   return { signupToken: undefined };
 };
 
@@ -1433,6 +1442,7 @@ export const orgServiceFactory = ({
   const appCfg = getConfig();
 
   const orgCache: Record<string, { name: string; id: string } | undefined> = {};
+  const notifiedUsers: string[] = [];
 
   await Promise.all(
     invitedUsers.map(async (invitedUser) => {
@@ -1463,13 +1473,12 @@ export const orgServiceFactory = ({
            callback_url: `${appCfg.SITE_URL}/signupinvite`
          }
        });
+        notifiedUsers.push(invitedUser.id);
      }
 
-      await orgMembershipDAL.updateById(invitedUser.id, {
-        lastInvitedAt: new Date()
-      });
    })
  );
 
+  await orgMembershipDAL.updateLastInvitedAtByIds(notifiedUsers);
 };
 
 return {
@ -2,7 +2,7 @@ import { ForbiddenError, subject } from "@casl/ability";
|
||||
import path from "path";
|
||||
import { v4 as uuidv4, validate as uuidValidate } from "uuid";
|
||||
|
||||
import { TSecretFolders, TSecretFoldersInsert } from "@app/db/schemas";
|
||||
import { TProjectEnvironments, TSecretFolders, TSecretFoldersInsert } from "@app/db/schemas";
|
||||
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service-types";
|
||||
import { ProjectPermissionActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission";
|
||||
import { TSecretApprovalPolicyServiceFactory } from "@app/ee/services/secret-approval-policy/secret-approval-policy-service";
|
||||
@ -469,15 +469,41 @@ export const secretFolderServiceFactory = ({
|
||||
|
||||
const $checkFolderPolicy = async ({
|
||||
projectId,
|
||||
environment,
|
||||
parentId
|
||||
env,
|
||||
parentId,
|
||||
idOrName
|
||||
}: {
|
||||
projectId: string;
|
||||
environment: string;
|
||||
env: TProjectEnvironments;
|
||||
parentId: string;
|
||||
idOrName: string;
|
||||
}) => {
|
||||
let targetFolder = await folderDAL
|
||||
.findOne({
|
||||
envId: env.id,
|
||||
name: idOrName,
|
||||
parentId,
|
||||
isReserved: false
|
||||
})
|
||||
.catch(() => null);
|
||||
|
||||
if (!targetFolder && uuidValidate(idOrName)) {
|
||||
targetFolder = await folderDAL
|
||||
.findOne({
|
||||
envId: env.id,
|
||||
id: idOrName,
|
||||
parentId,
|
||||
isReserved: false
|
||||
})
|
||||
.catch(() => null);
|
||||
}
|
||||
|
||||
if (!targetFolder) {
|
||||
throw new NotFoundError({ message: `Target folder not found` });
|
||||
}
|
||||
|
||||
// get environment root folder (as it's needed to get all folders under it)
|
||||
const rootFolder = await folderDAL.findBySecretPath(projectId, environment, "/");
|
||||
const rootFolder = await folderDAL.findBySecretPath(projectId, env.slug, "/");
|
||||
if (!rootFolder) throw new NotFoundError({ message: `Root folder not found` });
|
||||
// get all folders under environment root folder
|
||||
const folderPaths = await folderDAL.findByEnvsDeep({ parentIds: [rootFolder.id] });
|
||||
@ -492,7 +518,13 @@ export const secretFolderServiceFactory = ({
|
||||
folderMap.get(normalizeKey(folder.parentId))?.push(folder);
|
||||
}
|
||||
|
||||
    // Recursively collect all folders under the given parentId
    // Find the target folder in the folderPaths to get its full details
    const targetFolderWithPath = folderPaths.find((f) => f.id === targetFolder!.id);
    if (!targetFolderWithPath) {
      throw new NotFoundError({ message: `Target folder path not found` });
    }

    // Recursively collect all folders under the target folder (descendants only)
    const collectDescendants = (
      id: string
    ): (TSecretFolders & { path: string; depth: number; environment: string })[] => {
@ -500,23 +532,31 @@ export const secretFolderServiceFactory = ({
      return [...children, ...children.flatMap((child) => collectDescendants(child.id))];
    };

    const foldersUnderParent = collectDescendants(parentId);
    const targetFolderDescendants = collectDescendants(targetFolder.id);

    const folderPolicyPaths = foldersUnderParent.map((folder) => ({
    // Include the target folder itself plus all its descendants
    const foldersToCheck = [targetFolderWithPath, ...targetFolderDescendants];

    const folderPolicyPaths = foldersToCheck.map((folder) => ({
      path: folder.path,
      id: folder.id
    }));

    // get secrets under the given folders
    const secrets = await secretV2BridgeDAL.findByFolderIds({ folderIds: folderPolicyPaths.map((p) => p.id) });
    const secrets = await secretV2BridgeDAL.findByFolderIds({
      folderIds: folderPolicyPaths.map((p) => p.id)
    });

    for await (const folderPolicyPath of folderPolicyPaths) {
      // eslint-disable-next-line no-continue
      if (!secrets.some((s) => s.folderId === folderPolicyPath.id)) continue;

      const policy = await secretApprovalPolicyService.getSecretApprovalPolicy(
        projectId,
        environment,
        env.slug,
        folderPolicyPath.path
      );

      // if there is a policy and there are secrets under the given folder, throw error
      if (policy) {
        throw new BadRequestError({
@ -560,20 +600,42 @@ export const secretFolderServiceFactory = ({
          message: `Folder with path '${secretPath}' in environment with slug '${environment}' not found`
        });

    await $checkFolderPolicy({ projectId, environment, parentId: parentFolder.id });
    await $checkFolderPolicy({ projectId, env, parentId: parentFolder.id, idOrName });

    let folderToDelete = await folderDAL
      .findOne({
        envId: env.id,
        name: idOrName,
        parentId: parentFolder.id,
        isReserved: false
      })
      .catch(() => null);

    if (!folderToDelete && uuidValidate(idOrName)) {
      folderToDelete = await folderDAL
        .findOne({
          envId: env.id,
          id: idOrName,
          parentId: parentFolder.id,
          isReserved: false
        })
        .catch(() => null);
    }

    if (!folderToDelete) {
      throw new NotFoundError({ message: `Folder with ID '${idOrName}' not found` });
    }

    const [doc] = await folderDAL.delete(
      {
        envId: env.id,
        [uuidValidate(idOrName) ? "id" : "name"]: idOrName,
        id: folderToDelete.id,
        parentId: parentFolder.id,
        isReserved: false
      },
      tx
    );

    if (!doc) throw new NotFoundError({ message: `Failed to delete folder with ID '${idOrName}', not found` });

    const folderVersions = await folderVersionDAL.findLatestFolderVersions([doc.id], tx);

    await folderCommitService.createCommit(
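Note on the change above: the descendant collection is a plain depth-first walk, where each call gathers a folder's immediate children and then recurses into each child. A minimal self-contained sketch of the same pattern, assuming a hypothetical flat Folder shape with id/parentId fields rather than the service's DAL types:

type Folder = { id: string; parentId: string | null };

const collectDescendants = (folders: Folder[], id: string): Folder[] => {
  // Immediate children of the given folder
  const children = folders.filter((f) => f.parentId === id);
  // Each child contributes itself plus its own descendants
  return [...children, ...children.flatMap((child) => collectDescendants(folders, child.id))];
};

// Usage: returns every folder nested under "root", which the service then
// combines with the target folder itself before running policy checks.
const all: Folder[] = [
  { id: "root", parentId: null },
  { id: "a", parentId: "root" },
  { id: "b", parentId: "a" }
];
console.log(collectDescendants(all, "root").map((f) => f.id)); // ["a", "b"]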
|
@ -20,7 +20,8 @@ export enum SecretSync
  Render = "render",
  Flyio = "flyio",
  GitLab = "gitlab",
  CloudflarePages = "cloudflare-pages"
  CloudflarePages = "cloudflare-pages",
  Zabbix = "zabbix"
}

export enum SecretSyncInitialSyncBehavior {
|
@ -45,6 +45,7 @@ import { TEAMCITY_SYNC_LIST_OPTION, TeamCitySyncFns } from "./teamcity";
import { TERRAFORM_CLOUD_SYNC_LIST_OPTION, TerraformCloudSyncFns } from "./terraform-cloud";
import { VERCEL_SYNC_LIST_OPTION, VercelSyncFns } from "./vercel";
import { WINDMILL_SYNC_LIST_OPTION, WindmillSyncFns } from "./windmill";
import { ZABBIX_SYNC_LIST_OPTION, ZabbixSyncFns } from "./zabbix";

const SECRET_SYNC_LIST_OPTIONS: Record<SecretSync, TSecretSyncListItem> = {
  [SecretSync.AWSParameterStore]: AWS_PARAMETER_STORE_SYNC_LIST_OPTION,
@ -68,7 +69,8 @@ const SECRET_SYNC_LIST_OPTIONS: Record<SecretSync, TSecretSyncListItem> = {
  [SecretSync.Render]: RENDER_SYNC_LIST_OPTION,
  [SecretSync.Flyio]: FLYIO_SYNC_LIST_OPTION,
  [SecretSync.GitLab]: GITLAB_SYNC_LIST_OPTION,
  [SecretSync.CloudflarePages]: CLOUDFLARE_PAGES_SYNC_LIST_OPTION
  [SecretSync.CloudflarePages]: CLOUDFLARE_PAGES_SYNC_LIST_OPTION,
  [SecretSync.Zabbix]: ZABBIX_SYNC_LIST_OPTION
};

export const listSecretSyncOptions = () => {
@ -236,6 +238,8 @@ export const SecretSyncFns = {
      return GitLabSyncFns.syncSecrets(secretSync, schemaSecretMap, { appConnectionDAL, kmsService });
    case SecretSync.CloudflarePages:
      return CloudflarePagesSyncFns.syncSecrets(secretSync, schemaSecretMap);
    case SecretSync.Zabbix:
      return ZabbixSyncFns.syncSecrets(secretSync, schemaSecretMap);
    default:
      throw new Error(
        `Unhandled sync destination for sync secrets fns: ${(secretSync as TSecretSyncWithCredentials).destination}`
@ -328,6 +332,9 @@ export const SecretSyncFns = {
    case SecretSync.CloudflarePages:
      secretMap = await CloudflarePagesSyncFns.getSecrets(secretSync);
      break;
    case SecretSync.Zabbix:
      secretMap = await ZabbixSyncFns.getSecrets(secretSync);
      break;
    default:
      throw new Error(
        `Unhandled sync destination for get secrets fns: ${(secretSync as TSecretSyncWithCredentials).destination}`
@ -405,6 +412,8 @@ export const SecretSyncFns = {
      return GitLabSyncFns.removeSecrets(secretSync, schemaSecretMap, { appConnectionDAL, kmsService });
    case SecretSync.CloudflarePages:
      return CloudflarePagesSyncFns.removeSecrets(secretSync, schemaSecretMap);
    case SecretSync.Zabbix:
      return ZabbixSyncFns.removeSecrets(secretSync, schemaSecretMap);
    default:
      throw new Error(
        `Unhandled sync destination for remove secrets fns: ${(secretSync as TSecretSyncWithCredentials).destination}`
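Each of the three dispatchers above follows the same shape: switch on the sync destination, delegate to that provider's fns object, and throw on anything unhandled so a missing case fails loudly. A condensed sketch of the pattern, using hypothetical provider stubs in place of the real fns modules:

type SecretMap = Record<string, { value: string }>;

const CloudflareStub = { syncSecrets: async (_s: SecretMap) => {} }; // stand-in for CloudflarePagesSyncFns
const ZabbixStub = { syncSecrets: async (_s: SecretMap) => {} }; // stand-in for ZabbixSyncFns

const syncSecrets = async (destination: "cloudflare-pages" | "zabbix", secretMap: SecretMap) => {
  switch (destination) {
    case "cloudflare-pages":
      return CloudflareStub.syncSecrets(secretMap);
    case "zabbix":
      return ZabbixStub.syncSecrets(secretMap);
    default:
      // Unreachable when the union is exhaustive; kept so new destinations fail loudly.
      throw new Error(`Unhandled sync destination: ${destination as string}`);
  }
};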
|
@ -23,7 +23,8 @@ export const SECRET_SYNC_NAME_MAP: Record<SecretSync, string> = {
  [SecretSync.Render]: "Render",
  [SecretSync.Flyio]: "Fly.io",
  [SecretSync.GitLab]: "GitLab",
  [SecretSync.CloudflarePages]: "Cloudflare Pages"
  [SecretSync.CloudflarePages]: "Cloudflare Pages",
  [SecretSync.Zabbix]: "Zabbix"
};

export const SECRET_SYNC_CONNECTION_MAP: Record<SecretSync, AppConnection> = {
@ -48,7 +49,8 @@ export const SECRET_SYNC_CONNECTION_MAP: Record<SecretSync, AppConnection> = {
  [SecretSync.Render]: AppConnection.Render,
  [SecretSync.Flyio]: AppConnection.Flyio,
  [SecretSync.GitLab]: AppConnection.GitLab,
  [SecretSync.CloudflarePages]: AppConnection.Cloudflare
  [SecretSync.CloudflarePages]: AppConnection.Cloudflare,
  [SecretSync.Zabbix]: AppConnection.Zabbix
};

export const SECRET_SYNC_PLAN_MAP: Record<SecretSync, SecretSyncPlanType> = {
@ -73,5 +75,6 @@ export const SECRET_SYNC_PLAN_MAP: Record<SecretSync, SecretSyncPlanType> = {
  [SecretSync.Render]: SecretSyncPlanType.Regular,
  [SecretSync.Flyio]: SecretSyncPlanType.Regular,
  [SecretSync.GitLab]: SecretSyncPlanType.Regular,
  [SecretSync.CloudflarePages]: SecretSyncPlanType.Regular
  [SecretSync.CloudflarePages]: SecretSyncPlanType.Regular,
  [SecretSync.Zabbix]: SecretSyncPlanType.Regular
};
|
@ -113,6 +113,7 @@ import {
  TTerraformCloudSyncWithCredentials
} from "./terraform-cloud";
import { TVercelSync, TVercelSyncInput, TVercelSyncListItem, TVercelSyncWithCredentials } from "./vercel";
import { TZabbixSync, TZabbixSyncInput, TZabbixSyncListItem, TZabbixSyncWithCredentials } from "./zabbix";

export type TSecretSync =
  | TAwsParameterStoreSync
@ -136,7 +137,8 @@ export type TSecretSync =
  | TRenderSync
  | TFlyioSync
  | TGitLabSync
  | TCloudflarePagesSync;
  | TCloudflarePagesSync
  | TZabbixSync;

export type TSecretSyncWithCredentials =
  | TAwsParameterStoreSyncWithCredentials
@ -160,7 +162,8 @@ export type TSecretSyncWithCredentials =
  | TRenderSyncWithCredentials
  | TFlyioSyncWithCredentials
  | TGitLabSyncWithCredentials
  | TCloudflarePagesSyncWithCredentials;
  | TCloudflarePagesSyncWithCredentials
  | TZabbixSyncWithCredentials;

export type TSecretSyncInput =
  | TAwsParameterStoreSyncInput
@ -184,7 +187,8 @@ export type TSecretSyncInput =
  | TRenderSyncInput
  | TFlyioSyncInput
  | TGitLabSyncInput
  | TCloudflarePagesSyncInput;
  | TCloudflarePagesSyncInput
  | TZabbixSyncInput;

export type TSecretSyncListItem =
  | TAwsParameterStoreSyncListItem
@ -208,7 +212,8 @@ export type TSecretSyncListItem =
  | TRenderSyncListItem
  | TFlyioSyncListItem
  | TGitLabSyncListItem
  | TCloudflarePagesSyncListItem;
  | TCloudflarePagesSyncListItem
  | TZabbixSyncListItem;

export type TSyncOptionsConfig = {
  canImportSecrets: boolean;
|
backend/src/services/secret-sync/zabbix/index.ts (new file, 5 lines)
@ -0,0 +1,5 @@
export * from "./zabbix-sync-constants";
export * from "./zabbix-sync-enums";
export * from "./zabbix-sync-fns";
export * from "./zabbix-sync-schemas";
export * from "./zabbix-sync-types";
|
backend/src/services/secret-sync/zabbix/zabbix-sync-constants.ts (new file, 10 lines)
@ -0,0 +1,10 @@
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
import { SecretSync } from "@app/services/secret-sync/secret-sync-enums";
import { TSecretSyncListItem } from "@app/services/secret-sync/secret-sync-types";

export const ZABBIX_SYNC_LIST_OPTION: TSecretSyncListItem = {
  name: "Zabbix",
  destination: SecretSync.Zabbix,
  connection: AppConnection.Zabbix,
  canImportSecrets: true
};
|
backend/src/services/secret-sync/zabbix/zabbix-sync-enums.ts (new file, 4 lines)
@ -0,0 +1,4 @@
export enum ZabbixSyncScope {
  Global = "global",
  Host = "host"
}
|
backend/src/services/secret-sync/zabbix/zabbix-sync-fns.ts (new file, 285 lines)
@ -0,0 +1,285 @@
import RE2 from "re2";

import { request } from "@app/lib/config/request";
import { blockLocalAndPrivateIpAddresses } from "@app/lib/validator";
import { SecretSyncError } from "@app/services/secret-sync/secret-sync-errors";
import { matchesSchema } from "@app/services/secret-sync/secret-sync-fns";
import { TSecretMap } from "@app/services/secret-sync/secret-sync-types";
import {
  TZabbixSecret,
  TZabbixSyncWithCredentials,
  ZabbixApiResponse,
  ZabbixMacroCreateResponse,
  ZabbixMacroDeleteResponse
} from "@app/services/secret-sync/zabbix/zabbix-sync-types";

import { ZabbixSyncScope } from "./zabbix-sync-enums";

const TRAILING_SLASH_REGEX = new RE2("/+$");
const MACRO_START_REGEX = new RE2("^\\{\\$");
const MACRO_END_REGEX = new RE2("\\}$");

const extractMacroKey = (macro: string): string => {
  return macro.replace(MACRO_START_REGEX, "").replace(MACRO_END_REGEX, "");
};

// Helper function to handle Zabbix API responses and errors
const handleZabbixResponse = <T>(response: ZabbixApiResponse<T>): T => {
  if (response.data.error) {
    const errorMessage = response.data.error.data
      ? `${response.data.error.message}: ${response.data.error.data}`
      : response.data.error.message;
    throw new SecretSyncError({
      error: new Error(`Zabbix API Error (${response.data.error.code}): ${errorMessage}`)
    });
  }

  if (response.data.result === undefined) {
    throw new SecretSyncError({
      error: new Error("Zabbix API returned no result")
    });
  }

  return response.data.result;
};

const listZabbixSecrets = async (apiToken: string, instanceUrl: string, hostId?: string): Promise<TZabbixSecret[]> => {
  const apiUrl = `${instanceUrl.replace(TRAILING_SLASH_REGEX, "")}/api_jsonrpc.php`;

  // - jsonrpc: Specifies the JSON-RPC protocol version.
  // - method: The API method to call, in this case "usermacro.get" for retrieving user macros.
  // - id: A unique identifier for the request. Required by JSON-RPC but not used by the API for logic. Typically set to any integer.
  const payload = {
    jsonrpc: "2.0" as const,
    method: "usermacro.get",
    params: hostId ? { output: "extend", hostids: hostId } : { output: "extend", globalmacro: true },
    id: 1
  };

  try {
    const response: ZabbixApiResponse<TZabbixSecret[]> = await request.post(apiUrl, payload, {
      headers: {
        "Content-Type": "application/json",
        Authorization: `Bearer ${apiToken}`
      }
    });

    return handleZabbixResponse(response) || [];
  } catch (error) {
    throw new SecretSyncError({
      error: error instanceof Error ? error : new Error("Failed to list Zabbix secrets")
    });
  }
};

const putZabbixSecrets = async (
  apiToken: string,
  instanceUrl: string,
  secretMap: TSecretMap,
  destinationConfig: TZabbixSyncWithCredentials["destinationConfig"],
  existingSecrets: TZabbixSecret[]
): Promise<void> => {
  const apiUrl = `${instanceUrl.replace(TRAILING_SLASH_REGEX, "")}/api_jsonrpc.php`;
  const hostId = destinationConfig.scope === ZabbixSyncScope.Host ? destinationConfig.hostId : undefined;

  const existingMacroMap = new Map(existingSecrets.map((secret) => [secret.macro, secret]));

  for (const [key, secret] of Object.entries(secretMap)) {
    const macroKey = `{$${key.toUpperCase()}}`;
    const existingMacro = existingMacroMap.get(macroKey);

    try {
      if (existingMacro) {
        // Update existing macro
        const updatePayload = {
          jsonrpc: "2.0" as const,
          method: hostId ? "usermacro.update" : "usermacro.updateglobal",
          params: {
            [hostId ? "hostmacroid" : "globalmacroid"]: existingMacro[hostId ? "hostmacroid" : "globalmacroid"],
            value: secret.value,
            type: destinationConfig.macroType,
            description: secret.comment
          },
          id: 1
        };

        // eslint-disable-next-line no-await-in-loop
        const response: ZabbixApiResponse<ZabbixMacroCreateResponse> = await request.post(apiUrl, updatePayload, {
          headers: {
            "Content-Type": "application/json",
            Authorization: `Bearer ${apiToken}`
          }
        });

        handleZabbixResponse(response);
      } else {
        // Create new macro
        const createPayload = {
          jsonrpc: "2.0" as const,
          method: hostId ? "usermacro.create" : "usermacro.createglobal",
          params: hostId
            ? {
                hostid: hostId,
                macro: macroKey,
                value: secret.value,
                type: destinationConfig.macroType,
                description: secret.comment
              }
            : {
                macro: macroKey,
                value: secret.value,
                type: destinationConfig.macroType,
                description: secret.comment
              },
          id: 1
        };

        // eslint-disable-next-line no-await-in-loop
        const response: ZabbixApiResponse<ZabbixMacroCreateResponse> = await request.post(apiUrl, createPayload, {
          headers: {
            "Content-Type": "application/json",
            Authorization: `Bearer ${apiToken}`
          }
        });

        handleZabbixResponse(response);
      }
    } catch (error) {
      throw new SecretSyncError({
        error: error instanceof Error ? error : new Error(`Failed to sync secret ${key}`)
      });
    }
  }
};

const deleteZabbixSecrets = async (
  apiToken: string,
  instanceUrl: string,
  keys: string[],
  hostId?: string
): Promise<void> => {
  if (keys.length === 0) return;

  const apiUrl = `${instanceUrl.replace(TRAILING_SLASH_REGEX, "")}/api_jsonrpc.php`;

  try {
    // Get existing macros to find their IDs
    const existingSecrets = await listZabbixSecrets(apiToken, instanceUrl, hostId);
    const macroIds = existingSecrets
      .filter((secret) => keys.includes(secret.macro))
      .map((secret) => secret[hostId ? "hostmacroid" : "globalmacroid"])
      .filter(Boolean);

    if (macroIds.length === 0) return;

    const payload = {
      jsonrpc: "2.0" as const,
      method: hostId ? "usermacro.delete" : "usermacro.deleteglobal",
      params: macroIds,
      id: 1
    };

    const response: ZabbixApiResponse<ZabbixMacroDeleteResponse> = await request.post(apiUrl, payload, {
      headers: {
        "Content-Type": "application/json",
        Authorization: `Bearer ${apiToken}`
      }
    });

    handleZabbixResponse(response);
  } catch (error) {
    throw new SecretSyncError({
      error: error instanceof Error ? error : new Error("Failed to delete Zabbix secrets")
    });
  }
};

export const ZabbixSyncFns = {
  syncSecrets: async (secretSync: TZabbixSyncWithCredentials, secretMap: TSecretMap) => {
    const { connection, environment, destinationConfig } = secretSync;
    const { apiToken, instanceUrl } = connection.credentials;
    await blockLocalAndPrivateIpAddresses(instanceUrl);

    const hostId = destinationConfig.scope === ZabbixSyncScope.Host ? destinationConfig.hostId : undefined;
    let secrets: TZabbixSecret[] = [];
    try {
      secrets = await listZabbixSecrets(apiToken, instanceUrl, hostId);
    } catch (error) {
      throw new SecretSyncError({
        error: error instanceof Error ? error : new Error("Failed to list Zabbix secrets")
      });
    }

    try {
      await putZabbixSecrets(apiToken, instanceUrl, secretMap, destinationConfig, secrets);
    } catch (error) {
      throw new SecretSyncError({
        error: error instanceof Error ? error : new Error("Failed to sync secrets")
      });
    }

    if (secretSync.syncOptions.disableSecretDeletion) return;

    try {
      const shapedSecretMapKeys = Object.keys(secretMap).map((key) => key.toUpperCase());

      const keys = secrets
        .filter(
          (secret) =>
            matchesSchema(secret.macro, environment?.slug || "", secretSync.syncOptions.keySchema) &&
            !shapedSecretMapKeys.includes(extractMacroKey(secret.macro))
        )
        .map((secret) => secret.macro);

      await deleteZabbixSecrets(apiToken, instanceUrl, keys, hostId);
    } catch (error) {
      throw new SecretSyncError({
        error: error instanceof Error ? error : new Error("Failed to delete orphaned secrets")
      });
    }
  },

  removeSecrets: async (secretSync: TZabbixSyncWithCredentials, secretMap: TSecretMap) => {
    const { connection, destinationConfig } = secretSync;
    const { apiToken, instanceUrl } = connection.credentials;
    await blockLocalAndPrivateIpAddresses(instanceUrl);

    const hostId = destinationConfig.scope === ZabbixSyncScope.Host ? destinationConfig.hostId : undefined;

    try {
      const secrets = await listZabbixSecrets(apiToken, instanceUrl, hostId);

      const shapedSecretMapKeys = Object.keys(secretMap).map((key) => key.toUpperCase());
      const keys = secrets
        .filter((secret) => shapedSecretMapKeys.includes(extractMacroKey(secret.macro)))
        .map((secret) => secret.macro);

      await deleteZabbixSecrets(apiToken, instanceUrl, keys, hostId);
    } catch (error) {
      throw new SecretSyncError({
        error: error instanceof Error ? error : new Error("Failed to remove secrets")
      });
    }
  },

  getSecrets: async (secretSync: TZabbixSyncWithCredentials) => {
    const { connection, destinationConfig } = secretSync;
    const { apiToken, instanceUrl } = connection.credentials;
    await blockLocalAndPrivateIpAddresses(instanceUrl);
    const hostId = destinationConfig.scope === ZabbixSyncScope.Host ? destinationConfig.hostId : undefined;

    try {
      const secrets = await listZabbixSecrets(apiToken, instanceUrl, hostId);
      return Object.fromEntries(
        secrets.map((secret) => [
          extractMacroKey(secret.macro),
          { value: secret.value ?? "", comment: secret.description }
        ])
      );
    } catch (error) {
      throw new SecretSyncError({
        error: error instanceof Error ? error : new Error("Failed to get secrets")
      });
    }
  }
};
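The file above maps Infisical keys to Zabbix user-macro names by uppercasing the key and wrapping it in {$...}; extractMacroKey reverses that when reading macros back. A small illustration of the round-trip, using native RegExp instead of the file's RE2 wrappers (same patterns):

const toMacro = (key: string): string => `{$${key.toUpperCase()}}`;
const fromMacro = (macro: string): string => macro.replace(/^\{\$/, "").replace(/\}$/, "");

console.log(toMacro("db_password")); // "{$DB_PASSWORD}"
console.log(fromMacro("{$DB_PASSWORD}")); // "DB_PASSWORD"

Because keys are uppercased on write, the orphan-deletion pass above also uppercases the incoming secret map keys before comparing them against existing macros.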
|
backend/src/services/secret-sync/zabbix/zabbix-sync-schemas.ts (new file, 67 lines)
@ -0,0 +1,67 @@
import { z } from "zod";

import { SecretSyncs } from "@app/lib/api-docs";
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
import { SecretSync } from "@app/services/secret-sync/secret-sync-enums";
import {
  BaseSecretSyncSchema,
  GenericCreateSecretSyncFieldsSchema,
  GenericUpdateSecretSyncFieldsSchema
} from "@app/services/secret-sync/secret-sync-schemas";
import { TSyncOptionsConfig } from "@app/services/secret-sync/secret-sync-types";

import { ZabbixSyncScope } from "./zabbix-sync-enums";

const ZabbixSyncDestinationConfigSchema = z.discriminatedUnion("scope", [
  z.object({
    scope: z.literal(ZabbixSyncScope.Host).describe(SecretSyncs.DESTINATION_CONFIG.ZABBIX.scope),
    hostId: z.string().trim().min(1, "Host required").max(255).describe(SecretSyncs.DESTINATION_CONFIG.ZABBIX.hostId),
    hostName: z
      .string()
      .trim()
      .min(1, "Host name required")
      .max(255)
      .describe(SecretSyncs.DESTINATION_CONFIG.ZABBIX.hostName),
    macroType: z
      .number()
      .min(0, "Macro type required")
      .max(1, "Macro type required")
      .describe(SecretSyncs.DESTINATION_CONFIG.ZABBIX.macroType)
  }),
  z.object({
    scope: z.literal(ZabbixSyncScope.Global).describe(SecretSyncs.DESTINATION_CONFIG.ZABBIX.scope),
    macroType: z
      .number()
      .min(0, "Macro type required")
      .max(1, "Macro type required")
      .describe(SecretSyncs.DESTINATION_CONFIG.ZABBIX.macroType)
  })
]);

const ZabbixSyncOptionsConfig: TSyncOptionsConfig = { canImportSecrets: true };

export const ZabbixSyncSchema = BaseSecretSyncSchema(SecretSync.Zabbix, ZabbixSyncOptionsConfig).extend({
  destination: z.literal(SecretSync.Zabbix),
  destinationConfig: ZabbixSyncDestinationConfigSchema
});

export const CreateZabbixSyncSchema = GenericCreateSecretSyncFieldsSchema(
  SecretSync.Zabbix,
  ZabbixSyncOptionsConfig
).extend({
  destinationConfig: ZabbixSyncDestinationConfigSchema
});

export const UpdateZabbixSyncSchema = GenericUpdateSecretSyncFieldsSchema(
  SecretSync.Zabbix,
  ZabbixSyncOptionsConfig
).extend({
  destinationConfig: ZabbixSyncDestinationConfigSchema.optional()
});

export const ZabbixSyncListItemSchema = z.object({
  name: z.literal("Zabbix"),
  connection: z.literal(AppConnection.Zabbix),
  destination: z.literal(SecretSync.Zabbix),
  canImportSecrets: z.literal(true)
});
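The discriminated union above means a host-scoped config must carry hostId and hostName while a global-scoped config needs only macroType (0 = text, 1 = secret). A hedged illustration with a reduced mirror of the schema; the sample hostId "10084" and hostName "web-01" are made up:

import { z } from "zod";

// Reduced mirror of ZabbixSyncDestinationConfigSchema, for illustration only
const ConfigSketch = z.discriminatedUnion("scope", [
  z.object({
    scope: z.literal("host"),
    hostId: z.string().min(1),
    hostName: z.string().min(1),
    macroType: z.number().min(0).max(1)
  }),
  z.object({ scope: z.literal("global"), macroType: z.number().min(0).max(1) })
]);

ConfigSketch.parse({ scope: "host", hostId: "10084", hostName: "web-01", macroType: 1 }); // ok
ConfigSketch.parse({ scope: "global", macroType: 0 }); // ok
// ConfigSketch.parse({ scope: "host", macroType: 1 }); // throws: hostId and hostName are required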
|
backend/src/services/secret-sync/zabbix/zabbix-sync-types.ts (new file, 75 lines)
@ -0,0 +1,75 @@
import { z } from "zod";

import { TZabbixConnection } from "@app/services/app-connection/zabbix";

import { CreateZabbixSyncSchema, ZabbixSyncListItemSchema, ZabbixSyncSchema } from "./zabbix-sync-schemas";

export type TZabbixSync = z.infer<typeof ZabbixSyncSchema>;
export type TZabbixSyncInput = z.infer<typeof CreateZabbixSyncSchema>;
export type TZabbixSyncListItem = z.infer<typeof ZabbixSyncListItemSchema>;

export type TZabbixSyncWithCredentials = TZabbixSync & {
  connection: TZabbixConnection;
};

export type TZabbixSecret = {
  macro: string;
  value: string;
  description?: string;
  globalmacroid?: string;
  hostmacroid?: string;
  hostid?: string;
  type: number;
  automatic?: string;
};

export interface ZabbixApiResponse<T = unknown> {
  data: {
    jsonrpc: "2.0";
    result?: T;
    error?: {
      code: number;
      message: string;
      data?: string;
    };
    id: number;
  };
}

export interface ZabbixMacroCreateResponse {
  hostmacroids?: string[];
  globalmacroids?: string[];
}

export interface ZabbixMacroUpdateResponse {
  hostmacroids?: string[];
  globalmacroids?: string[];
}

export interface ZabbixMacroDeleteResponse {
  hostmacroids?: string[];
  globalmacroids?: string[];
}

export enum ZabbixMacroType {
  TEXT = 0,
  SECRET = 1
}

export interface ZabbixMacroInput {
  hostid?: string;
  macro: string;
  value: string;
  description?: string;
  type?: ZabbixMacroType;
  automatic?: "0" | "1";
}

export interface ZabbixMacroUpdate {
  hostmacroid?: string;
  globalmacroid?: string;
  value?: string;
  description?: string;
  type?: ZabbixMacroType;
  automatic?: "0" | "1";
}
|
@ -5,7 +5,13 @@ import jwt from "jsonwebtoken";
import { IdentityAuthMethod, OrgMembershipRole, TSuperAdmin, TSuperAdminUpdate } from "@app/db/schemas";
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
import { PgSqlLock, TKeyStoreFactory } from "@app/keystore/keystore";
import { getConfig } from "@app/lib/config/env";
import {
  getConfig,
  getOriginalConfig,
  overrideEnvConfig,
  overwriteSchema,
  validateOverrides
} from "@app/lib/config/env";
import { infisicalSymmetricEncypt } from "@app/lib/crypto/encryption";
import { generateUserSrpKeys, getUserPrivateKey } from "@app/lib/crypto/srp";
import { BadRequestError, NotFoundError } from "@app/lib/errors";
@ -33,6 +39,7 @@ import { TInvalidateCacheQueueFactory } from "./invalidate-cache-queue";
import { TSuperAdminDALFactory } from "./super-admin-dal";
import {
  CacheType,
  EnvOverrides,
  LoginMethod,
  TAdminBootstrapInstanceDTO,
  TAdminGetIdentitiesDTO,
@ -234,6 +241,45 @@ export const superAdminServiceFactory = ({
    adminIntegrationsConfig = config;
  };

  const getEnvOverrides = async () => {
    const serverCfg = await serverCfgDAL.findById(ADMIN_CONFIG_DB_UUID);

    if (!serverCfg || !serverCfg.encryptedEnvOverrides) {
      return {};
    }

    const decrypt = kmsService.decryptWithRootKey();

    const overrides = JSON.parse(decrypt(serverCfg.encryptedEnvOverrides).toString()) as Record<string, string>;

    return overrides;
  };

  const getEnvOverridesOrganized = async (): Promise<EnvOverrides> => {
    const overrides = await getEnvOverrides();
    const ogConfig = getOriginalConfig();

    return Object.fromEntries(
      Object.entries(overwriteSchema).map(([groupKey, groupDef]) => [
        groupKey,
        {
          name: groupDef.name,
          fields: groupDef.fields.map(({ key, description }) => ({
            key,
            description,
            value: overrides[key] || "",
            hasEnvEntry: !!(ogConfig as unknown as Record<string, string | undefined>)[key]
          }))
        }
      ])
    );
  };

  const $syncEnvConfig = async () => {
    const config = await getEnvOverrides();
    overrideEnvConfig(config);
  };

  const updateServerCfg = async (
    data: TSuperAdminUpdate & {
      slackClientId?: string;
@ -246,6 +292,7 @@ export const superAdminServiceFactory = ({
      gitHubAppConnectionSlug?: string;
      gitHubAppConnectionId?: string;
      gitHubAppConnectionPrivateKey?: string;
      envOverrides?: Record<string, string>;
    },
    userId: string
  ) => {
@ -374,6 +421,17 @@ export const superAdminServiceFactory = ({
      gitHubAppConnectionSettingsUpdated = true;
    }

    let envOverridesUpdated = false;
    if (data.envOverrides !== undefined) {
      // Verify input format
      validateOverrides(data.envOverrides);

      const encryptedEnvOverrides = encryptWithRoot(Buffer.from(JSON.stringify(data.envOverrides)));
      updatedData.encryptedEnvOverrides = encryptedEnvOverrides;
      updatedData.envOverrides = undefined;
      envOverridesUpdated = true;
    }

    const updatedServerCfg = await serverCfgDAL.updateById(ADMIN_CONFIG_DB_UUID, updatedData);

    await keyStore.setItemWithExpiry(ADMIN_CONFIG_KEY, ADMIN_CONFIG_KEY_EXP, JSON.stringify(updatedServerCfg));
@ -382,6 +440,10 @@ export const superAdminServiceFactory = ({
      await $syncAdminIntegrationConfig();
    }

    if (envOverridesUpdated) {
      await $syncEnvConfig();
    }

    if (
      updatedServerCfg.encryptedMicrosoftTeamsAppId &&
      updatedServerCfg.encryptedMicrosoftTeamsClientSecret &&
@ -814,6 +876,18 @@ export const superAdminServiceFactory = ({
    return job;
  };

  const initializeEnvConfigSync = async () => {
    logger.info("Setting up background sync process for environment overrides");

    await $syncEnvConfig();

    // sync every 5 minutes
    const job = new CronJob("*/5 * * * *", $syncEnvConfig);
    job.start();

    return job;
  };

  return {
    initServerCfg,
    updateServerCfg,
@ -833,6 +907,9 @@ export const superAdminServiceFactory = ({
    getOrganizations,
    deleteOrganization,
    deleteOrganizationMembership,
    initializeAdminIntegrationConfigSync
    initializeAdminIntegrationConfigSync,
    initializeEnvConfigSync,
    getEnvOverrides,
    getEnvOverridesOrganized
  };
};
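The change above stores admin-supplied env overrides encrypted under the instance root key and re-applies them over the process config, both on update and from a 5-minute cron. A minimal sketch of that round-trip, with no-op stand-ins for the service's encryptWithRoot / kmsService.decryptWithRootKey() helpers and a made-up SMTP_HOST sample key:

// Stand-ins for root-key encryption; a real deployment encrypts for persistence.
const encrypt = (plaintext: Buffer): Buffer => plaintext;
const decrypt = (ciphertext: Buffer): Buffer => ciphertext;

// Hypothetical override map; SMTP_HOST is a sample key, not taken from overwriteSchema.
const overrides: Record<string, string> = { SMTP_HOST: "smtp.example.com" };

// Write path: validate, serialize, encrypt, persist on the admin config row.
const encryptedEnvOverrides = encrypt(Buffer.from(JSON.stringify(overrides)));

// Read path (also run by the cron): decrypt, parse, apply over the process config.
const restored = JSON.parse(decrypt(encryptedEnvOverrides).toString()) as Record<string, string>;
console.log(restored.SMTP_HOST); // "smtp.example.com"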
|
@ -1,3 +1,5 @@
import { TEnvConfig } from "@app/lib/config/env";

export type TAdminSignUpDTO = {
  email: string;
  password: string;
@ -74,3 +76,10 @@ export type TAdminIntegrationConfig = {
    privateKey: string;
  };
};

export interface EnvOverrides {
  [key: string]: {
    name: string;
    fields: { key: keyof TEnvConfig; value: string; hasEnvEntry: boolean; description?: string }[];
  };
}
|
@ -71,6 +71,15 @@ export const telemetryQueueServiceFactory = ({
      QueueName.TelemetryInstanceStats // just a job id
    );

    if (postHog) {
      await queueService.queue(QueueName.TelemetryInstanceStats, QueueJobs.TelemetryInstanceStats, undefined, {
        jobId: QueueName.TelemetryInstanceStats,
        repeat: { pattern: "0 0 * * *", utc: true }
      });
    }
  };

  const startAggregatedEventsJob = async () => {
    // clear previous aggregated events job
    await queueService.stopRepeatableJob(
      QueueName.TelemetryAggregatedEvents,
@ -80,11 +89,6 @@ export const telemetryQueueServiceFactory = ({
    );

    if (postHog) {
      await queueService.queue(QueueName.TelemetryInstanceStats, QueueJobs.TelemetryInstanceStats, undefined, {
        jobId: QueueName.TelemetryInstanceStats,
        repeat: { pattern: "0 0 * * *", utc: true }
      });

      // Start aggregated events job (runs every five minutes)
      await queueService.queue(QueueName.TelemetryAggregatedEvents, QueueJobs.TelemetryAggregatedEvents, undefined, {
        jobId: QueueName.TelemetryAggregatedEvents,
@ -102,6 +106,7 @@ export const telemetryQueueServiceFactory = ({
  });

  return {
    startTelemetryCheck
    startTelemetryCheck,
    startAggregatedEventsJob
  };
};
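The refactor above splits the two repeatable jobs into separate start functions; each registers a cron-repeatable job keyed by its jobId so re-registration stays idempotent. A rough sketch of that registration pattern, with a hypothetical enqueueRepeatable stand-in for queueService.queue (BullMQ-style repeat options):

type RepeatOpts = { jobId: string; repeat: { pattern: string; utc?: boolean } };

// Hypothetical stand-in; the real call registers the job on a backing queue.
const enqueueRepeatable = async (queue: string, job: string, opts: RepeatOpts): Promise<void> => {
  console.log(`registered ${job} on ${queue}: ${opts.repeat.pattern}`);
};

// "0 0 * * *" fires daily at midnight UTC (instance stats);
// "*/5 * * * *" fires every five minutes (aggregated events, per the comment above).
void enqueueRepeatable("TelemetryInstanceStats", "TelemetryInstanceStats", {
  jobId: "TelemetryInstanceStats",
  repeat: { pattern: "0 0 * * *", utc: true }
});
void enqueueRepeatable("TelemetryAggregatedEvents", "TelemetryAggregatedEvents", {
  jobId: "TelemetryAggregatedEvents",
  repeat: { pattern: "*/5 * * * *", utc: true }
});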
|
@ -14,7 +14,7 @@ export const TELEMETRY_SECRET_PROCESSED_KEY = "telemetry-secret-processed";
export const TELEMETRY_SECRET_OPERATIONS_KEY = "telemetry-secret-operations";

export const POSTHOG_AGGREGATED_EVENTS = [PostHogEventTypes.SecretPulled];
const TELEMETRY_AGGREGATED_KEY_EXP = 900; // 15mins
const TELEMETRY_AGGREGATED_KEY_EXP = 600; // 10mins

// Bucket configuration
const TELEMETRY_BUCKET_COUNT = 30;
@ -102,13 +102,6 @@ To opt into telemetry, you can set "TELEMETRY_ENABLED=true" within the environme
    const instanceType = licenseService.getInstanceType();
    // capture posthog only when its cloud or signup event happens in self-hosted
    if (instanceType === InstanceType.Cloud || event.event === PostHogEventTypes.UserSignedUp) {
      if (event.organizationId) {
        try {
          postHog.groupIdentify({ groupType: "organization", groupKey: event.organizationId });
        } catch (error) {
          logger.error(error, "Failed to identify PostHog organization");
        }
      }
      if (POSTHOG_AGGREGATED_EVENTS.includes(event.event)) {
        const eventKey = createTelemetryEventKey(event.event, event.distinctId);
        await keyStore.setItemWithExpiry(
@ -122,6 +115,13 @@ To opt into telemetry, you can set "TELEMETRY_ENABLED=true" within the environme
          })
        );
      } else {
        if (event.organizationId) {
          try {
            postHog.groupIdentify({ groupType: "organization", groupKey: event.organizationId });
          } catch (error) {
            logger.error(error, "Failed to identify PostHog organization");
          }
        }
        postHog.capture({
          event: event.event,
          distinctId: event.distinctId,
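The reordering above moves the organization groupIdentify call into the non-aggregated branch: aggregated events (currently SecretPulled) are buffered in the keystore with a TTL for the five-minute batch job, while everything else is captured immediately. A simplified sketch of that buffer-or-send split; the event name and key format below are hypothetical stand-ins for PostHogEventTypes.SecretPulled and createTelemetryEventKey:

const AGGREGATED_EVENTS = new Set(["secret-pulled"]); // hypothetical event name

const handleTelemetryEvent = (
  event: { event: string; distinctId: string },
  keyStore: Map<string, string>
): void => {
  if (AGGREGATED_EVENTS.has(event.event)) {
    // Buffered path: stash with a TTL (600s after this change) so the
    // five-minute aggregation job can batch-report it.
    keyStore.set(`telemetry-event-${event.event}-${event.distinctId}`, JSON.stringify(event));
  } else {
    // Direct path: identify the organization group and capture immediately.
    console.log("capture", event.event, "for", event.distinctId);
  }
};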
|
@ -35,6 +35,7 @@ const (
    GitHubPlatform
    GitLabPlatform
    AzureDevOpsPlatform
    BitBucketPlatform
    // TODO: Add others.
)

@ -45,6 +46,7 @@ func (p Platform) String() string {
        "github",
        "gitlab",
        "azuredevops",
        "bitbucket",
    }[p]
}

@ -60,6 +62,8 @@ func PlatformFromString(s string) (Platform, error) {
        return GitLabPlatform, nil
    case "azuredevops":
        return AzureDevOpsPlatform, nil
    case "bitbucket":
        return BitBucketPlatform, nil
    default:
        return UnknownPlatform, fmt.Errorf("invalid scm platform value: %s", s)
    }
|
@ -208,6 +208,8 @@ func platformFromHost(u *url.URL) scm.Platform {
        return scm.GitLabPlatform
    case "dev.azure.com", "visualstudio.com":
        return scm.AzureDevOpsPlatform
    case "bitbucket.org":
        return scm.BitBucketPlatform
    default:
        return scm.UnknownPlatform
    }
|
@ -112,6 +112,15 @@ func createScmLink(scmPlatform scm.Platform, remoteUrl string, finding report.Fi
        // This is a bit dirty, but Azure DevOps does not highlight the line when the lineStartColumn and lineEndColumn are not provided
        link += "&lineStartColumn=1&lineEndColumn=10000000&type=2&lineStyle=plain&_a=files"
        return link
    case scm.BitBucketPlatform:
        link := fmt.Sprintf("%s/src/%s/%s", remoteUrl, finding.Commit, filePath)
        if finding.StartLine != 0 {
            link += fmt.Sprintf("#lines-%d", finding.StartLine)
        }
        if finding.EndLine != finding.StartLine {
            link += fmt.Sprintf(":%d", finding.EndLine)
        }
        return link
    default:
        // This should never happen.
        return ""
|
@ -337,9 +337,7 @@ var scanCmd = &cobra.Command{
        if gitCmd, err = sources.NewGitLogCmd(source, logOpts); err != nil {
            logging.Fatal().Err(err).Msg("could not create Git cmd")
        }
        if scmPlatform, err = scm.PlatformFromString("github"); err != nil {
            logging.Fatal().Err(err).Send()
        }
        scmPlatform = scm.UnknownPlatform
        remote = detect.NewRemoteInfo(scmPlatform, source)

        if findings, err = detector.DetectGit(gitCmd, remote); err != nil {
|
@ -0,0 +1,4 @@
---
title: "Available"
openapi: "GET /api/v1/app-connections/bitbucket/available"
---
|
Some files were not shown because too many files have changed in this diff.