Compare commits

..

29 Commits

Author SHA1 Message Date
Maidul Islam
9cc17452fa address greptile 2025-06-05 01:23:28 -04:00
Maidul Islam
93ba6f7b58 add networking docs 2025-06-05 01:18:21 -04:00
Maidul Islam
6fddecdf82 Merge pull request #3729 from akhilmhdh/feat/ui-change-for-approval-replication
feat: updated ui for replication approval
2025-06-04 19:05:13 -04:00
Scott Wilson
99e2c85f8f Merge pull request #3718 from Infisical/filter-org-members-by-role
improvement(org-users-table): Add filter by roles to org users table
2025-06-04 16:01:43 -07:00
Maidul Islam
6e1504dc73 Merge pull request #3727 from Infisical/update-github-radar-image
improvement(github-radar-app): update image
2025-06-04 18:29:41 -04:00
=
07d930f608 feat: small text changes 2025-06-05 03:54:09 +05:30
=
696bbcb072 feat: updated ui for replication approval 2025-06-05 03:44:54 +05:30
Scott Wilson
6c52847dec improvement: update image 2025-06-04 13:48:33 -07:00
Maidul Islam
caeda09b21 Merge pull request #3725 from Infisical/doc/spire
doc: add oidc auth doc for spire
2025-06-04 12:59:49 -04:00
Sheen
1201baf35c doc: add oidc auth doc for spire 2025-06-04 15:42:43 +00:00
Akhil Mohan
5d5f843a9f Merge pull request #3724 from Infisical/fix/secretRequestUIOverflows
Fix broken UI for secret requests due to long secret values
2025-06-04 21:08:03 +05:30
carlosmonastyrski
caca23b56c Fix broken UI for secret requests due to long secret values 2025-06-04 12:33:37 -03:00
Maidul Islam
01ea22f167 move bounty program to invite only - low quality reports 2025-06-04 10:58:03 -04:00
Scott Wilson
83c53b9d5a Merge pull request #3677 from Infisical/secret-scanning-v2-pt-1
feature(secret-scanning-v2): secret scanning architecture and github data source
2025-06-03 16:34:29 -07:00
carlosmonastyrski
8cc457d49a Merge pull request #3710 from Infisical/feat/verticaDynamicSecret
feat(dynamic-secret): add vertica dynamic secret option
2025-06-03 20:27:47 -03:00
Scott Wilson
540374f543 Merge pull request #3720 from Infisical/add-email-body-padding
improvement(email-templates): Add y-padding to email body
2025-06-03 16:06:34 -07:00
Scott Wilson
0fbf8efd3a improvement: add filter by roles to org users table 2025-06-03 14:36:47 -07:00
Scott Wilson
d8ee05bfba improvements: address feedback 2025-06-03 10:41:46 -07:00
carlosmonastyrski
ce5712606f feat(dynamic-secret): Vertica option improvements 2025-06-03 10:45:58 -03:00
carlosmonastyrski
ce67e5f137 feat(dynamic-secret): add vertica dynamic secret option 2025-06-03 10:04:11 -03:00
Scott Wilson
98ab969356 improvements: address greppy 2025-06-02 20:24:50 -07:00
Scott Wilson
d4523b0ca4 improvements: additional feedback 2025-06-02 18:19:51 -07:00
Scott Wilson
2be8c47ae8 chore: add route tree 2025-06-02 16:29:24 -07:00
Scott Wilson
8730d14104 merge main 2025-06-02 16:24:55 -07:00
Scott Wilson
d924580599 improvements: address feedback and setup queue worker profiles 2025-06-02 14:40:06 -07:00
Scott Wilson
3ae2ec1f51 chore: revert license and fix type error 2025-05-30 20:26:55 -07:00
Scott Wilson
ce4e35e908 feature: secret scanning pt 3 2025-05-30 20:19:44 -07:00
Scott Wilson
4773336a04 feature: secret scanning pt2 and address initial feedback 2025-05-29 20:40:48 -07:00
Scott Wilson
e6c97510ca feature: secret scanning architecture and github data source (wip) 2025-05-28 22:21:03 -07:00
370 changed files with 15569 additions and 1344 deletions

View File

@@ -107,6 +107,14 @@ INF_APP_CONNECTION_GITHUB_APP_PRIVATE_KEY=
INF_APP_CONNECTION_GITHUB_APP_SLUG=
INF_APP_CONNECTION_GITHUB_APP_ID=
#github radar app connection
INF_APP_CONNECTION_GITHUB_RADAR_APP_CLIENT_ID=
INF_APP_CONNECTION_GITHUB_RADAR_APP_CLIENT_SECRET=
INF_APP_CONNECTION_GITHUB_RADAR_APP_PRIVATE_KEY=
INF_APP_CONNECTION_GITHUB_RADAR_APP_SLUG=
INF_APP_CONNECTION_GITHUB_RADAR_APP_ID=
INF_APP_CONNECTION_GITHUB_RADAR_APP_WEBHOOK_SECRET=
#gcp app connection
INF_APP_CONNECTION_GCP_SERVICE_ACCOUNT_CREDENTIAL=

View File

@@ -37,6 +37,7 @@ import { TSecretApprovalRequestServiceFactory } from "@app/ee/services/secret-ap
import { TSecretRotationServiceFactory } from "@app/ee/services/secret-rotation/secret-rotation-service";
import { TSecretRotationV2ServiceFactory } from "@app/ee/services/secret-rotation-v2/secret-rotation-v2-service";
import { TSecretScanningServiceFactory } from "@app/ee/services/secret-scanning/secret-scanning-service";
import { TSecretScanningV2ServiceFactory } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-service";
import { TSecretSnapshotServiceFactory } from "@app/ee/services/secret-snapshot/secret-snapshot-service";
import { TSshCertificateAuthorityServiceFactory } from "@app/ee/services/ssh/ssh-certificate-authority-service";
import { TSshCertificateTemplateServiceFactory } from "@app/ee/services/ssh-certificate-template/ssh-certificate-template-service";
@@ -271,6 +272,7 @@ declare module "fastify" {
microsoftTeams: TMicrosoftTeamsServiceFactory;
assumePrivileges: TAssumePrivilegeServiceFactory;
githubOrgSync: TGithubOrgSyncServiceFactory;
secretScanningV2: TSecretScanningV2ServiceFactory;
internalCertificateAuthority: TInternalCertificateAuthorityServiceFactory;
pkiTemplate: TPkiTemplatesServiceFactory;
};

View File

@@ -336,9 +336,24 @@ import {
TSecretRotationV2SecretMappingsInsert,
TSecretRotationV2SecretMappingsUpdate,
TSecrets,
TSecretScanningConfigs,
TSecretScanningConfigsInsert,
TSecretScanningConfigsUpdate,
TSecretScanningDataSources,
TSecretScanningDataSourcesInsert,
TSecretScanningDataSourcesUpdate,
TSecretScanningFindings,
TSecretScanningFindingsInsert,
TSecretScanningFindingsUpdate,
TSecretScanningGitRisks,
TSecretScanningGitRisksInsert,
TSecretScanningGitRisksUpdate,
TSecretScanningResources,
TSecretScanningResourcesInsert,
TSecretScanningResourcesUpdate,
TSecretScanningScans,
TSecretScanningScansInsert,
TSecretScanningScansUpdate,
TSecretSharing,
TSecretSharingInsert,
TSecretSharingUpdate,
@@ -1107,5 +1122,30 @@ declare module "knex/types/tables" {
TGithubOrgSyncConfigsInsert,
TGithubOrgSyncConfigsUpdate
>;
[TableName.SecretScanningDataSource]: KnexOriginal.CompositeTableType<
TSecretScanningDataSources,
TSecretScanningDataSourcesInsert,
TSecretScanningDataSourcesUpdate
>;
[TableName.SecretScanningResource]: KnexOriginal.CompositeTableType<
TSecretScanningResources,
TSecretScanningResourcesInsert,
TSecretScanningResourcesUpdate
>;
[TableName.SecretScanningScan]: KnexOriginal.CompositeTableType<
TSecretScanningScans,
TSecretScanningScansInsert,
TSecretScanningScansUpdate
>;
[TableName.SecretScanningFinding]: KnexOriginal.CompositeTableType<
TSecretScanningFindings,
TSecretScanningFindingsInsert,
TSecretScanningFindingsUpdate
>;
[TableName.SecretScanningConfig]: KnexOriginal.CompositeTableType<
TSecretScanningConfigs,
TSecretScanningConfigsInsert,
TSecretScanningConfigsUpdate
>;
}
}

View File

@@ -0,0 +1,107 @@
import { Knex } from "knex";
import { TableName } from "@app/db/schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "@app/db/utils";
import {
SecretScanningFindingStatus,
SecretScanningScanStatus
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
// Creates the five tables backing secret scanning v2 — data sources, resources,
// scans, findings, and per-project configs — plus an updatedAt trigger for each
// table that carries an updatedAt column. Each create is guarded by hasTable so
// the migration is safe to re-run against a partially migrated database.
export async function up(knex: Knex): Promise<void> {
  // Data sources: one row per configured scanning integration in a project.
  if (!(await knex.schema.hasTable(TableName.SecretScanningDataSource))) {
    await knex.schema.createTable(TableName.SecretScanningDataSource, (t) => {
      t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
      t.string("externalId").index(); // if we need a unique way of identifying this data source from an external resource
      t.string("name", 48).notNullable();
      t.string("description");
      t.string("type").notNullable();
      t.jsonb("config").notNullable();
      t.binary("encryptedCredentials"); // webhook credentials, etc.
      t.uuid("connectionId"); // nullable: a data source may exist without an app connection
      t.boolean("isAutoScanEnabled").defaultTo(true);
      t.foreign("connectionId").references("id").inTable(TableName.AppConnection);
      t.string("projectId").notNullable();
      t.foreign("projectId").references("id").inTable(TableName.Project).onDelete("CASCADE");
      t.timestamps(true, true, true);
      t.boolean("isDisconnected").notNullable().defaultTo(false);
      t.unique(["projectId", "name"]); // names are unique within a project
    });
    await createOnUpdateTrigger(knex, TableName.SecretScanningDataSource);
  }
  // Resources: scannable units discovered under a data source (cascade on source delete),
  // deduplicated per source by externalId.
  if (!(await knex.schema.hasTable(TableName.SecretScanningResource))) {
    await knex.schema.createTable(TableName.SecretScanningResource, (t) => {
      t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
      t.string("externalId").notNullable();
      t.string("name").notNullable();
      t.string("type").notNullable();
      t.uuid("dataSourceId").notNullable();
      t.foreign("dataSourceId").references("id").inTable(TableName.SecretScanningDataSource).onDelete("CASCADE");
      t.timestamps(true, true, true);
      t.unique(["dataSourceId", "externalId"]);
    });
    await createOnUpdateTrigger(knex, TableName.SecretScanningResource);
  }
  // Scans: individual scan executions against a resource. Append-only — only a
  // createdAt column (no updatedAt), hence no on-update trigger for this table.
  if (!(await knex.schema.hasTable(TableName.SecretScanningScan))) {
    await knex.schema.createTable(TableName.SecretScanningScan, (t) => {
      t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
      t.string("status").notNullable().defaultTo(SecretScanningScanStatus.Queued);
      t.string("statusMessage", 1024);
      t.string("type").notNullable();
      t.uuid("resourceId").notNullable();
      t.foreign("resourceId").references("id").inTable(TableName.SecretScanningResource).onDelete("CASCADE");
      t.timestamp("createdAt").defaultTo(knex.fn.now());
    });
  }
  // Findings: detected secrets. Denormalized source/resource names are kept so a
  // finding stays meaningful after its scan is deleted (scanId is SET NULL).
  if (!(await knex.schema.hasTable(TableName.SecretScanningFinding))) {
    await knex.schema.createTable(TableName.SecretScanningFinding, (t) => {
      t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
      t.string("dataSourceName").notNullable();
      t.string("dataSourceType").notNullable();
      t.string("resourceName").notNullable();
      t.string("resourceType").notNullable();
      t.string("rule").notNullable();
      t.string("severity").notNullable();
      t.string("status").notNullable().defaultTo(SecretScanningFindingStatus.Unresolved);
      t.string("remarks");
      t.string("fingerprint").notNullable();
      t.jsonb("details").notNullable();
      t.string("projectId").notNullable();
      t.foreign("projectId").references("id").inTable(TableName.Project).onDelete("CASCADE");
      t.uuid("scanId");
      t.foreign("scanId").references("id").inTable(TableName.SecretScanningScan).onDelete("SET NULL");
      t.timestamps(true, true, true);
      t.unique(["projectId", "fingerprint"]); // dedupe findings per project by fingerprint
    });
    await createOnUpdateTrigger(knex, TableName.SecretScanningFinding);
  }
  // Configs: at most one scanner config row per project (projectId is unique).
  if (!(await knex.schema.hasTable(TableName.SecretScanningConfig))) {
    await knex.schema.createTable(TableName.SecretScanningConfig, (t) => {
      t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
      t.string("projectId").notNullable().unique();
      t.foreign("projectId").references("id").inTable(TableName.Project).onDelete("CASCADE");
      t.string("content", 5000);
      t.timestamps(true, true, true);
    });
    await createOnUpdateTrigger(knex, TableName.SecretScanningConfig);
  }
}
// Reverses `up`: drops the secret scanning v2 tables and their updatedAt
// triggers. Finding is dropped first since it holds an FK to Scan; Scan,
// Resource, and DataSource follow in child-to-parent order. Scan has no
// on-update trigger to drop (it only has createdAt — see `up`).
// NOTE(review): each dropOnUpdateTrigger runs after its table is already
// dropped — presumably its IF EXISTS semantics tolerate a missing table;
// confirm against the implementation in @app/db/utils.
export async function down(knex: Knex): Promise<void> {
  await knex.schema.dropTableIfExists(TableName.SecretScanningFinding);
  await dropOnUpdateTrigger(knex, TableName.SecretScanningFinding);
  await knex.schema.dropTableIfExists(TableName.SecretScanningScan);
  await knex.schema.dropTableIfExists(TableName.SecretScanningResource);
  await dropOnUpdateTrigger(knex, TableName.SecretScanningResource);
  await knex.schema.dropTableIfExists(TableName.SecretScanningDataSource);
  await dropOnUpdateTrigger(knex, TableName.SecretScanningDataSource);
  await knex.schema.dropTableIfExists(TableName.SecretScanningConfig);
  await dropOnUpdateTrigger(knex, TableName.SecretScanningConfig);
}

View File

@@ -111,7 +111,12 @@ export * from "./secret-rotation-outputs";
export * from "./secret-rotation-v2-secret-mappings";
export * from "./secret-rotations";
export * from "./secret-rotations-v2";
export * from "./secret-scanning-configs";
export * from "./secret-scanning-data-sources";
export * from "./secret-scanning-findings";
export * from "./secret-scanning-git-risks";
export * from "./secret-scanning-resources";
export * from "./secret-scanning-scans";
export * from "./secret-sharing";
export * from "./secret-snapshot-folders";
export * from "./secret-snapshot-secrets";

View File

@@ -159,7 +159,12 @@ export enum TableName {
MicrosoftTeamsIntegrations = "microsoft_teams_integrations",
ProjectMicrosoftTeamsConfigs = "project_microsoft_teams_configs",
SecretReminderRecipients = "secret_reminder_recipients",
GithubOrgSyncConfig = "github_org_sync_configs"
GithubOrgSyncConfig = "github_org_sync_configs",
SecretScanningDataSource = "secret_scanning_data_sources",
SecretScanningResource = "secret_scanning_resources",
SecretScanningScan = "secret_scanning_scans",
SecretScanningFinding = "secret_scanning_findings",
SecretScanningConfig = "secret_scanning_configs"
}
export type TImmutableDBKeys = "id" | "createdAt" | "updatedAt";
@@ -248,7 +253,8 @@ export enum ProjectType {
SecretManager = "secret-manager",
CertificateManager = "cert-manager",
KMS = "kms",
SSH = "ssh"
SSH = "ssh",
SecretScanning = "secret-scanning"
}
export enum ActionProjectType {
@@ -256,6 +262,7 @@ export enum ActionProjectType {
CertificateManager = ProjectType.CertificateManager,
KMS = ProjectType.KMS,
SSH = ProjectType.SSH,
SecretScanning = ProjectType.SecretScanning,
// project operations that happen on all types
Any = "any"
}

View File

@@ -0,0 +1,20 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
// Row shape of the secret_scanning_configs table (one config row per project).
export const SecretScanningConfigsSchema = z.object({
  id: z.string().uuid(),
  projectId: z.string(), // unique in DB — at most one config per project
  content: z.string().nullable().optional(), // scanner config text (DB column capped at 5000 chars)
  createdAt: z.date(),
  updatedAt: z.date()
});

export type TSecretScanningConfigs = z.infer<typeof SecretScanningConfigsSchema>;
// Insert/update payload types exclude DB-managed columns (id, createdAt, updatedAt).
export type TSecretScanningConfigsInsert = Omit<z.input<typeof SecretScanningConfigsSchema>, TImmutableDBKeys>;
export type TSecretScanningConfigsUpdate = Partial<Omit<z.input<typeof SecretScanningConfigsSchema>, TImmutableDBKeys>>;

View File

@@ -0,0 +1,32 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { zodBuffer } from "@app/lib/zod";
import { TImmutableDBKeys } from "./models";
// Row shape of the secret_scanning_data_sources table.
export const SecretScanningDataSourcesSchema = z.object({
  id: z.string().uuid(),
  externalId: z.string().nullable().optional(), // optional external identifier (indexed in DB)
  name: z.string(), // unique per project, DB column capped at 48 chars
  description: z.string().nullable().optional(),
  type: z.string(),
  config: z.unknown(), // jsonb column — type-specific source configuration
  encryptedCredentials: zodBuffer.nullable().optional(), // binary column (webhook credentials, etc.)
  connectionId: z.string().uuid().nullable().optional(), // optional FK to app_connections
  isAutoScanEnabled: z.boolean().default(true).nullable().optional(),
  projectId: z.string(),
  createdAt: z.date(),
  updatedAt: z.date(),
  isDisconnected: z.boolean().default(false)
});

export type TSecretScanningDataSources = z.infer<typeof SecretScanningDataSourcesSchema>;
// Insert/update payload types exclude DB-managed columns (id, createdAt, updatedAt).
export type TSecretScanningDataSourcesInsert = Omit<z.input<typeof SecretScanningDataSourcesSchema>, TImmutableDBKeys>;
export type TSecretScanningDataSourcesUpdate = Partial<
  Omit<z.input<typeof SecretScanningDataSourcesSchema>, TImmutableDBKeys>
>;

View File

@@ -0,0 +1,32 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
// Row shape of the secret_scanning_findings table. Source/resource names are
// denormalized onto the finding so it remains meaningful if its scan is deleted.
export const SecretScanningFindingsSchema = z.object({
  id: z.string().uuid(),
  dataSourceName: z.string(),
  dataSourceType: z.string(),
  resourceName: z.string(),
  resourceType: z.string(),
  rule: z.string(), // scanner rule that produced the finding
  severity: z.string(),
  status: z.string().default("unresolved"), // matches DB default (SecretScanningFindingStatus.Unresolved)
  remarks: z.string().nullable().optional(),
  fingerprint: z.string(), // unique per project — used to dedupe findings
  details: z.unknown(), // jsonb column with finding specifics
  projectId: z.string(),
  scanId: z.string().uuid().nullable().optional(), // FK set to NULL when the scan is deleted
  createdAt: z.date(),
  updatedAt: z.date()
});

export type TSecretScanningFindings = z.infer<typeof SecretScanningFindingsSchema>;
// Insert/update payload types exclude DB-managed columns (id, createdAt, updatedAt).
export type TSecretScanningFindingsInsert = Omit<z.input<typeof SecretScanningFindingsSchema>, TImmutableDBKeys>;
export type TSecretScanningFindingsUpdate = Partial<
  Omit<z.input<typeof SecretScanningFindingsSchema>, TImmutableDBKeys>
>;

View File

@@ -0,0 +1,24 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
// Row shape of the secret_scanning_resources table (scannable units under a data source).
export const SecretScanningResourcesSchema = z.object({
  id: z.string().uuid(),
  externalId: z.string(), // unique per data source
  name: z.string(),
  type: z.string(),
  dataSourceId: z.string().uuid(), // FK — rows cascade-delete with their data source
  createdAt: z.date(),
  updatedAt: z.date()
});

export type TSecretScanningResources = z.infer<typeof SecretScanningResourcesSchema>;
// Insert/update payload types exclude DB-managed columns (id, createdAt, updatedAt).
export type TSecretScanningResourcesInsert = Omit<z.input<typeof SecretScanningResourcesSchema>, TImmutableDBKeys>;
export type TSecretScanningResourcesUpdate = Partial<
  Omit<z.input<typeof SecretScanningResourcesSchema>, TImmutableDBKeys>
>;

View File

@@ -0,0 +1,21 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
// Row shape of the secret_scanning_scans table (append-only: createdAt only, no updatedAt).
export const SecretScanningScansSchema = z.object({
  id: z.string().uuid(),
  status: z.string().default("queued"), // matches DB default (SecretScanningScanStatus.Queued)
  statusMessage: z.string().nullable().optional(), // DB column capped at 1024 chars
  type: z.string(),
  resourceId: z.string().uuid(), // FK — rows cascade-delete with their resource
  createdAt: z.date().nullable().optional()
});

export type TSecretScanningScans = z.infer<typeof SecretScanningScansSchema>;
// Insert/update payload types exclude DB-managed columns.
export type TSecretScanningScansInsert = Omit<z.input<typeof SecretScanningScansSchema>, TImmutableDBKeys>;
export type TSecretScanningScansUpdate = Partial<Omit<z.input<typeof SecretScanningScansSchema>, TImmutableDBKeys>>;

View File

@@ -2,6 +2,10 @@ import {
registerSecretRotationV2Router,
SECRET_ROTATION_REGISTER_ROUTER_MAP
} from "@app/ee/routes/v2/secret-rotation-v2-routers";
import {
registerSecretScanningV2Router,
SECRET_SCANNING_REGISTER_ROUTER_MAP
} from "@app/ee/routes/v2/secret-scanning-v2-routers";
import { registerIdentityProjectAdditionalPrivilegeRouter } from "./identity-project-additional-privilege-router";
import { registerProjectRoleRouter } from "./project-role-router";
@@ -31,4 +35,17 @@ export const registerV2EERoutes = async (server: FastifyZodProvider) => {
},
{ prefix: "/secret-rotations" }
);
await server.register(
async (secretScanningV2Router) => {
// register generic secret scanning endpoints
await secretScanningV2Router.register(registerSecretScanningV2Router);
// register service-specific secret scanning endpoints (gitlab/github, etc.)
for await (const [type, router] of Object.entries(SECRET_SCANNING_REGISTER_ROUTER_MAP)) {
await secretScanningV2Router.register(router, { prefix: `data-sources/${type}` });
}
},
{ prefix: "/secret-scanning" }
);
};

View File

@@ -0,0 +1,16 @@
import { registerSecretScanningEndpoints } from "@app/ee/routes/v2/secret-scanning-v2-routers/secret-scanning-v2-endpoints";
import {
CreateGitHubDataSourceSchema,
GitHubDataSourceSchema,
UpdateGitHubDataSourceSchema
} from "@app/ee/services/secret-scanning-v2/github";
import { SecretScanningDataSource } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
// Registers the GitHub-flavored secret scanning data source endpoints by
// instantiating the shared endpoint set with GitHub-specific zod schemas.
export const registerGitHubSecretScanningRouter = async (server: FastifyZodProvider) => {
  return registerSecretScanningEndpoints({
    server,
    type: SecretScanningDataSource.GitHub,
    createSchema: CreateGitHubDataSourceSchema,
    updateSchema: UpdateGitHubDataSourceSchema,
    responseSchema: GitHubDataSourceSchema
  });
};

View File

@@ -0,0 +1,12 @@
import { SecretScanningDataSource } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
import { registerGitHubSecretScanningRouter } from "./github-secret-scanning-router";
export * from "./secret-scanning-v2-router";
// Maps each secret scanning data source type to the Fastify plugin that
// registers its type-specific endpoints. The Record annotation makes the
// compiler enforce that every SecretScanningDataSource variant has an entry.
export const SECRET_SCANNING_REGISTER_ROUTER_MAP: Record<
  SecretScanningDataSource,
  (server: FastifyZodProvider) => Promise<void>
> = {
  [SecretScanningDataSource.GitHub]: registerGitHubSecretScanningRouter
};

View File

@@ -0,0 +1,593 @@
import { z } from "zod";
import { SecretScanningResourcesSchema, SecretScanningScansSchema } from "@app/db/schemas";
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import {
SecretScanningDataSource,
SecretScanningScanStatus
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
import { SECRET_SCANNING_DATA_SOURCE_NAME_MAP } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-maps";
import {
TSecretScanningDataSource,
TSecretScanningDataSourceInput
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-types";
import { ApiDocsTags, SecretScanningDataSources } from "@app/lib/api-docs";
import { startsWithVowel } from "@app/lib/fn";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
export const registerSecretScanningEndpoints = <
T extends TSecretScanningDataSource,
I extends TSecretScanningDataSourceInput
>({
server,
type,
createSchema,
updateSchema,
responseSchema
}: {
type: SecretScanningDataSource;
server: FastifyZodProvider;
createSchema: z.ZodType<{
name: string;
projectId: string;
connectionId?: string;
config: Partial<I["config"]>;
description?: string | null;
isAutoScanEnabled?: boolean;
}>;
updateSchema: z.ZodType<{
name?: string;
config?: Partial<I["config"]>;
description?: string | null;
isAutoScanEnabled?: boolean;
}>;
responseSchema: z.ZodTypeAny;
}) => {
const sourceType = SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type];
server.route({
method: "GET",
url: `/`,
config: {
rateLimit: readLimit
},
schema: {
hide: false,
tags: [ApiDocsTags.SecretScanning],
description: `List the ${sourceType} Data Sources for the specified project.`,
querystring: z.object({
projectId: z
.string()
.trim()
.min(1, "Project ID required")
.describe(SecretScanningDataSources.LIST(type).projectId)
}),
response: {
200: z.object({ dataSources: responseSchema.array() })
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const {
query: { projectId }
} = req;
const dataSources = (await server.services.secretScanningV2.listSecretScanningDataSourcesByProjectId(
{ projectId, type },
req.permission
)) as T[];
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId,
event: {
type: EventType.SECRET_SCANNING_DATA_SOURCE_LIST,
metadata: {
type,
count: dataSources.length,
dataSourceIds: dataSources.map((source) => source.id)
}
}
});
return { dataSources };
}
});
server.route({
method: "GET",
url: "/:dataSourceId",
config: {
rateLimit: readLimit
},
schema: {
hide: false,
tags: [ApiDocsTags.SecretScanning],
description: `Get the specified ${sourceType} Data Source by ID.`,
params: z.object({
dataSourceId: z.string().uuid().describe(SecretScanningDataSources.GET_BY_ID(type).dataSourceId)
}),
response: {
200: z.object({ dataSource: responseSchema })
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const { dataSourceId } = req.params;
const dataSource = (await server.services.secretScanningV2.findSecretScanningDataSourceById(
{ dataSourceId, type },
req.permission
)) as T;
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId: dataSource.projectId,
event: {
type: EventType.SECRET_SCANNING_DATA_SOURCE_GET,
metadata: {
dataSourceId,
type
}
}
});
return { dataSource };
}
});
server.route({
method: "GET",
url: `/data-source-name/:dataSourceName`,
config: {
rateLimit: readLimit
},
schema: {
hide: false,
tags: [ApiDocsTags.SecretScanning],
description: `Get the specified ${sourceType} Data Source by name and project ID.`,
params: z.object({
sourceName: z
.string()
.trim()
.min(1, "Data Source name required")
.describe(SecretScanningDataSources.GET_BY_NAME(type).sourceName)
}),
querystring: z.object({
projectId: z
.string()
.trim()
.min(1, "Project ID required")
.describe(SecretScanningDataSources.GET_BY_NAME(type).projectId)
}),
response: {
200: z.object({ dataSource: responseSchema })
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const { sourceName } = req.params;
const { projectId } = req.query;
const dataSource = (await server.services.secretScanningV2.findSecretScanningDataSourceByName(
{ sourceName, projectId, type },
req.permission
)) as T;
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId,
event: {
type: EventType.SECRET_SCANNING_DATA_SOURCE_GET,
metadata: {
dataSourceId: dataSource.id,
type
}
}
});
return { dataSource };
}
});
server.route({
method: "POST",
url: "/",
config: {
rateLimit: writeLimit
},
schema: {
hide: false,
tags: [ApiDocsTags.SecretScanning],
description: `Create ${
startsWithVowel(sourceType) ? "an" : "a"
} ${sourceType} Data Source for the specified project.`,
body: createSchema,
response: {
200: z.object({ dataSource: responseSchema })
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const dataSource = (await server.services.secretScanningV2.createSecretScanningDataSource(
{ ...req.body, type },
req.permission
)) as T;
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId: dataSource.projectId,
event: {
type: EventType.SECRET_SCANNING_DATA_SOURCE_CREATE,
metadata: {
dataSourceId: dataSource.id,
type,
...req.body
}
}
});
return { dataSource };
}
});
server.route({
method: "PATCH",
url: "/:dataSourceId",
config: {
rateLimit: writeLimit
},
schema: {
hide: false,
tags: [ApiDocsTags.SecretScanning],
description: `Update the specified ${sourceType} Data Source.`,
params: z.object({
dataSourceId: z.string().uuid().describe(SecretScanningDataSources.UPDATE(type).dataSourceId)
}),
body: updateSchema,
response: {
200: z.object({ dataSource: responseSchema })
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const { dataSourceId } = req.params;
const dataSource = (await server.services.secretScanningV2.updateSecretScanningDataSource(
{ ...req.body, dataSourceId, type },
req.permission
)) as T;
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId: dataSource.projectId,
event: {
type: EventType.SECRET_SCANNING_DATA_SOURCE_UPDATE,
metadata: {
dataSourceId,
type,
...req.body
}
}
});
return { dataSource };
}
});
server.route({
method: "DELETE",
url: `/:dataSourceId`,
config: {
rateLimit: writeLimit
},
schema: {
hide: false,
tags: [ApiDocsTags.SecretScanning],
description: `Delete the specified ${sourceType} Data Source.`,
params: z.object({
dataSourceId: z.string().uuid().describe(SecretScanningDataSources.DELETE(type).dataSourceId)
}),
response: {
200: z.object({ dataSource: responseSchema })
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const { dataSourceId } = req.params;
const dataSource = (await server.services.secretScanningV2.deleteSecretScanningDataSource(
{ type, dataSourceId },
req.permission
)) as T;
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId: dataSource.projectId,
event: {
type: EventType.SECRET_SCANNING_DATA_SOURCE_DELETE,
metadata: {
type,
dataSourceId
}
}
});
return { dataSource };
}
});
server.route({
method: "POST",
url: `/:dataSourceId/scan`,
config: {
rateLimit: writeLimit
},
schema: {
hide: false,
tags: [ApiDocsTags.SecretScanning],
description: `Trigger a scan for the specified ${sourceType} Data Source.`,
params: z.object({
dataSourceId: z.string().uuid().describe(SecretScanningDataSources.SCAN(type).dataSourceId)
}),
response: {
200: z.object({ dataSource: responseSchema })
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const { dataSourceId } = req.params;
const dataSource = (await server.services.secretScanningV2.triggerSecretScanningDataSourceScan(
{ type, dataSourceId },
req.permission
)) as T;
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId: dataSource.projectId,
event: {
type: EventType.SECRET_SCANNING_DATA_SOURCE_TRIGGER_SCAN,
metadata: {
type,
dataSourceId
}
}
});
return { dataSource };
}
});
server.route({
method: "POST",
url: `/:dataSourceId/resources/:resourceId/scan`,
config: {
rateLimit: writeLimit
},
schema: {
hide: false,
tags: [ApiDocsTags.SecretScanning],
description: `Trigger a scan for the specified ${sourceType} Data Source resource.`,
params: z.object({
dataSourceId: z.string().uuid().describe(SecretScanningDataSources.SCAN(type).dataSourceId),
resourceId: z.string().uuid().describe(SecretScanningDataSources.SCAN(type).resourceId)
}),
response: {
200: z.object({ dataSource: responseSchema })
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const { dataSourceId, resourceId } = req.params;
const dataSource = (await server.services.secretScanningV2.triggerSecretScanningDataSourceScan(
{ type, dataSourceId, resourceId },
req.permission
)) as T;
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId: dataSource.projectId,
event: {
type: EventType.SECRET_SCANNING_DATA_SOURCE_TRIGGER_SCAN,
metadata: {
type,
dataSourceId,
resourceId
}
}
});
return { dataSource };
}
});
server.route({
method: "GET",
url: "/:dataSourceId/resources",
config: {
rateLimit: readLimit
},
schema: {
hide: false,
tags: [ApiDocsTags.SecretScanning],
description: `Get the resources associated with the specified ${sourceType} Data Source by ID.`,
params: z.object({
dataSourceId: z.string().uuid().describe(SecretScanningDataSources.LIST_RESOURCES(type).dataSourceId)
}),
response: {
200: z.object({ resources: SecretScanningResourcesSchema.array() })
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const { dataSourceId } = req.params;
const { resources, projectId } = await server.services.secretScanningV2.listSecretScanningResourcesByDataSourceId(
{ dataSourceId, type },
req.permission
);
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId,
event: {
type: EventType.SECRET_SCANNING_RESOURCE_LIST,
metadata: {
dataSourceId,
type,
resourceIds: resources.map((resource) => resource.id),
count: resources.length
}
}
});
return { resources };
}
});
server.route({
method: "GET",
url: "/:dataSourceId/scans",
config: {
rateLimit: readLimit
},
schema: {
hide: false,
tags: [ApiDocsTags.SecretScanning],
description: `Get the scans associated with the specified ${sourceType} Data Source by ID.`,
params: z.object({
dataSourceId: z.string().uuid().describe(SecretScanningDataSources.LIST_SCANS(type).dataSourceId)
}),
response: {
200: z.object({ scans: SecretScanningScansSchema.array() })
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const { dataSourceId } = req.params;
const { scans, projectId } = await server.services.secretScanningV2.listSecretScanningScansByDataSourceId(
{ dataSourceId, type },
req.permission
);
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId,
event: {
type: EventType.SECRET_SCANNING_SCAN_LIST,
metadata: {
dataSourceId,
type,
count: scans.length
}
}
});
return { scans };
}
});
// not exposed, for UI only (no `hide: false`/description, so it is omitted from public API docs)
// GET /:dataSourceId/resources-dashboard — resources enriched with last-scan status and unresolved finding counts.
server.route({
method: "GET",
url: "/:dataSourceId/resources-dashboard",
config: {
rateLimit: readLimit
},
schema: {
tags: [ApiDocsTags.SecretScanning],
params: z.object({
dataSourceId: z.string().uuid()
}),
response: {
200: z.object({
resources: SecretScanningResourcesSchema.extend({
lastScannedAt: z.date().nullish(),
lastScanStatus: z.nativeEnum(SecretScanningScanStatus).nullish(),
lastScanStatusMessage: z.string().nullish(),
unresolvedFindings: z.number()
}).array()
})
}
},
// JWT only: machine identities have no use for the dashboard view.
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const { dataSourceId } = req.params;
const { resources, projectId } =
await server.services.secretScanningV2.listSecretScanningResourcesWithDetailsByDataSourceId(
{ dataSourceId, type },
req.permission
);
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId,
event: {
type: EventType.SECRET_SCANNING_RESOURCE_LIST,
metadata: {
dataSourceId,
type,
resourceIds: resources.map((resource) => resource.id),
count: resources.length
}
}
});
return { resources };
}
});
// GET /:dataSourceId/scans-dashboard — UI-only: scans enriched with finding counts and resource names.
server.route({
method: "GET",
url: "/:dataSourceId/scans-dashboard",
config: {
rateLimit: readLimit
},
schema: {
tags: [ApiDocsTags.SecretScanning],
params: z.object({
dataSourceId: z.string().uuid()
}),
response: {
200: z.object({
scans: SecretScanningScansSchema.extend({
unresolvedFindings: z.number(),
resolvedFindings: z.number(),
resourceName: z.string()
}).array()
})
}
},
// JWT only: dashboard endpoints are not exposed to machine identities.
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const { dataSourceId } = req.params;
const { scans, projectId } =
await server.services.secretScanningV2.listSecretScanningScansWithDetailsByDataSourceId(
{ dataSourceId, type },
req.permission
);
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId,
event: {
type: EventType.SECRET_SCANNING_SCAN_LIST,
metadata: {
dataSourceId,
type,
count: scans.length
}
}
});
return { scans };
}
});
};

View File

@@ -0,0 +1,366 @@
import { z } from "zod";
import { SecretScanningConfigsSchema } from "@app/db/schemas";
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import { GitHubDataSourceListItemSchema } from "@app/ee/services/secret-scanning-v2/github";
import {
SecretScanningFindingStatus,
SecretScanningScanStatus
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
import {
SecretScanningDataSourceSchema,
SecretScanningFindingSchema
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-union-schemas";
import {
ApiDocsTags,
SecretScanningConfigs,
SecretScanningDataSources,
SecretScanningFindings
} from "@app/lib/api-docs";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
const SecretScanningDataSourceOptionsSchema = z.discriminatedUnion("type", [GitHubDataSourceListItemSchema]);
/**
 * Registers the type-agnostic Secret Scanning v2 routes: data source options/listing,
 * findings listing/update, per-project config get/update, plus two UI-only dashboard
 * endpoints. Every mutating or listing handler writes an audit-log entry scoped to the
 * resolved projectId before returning.
 */
export const registerSecretScanningV2Router = async (server: FastifyZodProvider) => {
// GET /data-sources/options — static catalog of supported data source types (no project scope, no audit log).
server.route({
method: "GET",
url: "/data-sources/options",
config: {
rateLimit: readLimit
},
schema: {
hide: false,
tags: [ApiDocsTags.SecretScanning],
description: "List the available Secret Scanning Data Source Options.",
response: {
200: z.object({
dataSourceOptions: SecretScanningDataSourceOptionsSchema.array()
})
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: () => {
const dataSourceOptions = server.services.secretScanningV2.listSecretScanningDataSourceOptions();
return { dataSourceOptions };
}
});
// GET /data-sources — all data sources (any type) for a project.
server.route({
method: "GET",
url: "/data-sources",
config: {
rateLimit: readLimit
},
schema: {
hide: false,
tags: [ApiDocsTags.SecretScanning],
description: "List all the Secret Scanning Data Sources for the specified project.",
querystring: z.object({
projectId: z.string().trim().min(1, "Project ID required").describe(SecretScanningDataSources.LIST().projectId)
}),
response: {
200: z.object({ dataSources: SecretScanningDataSourceSchema.array() })
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const {
query: { projectId },
permission
} = req;
const dataSources = await server.services.secretScanningV2.listSecretScanningDataSourcesByProjectId(
{ projectId },
permission
);
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId,
event: {
type: EventType.SECRET_SCANNING_DATA_SOURCE_LIST,
metadata: {
dataSourceIds: dataSources.map((dataSource) => dataSource.id),
count: dataSources.length
}
}
});
return { dataSources };
}
});
// GET /findings — all findings for a project.
server.route({
method: "GET",
url: "/findings",
config: {
rateLimit: readLimit
},
schema: {
hide: false,
tags: [ApiDocsTags.SecretScanning],
description: "List all the Secret Scanning Findings for the specified project.",
querystring: z.object({
projectId: z.string().trim().min(1, "Project ID required").describe(SecretScanningFindings.LIST.projectId)
}),
response: {
200: z.object({ findings: SecretScanningFindingSchema.array() })
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const {
query: { projectId },
permission
} = req;
const findings = await server.services.secretScanningV2.listSecretScanningFindingsByProjectId(
projectId,
permission
);
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId,
event: {
type: EventType.SECRET_SCANNING_FINDING_LIST,
metadata: {
findingIds: findings.map((finding) => finding.id),
count: findings.length
}
}
});
return { findings };
}
});
// PATCH /findings/:findingId — update a finding's status/remarks; audited with the patch body.
server.route({
method: "PATCH",
url: "/findings/:findingId",
config: {
rateLimit: writeLimit
},
schema: {
hide: false,
tags: [ApiDocsTags.SecretScanning],
description: "Update the specified Secret Scanning Finding.",
params: z.object({
findingId: z.string().trim().min(1, "Finding ID required").describe(SecretScanningFindings.UPDATE.findingId)
}),
body: z.object({
status: z.nativeEnum(SecretScanningFindingStatus).optional().describe(SecretScanningFindings.UPDATE.status),
remarks: z.string().nullish().describe(SecretScanningFindings.UPDATE.remarks)
}),
response: {
200: z.object({ finding: SecretScanningFindingSchema })
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const {
params: { findingId },
body,
permission
} = req;
const { finding, projectId } = await server.services.secretScanningV2.updateSecretScanningFindingById(
{ findingId, ...body },
permission
);
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId,
event: {
type: EventType.SECRET_SCANNING_FINDING_UPDATE,
metadata: {
findingId,
...body
}
}
});
return { finding };
}
});
// GET /configs — fetch the project's scanning config (custom ruleset content).
server.route({
method: "GET",
url: "/configs",
config: {
rateLimit: readLimit
},
schema: {
hide: false,
tags: [ApiDocsTags.SecretScanning],
description: "Get the Secret Scanning Config for the specified project.",
querystring: z.object({
projectId: z
.string()
.trim()
.min(1, "Project ID required")
.describe(SecretScanningConfigs.GET_BY_PROJECT_ID.projectId)
}),
response: {
200: z.object({
config: z.object({ content: z.string().nullish(), projectId: z.string(), updatedAt: z.date().nullish() })
})
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const {
query: { projectId },
permission
} = req;
const config = await server.services.secretScanningV2.findSecretScanningConfigByProjectId(projectId, permission);
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId,
event: {
type: EventType.SECRET_SCANNING_CONFIG_GET
}
});
return { config };
}
});
// PATCH /configs — upsert the project's scanning config.
server.route({
method: "PATCH",
url: "/configs",
config: {
rateLimit: writeLimit
},
schema: {
hide: false,
tags: [ApiDocsTags.SecretScanning],
description: "Update the specified Secret Scanning Configuration.",
querystring: z.object({
projectId: z.string().trim().min(1, "Project ID required").describe(SecretScanningConfigs.UPDATE.projectId)
}),
body: z.object({
content: z.string().nullable().describe(SecretScanningConfigs.UPDATE.content)
}),
response: {
200: z.object({ config: SecretScanningConfigsSchema })
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const {
query: { projectId },
body,
permission
} = req;
const config = await server.services.secretScanningV2.upsertSecretScanningConfig(
{ projectId, ...body },
permission
);
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId,
event: {
type: EventType.SECRET_SCANNING_CONFIG_UPDATE,
metadata: body
}
});
return { config };
}
});
// not exposed, for UI only
// GET /data-sources-dashboard — data sources enriched with last-scan status and unresolved finding counts.
server.route({
method: "GET",
url: "/data-sources-dashboard",
config: {
rateLimit: readLimit
},
schema: {
querystring: z.object({
projectId: z.string().trim().min(1, "Project ID required")
}),
response: {
200: z.object({
dataSources: z
.intersection(
SecretScanningDataSourceSchema,
z.object({
lastScannedAt: z.date().nullish(),
lastScanStatus: z.nativeEnum(SecretScanningScanStatus).nullish(),
lastScanStatusMessage: z.string().nullish(),
unresolvedFindings: z.number().nullish()
})
)
.array()
})
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const {
query: { projectId },
permission
} = req;
const dataSources = await server.services.secretScanningV2.listSecretScanningDataSourcesWithDetailsByProjectId(
{ projectId },
permission
);
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId,
event: {
type: EventType.SECRET_SCANNING_DATA_SOURCE_LIST,
metadata: {
dataSourceIds: dataSources.map((dataSource) => dataSource.id),
count: dataSources.length
}
}
});
return { dataSources };
}
});
// GET /unresolved-findings-count — UI badge count; read-only, so no audit log is written.
server.route({
method: "GET",
url: "/unresolved-findings-count",
config: {
rateLimit: readLimit
},
schema: {
tags: [ApiDocsTags.SecretScanning],
querystring: z.object({
projectId: z.string().trim().min(1, "Project ID required").describe(SecretScanningFindings.LIST.projectId)
}),
response: {
200: z.object({ unresolvedFindings: z.number() })
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const {
query: { projectId },
permission
} = req;
const unresolvedFindings =
await server.services.secretScanningV2.getSecretScanningUnresolvedFindingsCountByProjectId(
projectId,
permission
);
return { unresolvedFindings };
}
});
};

View File

@@ -10,6 +10,18 @@ import {
TSecretRotationV2Raw,
TUpdateSecretRotationV2DTO
} from "@app/ee/services/secret-rotation-v2/secret-rotation-v2-types";
import {
SecretScanningDataSource,
SecretScanningScanStatus,
SecretScanningScanType
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
import {
TCreateSecretScanningDataSourceDTO,
TDeleteSecretScanningDataSourceDTO,
TTriggerSecretScanningDataSourceDTO,
TUpdateSecretScanningDataSourceDTO,
TUpdateSecretScanningFindingDTO
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-types";
import { SshCaStatus, SshCertType } from "@app/ee/services/ssh/ssh-certificate-authority-types";
import { SshCertKeyAlgorithm } from "@app/ee/services/ssh-certificate/ssh-certificate-types";
import { SshCertTemplateStatus } from "@app/ee/services/ssh-certificate-template/ssh-certificate-template-types";
@@ -381,6 +393,20 @@ export enum EventType {
PROJECT_ASSUME_PRIVILEGE_SESSION_START = "project-assume-privileges-session-start",
PROJECT_ASSUME_PRIVILEGE_SESSION_END = "project-assume-privileges-session-end",
SECRET_SCANNING_DATA_SOURCE_LIST = "secret-scanning-data-source-list",
SECRET_SCANNING_DATA_SOURCE_CREATE = "secret-scanning-data-source-create",
SECRET_SCANNING_DATA_SOURCE_UPDATE = "secret-scanning-data-source-update",
SECRET_SCANNING_DATA_SOURCE_DELETE = "secret-scanning-data-source-delete",
SECRET_SCANNING_DATA_SOURCE_GET = "secret-scanning-data-source-get",
SECRET_SCANNING_DATA_SOURCE_TRIGGER_SCAN = "secret-scanning-data-source-trigger-scan",
SECRET_SCANNING_DATA_SOURCE_SCAN = "secret-scanning-data-source-scan",
SECRET_SCANNING_RESOURCE_LIST = "secret-scanning-resource-list",
SECRET_SCANNING_SCAN_LIST = "secret-scanning-scan-list",
SECRET_SCANNING_FINDING_LIST = "secret-scanning-finding-list",
SECRET_SCANNING_FINDING_UPDATE = "secret-scanning-finding-update",
SECRET_SCANNING_CONFIG_GET = "secret-scanning-config-get",
SECRET_SCANNING_CONFIG_UPDATE = "secret-scanning-config-update",
UPDATE_ORG = "update-org",
CREATE_PROJECT = "create-project",
@@ -2953,6 +2979,101 @@ interface MicrosoftTeamsWorkflowIntegrationUpdateEvent {
};
}
/** Audit-log event payloads for Secret Scanning v2 (data sources, scans, findings, configs). */
interface SecretScanningDataSourceListEvent {
type: EventType.SECRET_SCANNING_DATA_SOURCE_LIST;
metadata: {
// optional: omitted when listing across all data source types
type?: SecretScanningDataSource;
count: number;
dataSourceIds: string[];
};
}
interface SecretScanningDataSourceGetEvent {
type: EventType.SECRET_SCANNING_DATA_SOURCE_GET;
metadata: {
type: SecretScanningDataSource;
dataSourceId: string;
};
}
interface SecretScanningDataSourceCreateEvent {
type: EventType.SECRET_SCANNING_DATA_SOURCE_CREATE;
// projectId is dropped from the DTO because audit logs already carry it at the top level
metadata: Omit<TCreateSecretScanningDataSourceDTO, "projectId"> & { dataSourceId: string };
}
interface SecretScanningDataSourceUpdateEvent {
type: EventType.SECRET_SCANNING_DATA_SOURCE_UPDATE;
metadata: TUpdateSecretScanningDataSourceDTO;
}
interface SecretScanningDataSourceDeleteEvent {
type: EventType.SECRET_SCANNING_DATA_SOURCE_DELETE;
metadata: TDeleteSecretScanningDataSourceDTO;
}
interface SecretScanningDataSourceTriggerScanEvent {
type: EventType.SECRET_SCANNING_DATA_SOURCE_TRIGGER_SCAN;
metadata: TTriggerSecretScanningDataSourceDTO;
}
/** Emitted when a scan actually executes (as opposed to being requested/triggered). */
interface SecretScanningDataSourceScanEvent {
type: EventType.SECRET_SCANNING_DATA_SOURCE_SCAN;
metadata: {
scanId: string;
resourceId: string;
resourceType: string;
dataSourceId: string;
dataSourceType: string;
scanStatus: SecretScanningScanStatus;
scanType: SecretScanningScanType;
numberOfSecretsDetected?: number;
};
}
interface SecretScanningResourceListEvent {
type: EventType.SECRET_SCANNING_RESOURCE_LIST;
metadata: {
type: SecretScanningDataSource;
dataSourceId: string;
resourceIds: string[];
count: number;
};
}
interface SecretScanningScanListEvent {
type: EventType.SECRET_SCANNING_SCAN_LIST;
metadata: {
type: SecretScanningDataSource;
dataSourceId: string;
count: number;
};
}
interface SecretScanningFindingListEvent {
type: EventType.SECRET_SCANNING_FINDING_LIST;
metadata: {
findingIds: string[];
count: number;
};
}
interface SecretScanningFindingUpdateEvent {
type: EventType.SECRET_SCANNING_FINDING_UPDATE;
metadata: TUpdateSecretScanningFindingDTO;
}
interface SecretScanningConfigUpdateEvent {
type: EventType.SECRET_SCANNING_CONFIG_UPDATE;
metadata: {
content: string | null;
};
}
interface SecretScanningConfigReadEvent {
type: EventType.SECRET_SCANNING_CONFIG_GET;
metadata?: Record<string, never>; // not needed, based off projectId
}
interface OrgUpdateEvent {
type: EventType.UPDATE_ORG;
metadata: {
@@ -3276,6 +3397,19 @@ export type Event =
| MicrosoftTeamsWorkflowIntegrationGetEvent
| MicrosoftTeamsWorkflowIntegrationListEvent
| MicrosoftTeamsWorkflowIntegrationUpdateEvent
| SecretScanningDataSourceListEvent
| SecretScanningDataSourceGetEvent
| SecretScanningDataSourceCreateEvent
| SecretScanningDataSourceUpdateEvent
| SecretScanningDataSourceDeleteEvent
| SecretScanningDataSourceTriggerScanEvent
| SecretScanningDataSourceScanEvent
| SecretScanningResourceListEvent
| SecretScanningScanListEvent
| SecretScanningFindingListEvent
| SecretScanningFindingUpdateEvent
| SecretScanningConfigUpdateEvent
| SecretScanningConfigReadEvent
| OrgUpdateEvent
| ProjectCreateEvent
| ProjectUpdateEvent

View File

@@ -17,6 +17,7 @@ import { SapAseProvider } from "./sap-ase";
import { SapHanaProvider } from "./sap-hana";
import { SqlDatabaseProvider } from "./sql-database";
import { TotpProvider } from "./totp";
import { VerticaProvider } from "./vertica";
type TBuildDynamicSecretProviderDTO = {
gatewayService: Pick<TGatewayServiceFactory, "fnGetGatewayClientTlsByGatewayId">;
@@ -40,5 +41,6 @@ export const buildDynamicSecretProviders = ({
[DynamicSecretProviders.Snowflake]: SnowflakeProvider(),
[DynamicSecretProviders.Totp]: TotpProvider(),
[DynamicSecretProviders.SapAse]: SapAseProvider(),
[DynamicSecretProviders.Kubernetes]: KubernetesProvider({ gatewayService })
[DynamicSecretProviders.Kubernetes]: KubernetesProvider({ gatewayService }),
[DynamicSecretProviders.Vertica]: VerticaProvider({ gatewayService })
});

View File

@@ -16,7 +16,8 @@ export enum SqlProviders {
MySQL = "mysql2",
Oracle = "oracledb",
MsSQL = "mssql",
SapAse = "sap-ase"
SapAse = "sap-ase",
Vertica = "vertica"
}
export enum ElasticSearchAuthTypes {
@@ -293,6 +294,39 @@ export const DynamicSecretKubernetesSchema = z.object({
audiences: z.array(z.string().trim().min(1))
});
// Input schema for the Vertica dynamic secret provider.
export const DynamicSecretVerticaSchema = z.object({
host: z.string().trim().toLowerCase(),
port: z.number(),
// admin credentials used to create/revoke the per-lease users
username: z.string().trim(),
password: z.string().trim(),
database: z.string().trim(),
// when set, connections are tunneled through the Infisical gateway
gatewayId: z.string().nullable().optional(),
// handlebars templates: creation receives {{username}}/{{password}}, revocation {{username}}
creationStatement: z.string().trim(),
revocationStatement: z.string().trim(),
passwordRequirements: z
.object({
length: z.number().min(1).max(250),
required: z
.object({
lowercase: z.number().min(0),
uppercase: z.number().min(0),
digits: z.number().min(0),
symbols: z.number().min(0)
})
.refine((data) => {
const total = Object.values(data).reduce((sum, count) => sum + count, 0);
return total <= 250;
}, "Sum of required characters cannot exceed 250"),
allowedSymbols: z.string().optional()
})
.refine((data) => {
const total = Object.values(data.required).reduce((sum, count) => sum + count, 0);
return total <= data.length;
}, "Sum of required characters cannot exceed the total length")
.optional()
.describe("Password generation requirements")
});
export const DynamicSecretTotpSchema = z.discriminatedUnion("configType", [
z.object({
configType: z.literal(TotpConfigType.URL),
@@ -337,7 +371,8 @@ export enum DynamicSecretProviders {
Snowflake = "snowflake",
Totp = "totp",
SapAse = "sap-ase",
Kubernetes = "kubernetes"
Kubernetes = "kubernetes",
Vertica = "vertica"
}
export const DynamicSecretProviderSchema = z.discriminatedUnion("type", [
@@ -356,7 +391,8 @@ export const DynamicSecretProviderSchema = z.discriminatedUnion("type", [
z.object({ type: z.literal(DynamicSecretProviders.Ldap), inputs: LdapSchema }),
z.object({ type: z.literal(DynamicSecretProviders.Snowflake), inputs: DynamicSecretSnowflakeSchema }),
z.object({ type: z.literal(DynamicSecretProviders.Totp), inputs: DynamicSecretTotpSchema }),
z.object({ type: z.literal(DynamicSecretProviders.Kubernetes), inputs: DynamicSecretKubernetesSchema })
z.object({ type: z.literal(DynamicSecretProviders.Kubernetes), inputs: DynamicSecretKubernetesSchema }),
z.object({ type: z.literal(DynamicSecretProviders.Vertica), inputs: DynamicSecretVerticaSchema })
]);
export type TDynamicProviderFns = {

View File

@@ -0,0 +1,367 @@
import { randomInt } from "crypto";
import handlebars from "handlebars";
import knex, { Knex } from "knex";
import { z } from "zod";
import { BadRequestError } from "@app/lib/errors";
import { withGatewayProxy } from "@app/lib/gateway";
import { logger } from "@app/lib/logger";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { validateHandlebarTemplate } from "@app/lib/template/validate-handlebars";
import { TGatewayServiceFactory } from "../../gateway/gateway-service";
import { verifyHostInputValidity } from "../dynamic-secret-fns";
import { DynamicSecretVerticaSchema, PasswordRequirements, TDynamicProviderFns } from "./models";
// Timeout (ms) for acquiring a connection to the external Vertica host.
const EXTERNAL_REQUEST_TIMEOUT = 10 * 1000;
// Row shape returned by `SELECT version()`.
interface VersionResult {
version: string;
}
// Row shape returned when querying the Vertica `sessions` table.
interface SessionResult {
session_id?: string;
}
// Generic shape of a raw knex query result (`rows` presence varies by driver/query).
interface DatabaseQueryResult {
rows?: Array<Record<string, unknown>>;
}
// Extended Knex client interface to handle Vertica-specific overrides
interface VerticaKnexClient extends Knex {
client: {
parseVersion?: () => string;
};
}
// Defaults used when the data-source input omits passwordRequirements.
const DEFAULT_PASSWORD_REQUIREMENTS = {
length: 48,
required: {
lowercase: 1,
uppercase: 1,
digits: 1,
symbols: 0
},
allowedSymbols: "-_.~!*"
};
/**
 * Generates a random password satisfying the given character-class requirements
 * (falls back to DEFAULT_PASSWORD_REQUIREMENTS when omitted).
 *
 * Character selection uses crypto.randomInt (CSPRNG) and the result is
 * Fisher–Yates shuffled so required characters are not clustered at the front.
 *
 * @param requirements - total length, per-class minimums, and optional symbol alphabet.
 * @returns the generated password string of exactly `requirements.length` characters.
 * @throws Error (wrapping the underlying cause) if generation fails.
 */
const generatePassword = (requirements?: PasswordRequirements) => {
  const finalReqs = requirements || DEFAULT_PASSWORD_REQUIREMENTS;

  try {
    const { length, required, allowedSymbols } = finalReqs;

    const chars = {
      lowercase: "abcdefghijklmnopqrstuvwxyz",
      uppercase: "ABCDEFGHIJKLMNOPQRSTUVWXYZ",
      digits: "0123456789",
      symbols: allowedSymbols || "-_.~!*"
    };

    const parts: string[] = [];

    // Satisfy each per-class minimum first.
    if (required.lowercase > 0) {
      parts.push(
        ...Array(required.lowercase)
          .fill(0)
          .map(() => chars.lowercase[randomInt(chars.lowercase.length)])
      );
    }

    if (required.uppercase > 0) {
      parts.push(
        ...Array(required.uppercase)
          .fill(0)
          .map(() => chars.uppercase[randomInt(chars.uppercase.length)])
      );
    }

    if (required.digits > 0) {
      parts.push(
        ...Array(required.digits)
          .fill(0)
          .map(() => chars.digits[randomInt(chars.digits.length)])
      );
    }

    if (required.symbols > 0) {
      parts.push(
        ...Array(required.symbols)
          .fill(0)
          .map(() => chars.symbols[randomInt(chars.symbols.length)])
      );
    }

    const requiredTotal = Object.values(required).reduce<number>((a, b) => a + b, 0);
    const remainingLength = Math.max(length - requiredTotal, 0);

    // Fill the remainder from the classes that have a minimum requirement.
    // Fix: if no class is required at all, fall back to every class so we never
    // sample from an empty alphabet (randomInt(0) would throw a RangeError).
    const allowedChars =
      Object.entries(chars)
        .filter(([key]) => required[key as keyof typeof required] > 0)
        .map(([, value]) => value)
        .join("") || Object.values(chars).join("");

    parts.push(
      ...Array(remainingLength)
        .fill(0)
        .map(() => allowedChars[randomInt(allowedChars.length)])
    );

    // Fisher–Yates shuffle to mix required and filler characters.
    for (let i = parts.length - 1; i > 0; i -= 1) {
      const j = randomInt(i + 1);
      [parts[i], parts[j]] = [parts[j], parts[i]];
    }

    return parts.join("");
  } catch (error: unknown) {
    const message = error instanceof Error ? error.message : "Unknown error";
    throw new Error(`Failed to generate password: ${message}`);
  }
};
/**
 * Builds the database username for a dynamic secret lease.
 *
 * @param usernameTemplate - optional handlebars template; receives `randomUsername`
 *   and `unixTimestamp` (seconds since the Unix epoch).
 * @returns the rendered username, or the random default when no template is given.
 */
const generateUsername = (usernameTemplate?: string | null) => {
  const randomUsername = `inf_${alphaNumericNanoId(25)}`; // Username must start with an ascii letter, so we prepend the username with "inf_"
  if (!usernameTemplate) return randomUsername;

  return handlebars.compile(usernameTemplate)({
    randomUsername,
    // Fix: Date.now() is in milliseconds; a Unix timestamp is seconds, i.e. /1000 (was /100)
    unixTimestamp: Math.floor(Date.now() / 1000)
  });
};
type TVerticaProviderDTO = {
gatewayService: Pick<TGatewayServiceFactory, "fnGetGatewayClientTlsByGatewayId">;
};
/**
 * Dynamic secret provider for Vertica. Connects via knex's "pg" driver (Vertica speaks
 * the Postgres wire protocol) and supports optional tunneling through an Infisical gateway.
 * Implements the standard TDynamicProviderFns contract: validate, create, revoke, renew.
 */
export const VerticaProvider = ({ gatewayService }: TVerticaProviderDTO): TDynamicProviderFns => {
// Parses/validates raw inputs, resolves the host (skipping IP checks when a gateway is used),
// and rejects creation/revocation templates that reference unexpected handlebars variables.
const validateProviderInputs = async (inputs: unknown) => {
const providerInputs = await DynamicSecretVerticaSchema.parseAsync(inputs);
const [hostIp] = await verifyHostInputValidity(providerInputs.host, Boolean(providerInputs.gatewayId));
validateHandlebarTemplate("Vertica creation", providerInputs.creationStatement, {
allowedExpressions: (val) => ["username", "password"].includes(val)
});
if (providerInputs.revocationStatement) {
validateHandlebarTemplate("Vertica revoke", providerInputs.revocationStatement, {
allowedExpressions: (val) => ["username"].includes(val)
});
}
return { ...providerInputs, hostIp };
};
// Builds a single-connection knex client; callers are responsible for client.destroy().
const $getClient = async (providerInputs: z.infer<typeof DynamicSecretVerticaSchema> & { hostIp: string }) => {
const config = {
client: "pg",
connection: {
host: providerInputs.hostIp,
port: providerInputs.port,
database: providerInputs.database,
user: providerInputs.username,
password: providerInputs.password,
ssl: false
},
acquireConnectionTimeout: EXTERNAL_REQUEST_TIMEOUT,
pool: {
min: 0,
max: 1,
acquireTimeoutMillis: 30000,
createTimeoutMillis: 30000,
destroyTimeoutMillis: 5000,
idleTimeoutMillis: 30000,
reapIntervalMillis: 1000,
createRetryIntervalMillis: 100
},
// Disable version checking for Vertica compatibility
version: "9.6.0" // Fake a compatible PostgreSQL version
};
const client = knex(config) as VerticaKnexClient;
// Override the version parsing to prevent errors with Vertica
if (client.client && typeof client.client.parseVersion !== "undefined") {
client.client.parseVersion = () => "9.6.0";
}
return client;
};
// Runs `gatewayCallback` against a local TLS-tunneled proxy port that relays to the target host.
// NOTE(review): assumes gatewayId is set when called — callers guard on providerInputs.gatewayId.
const gatewayProxyWrapper = async (
providerInputs: z.infer<typeof DynamicSecretVerticaSchema>,
gatewayCallback: (host: string, port: number) => Promise<void>
) => {
const relayDetails = await gatewayService.fnGetGatewayClientTlsByGatewayId(providerInputs.gatewayId as string);
const [relayHost, relayPort] = relayDetails.relayAddress.split(":");
await withGatewayProxy(
async (port) => {
await gatewayCallback("localhost", port);
},
{
targetHost: providerInputs.host,
targetPort: providerInputs.port,
relayHost,
relayPort: Number(relayPort),
identityId: relayDetails.identityId,
orgId: relayDetails.orgId,
tlsOptions: {
ca: relayDetails.certChain,
cert: relayDetails.certificate,
key: relayDetails.privateKey.toString()
}
}
);
};
// Connectivity check: runs `SELECT version()` and succeeds only if a version row comes back.
const validateConnection = async (inputs: unknown) => {
const providerInputs = await validateProviderInputs(inputs);
let isConnected = false;
const gatewayCallback = async (host = providerInputs.hostIp, port = providerInputs.port) => {
let client: VerticaKnexClient | null = null;
try {
client = await $getClient({ ...providerInputs, hostIp: host, port });
const clientResult: DatabaseQueryResult = await client.raw("SELECT version() AS version");
const resultFromSelectedDatabase = clientResult.rows?.[0] as VersionResult | undefined;
if (!resultFromSelectedDatabase?.version) {
throw new BadRequestError({
message: "Failed to validate Vertica connection, version query failed"
});
}
isConnected = true;
} finally {
// always release the single pooled connection
if (client) await client.destroy();
}
};
if (providerInputs.gatewayId) {
await gatewayProxyWrapper(providerInputs, gatewayCallback);
} else {
await gatewayCallback();
}
return isConnected;
};
// Creates a new DB user by rendering the creation template and executing its
// semicolon-separated statements in order. Returns the lease credentials.
const create = async (data: { inputs: unknown; usernameTemplate?: string | null }) => {
const { inputs, usernameTemplate } = data;
const providerInputs = await validateProviderInputs(inputs);
const username = generateUsername(usernameTemplate);
const password = generatePassword(providerInputs.passwordRequirements);
const gatewayCallback = async (host = providerInputs.host, port = providerInputs.port) => {
let client: VerticaKnexClient | null = null;
try {
client = await $getClient({ ...providerInputs, hostIp: host, port });
const creationStatement = handlebars.compile(providerInputs.creationStatement, { noEscape: true })({
username,
password
});
const queries = creationStatement.trim().replaceAll("\n", "").split(";").filter(Boolean);
// Execute queries sequentially to maintain transaction integrity
for (const query of queries) {
const trimmedQuery = query.trim();
if (trimmedQuery) {
// eslint-disable-next-line no-await-in-loop
await client.raw(trimmedQuery);
}
}
} finally {
if (client) await client.destroy();
}
};
if (providerInputs.gatewayId) {
await gatewayProxyWrapper(providerInputs, gatewayCallback);
} else {
await gatewayCallback();
}
return { entityId: username, data: { DB_USERNAME: username, DB_PASSWORD: password } };
};
// Revokes a lease: best-effort closes the user's active sessions first, then runs the
// rendered revocation statements sequentially.
const revoke = async (inputs: unknown, username: string) => {
const providerInputs = await validateProviderInputs(inputs);
const gatewayCallback = async (host = providerInputs.host, port = providerInputs.port) => {
let client: VerticaKnexClient | null = null;
try {
client = await $getClient({ ...providerInputs, hostIp: host, port });
const revokeStatement = handlebars.compile(providerInputs.revocationStatement, { noEscape: true })({
username
});
const queries = revokeStatement.trim().replaceAll("\n", "").split(";").filter(Boolean);
// Check for active sessions and close them
try {
const sessionResult: DatabaseQueryResult = await client.raw(
"SELECT session_id FROM sessions WHERE user_name = ?",
[username]
);
const activeSessions = (sessionResult.rows || []) as SessionResult[];
// Close all sessions in parallel since they're independent operations
if (activeSessions.length > 0) {
const sessionClosePromises = activeSessions.map(async (session) => {
try {
await client!.raw("SELECT close_session(?)", [session.session_id]);
} catch (error) {
// Continue if session is already closed
logger.error(error, `Failed to close session ${session.session_id}`);
}
});
await Promise.allSettled(sessionClosePromises);
}
} catch (error) {
// Continue if we can't query sessions (permissions, etc.)
logger.error(error, "Could not query/close active sessions");
}
// Execute revocation queries sequentially to maintain transaction integrity
for (const query of queries) {
const trimmedQuery = query.trim();
if (trimmedQuery) {
// eslint-disable-next-line no-await-in-loop
await client.raw(trimmedQuery);
}
}
} finally {
if (client) await client.destroy();
}
};
if (providerInputs.gatewayId) {
await gatewayProxyWrapper(providerInputs, gatewayCallback);
} else {
await gatewayCallback();
}
return { entityId: username };
};
const renew = async (_: unknown, username: string) => {
// No need for renewal
return { entityId: username };
};
return {
validateProviderInputs,
validateConnection,
create,
revoke,
renew
};
};

View File

@@ -56,6 +56,7 @@ export const getDefaultOnPremFeatures = (): TFeatureSet => ({
kmip: false,
gateway: false,
sshHostGroups: false,
secretScanning: false,
enterpriseSecretSyncs: false,
enterpriseAppConnections: false
});

View File

@@ -72,6 +72,7 @@ export type TFeatureSet = {
kmip: false;
gateway: false;
sshHostGroups: false;
secretScanning: false;
enterpriseSecretSyncs: false;
enterpriseAppConnections: false;
};

View File

@@ -13,6 +13,9 @@ import {
ProjectPermissionPkiTemplateActions,
ProjectPermissionSecretActions,
ProjectPermissionSecretRotationActions,
ProjectPermissionSecretScanningConfigActions,
ProjectPermissionSecretScanningDataSourceActions,
ProjectPermissionSecretScanningFindingActions,
ProjectPermissionSecretSyncActions,
ProjectPermissionSet,
ProjectPermissionSshHostActions,
@@ -220,6 +223,29 @@ const buildAdminPermissionRules = () => {
ProjectPermissionSub.SecretRotation
);
can(
[
ProjectPermissionSecretScanningDataSourceActions.Create,
ProjectPermissionSecretScanningDataSourceActions.Edit,
ProjectPermissionSecretScanningDataSourceActions.Delete,
ProjectPermissionSecretScanningDataSourceActions.Read,
ProjectPermissionSecretScanningDataSourceActions.TriggerScans,
ProjectPermissionSecretScanningDataSourceActions.ReadScans,
ProjectPermissionSecretScanningDataSourceActions.ReadResources
],
ProjectPermissionSub.SecretScanningDataSources
);
can(
[ProjectPermissionSecretScanningFindingActions.Read, ProjectPermissionSecretScanningFindingActions.Update],
ProjectPermissionSub.SecretScanningFindings
);
can(
[ProjectPermissionSecretScanningConfigActions.Read, ProjectPermissionSecretScanningConfigActions.Update],
ProjectPermissionSub.SecretScanningConfigs
);
return rules;
};
@@ -401,6 +427,23 @@ const buildMemberPermissionRules = () => {
ProjectPermissionSub.SecretSyncs
);
can(
[
ProjectPermissionSecretScanningDataSourceActions.Read,
ProjectPermissionSecretScanningDataSourceActions.TriggerScans,
ProjectPermissionSecretScanningDataSourceActions.ReadScans,
ProjectPermissionSecretScanningDataSourceActions.ReadResources
],
ProjectPermissionSub.SecretScanningDataSources
);
can(
[ProjectPermissionSecretScanningFindingActions.Read, ProjectPermissionSecretScanningFindingActions.Update],
ProjectPermissionSub.SecretScanningFindings
);
can([ProjectPermissionSecretScanningConfigActions.Read], ProjectPermissionSub.SecretScanningConfigs);
return rules;
};
@@ -437,6 +480,19 @@ const buildViewerPermissionRules = () => {
can(ProjectPermissionActions.Read, ProjectPermissionSub.SshCertificateTemplates);
can(ProjectPermissionSecretSyncActions.Read, ProjectPermissionSub.SecretSyncs);
can(
[
ProjectPermissionSecretScanningDataSourceActions.Read,
ProjectPermissionSecretScanningDataSourceActions.ReadScans,
ProjectPermissionSecretScanningDataSourceActions.ReadResources
],
ProjectPermissionSub.SecretScanningDataSources
);
can([ProjectPermissionSecretScanningFindingActions.Read], ProjectPermissionSub.SecretScanningFindings);
can([ProjectPermissionSecretScanningConfigActions.Read], ProjectPermissionSub.SecretScanningConfigs);
return rules;
};

View File

@@ -132,6 +132,26 @@ export enum ProjectPermissionKmipActions {
GenerateClientCertificates = "generate-client-certificates"
}
// Granular project-level actions for Secret Scanning v2 data sources.
export enum ProjectPermissionSecretScanningDataSourceActions {
Read = "read-data-sources",
Create = "create-data-sources",
Edit = "edit-data-sources",
Delete = "delete-data-sources",
TriggerScans = "trigger-data-source-scans",
ReadScans = "read-data-source-scans",
ReadResources = "read-data-source-resources"
}
// Actions on secret scanning findings.
export enum ProjectPermissionSecretScanningFindingActions {
Read = "read-findings",
Update = "update-findings"
}
// Actions on the per-project secret scanning configuration.
export enum ProjectPermissionSecretScanningConfigActions {
Read = "read-configs",
Update = "update-configs"
}
export enum ProjectPermissionSub {
Role = "role",
Member = "member",
@@ -167,7 +187,10 @@ export enum ProjectPermissionSub {
Kms = "kms",
Cmek = "cmek",
SecretSyncs = "secret-syncs",
Kmip = "kmip"
Kmip = "kmip",
SecretScanningDataSources = "secret-scanning-data-sources",
SecretScanningFindings = "secret-scanning-findings",
SecretScanningConfigs = "secret-scanning-configs"
}
export type SecretSubjectFields = {
@@ -301,7 +324,10 @@ export type ProjectPermissionSet =
| [ProjectPermissionActions.Edit, ProjectPermissionSub.Project]
| [ProjectPermissionActions.Read, ProjectPermissionSub.SecretRollback]
| [ProjectPermissionActions.Create, ProjectPermissionSub.SecretRollback]
| [ProjectPermissionActions.Edit, ProjectPermissionSub.Kms];
| [ProjectPermissionActions.Edit, ProjectPermissionSub.Kms]
| [ProjectPermissionSecretScanningDataSourceActions, ProjectPermissionSub.SecretScanningDataSources]
| [ProjectPermissionSecretScanningFindingActions, ProjectPermissionSub.SecretScanningFindings]
| [ProjectPermissionSecretScanningConfigActions, ProjectPermissionSub.SecretScanningConfigs];
const SECRET_PATH_MISSING_SLASH_ERR_MSG = "Invalid Secret Path; it must start with a '/'";
const SECRET_PATH_PERMISSION_OPERATOR_SCHEMA = z.union([
@@ -631,6 +657,26 @@ const GeneralPermissionSchema = [
action: CASL_ACTION_SCHEMA_NATIVE_ENUM(ProjectPermissionKmipActions).describe(
"Describe what action an entity can take."
)
}),
z.object({
subject: z
.literal(ProjectPermissionSub.SecretScanningDataSources)
.describe("The entity this permission pertains to."),
action: CASL_ACTION_SCHEMA_NATIVE_ENUM(ProjectPermissionSecretScanningDataSourceActions).describe(
"Describe what action an entity can take."
)
}),
z.object({
subject: z.literal(ProjectPermissionSub.SecretScanningFindings).describe("The entity this permission pertains to."),
action: CASL_ACTION_SCHEMA_NATIVE_ENUM(ProjectPermissionSecretScanningFindingActions).describe(
"Describe what action an entity can take."
)
}),
z.object({
subject: z.literal(ProjectPermissionSub.SecretScanningConfigs).describe("The entity this permission pertains to."),
action: CASL_ACTION_SCHEMA_NATIVE_ENUM(ProjectPermissionSecretScanningConfigActions).describe(
"Describe what action an entity can take."
)
})
];

View File

@@ -0,0 +1,9 @@
import { SecretScanningDataSource } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
import { TSecretScanningDataSourceListItem } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-types";
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
// Static catalog entry describing the GitHub data source option surfaced to
// clients; `connection` names the app connection type it requires (GitHub Radar).
export const GITHUB_SECRET_SCANNING_DATA_SOURCE_LIST_OPTION: TSecretScanningDataSourceListItem = {
  name: "GitHub",
  type: SecretScanningDataSource.GitHub,
  connection: AppConnection.GitHubRadar
};

View File

@@ -0,0 +1,230 @@
import { join } from "path";
import { ProbotOctokit } from "probot";
import { scanContentAndGetFindings } from "@app/ee/services/secret-scanning/secret-scanning-queue/secret-scanning-fns";
import { SecretMatch } from "@app/ee/services/secret-scanning/secret-scanning-queue/secret-scanning-queue-types";
import {
SecretScanningDataSource,
SecretScanningFindingSeverity,
SecretScanningResource
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
import {
cloneRepository,
convertPatchLineToFileLineNumber,
replaceNonChangesWithNewlines
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-fns";
import {
TSecretScanningFactoryGetDiffScanFindingsPayload,
TSecretScanningFactoryGetDiffScanResourcePayload,
TSecretScanningFactoryGetFullScanPath,
TSecretScanningFactoryInitialize,
TSecretScanningFactoryListRawResources,
TSecretScanningFactoryPostInitialization
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-types";
import { getConfig } from "@app/lib/config/env";
import { BadRequestError } from "@app/lib/errors";
import { titleCaseToCamelCase } from "@app/lib/fn";
import { GitHubRepositoryRegex } from "@app/lib/regex";
import { listGitHubRadarRepositories, TGitHubRadarConnection } from "@app/services/app-connection/github-radar";
import { TGitHubDataSourceWithConnection, TQueueGitHubResourceDiffScan } from "./github-secret-scanning-types";
/**
 * Factory producing the GitHub implementation of the secret scanning data
 * source contract: initialization, resource (repository) listing, full-scan
 * checkout, and diff-scan payload/finding extraction from push webhooks.
 * The handlers are stateless — everything arrives via arguments.
 */
export const GitHubSecretScanningFactory = () => {
  // Ensures no other data source already claims this GitHub Radar installation,
  // then defers record creation to the caller via `callback`.
  const initialize: TSecretScanningFactoryInitialize<TGitHubRadarConnection> = async (
    { connection, secretScanningV2DAL },
    callback
  ) => {
    // A GitHub App installation can back at most one data source.
    const externalId = connection.credentials.installationId;
    const existingDataSource = await secretScanningV2DAL.dataSources.findOne({
      externalId,
      type: SecretScanningDataSource.GitHub
    });
    if (existingDataSource)
      throw new BadRequestError({
        message: `A Data Source already exists for this GitHub Radar Connection in the Project with ID "${existingDataSource.projectId}"`
      });
    return callback({
      externalId
    });
  };
  const postInitialization: TSecretScanningFactoryPostInitialization<TGitHubRadarConnection> = async () => {
    // no post-initialization required
  };
  // Lists repositories visible to the installation, filtered by the data
  // source's includeRepos config ("*" selects every repository).
  const listRawResources: TSecretScanningFactoryListRawResources<TGitHubDataSourceWithConnection> = async (
    dataSource
  ) => {
    const {
      connection,
      config: { includeRepos }
    } = dataSource;
    const repos = await listGitHubRadarRepositories(connection);
    const filteredRepos: typeof repos = [];
    if (includeRepos.includes("*")) {
      filteredRepos.push(...repos);
    } else {
      filteredRepos.push(...repos.filter((repo) => includeRepos.includes(repo.full_name)));
    }
    return filteredRepos.map(({ id, full_name }) => ({
      name: full_name,
      externalId: id.toString(),
      type: SecretScanningResource.Repository
    }));
  };
  // Bare-clones the target repository into tempFolder using a short-lived
  // installation access token and returns the local checkout path.
  const getFullScanPath: TSecretScanningFactoryGetFullScanPath<TGitHubDataSourceWithConnection> = async ({
    dataSource,
    resourceName,
    tempFolder
  }) => {
    const appCfg = getConfig();
    const {
      connection: {
        credentials: { installationId }
      }
    } = dataSource;
    const octokit = new ProbotOctokit({
      auth: {
        appId: appCfg.INF_APP_CONNECTION_GITHUB_RADAR_APP_ID,
        privateKey: appCfg.INF_APP_CONNECTION_GITHUB_RADAR_APP_PRIVATE_KEY,
        installationId
      }
    });
    // Mint a token scoped to this installation for the clone URL below.
    const {
      data: { token }
    } = await octokit.apps.createInstallationAccessToken({
      installation_id: Number(installationId)
    });
    const repoPath = join(tempFolder, "repo.git");
    // Defense-in-depth: reject names that could smuggle characters into the
    // clone URL / git command.
    if (!GitHubRepositoryRegex.test(resourceName)) {
      throw new Error("Invalid GitHub repository name");
    }
    await cloneRepository({
      cloneUrl: `https://x-access-token:${token}@github.com/${resourceName}.git`,
      repoPath
    });
    return repoPath;
  };
  // Maps a push-event payload to the resource record the diff scan targets.
  const getDiffScanResourcePayload: TSecretScanningFactoryGetDiffScanResourcePayload<
    TQueueGitHubResourceDiffScan["payload"]
  > = ({ repository }) => {
    return {
      name: repository.full_name,
      externalId: repository.id.toString(),
      type: SecretScanningResource.Repository
    };
  };
  // Scans each pushed commit's added/modified file patches and translates
  // patch-relative finding coordinates into file/line coordinates.
  const getDiffScanFindingsPayload: TSecretScanningFactoryGetDiffScanFindingsPayload<
    TGitHubDataSourceWithConnection,
    TQueueGitHubResourceDiffScan["payload"]
  > = async ({ dataSource, payload, resourceName, configPath }) => {
    const appCfg = getConfig();
    const {
      connection: {
        credentials: { installationId }
      }
    } = dataSource;
    const octokit = new ProbotOctokit({
      auth: {
        appId: appCfg.INF_APP_CONNECTION_GITHUB_RADAR_APP_ID,
        privateKey: appCfg.INF_APP_CONNECTION_GITHUB_RADAR_APP_PRIVATE_KEY,
        installationId
      }
    });
    const { commits, repository } = payload;
    const [owner, repo] = repository.full_name.split("/");
    const allFindings: SecretMatch[] = [];
    // Commits are fetched sequentially (await-in-loop) — presumably to avoid
    // hammering the GitHub API; confirm before parallelizing.
    for (const commit of commits) {
      // eslint-disable-next-line no-await-in-loop
      const commitData = await octokit.repos.getCommit({
        owner,
        repo,
        ref: commit.id
      });
      // eslint-disable-next-line no-continue
      if (!commitData.data.files) continue;
      for (const file of commitData.data.files) {
        // Only newly-added content can introduce new secrets; deletions/renames
        // without a patch are skipped.
        if ((file.status === "added" || file.status === "modified") && file.patch) {
          // Non-added patch lines are blanked (not removed) so the scanner's
          // line numbers stay aligned with patch line positions; the leading
          // "\n" shifts content by one line — assumed to match the 1-based
          // mapping in convertPatchLineToFileLineNumber, TODO confirm.
          // eslint-disable-next-line
          const findings = await scanContentAndGetFindings(
            replaceNonChangesWithNewlines(`\n${file.patch}`),
            configPath
          );
          const adjustedFindings = findings.map((finding) => {
            const startLine = convertPatchLineToFileLineNumber(file.patch!, finding.StartLine);
            const endLine =
              finding.StartLine === finding.EndLine
                ? startLine
                : convertPatchLineToFileLineNumber(file.patch!, finding.EndLine);
            const startColumn = finding.StartColumn - 1; // subtract 1 for +
            const endColumn = finding.EndColumn - 1; // subtract 1 for +
            return {
              ...finding,
              StartLine: startLine,
              EndLine: endLine,
              StartColumn: startColumn,
              EndColumn: endColumn,
              File: file.filename,
              Commit: commit.id,
              Author: commit.author.name,
              Email: commit.author.email ?? "",
              Message: commit.message,
              // Fingerprint uniquely identifies a finding occurrence for dedupe.
              Fingerprint: `${commit.id}:${file.filename}:${finding.RuleID}:${startLine}:${startColumn}`,
              Date: commit.timestamp,
              Link: `https://github.com/${resourceName}/blob/${commit.id}/${file.filename}#L${startLine}`
            };
          });
          allFindings.push(...adjustedFindings);
        }
      }
    }
    return allFindings.map(
      ({
        // discard match and secret as we don't want to store
        Match,
        Secret,
        ...finding
      }) => ({
        details: titleCaseToCamelCase(finding),
        fingerprint: finding.Fingerprint,
        severity: SecretScanningFindingSeverity.High,
        rule: finding.RuleID
      })
    );
  };
  return {
    initialize,
    postInitialization,
    listRawResources,
    getFullScanPath,
    getDiffScanResourcePayload,
    getDiffScanFindingsPayload
  };
};

View File

@@ -0,0 +1,85 @@
import { z } from "zod";
import {
SecretScanningDataSource,
SecretScanningResource
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
import {
BaseCreateSecretScanningDataSourceSchema,
BaseSecretScanningDataSourceSchema,
BaseSecretScanningFindingSchema,
BaseUpdateSecretScanningDataSourceSchema,
GitRepositoryScanFindingDetailsSchema
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-schemas";
import { SecretScanningDataSources } from "@app/lib/api-docs";
import { GitHubRepositoryRegex } from "@app/lib/regex";
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
// Validates which repositories a GitHub data source should scan. Entries are
// either "*" (all repositories) or repository slugs matching GitHubRepositoryRegex.
// Defaults to scanning everything.
export const GitHubDataSourceConfigSchema = z.object({
  includeRepos: z
    .array(
      z
        .string()
        .min(1)
        .max(256)
        .refine((value) => value === "*" || GitHubRepositoryRegex.test(value), "Invalid repository name format")
    )
    .nonempty("One or more repositories required")
    .max(100, "Cannot configure more than 100 repositories")
    .default(["*"])
    .describe(SecretScanningDataSources.CONFIG.GITHUB.includeRepos)
});
// Full GitHub data source shape (base columns + GitHub-specific config).
// The JSON title in .describe() is consumed by the API docs generator.
export const GitHubDataSourceSchema = BaseSecretScanningDataSourceSchema({
  type: SecretScanningDataSource.GitHub,
  isConnectionRequired: true
})
  .extend({
    config: GitHubDataSourceConfigSchema
  })
  .describe(
    JSON.stringify({
      title: "GitHub"
    })
  );
// Request schema for creating a GitHub data source; a connection is mandatory.
export const CreateGitHubDataSourceSchema = BaseCreateSecretScanningDataSourceSchema({
  type: SecretScanningDataSource.GitHub,
  isConnectionRequired: true
})
  .extend({
    config: GitHubDataSourceConfigSchema
  })
  .describe(
    JSON.stringify({
      title: "GitHub"
    })
  );
// Request schema for updating a GitHub data source; config is optional so
// partial updates that leave the repository selection untouched are valid.
export const UpdateGitHubDataSourceSchema = BaseUpdateSecretScanningDataSourceSchema(SecretScanningDataSource.GitHub)
  .extend({
    config: GitHubDataSourceConfigSchema.optional()
  })
  .describe(
    JSON.stringify({
      title: "GitHub"
    })
  );
// Catalog list-item schema mirroring GITHUB_SECRET_SCANNING_DATA_SOURCE_LIST_OPTION.
export const GitHubDataSourceListItemSchema = z
  .object({
    name: z.literal("GitHub"),
    connection: z.literal(AppConnection.GitHubRadar),
    type: z.literal(SecretScanningDataSource.GitHub)
  })
  .describe(
    JSON.stringify({
      title: "GitHub"
    })
  );
// GitHub findings always originate from a repository resource and carry
// git-scan detail fields (commit, file, line range, etc.).
export const GitHubFindingSchema = BaseSecretScanningFindingSchema.extend({
  resourceType: z.literal(SecretScanningResource.Repository),
  dataSourceType: z.literal(SecretScanningDataSource.GitHub),
  details: GitRepositoryScanFindingDetailsSchema
});

View File

@@ -0,0 +1,87 @@
import { PushEvent } from "@octokit/webhooks-types";
import { TSecretScanningV2DALFactory } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-dal";
import { SecretScanningDataSource } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
import { TSecretScanningV2QueueServiceFactory } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-queue";
import { logger } from "@app/lib/logger";
import { TGitHubDataSource } from "./github-secret-scanning-types";
/**
 * GitHub webhook handlers for secret scanning v2.
 *
 * @param secretScanningV2DAL - data access layer for scanning tables
 * @param secretScanningV2Queue - queue used to enqueue diff scans
 * @returns handlers for push and installation-deleted webhook events
 */
export const githubSecretScanningService = (
  secretScanningV2DAL: TSecretScanningV2DALFactory,
  secretScanningV2Queue: Pick<TSecretScanningV2QueueServiceFactory, "queueResourceDiffScan">
) => {
  // Marks the matching data source as disconnected when the GitHub App
  // installation is removed; the record is kept rather than deleted.
  const handleInstallationDeletedEvent = async (installationId: number) => {
    const dataSource = await secretScanningV2DAL.dataSources.findOne({
      externalId: String(installationId),
      type: SecretScanningDataSource.GitHub
    });
    if (!dataSource) {
      logger.error(
        `secretScanningV2RemoveEvent: GitHub - Could not find data source [installationId=${installationId}]`
      );
      return;
    }
    logger.info(
      `secretScanningV2RemoveEvent: GitHub - installation deleted [installationId=${installationId}] [dataSourceId=${dataSource.id}]`
    );
    await secretScanningV2DAL.dataSources.updateById(dataSource.id, {
      isDisconnected: true
    });
  };
  // Queues a diff scan for a push event when the data source exists, auto-scan
  // is enabled, and the pushed repository is included in the source's config.
  const handlePushEvent = async (payload: PushEvent) => {
    const { commits, repository, installation } = payload;
    if (!commits || !repository || !installation) {
      // Fix: `repository` must use optional chaining here — this branch is
      // entered precisely when one of these fields is missing, so an unguarded
      // `repository.name` could itself throw while logging the warning.
      logger.warn(
        `secretScanningV2PushEvent: GitHub - Insufficient data [commits=${commits?.length ?? 0}] [repository=${repository?.name}] [installationId=${installation?.id}]`
      );
      return;
    }
    const dataSource = (await secretScanningV2DAL.dataSources.findOne({
      externalId: String(installation.id),
      type: SecretScanningDataSource.GitHub
    })) as TGitHubDataSource | undefined;
    if (!dataSource) {
      logger.error(
        `secretScanningV2PushEvent: GitHub - Could not find data source [installationId=${installation.id}]`
      );
      return;
    }
    const {
      isAutoScanEnabled,
      config: { includeRepos }
    } = dataSource;
    if (!isAutoScanEnabled) {
      logger.info(
        `secretScanningV2PushEvent: GitHub - ignoring due to auto scan disabled [dataSourceId=${dataSource.id}] [installationId=${installation.id}]`
      );
      return;
    }
    if (includeRepos.includes("*") || includeRepos.includes(repository.full_name)) {
      await secretScanningV2Queue.queueResourceDiffScan({
        dataSourceType: SecretScanningDataSource.GitHub,
        payload,
        dataSourceId: dataSource.id
      });
    } else {
      logger.info(
        `secretScanningV2PushEvent: GitHub - ignoring due to repository not being present in config [installationId=${installation.id}] [dataSourceId=${dataSource.id}]`
      );
    }
  };
  return {
    handlePushEvent,
    handleInstallationDeletedEvent
  };
};

View File

@@ -0,0 +1,32 @@
import { PushEvent } from "@octokit/webhooks-types";
import { z } from "zod";
import { SecretScanningDataSource } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
import { TGitHubRadarConnection } from "@app/services/app-connection/github-radar";
import {
CreateGitHubDataSourceSchema,
GitHubDataSourceListItemSchema,
GitHubDataSourceSchema,
GitHubFindingSchema
} from "./github-secret-scanning-schemas";
// GitHub data source types are inferred from the zod schemas so the runtime
// validators remain the single source of truth for these shapes.
export type TGitHubDataSource = z.infer<typeof GitHubDataSourceSchema>;
export type TGitHubDataSourceInput = z.infer<typeof CreateGitHubDataSourceSchema>;
export type TGitHubDataSourceListItem = z.infer<typeof GitHubDataSourceListItemSchema>;
export type TGitHubFinding = z.infer<typeof GitHubFindingSchema>;

// Data source joined with its GitHub Radar app connection.
export type TGitHubDataSourceWithConnection = TGitHubDataSource & {
  connection: TGitHubRadarConnection;
};

// Queue job payload for a webhook-triggered diff scan of one repository.
export type TQueueGitHubResourceDiffScan = {
  dataSourceType: SecretScanningDataSource.GitHub;
  payload: PushEvent;
  dataSourceId: string;
  resourceId: string;
  scanId: string;
};

View File

@@ -0,0 +1,3 @@
// Barrel file re-exporting the GitHub secret scanning building blocks.
export * from "./github-secret-scanning-constants";
export * from "./github-secret-scanning-schemas";
export * from "./github-secret-scanning-types";

View File

@@ -0,0 +1,460 @@
import { Knex } from "knex";
import { TDbClient } from "@app/db";
import {
SecretScanningResourcesSchema,
SecretScanningScansSchema,
TableName,
TSecretScanningDataSources
} from "@app/db/schemas";
import { SecretScanningFindingStatus } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
import { DatabaseError } from "@app/lib/errors";
import {
buildFindFilter,
ormify,
prependTableNameToFindFilter,
selectAllTableCols,
sqlNestRelationships,
TFindOpt
} from "@app/lib/knex";
// Public DAL type derived from the factory's return value.
export type TSecretScanningV2DALFactory = ReturnType<typeof secretScanningV2DALFactory>;

type TSecretScanningDataSourceFindFilter = Parameters<typeof buildFindFilter<TSecretScanningDataSources>>[0];
type TSecretScanningDataSourceFindOptions = TFindOpt<TSecretScanningDataSources, true, "name">;

// Base query joining a data source row with its app connection's columns,
// each aliased with a "connection" prefix so expandSecretScanningDataSource
// can re-nest them into a `connection` object afterwards.
const baseSecretScanningDataSourceQuery = ({
  filter = {},
  db,
  tx
}: {
  db: TDbClient;
  filter?: TSecretScanningDataSourceFindFilter;
  // NOTE(review): `options` is accepted but never read — confirm whether
  // sorting/pagination support is still planned or this should be dropped.
  options?: TSecretScanningDataSourceFindOptions;
  tx?: Knex;
}) => {
  // Reads go to a replica unless an explicit transaction is supplied.
  const query = (tx || db.replicaNode())(TableName.SecretScanningDataSource)
    .join(
      TableName.AppConnection,
      `${TableName.SecretScanningDataSource}.connectionId`,
      `${TableName.AppConnection}.id`
    )
    .select(selectAllTableCols(TableName.SecretScanningDataSource))
    .select(
      // entire connection
      db.ref("name").withSchema(TableName.AppConnection).as("connectionName"),
      db.ref("method").withSchema(TableName.AppConnection).as("connectionMethod"),
      db.ref("app").withSchema(TableName.AppConnection).as("connectionApp"),
      db.ref("orgId").withSchema(TableName.AppConnection).as("connectionOrgId"),
      db.ref("encryptedCredentials").withSchema(TableName.AppConnection).as("connectionEncryptedCredentials"),
      db.ref("description").withSchema(TableName.AppConnection).as("connectionDescription"),
      db.ref("version").withSchema(TableName.AppConnection).as("connectionVersion"),
      db.ref("createdAt").withSchema(TableName.AppConnection).as("connectionCreatedAt"),
      db.ref("updatedAt").withSchema(TableName.AppConnection).as("connectionUpdatedAt"),
      db
        .ref("isPlatformManagedCredentials")
        .withSchema(TableName.AppConnection)
        .as("connectionIsPlatformManagedCredentials")
    );
  // `filter` defaults to {} above so this branch always runs; an empty filter
  // produces a no-op where clause.
  if (filter) {
    /* eslint-disable @typescript-eslint/no-misused-promises */
    void query.where(buildFindFilter(prependTableNameToFindFilter(TableName.SecretScanningDataSource, filter)));
  }
  return query;
};
// Re-nests the "connection"-prefixed columns produced by
// baseSecretScanningDataSourceQuery into a `connection` sub-object, leaving
// every other data source column untouched. Pure transformation, no I/O.
const expandSecretScanningDataSource = <
  T extends Awaited<ReturnType<typeof baseSecretScanningDataSourceQuery>>[number]
>(
  dataSource: T
) => {
  // Peel the flattened connection columns off (renaming them to their final
  // keys in one step) and keep the remainder of the row as-is.
  const {
    connectionApp: app,
    connectionName: name,
    connectionId,
    connectionOrgId: orgId,
    connectionEncryptedCredentials: encryptedCredentials,
    connectionMethod: method,
    connectionDescription: description,
    connectionCreatedAt: createdAt,
    connectionUpdatedAt: updatedAt,
    connectionVersion: version,
    connectionIsPlatformManagedCredentials: isPlatformManagedCredentials,
    ...rest
  } = dataSource;

  // Only build the nested object when a connection row was actually joined.
  const connection = connectionId
    ? {
        app,
        id: connectionId,
        name,
        orgId,
        encryptedCredentials,
        method,
        description,
        createdAt,
        updatedAt,
        version,
        isPlatformManagedCredentials
      }
    : undefined;

  return { ...rest, connectionId, connection };
};
/**
 * Data access layer for secret scanning v2. Wraps the plain ORMs for the five
 * scanning tables and layers join-heavy read helpers on top for views that
 * need connection / scan / finding context.
 */
export const secretScanningV2DALFactory = (db: TDbClient) => {
  const dataSourceOrm = ormify(db, TableName.SecretScanningDataSource);
  const resourceOrm = ormify(db, TableName.SecretScanningResource);
  const scanOrm = ormify(db, TableName.SecretScanningScan);
  const findingOrm = ormify(db, TableName.SecretScanningFinding);
  const configOrm = ormify(db, TableName.SecretScanningConfig);
  // find() override that also hydrates each row's app connection.
  const findDataSource = async (filter: Parameters<(typeof dataSourceOrm)["find"]>[0], tx?: Knex) => {
    try {
      const dataSources = await baseSecretScanningDataSourceQuery({ filter, db, tx });
      if (!dataSources.length) return [];
      return dataSources.map(expandSecretScanningDataSource);
    } catch (error) {
      throw new DatabaseError({ error, name: "Find - Secret Scanning Data Source" });
    }
  };
  // Returns undefined (not an error) when no row matches the id.
  const findDataSourceById = async (id: string, tx?: Knex) => {
    try {
      const dataSource = await baseSecretScanningDataSourceQuery({ filter: { id }, db, tx }).first();
      if (dataSource) return expandSecretScanningDataSource(dataSource);
    } catch (error) {
      throw new DatabaseError({ error, name: "Find By ID - Secret Scanning Data Source" });
    }
  };
  // Creates via the ORM then re-reads through the base query so the returned
  // record includes the joined connection (non-null assertion: row just created).
  const createDataSource = async (data: Parameters<(typeof dataSourceOrm)["create"]>[0], tx?: Knex) => {
    const source = await dataSourceOrm.create(data, tx);
    const dataSource = (await baseSecretScanningDataSourceQuery({
      filter: { id: source.id },
      db,
      tx
    }).first())!;
    return expandSecretScanningDataSource(dataSource);
  };
  // Same re-read pattern as createDataSource after applying the update.
  const updateDataSourceById = async (
    dataSourceId: string,
    data: Parameters<(typeof dataSourceOrm)["updateById"]>[1],
    tx?: Knex
  ) => {
    const source = await dataSourceOrm.updateById(dataSourceId, data, tx);
    const dataSource = (await baseSecretScanningDataSourceQuery({
      filter: { id: source.id },
      db,
      tx
    }).first())!;
    return expandSecretScanningDataSource(dataSource);
  };
  // Fetches the expanded row first so it can be returned after deletion.
  const deleteDataSourceById = async (dataSourceId: string, tx?: Knex) => {
    const dataSource = (await baseSecretScanningDataSourceQuery({
      filter: { id: dataSourceId },
      db,
      tx
    }).first())!;
    await dataSourceOrm.deleteById(dataSourceId, tx);
    return expandSecretScanningDataSource(dataSource);
  };
  // findOne() override with connection hydration; undefined when no match.
  const findOneDataSource = async (filter: Parameters<(typeof dataSourceOrm)["findOne"]>[0], tx?: Knex) => {
    try {
      const dataSource = await baseSecretScanningDataSourceQuery({ filter, db, tx }).first();
      if (dataSource) {
        return expandSecretScanningDataSource(dataSource);
      }
    } catch (error) {
      throw new DatabaseError({ error, name: "Find One - Secret Scanning Data Source" });
    }
  };
  // Data sources enriched with their latest scan status and the count of
  // unresolved findings, computed client-side from the joined rows.
  const findDataSourceWithDetails = async (filter: Parameters<(typeof dataSourceOrm)["find"]>[0], tx?: Knex) => {
    try {
      // TODO (scott): this query will probably need to be optimized
      const dataSources = await baseSecretScanningDataSourceQuery({ filter, db, tx })
        .leftJoin(
          TableName.SecretScanningResource,
          `${TableName.SecretScanningResource}.dataSourceId`,
          `${TableName.SecretScanningDataSource}.id`
        )
        .leftJoin(
          TableName.SecretScanningScan,
          `${TableName.SecretScanningScan}.resourceId`,
          `${TableName.SecretScanningResource}.id`
        )
        .leftJoin(
          TableName.SecretScanningFinding,
          `${TableName.SecretScanningFinding}.scanId`,
          `${TableName.SecretScanningScan}.id`
        )
        // Keep rows with unresolved findings OR no finding at all, so sources
        // without findings still appear in the result set.
        .where((qb) => {
          void qb
            .where(`${TableName.SecretScanningFinding}.status`, SecretScanningFindingStatus.Unresolved)
            .orWhereNull(`${TableName.SecretScanningFinding}.status`);
        })
        .select(
          db.ref("id").withSchema(TableName.SecretScanningScan).as("scanId"),
          db.ref("status").withSchema(TableName.SecretScanningScan).as("scanStatus"),
          db.ref("statusMessage").withSchema(TableName.SecretScanningScan).as("scanStatusMessage"),
          db.ref("createdAt").withSchema(TableName.SecretScanningScan).as("scanCreatedAt"),
          db.ref("status").withSchema(TableName.SecretScanningFinding).as("findingStatus"),
          db.ref("id").withSchema(TableName.SecretScanningFinding).as("findingId")
        );
      if (!dataSources.length) return [];
      // Collapse the joined rows back into one data source with child arrays.
      const results = sqlNestRelationships({
        data: dataSources,
        key: "id",
        parentMapper: (dataSource) => expandSecretScanningDataSource(dataSource),
        childrenMapper: [
          {
            key: "scanId",
            label: "scans" as const,
            mapper: ({ scanId, scanCreatedAt, scanStatus, scanStatusMessage }) => ({
              id: scanId,
              createdAt: scanCreatedAt,
              status: scanStatus,
              statusMessage: scanStatusMessage
            })
          },
          {
            key: "findingId",
            label: "findings" as const,
            mapper: ({ findingId }) => ({
              id: findingId
            })
          }
        ]
      });
      return results.map(({ scans, findings, ...dataSource }) => {
        // Most recent scan by createdAt (null when the source was never scanned).
        const lastScan =
          scans && scans.length
            ? scans.reduce((latest, current) => {
                return new Date(current.createdAt) > new Date(latest.createdAt) ? current : latest;
              })
            : null;
        return {
          ...dataSource,
          lastScanStatus: lastScan?.status ?? null,
          lastScanStatusMessage: lastScan?.statusMessage ?? null,
          lastScannedAt: lastScan?.createdAt ?? null,
          // NOTE(review): null (rather than 0) when the source has no scans —
          // presumably to distinguish "never scanned" from "scanned, clean";
          // confirm consumers rely on this.
          unresolvedFindings: scans.length ? findings.length : null
        };
      });
    } catch (error) {
      throw new DatabaseError({ error, name: "Find with Details - Secret Scanning Data Source" });
    }
  };
  // Resources enriched with their latest scan and unresolved-finding count.
  const findResourcesWithDetails = async (filter: Parameters<(typeof resourceOrm)["find"]>[0], tx?: Knex) => {
    try {
      // TODO (scott): this query will probably need to be optimized
      const resources = await (tx || db.replicaNode())(TableName.SecretScanningResource)
        .where((qb) => {
          if (filter)
            void qb.where(buildFindFilter(prependTableNameToFindFilter(TableName.SecretScanningResource, filter)));
        })
        .leftJoin(
          TableName.SecretScanningScan,
          `${TableName.SecretScanningScan}.resourceId`,
          `${TableName.SecretScanningResource}.id`
        )
        .leftJoin(
          TableName.SecretScanningFinding,
          `${TableName.SecretScanningFinding}.scanId`,
          `${TableName.SecretScanningScan}.id`
        )
        // Same unresolved-or-none filter as findDataSourceWithDetails.
        .where((qb) => {
          void qb
            .where(`${TableName.SecretScanningFinding}.status`, SecretScanningFindingStatus.Unresolved)
            .orWhereNull(`${TableName.SecretScanningFinding}.status`);
        })
        .select(selectAllTableCols(TableName.SecretScanningResource))
        .select(
          db.ref("id").withSchema(TableName.SecretScanningScan).as("scanId"),
          db.ref("status").withSchema(TableName.SecretScanningScan).as("scanStatus"),
          db.ref("type").withSchema(TableName.SecretScanningScan).as("scanType"),
          db.ref("statusMessage").withSchema(TableName.SecretScanningScan).as("scanStatusMessage"),
          db.ref("createdAt").withSchema(TableName.SecretScanningScan).as("scanCreatedAt"),
          db.ref("status").withSchema(TableName.SecretScanningFinding).as("findingStatus"),
          db.ref("id").withSchema(TableName.SecretScanningFinding).as("findingId")
        );
      if (!resources.length) return [];
      const results = sqlNestRelationships({
        data: resources,
        key: "id",
        parentMapper: (resource) => SecretScanningResourcesSchema.parse(resource),
        childrenMapper: [
          {
            key: "scanId",
            label: "scans" as const,
            mapper: ({ scanId, scanCreatedAt, scanStatus, scanStatusMessage, scanType }) => ({
              id: scanId,
              type: scanType,
              createdAt: scanCreatedAt,
              status: scanStatus,
              statusMessage: scanStatusMessage
            })
          },
          {
            key: "findingId",
            label: "findings" as const,
            mapper: ({ findingId }) => ({
              id: findingId
            })
          }
        ]
      });
      return results.map(({ scans, findings, ...resource }) => {
        const lastScan =
          scans && scans.length
            ? scans.reduce((latest, current) => {
                return new Date(current.createdAt) > new Date(latest.createdAt) ? current : latest;
              })
            : null;
        return {
          ...resource,
          lastScanStatus: lastScan?.status ?? null,
          lastScanStatusMessage: lastScan?.statusMessage ?? null,
          lastScannedAt: lastScan?.createdAt ?? null,
          unresolvedFindings: findings?.length ?? 0
        };
      });
    } catch (error) {
      throw new DatabaseError({ error, name: "Find with Details - Secret Scanning Resource" });
    }
  };
  // Scans for a data source with resolved/unresolved finding counts and the
  // owning resource's name attached.
  const findScansWithDetailsByDataSourceId = async (dataSourceId: string, tx?: Knex) => {
    try {
      // TODO (scott): this query will probably need to be optimized
      const scans = await (tx || db.replicaNode())(TableName.SecretScanningScan)
        .leftJoin(
          TableName.SecretScanningResource,
          `${TableName.SecretScanningResource}.id`,
          `${TableName.SecretScanningScan}.resourceId`
        )
        .where(`${TableName.SecretScanningResource}.dataSourceId`, dataSourceId)
        .leftJoin(
          TableName.SecretScanningFinding,
          `${TableName.SecretScanningFinding}.scanId`,
          `${TableName.SecretScanningScan}.id`
        )
        .select(selectAllTableCols(TableName.SecretScanningScan))
        .select(
          db.ref("status").withSchema(TableName.SecretScanningFinding).as("findingStatus"),
          db.ref("id").withSchema(TableName.SecretScanningFinding).as("findingId"),
          db.ref("name").withSchema(TableName.SecretScanningResource).as("resourceName")
        );
      if (!scans.length) return [];
      const results = sqlNestRelationships({
        data: scans,
        key: "id",
        parentMapper: (scan) => SecretScanningScansSchema.parse(scan),
        childrenMapper: [
          {
            key: "findingId",
            label: "findings" as const,
            mapper: ({ findingId, findingStatus }) => ({
              id: findingId,
              status: findingStatus
            })
          },
          {
            key: "resourceId",
            label: "resources" as const,
            mapper: ({ resourceName }) => ({
              name: resourceName
            })
          }
        ]
      });
      return results.map(({ findings, resources, ...scan }) => {
        return {
          ...scan,
          unresolvedFindings:
            findings?.filter((finding) => finding.status === SecretScanningFindingStatus.Unresolved).length ?? 0,
          resolvedFindings:
            findings?.filter((finding) => finding.status !== SecretScanningFindingStatus.Unresolved).length ?? 0,
          // Scans are filtered by resource dataSourceId above, so each scan has
          // exactly one joined resource row.
          resourceName: resources[0].name
        };
      });
    } catch (error) {
      throw new DatabaseError({ error, name: "Find with Details By Data Source ID - Secret Scanning Scan" });
    }
  };
  // Bare scan rows for a data source (no finding/resource enrichment).
  const findScansByDataSourceId = async (dataSourceId: string, tx?: Knex) => {
    try {
      const scans = await (tx || db.replicaNode())(TableName.SecretScanningScan)
        .leftJoin(
          TableName.SecretScanningResource,
          `${TableName.SecretScanningResource}.id`,
          `${TableName.SecretScanningScan}.resourceId`
        )
        .where(`${TableName.SecretScanningResource}.dataSourceId`, dataSourceId)
        .select(selectAllTableCols(TableName.SecretScanningScan));
      return scans;
    } catch (error) {
      throw new DatabaseError({ error, name: "Find By Data Source ID - Secret Scanning Scan" });
    }
  };
  // Data source ORM methods are overridden with connection-aware variants;
  // the remaining ORMs are exposed as-is.
  return {
    dataSources: {
      ...dataSourceOrm,
      find: findDataSource,
      findById: findDataSourceById,
      findOne: findOneDataSource,
      create: createDataSource,
      updateById: updateDataSourceById,
      deleteById: deleteDataSourceById,
      findWithDetails: findDataSourceWithDetails
    },
    resources: {
      ...resourceOrm,
      findWithDetails: findResourcesWithDetails
    },
    scans: {
      ...scanOrm,
      findWithDetailsByDataSourceId: findScansWithDetailsByDataSourceId,
      findByDataSourceId: findScansByDataSourceId
    },
    findings: findingOrm,
    configs: configOrm
  };
};

View File

@@ -0,0 +1,33 @@
// Supported data source providers.
export enum SecretScanningDataSource {
  GitHub = "github"
}

// Lifecycle states of a scan job.
export enum SecretScanningScanStatus {
  Completed = "completed",
  Failed = "failed",
  Queued = "queued",
  Scanning = "scanning"
}

// Full scans cover an entire resource; diff scans cover pushed changes only.
export enum SecretScanningScanType {
  FullScan = "full-scan",
  DiffScan = "diff-scan"
}

// Triage states for a finding; only "unresolved" counts as open.
export enum SecretScanningFindingStatus {
  Resolved = "resolved",
  Unresolved = "unresolved",
  FalsePositive = "false-positive",
  Ignore = "ignore"
}

// Kinds of scannable resources a data source can expose.
export enum SecretScanningResource {
  Repository = "repository",
  Project = "project"
}

// Severity assigned to findings (git scans currently always use High).
export enum SecretScanningFindingSeverity {
  High = "high",
  Medium = "medium",
  Low = "low"
}

View File

@@ -0,0 +1,19 @@
import { GitHubSecretScanningFactory } from "@app/ee/services/secret-scanning-v2/github/github-secret-scanning-factory";
import { SecretScanningDataSource } from "./secret-scanning-v2-enums";
import {
TQueueSecretScanningResourceDiffScan,
TSecretScanningDataSourceCredentials,
TSecretScanningDataSourceWithConnection,
TSecretScanningFactory
} from "./secret-scanning-v2-types";
// Common implementation shape shared by all data source factories.
type TSecretScanningFactoryImplementation = TSecretScanningFactory<
  TSecretScanningDataSourceWithConnection,
  TSecretScanningDataSourceCredentials,
  TQueueSecretScanningResourceDiffScan["payload"]
>;

// Dispatch table from data source type to its factory. The cast is required
// because each factory is typed against its own concrete connection type,
// which is narrower than the union used here.
export const SECRET_SCANNING_FACTORY_MAP: Record<SecretScanningDataSource, TSecretScanningFactoryImplementation> = {
  [SecretScanningDataSource.GitHub]: GitHubSecretScanningFactory as TSecretScanningFactoryImplementation
};

View File

@@ -0,0 +1,140 @@
import { AxiosError } from "axios";
import { exec, execFile } from "child_process";
import RE2 from "re2";
import { readFindingsFile } from "@app/ee/services/secret-scanning/secret-scanning-queue/secret-scanning-fns";
import { SecretMatch } from "@app/ee/services/secret-scanning/secret-scanning-queue/secret-scanning-queue-types";
import { GITHUB_SECRET_SCANNING_DATA_SOURCE_LIST_OPTION } from "@app/ee/services/secret-scanning-v2/github";
import { titleCaseToCamelCase } from "@app/lib/fn";
import { SecretScanningDataSource, SecretScanningFindingSeverity } from "./secret-scanning-v2-enums";
import { TCloneRepository, TGetFindingsPayload, TSecretScanningDataSourceListItem } from "./secret-scanning-v2-types";
// Registry of catalog list options for every supported data source type.
const SECRET_SCANNING_SOURCE_LIST_OPTIONS: Record<SecretScanningDataSource, TSecretScanningDataSourceListItem> = {
  [SecretScanningDataSource.GitHub]: GITHUB_SECRET_SCANNING_DATA_SOURCE_LIST_OPTION
};
// Returns every registered data source list option, alphabetized by name.
export const listSecretScanningDataSourceOptions = () => {
  const options = Object.values(SECRET_SCANNING_SOURCE_LIST_OPTIONS);
  // Object.values already returns a fresh array, so in-place sort is safe.
  options.sort((optionA, optionB) => optionA.name.localeCompare(optionB.name));
  return options;
};
// Bare-clones a git repository to `repoPath`.
//
// Uses execFile (no shell) so that the clone URL — which embeds a short-lived
// access token — and the destination path are passed as literal arguments and
// can never be interpreted as shell metacharacters (command injection hardening
// over the previous string-interpolated `exec` call).
//
// @param cloneUrl - authenticated https clone URL
// @param repoPath - destination directory for the bare clone
// @returns resolves when the clone completes; rejects with the git error otherwise
export const cloneRepository = async ({ cloneUrl, repoPath }: TCloneRepository): Promise<void> => {
  return new Promise((resolve, reject) => {
    execFile("git", ["clone", cloneUrl, repoPath, "--bare"], (error) => {
      if (error) {
        reject(error);
      } else {
        resolve();
      }
    });
  });
};
// Runs the Infisical CLI scanner over a local directory, writing findings to
// `outputPath` (optionally with a custom scan config).
//
// Fix: `inputPath` and `configPath` are now quoted like `outputPath` already
// was, so temp directories containing spaces or shell-special characters
// cannot break the command or inject extra arguments.
//
// @param inputPath - directory to scan (typically a bare git clone)
// @param outputPath - file path the scanner writes its findings report to
// @param configPath - optional scanner config file
// @returns resolves on success or when findings are detected; rejects on real failures
export function scanDirectory(inputPath: string, outputPath: string, configPath?: string): Promise<void> {
  return new Promise((resolve, reject) => {
    const command = `cd "${inputPath}" && infisical scan --exit-code=77 -r "${outputPath}" ${configPath ? `-c "${configPath}"` : ""}`;
    exec(command, (error) => {
      // Exit code 77 signals "findings detected", which is not a failure here.
      if (error && error.code !== 77) {
        reject(error);
      } else {
        resolve();
      }
    });
  });
}
/**
 * Scans a cloned git repository and maps the raw scanner output into finding
 * payloads. The raw Match/Secret fields contain the leaked value itself and
 * are deliberately discarded so the secret is never persisted.
 */
export const scanGitRepositoryAndGetFindings = async (
  scanPath: string,
  findingsPath: string,
  configPath?: string
): TGetFindingsPayload => {
  await scanDirectory(scanPath, findingsPath, configPath);
  const rawFindings = JSON.parse(await readFindingsFile(findingsPath)) as SecretMatch[];
  return rawFindings.map((rawFinding) => {
    // strip the secret material before building the stored payload
    const { Match, Secret, ...finding } = rawFinding;
    return {
      details: titleCaseToCamelCase(finding),
      // fingerprint is extended with the start column to disambiguate multiple
      // findings on the same line
      fingerprint: `${finding.Fingerprint}:${finding.StartColumn}`,
      severity: SecretScanningFindingSeverity.High,
      rule: finding.RuleID
    };
  });
};
/**
 * Reduces a unified-diff patch to only its added content: addition lines keep
 * their text (minus the leading "+"), while every other line — context,
 * deletions, and "+++" file headers — becomes an empty line. Blanking instead
 * of dropping preserves line positions relative to the new file.
 */
export const replaceNonChangesWithNewlines = (patch: string) => {
  const result: string[] = [];
  for (const rawLine of patch.split("\n")) {
    const isAddition = rawLine.startsWith("+") && !rawLine.startsWith("+++");
    result.push(isAddition ? rawLine.slice(1) : "");
  }
  return result.join("\n");
};
const HunkHeaderRegex = new RE2(/^@@ -\d+(?:,\d+)? \+(\d+)(?:,(\d+))? @@/);
/**
 * Translates a 1-based position within a patch (as reported against the raw
 * patch text, hunk headers included) to the corresponding line number in the
 * NEW version of the file.
 *
 * Walks the patch line by line: a hunk header resets the new-file cursor to
 * the header's "+" start line; added ("+") and context (" ") lines advance the
 * cursor; deletion ("-") lines do not, since they don't exist in the new file.
 * Note the target-position check runs BEFORE the cursor is advanced for that
 * line, so the first line after a hunk header maps to the header's start line.
 *
 * If patchLineNumber is past the end of the patch, the last computed line
 * number is returned.
 */
export const convertPatchLineToFileLineNumber = (patch: string, patchLineNumber: number) => {
  const lines = patch.split("\n");
  let currentPatchLine = 0;
  let currentNewLine = 0;
  for (const line of lines) {
    currentPatchLine += 1;
    // Hunk header: @@ -a,b +c,d @@
    // NOTE(review): relies on node-re2's instance `match` helper — confirm the
    // installed re2 version exposes it (standard RegExp objects do not).
    const hunkHeaderMatch = HunkHeaderRegex.match(line);
    if (hunkHeaderMatch) {
      const startLine = parseInt(hunkHeaderMatch[1], 10);
      currentNewLine = startLine;
      // eslint-disable-next-line no-continue
      continue;
    }
    if (currentPatchLine === patchLineNumber) {
      return currentNewLine;
    }
    if (line.startsWith("+++")) {
      // eslint-disable-next-line no-continue
      continue; // skip file metadata lines
    }
    // Advance only if the line exists in the new file
    if (line.startsWith("+") || line.startsWith(" ")) {
      currentNewLine += 1;
    }
  }
  return currentNewLine;
};
// Upper bound for persisted scan status messages (DB column / UI friendliness).
const MAX_MESSAGE_LENGTH = 1024;

/**
 * Normalizes an unknown scan error into a bounded, human-readable message.
 * Axios failures prefer the serialized response body (more actionable than the
 * generic message); anything else falls back to the Error message. The result
 * is truncated with an ellipsis to MAX_MESSAGE_LENGTH characters.
 */
export const parseScanErrorMessage = (err: unknown): string => {
  let errorMessage: string;
  if (err instanceof AxiosError) {
    if (err?.response?.data) {
      errorMessage = JSON.stringify(err?.response?.data);
    } else {
      errorMessage = err?.message ?? "An unknown error occurred.";
    }
  } else {
    errorMessage = (err as Error)?.message || "An unknown error occurred.";
  }
  if (errorMessage.length <= MAX_MESSAGE_LENGTH) {
    return errorMessage;
  }
  return `${errorMessage.substring(0, MAX_MESSAGE_LENGTH - 3)}...`;
};

View File

@@ -0,0 +1,14 @@
import { SecretScanningDataSource } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
// Human-readable display name for each data source type (used in error messages
// and API docs).
export const SECRET_SCANNING_DATA_SOURCE_NAME_MAP: Record<SecretScanningDataSource, string> = {
  [SecretScanningDataSource.GitHub]: "GitHub"
};
// App connection type each data source authenticates through; note GitHub
// scanning uses the dedicated GitHubRadar connection, not the generic GitHub one.
export const SECRET_SCANNING_DATA_SOURCE_CONNECTION_MAP: Record<SecretScanningDataSource, AppConnection> = {
  [SecretScanningDataSource.GitHub]: AppConnection.GitHubRadar
};
// Verb/noun pairs used to compose auto-scan description copy per provider
// (e.g. "scan on push to repositories").
export const AUTO_SYNC_DESCRIPTION_HELPER: Record<SecretScanningDataSource, { verb: string; noun: string }> = {
  [SecretScanningDataSource.GitHub]: { verb: "push", noun: "repositories" }
};

View File

@@ -0,0 +1,626 @@
import { join } from "path";
import { ProjectMembershipRole, TSecretScanningFindings } from "@app/db/schemas";
import { TAuditLogServiceFactory } from "@app/ee/services/audit-log/audit-log-service";
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import {
createTempFolder,
deleteTempFolder,
writeTextToFile
} from "@app/ee/services/secret-scanning/secret-scanning-queue/secret-scanning-fns";
import {
parseScanErrorMessage,
scanGitRepositoryAndGetFindings
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-fns";
import { KeyStorePrefixes, TKeyStoreFactory } from "@app/keystore/keystore";
import { getConfig } from "@app/lib/config/env";
import { BadRequestError, InternalServerError } from "@app/lib/errors";
import { logger } from "@app/lib/logger";
import { QueueJobs, QueueName, TQueueServiceFactory } from "@app/queue";
import { decryptAppConnection } from "@app/services/app-connection/app-connection-fns";
import { TAppConnection } from "@app/services/app-connection/app-connection-types";
import { ActorType } from "@app/services/auth/auth-type";
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
import { TProjectDALFactory } from "@app/services/project/project-dal";
import { TProjectMembershipDALFactory } from "@app/services/project-membership/project-membership-dal";
import { SmtpTemplates, TSmtpService } from "@app/services/smtp/smtp-service";
import { TSecretScanningV2DALFactory } from "./secret-scanning-v2-dal";
import {
SecretScanningDataSource,
SecretScanningResource,
SecretScanningScanStatus,
SecretScanningScanType
} from "./secret-scanning-v2-enums";
import { SECRET_SCANNING_FACTORY_MAP } from "./secret-scanning-v2-factory";
import {
TFindingsPayload,
TQueueSecretScanningDataSourceFullScan,
TQueueSecretScanningResourceDiffScan,
TQueueSecretScanningSendNotification,
TSecretScanningDataSourceWithConnection
} from "./secret-scanning-v2-types";
// Dependencies injected into the secret-scanning v2 queue service factory.
// NOTE(review): the name says "SecretRotation" but this is the secret-SCANNING
// queue dep type — looks like a copy-paste; consider renaming (internal only,
// referenced solely by the factory signature below).
type TSecretRotationV2QueueServiceFactoryDep = {
  queueService: TQueueServiceFactory;
  secretScanningV2DAL: TSecretScanningV2DALFactory;
  smtpService: Pick<TSmtpService, "sendMail">;
  projectMembershipDAL: Pick<TProjectMembershipDALFactory, "findAllProjectMembers">;
  projectDAL: Pick<TProjectDALFactory, "findById">;
  kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">;
  auditLogService: Pick<TAuditLogServiceFactory, "createAuditLog">;
  keyStore: Pick<TKeyStoreFactory, "acquireLock" | "getItem">;
};
// Public type of the resolved factory return value (factory is async, hence Awaited).
export type TSecretScanningV2QueueServiceFactory = Awaited<ReturnType<typeof secretScanningV2QueueServiceFactory>>;
/**
 * Registers the secret-scanning v2 queue workers (full scan, diff scan, and
 * status notification) and returns the enqueue helpers used by the service
 * layer. Must be awaited at startup so the pg-queue workers are running before
 * any jobs are enqueued.
 */
export const secretScanningV2QueueServiceFactory = async ({
  queueService,
  secretScanningV2DAL,
  projectMembershipDAL,
  projectDAL,
  smtpService,
  kmsService,
  auditLogService,
  keyStore
}: TSecretRotationV2QueueServiceFactoryDep) => {
  /**
   * Enqueues a full scan for every resource of a data source (or a single
   * resource when resourceExternalId is given). Resources are upserted and a
   * scan row created per resource inside one transaction before the jobs are
   * queued. Throws BadRequestError for user-facing conditions (no resources,
   * scan already in progress); everything else surfaces as InternalServerError.
   */
  const queueDataSourceFullScan = async (
    dataSource: TSecretScanningDataSourceWithConnection,
    resourceExternalId?: string
  ) => {
    try {
      const { type } = dataSource;
      const factory = SECRET_SCANNING_FACTORY_MAP[type]();
      const rawResources = await factory.listRawResources(dataSource);
      let filteredRawResources = rawResources;
      // TODO: should add individual resource fetch to factory
      if (resourceExternalId) {
        filteredRawResources = rawResources.filter((resource) => resource.externalId === resourceExternalId);
      }
      if (!filteredRawResources.length) {
        throw new BadRequestError({
          message: `${resourceExternalId ? `Resource with "ID" ${resourceExternalId} could not be found.` : "Data source has no resources to scan"}. Ensure your data source config is correct and not filtering out scanning resources.`
        });
      }
      // Reject early if any target resource already holds a scanning lock; the
      // lock itself is acquired by the worker, this is a best-effort pre-check.
      for (const resource of filteredRawResources) {
        // eslint-disable-next-line no-await-in-loop
        if (await keyStore.getItem(KeyStorePrefixes.SecretScanningLock(dataSource.id, resource.externalId))) {
          throw new BadRequestError({ message: `A scan is already in progress for resource "${resource.name}"` });
        }
      }
      await secretScanningV2DAL.resources.transaction(async (tx) => {
        const resources = await secretScanningV2DAL.resources.upsert(
          filteredRawResources.map((rawResource) => ({
            ...rawResource,
            dataSourceId: dataSource.id
          })),
          ["externalId", "dataSourceId"],
          tx
        );
        const scans = await secretScanningV2DAL.scans.insertMany(
          resources.map((resource) => ({
            resourceId: resource.id,
            type: SecretScanningScanType.FullScan
          })),
          tx
        );
        for (const scan of scans) {
          // eslint-disable-next-line no-await-in-loop
          await queueService.queuePg(QueueJobs.SecretScanningV2FullScan, {
            scanId: scan.id,
            resourceId: scan.resourceId,
            dataSourceId: dataSource.id
          });
        }
      });
    } catch (error) {
      logger.error(error, `Failed to queue full-scan for data source with ID "${dataSource.id}"`);
      if (error instanceof BadRequestError) throw error;
      throw new InternalServerError({ message: `Failed to queue scan: ${(error as Error).message}` });
    }
  };
  // Full-scan worker: clones/acquires the resource content via the provider
  // factory, runs the scanner, upserts findings, and records scan status.
  // Per-resource distributed lock (5 min TTL) prevents concurrent scans.
  await queueService.startPg<QueueName.SecretScanningV2>(
    QueueJobs.SecretScanningV2FullScan,
    async ([job]) => {
      const { scanId, resourceId, dataSourceId } = job.data as TQueueSecretScanningDataSourceFullScan;
      const { retryCount, retryLimit } = job;
      const logDetails = `[scanId=${scanId}] [resourceId=${resourceId}] [dataSourceId=${dataSourceId}] [jobId=${job.id}] retryCount=[${retryCount}/${retryLimit}]`;
      const tempFolder = await createTempFolder();
      const dataSource = await secretScanningV2DAL.dataSources.findById(dataSourceId);
      if (!dataSource) throw new Error(`Data source with ID "${dataSourceId}" not found`);
      const resource = await secretScanningV2DAL.resources.findById(resourceId);
      if (!resource) throw new Error(`Resource with ID "${resourceId}" not found`);
      let lock: Awaited<ReturnType<typeof keyStore.acquireLock>> | undefined;
      try {
        try {
          lock = await keyStore.acquireLock(
            [KeyStorePrefixes.SecretScanningLock(dataSource.id, resource.externalId)],
            60 * 1000 * 5
          );
        } catch (e) {
          throw new Error("Failed to acquire scanning lock.");
        }
        await secretScanningV2DAL.scans.update(
          { id: scanId },
          {
            status: SecretScanningScanStatus.Scanning
          }
        );
        let connection: TAppConnection | null = null;
        if (dataSource.connection) connection = await decryptAppConnection(dataSource.connection, kmsService);
        const factory = SECRET_SCANNING_FACTORY_MAP[dataSource.type as SecretScanningDataSource]();
        const findingsPath = join(tempFolder, "findings.json");
        // Provider factory materializes the scannable content into tempFolder
        // and returns the path to scan.
        const scanPath = await factory.getFullScanPath({
          dataSource: {
            ...dataSource,
            connection
          } as TSecretScanningDataSourceWithConnection,
          resourceName: resource.name,
          tempFolder
        });
        const config = await secretScanningV2DAL.configs.findOne({
          projectId: dataSource.projectId
        });
        let configPath: string | undefined;
        if (config && config.content) {
          configPath = join(tempFolder, "infisical-scan.toml");
          await writeTextToFile(configPath, config.content);
        }
        let findingsPayload: TFindingsPayload;
        switch (resource.type) {
          case SecretScanningResource.Repository:
          case SecretScanningResource.Project:
            findingsPayload = await scanGitRepositoryAndGetFindings(scanPath, findingsPath, configPath);
            break;
          default:
            throw new Error("Unhandled resource type");
        }
        // Upsert findings and mark the scan complete atomically; upsert keyed
        // on (projectId, fingerprint) so re-detected findings are refreshed,
        // not duplicated.
        const allFindings = await secretScanningV2DAL.findings.transaction(async (tx) => {
          let findings: TSecretScanningFindings[] = [];
          if (findingsPayload.length) {
            findings = await secretScanningV2DAL.findings.upsert(
              findingsPayload.map((finding) => ({
                ...finding,
                projectId: dataSource.projectId,
                dataSourceName: dataSource.name,
                dataSourceType: dataSource.type,
                resourceName: resource.name,
                resourceType: resource.type,
                scanId
              })),
              ["projectId", "fingerprint"],
              tx,
              ["resourceName", "dataSourceName"]
            );
          }
          await secretScanningV2DAL.scans.update(
            { id: scanId },
            {
              status: SecretScanningScanStatus.Completed,
              statusMessage: null
            }
          );
          return findings;
        });
        // Only findings first seen by THIS scan trigger a notification.
        const newFindings = allFindings.filter((finding) => finding.scanId === scanId);
        if (newFindings.length) {
          await queueService.queuePg(QueueJobs.SecretScanningV2SendNotification, {
            status: SecretScanningScanStatus.Completed,
            resourceName: resource.name,
            isDiffScan: false,
            dataSource,
            numberOfSecrets: newFindings.length,
            scanId
          });
        }
        await auditLogService.createAuditLog({
          projectId: dataSource.projectId,
          actor: {
            type: ActorType.PLATFORM,
            metadata: {}
          },
          event: {
            type: EventType.SECRET_SCANNING_DATA_SOURCE_SCAN,
            metadata: {
              dataSourceId: dataSource.id,
              dataSourceType: dataSource.type,
              resourceId: resource.id,
              resourceType: resource.type,
              scanId,
              scanStatus: SecretScanningScanStatus.Completed,
              scanType: SecretScanningScanType.FullScan,
              numberOfSecretsDetected: findingsPayload.length
            }
          }
        });
        logger.info(`secretScanningV2Queue: Full Scan Complete ${logDetails} findings=[${findingsPayload.length}]`);
      } catch (error) {
        // Only persist failure + notify once the job has exhausted its retries.
        if (retryCount === retryLimit) {
          const errorMessage = parseScanErrorMessage(error);
          await secretScanningV2DAL.scans.update(
            { id: scanId },
            {
              status: SecretScanningScanStatus.Failed,
              statusMessage: errorMessage
            }
          );
          await queueService.queuePg(QueueJobs.SecretScanningV2SendNotification, {
            status: SecretScanningScanStatus.Failed,
            resourceName: resource.name,
            dataSource,
            errorMessage
          });
          await auditLogService.createAuditLog({
            projectId: dataSource.projectId,
            actor: {
              type: ActorType.PLATFORM,
              metadata: {}
            },
            event: {
              type: EventType.SECRET_SCANNING_DATA_SOURCE_SCAN,
              metadata: {
                dataSourceId: dataSource.id,
                dataSourceType: dataSource.type,
                resourceId: resource.id,
                resourceType: resource.type,
                scanId,
                scanStatus: SecretScanningScanStatus.Failed,
                scanType: SecretScanningScanType.FullScan
              }
            }
          });
        }
        logger.error(error, `secretScanningV2Queue: Full Scan Failed ${logDetails}`);
        throw error;
      } finally {
        // Always clean up scratch space and release the distributed lock.
        await deleteTempFolder(tempFolder);
        await lock?.release();
      }
    },
    {
      batchSize: 1,
      workerCount: 20,
      pollingIntervalSeconds: 1
    }
  );
  /**
   * Enqueues a diff scan from a provider webhook payload (e.g. a GitHub push).
   * Upserts the resource and creates the scan row transactionally, then queues
   * the worker job. Failures are logged but not rethrown — webhook delivery
   * should not be failed for queueing issues.
   */
  const queueResourceDiffScan = async ({
    payload,
    dataSourceId,
    dataSourceType
  }: Pick<TQueueSecretScanningResourceDiffScan, "payload" | "dataSourceId" | "dataSourceType">) => {
    const factory = SECRET_SCANNING_FACTORY_MAP[dataSourceType as SecretScanningDataSource]();
    const resourcePayload = factory.getDiffScanResourcePayload(payload);
    try {
      const { resourceId, scanId } = await secretScanningV2DAL.resources.transaction(async (tx) => {
        const [resource] = await secretScanningV2DAL.resources.upsert(
          [
            {
              ...resourcePayload,
              dataSourceId
            }
          ],
          ["externalId", "dataSourceId"],
          tx
        );
        const scan = await secretScanningV2DAL.scans.create(
          {
            resourceId: resource.id,
            type: SecretScanningScanType.DiffScan
          },
          tx
        );
        return {
          resourceId: resource.id,
          scanId: scan.id
        };
      });
      await queueService.queuePg(QueueJobs.SecretScanningV2DiffScan, {
        payload,
        dataSourceId,
        dataSourceType,
        scanId,
        resourceId
      });
    } catch (error) {
      logger.error(
        error,
        `secretScanningV2Queue: Failed to queue diff scan [dataSourceId=${dataSourceId}] [resourceExternalId=${resourcePayload.externalId}]`
      );
    }
  };
  // Diff-scan worker: delegates scanning of the webhook payload to the provider
  // factory (no repository clone / no distributed lock, unlike the full scan),
  // then persists findings and status exactly as the full-scan worker does.
  await queueService.startPg<QueueName.SecretScanningV2>(
    QueueJobs.SecretScanningV2DiffScan,
    async ([job]) => {
      const { payload, dataSourceId, resourceId, scanId } = job.data as TQueueSecretScanningResourceDiffScan;
      const { retryCount, retryLimit } = job;
      const logDetails = `[dataSourceId=${dataSourceId}] [scanId=${scanId}] [resourceId=${resourceId}] [jobId=${job.id}] retryCount=[${retryCount}/${retryLimit}]`;
      const dataSource = await secretScanningV2DAL.dataSources.findById(dataSourceId);
      if (!dataSource) throw new Error(`Data source with ID "${dataSourceId}" not found`);
      const resource = await secretScanningV2DAL.resources.findById(resourceId);
      if (!resource) throw new Error(`Resource with ID "${resourceId}" not found`);
      const factory = SECRET_SCANNING_FACTORY_MAP[dataSource.type as SecretScanningDataSource]();
      const tempFolder = await createTempFolder();
      try {
        await secretScanningV2DAL.scans.update(
          { id: scanId },
          {
            status: SecretScanningScanStatus.Scanning
          }
        );
        let connection: TAppConnection | null = null;
        if (dataSource.connection) connection = await decryptAppConnection(dataSource.connection, kmsService);
        const config = await secretScanningV2DAL.configs.findOne({
          projectId: dataSource.projectId
        });
        let configPath: string | undefined;
        if (config && config.content) {
          configPath = join(tempFolder, "infisical-scan.toml");
          await writeTextToFile(configPath, config.content);
        }
        const findingsPayload = await factory.getDiffScanFindingsPayload({
          dataSource: {
            ...dataSource,
            connection
          } as TSecretScanningDataSourceWithConnection,
          resourceName: resource.name,
          payload,
          configPath
        });
        const allFindings = await secretScanningV2DAL.findings.transaction(async (tx) => {
          let findings: TSecretScanningFindings[] = [];
          if (findingsPayload.length) {
            findings = await secretScanningV2DAL.findings.upsert(
              findingsPayload.map((finding) => ({
                ...finding,
                projectId: dataSource.projectId,
                dataSourceName: dataSource.name,
                dataSourceType: dataSource.type,
                resourceName: resource.name,
                resourceType: resource.type,
                scanId
              })),
              ["projectId", "fingerprint"],
              tx,
              ["resourceName", "dataSourceName"]
            );
          }
          await secretScanningV2DAL.scans.update(
            { id: scanId },
            {
              status: SecretScanningScanStatus.Completed
            }
          );
          return findings;
        });
        const newFindings = allFindings.filter((finding) => finding.scanId === scanId);
        if (newFindings.length) {
          await queueService.queuePg(QueueJobs.SecretScanningV2SendNotification, {
            status: SecretScanningScanStatus.Completed,
            resourceName: resource.name,
            isDiffScan: true,
            dataSource,
            numberOfSecrets: newFindings.length,
            scanId
          });
        }
        await auditLogService.createAuditLog({
          projectId: dataSource.projectId,
          actor: {
            type: ActorType.PLATFORM,
            metadata: {}
          },
          event: {
            type: EventType.SECRET_SCANNING_DATA_SOURCE_SCAN,
            metadata: {
              dataSourceId: dataSource.id,
              dataSourceType: dataSource.type,
              resourceId,
              resourceType: resource.type,
              scanId,
              scanStatus: SecretScanningScanStatus.Completed,
              scanType: SecretScanningScanType.DiffScan,
              numberOfSecretsDetected: findingsPayload.length
            }
          }
        });
        logger.info(`secretScanningV2Queue: Diff Scan Complete ${logDetails}`);
      } catch (error) {
        if (retryCount === retryLimit) {
          const errorMessage = parseScanErrorMessage(error);
          await secretScanningV2DAL.scans.update(
            { id: scanId },
            {
              status: SecretScanningScanStatus.Failed,
              statusMessage: errorMessage
            }
          );
          await queueService.queuePg(QueueJobs.SecretScanningV2SendNotification, {
            status: SecretScanningScanStatus.Failed,
            resourceName: resource.name,
            dataSource,
            errorMessage
          });
          await auditLogService.createAuditLog({
            projectId: dataSource.projectId,
            actor: {
              type: ActorType.PLATFORM,
              metadata: {}
            },
            event: {
              type: EventType.SECRET_SCANNING_DATA_SOURCE_SCAN,
              metadata: {
                dataSourceId: dataSource.id,
                dataSourceType: dataSource.type,
                resourceId: resource.id,
                resourceType: resource.type,
                scanId,
                scanStatus: SecretScanningScanStatus.Failed,
                scanType: SecretScanningScanType.DiffScan
              }
            }
          });
        }
        logger.error(error, `secretScanningV2Queue: Diff Scan Failed ${logDetails}`);
        throw error;
      } finally {
        await deleteTempFolder(tempFolder);
      }
    },
    {
      batchSize: 1,
      workerCount: 20,
      pollingIntervalSeconds: 1
    }
  );
  // Notification worker: emails project admins on scan completion (secrets
  // found) or terminal failure. Silently no-ops when SMTP is not configured.
  await queueService.startPg<QueueName.SecretScanningV2>(
    QueueJobs.SecretScanningV2SendNotification,
    async ([job]) => {
      const { dataSource, resourceName, ...payload } = job.data as TQueueSecretScanningSendNotification;
      const appCfg = getConfig();
      if (!appCfg.isSmtpConfigured) return;
      try {
        const { projectId } = dataSource;
        logger.info(
          `secretScanningV2Queue: Sending Status Notification [dataSourceId=${dataSource.id}] [resourceName=${resourceName}] [status=${payload.status}]`
        );
        const projectMembers = await projectMembershipDAL.findAllProjectMembers(projectId);
        const project = await projectDAL.findById(projectId);
        const projectAdmins = projectMembers.filter((member) =>
          member.roles.some((role) => role.role === ProjectMembershipRole.Admin)
        );
        const timestamp = new Date().toISOString();
        await smtpService.sendMail({
          // non-null assertion is safe-ish: members without an email are
          // removed by the subsequent filter(Boolean)
          recipients: projectAdmins.map((member) => member.user.email!).filter(Boolean),
          template:
            payload.status === SecretScanningScanStatus.Completed
              ? SmtpTemplates.SecretScanningV2SecretsDetected
              : SmtpTemplates.SecretScanningV2ScanFailed,
          subjectLine:
            payload.status === SecretScanningScanStatus.Completed
              ? "Incident Alert: Secret(s) Leaked"
              : `Secret Scanning Failed`,
          substitutions:
            payload.status === SecretScanningScanStatus.Completed
              ? {
                  // NOTE(review): hard-coded author placeholder — presumably the
                  // real commit author should be threaded through; confirm.
                  authorName: "Jim",
                  authorEmail: "jim@infisical.com",
                  resourceName,
                  numberOfSecrets: payload.numberOfSecrets,
                  isDiffScan: payload.isDiffScan,
                  url: encodeURI(
                    `${appCfg.SITE_URL}/secret-scanning/${projectId}/findings?search=scanId:${payload.scanId}`
                  ),
                  timestamp
                }
              : {
                  dataSourceName: dataSource.name,
                  resourceName,
                  projectName: project.name,
                  timestamp,
                  errorMessage: payload.errorMessage,
                  url: encodeURI(
                    `${appCfg.SITE_URL}/secret-scanning/${projectId}/data-sources/${dataSource.type}/${dataSource.id}`
                  )
                }
        });
      } catch (error) {
        logger.error(
          error,
          `secretScanningV2Queue: Failed to Send Status Notification [dataSourceId=${dataSource.id}] [resourceName=${resourceName}] [status=${payload.status}]`
        );
        throw error;
      }
    },
    {
      batchSize: 1,
      workerCount: 5,
      pollingIntervalSeconds: 1
    }
  );
  return {
    queueDataSourceFullScan,
    queueResourceDiffScan
  };
};

View File

@@ -0,0 +1,99 @@
import { z } from "zod";
import { SecretScanningDataSourcesSchema, SecretScanningFindingsSchema } from "@app/db/schemas";
import { SecretScanningDataSource } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
import { SECRET_SCANNING_DATA_SOURCE_CONNECTION_MAP } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-maps";
import { SecretScanningDataSources } from "@app/lib/api-docs";
import { slugSchema } from "@app/server/lib/schemas";
// Options for the per-provider schema builders below: the provider's type
// literal and whether it requires an app connection.
type SecretScanningDataSourceSchemaOpts = {
  type: SecretScanningDataSource;
  isConnectionRequired: boolean;
};
/**
 * Builds the base response schema shared by all data source providers.
 * Provider-specific columns (type/connectionId/config/encryptedCredentials)
 * are omitted from the DB schema and re-declared: `type` is pinned to the
 * given literal, and `connectionId`/`connection` are either required or forced
 * to null depending on isConnectionRequired.
 */
export const BaseSecretScanningDataSourceSchema = ({
  type,
  isConnectionRequired
}: SecretScanningDataSourceSchemaOpts) =>
  SecretScanningDataSourcesSchema.omit({
    // unique to provider
    type: true,
    connectionId: true,
    config: true,
    encryptedCredentials: true
  }).extend({
    type: z.literal(type),
    connectionId: isConnectionRequired ? z.string().uuid() : z.null(),
    connection: isConnectionRequired
      ? z.object({
          // connection app must match the provider's mapped connection type
          app: z.literal(SECRET_SCANNING_DATA_SOURCE_CONNECTION_MAP[type]),
          name: z.string(),
          id: z.string().uuid()
        })
      : z.null()
  });
/**
 * Builds the create-request schema shared by all providers. Field descriptions
 * are pulled from the API-docs constants for the given type; connectionId is
 * required or disallowed (z.undefined) per isConnectionRequired.
 */
export const BaseCreateSecretScanningDataSourceSchema = ({
  type,
  isConnectionRequired
}: SecretScanningDataSourceSchemaOpts) =>
  z.object({
    name: slugSchema({ field: "name" }).describe(SecretScanningDataSources.CREATE(type).name),
    projectId: z
      .string()
      .trim()
      .min(1, "Project ID required")
      .describe(SecretScanningDataSources.CREATE(type).projectId),
    description: z
      .string()
      .trim()
      .max(256, "Description cannot exceed 256 characters")
      .nullish()
      .describe(SecretScanningDataSources.CREATE(type).description),
    connectionId: isConnectionRequired
      ? z.string().uuid().describe(SecretScanningDataSources.CREATE(type).connectionId)
      : z.undefined(),
    // auto-scan defaults to on for newly created data sources
    isAutoScanEnabled: z
      .boolean()
      .optional()
      .default(true)
      .describe(SecretScanningDataSources.CREATE(type).isAutoScanEnabled)
  });
/**
 * Builds the update-request schema shared by all providers. All fields are
 * optional/nullish (partial update); connection cannot be changed after create.
 */
export const BaseUpdateSecretScanningDataSourceSchema = (type: SecretScanningDataSource) =>
  z.object({
    name: slugSchema({ field: "name" }).describe(SecretScanningDataSources.UPDATE(type).name).optional(),
    description: z
      .string()
      .trim()
      .max(256, "Description cannot exceed 256 characters")
      .nullish()
      .describe(SecretScanningDataSources.UPDATE(type).description),
    isAutoScanEnabled: z.boolean().optional().describe(SecretScanningDataSources.UPDATE(type).isAutoScanEnabled)
  });
// Shape of the `details` blob stored for a git-repository finding — the
// camelCased scanner output (presumably gitleaks-style fields — confirm
// against the scanner's finding format) minus the secret material itself.
export const GitRepositoryScanFindingDetailsSchema = z.object({
  description: z.string(),
  startLine: z.number(),
  endLine: z.number(),
  startColumn: z.number(),
  endColumn: z.number(),
  file: z.string(),
  link: z.string(),
  symlinkFile: z.string(),
  commit: z.string(),
  entropy: z.number(),
  author: z.string(),
  email: z.string(),
  date: z.string(),
  message: z.string(),
  tags: z.string().array(),
  ruleID: z.string(),
  fingerprint: z.string()
});
// Base finding response schema: the DB row minus the provider-discriminated
// fields, which each provider's schema re-declares with precise types.
export const BaseSecretScanningFindingSchema = SecretScanningFindingsSchema.omit({
  dataSourceType: true,
  resourceType: true,
  details: true
});

View File

@@ -0,0 +1,875 @@
import { ForbiddenError } from "@casl/ability";
import { join } from "path";
import { ActionProjectType } from "@app/db/schemas";
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
import {
ProjectPermissionSecretScanningConfigActions,
ProjectPermissionSecretScanningDataSourceActions,
ProjectPermissionSecretScanningFindingActions,
ProjectPermissionSub
} from "@app/ee/services/permission/project-permission";
import {
createTempFolder,
deleteTempFolder,
scanContentAndGetFindings,
writeTextToFile
} from "@app/ee/services/secret-scanning/secret-scanning-queue/secret-scanning-fns";
import { githubSecretScanningService } from "@app/ee/services/secret-scanning-v2/github/github-secret-scanning-service";
import { SecretScanningFindingStatus } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
import { SECRET_SCANNING_FACTORY_MAP } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-factory";
import { listSecretScanningDataSourceOptions } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-fns";
import {
SECRET_SCANNING_DATA_SOURCE_CONNECTION_MAP,
SECRET_SCANNING_DATA_SOURCE_NAME_MAP
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-maps";
import {
TCreateSecretScanningDataSourceDTO,
TDeleteSecretScanningDataSourceDTO,
TFindSecretScanningDataSourceByIdDTO,
TFindSecretScanningDataSourceByNameDTO,
TListSecretScanningDataSourcesByProjectId,
TSecretScanningDataSource,
TSecretScanningDataSourceWithConnection,
TSecretScanningDataSourceWithDetails,
TSecretScanningFinding,
TSecretScanningResourceWithDetails,
TSecretScanningScanWithDetails,
TTriggerSecretScanningDataSourceDTO,
TUpdateSecretScanningDataSourceDTO,
TUpdateSecretScanningFindingDTO,
TUpsertSecretScanningConfigDTO
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-types";
import { DatabaseErrorCode } from "@app/lib/error-codes";
import { BadRequestError, DatabaseError, NotFoundError } from "@app/lib/errors";
import { OrgServiceActor } from "@app/lib/types";
import { decryptAppConnection } from "@app/services/app-connection/app-connection-fns";
import { TAppConnectionServiceFactory } from "@app/services/app-connection/app-connection-service";
import { TAppConnection } from "@app/services/app-connection/app-connection-types";
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
import { KmsDataKey } from "@app/services/kms/kms-types";
import { TSecretScanningV2DALFactory } from "./secret-scanning-v2-dal";
import { TSecretScanningV2QueueServiceFactory } from "./secret-scanning-v2-queue";
// Dependencies injected into the secret scanning v2 service factory; each is
// narrowed with Pick to the methods actually used.
export type TSecretScanningV2ServiceFactoryDep = {
  secretScanningV2DAL: TSecretScanningV2DALFactory;
  appConnectionService: Pick<TAppConnectionServiceFactory, "connectAppConnectionById">;
  permissionService: Pick<TPermissionServiceFactory, "getProjectPermission" | "getOrgPermission">;
  licenseService: Pick<TLicenseServiceFactory, "getPlan">;
  secretScanningV2Queue: Pick<
    TSecretScanningV2QueueServiceFactory,
    "queueDataSourceFullScan" | "queueResourceDiffScan"
  >;
  kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">;
};
// Public service type (factory is synchronous, so no Awaited wrapper needed).
export type TSecretScanningV2ServiceFactory = ReturnType<typeof secretScanningV2ServiceFactory>;
export const secretScanningV2ServiceFactory = ({
secretScanningV2DAL,
permissionService,
appConnectionService,
licenseService,
secretScanningV2Queue,
kmsService
}: TSecretScanningV2ServiceFactoryDep) => {
const $checkListSecretScanningDataSourcesByProjectIdPermissions = async (
projectId: string,
actor: OrgServiceActor
) => {
const plan = await licenseService.getPlan(actor.orgId);
if (!plan.secretScanning)
throw new BadRequestError({
message:
"Failed to access Secret Scanning Data Sources due to plan restriction. Upgrade plan to enable Secret Scanning."
});
const { permission } = await permissionService.getProjectPermission({
actor: actor.type,
actorId: actor.id,
actorAuthMethod: actor.authMethod,
actorOrgId: actor.orgId,
actionProjectType: ActionProjectType.SecretScanning,
projectId
});
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionSecretScanningDataSourceActions.Read,
ProjectPermissionSub.SecretScanningDataSources
);
};
const listSecretScanningDataSourcesByProjectId = async (
{ projectId, type }: TListSecretScanningDataSourcesByProjectId,
actor: OrgServiceActor
) => {
await $checkListSecretScanningDataSourcesByProjectIdPermissions(projectId, actor);
const dataSources = await secretScanningV2DAL.dataSources.find({
...(type && { type }),
projectId
});
return dataSources as TSecretScanningDataSource[];
};
const listSecretScanningDataSourcesWithDetailsByProjectId = async (
{ projectId, type }: TListSecretScanningDataSourcesByProjectId,
actor: OrgServiceActor
) => {
await $checkListSecretScanningDataSourcesByProjectIdPermissions(projectId, actor);
const dataSources = await secretScanningV2DAL.dataSources.findWithDetails({
...(type && { type }),
projectId
});
return dataSources as TSecretScanningDataSourceWithDetails[];
};
const findSecretScanningDataSourceById = async (
{ type, dataSourceId }: TFindSecretScanningDataSourceByIdDTO,
actor: OrgServiceActor
) => {
const plan = await licenseService.getPlan(actor.orgId);
if (!plan.secretScanning)
throw new BadRequestError({
message:
"Failed to access Secret Scanning Data Source due to plan restriction. Upgrade plan to enable Secret Scanning."
});
const dataSource = await secretScanningV2DAL.dataSources.findById(dataSourceId);
if (!dataSource)
throw new NotFoundError({
message: `Could not find ${SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type]} Data Source with ID "${dataSourceId}"`
});
const { permission } = await permissionService.getProjectPermission({
actor: actor.type,
actorId: actor.id,
actorAuthMethod: actor.authMethod,
actorOrgId: actor.orgId,
actionProjectType: ActionProjectType.SecretScanning,
projectId: dataSource.projectId
});
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionSecretScanningDataSourceActions.Read,
ProjectPermissionSub.SecretScanningDataSources
);
if (type !== dataSource.type)
throw new BadRequestError({
message: `Secret Scanning Data Source with ID "${dataSourceId}" is not configured for ${SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type]}`
});
return dataSource as TSecretScanningDataSource;
};
const findSecretScanningDataSourceByName = async (
{ type, sourceName, projectId }: TFindSecretScanningDataSourceByNameDTO,
actor: OrgServiceActor
) => {
const plan = await licenseService.getPlan(actor.orgId);
if (!plan.secretScanning)
throw new BadRequestError({
message:
"Failed to access Secret Scanning Data Source due to plan restriction. Upgrade plan to enable Secret Scanning."
});
// we prevent conflicting names within a folder
const dataSource = await secretScanningV2DAL.dataSources.findOne({
name: sourceName,
projectId
});
if (!dataSource)
throw new NotFoundError({
message: `Could not find ${SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type]} Data Source with name "${sourceName}"`
});
const { permission } = await permissionService.getProjectPermission({
actor: actor.type,
actorId: actor.id,
actorAuthMethod: actor.authMethod,
actorOrgId: actor.orgId,
actionProjectType: ActionProjectType.SecretScanning,
projectId
});
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionSecretScanningDataSourceActions.Read,
ProjectPermissionSub.SecretScanningDataSources
);
if (type !== dataSource.type)
throw new BadRequestError({
message: `Secret Scanning Data Source with ID "${dataSource.id}" is not configured for ${SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type]}`
});
return dataSource as TSecretScanningDataSource;
};
/**
 * Creates a Secret Scanning Data Source in a project.
 *
 * Order of operations: plan gate → project permission check → (optional) app
 * connection validation → factory `initialize` (which persists the row inside a
 * transaction via the callback) → optional initial full scan.
 */
const createSecretScanningDataSource = async (
  payload: TCreateSecretScanningDataSourceDTO,
  actor: OrgServiceActor
) => {
  // Secret Scanning is plan-gated; reject before doing any work.
  const plan = await licenseService.getPlan(actor.orgId);
  if (!plan.secretScanning)
    throw new BadRequestError({
      message:
        "Failed to create Secret Scanning Data Source due to plan restriction. Upgrade plan to enable Secret Scanning."
    });
  const { permission } = await permissionService.getProjectPermission({
    actor: actor.type,
    actorId: actor.id,
    actorAuthMethod: actor.authMethod,
    actorOrgId: actor.orgId,
    actionProjectType: ActionProjectType.SecretScanning,
    projectId: payload.projectId
  });
  ForbiddenError.from(permission).throwUnlessCan(
    ProjectPermissionSecretScanningDataSourceActions.Create,
    ProjectPermissionSub.SecretScanningDataSources
  );
  let connection: TAppConnection | null = null;
  if (payload.connectionId) {
    // validates permission to connect and app is valid for data source
    connection = await appConnectionService.connectAppConnectionById(
      SECRET_SCANNING_DATA_SOURCE_CONNECTION_MAP[payload.type],
      payload.connectionId,
      actor
    );
  }
  // Type-specific factory drives provider setup (e.g. webhook registration).
  const factory = SECRET_SCANNING_FACTORY_MAP[payload.type]();
  try {
    const createdDataSource = await factory.initialize(
      {
        payload,
        connection: connection as TSecretScanningDataSourceWithConnection["connection"],
        secretScanningV2DAL
      },
      // Callback invoked by the factory once it has provider-side credentials;
      // persists the data source row (with encrypted credentials) transactionally.
      async ({ credentials, externalId }) => {
        let encryptedCredentials: Buffer | null = null;
        if (credentials) {
          // Credentials are encrypted with the project's secret-manager data key.
          const { encryptor } = await kmsService.createCipherPairWithDataKey({
            type: KmsDataKey.SecretManager,
            projectId: payload.projectId
          });
          const { cipherTextBlob } = encryptor({
            plainText: Buffer.from(JSON.stringify(credentials))
          });
          encryptedCredentials = cipherTextBlob;
        }
        return secretScanningV2DAL.dataSources.transaction(async (tx) => {
          const dataSource = await secretScanningV2DAL.dataSources.create(
            {
              encryptedCredentials,
              externalId,
              ...payload
            },
            tx
          );
          // Runs inside the transaction so a failed post-init rolls back the row.
          await factory.postInitialization({
            payload,
            connection: connection as TSecretScanningDataSourceWithConnection["connection"],
            dataSourceId: dataSource.id,
            credentials
          });
          return dataSource;
        });
      }
    );
    if (payload.isAutoScanEnabled) {
      try {
        await secretScanningV2Queue.queueDataSourceFullScan({
          ...createdDataSource,
          connection
        } as TSecretScanningDataSourceWithConnection);
      } catch {
        // silently fail, don't want to block creation, they'll try scanning when they don't see anything and get the error
      }
    }
    return createdDataSource as TSecretScanningDataSource;
  } catch (err) {
    // Map the DB unique violation to a friendly duplicate-name error.
    if (err instanceof DatabaseError && (err.error as { code: string })?.code === DatabaseErrorCode.UniqueViolation) {
      throw new BadRequestError({
        message: `A Secret Scanning Data Source with the name "${payload.name}" already exists for the project with ID "${payload.projectId}"`
      });
    }
    throw err;
  }
};
/**
 * Updates an existing Secret Scanning Data Source after verifying the plan,
 * the caller's Edit permission, and that the requested type matches the row.
 */
const updateSecretScanningDataSource = async (
  { type, dataSourceId, ...payload }: TUpdateSecretScanningDataSourceDTO,
  actor: OrgServiceActor
) => {
  const orgPlan = await licenseService.getPlan(actor.orgId);
  if (!orgPlan.secretScanning) {
    throw new BadRequestError({
      message:
        "Failed to update Secret Scanning Data Source due to plan restriction. Upgrade plan to enable Secret Scanning."
    });
  }

  const existingDataSource = await secretScanningV2DAL.dataSources.findById(dataSourceId);
  if (!existingDataSource) {
    throw new NotFoundError({
      message: `Could not find ${SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type]} Data Source with ID "${dataSourceId}"`
    });
  }

  const { permission } = await permissionService.getProjectPermission({
    actor: actor.type,
    actorId: actor.id,
    actorAuthMethod: actor.authMethod,
    actorOrgId: actor.orgId,
    actionProjectType: ActionProjectType.SecretScanning,
    projectId: existingDataSource.projectId
  });

  ForbiddenError.from(permission).throwUnlessCan(
    ProjectPermissionSecretScanningDataSourceActions.Edit,
    ProjectPermissionSub.SecretScanningDataSources
  );

  if (type !== existingDataSource.type) {
    throw new BadRequestError({
      message: `Secret Scanning Data Source with ID "${dataSourceId}" is not configured for ${SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type]}`
    });
  }

  try {
    const updated = await secretScanningV2DAL.dataSources.updateById(dataSourceId, payload);
    return updated as TSecretScanningDataSource;
  } catch (err) {
    const isDuplicateName =
      err instanceof DatabaseError && (err.error as { code: string })?.code === DatabaseErrorCode.UniqueViolation;
    if (isDuplicateName) {
      throw new BadRequestError({
        message: `A Secret Scanning Data Source with the name "${payload.name}" already exists for the project with ID "${existingDataSource.projectId}"`
      });
    }
    throw err;
  }
};
/**
 * Deletes a Secret Scanning Data Source after plan, permission, and type checks.
 * Returns the deleted row as it existed before removal.
 */
const deleteSecretScanningDataSource = async (
  { type, dataSourceId }: TDeleteSecretScanningDataSourceDTO,
  actor: OrgServiceActor
) => {
  const orgPlan = await licenseService.getPlan(actor.orgId);
  if (!orgPlan.secretScanning) {
    throw new BadRequestError({
      message:
        "Failed to delete Secret Scanning Data Source due to plan restriction. Upgrade plan to enable Secret Scanning."
    });
  }

  const existingDataSource = await secretScanningV2DAL.dataSources.findById(dataSourceId);
  if (!existingDataSource) {
    throw new NotFoundError({
      message: `Could not find ${SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type]} Data Source with ID "${dataSourceId}"`
    });
  }

  const { permission } = await permissionService.getProjectPermission({
    actor: actor.type,
    actorId: actor.id,
    actorAuthMethod: actor.authMethod,
    actorOrgId: actor.orgId,
    actionProjectType: ActionProjectType.SecretScanning,
    projectId: existingDataSource.projectId
  });

  ForbiddenError.from(permission).throwUnlessCan(
    ProjectPermissionSecretScanningDataSourceActions.Delete,
    ProjectPermissionSub.SecretScanningDataSources
  );

  if (type !== existingDataSource.type) {
    throw new BadRequestError({
      message: `Secret Scanning Data Source with ID "${dataSourceId}" is not configured for ${SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type]}`
    });
  }

  // TODO: clean up webhooks
  await secretScanningV2DAL.dataSources.deleteById(dataSourceId);

  return existingDataSource as TSecretScanningDataSource;
};
/**
 * Manually triggers a full scan for a Data Source, optionally scoped to a
 * single resource. Decrypts the app connection (if any) so the queued job can
 * reach the provider, and resolves the resource's external ID when requested.
 */
const triggerSecretScanningDataSourceScan = async (
  { type, dataSourceId, resourceId }: TTriggerSecretScanningDataSourceDTO,
  actor: OrgServiceActor
) => {
  // Plan gate first — no lookups for unlicensed orgs.
  const plan = await licenseService.getPlan(actor.orgId);
  if (!plan.secretScanning)
    throw new BadRequestError({
      message:
        "Failed to trigger scan for Secret Scanning Data Source due to plan restriction. Upgrade plan to enable Secret Scanning."
    });
  const dataSource = await secretScanningV2DAL.dataSources.findById(dataSourceId);
  if (!dataSource)
    throw new NotFoundError({
      message: `Could not find ${SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type]} Data Source with ID "${dataSourceId}"`
    });
  const { permission } = await permissionService.getProjectPermission({
    actor: actor.type,
    actorId: actor.id,
    actorAuthMethod: actor.authMethod,
    actorOrgId: actor.orgId,
    actionProjectType: ActionProjectType.SecretScanning,
    projectId: dataSource.projectId
  });
  ForbiddenError.from(permission).throwUnlessCan(
    ProjectPermissionSecretScanningDataSourceActions.TriggerScans,
    ProjectPermissionSub.SecretScanningDataSources
  );
  // Guard against calling a typed endpoint with a mismatched data source.
  if (type !== dataSource.type)
    throw new BadRequestError({
      message: `Secret Scanning Data Source with ID "${dataSourceId}" is not configured for ${SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type]}`
    });
  // Connection credentials are stored encrypted; decrypt for the scan job.
  let connection: TAppConnection | null = null;
  if (dataSource.connection) connection = await decryptAppConnection(dataSource.connection, kmsService);
  let resourceExternalId: string | undefined;
  if (resourceId) {
    // Scope lookup by dataSourceId too, so a resource from another source 404s.
    const resource = await secretScanningV2DAL.resources.findOne({ id: resourceId, dataSourceId });
    if (!resource) {
      throw new NotFoundError({
        message: `Could not find Secret Scanning Resource with ID "${resourceId}" for Data Source with ID "${dataSourceId}"`
      });
    }
    resourceExternalId = resource.externalId;
  }
  await secretScanningV2Queue.queueDataSourceFullScan(
    {
      ...dataSource,
      connection
    } as TSecretScanningDataSourceWithConnection,
    resourceExternalId
  );
  return dataSource as TSecretScanningDataSource;
};
/**
 * Lists the resources (e.g. repositories) tracked by a Data Source.
 * Enforces plan, ReadResources permission, and data source type match.
 */
const listSecretScanningResourcesByDataSourceId = async (
  { type, dataSourceId }: TFindSecretScanningDataSourceByIdDTO,
  actor: OrgServiceActor
) => {
  const typeName = SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type];

  const orgPlan = await licenseService.getPlan(actor.orgId);
  if (!orgPlan.secretScanning) {
    throw new BadRequestError({
      message:
        "Failed to access Secret Scanning Resources due to plan restriction. Upgrade plan to enable Secret Scanning."
    });
  }

  const source = await secretScanningV2DAL.dataSources.findById(dataSourceId);
  if (!source) {
    throw new NotFoundError({
      message: `Could not find ${typeName} Data Source with ID "${dataSourceId}"`
    });
  }

  const { permission } = await permissionService.getProjectPermission({
    actor: actor.type,
    actorId: actor.id,
    actorAuthMethod: actor.authMethod,
    actorOrgId: actor.orgId,
    actionProjectType: ActionProjectType.SecretScanning,
    projectId: source.projectId
  });

  ForbiddenError.from(permission).throwUnlessCan(
    ProjectPermissionSecretScanningDataSourceActions.ReadResources,
    ProjectPermissionSub.SecretScanningDataSources
  );

  if (type !== source.type) {
    throw new BadRequestError({
      message: `Secret Scanning Data Source with ID "${dataSourceId}" is not configured for ${typeName}`
    });
  }

  const resources = await secretScanningV2DAL.resources.find({ dataSourceId });

  return { resources, projectId: source.projectId };
};
/**
 * Lists scans recorded for a Data Source.
 * Enforces plan, ReadScans permission, and data source type match.
 */
const listSecretScanningScansByDataSourceId = async (
  { type, dataSourceId }: TFindSecretScanningDataSourceByIdDTO,
  actor: OrgServiceActor
) => {
  const plan = await licenseService.getPlan(actor.orgId);
  if (!plan.secretScanning)
    throw new BadRequestError({
      // Fixed copy-paste: this endpoint lists Scans, not Resources (matches the
      // wording used by listSecretScanningScansWithDetailsByDataSourceId).
      message:
        "Failed to access Secret Scanning Scans due to plan restriction. Upgrade plan to enable Secret Scanning."
    });
  const dataSource = await secretScanningV2DAL.dataSources.findById(dataSourceId);
  if (!dataSource)
    throw new NotFoundError({
      message: `Could not find ${SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type]} Data Source with ID "${dataSourceId}"`
    });
  const { permission } = await permissionService.getProjectPermission({
    actor: actor.type,
    actorId: actor.id,
    actorAuthMethod: actor.authMethod,
    actorOrgId: actor.orgId,
    actionProjectType: ActionProjectType.SecretScanning,
    projectId: dataSource.projectId
  });
  ForbiddenError.from(permission).throwUnlessCan(
    ProjectPermissionSecretScanningDataSourceActions.ReadScans,
    ProjectPermissionSub.SecretScanningDataSources
  );
  if (type !== dataSource.type)
    throw new BadRequestError({
      message: `Secret Scanning Data Source with ID "${dataSourceId}" is not configured for ${SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type]}`
    });
  const scans = await secretScanningV2DAL.scans.findByDataSourceId(dataSourceId);
  return { scans, projectId: dataSource.projectId };
};
/**
 * Lists a Data Source's resources enriched with scan status details
 * (last scan time/status and unresolved finding counts).
 */
const listSecretScanningResourcesWithDetailsByDataSourceId = async (
  { type, dataSourceId }: TFindSecretScanningDataSourceByIdDTO,
  actor: OrgServiceActor
) => {
  const typeName = SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type];

  const orgPlan = await licenseService.getPlan(actor.orgId);
  if (!orgPlan.secretScanning) {
    throw new BadRequestError({
      message:
        "Failed to access Secret Scanning Resources due to plan restriction. Upgrade plan to enable Secret Scanning."
    });
  }

  const source = await secretScanningV2DAL.dataSources.findById(dataSourceId);
  if (!source) {
    throw new NotFoundError({
      message: `Could not find ${typeName} Data Source with ID "${dataSourceId}"`
    });
  }

  const { permission } = await permissionService.getProjectPermission({
    actor: actor.type,
    actorId: actor.id,
    actorAuthMethod: actor.authMethod,
    actorOrgId: actor.orgId,
    actionProjectType: ActionProjectType.SecretScanning,
    projectId: source.projectId
  });

  ForbiddenError.from(permission).throwUnlessCan(
    ProjectPermissionSecretScanningDataSourceActions.ReadResources,
    ProjectPermissionSub.SecretScanningDataSources
  );

  if (type !== source.type) {
    throw new BadRequestError({
      message: `Secret Scanning Data Source with ID "${dataSourceId}" is not configured for ${typeName}`
    });
  }

  const detailedResources = await secretScanningV2DAL.resources.findWithDetails({ dataSourceId });

  return {
    resources: detailedResources as TSecretScanningResourceWithDetails[],
    projectId: source.projectId
  };
};
/**
 * Lists a Data Source's scans enriched with details (finding counts and the
 * scanned resource's name).
 */
const listSecretScanningScansWithDetailsByDataSourceId = async (
  { type, dataSourceId }: TFindSecretScanningDataSourceByIdDTO,
  actor: OrgServiceActor
) => {
  const typeName = SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type];

  const orgPlan = await licenseService.getPlan(actor.orgId);
  if (!orgPlan.secretScanning) {
    throw new BadRequestError({
      message:
        "Failed to access Secret Scanning Scans due to plan restriction. Upgrade plan to enable Secret Scanning."
    });
  }

  const source = await secretScanningV2DAL.dataSources.findById(dataSourceId);
  if (!source) {
    throw new NotFoundError({
      message: `Could not find ${typeName} Data Source with ID "${dataSourceId}"`
    });
  }

  const { permission } = await permissionService.getProjectPermission({
    actor: actor.type,
    actorId: actor.id,
    actorAuthMethod: actor.authMethod,
    actorOrgId: actor.orgId,
    actionProjectType: ActionProjectType.SecretScanning,
    projectId: source.projectId
  });

  ForbiddenError.from(permission).throwUnlessCan(
    ProjectPermissionSecretScanningDataSourceActions.ReadScans,
    ProjectPermissionSub.SecretScanningDataSources
  );

  if (type !== source.type) {
    throw new BadRequestError({
      message: `Secret Scanning Data Source with ID "${dataSourceId}" is not configured for ${typeName}`
    });
  }

  const detailedScans = await secretScanningV2DAL.scans.findWithDetailsByDataSourceId(dataSourceId);

  return {
    scans: detailedScans as TSecretScanningScanWithDetails[],
    projectId: source.projectId
  };
};
/**
 * Returns the number of unresolved Secret Scanning Findings in a project.
 */
const getSecretScanningUnresolvedFindingsCountByProjectId = async (projectId: string, actor: OrgServiceActor) => {
  const orgPlan = await licenseService.getPlan(actor.orgId);
  if (!orgPlan.secretScanning) {
    throw new BadRequestError({
      message:
        "Failed to access Secret Scanning Findings due to plan restriction. Upgrade plan to enable Secret Scanning."
    });
  }

  const { permission } = await permissionService.getProjectPermission({
    actor: actor.type,
    actorId: actor.id,
    actorAuthMethod: actor.authMethod,
    actorOrgId: actor.orgId,
    actionProjectType: ActionProjectType.SecretScanning,
    projectId
  });

  ForbiddenError.from(permission).throwUnlessCan(
    ProjectPermissionSecretScanningFindingActions.Read,
    ProjectPermissionSub.SecretScanningFindings
  );

  // Count query yields a single row; coerce since the driver may return the
  // count as a string, and default to 0 when no row comes back.
  const [countRow] = await secretScanningV2DAL.findings.find(
    { projectId, status: SecretScanningFindingStatus.Unresolved },
    { count: true }
  );

  return Number(countRow?.count ?? 0);
};
/**
 * Lists all Secret Scanning Findings for a project.
 */
const listSecretScanningFindingsByProjectId = async (projectId: string, actor: OrgServiceActor) => {
  const orgPlan = await licenseService.getPlan(actor.orgId);
  if (!orgPlan.secretScanning) {
    throw new BadRequestError({
      message:
        "Failed to access Secret Scanning Findings due to plan restriction. Upgrade plan to enable Secret Scanning."
    });
  }

  const { permission } = await permissionService.getProjectPermission({
    actor: actor.type,
    actorId: actor.id,
    actorAuthMethod: actor.authMethod,
    actorOrgId: actor.orgId,
    actionProjectType: ActionProjectType.SecretScanning,
    projectId
  });

  ForbiddenError.from(permission).throwUnlessCan(
    ProjectPermissionSecretScanningFindingActions.Read,
    ProjectPermissionSub.SecretScanningFindings
  );

  const projectFindings = await secretScanningV2DAL.findings.find({ projectId });

  return projectFindings as TSecretScanningFinding[];
};
/**
 * Updates a Secret Scanning Finding's status and/or remarks.
 * Returns the updated finding along with its project ID for event logging.
 */
const updateSecretScanningFindingById = async (
  { findingId, remarks, status }: TUpdateSecretScanningFindingDTO,
  actor: OrgServiceActor
) => {
  const orgPlan = await licenseService.getPlan(actor.orgId);
  if (!orgPlan.secretScanning) {
    throw new BadRequestError({
      message:
        "Failed to access Secret Scanning Findings due to plan restriction. Upgrade plan to enable Secret Scanning."
    });
  }

  const existingFinding = await secretScanningV2DAL.findings.findById(findingId);
  if (!existingFinding) {
    throw new NotFoundError({
      message: `Could not find Secret Scanning Finding with ID "${findingId}"`
    });
  }

  const { permission } = await permissionService.getProjectPermission({
    actor: actor.type,
    actorId: actor.id,
    actorAuthMethod: actor.authMethod,
    actorOrgId: actor.orgId,
    actionProjectType: ActionProjectType.SecretScanning,
    projectId: existingFinding.projectId
  });

  ForbiddenError.from(permission).throwUnlessCan(
    ProjectPermissionSecretScanningFindingActions.Update,
    ProjectPermissionSub.SecretScanningFindings
  );

  const updatedFinding = await secretScanningV2DAL.findings.updateById(findingId, { remarks, status });

  return {
    finding: updatedFinding as TSecretScanningFinding,
    projectId: existingFinding.projectId
  };
};
/**
 * Fetches a project's Secret Scanning Configuration, falling back to an empty
 * default shape when the project has never stored one.
 */
const findSecretScanningConfigByProjectId = async (projectId: string, actor: OrgServiceActor) => {
  const orgPlan = await licenseService.getPlan(actor.orgId);
  if (!orgPlan.secretScanning) {
    throw new BadRequestError({
      message:
        "Failed to access Secret Scanning Configuration due to plan restriction. Upgrade plan to enable Secret Scanning."
    });
  }

  const { permission } = await permissionService.getProjectPermission({
    actor: actor.type,
    actorId: actor.id,
    actorAuthMethod: actor.authMethod,
    actorOrgId: actor.orgId,
    actionProjectType: ActionProjectType.SecretScanning,
    projectId
  });

  ForbiddenError.from(permission).throwUnlessCan(
    ProjectPermissionSecretScanningConfigActions.Read,
    ProjectPermissionSub.SecretScanningConfigs
  );

  const storedConfig = await secretScanningV2DAL.configs.findOne({ projectId });
  if (storedConfig) return storedConfig;

  // No stored config — return the default so callers always get a shape.
  return { content: null, projectId, updatedAt: null };
};
/**
 * Creates or replaces a project's Secret Scanning Configuration.
 *
 * When `content` is provided, it is written to a temp `infisical-scan.toml`
 * and dry-run through the scanner to validate TOML syntax before persisting.
 * Passing `content: null` clears the stored configuration.
 */
const upsertSecretScanningConfig = async (
  { projectId, content }: TUpsertSecretScanningConfigDTO,
  actor: OrgServiceActor
) => {
  const plan = await licenseService.getPlan(actor.orgId);
  if (!plan.secretScanning)
    throw new BadRequestError({
      message:
        "Failed to access Secret Scanning Configuration due to plan restriction. Upgrade plan to enable Secret Scanning."
    });
  const { permission } = await permissionService.getProjectPermission({
    actor: actor.type,
    actorId: actor.id,
    actorAuthMethod: actor.authMethod,
    actorOrgId: actor.orgId,
    actionProjectType: ActionProjectType.SecretScanning,
    projectId
  });
  ForbiddenError.from(permission).throwUnlessCan(
    ProjectPermissionSecretScanningConfigActions.Update,
    ProjectPermissionSub.SecretScanningConfigs
  );
  if (content) {
    const tempFolder = await createTempFolder();
    try {
      const configPath = join(tempFolder, "infisical-scan.toml");
      await writeTextToFile(configPath, content);
      // just checking if config parses
      await scanContentAndGetFindings("", configPath);
    } catch {
      // Removed unused `e` binding (optional catch binding is used elsewhere in
      // this file); the scanner's error is intentionally replaced with a
      // user-facing parse error.
      throw new BadRequestError({
        message: "Unable to parse configuration: Check syntax and formatting."
      });
    } finally {
      // Always remove the temp folder, even when validation fails.
      await deleteTempFolder(tempFolder);
    }
  }
  const [config] = await secretScanningV2DAL.configs.upsert(
    [
      {
        projectId,
        content
      }
    ],
    "projectId"
  );
  return config;
};
return {
listSecretScanningDataSourceOptions,
listSecretScanningDataSourcesByProjectId,
listSecretScanningDataSourcesWithDetailsByProjectId,
findSecretScanningDataSourceById,
findSecretScanningDataSourceByName,
createSecretScanningDataSource,
updateSecretScanningDataSource,
deleteSecretScanningDataSource,
triggerSecretScanningDataSourceScan,
listSecretScanningResourcesByDataSourceId,
listSecretScanningScansByDataSourceId,
listSecretScanningResourcesWithDetailsByDataSourceId,
listSecretScanningScansWithDetailsByDataSourceId,
getSecretScanningUnresolvedFindingsCountByProjectId,
listSecretScanningFindingsByProjectId,
updateSecretScanningFindingById,
findSecretScanningConfigByProjectId,
upsertSecretScanningConfig,
github: githubSecretScanningService(secretScanningV2DAL, secretScanningV2Queue)
};
};

View File

@@ -0,0 +1,189 @@
import {
TSecretScanningDataSources,
TSecretScanningFindingsInsert,
TSecretScanningResources,
TSecretScanningScans
} from "@app/db/schemas";
import {
TGitHubDataSource,
TGitHubDataSourceInput,
TGitHubDataSourceListItem,
TGitHubDataSourceWithConnection,
TGitHubFinding,
TQueueGitHubResourceDiffScan
} from "@app/ee/services/secret-scanning-v2/github";
import { TSecretScanningV2DALFactory } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-dal";
import {
SecretScanningDataSource,
SecretScanningFindingStatus,
SecretScanningScanStatus
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
/** Union of all supported Data Source shapes (currently GitHub only). */
export type TSecretScanningDataSource = TGitHubDataSource;

/** Data Source augmented with latest-scan status and unresolved finding count. */
export type TSecretScanningDataSourceWithDetails = TSecretScanningDataSource & {
  lastScannedAt?: Date | null;
  lastScanStatus?: SecretScanningScanStatus | null;
  lastScanStatusMessage?: string | null;
  unresolvedFindings: number;
};

/** Resource row augmented with latest-scan status and unresolved finding count. */
export type TSecretScanningResourceWithDetails = TSecretScanningResources & {
  lastScannedAt?: Date | null;
  lastScanStatus?: SecretScanningScanStatus | null;
  lastScanStatusMessage?: string | null;
  unresolvedFindings: number;
};

/** Scan row augmented with finding counts and the scanned resource's name. */
export type TSecretScanningScanWithDetails = TSecretScanningScans & {
  unresolvedFindings: number;
  resolvedFindings: number;
  resourceName: string;
};

/** Data Source joined with its (decrypted) app connection. */
export type TSecretScanningDataSourceWithConnection = TGitHubDataSourceWithConnection;

/** Validated input shape for creating/updating a Data Source. */
export type TSecretScanningDataSourceInput = TGitHubDataSourceInput;

/** Option-list entry describing an available Data Source type. */
export type TSecretScanningDataSourceListItem = TGitHubDataSourceListItem;

/** Union of all finding shapes (currently GitHub only). */
export type TSecretScanningFinding = TGitHubFinding;

/** Filter for listing a project's Data Sources, optionally by type. */
export type TListSecretScanningDataSourcesByProjectId = {
  projectId: string;
  type?: SecretScanningDataSource;
};

/** Lookup of a Data Source by ID; `type` guards against mismatched endpoints. */
export type TFindSecretScanningDataSourceByIdDTO = {
  dataSourceId: string;
  type: SecretScanningDataSource;
};

/** Lookup of a Data Source by its (project-unique) name. */
export type TFindSecretScanningDataSourceByNameDTO = {
  sourceName: string;
  projectId: string;
  type: SecretScanningDataSource;
};

/** Payload for creating a Data Source. `config` is type-specific and partial. */
export type TCreateSecretScanningDataSourceDTO = Pick<
  TSecretScanningDataSource,
  "description" | "name" | "projectId"
> & {
  connectionId?: string;
  type: SecretScanningDataSource;
  isAutoScanEnabled?: boolean;
  config: Partial<TSecretScanningDataSourceInput["config"]>;
};

/** Payload for updating a Data Source; project and connection are immutable. */
export type TUpdateSecretScanningDataSourceDTO = Partial<
  Omit<TCreateSecretScanningDataSourceDTO, "projectId" | "connectionId">
> & {
  dataSourceId: string;
  type: SecretScanningDataSource;
};

/** Payload for deleting a Data Source. */
export type TDeleteSecretScanningDataSourceDTO = {
  type: SecretScanningDataSource;
  dataSourceId: string;
};

/** Payload for manually triggering a scan; `resourceId` narrows to one resource. */
export type TTriggerSecretScanningDataSourceDTO = {
  type: SecretScanningDataSource;
  dataSourceId: string;
  resourceId?: string;
};

/** Queue job payload for a full scan of a single resource. */
export type TQueueSecretScanningDataSourceFullScan = {
  dataSourceId: string;
  resourceId: string;
  scanId: string;
};

/** Queue job payload for a provider-event-driven diff scan. */
export type TQueueSecretScanningResourceDiffScan = TQueueGitHubResourceDiffScan;

/** Queue job payload for scan-outcome notifications (failure vs completion). */
export type TQueueSecretScanningSendNotification = {
  dataSource: TSecretScanningDataSources;
  resourceName: string;
} & (
  | { status: SecretScanningScanStatus.Failed; errorMessage: string }
  | { status: SecretScanningScanStatus.Completed; numberOfSecrets: number; scanId: string; isDiffScan: boolean }
);

/** Parameters for cloning a repository to a local path for scanning. */
export type TCloneRepository = {
  cloneUrl: string;
  repoPath: string;
};

/** Factory op: enumerate the provider's scannable resources for a Data Source. */
export type TSecretScanningFactoryListRawResources<T extends TSecretScanningDataSourceWithConnection> = (
  dataSource: T
) => Promise<Pick<TSecretScanningResources, "externalId" | "name" | "type">[]>;

/** Factory op: derive the resource identity from a diff-scan queue payload. */
export type TSecretScanningFactoryGetDiffScanResourcePayload<
  P extends TQueueSecretScanningResourceDiffScan["payload"]
> = (payload: P) => Pick<TSecretScanningResources, "externalId" | "name" | "type">;

/** Factory op: materialize a resource locally (e.g. clone) and return its path. */
export type TSecretScanningFactoryGetFullScanPath<T extends TSecretScanningDataSourceWithConnection> = (parameters: {
  dataSource: T;
  resourceName: string;
  tempFolder: string;
}) => Promise<string>;

/** Factory op: produce findings for a diff scan, honoring an optional config file. */
export type TSecretScanningFactoryGetDiffScanFindingsPayload<
  T extends TSecretScanningDataSourceWithConnection,
  P extends TQueueSecretScanningResourceDiffScan["payload"]
> = (parameters: { dataSource: T; resourceName: string; payload: P; configPath?: string }) => Promise<TFindingsPayload>;

/** Raw DAL row shape for a Data Source (as returned by findById). */
export type TSecretScanningDataSourceRaw = NonNullable<
  Awaited<ReturnType<TSecretScanningV2DALFactory["dataSources"]["findById"]>>
>;

/**
 * Factory op: provider-side setup for a new Data Source. The factory must call
 * `callback` (which persists the row) and return the created row.
 */
export type TSecretScanningFactoryInitialize<
  T extends TSecretScanningDataSourceWithConnection["connection"] | undefined = undefined,
  C extends TSecretScanningDataSourceCredentials = undefined
> = (
  params: {
    payload: TCreateSecretScanningDataSourceDTO;
    connection: T;
    secretScanningV2DAL: TSecretScanningV2DALFactory;
  },
  callback: (parameters: { credentials?: C; externalId?: string }) => Promise<TSecretScanningDataSourceRaw>
) => Promise<TSecretScanningDataSourceRaw>;

/** Factory op: setup that requires the persisted Data Source's ID. */
export type TSecretScanningFactoryPostInitialization<
  T extends TSecretScanningDataSourceWithConnection["connection"] | undefined = undefined,
  C extends TSecretScanningDataSourceCredentials = undefined
> = (params: {
  payload: TCreateSecretScanningDataSourceDTO;
  connection: T;
  credentials: C;
  dataSourceId: string;
}) => Promise<void>;

/** Complete set of provider-specific operations a scanning factory must supply. */
export type TSecretScanningFactory<
  T extends TSecretScanningDataSourceWithConnection,
  C extends TSecretScanningDataSourceCredentials,
  P extends TQueueSecretScanningResourceDiffScan["payload"]
> = () => {
  listRawResources: TSecretScanningFactoryListRawResources<T>;
  getFullScanPath: TSecretScanningFactoryGetFullScanPath<T>;
  initialize: TSecretScanningFactoryInitialize<T["connection"] | undefined, C>;
  postInitialization: TSecretScanningFactoryPostInitialization<T["connection"] | undefined, C>;
  getDiffScanResourcePayload: TSecretScanningFactoryGetDiffScanResourcePayload<P>;
  getDiffScanFindingsPayload: TSecretScanningFactoryGetDiffScanFindingsPayload<T, P>;
};

/** Insert-ready finding rows produced by a scan. */
export type TFindingsPayload = Pick<TSecretScanningFindingsInsert, "details" | "fingerprint" | "severity" | "rule">[];
export type TGetFindingsPayload = Promise<TFindingsPayload>;

/** Payload for updating a finding's status/remarks. */
export type TUpdateSecretScanningFindingDTO = {
  status?: SecretScanningFindingStatus;
  remarks?: string | null;
  findingId: string;
};

/** Payload for upserting a project's scan config; `null` content clears it. */
export type TUpsertSecretScanningConfigDTO = {
  projectId: string;
  content: string | null;
};

// No data source currently stores factory credentials; widen this union as
// providers that need credentials are added.
export type TSecretScanningDataSourceCredentials = undefined;

View File

@@ -0,0 +1,7 @@
import { z } from "zod";
import { GitHubDataSourceSchema, GitHubFindingSchema } from "@app/ee/services/secret-scanning-v2/github";
// Union of all supported Data Source schemas, discriminated by `type`
// (currently GitHub only).
export const SecretScanningDataSourceSchema = z.discriminatedUnion("type", [GitHubDataSourceSchema]);
// Union of all finding schemas, discriminated by `resourceType`
// (currently GitHub only).
export const SecretScanningFindingSchema = z.discriminatedUnion("resourceType", [GitHubFindingSchema]);

View File

@@ -65,9 +65,9 @@ export function runInfisicalScanOnRepo(repoPath: string, outputPath: string): Pr
});
}
export function runInfisicalScan(inputPath: string, outputPath: string): Promise<void> {
export function runInfisicalScan(inputPath: string, outputPath: string, configPath?: string): Promise<void> {
return new Promise((resolve, reject) => {
const command = `cat "${inputPath}" | infisical scan --exit-code=77 --pipe -r "${outputPath}"`;
const command = `cat "${inputPath}" | infisical scan --exit-code=77 --pipe -r "${outputPath}" ${configPath ? `-c "${configPath}"` : ""}`;
exec(command, (error) => {
if (error && error.code !== 77) {
reject(error);
@@ -138,14 +138,14 @@ export async function scanFullRepoContentAndGetFindings(
}
}
export async function scanContentAndGetFindings(textContent: string): Promise<SecretMatch[]> {
export async function scanContentAndGetFindings(textContent: string, configPath?: string): Promise<SecretMatch[]> {
const tempFolder = await createTempFolder();
const filePath = join(tempFolder, "content.txt");
const findingsPath = join(tempFolder, "findings.json");
try {
await writeTextToFile(filePath, textContent);
await runInfisicalScan(filePath, findingsPath);
await runInfisicalScan(filePath, findingsPath, configPath);
const findingsData = await readFindingsFile(findingsPath);
return JSON.parse(findingsData) as SecretMatch[];
} finally {

View File

@@ -9,6 +9,7 @@ export type SecretMatch = {
Match: string;
Secret: string;
File: string;
Link: string;
SymlinkFile: string;
Commit: string;
Entropy: number;

View File

@@ -36,6 +36,8 @@ export const KeyStorePrefixes = {
`sync-integration-last-run-${projectId}-${environmentSlug}-${secretPath}` as const,
SecretSyncLock: (syncId: string) => `secret-sync-mutex-${syncId}` as const,
SecretRotationLock: (rotationId: string) => `secret-rotation-v2-mutex-${rotationId}` as const,
SecretScanningLock: (dataSourceId: string, resourceExternalId: string) =>
`secret-scanning-v2-mutex-${dataSourceId}-${resourceExternalId}` as const,
CaOrderCertificateForSubscriberLock: (subscriberId: string) =>
`ca-order-certificate-for-subscriber-lock-${subscriberId}` as const,
SecretSyncLastRunTimestamp: (syncId: string) => `secret-sync-last-run-${syncId}` as const,

View File

@@ -3,6 +3,12 @@ import {
SECRET_ROTATION_CONNECTION_MAP,
SECRET_ROTATION_NAME_MAP
} from "@app/ee/services/secret-rotation-v2/secret-rotation-v2-maps";
import { SecretScanningDataSource } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
import {
AUTO_SYNC_DESCRIPTION_HELPER,
SECRET_SCANNING_DATA_SOURCE_CONNECTION_MAP,
SECRET_SCANNING_DATA_SOURCE_NAME_MAP
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-maps";
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
import { APP_CONNECTION_NAME_MAP } from "@app/services/app-connection/app-connection-maps";
import { CaType } from "@app/services/certificate-authority/certificate-authority-enums";
@@ -57,7 +63,8 @@ export enum ApiDocsTags {
SshHostGroups = "SSH Host Groups",
KmsKeys = "KMS Keys",
KmsEncryption = "KMS Encryption",
KmsSigning = "KMS Signing"
KmsSigning = "KMS Signing",
SecretScanning = "Secret Scanning"
}
export const GROUPS = {
@@ -2432,3 +2439,81 @@ export const SecretRotations = {
}
}
};
export const SecretScanningDataSources = {
LIST: (type?: SecretScanningDataSource) => ({
projectId: `The ID of the project to list ${type ? SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type] : "Scanning"} Data Sources from.`
}),
GET_BY_ID: (type: SecretScanningDataSource) => ({
dataSourceId: `The ID of the ${SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type]} Data Source to retrieve.`
}),
GET_BY_NAME: (type: SecretScanningDataSource) => ({
sourceName: `The name of the ${SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type]} Data Source to retrieve.`,
projectId: `The ID of the project the ${SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type]} Data Source is located in.`
}),
CREATE: (type: SecretScanningDataSource) => {
const sourceType = SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type];
const autoScanDescription = AUTO_SYNC_DESCRIPTION_HELPER[type];
return {
name: `The name of the ${sourceType} Data Source to create. Must be slug-friendly.`,
description: `An optional description for the ${sourceType} Data Source.`,
projectId: `The ID of the project to create the ${sourceType} Data Source in.`,
connectionId: `The ID of the ${
APP_CONNECTION_NAME_MAP[SECRET_SCANNING_DATA_SOURCE_CONNECTION_MAP[type]]
} Connection to use for this Data Source.`,
isAutoScanEnabled: `Whether scans should be automatically performed when a ${autoScanDescription.verb} occurs to ${autoScanDescription.noun} associated with this Data Source.`,
config: `The configuration parameters to use for this Data Source.`
};
},
UPDATE: (type: SecretScanningDataSource) => {
const typeName = SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type];
const autoScanDescription = AUTO_SYNC_DESCRIPTION_HELPER[type];
return {
dataSourceId: `The ID of the ${typeName} Data Source to be updated.`,
name: `The updated name of the ${typeName} Data Source. Must be slug-friendly.`,
description: `The updated description of the ${typeName} Data Source.`,
isAutoScanEnabled: `Whether scans should be automatically performed when a ${autoScanDescription.verb} occurs to ${autoScanDescription.noun} associated with this Data Source.`,
config: `The updated configuration parameters to use for this Data Source.`
};
},
DELETE: (type: SecretScanningDataSource) => ({
dataSourceId: `The ID of the ${SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type]} Data Source to be deleted.`
}),
SCAN: (type: SecretScanningDataSource) => ({
dataSourceId: `The ID of the ${SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type]} Data Source to trigger a scan for.`,
resourceId: `The ID of the individual Data Source resource to trigger a scan for.`
}),
LIST_RESOURCES: (type: SecretScanningDataSource) => ({
dataSourceId: `The ID of the ${SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type]} Data Source to list resources from.`
}),
LIST_SCANS: (type: SecretScanningDataSource) => ({
dataSourceId: `The ID of the ${SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type]} Data Source to list scans for.`
}),
CONFIG: {
GITHUB: {
includeRepos: 'The repositories to include when scanning. Defaults to all repositories (["*"]).'
}
}
};
export const SecretScanningFindings = {
LIST: {
projectId: `The ID of the project to list Secret Scanning Findings from.`
},
UPDATE: {
findingId: "The ID of the Secret Scanning Finding to update.",
status: "The updated status of the specified Secret Scanning Finding.",
remarks: "Remarks pertaining to the status of this finding."
}
};
export const SecretScanningConfigs = {
GET_BY_PROJECT_ID: {
projectId: `The ID of the project to retrieve the Secret Scanning Configuration for.`
},
UPDATE: {
projectId: "The ID of the project to update the Secret Scanning Configuration for.",
content: "The contents of the Secret Scanning Configuration file."
}
};

View File

@@ -1,5 +1,7 @@
import { z } from "zod";
import { QueueWorkerProfile } from "@app/lib/types";
import { removeTrailingSlash } from "../fn";
import { CustomLogger } from "../logger/logger";
import { zpStr } from "../zod";
@@ -69,6 +71,7 @@ const envSchema = z
ENCRYPTION_KEY: zpStr(z.string().optional()),
ROOT_ENCRYPTION_KEY: zpStr(z.string().optional()),
QUEUE_WORKERS_ENABLED: zodStrBool.default("true"),
QUEUE_WORKER_PROFILE: z.nativeEnum(QueueWorkerProfile).default(QueueWorkerProfile.All),
HTTPS_ENABLED: zodStrBool,
ROTATION_DEVELOPMENT_MODE: zodStrBool.default("false").optional(),
// smtp options
@@ -230,6 +233,14 @@ const envSchema = z
INF_APP_CONNECTION_GITHUB_APP_SLUG: zpStr(z.string().optional()),
INF_APP_CONNECTION_GITHUB_APP_ID: zpStr(z.string().optional()),
// github radar app
INF_APP_CONNECTION_GITHUB_RADAR_APP_CLIENT_ID: zpStr(z.string().optional()),
INF_APP_CONNECTION_GITHUB_RADAR_APP_CLIENT_SECRET: zpStr(z.string().optional()),
INF_APP_CONNECTION_GITHUB_RADAR_APP_PRIVATE_KEY: zpStr(z.string().optional()),
INF_APP_CONNECTION_GITHUB_RADAR_APP_SLUG: zpStr(z.string().optional()),
INF_APP_CONNECTION_GITHUB_RADAR_APP_ID: zpStr(z.string().optional()),
INF_APP_CONNECTION_GITHUB_RADAR_APP_WEBHOOK_SECRET: zpStr(z.string().optional()),
// gcp app
INF_APP_CONNECTION_GCP_SERVICE_ACCOUNT_CREDENTIAL: zpStr(z.string().optional()),
@@ -298,6 +309,13 @@ const envSchema = z
Boolean(data.SECRET_SCANNING_GIT_APP_ID) &&
Boolean(data.SECRET_SCANNING_PRIVATE_KEY) &&
Boolean(data.SECRET_SCANNING_WEBHOOK_SECRET),
isSecretScanningV2Configured:
Boolean(data.INF_APP_CONNECTION_GITHUB_RADAR_APP_ID) &&
Boolean(data.INF_APP_CONNECTION_GITHUB_RADAR_APP_PRIVATE_KEY) &&
Boolean(data.INF_APP_CONNECTION_GITHUB_RADAR_APP_SLUG) &&
Boolean(data.INF_APP_CONNECTION_GITHUB_RADAR_APP_CLIENT_ID) &&
Boolean(data.INF_APP_CONNECTION_GITHUB_RADAR_APP_CLIENT_SECRET) &&
Boolean(data.INF_APP_CONNECTION_GITHUB_RADAR_APP_WEBHOOK_SECRET),
isHsmConfigured:
Boolean(data.HSM_LIB_PATH) && Boolean(data.HSM_PIN) && Boolean(data.HSM_KEY_LABEL) && data.HSM_SLOT !== undefined,
samlDefaultOrgSlug: data.DEFAULT_SAML_ORG_SLUG,

View File

@@ -32,3 +32,24 @@ export const shake = <RemovedKeys extends string, T = object>(
return acc;
}, {} as T);
};
/**
 * Recursively converts every object key from TitleCase to camelCase
 * (only the first character is lowercased; the rest of the key is kept).
 * Arrays are mapped element-wise; primitives and null pass through untouched.
 */
export const titleCaseToCamelCase = (obj: unknown): unknown => {
  // primitives (and null) have no keys to rewrite
  if (typeof obj !== "object" || obj === null) {
    return obj;
  }

  if (Array.isArray(obj)) {
    return obj.map((element) => titleCaseToCamelCase(element));
  }

  // Object.entries only yields own enumerable keys, matching the
  // hasOwnProperty-guarded for..in of the original implementation.
  return Object.fromEntries(
    Object.entries(obj).map(([key, value]) => [key.charAt(0).toLowerCase() + key.slice(1), titleCaseToCamelCase(value)])
  );
};

View File

@@ -179,13 +179,18 @@ export const ormify = <DbOps extends object, Tname extends keyof Tables>(db: Kne
throw new DatabaseError({ error, name: "batchInsert" });
}
},
upsert: async (data: readonly Tables[Tname]["insert"][], onConflictField: keyof Tables[Tname]["base"], tx?: Knex) => {
upsert: async (
data: readonly Tables[Tname]["insert"][],
onConflictField: keyof Tables[Tname]["base"] | Array<keyof Tables[Tname]["base"]>,
tx?: Knex,
mergeColumns?: (keyof Knex.ResolveTableType<Knex.TableType<Tname>, "update">)[] | undefined
) => {
try {
if (!data.length) return [];
const res = await (tx || db)(tableName)
.insert(data as never)
.onConflict(onConflictField as never)
.merge()
.merge(mergeColumns)
.returning("*");
return res;
} catch (error) {

View File

@@ -9,3 +9,5 @@ export const DistinguishedNameRegex =
// Matches a user principal name (UPN), e.g. "user@domain.tld".
export const UserPrincipalNameRegex = new RE2(/^[a-zA-Z0-9._%+-]+@[a-zA-Z0-9._-]+\.[a-zA-Z]{2,}$/);
// Matches an "ldap://" or "ldaps://" URL prefix (prefix only, not a full URL).
export const LdapUrlRegex = new RE2(/^ldaps?:\/\//);
// Matches an "owner/repo" GitHub repository identifier.
export const GitHubRepositoryRegex = new RE2(/^[a-zA-Z0-9._-]+\/[a-zA-Z0-9._-]+$/);

View File

@@ -78,3 +78,9 @@ export type OrgServiceActor = {
authMethod: ActorAuthMethod;
orgId: string;
};
// Selects which queues a worker process consumes (driven by the
// QUEUE_WORKER_PROFILE environment variable; see queue service).
export enum QueueWorkerProfile {
  All = "all", // consume every queue (default)
  Standard = "standard", // everything except secret scanning queues
  SecretScanning = "secret-scanning" // only secret scanning queues
}

View File

@@ -11,9 +11,15 @@ import {
TScanFullRepoEventPayload,
TScanPushEventPayload
} from "@app/ee/services/secret-scanning/secret-scanning-queue/secret-scanning-queue-types";
import {
TQueueSecretScanningDataSourceFullScan,
TQueueSecretScanningResourceDiffScan,
TQueueSecretScanningSendNotification
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-types";
import { getConfig } from "@app/lib/config/env";
import { buildRedisFromConfig, TRedisConfigKeys } from "@app/lib/config/redis";
import { logger } from "@app/lib/logger";
import { QueueWorkerProfile } from "@app/lib/types";
import { CaType } from "@app/services/certificate-authority/certificate-authority-enums";
import {
TFailedIntegrationSyncEmailsPayload,
@@ -54,7 +60,8 @@ export enum QueueName {
ImportSecretsFromExternalSource = "import-secrets-from-external-source",
AppConnectionSecretSync = "app-connection-secret-sync",
SecretRotationV2 = "secret-rotation-v2",
InvalidateCache = "invalidate-cache"
InvalidateCache = "invalidate-cache",
SecretScanningV2 = "secret-scanning-v2"
}
export enum QueueJobs {
@@ -88,6 +95,9 @@ export enum QueueJobs {
SecretRotationV2RotateSecrets = "secret-rotation-v2-rotate-secrets",
SecretRotationV2SendNotification = "secret-rotation-v2-send-notification",
InvalidateCache = "invalidate-cache",
SecretScanningV2FullScan = "secret-scanning-v2-full-scan",
SecretScanningV2DiffScan = "secret-scanning-v2-diff-scan",
SecretScanningV2SendNotification = "secret-scanning-v2-notification",
CaOrderCertificateForSubscriber = "ca-order-certificate-for-subscriber",
PkiSubscriberDailyAutoRenewal = "pki-subscriber-daily-auto-renewal"
}
@@ -250,6 +260,19 @@ export type TQueueJobTypes = {
};
};
};
[QueueName.SecretScanningV2]:
| {
name: QueueJobs.SecretScanningV2FullScan;
payload: TQueueSecretScanningDataSourceFullScan;
}
| {
name: QueueJobs.SecretScanningV2DiffScan;
payload: TQueueSecretScanningResourceDiffScan;
}
| {
name: QueueJobs.SecretScanningV2SendNotification;
payload: TQueueSecretScanningSendNotification;
};
[QueueName.CaLifecycle]: {
name: QueueJobs.CaOrderCertificateForSubscriber;
payload: {
@@ -263,6 +286,37 @@ export type TQueueJobTypes = {
};
};
const SECRET_SCANNING_JOBS = [
QueueJobs.SecretScanningV2FullScan,
QueueJobs.SecretScanningV2DiffScan,
QueueJobs.SecretScanningV2SendNotification,
QueueJobs.SecretScan
];
const NON_STANDARD_JOBS = [...SECRET_SCANNING_JOBS];
const SECRET_SCANNING_QUEUES = [
QueueName.SecretScanningV2,
QueueName.SecretFullRepoScan,
QueueName.SecretPushEventScan
];
const NON_STANDARD_QUEUES = [...SECRET_SCANNING_QUEUES];
const isQueueEnabled = (name: QueueName) => {
const appCfg = getConfig();
switch (appCfg.QUEUE_WORKER_PROFILE) {
case QueueWorkerProfile.Standard:
return !NON_STANDARD_QUEUES.includes(name);
case QueueWorkerProfile.SecretScanning:
return SECRET_SCANNING_QUEUES.includes(name);
case QueueWorkerProfile.All:
default:
// allow all
return true;
}
};
export type TQueueServiceFactory = ReturnType<typeof queueServiceFactory>;
export const queueServiceFactory = (
redisCfg: TRedisConfigKeys,
@@ -319,7 +373,7 @@ export const queueServiceFactory = (
});
const appCfg = getConfig();
if (appCfg.QUEUE_WORKERS_ENABLED) {
if (appCfg.QUEUE_WORKERS_ENABLED && isQueueEnabled(name)) {
workerContainer[name] = new Worker<TQueueJobTypes[T]["payload"], void, TQueueJobTypes[T]["name"]>(name, jobFn, {
...queueSettings,
connection
@@ -338,6 +392,30 @@ export const queueServiceFactory = (
throw new Error(`${jobName} queue is already initialized`);
}
const appCfg = getConfig();
if (!appCfg.QUEUE_WORKERS_ENABLED) return;
switch (appCfg.QUEUE_WORKER_PROFILE) {
case QueueWorkerProfile.Standard:
if (NON_STANDARD_JOBS.includes(jobName)) {
// only process standard jobs
return;
}
break;
case QueueWorkerProfile.SecretScanning:
if (!SECRET_SCANNING_JOBS.includes(jobName)) {
// only process secret scanning jobs
return;
}
break;
case QueueWorkerProfile.All:
default:
// allow all
}
await pgBoss.createQueue(jobName);
queueContainerPg[jobName] = true;
@@ -357,7 +435,7 @@ export const queueServiceFactory = (
listener: WorkerListener<TQueueJobTypes[T]["payload"], void, TQueueJobTypes[T]["name"]>[U]
) => {
const appCfg = getConfig();
if (!appCfg.QUEUE_WORKERS_ENABLED) {
if (!appCfg.QUEUE_WORKERS_ENABLED || !isQueueEnabled(name)) {
return;
}

View File

@@ -0,0 +1,66 @@
import type { EmitterWebhookEventName } from "@octokit/webhooks/dist-types/types";
import { PushEvent } from "@octokit/webhooks-types";
import { Probot } from "probot";
import { getConfig } from "@app/lib/config/env";
import { logger } from "@app/lib/logger";
import { writeLimit } from "@app/server/config/rateLimiter";
// Registers the GitHub App webhook endpoint for Secret Scanning V2.
// Incoming GitHub events are signature-verified and dispatched via Probot
// to the secretScanningV2 service handlers.
export const registerSecretScanningV2Webhooks = async (server: FastifyZodProvider) => {
  const probotApp = (app: Probot) => {
    // Clean up server-side state when the GitHub App installation is removed.
    app.on("installation.deleted", async (context) => {
      const { payload } = context;
      const { installation } = payload;
      await server.services.secretScanningV2.github.handleInstallationDeletedEvent(installation.id);
    });

    // Informational only: log which repositories the app was installed on.
    app.on("installation", async (context) => {
      const { payload } = context;
      logger.info({ repositories: payload.repositories }, "Installed secret scanner to");
    });

    // Forward push events so a diff scan can be queued for the new commits.
    app.on("push", async (context) => {
      const { payload } = context;
      await server.services.secretScanningV2.github.handlePushEvent(payload as PushEvent);
    });
  };

  const appCfg = getConfig();

  // Skip registration entirely when the GitHub Radar app env vars are absent.
  if (!appCfg.isSecretScanningV2Configured) {
    logger.info("Secret Scanning V2 is not configured. Skipping registration of secret scanning v2 webhooks.");
    return;
  }

  const probot = new Probot({
    appId: appCfg.INF_APP_CONNECTION_GITHUB_RADAR_APP_ID as string,
    privateKey: appCfg.INF_APP_CONNECTION_GITHUB_RADAR_APP_PRIVATE_KEY as string,
    secret: appCfg.INF_APP_CONNECTION_GITHUB_RADAR_APP_WEBHOOK_SECRET as string
  });

  await probot.load(probotApp);

  // github push event webhook
  server.route({
    method: "POST",
    url: "/github",
    config: {
      rateLimit: writeLimit
    },
    handler: async (req, res) => {
      const eventName = req.headers["x-github-event"] as EmitterWebhookEventName;
      const signatureSHA256 = req.headers["x-hub-signature-256"] as string;
      const id = req.headers["x-github-delivery"] as string;

      // Probot validates the HMAC signature before dispatching to probotApp.
      // NOTE(review): verifyAndReceive throws on an invalid signature, which
      // surfaces as a 500 to GitHub rather than a 401 — confirm this is intended.
      await probot.webhooks.verifyAndReceive({
        id,
        name: eventName,
        payload: JSON.stringify(req.body),
        signature: signatureSHA256
      });

      return res.send("ok");
    }
  });
};

View File

@@ -92,6 +92,9 @@ import { gitAppInstallSessionDALFactory } from "@app/ee/services/secret-scanning
import { secretScanningDALFactory } from "@app/ee/services/secret-scanning/secret-scanning-dal";
import { secretScanningQueueFactory } from "@app/ee/services/secret-scanning/secret-scanning-queue";
import { secretScanningServiceFactory } from "@app/ee/services/secret-scanning/secret-scanning-service";
import { secretScanningV2DALFactory } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-dal";
import { secretScanningV2QueueServiceFactory } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-queue";
import { secretScanningV2ServiceFactory } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-service";
import { secretSnapshotServiceFactory } from "@app/ee/services/secret-snapshot/secret-snapshot-service";
import { snapshotDALFactory } from "@app/ee/services/secret-snapshot/snapshot-dal";
import { snapshotFolderDALFactory } from "@app/ee/services/secret-snapshot/snapshot-folder-dal";
@@ -118,6 +121,7 @@ import { getConfig, TEnvConfig } from "@app/lib/config/env";
import { logger } from "@app/lib/logger";
import { TQueueServiceFactory } from "@app/queue";
import { readLimit } from "@app/server/config/rateLimiter";
import { registerSecretScanningV2Webhooks } from "@app/server/plugins/secret-scanner-v2";
import { accessTokenQueueServiceFactory } from "@app/services/access-token-queue/access-token-queue";
import { apiKeyDALFactory } from "@app/services/api-key/api-key-dal";
import { apiKeyServiceFactory } from "@app/services/api-key/api-key-service";
@@ -312,6 +316,9 @@ export const registerRoutes = async (
) => {
const appCfg = getConfig();
await server.register(registerSecretScannerGhApp, { prefix: "/ss-webhook" });
await server.register(registerSecretScanningV2Webhooks, {
prefix: "/secret-scanning/webhooks"
});
// db layers
const userDAL = userDALFactory(db);
@@ -459,6 +466,7 @@ export const registerRoutes = async (
const secretRotationV2DAL = secretRotationV2DALFactory(db, folderDAL);
const microsoftTeamsIntegrationDAL = microsoftTeamsIntegrationDALFactory(db);
const projectMicrosoftTeamsConfigDAL = projectMicrosoftTeamsConfigDALFactory(db);
const secretScanningV2DAL = secretScanningV2DALFactory(db);
const permissionService = permissionServiceFactory({
permissionDAL,
@@ -1784,6 +1792,26 @@ export const registerRoutes = async (
smtpService
});
const secretScanningV2Queue = await secretScanningV2QueueServiceFactory({
auditLogService,
secretScanningV2DAL,
queueService,
projectDAL,
projectMembershipDAL,
smtpService,
kmsService,
keyStore
});
const secretScanningV2Service = secretScanningV2ServiceFactory({
permissionService,
appConnectionService,
licenseService,
secretScanningV2DAL,
secretScanningV2Queue,
kmsService
});
await superAdminService.initServerCfg();
// setup the communication with license key server
@@ -1898,7 +1926,8 @@ export const registerRoutes = async (
secretRotationV2: secretRotationV2Service,
microsoftTeams: microsoftTeamsService,
assumePrivileges: assumePrivilegeService,
githubOrgSync: githubOrgSyncConfigService
githubOrgSync: githubOrgSyncConfigService,
secretScanningV2: secretScanningV2Service
});
const cronJobs: CronJob[] = [];

View File

@@ -33,6 +33,10 @@ import {
} from "@app/services/app-connection/databricks";
import { GcpConnectionListItemSchema, SanitizedGcpConnectionSchema } from "@app/services/app-connection/gcp";
import { GitHubConnectionListItemSchema, SanitizedGitHubConnectionSchema } from "@app/services/app-connection/github";
import {
GitHubRadarConnectionListItemSchema,
SanitizedGitHubRadarConnectionSchema
} from "@app/services/app-connection/github-radar";
import {
HCVaultConnectionListItemSchema,
SanitizedHCVaultConnectionSchema
@@ -67,6 +71,7 @@ import { AuthMode } from "@app/services/auth/auth-type";
const SanitizedAppConnectionSchema = z.union([
...SanitizedAwsConnectionSchema.options,
...SanitizedGitHubConnectionSchema.options,
...SanitizedGitHubRadarConnectionSchema.options,
...SanitizedGcpConnectionSchema.options,
...SanitizedAzureKeyVaultConnectionSchema.options,
...SanitizedAzureAppConfigurationConnectionSchema.options,
@@ -91,6 +96,7 @@ const SanitizedAppConnectionSchema = z.union([
const AppConnectionOptionsSchema = z.discriminatedUnion("app", [
AwsConnectionListItemSchema,
GitHubConnectionListItemSchema,
GitHubRadarConnectionListItemSchema,
GcpConnectionListItemSchema,
AzureKeyVaultConnectionListItemSchema,
AzureAppConfigurationConnectionListItemSchema,

View File

@@ -0,0 +1,54 @@
import { z } from "zod";
import { readLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
import {
CreateGitHubRadarConnectionSchema,
SanitizedGitHubRadarConnectionSchema,
UpdateGitHubRadarConnectionSchema
} from "@app/services/app-connection/github-radar";
import { AuthMode } from "@app/services/auth/auth-type";
import { registerAppConnectionEndpoints } from "./app-connection-endpoints";
// Registers the standard CRUD endpoints for GitHub Radar connections plus
// an internal repository-listing helper route.
export const registerGitHubRadarConnectionRouter = async (server: FastifyZodProvider) => {
  registerAppConnectionEndpoints({
    server,
    app: AppConnection.GitHubRadar,
    createSchema: CreateGitHubRadarConnectionSchema,
    updateSchema: UpdateGitHubRadarConnectionSchema,
    sanitizedResponseSchema: SanitizedGitHubRadarConnectionSchema
  });

  // The below endpoints are not exposed and for Infisical App use
  server.route({
    method: "GET",
    url: "/:connectionId/repositories",
    config: {
      rateLimit: readLimit
    },
    schema: {
      params: z.object({
        connectionId: z.string().uuid()
      }),
      response: {
        200: z.object({
          repositories: z.object({ id: z.number(), name: z.string() }).array()
        })
      }
    },
    onRequest: verifyAuth([AuthMode.JWT]),
    handler: async (req) => {
      const repositories = await server.services.appConnection.githubRadar.listRepositories(
        req.params.connectionId,
        req.permission
      );

      return { repositories };
    }
  });
};

View File

@@ -11,6 +11,7 @@ import { registerCamundaConnectionRouter } from "./camunda-connection-router";
import { registerDatabricksConnectionRouter } from "./databricks-connection-router";
import { registerGcpConnectionRouter } from "./gcp-connection-router";
import { registerGitHubConnectionRouter } from "./github-connection-router";
import { registerGitHubRadarConnectionRouter } from "./github-radar-connection-router";
import { registerHCVaultConnectionRouter } from "./hc-vault-connection-router";
import { registerHumanitecConnectionRouter } from "./humanitec-connection-router";
import { registerLdapConnectionRouter } from "./ldap-connection-router";
@@ -28,6 +29,7 @@ export const APP_CONNECTION_REGISTER_ROUTER_MAP: Record<AppConnection, (server:
{
[AppConnection.AWS]: registerAwsConnectionRouter,
[AppConnection.GitHub]: registerGitHubConnectionRouter,
[AppConnection.GitHubRadar]: registerGitHubRadarConnectionRouter,
[AppConnection.GCP]: registerGcpConnectionRouter,
[AppConnection.AzureKeyVault]: registerAzureKeyVaultConnectionRouter,
[AppConnection.AzureAppConfiguration]: registerAzureAppConfigurationConnectionRouter,

View File

@@ -160,7 +160,14 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
.default("false")
.transform((value) => value === "true"),
type: z
.enum([ProjectType.SecretManager, ProjectType.KMS, ProjectType.CertificateManager, ProjectType.SSH, "all"])
.enum([
ProjectType.SecretManager,
ProjectType.KMS,
ProjectType.CertificateManager,
ProjectType.SSH,
ProjectType.SecretScanning,
"all"
])
.optional()
}),
response: {

View File

@@ -1,5 +1,6 @@
export enum AppConnection {
GitHub = "github",
GitHubRadar = "github-radar",
AWS = "aws",
Databricks = "databricks",
GCP = "gcp",

View File

@@ -52,6 +52,11 @@ import {
} from "./databricks";
import { GcpConnectionMethod, getGcpConnectionListItem, validateGcpConnectionCredentials } from "./gcp";
import { getGitHubConnectionListItem, GitHubConnectionMethod, validateGitHubConnectionCredentials } from "./github";
import {
getGitHubRadarConnectionListItem,
GitHubRadarConnectionMethod,
validateGitHubRadarConnectionCredentials
} from "./github-radar";
import {
getHCVaultConnectionListItem,
HCVaultConnectionMethod,
@@ -89,6 +94,7 @@ export const listAppConnectionOptions = () => {
return [
getAwsConnectionListItem(),
getGitHubConnectionListItem(),
getGitHubRadarConnectionListItem(),
getGcpConnectionListItem(),
getAzureKeyVaultConnectionListItem(),
getAzureAppConfigurationConnectionListItem(),
@@ -160,6 +166,7 @@ export const validateAppConnectionCredentials = async (
[AppConnection.AWS]: validateAwsConnectionCredentials as TAppConnectionCredentialsValidator,
[AppConnection.Databricks]: validateDatabricksConnectionCredentials as TAppConnectionCredentialsValidator,
[AppConnection.GitHub]: validateGitHubConnectionCredentials as TAppConnectionCredentialsValidator,
[AppConnection.GitHubRadar]: validateGitHubRadarConnectionCredentials as TAppConnectionCredentialsValidator,
[AppConnection.GCP]: validateGcpConnectionCredentials as TAppConnectionCredentialsValidator,
[AppConnection.AzureKeyVault]: validateAzureKeyVaultConnectionCredentials as TAppConnectionCredentialsValidator,
[AppConnection.AzureAppConfiguration]:
@@ -188,6 +195,7 @@ export const validateAppConnectionCredentials = async (
export const getAppConnectionMethodName = (method: TAppConnection["method"]) => {
switch (method) {
case GitHubConnectionMethod.App:
case GitHubRadarConnectionMethod.App:
return "GitHub App";
case AzureKeyVaultConnectionMethod.OAuth:
case AzureAppConfigurationConnectionMethod.OAuth:
@@ -258,6 +266,7 @@ export const TRANSITION_CONNECTION_CREDENTIALS_TO_PLATFORM: Record<
[AppConnection.AWS]: platformManagedCredentialsNotSupported,
[AppConnection.Databricks]: platformManagedCredentialsNotSupported,
[AppConnection.GitHub]: platformManagedCredentialsNotSupported,
[AppConnection.GitHubRadar]: platformManagedCredentialsNotSupported,
[AppConnection.GCP]: platformManagedCredentialsNotSupported,
[AppConnection.AzureKeyVault]: platformManagedCredentialsNotSupported,
[AppConnection.AzureAppConfiguration]: platformManagedCredentialsNotSupported,

View File

@@ -3,6 +3,7 @@ import { AppConnection, AppConnectionPlanType } from "./app-connection-enums";
export const APP_CONNECTION_NAME_MAP: Record<AppConnection, string> = {
[AppConnection.AWS]: "AWS",
[AppConnection.GitHub]: "GitHub",
[AppConnection.GitHubRadar]: "GitHub Radar",
[AppConnection.GCP]: "GCP",
[AppConnection.AzureKeyVault]: "Azure Key Vault",
[AppConnection.AzureAppConfiguration]: "Azure App Configuration",
@@ -27,6 +28,7 @@ export const APP_CONNECTION_NAME_MAP: Record<AppConnection, string> = {
export const APP_CONNECTION_PLAN_MAP: Record<AppConnection, AppConnectionPlanType> = {
[AppConnection.AWS]: AppConnectionPlanType.Regular,
[AppConnection.GitHub]: AppConnectionPlanType.Regular,
[AppConnection.GitHubRadar]: AppConnectionPlanType.Regular,
[AppConnection.GCP]: AppConnectionPlanType.Regular,
[AppConnection.AzureKeyVault]: AppConnectionPlanType.Regular,
[AppConnection.AzureAppConfiguration]: AppConnectionPlanType.Regular,

View File

@@ -19,6 +19,7 @@ import {
validateAppConnectionCredentials
} from "@app/services/app-connection/app-connection-fns";
import { auth0ConnectionService } from "@app/services/app-connection/auth0/auth0-connection-service";
import { githubRadarConnectionService } from "@app/services/app-connection/github-radar/github-radar-connection-service";
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
import { ValidateOnePassConnectionCredentialsSchema } from "./1password";
@@ -49,6 +50,7 @@ import { ValidateGcpConnectionCredentialsSchema } from "./gcp";
import { gcpConnectionService } from "./gcp/gcp-connection-service";
import { ValidateGitHubConnectionCredentialsSchema } from "./github";
import { githubConnectionService } from "./github/github-connection-service";
import { ValidateGitHubRadarConnectionCredentialsSchema } from "./github-radar";
import { ValidateHCVaultConnectionCredentialsSchema } from "./hc-vault";
import { hcVaultConnectionService } from "./hc-vault/hc-vault-connection-service";
import { ValidateHumanitecConnectionCredentialsSchema } from "./humanitec";
@@ -78,6 +80,7 @@ export type TAppConnectionServiceFactory = ReturnType<typeof appConnectionServic
const VALIDATE_APP_CONNECTION_CREDENTIALS_MAP: Record<AppConnection, TValidateAppConnectionCredentialsSchema> = {
[AppConnection.AWS]: ValidateAwsConnectionCredentialsSchema,
[AppConnection.GitHub]: ValidateGitHubConnectionCredentialsSchema,
[AppConnection.GitHubRadar]: ValidateGitHubRadarConnectionCredentialsSchema,
[AppConnection.GCP]: ValidateGcpConnectionCredentialsSchema,
[AppConnection.AzureKeyVault]: ValidateAzureKeyVaultConnectionCredentialsSchema,
[AppConnection.AzureAppConfiguration]: ValidateAzureAppConfigurationConnectionCredentialsSchema,
@@ -486,6 +489,7 @@ export const appConnectionServiceFactory = ({
connectAppConnectionById,
listAvailableAppConnectionsForUser,
github: githubConnectionService(connectAppConnectionById),
githubRadar: githubRadarConnectionService(connectAppConnectionById),
gcp: gcpConnectionService(connectAppConnectionById),
databricks: databricksConnectionService(connectAppConnectionById, appConnectionDAL, kmsService),
aws: awsConnectionService(connectAppConnectionById),

View File

@@ -69,6 +69,12 @@ import {
TGitHubConnectionInput,
TValidateGitHubConnectionCredentialsSchema
} from "./github";
import {
TGitHubRadarConnection,
TGitHubRadarConnectionConfig,
TGitHubRadarConnectionInput,
TValidateGitHubRadarConnectionCredentialsSchema
} from "./github-radar";
import {
THCVaultConnection,
THCVaultConnectionConfig,
@@ -122,6 +128,7 @@ import {
export type TAppConnection = { id: string } & (
| TAwsConnection
| TGitHubConnection
| TGitHubRadarConnection
| TGcpConnection
| TAzureKeyVaultConnection
| TAzureAppConfigurationConnection
@@ -150,6 +157,7 @@ export type TSqlConnection = TPostgresConnection | TMsSqlConnection | TMySqlConn
export type TAppConnectionInput = { id: string } & (
| TAwsConnectionInput
| TGitHubConnectionInput
| TGitHubRadarConnectionInput
| TGcpConnectionInput
| TAzureKeyVaultConnectionInput
| TAzureAppConfigurationConnectionInput
@@ -185,6 +193,7 @@ export type TUpdateAppConnectionDTO = Partial<Omit<TCreateAppConnectionDTO, "met
export type TAppConnectionConfig =
| TAwsConnectionConfig
| TGitHubConnectionConfig
| TGitHubRadarConnectionConfig
| TGcpConnectionConfig
| TAzureKeyVaultConnectionConfig
| TAzureAppConfigurationConnectionConfig
@@ -206,6 +215,7 @@ export type TAppConnectionConfig =
export type TValidateAppConnectionCredentialsSchema =
| TValidateAwsConnectionCredentialsSchema
| TValidateGitHubConnectionCredentialsSchema
| TValidateGitHubRadarConnectionCredentialsSchema
| TValidateGcpConnectionCredentialsSchema
| TValidateAzureKeyVaultConnectionCredentialsSchema
| TValidateAzureAppConfigurationConnectionCredentialsSchema

View File

@@ -0,0 +1,3 @@
// Supported auth methods for GitHub Radar connections (GitHub App install only).
export enum GitHubRadarConnectionMethod {
  App = "github-app"
}

View File

@@ -0,0 +1,166 @@
import { createAppAuth } from "@octokit/auth-app";
import { Octokit } from "@octokit/rest";
import { AxiosResponse } from "axios";
import { getConfig } from "@app/lib/config/env";
import { request } from "@app/lib/config/request";
import { BadRequestError, ForbiddenRequestError, InternalServerError } from "@app/lib/errors";
import { getAppConnectionMethodName } from "@app/services/app-connection/app-connection-fns";
import { IntegrationUrls } from "@app/services/integration-auth/integration-list";
import { AppConnection } from "../app-connection-enums";
import { GitHubRadarConnectionMethod } from "./github-radar-connection-enums";
import {
TGitHubRadarConnection,
TGitHubRadarConnectionConfig,
TGitHubRadarRepository
} from "./github-radar-connection-types";
// Describes the GitHub Radar option surfaced in the app-connection catalog.
export const getGitHubRadarConnectionListItem = () => {
  const appCfg = getConfig();

  return {
    name: "GitHub Radar" as const,
    app: AppConnection.GitHubRadar as const,
    methods: Object.values(GitHubRadarConnectionMethod) as [GitHubRadarConnectionMethod.App],
    // slug lets the frontend build the GitHub App installation URL
    appClientSlug: appCfg.INF_APP_CONNECTION_GITHUB_RADAR_APP_SLUG
  };
};
/**
 * Builds an Octokit client authenticated as the GitHub Radar App installation
 * referenced by the connection's credentials.
 *
 * @throws InternalServerError when the Radar app env vars are missing or the
 *   connection method is unrecognized.
 */
export const getGitHubRadarClient = (appConnection: TGitHubRadarConnection) => {
  const appCfg = getConfig();
  const { method, credentials } = appConnection;

  switch (method) {
    case GitHubRadarConnectionMethod.App: {
      if (!appCfg.INF_APP_CONNECTION_GITHUB_RADAR_APP_ID || !appCfg.INF_APP_CONNECTION_GITHUB_RADAR_APP_PRIVATE_KEY) {
        throw new InternalServerError({
          message: `GitHub ${getAppConnectionMethodName(method).replace(
            "GitHub",
            ""
          )} environment variables have not been configured`
        });
      }

      return new Octokit({
        authStrategy: createAppAuth,
        auth: {
          appId: appCfg.INF_APP_CONNECTION_GITHUB_RADAR_APP_ID,
          privateKey: appCfg.INF_APP_CONNECTION_GITHUB_RADAR_APP_PRIVATE_KEY,
          installationId: credentials.installationId
        }
      });
    }
    default:
      throw new InternalServerError({
        message: `Unhandled GitHub Radar connection method: ${method as GitHubRadarConnectionMethod}`
      });
  }
};
// Lists every repository accessible to the connection's GitHub App installation.
export const listGitHubRadarRepositories = async (appConnection: TGitHubRadarConnection) => {
  const octokit = getGitHubRadarClient(appConnection);

  // paginate walks all pages of the installation's repository listing
  const repositories: TGitHubRadarRepository[] = await octokit.paginate("GET /installation/repositories");

  return repositories;
};
// Shape of GitHub's OAuth access-token exchange response.
type TokenRespData = {
  access_token: string;
  scope: string;
  token_type: string;
  // populated instead of access_token when the exchange fails
  error?: string;
};
/**
 * Validates GitHub Radar connection credentials: exchanges the OAuth code for
 * an access token, then confirms the acting user has access to the supplied
 * GitHub App installation. Returns the credentials to persist (installation
 * ID only — the one-time code is discarded).
 *
 * Fixes:
 * - the access-token presence check now runs BEFORE the token is used as a
 *   Bearer header; previously a missing token produced a confusing upstream
 *   auth failure from the installations request instead of a clear error.
 * - the fallthrough error message now says "GitHub Radar" for consistency
 *   with getGitHubRadarClient.
 *
 * @throws InternalServerError when env vars are missing or the token is absent.
 * @throws BadRequestError when the token exchange fails.
 * @throws ForbiddenRequestError when the user lacks access to the installation.
 */
export const validateGitHubRadarConnectionCredentials = async (config: TGitHubRadarConnectionConfig) => {
  const { credentials, method } = config;

  const { INF_APP_CONNECTION_GITHUB_RADAR_APP_CLIENT_ID, INF_APP_CONNECTION_GITHUB_RADAR_APP_CLIENT_SECRET, SITE_URL } =
    getConfig();

  if (!INF_APP_CONNECTION_GITHUB_RADAR_APP_CLIENT_ID || !INF_APP_CONNECTION_GITHUB_RADAR_APP_CLIENT_SECRET) {
    throw new InternalServerError({
      message: `GitHub ${getAppConnectionMethodName(method).replace(
        "GitHub",
        ""
      )} environment variables have not been configured`
    });
  }

  let tokenResp: AxiosResponse<TokenRespData>;

  try {
    tokenResp = await request.get<TokenRespData>("https://github.com/login/oauth/access_token", {
      params: {
        client_id: INF_APP_CONNECTION_GITHUB_RADAR_APP_CLIENT_ID,
        client_secret: INF_APP_CONNECTION_GITHUB_RADAR_APP_CLIENT_SECRET,
        code: credentials.code,
        redirect_uri: `${SITE_URL}/organization/app-connections/github-radar/oauth/callback`
      },
      headers: {
        Accept: "application/json",
        "Accept-Encoding": "application/json"
      }
    });
  } catch (e: unknown) {
    // deliberately generic: do not leak upstream error details to the client
    throw new BadRequestError({
      message: `Unable to validate connection: verify credentials`
    });
  }

  if (tokenResp.status !== 200) {
    throw new BadRequestError({
      message: `Unable to validate credentials: GitHub responded with a status code of ${tokenResp.status} (${tokenResp.statusText}). Verify credentials and try again.`
    });
  }

  // must be checked before the token is used as a Bearer header below
  if (!tokenResp.data.access_token) {
    throw new InternalServerError({ message: `Missing access token: ${tokenResp.data.error}` });
  }

  if (method === GitHubRadarConnectionMethod.App) {
    const installationsResp = await request.get<{
      installations: {
        id: number;
        account: {
          login: string;
          type: string;
          id: number;
        };
      }[];
    }>(IntegrationUrls.GITHUB_USER_INSTALLATIONS, {
      headers: {
        Accept: "application/json",
        Authorization: `Bearer ${tokenResp.data.access_token}`,
        "Accept-Encoding": "application/json"
      }
    });

    const matchingInstallation = installationsResp.data.installations.find(
      (installation) => installation.id === +credentials.installationId
    );

    if (!matchingInstallation) {
      throw new ForbiddenRequestError({
        message: "User does not have access to the provided installation"
      });
    }
  }

  switch (method) {
    case GitHubRadarConnectionMethod.App:
      return {
        installationId: credentials.installationId
      };
    default:
      throw new InternalServerError({
        message: `Unhandled GitHub Radar connection method: ${method as GitHubRadarConnectionMethod}`
      });
  }
};

View File

@@ -0,0 +1,66 @@
import { z } from "zod";
import { AppConnections } from "@app/lib/api-docs";
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
import {
BaseAppConnectionSchema,
GenericCreateAppConnectionFieldsSchema,
GenericUpdateAppConnectionFieldsSchema
} from "@app/services/app-connection/app-connection-schemas";
import { GitHubRadarConnectionMethod } from "./github-radar-connection-enums";
// Credentials accepted on create/update: the one-time OAuth exchange code plus
// the GitHub App installation ID selected by the user.
export const GitHubRadarConnectionInputCredentialsSchema = z.object({
  code: z.string().trim().min(1, "GitHub Radar App code required"),
  installationId: z.string().min(1, "GitHub Radar App Installation ID required")
});

// Credentials persisted after validation — installation ID only; the one-time
// code is never stored.
export const GitHubRadarConnectionOutputCredentialsSchema = z.object({
  installationId: z.string()
});
// Validates incoming credentials per connection method (currently App only).
export const ValidateGitHubRadarConnectionCredentialsSchema = z.discriminatedUnion("method", [
  z.object({
    method: z
      .literal(GitHubRadarConnectionMethod.App)
      .describe(AppConnections.CREATE(AppConnection.GitHubRadar).method),
    credentials: GitHubRadarConnectionInputCredentialsSchema.describe(
      AppConnections.CREATE(AppConnection.GitHubRadar).credentials
    )
  })
]);

// Create payload = validated credentials + shared create fields (name, etc.).
export const CreateGitHubRadarConnectionSchema = ValidateGitHubRadarConnectionCredentialsSchema.and(
  GenericCreateAppConnectionFieldsSchema(AppConnection.GitHubRadar)
);

// Update payload may optionally re-supply credentials alongside shared fields.
export const UpdateGitHubRadarConnectionSchema = z
  .object({
    credentials: GitHubRadarConnectionInputCredentialsSchema.optional().describe(
      AppConnections.UPDATE(AppConnection.GitHubRadar).credentials
    )
  })
  .and(GenericUpdateAppConnectionFieldsSchema(AppConnection.GitHubRadar));
const BaseGitHubRadarConnectionSchema = BaseAppConnectionSchema.extend({ app: z.literal(AppConnection.GitHubRadar) });

// Full connection shape, including stored credentials.
export const GitHubRadarConnectionSchema = BaseGitHubRadarConnectionSchema.extend({
  method: z.literal(GitHubRadarConnectionMethod.App),
  credentials: GitHubRadarConnectionOutputCredentialsSchema
});

// API-safe shape: .pick({}) strips every credential field from responses.
export const SanitizedGitHubRadarConnectionSchema = z.discriminatedUnion("method", [
  BaseGitHubRadarConnectionSchema.extend({
    method: z.literal(GitHubRadarConnectionMethod.App),
    credentials: GitHubRadarConnectionOutputCredentialsSchema.pick({})
  })
]);

// Catalog entry describing the GitHub Radar connection option.
export const GitHubRadarConnectionListItemSchema = z.object({
  name: z.literal("GitHub Radar"),
  app: z.literal(AppConnection.GitHubRadar),
  // a tuple of method literals (z.tuple([z.literal(GitHubRadarConnectionMethod.App)]))
  // is preferable but currently breaks with our zod to json schema parser
  methods: z.nativeEnum(GitHubRadarConnectionMethod).array(),
  appClientSlug: z.string().optional()
});

View File

@@ -0,0 +1,24 @@
import { OrgServiceActor } from "@app/lib/types";
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
import { listGitHubRadarRepositories } from "@app/services/app-connection/github-radar/github-radar-connection-fns";
import { TGitHubRadarConnection } from "@app/services/app-connection/github-radar/github-radar-connection-types";
type TGetAppConnectionFunc = (
app: AppConnection,
connectionId: string,
actor: OrgServiceActor
) => Promise<TGitHubRadarConnection>;
export const githubRadarConnectionService = (getAppConnection: TGetAppConnectionFunc) => {
const listRepositories = async (connectionId: string, actor: OrgServiceActor) => {
const appConnection = await getAppConnection(AppConnection.GitHubRadar, connectionId, actor);
const repositories = await listGitHubRadarRepositories(appConnection);
return repositories.map((repo) => ({ id: repo.id, name: repo.full_name }));
};
return {
listRepositories
};
};

View File

@@ -0,0 +1,28 @@
import { z } from "zod";
import { DiscriminativePick } from "@app/lib/types";
import { AppConnection } from "../app-connection-enums";
import {
CreateGitHubRadarConnectionSchema,
GitHubRadarConnectionSchema,
ValidateGitHubRadarConnectionCredentialsSchema
} from "./github-radar-connection-schemas";
// Fully-populated connection shape (see GitHubRadarConnectionSchema).
export type TGitHubRadarConnection = z.infer<typeof GitHubRadarConnectionSchema>;
// Creation input, pinned to the GitHub Radar app discriminant.
export type TGitHubRadarConnectionInput = z.infer<typeof CreateGitHubRadarConnectionSchema> & {
app: AppConnection.GitHubRadar;
};
// Schema type alias used by the generic credential-validation plumbing.
export type TValidateGitHubRadarConnectionCredentialsSchema = typeof ValidateGitHubRadarConnectionCredentialsSchema;
// Minimal config needed to establish/validate a connection.
export type TGitHubRadarConnectionConfig = DiscriminativePick<
TGitHubRadarConnectionInput,
"method" | "app" | "credentials"
>;
// Subset of the GitHub repository API payload consumed by this module
// (snake_case `full_name` mirrors GitHub's response field).
export type TGitHubRadarRepository = {
id: number;
full_name: string;
};

View File

@@ -0,0 +1,4 @@
// Barrel file: re-exports the public surface of the GitHub Radar connection module.
export * from "./github-radar-connection-enums";
export * from "./github-radar-connection-fns";
export * from "./github-radar-connection-schemas";
export * from "./github-radar-connection-types";

View File

@@ -13,7 +13,12 @@ import { LdapConnectionMethod, LdapProvider } from "./ldap-connection-enums";
export const LdapConnectionSimpleBindCredentialsSchema = z.object({
provider: z.nativeEnum(LdapProvider).describe(AppConnections.CREDENTIALS.LDAP.provider),
url: z.string().trim().min(1, "URL required").regex(LdapUrlRegex).describe(AppConnections.CREDENTIALS.LDAP.url),
url: z
.string()
.trim()
.min(1, "URL required")
.refine((value) => LdapUrlRegex.test(value), "Invalid LDAP URL")
.describe(AppConnections.CREDENTIALS.LDAP.url),
dn: z
.string()
.trim()

View File

@@ -0,0 +1,67 @@
import { Button, Heading, Section, Text } from "@react-email/components";
import React from "react";
import { BaseEmailWrapper, BaseEmailWrapperProps } from "./BaseEmailWrapper";
// Props for the scan-failure notification email. Inherits wrapper props except
// those this template supplies itself (title/preview/children).
interface SecretScanningScanFailedTemplateProps extends Omit<BaseEmailWrapperProps, "title" | "preview" | "children"> {
dataSourceName: string; // data source whose scan failed
resourceName: string; // resource (e.g. repository) that was being scanned
projectName: string; // Infisical project the data source belongs to
timestamp: string; // pre-formatted display time of the failure
url: string; // deep link used by the "View in Infisical" button
errorMessage: string; // error text rendered in the details section
}
/**
 * Email template rendered when a secret-scanning run fails for a resource.
 * Shows the resource, data source, project, timestamp, and error message,
 * with a button linking back to the Infisical dashboard.
 */
export const SecretScanningScanFailedTemplate = ({
dataSourceName,
resourceName,
projectName,
siteUrl,
errorMessage,
url,
timestamp
}: SecretScanningScanFailedTemplateProps) => {
return (
<BaseEmailWrapper
title="Secret Scanning Failed"
preview="Infisical encountered an error while attempting to scan for secret leaks."
siteUrl={siteUrl}
>
<Heading className="text-black text-[18px] leading-[28px] text-center font-normal p-0 mx-0">
Infisical encountered an error while attempting to scan the resource <strong>{resourceName}</strong>
</Heading>
<Section className="px-[24px] mt-[36px] pt-[26px] pb-[4px] text-[14px] border border-solid border-gray-200 rounded-md bg-gray-50">
<strong>Resource</strong>
<Text className="text-[14px] mt-[4px]">{resourceName}</Text>
<strong>Data Source</strong>
<Text className="text-[14px] mt-[4px]">{dataSourceName}</Text>
<strong>Project</strong>
<Text className="text-[14px] mt-[4px]">{projectName}</Text>
<strong>Timestamp</strong>
<Text className="text-[14px] mt-[4px]">{timestamp}</Text>
<strong>Error</strong>
<Text className="text-[14px] text-red-500 mt-[4px]">{errorMessage}</Text>
</Section>
<Section className="text-center mt-[28px]">
<Button
href={url}
className="rounded-md p-3 px-[28px] my-[8px] text-center text-[16px] bg-[#EBF852] border-solid border border-[#d1e309] text-black font-medium"
>
View in Infisical
</Button>
</Section>
</BaseEmailWrapper>
);
};
export default SecretScanningScanFailedTemplate;

// Sample props for react-email previews. Using `satisfies` instead of an `as`
// cast keeps compile-time checking: a missing or excess property is reported
// rather than silently accepted by the assertion.
SecretScanningScanFailedTemplate.PreviewProps = {
  dataSourceName: "my-data-source",
  resourceName: "my-resource",
  projectName: "my-project",
  timestamp: "May 3rd 2025, 5:42 pm",
  url: "https://infisical.com",
  errorMessage: "401 Unauthorized",
  siteUrl: "https://infisical.com"
} satisfies SecretScanningScanFailedTemplateProps;

View File

@@ -0,0 +1,101 @@
import { Button, Heading, Link, Section, Text } from "@react-email/components";
import React from "react";
import { BaseEmailWrapper, BaseEmailWrapperProps } from "./BaseEmailWrapper";
// Props for the leaked-secrets alert email. Inherits wrapper props except
// those this template supplies itself (title/preview/children).
interface SecretScanningSecretsDetectedTemplateProps
extends Omit<BaseEmailWrapperProps, "title" | "preview" | "children"> {
numberOfSecrets: number; // count of leaked secrets detected
isDiffScan: boolean; // true when the findings came from a commit (diff) scan
authorName?: string; // commit pusher's display name, when known
authorEmail?: string; // commit pusher's email, when known
resourceName: string; // resource (e.g. repository) where secrets were found
url: string; // deep link to the findings in the Infisical dashboard
}
/**
 * Email template rendered when secret scanning detects one or more leaked
 * secrets. The body text varies by scan type: diff scans attribute the leak
 * to the pusher (name/email when available), while full scans reference the
 * resource itself. Includes remediation guidance and a link to the findings.
 */
export const SecretScanningSecretsDetectedTemplate = ({
numberOfSecrets,
siteUrl,
authorName,
authorEmail,
isDiffScan,
resourceName,
url
}: SecretScanningSecretsDetectedTemplateProps) => {
return (
<BaseEmailWrapper
title="Incident Alert: Secret(s) Leaked"
preview="Infisical uncovered one or more leaked secrets."
siteUrl={siteUrl}
>
<Heading className="text-black text-[18px] leading-[28px] text-center font-normal p-0 mx-0">
Infisical has uncovered <strong>{numberOfSecrets}</strong> secret(s)
{isDiffScan ? " from a recent commit to" : " in"} <strong>{resourceName}</strong>
</Heading>
<Section className="px-[24px] mt-[36px] pt-[8px] pb-[8px] text-[14px] border border-solid border-gray-200 rounded-md bg-gray-50">
<Text className="text-[14px]">
You are receiving this notification because one or more leaked secrets have been detected
{isDiffScan && " in a recent commit"}
{isDiffScan ? (
(authorName || authorEmail) && (
<>
{" "}
pushed by <strong>{authorName ?? "Unknown Pusher"}</strong>{" "}
{authorEmail && (
<>
(
<Link href={`mailto:${authorEmail}`} className="text-slate-700 no-underline">
{authorEmail}
</Link>
)
</>
)}
</>
)
) : (
<>
{" "}
in your resource <strong>{resourceName}</strong>
</>
)}
.
</Text>
<Text className="text-[14px]">
If these are test secrets, please add `infisical-scan:ignore` at the end of the line containing the secret as
a comment in the given programming language. This will prevent future notifications from being sent out for
these secrets.
</Text>
<Text className="text-[14px] text-red-500">
If these are production secrets, please rotate them immediately.
</Text>
<Text className="text-[14px]">
Once you have taken action, be sure to update the finding status in the{" "}
<Link href={url} className="text-slate-700 no-underline">
Infisical Dashboard
</Link>
.
</Text>
</Section>
<Section className="text-center mt-[28px]">
<Button
href={url}
className="rounded-md p-3 px-[28px] my-[8px] text-center text-[16px] bg-[#EBF852] border-solid border border-[#d1e309] text-black font-medium"
>
View Leaked Secrets
</Button>
</Section>
</BaseEmailWrapper>
);
};
export default SecretScanningSecretsDetectedTemplate;

// Sample props for react-email previews. Using `satisfies` instead of an `as`
// cast keeps compile-time checking: a missing or excess property is reported
// rather than silently accepted by the assertion.
SecretScanningSecretsDetectedTemplate.PreviewProps = {
  authorName: "Jim",
  authorEmail: "jim@infisical.com",
  resourceName: "my-resource",
  numberOfSecrets: 3,
  url: "https://infisical.com",
  isDiffScan: true,
  siteUrl: "https://infisical.com"
} satisfies SecretScanningSecretsDetectedTemplateProps;

View File

@@ -21,6 +21,8 @@ export * from "./SecretLeakIncidentTemplate";
export * from "./SecretReminderTemplate";
export * from "./SecretRequestCompletedTemplate";
export * from "./SecretRotationFailedTemplate";
export * from "./SecretScanningScanFailedTemplate";
export * from "./SecretScanningSecretsDetectedTemplate";
export * from "./SecretSyncFailedTemplate";
export * from "./ServiceTokenExpiryNoticeTemplate";
export * from "./SignupEmailVerificationTemplate";

View File

@@ -30,6 +30,8 @@ import {
SecretReminderTemplate,
SecretRequestCompletedTemplate,
SecretRotationFailedTemplate,
SecretScanningScanFailedTemplate,
SecretScanningSecretsDetectedTemplate,
SecretSyncFailedTemplate,
ServiceTokenExpiryNoticeTemplate,
SignupEmailVerificationTemplate,
@@ -73,7 +75,9 @@ export enum SmtpTemplates {
ProjectAccessRequest = "projectAccess",
OrgAdminProjectDirectAccess = "orgAdminProjectGrantAccess",
OrgAdminBreakglassAccess = "orgAdminBreakglassAccess",
ServiceTokenExpired = "serviceTokenExpired"
ServiceTokenExpired = "serviceTokenExpired",
SecretScanningV2ScanFailed = "secretScanningV2ScanFailed",
SecretScanningV2SecretsDetected = "secretScanningV2SecretsDetected"
}
export enum SmtpHost {
@@ -113,7 +117,9 @@ const EmailTemplateMap: Record<SmtpTemplates, React.FC<any>> = {
[SmtpTemplates.SecretApprovalRequestNeedsReview]: SecretApprovalRequestNeedsReviewTemplate,
[SmtpTemplates.ResetPassword]: PasswordResetTemplate,
[SmtpTemplates.SetupPassword]: PasswordSetupTemplate,
[SmtpTemplates.PkiExpirationAlert]: PkiExpirationAlertTemplate
[SmtpTemplates.PkiExpirationAlert]: PkiExpirationAlertTemplate,
[SmtpTemplates.SecretScanningV2ScanFailed]: SecretScanningScanFailedTemplate,
[SmtpTemplates.SecretScanningV2SecretsDetected]: SecretScanningSecretsDetectedTemplate
};
export const smtpServiceFactory = (cfg: TSmtpConfig) => {

View File

@@ -0,0 +1,4 @@
---
title: "Available"
openapi: "GET /api/v1/app-connections/github-radar/available"
---

View File

@@ -0,0 +1,10 @@
---
title: "Create"
openapi: "POST /api/v1/app-connections/github-radar"
---
<Note>
GitHub Radar Connections must be created through the Infisical UI.
Check out the configuration docs for [GitHub Radar Connections](/integrations/app-connections/github-radar) for a step-by-step
guide.
</Note>

View File

@@ -0,0 +1,4 @@
---
title: "Delete"
openapi: "DELETE /api/v1/app-connections/github-radar/{connectionId}"
---

View File

@@ -0,0 +1,4 @@
---
title: "Get by ID"
openapi: "GET /api/v1/app-connections/github-radar/{connectionId}"
---

View File

@@ -0,0 +1,4 @@
---
title: "Get by Name"
openapi: "GET /api/v1/app-connections/github-radar/connection-name/{connectionName}"
---

View File

@@ -0,0 +1,4 @@
---
title: "List"
openapi: "GET /api/v1/app-connections/github-radar"
---

View File

@@ -0,0 +1,10 @@
---
title: "Update"
openapi: "PATCH /api/v1/app-connections/github-radar/{connectionId}"
---
<Note>
GitHub Radar Connections must be updated through the Infisical UI.
Check out the configuration docs for [GitHub Radar Connections](/integrations/app-connections/github-radar) for a step-by-step
guide.
</Note>

View File

@@ -0,0 +1,4 @@
---
title: "Get by Project ID"
openapi: "GET /api/v2/secret-scanning/configs"
---

View File

@@ -0,0 +1,8 @@
---
title: "Update"
openapi: "PATCH /api/v2/secret-scanning/configs"
---
<Note>
Check out the [Configuration Docs](/documentation/platform/secret-scanning/overview#configuration) for an in-depth guide on custom configurations.
</Note>

View File

@@ -0,0 +1,4 @@
---
title: "Create"
openapi: "POST /api/v2/secret-scanning/data-sources/github"
---

View File

@@ -0,0 +1,4 @@
---
title: "Delete"
openapi: "DELETE /api/v2/secret-scanning/data-sources/github/{dataSourceId}"
---

View File

@@ -0,0 +1,4 @@
---
title: "Get by ID"
openapi: "GET /api/v2/secret-scanning/data-sources/github/{dataSourceId}"
---

View File

@@ -0,0 +1,4 @@
---
title: "Get by Name"
openapi: "GET /api/v2/secret-scanning/data-sources/github/data-source-name/{dataSourceName}"
---

View File

@@ -0,0 +1,4 @@
---
title: "List Resources"
openapi: "GET /api/v2/secret-scanning/data-sources/github/{dataSourceId}/resources"
---

View File

@@ -0,0 +1,4 @@
---
title: "List Scans"
openapi: "GET /api/v2/secret-scanning/data-sources/github/{dataSourceId}/scans"
---

View File

@@ -0,0 +1,4 @@
---
title: "List"
openapi: "GET /api/v2/secret-scanning/data-sources/github"
---

View File

@@ -0,0 +1,4 @@
---
title: "Scan Resource"
openapi: "POST /api/v2/secret-scanning/data-sources/github/{dataSourceId}/resources/{resourceId}/scan"
---

View File

@@ -0,0 +1,4 @@
---
title: "Scan"
openapi: "POST /api/v2/secret-scanning/data-sources/github/{dataSourceId}/scan"
---

View File

@@ -0,0 +1,4 @@
---
title: "Update"
openapi: "PATCH /api/v2/secret-scanning/data-sources/github/{dataSourceId}"
---

View File

@@ -0,0 +1,4 @@
---
title: "List"
openapi: "GET /api/v2/secret-scanning/data-sources"
---

View File

@@ -0,0 +1,4 @@
---
title: "Options"
openapi: "GET /api/v2/secret-scanning/data-sources/options"
---

View File

@@ -0,0 +1,4 @@
---
title: "List"
openapi: "GET /api/v2/secret-scanning/findings"
---

View File

@@ -0,0 +1,4 @@
---
title: "Update"
openapi: "PATCH /api/v2/secret-scanning/findings/{findingId}"
---

View File

@@ -0,0 +1,134 @@
---
title: "Vertica"
description: "Learn how to dynamically generate Vertica database users."
---
The Infisical Vertica dynamic secret allows you to generate Vertica database credentials on demand based on a configured role.
## Prerequisite
Create a user with the required permission in your Vertica instance. This user will be used to create new accounts on-demand.
## Set up Dynamic Secrets with Vertica
<Steps>
<Step title="Open Secret Overview Dashboard">
Open the Secret Overview dashboard and select the environment in which you would like to add a dynamic secret.
</Step>
<Step title="Click on the 'Add Dynamic Secret' button">
![Add Dynamic Secret Button](../../../images/platform/dynamic-secrets/add-dynamic-secret-button.png)
</Step>
<Step title="Select `Vertica`">
![Dynamic Secret Modal](../../../images/platform/dynamic-secrets/vertica/dynamic-secret-modal-vertica.png)
</Step>
<Step title="Provide the inputs for dynamic secret parameters">
<ParamField path="Secret Name" type="string" required>
Name by which you want the secret to be referenced
</ParamField>
<ParamField path="Default TTL" type="string" required>
Default time-to-live for a generated secret (it is possible to modify this value after a secret is generated)
</ParamField>
<ParamField path="Max TTL" type="string" required>
Maximum time-to-live for a generated secret
</ParamField>
<ParamField path="Gateway" type="string">
Select a gateway for private cluster access. If not specified, the Internet Gateway will be used.
</ParamField>
<ParamField path="Host" type="string" required>
Vertica database host
</ParamField>
<ParamField path="Port" type="number" required>
Vertica database port (default: 5433)
</ParamField>
<ParamField path="Database" type="string" required>
Name of the Vertica database for which you want to create dynamic secrets
</ParamField>
<ParamField path="User" type="string" required>
Username that will be used to create dynamic secrets
</ParamField>
<ParamField path="Password" type="string" required>
Password that will be used to create dynamic secrets
</ParamField>
![Dynamic Secret Setup Modal](../../../images/platform/dynamic-secrets/vertica/dynamic-secret-setup-modal-vertica.png)
</Step>
<Step title="(Optional) Modify SQL Statements">
![Modify SQL Statements Modal](../../../images/platform/dynamic-secrets/vertica/modify-sql-statements-vertica.png)
<ParamField path="Username Template" type="string" default="{{randomUsername}}">
Specifies a template for generating usernames. This field allows customization of how usernames are automatically created.
Allowed template variables are
- `{{randomUsername}}`: Random username string
- `{{unixTimestamp}}`: Current Unix timestamp
</ParamField>
<ParamField path="Creation Statement" type="string">
Customize the SQL statement used to create new users. Default creates a user with basic schema permissions.
</ParamField>
<ParamField path="Revocation Statement" type="string">
Customize the SQL statement used to revoke users. Default revokes a user.
</ParamField>
</Step>
<Step title="(Optional) Configure Password Requirements">
<ParamField path="Password Length" type="number" default="48">
Length of generated passwords (1-250 characters)
</ParamField>
<ParamField path="Character Requirements" type="object">
Minimum required character counts:
- **Lowercase Count**: Minimum lowercase letters (default: 1)
- **Uppercase Count**: Minimum uppercase letters (default: 1)
- **Digit Count**: Minimum digits (default: 1)
- **Symbol Count**: Minimum symbols (default: 0)
</ParamField>
<ParamField path="Allowed Symbols" type="string" default="-_.~!*">
Symbols allowed in generated passwords
</ParamField>
</Step>
<Step title="Click 'Submit'">
After submitting the form, you will see a dynamic secret created in the dashboard.
![Dynamic Secret](../../../images/platform/dynamic-secrets/vertica/dynamic-secret-vertica.png)
</Step>
<Step title="Generate dynamic secrets">
Once you've successfully configured the dynamic secret, you're ready to generate on-demand credentials.
To do this, simply click on the 'Generate' button which appears when hovering over the dynamic secret item.
Alternatively, you can initiate the creation of a new lease by selecting 'New Lease' from the dynamic secret lease list section.
![Dynamic Secret](/images/platform/dynamic-secrets/dynamic-secret-generate.png)
![Dynamic Secret](/images/platform/dynamic-secrets/dynamic-secret-lease-empty.png)
When generating these secrets, it's important to specify a Time-to-Live (TTL) duration. This will dictate how long the credentials are valid for.
![Provision Lease](/images/platform/dynamic-secrets/provision-lease.png)
<Tip>
Ensure that the TTL for the lease falls within the maximum TTL defined when configuring the dynamic secret.
</Tip>
Once you click the `Submit` button, a new secret lease will be generated and the credentials for it will be shown to you.
![Provision Lease](/images/platform/dynamic-secrets/lease-values.png)
</Step>
</Steps>
## Audit or Revoke Leases
Once you have created one or more leases, you will be able to access them by clicking on the respective dynamic secret item on the dashboard.
This will allow you to see the expiration time of the lease or delete the lease before its set time to live.
![Provision Lease](/images/platform/dynamic-secrets/lease-data.png)
## Renew Leases
To extend the life of the generated dynamic secret leases past its initial time to live, simply click on the **Renew** button as illustrated below.
![Provision Lease](/images/platform/dynamic-secrets/dynamic-secret-lease-renew.png)
<Warning>
Lease renewals cannot exceed the maximum TTL set when configuring the dynamic secret
</Warning>

View File

@@ -89,22 +89,3 @@ The relay system provides secure tunneling:
- Gateways only accept connections to approved resources
- Each connection requires explicit project authorization
- Resources remain private to their assigned organization
## Security Measures
### Certificate Lifecycle
- Certificates have limited validity periods
- Automatic certificate rotation
- Immediate certificate revocation capabilities
### Monitoring and Verification
1. **Continuous Verification**:
- Regular heartbeat checks
- Certificate chain validation
- Connection state monitoring
2. **Security Controls**:
- Automatic connection termination on verification failure
- Audit logging of all access attempts
- Machine identity based authentication

View File

@@ -0,0 +1,168 @@
---
title: "Networking"
description: "Network configuration and firewall requirements for Infisical Gateway"
---
The Infisical Gateway requires outbound network connectivity to establish secure communication with Infisical's relay infrastructure.
This page outlines the required ports, protocols, and firewall configurations needed for optimal gateway usage.
## Network Architecture
The gateway uses a relay-based architecture to establish secure connections:
1. **Gateway** connects outbound to **Relay Servers** using UDP/QUIC protocol
2. **Relay Servers** facilitate secure communication between Gateway and Infisical Cloud
3. All traffic is end-to-end encrypted using mutual TLS over QUIC
## Required Network Connectivity
### Outbound Connections (Required)
The gateway requires the following outbound connectivity:
| Protocol | Destination | Ports | Purpose |
|----------|-------------|-------|---------|
| UDP | Relay Servers | 49152-65535 | Allocated relay communication (TLS) |
| TCP | app.infisical.com / eu.infisical.com | 443 | API communication and relay allocation |
### Relay Server IP Addresses
Your firewall must allow outbound connectivity to the following Infisical relay servers on dynamically allocated ports.
<Tabs>
<Tab title="Infisical cloud (US)">
```
54.235.197.91:49152-65535
18.215.196.229:49152-65535
3.222.120.233:49152-65535
34.196.115.157:49152-65535
```
</Tab>
<Tab title="Infisical cloud (EU)">
```
3.125.237.40:49152-65535
52.28.157.98:49152-65535
3.125.176.90:49152-65535
```
</Tab>
<Tab title="Infisical dedicated">
Please contact your Infisical account manager for dedicated relay server IP addresses.
</Tab>
</Tabs>
<Warning>
These IP addresses are static and managed by Infisical. Any changes will be communicated with 60-day advance notice.
</Warning>
## Protocol Details
### QUIC over UDP
The gateway uses QUIC (Quick UDP Internet Connections) for primary communication:
- **Port 5349**: STUN/TURN over TLS (secure relay communication)
- **Built-in features**: Connection migration, multiplexing, reduced latency
- **Encryption**: TLS 1.3 with certificate pinning
## Understanding Firewall Behavior with UDP
Unlike TCP connections, UDP is a stateless protocol, and depending on your organization's firewall configuration, you may need to adjust network rules accordingly.
When the gateway sends UDP packets to a relay server, the return responses need to be allowed back through the firewall.
Modern firewalls handle this through "connection tracking" (also called "stateful inspection"), but the behavior can vary depending on your firewall configuration.
### Connection Tracking
Modern firewalls automatically track UDP connections and allow return responses. This is the preferred configuration as it:
- Automatically handles return responses
- Reduces firewall rule complexity
- Avoids the need for manual IP whitelisting
If your firewall does not support connection tracking, you will need to whitelist the relay IPs so that return traffic is explicitly allowed.
## Common Network Scenarios
### Corporate Firewalls
For corporate environments with strict egress filtering:
1. **Whitelist relay IP addresses** (listed above)
2. **Allow UDP port 5349** outbound
3. **Configure connection tracking** for UDP return traffic
4. **Allow ephemeral port range** 49152-65535 for return traffic if connection tracking is disabled
### Cloud Environments (AWS/GCP/Azure)
Configure security groups to allow:
- **Outbound UDP** to relay IPs on port 5349
- **Outbound HTTPS** to app.infisical.com/eu.infisical.com on port 443
- **Inbound UDP** on ephemeral ports (if not using stateful rules)
## Frequently Asked Questions
<Accordion title="What happens if there is a network interruption?">
The gateway is designed to handle network interruptions gracefully:
- **Automatic reconnection**: The gateway will automatically attempt to reconnect to relay servers every 5 seconds if the connection is lost
- **Connection retry logic**: Built-in retry mechanisms handle temporary network outages without manual intervention
- **Multiple relay servers**: If one relay server is unavailable, the gateway can connect to alternative relay servers
- **Persistent sessions**: Existing connections are maintained where possible during brief network interruptions
- **Graceful degradation**: The gateway logs connection issues and continues attempting to restore connectivity
No manual intervention is typically required during network interruptions.
</Accordion>
<Accordion title="Why does the gateway use QUIC instead of TCP?">
QUIC (Quick UDP Internet Connections) provides several advantages over traditional TCP for gateway communication:
- **Faster connection establishment**: QUIC combines transport and security handshakes, reducing connection setup time
- **Built-in encryption**: TLS 1.3 is integrated into the protocol, ensuring all traffic is encrypted by default
- **Connection migration**: QUIC connections can survive IP address changes (useful for NAT rebinding)
- **Reduced head-of-line blocking**: Multiple data streams can be multiplexed without blocking each other
- **Better performance over unreliable networks**: Advanced congestion control and packet loss recovery
- **Lower latency**: Optimized for real-time communication between gateway and cloud services
While TCP is stateful and easier for firewalls to track, QUIC's performance benefits outweigh the additional firewall configuration requirements.
</Accordion>
<Accordion title="Do I need to open any inbound ports on my firewall?">
No inbound ports need to be opened. The gateway only makes outbound connections:
- **Outbound UDP** to relay servers on ports 49152-65535
- **Outbound HTTPS** to Infisical API endpoints
- **Return responses** are handled by connection tracking or explicit IP whitelisting
This design maintains security by avoiding the need for inbound firewall rules that could expose your network to external threats.
</Accordion>
<Accordion title="What if my firewall blocks the required UDP ports?">
If your firewall has strict UDP restrictions:
1. **Work with your network team** to allow outbound UDP to the specific relay IP addresses
2. **Use explicit IP whitelisting** if connection tracking is disabled
3. **Consider network policy exceptions** for the gateway host
4. **Monitor firewall logs** to identify which specific rules are blocking traffic
The gateway requires UDP connectivity to function - TCP-only configurations are not supported.
</Accordion>
<Accordion title="How many relay servers does the gateway connect to?">
The gateway connects to **one relay server at a time**:
- **Single active connection**: Only one relay connection is established per gateway instance
- **Automatic failover**: If the current relay becomes unavailable, the gateway will connect to an alternative relay
- **Load distribution**: Different gateway instances may connect to different relay servers for load balancing
- **No manual selection**: The Infisical API automatically assigns the optimal relay server based on availability and proximity
You should whitelist all relay IP addresses to ensure proper failover functionality.
</Accordion>
<Accordion title="Can the relay servers decrypt traffic going through them?">
No, relay servers cannot decrypt any traffic passing through them:
- **End-to-end encryption**: All traffic between the gateway and Infisical Cloud is encrypted using mutual TLS with certificate pinning
- **Relay acts as a tunnel**: The relay server only forwards encrypted packets - it has no access to encryption keys
- **No data storage**: Relay servers do not store any traffic or network-identifiable information
- **Certificate isolation**: Each organization has its own private PKI system, ensuring complete tenant isolation
The relay infrastructure is designed as a secure forwarding mechanism, similar to a VPN tunnel, where the relay provider cannot see the contents of the traffic flowing through it.
</Accordion>

View File

@@ -32,7 +32,7 @@ For detailed installation instructions, refer to the Infisical [CLI Installation
To function, the Gateway must authenticate with Infisical. This requires a machine identity configured with the appropriate permissions to create and manage a Gateway.
Once authenticated, the Gateway establishes a secure connection with Infisical to allow your private resources to be reachable.
### Deployment process
### Get started
<Steps>
<Step title="Create a Gateway Identity">

View File

@@ -0,0 +1,177 @@
---
title: SPIFFE/SPIRE
description: "Learn how to authenticate SPIRE workloads with Infisical using OpenID Connect (OIDC)."
---
**OIDC Auth** is a platform-agnostic JWT-based authentication method that can be used to authenticate from any platform or environment using an identity provider with OpenID Connect.
## Diagram
The following sequence diagram illustrates the OIDC Auth workflow for authenticating SPIRE workloads with Infisical.
```mermaid
sequenceDiagram
participant Client as SPIRE Workload
participant Agent as SPIRE Agent
participant Server as SPIRE Server
participant Infis as Infisical
Client->>Agent: Step 1: Request JWT-SVID
Agent->>Server: Validate workload and fetch signing key
Server-->>Agent: Return signing material
Agent-->>Client: Return JWT-SVID with verifiable claims
Note over Client,Infis: Step 2: Login Operation
Client->>Infis: Send JWT-SVID to /api/v1/auth/oidc-auth/login
Note over Infis,Server: Step 3: Query verification
Infis->>Server: Request JWT public key using OIDC Discovery
Server-->>Infis: Return public key
Note over Infis: Step 4: JWT validation
Infis->>Client: Return short-lived access token
Note over Client,Infis: Step 5: Access Infisical API with Token
Client->>Infis: Make authenticated requests using the short-lived access token
```
## Concept
At a high-level, Infisical authenticates a SPIRE workload by verifying the JWT-SVID and checking that it meets specific requirements (e.g. it is issued by a trusted SPIRE server) at the `/api/v1/auth/oidc-auth/login` endpoint. If successful,
then Infisical returns a short-lived access token that can be used to make authenticated requests to the Infisical API.
To be more specific:
1. The SPIRE workload requests a JWT-SVID from the local SPIRE Agent.
2. The SPIRE Agent validates the workload's identity and requests signing material from the SPIRE Server.
3. The SPIRE Agent returns a JWT-SVID containing the workload's SPIFFE ID and other claims.
4. The JWT-SVID is sent to Infisical at the `/api/v1/auth/oidc-auth/login` endpoint.
5. Infisical fetches the public key that was used to sign the JWT-SVID from the SPIRE Server using OIDC Discovery.
6. Infisical validates the JWT-SVID using the public key provided by the SPIRE Server and checks that the subject, audience, and claims of the token match the set criteria.
7. If all is well, Infisical returns a short-lived access token that the workload can use to make authenticated requests to the Infisical API.
<Note>Infisical needs network-level access to the SPIRE Server's OIDC Discovery endpoint.</Note>
## Prerequisites
Before following this guide, ensure you have:
- A running SPIRE deployment with both SPIRE Server and SPIRE Agent configured
- OIDC Discovery Provider deployed alongside your SPIRE Server
- Workload registration entries created in SPIRE for the workloads that need to access Infisical
- Network connectivity between Infisical and your OIDC Discovery Provider endpoint
For detailed SPIRE setup instructions, refer to the [SPIRE documentation](https://spiffe.io/docs/latest/spire-about/).
## OIDC Discovery Provider Setup
To enable JWT-SVID verification with Infisical, you need to deploy the OIDC Discovery Provider alongside your SPIRE Server. The OIDC Discovery Provider runs as a separate service that exposes the necessary OIDC endpoints.
In Kubernetes deployments, this is typically done by adding an `oidc-discovery-provider` container to your SPIRE Server StatefulSet:
```yaml
- name: spire-oidc
image: ghcr.io/spiffe/oidc-discovery-provider:1.12.2
args:
- -config
- /run/spire/oidc/config/oidc-discovery-provider.conf
ports:
- containerPort: 443
name: spire-oidc-port
```
The OIDC Discovery Provider will expose the OIDC Discovery endpoint at `https://<spire-oidc-host>/.well-known/openid-configuration`, which Infisical will use to fetch the public keys for JWT-SVID verification.
<Note>For detailed setup instructions, refer to the [SPIRE OIDC Discovery Provider documentation](https://github.com/spiffe/spire/tree/main/support/oidc-discovery-provider).</Note>
## Guide
In the following steps, we explore how to create and use identities to access the Infisical API using the OIDC Auth authentication method with SPIFFE/SPIRE.
<Steps>
<Step title="Creating an identity">
To create an identity, head to your Organization Settings > Access Control > Identities and press **Create identity**.
![identities organization](/images/platform/identities/identities-org.png)
When creating an identity, you specify an organization level [role](/documentation/platform/role-based-access-controls) for it to assume; you can configure roles in Organization Settings > Access Control > Organization Roles.
![identities organization create](/images/platform/identities/identities-org-create.png)
Now input a few details for your new identity. Here's some guidance for each field:
- Name (required): A friendly name for the identity.
- Role (required): A role from the **Organization Roles** tab for the identity to assume. The organization role assigned will determine what organization level resources this identity can have access to.
Once you've created an identity, you'll be redirected to a page where you can manage the identity.
![identities page](/images/platform/identities/identities-page.png)
Since the identity has been configured with Universal Auth by default, you should re-configure it to use OIDC Auth instead. To do this, press to edit the **Authentication** section,
remove the existing Universal Auth configuration, and add a new OIDC Auth configuration onto the identity.
![identities page remove default auth](/images/platform/identities/identities-page-remove-default-auth.png)
![identities create oidc auth method](/images/platform/identities/identities-org-create-oidc-auth-method.png)
<Warning>Restrict access by configuring the Subject, Audiences, and Claims fields</Warning>
Here's some more guidance on each field:
- OIDC Discovery URL: The URL used to retrieve the OpenID Connect configuration from the SPIRE Server. This will be used to fetch the public key needed for verifying the provided JWT-SVID. This should be set to your SPIRE Server's OIDC Discovery endpoint, typically `https://<spire-server-host>:<port>/.well-known/openid-configuration`
- Issuer: The unique identifier of the SPIRE Server issuing the JWT-SVID. This value is used to verify the iss (issuer) claim in the JWT-SVID to ensure the token is issued by a trusted SPIRE Server. This should match your SPIRE Server's configured issuer, typically `https://<spire-server-host>:<port>`
- CA Certificate: The PEM-encoded CA certificate for establishing secure communication with the SPIRE Server endpoints. This should contain the CA certificate that signed your SPIRE Server's TLS certificate.
- Subject: The expected SPIFFE ID that is the subject of the JWT-SVID. The format of the sub field for SPIRE JWT-SVIDs follows the SPIFFE ID format: `spiffe://<trust-domain>/<workload-path>`. For example: `spiffe://example.org/workload/api-server`
- Audiences: A list of intended recipients for the JWT-SVID. This value is checked against the aud (audience) claim in the token. When workloads request JWT-SVIDs from SPIRE, they specify an audience (e.g., `infisical` or your service name). Configure this to match what your workloads use.
- Claims: Additional information or attributes that should be present in the JWT-SVID for it to be valid. Standard SPIRE JWT-SVID claims include `sub` (SPIFFE ID), `aud` (audience), `exp` (expiration), and `iat` (issued at). You can also configure custom claims if your SPIRE Server includes additional metadata.
- Access Token TTL (default is `2592000` equivalent to 30 days): The lifetime for an access token in seconds. This value will be referenced at renewal time.
- Access Token Max TTL (default is `2592000` equivalent to 30 days): The maximum lifetime for an access token in seconds. This value will be referenced at renewal time.
- Access Token Max Number of Uses (default is `0`): The maximum number of times that an access token can be used; a value of `0` implies an infinite number of uses.
- Access Token Trusted IPs: The IPs or CIDR ranges that access tokens can be used from. By default, each token is given the `0.0.0.0/0`, allowing usage from any network address.
<Tip>SPIRE JWT-SVIDs contain standard claims like `sub` (SPIFFE ID), `aud` (audience), `exp`, and `iat`. The audience is typically specified when requesting the JWT-SVID (e.g., `spire-agent api fetch jwt -audience infisical`).</Tip>
<Info>The `subject`, `audiences`, and `claims` fields support glob pattern matching; however, we highly recommend using hardcoded SPIFFE IDs whenever possible for better security.</Info>
</Step>
<Step title="Adding an identity to a project">
To enable the identity to access project-level resources such as secrets within a specific project, you should add it to that project.
To do this, head over to the project you want to add the identity to and go to Project Settings > Access Control > Machine Identities and press **Add identity**.
Next, select the identity you want to add to the project and the project level role you want to allow it to assume. The project role assigned will determine what project level resources this identity can have access to.
![identities project](/images/platform/identities/identities-project.png)
![identities project create](/images/platform/identities/identities-project-create.png)
</Step>
<Step title="Using JWT-SVID to authenticate with Infisical">
Here's an example of how a workload can use its JWT-SVID to authenticate with Infisical and retrieve secrets:
```bash
#!/bin/bash
# Obtain JWT-SVID from SPIRE Agent
JWT_SVID=$(spire-agent api fetch jwt -audience infisical -socketPath /run/spire/sockets/agent.sock | grep -A1 "token(" | tail -1)
# Authenticate with Infisical using the JWT-SVID
ACCESS_TOKEN=$(curl -s -X POST \
-H "Content-Type: application/json" \
-d "{\"identityId\":\"<your-identity-id>\",\"jwt\":\"$JWT_SVID\"}" \
https://app.infisical.com/api/v1/auth/oidc-auth/login | jq -r '.accessToken')
# Use the access token to retrieve secrets
curl -s -H "Authorization: Bearer $ACCESS_TOKEN" \
"https://app.infisical.com/api/v3/secrets/raw?workspaceSlug=<project-slug>&environment=<env-slug>&secretPath=/"
```
<Note>
Each identity access token has a time-to-live (TTL) which you can infer from the response of the login operation;
the default TTL is `2592000` seconds (30 days), which can be adjusted.
If an identity access token expires, it can no longer authenticate with the Infisical API. In this case,
a new access token should be obtained by performing another login operation.
</Note>
<Tip>
JWT-SVIDs from SPIRE have their own expiration time (typically short-lived). Ensure your application handles both JWT-SVID renewal from SPIRE and access token renewal from Infisical appropriately.
</Tip>
</Step>
</Steps>

Some files were not shown because too many files have changed in this diff Show More