Mirror of https://github.com/Infisical/infisical.git
Synced 2025-07-31 10:38:12 +00:00
Compare commits
93 Commits
misc/updat...project-gr
SHA1
ea5a5e0aa7
4c22024d13
ce170a6a47
cb8e36ae15
16ce1f441e
8043b61c9f
d374ff2093
eb7c533261
9a935c9177
9d24eb15dc
ed4882dfac
7acd7fd522
2148b636f5
e40b4a0a4b
311bf8b515
78c4c3e847
b8aa36be99
594445814a
a467b13069
c425c03939
9cc17452fa
93ba6f7b58
0fcb66e9ab
135f425fcf
9c149cb4bf
ce45c1a43d
1a14c71564
e7fe2ea51e
caa129b565
30d7e63a67
a4c21d85ac
c34a139b19
f2a55da9b6
a3584d6a8a
36f1559e5e
07902f7db9
6fddecdf82
99e2c85f8f
6e1504dc73
07d930f608
1101707d8b
696bbcb072
54435d0ad9
6c52847dec
698260cba6
caeda09b21
1201baf35c
5d5f843a9f
caca23b56c
01ea22f167
83c53b9d5a
8cc457d49a
540374f543
4edb90d644
1a7151aba7
80d2d9d2cf
4268fdea44
781965767d
fef7e43869
9e651a58e3
0fbf8efd3a
dcb77bbdd4
36f7e7d81b
8f97b3ad87
be80444ec2
6f2043dc26
6ae7b5e996
95fcf560a5
d8ee05bfba
400157a468
274952544f
d23beaedf1
817e762e6b
ce5712606f
ce67e5f137
440c45fd42
893a042c25
f3fb65fcc3
c0add863be
98ab969356
d4523b0ca4
2be8c47ae8
8730d14104
d924580599
22f32e060b
b4f26aac25
b634a6c371
080ae5ce6f
dfcf613023
3ae2ec1f51
ce4e35e908
4773336a04
e6c97510ca
@@ -107,6 +107,14 @@ INF_APP_CONNECTION_GITHUB_APP_PRIVATE_KEY=
 INF_APP_CONNECTION_GITHUB_APP_SLUG=
 INF_APP_CONNECTION_GITHUB_APP_ID=
 
+#github radar app connection
+INF_APP_CONNECTION_GITHUB_RADAR_APP_CLIENT_ID=
+INF_APP_CONNECTION_GITHUB_RADAR_APP_CLIENT_SECRET=
+INF_APP_CONNECTION_GITHUB_RADAR_APP_PRIVATE_KEY=
+INF_APP_CONNECTION_GITHUB_RADAR_APP_SLUG=
+INF_APP_CONNECTION_GITHUB_RADAR_APP_ID=
+INF_APP_CONNECTION_GITHUB_RADAR_APP_WEBHOOK_SECRET=
+
 #gcp app connection
 INF_APP_CONNECTION_GCP_SERVICE_ACCOUNT_CREDENTIAL=
 
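A variable added here surfaces at runtime via the process environment; a minimal sketch (reading process.env directly is for illustration — the backend normally consumes these through its validated env config):

    const radarWebhookSecret = process.env.INF_APP_CONNECTION_GITHUB_RADAR_APP_WEBHOOK_SECRET;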
@@ -40,3 +40,4 @@ cli/detect/config/gitleaks.toml:gcp-api-key:578
 cli/detect/config/gitleaks.toml:gcp-api-key:579
 cli/detect/config/gitleaks.toml:gcp-api-key:581
 cli/detect/config/gitleaks.toml:gcp-api-key:582
+backend/src/services/smtp/smtp-service.ts:generic-api-key:79
2 backend/src/@types/fastify.d.ts vendored
@@ -37,6 +37,7 @@ import { TSecretApprovalRequestServiceFactory } from "@app/ee/services/secret-ap
 import { TSecretRotationServiceFactory } from "@app/ee/services/secret-rotation/secret-rotation-service";
 import { TSecretRotationV2ServiceFactory } from "@app/ee/services/secret-rotation-v2/secret-rotation-v2-service";
 import { TSecretScanningServiceFactory } from "@app/ee/services/secret-scanning/secret-scanning-service";
+import { TSecretScanningV2ServiceFactory } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-service";
 import { TSecretSnapshotServiceFactory } from "@app/ee/services/secret-snapshot/secret-snapshot-service";
 import { TSshCertificateAuthorityServiceFactory } from "@app/ee/services/ssh/ssh-certificate-authority-service";
 import { TSshCertificateTemplateServiceFactory } from "@app/ee/services/ssh-certificate-template/ssh-certificate-template-service";
@@ -271,6 +272,7 @@ declare module "fastify" {
     microsoftTeams: TMicrosoftTeamsServiceFactory;
     assumePrivileges: TAssumePrivilegeServiceFactory;
     githubOrgSync: TGithubOrgSyncServiceFactory;
+    secretScanningV2: TSecretScanningV2ServiceFactory;
     internalCertificateAuthority: TInternalCertificateAuthorityServiceFactory;
     pkiTemplate: TPkiTemplatesServiceFactory;
   };
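With this declaration merge in place, the new service is available with full typing on the Fastify instance; a minimal sketch (handler context assumed):

    // inside any route handler registered on this server
    const svc = server.services.secretScanningV2; // typed as TSecretScanningV2ServiceFactory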
40 backend/src/@types/knex.d.ts vendored
@@ -336,9 +336,24 @@ import {
   TSecretRotationV2SecretMappingsInsert,
   TSecretRotationV2SecretMappingsUpdate,
   TSecrets,
+  TSecretScanningConfigs,
+  TSecretScanningConfigsInsert,
+  TSecretScanningConfigsUpdate,
+  TSecretScanningDataSources,
+  TSecretScanningDataSourcesInsert,
+  TSecretScanningDataSourcesUpdate,
+  TSecretScanningFindings,
+  TSecretScanningFindingsInsert,
+  TSecretScanningFindingsUpdate,
   TSecretScanningGitRisks,
   TSecretScanningGitRisksInsert,
   TSecretScanningGitRisksUpdate,
+  TSecretScanningResources,
+  TSecretScanningResourcesInsert,
+  TSecretScanningResourcesUpdate,
+  TSecretScanningScans,
+  TSecretScanningScansInsert,
+  TSecretScanningScansUpdate,
   TSecretSharing,
   TSecretSharingInsert,
   TSecretSharingUpdate,
@@ -1107,5 +1122,30 @@ declare module "knex/types/tables" {
       TGithubOrgSyncConfigsInsert,
       TGithubOrgSyncConfigsUpdate
     >;
+    [TableName.SecretScanningDataSource]: KnexOriginal.CompositeTableType<
+      TSecretScanningDataSources,
+      TSecretScanningDataSourcesInsert,
+      TSecretScanningDataSourcesUpdate
+    >;
+    [TableName.SecretScanningResource]: KnexOriginal.CompositeTableType<
+      TSecretScanningResources,
+      TSecretScanningResourcesInsert,
+      TSecretScanningResourcesUpdate
+    >;
+    [TableName.SecretScanningScan]: KnexOriginal.CompositeTableType<
+      TSecretScanningScans,
+      TSecretScanningScansInsert,
+      TSecretScanningScansUpdate
+    >;
+    [TableName.SecretScanningFinding]: KnexOriginal.CompositeTableType<
+      TSecretScanningFindings,
+      TSecretScanningFindingsInsert,
+      TSecretScanningFindingsUpdate
+    >;
+    [TableName.SecretScanningConfig]: KnexOriginal.CompositeTableType<
+      TSecretScanningConfigs,
+      TSecretScanningConfigsInsert,
+      TSecretScanningConfigsUpdate
+    >;
   }
 }
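The CompositeTableType registrations give knex calls row-level typing for the new tables; a minimal sketch (the knex instance and resourceId are assumed to be in scope):

    import { TableName } from "@app/db/schemas";

    const scans = await knex(TableName.SecretScanningScan).where({ resourceId });
    // scans is inferred as TSecretScanningScans[]; inserts are checked against
    // TSecretScanningScansInsert and updates against TSecretScanningScansUpdate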
107 backend/src/db/migrations/20250517002225_secret-scanning-v2.ts Normal file
@@ -0,0 +1,107 @@
import { Knex } from "knex";

import { TableName } from "@app/db/schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "@app/db/utils";
import {
  SecretScanningFindingStatus,
  SecretScanningScanStatus
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";

export async function up(knex: Knex): Promise<void> {
  if (!(await knex.schema.hasTable(TableName.SecretScanningDataSource))) {
    await knex.schema.createTable(TableName.SecretScanningDataSource, (t) => {
      t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
      t.string("externalId").index(); // if we need a unique way of identifying this data source from an external resource
      t.string("name", 48).notNullable();
      t.string("description");
      t.string("type").notNullable();
      t.jsonb("config").notNullable();
      t.binary("encryptedCredentials"); // webhook credentials, etc.
      t.uuid("connectionId");
      t.boolean("isAutoScanEnabled").defaultTo(true);
      t.foreign("connectionId").references("id").inTable(TableName.AppConnection);
      t.string("projectId").notNullable();
      t.foreign("projectId").references("id").inTable(TableName.Project).onDelete("CASCADE");
      t.timestamps(true, true, true);
      t.boolean("isDisconnected").notNullable().defaultTo(false);
      t.unique(["projectId", "name"]);
    });
    await createOnUpdateTrigger(knex, TableName.SecretScanningDataSource);
  }

  if (!(await knex.schema.hasTable(TableName.SecretScanningResource))) {
    await knex.schema.createTable(TableName.SecretScanningResource, (t) => {
      t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
      t.string("externalId").notNullable();
      t.string("name").notNullable();
      t.string("type").notNullable();
      t.uuid("dataSourceId").notNullable();
      t.foreign("dataSourceId").references("id").inTable(TableName.SecretScanningDataSource).onDelete("CASCADE");
      t.timestamps(true, true, true);
      t.unique(["dataSourceId", "externalId"]);
    });
    await createOnUpdateTrigger(knex, TableName.SecretScanningResource);
  }

  if (!(await knex.schema.hasTable(TableName.SecretScanningScan))) {
    await knex.schema.createTable(TableName.SecretScanningScan, (t) => {
      t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
      t.string("status").notNullable().defaultTo(SecretScanningScanStatus.Queued);
      t.string("statusMessage", 1024);
      t.string("type").notNullable();
      t.uuid("resourceId").notNullable();
      t.foreign("resourceId").references("id").inTable(TableName.SecretScanningResource).onDelete("CASCADE");
      t.timestamp("createdAt").defaultTo(knex.fn.now());
    });
  }

  if (!(await knex.schema.hasTable(TableName.SecretScanningFinding))) {
    await knex.schema.createTable(TableName.SecretScanningFinding, (t) => {
      t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
      t.string("dataSourceName").notNullable();
      t.string("dataSourceType").notNullable();
      t.string("resourceName").notNullable();
      t.string("resourceType").notNullable();
      t.string("rule").notNullable();
      t.string("severity").notNullable();
      t.string("status").notNullable().defaultTo(SecretScanningFindingStatus.Unresolved);
      t.string("remarks");
      t.string("fingerprint").notNullable();
      t.jsonb("details").notNullable();
      t.string("projectId").notNullable();
      t.foreign("projectId").references("id").inTable(TableName.Project).onDelete("CASCADE");
      t.uuid("scanId");
      t.foreign("scanId").references("id").inTable(TableName.SecretScanningScan).onDelete("SET NULL");
      t.timestamps(true, true, true);
      t.unique(["projectId", "fingerprint"]);
    });
    await createOnUpdateTrigger(knex, TableName.SecretScanningFinding);
  }

  if (!(await knex.schema.hasTable(TableName.SecretScanningConfig))) {
    await knex.schema.createTable(TableName.SecretScanningConfig, (t) => {
      t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
      t.string("projectId").notNullable().unique();
      t.foreign("projectId").references("id").inTable(TableName.Project).onDelete("CASCADE");
      t.string("content", 5000);
      t.timestamps(true, true, true);
    });
    await createOnUpdateTrigger(knex, TableName.SecretScanningConfig);
  }
}

export async function down(knex: Knex): Promise<void> {
  await knex.schema.dropTableIfExists(TableName.SecretScanningFinding);
  await dropOnUpdateTrigger(knex, TableName.SecretScanningFinding);

  await knex.schema.dropTableIfExists(TableName.SecretScanningScan);

  await knex.schema.dropTableIfExists(TableName.SecretScanningResource);
  await dropOnUpdateTrigger(knex, TableName.SecretScanningResource);

  await knex.schema.dropTableIfExists(TableName.SecretScanningDataSource);
  await dropOnUpdateTrigger(knex, TableName.SecretScanningDataSource);

  await knex.schema.dropTableIfExists(TableName.SecretScanningConfig);
  await dropOnUpdateTrigger(knex, TableName.SecretScanningConfig);
}
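The unique ["projectId", "fingerprint"] index on the findings table is what makes repeated scans idempotent; a minimal sketch of an upsert relying on it (the finding object is assumed and not part of this migration):

    await knex(TableName.SecretScanningFinding)
      .insert(finding)
      .onConflict(["projectId", "fingerprint"])
      .merge(); // re-reporting the same leak updates the existing row instead of duplicating it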
@@ -0,0 +1,23 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  const hasTokenReviewModeColumn = await knex.schema.hasColumn(TableName.IdentityKubernetesAuth, "tokenReviewMode");

  if (!hasTokenReviewModeColumn) {
    await knex.schema.alterTable(TableName.IdentityKubernetesAuth, (table) => {
      table.string("tokenReviewMode").notNullable().defaultTo("api");
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  const hasTokenReviewModeColumn = await knex.schema.hasColumn(TableName.IdentityKubernetesAuth, "tokenReviewMode");

  if (hasTokenReviewModeColumn) {
    await knex.schema.alterTable(TableName.IdentityKubernetesAuth, (table) => {
      table.dropColumn("tokenReviewMode");
    });
  }
}
@@ -31,7 +31,8 @@ export const IdentityKubernetesAuthsSchema = z.object({
   encryptedKubernetesTokenReviewerJwt: zodBuffer.nullable().optional(),
   encryptedKubernetesCaCertificate: zodBuffer.nullable().optional(),
   gatewayId: z.string().uuid().nullable().optional(),
-  accessTokenPeriod: z.coerce.number().default(0)
+  accessTokenPeriod: z.coerce.number().default(0),
+  tokenReviewMode: z.string().default("api")
 });
 
 export type TIdentityKubernetesAuths = z.infer<typeof IdentityKubernetesAuthsSchema>;
@@ -111,7 +111,12 @@ export * from "./secret-rotation-outputs";
 export * from "./secret-rotation-v2-secret-mappings";
 export * from "./secret-rotations";
 export * from "./secret-rotations-v2";
+export * from "./secret-scanning-configs";
+export * from "./secret-scanning-data-sources";
+export * from "./secret-scanning-findings";
 export * from "./secret-scanning-git-risks";
+export * from "./secret-scanning-resources";
+export * from "./secret-scanning-scans";
 export * from "./secret-sharing";
 export * from "./secret-snapshot-folders";
 export * from "./secret-snapshot-secrets";
@@ -159,7 +159,12 @@ export enum TableName {
   MicrosoftTeamsIntegrations = "microsoft_teams_integrations",
   ProjectMicrosoftTeamsConfigs = "project_microsoft_teams_configs",
   SecretReminderRecipients = "secret_reminder_recipients",
-  GithubOrgSyncConfig = "github_org_sync_configs"
+  GithubOrgSyncConfig = "github_org_sync_configs",
+  SecretScanningDataSource = "secret_scanning_data_sources",
+  SecretScanningResource = "secret_scanning_resources",
+  SecretScanningScan = "secret_scanning_scans",
+  SecretScanningFinding = "secret_scanning_findings",
+  SecretScanningConfig = "secret_scanning_configs"
 }
 
 export type TImmutableDBKeys = "id" | "createdAt" | "updatedAt";
@@ -248,7 +253,8 @@ export enum ProjectType {
   SecretManager = "secret-manager",
   CertificateManager = "cert-manager",
   KMS = "kms",
-  SSH = "ssh"
+  SSH = "ssh",
+  SecretScanning = "secret-scanning"
 }
 
 export enum ActionProjectType {
@@ -256,6 +262,7 @@ export enum ActionProjectType {
   CertificateManager = ProjectType.CertificateManager,
   KMS = ProjectType.KMS,
   SSH = ProjectType.SSH,
+  SecretScanning = ProjectType.SecretScanning,
   // project operations that happen on all types
   Any = "any"
 }
20 backend/src/db/schemas/secret-scanning-configs.ts Normal file
@@ -0,0 +1,20 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.

import { z } from "zod";

import { TImmutableDBKeys } from "./models";

export const SecretScanningConfigsSchema = z.object({
  id: z.string().uuid(),
  projectId: z.string(),
  content: z.string().nullable().optional(),
  createdAt: z.date(),
  updatedAt: z.date()
});

export type TSecretScanningConfigs = z.infer<typeof SecretScanningConfigsSchema>;
export type TSecretScanningConfigsInsert = Omit<z.input<typeof SecretScanningConfigsSchema>, TImmutableDBKeys>;
export type TSecretScanningConfigsUpdate = Partial<Omit<z.input<typeof SecretScanningConfigsSchema>, TImmutableDBKeys>>;
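A minimal sketch of how a generated schema is used at a service boundary (row is assumed to be a raw DB record):

    const config = SecretScanningConfigsSchema.parse(row);
    // config.content is string | null | undefined per the schema above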
32 backend/src/db/schemas/secret-scanning-data-sources.ts Normal file
@@ -0,0 +1,32 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.

import { z } from "zod";

import { zodBuffer } from "@app/lib/zod";

import { TImmutableDBKeys } from "./models";

export const SecretScanningDataSourcesSchema = z.object({
  id: z.string().uuid(),
  externalId: z.string().nullable().optional(),
  name: z.string(),
  description: z.string().nullable().optional(),
  type: z.string(),
  config: z.unknown(),
  encryptedCredentials: zodBuffer.nullable().optional(),
  connectionId: z.string().uuid().nullable().optional(),
  isAutoScanEnabled: z.boolean().default(true).nullable().optional(),
  projectId: z.string(),
  createdAt: z.date(),
  updatedAt: z.date(),
  isDisconnected: z.boolean().default(false)
});

export type TSecretScanningDataSources = z.infer<typeof SecretScanningDataSourcesSchema>;
export type TSecretScanningDataSourcesInsert = Omit<z.input<typeof SecretScanningDataSourcesSchema>, TImmutableDBKeys>;
export type TSecretScanningDataSourcesUpdate = Partial<
  Omit<z.input<typeof SecretScanningDataSourcesSchema>, TImmutableDBKeys>
>;
32 backend/src/db/schemas/secret-scanning-findings.ts Normal file
@@ -0,0 +1,32 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.

import { z } from "zod";

import { TImmutableDBKeys } from "./models";

export const SecretScanningFindingsSchema = z.object({
  id: z.string().uuid(),
  dataSourceName: z.string(),
  dataSourceType: z.string(),
  resourceName: z.string(),
  resourceType: z.string(),
  rule: z.string(),
  severity: z.string(),
  status: z.string().default("unresolved"),
  remarks: z.string().nullable().optional(),
  fingerprint: z.string(),
  details: z.unknown(),
  projectId: z.string(),
  scanId: z.string().uuid().nullable().optional(),
  createdAt: z.date(),
  updatedAt: z.date()
});

export type TSecretScanningFindings = z.infer<typeof SecretScanningFindingsSchema>;
export type TSecretScanningFindingsInsert = Omit<z.input<typeof SecretScanningFindingsSchema>, TImmutableDBKeys>;
export type TSecretScanningFindingsUpdate = Partial<
  Omit<z.input<typeof SecretScanningFindingsSchema>, TImmutableDBKeys>
>;
24 backend/src/db/schemas/secret-scanning-resources.ts Normal file
@@ -0,0 +1,24 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.

import { z } from "zod";

import { TImmutableDBKeys } from "./models";

export const SecretScanningResourcesSchema = z.object({
  id: z.string().uuid(),
  externalId: z.string(),
  name: z.string(),
  type: z.string(),
  dataSourceId: z.string().uuid(),
  createdAt: z.date(),
  updatedAt: z.date()
});

export type TSecretScanningResources = z.infer<typeof SecretScanningResourcesSchema>;
export type TSecretScanningResourcesInsert = Omit<z.input<typeof SecretScanningResourcesSchema>, TImmutableDBKeys>;
export type TSecretScanningResourcesUpdate = Partial<
  Omit<z.input<typeof SecretScanningResourcesSchema>, TImmutableDBKeys>
>;
21 backend/src/db/schemas/secret-scanning-scans.ts Normal file
@@ -0,0 +1,21 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.

import { z } from "zod";

import { TImmutableDBKeys } from "./models";

export const SecretScanningScansSchema = z.object({
  id: z.string().uuid(),
  status: z.string().default("queued"),
  statusMessage: z.string().nullable().optional(),
  type: z.string(),
  resourceId: z.string().uuid(),
  createdAt: z.date().nullable().optional()
});

export type TSecretScanningScans = z.infer<typeof SecretScanningScansSchema>;
export type TSecretScanningScansInsert = Omit<z.input<typeof SecretScanningScansSchema>, TImmutableDBKeys>;
export type TSecretScanningScansUpdate = Partial<Omit<z.input<typeof SecretScanningScansSchema>, TImmutableDBKeys>>;
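The Insert/Update helper types strip the immutable keys ("id" | "createdAt" | "updatedAt"), and fields with defaults become optional on input; a minimal sketch (the type and resourceId values are placeholders):

    const newScan: TSecretScanningScansInsert = {
      type: "full-scan", // placeholder; the actual scan type values aren't shown in this diff
      resourceId: "00000000-0000-0000-0000-000000000000"
    }; // status defaults to "queued", so it can be omitted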
@@ -2,6 +2,10 @@ import {
   registerSecretRotationV2Router,
   SECRET_ROTATION_REGISTER_ROUTER_MAP
 } from "@app/ee/routes/v2/secret-rotation-v2-routers";
+import {
+  registerSecretScanningV2Router,
+  SECRET_SCANNING_REGISTER_ROUTER_MAP
+} from "@app/ee/routes/v2/secret-scanning-v2-routers";
 
 import { registerIdentityProjectAdditionalPrivilegeRouter } from "./identity-project-additional-privilege-router";
 import { registerProjectRoleRouter } from "./project-role-router";
@@ -31,4 +35,17 @@ export const registerV2EERoutes = async (server: FastifyZodProvider) => {
     },
     { prefix: "/secret-rotations" }
   );
+
+  await server.register(
+    async (secretScanningV2Router) => {
+      // register generic secret scanning endpoints
+      await secretScanningV2Router.register(registerSecretScanningV2Router);
+
+      // register service-specific secret scanning endpoints (gitlab/github, etc.)
+      for await (const [type, router] of Object.entries(SECRET_SCANNING_REGISTER_ROUTER_MAP)) {
+        await secretScanningV2Router.register(router, { prefix: `data-sources/${type}` });
+      }
+    },
+    { prefix: "/secret-scanning" }
+  );
 };
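With these prefixes, the registration above yields one generic route tree plus one subtree per data source type; a sketch of the resulting paths relative to wherever registerV2EERoutes is mounted (the exact mount point is not shown in this diff):

    /secret-scanning/...                      -> registerSecretScanningV2Router
    /secret-scanning/data-sources/github/...  -> registerGitHubSecretScanningRouter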
@@ -0,0 +1,16 @@
import { registerSecretScanningEndpoints } from "@app/ee/routes/v2/secret-scanning-v2-routers/secret-scanning-v2-endpoints";
import {
  CreateGitHubDataSourceSchema,
  GitHubDataSourceSchema,
  UpdateGitHubDataSourceSchema
} from "@app/ee/services/secret-scanning-v2/github";
import { SecretScanningDataSource } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";

export const registerGitHubSecretScanningRouter = async (server: FastifyZodProvider) =>
  registerSecretScanningEndpoints({
    type: SecretScanningDataSource.GitHub,
    server,
    responseSchema: GitHubDataSourceSchema,
    createSchema: CreateGitHubDataSourceSchema,
    updateSchema: UpdateGitHubDataSourceSchema
  });
12 backend/src/ee/routes/v2/secret-scanning-v2-routers/index.ts Normal file
@@ -0,0 +1,12 @@
import { SecretScanningDataSource } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";

import { registerGitHubSecretScanningRouter } from "./github-secret-scanning-router";

export * from "./secret-scanning-v2-router";

export const SECRET_SCANNING_REGISTER_ROUTER_MAP: Record<
  SecretScanningDataSource,
  (server: FastifyZodProvider) => Promise<void>
> = {
  [SecretScanningDataSource.GitHub]: registerGitHubSecretScanningRouter
};
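Because the map is keyed by the SecretScanningDataSource enum, adding a provider is a small change here; a sketch for a hypothetical GitLab source (the GitLab enum member and router do not exist in this diff and are assumptions):

    export const SECRET_SCANNING_REGISTER_ROUTER_MAP: Record<
      SecretScanningDataSource,
      (server: FastifyZodProvider) => Promise<void>
    > = {
      [SecretScanningDataSource.GitHub]: registerGitHubSecretScanningRouter,
      [SecretScanningDataSource.GitLab]: registerGitLabSecretScanningRouter // hypothetical
    };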
@@ -0,0 +1,593 @@
import { z } from "zod";

import { SecretScanningResourcesSchema, SecretScanningScansSchema } from "@app/db/schemas";
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import {
  SecretScanningDataSource,
  SecretScanningScanStatus
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
import { SECRET_SCANNING_DATA_SOURCE_NAME_MAP } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-maps";
import {
  TSecretScanningDataSource,
  TSecretScanningDataSourceInput
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-types";
import { ApiDocsTags, SecretScanningDataSources } from "@app/lib/api-docs";
import { startsWithVowel } from "@app/lib/fn";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";

export const registerSecretScanningEndpoints = <
  T extends TSecretScanningDataSource,
  I extends TSecretScanningDataSourceInput
>({
  server,
  type,
  createSchema,
  updateSchema,
  responseSchema
}: {
  type: SecretScanningDataSource;
  server: FastifyZodProvider;
  createSchema: z.ZodType<{
    name: string;
    projectId: string;
    connectionId?: string;
    config: Partial<I["config"]>;
    description?: string | null;
    isAutoScanEnabled?: boolean;
  }>;
  updateSchema: z.ZodType<{
    name?: string;
    config?: Partial<I["config"]>;
    description?: string | null;
    isAutoScanEnabled?: boolean;
  }>;
  responseSchema: z.ZodTypeAny;
}) => {
  const sourceType = SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type];

  server.route({
    method: "GET",
    url: `/`,
    config: {
      rateLimit: readLimit
    },
    schema: {
      hide: false,
      tags: [ApiDocsTags.SecretScanning],
      description: `List the ${sourceType} Data Sources for the specified project.`,
      querystring: z.object({
        projectId: z
          .string()
          .trim()
          .min(1, "Project ID required")
          .describe(SecretScanningDataSources.LIST(type).projectId)
      }),
      response: {
        200: z.object({ dataSources: responseSchema.array() })
      }
    },
    onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
    handler: async (req) => {
      const {
        query: { projectId }
      } = req;

      const dataSources = (await server.services.secretScanningV2.listSecretScanningDataSourcesByProjectId(
        { projectId, type },
        req.permission
      )) as T[];

      await server.services.auditLog.createAuditLog({
        ...req.auditLogInfo,
        projectId,
        event: {
          type: EventType.SECRET_SCANNING_DATA_SOURCE_LIST,
          metadata: {
            type,
            count: dataSources.length,
            dataSourceIds: dataSources.map((source) => source.id)
          }
        }
      });

      return { dataSources };
    }
  });

  server.route({
    method: "GET",
    url: "/:dataSourceId",
    config: {
      rateLimit: readLimit
    },
    schema: {
      hide: false,
      tags: [ApiDocsTags.SecretScanning],
      description: `Get the specified ${sourceType} Data Source by ID.`,
      params: z.object({
        dataSourceId: z.string().uuid().describe(SecretScanningDataSources.GET_BY_ID(type).dataSourceId)
      }),
      response: {
        200: z.object({ dataSource: responseSchema })
      }
    },
    onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
    handler: async (req) => {
      const { dataSourceId } = req.params;

      const dataSource = (await server.services.secretScanningV2.findSecretScanningDataSourceById(
        { dataSourceId, type },
        req.permission
      )) as T;

      await server.services.auditLog.createAuditLog({
        ...req.auditLogInfo,
        projectId: dataSource.projectId,
        event: {
          type: EventType.SECRET_SCANNING_DATA_SOURCE_GET,
          metadata: {
            dataSourceId,
            type
          }
        }
      });

      return { dataSource };
    }
  });

  server.route({
    method: "GET",
    url: `/data-source-name/:sourceName`,
    config: {
      rateLimit: readLimit
    },
    schema: {
      hide: false,
      tags: [ApiDocsTags.SecretScanning],
      description: `Get the specified ${sourceType} Data Source by name and project ID.`,
      params: z.object({
        sourceName: z
          .string()
          .trim()
          .min(1, "Data Source name required")
          .describe(SecretScanningDataSources.GET_BY_NAME(type).sourceName)
      }),
      querystring: z.object({
        projectId: z
          .string()
          .trim()
          .min(1, "Project ID required")
          .describe(SecretScanningDataSources.GET_BY_NAME(type).projectId)
      }),
      response: {
        200: z.object({ dataSource: responseSchema })
      }
    },
    onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
    handler: async (req) => {
      const { sourceName } = req.params;
      const { projectId } = req.query;

      const dataSource = (await server.services.secretScanningV2.findSecretScanningDataSourceByName(
        { sourceName, projectId, type },
        req.permission
      )) as T;

      await server.services.auditLog.createAuditLog({
        ...req.auditLogInfo,
        projectId,
        event: {
          type: EventType.SECRET_SCANNING_DATA_SOURCE_GET,
          metadata: {
            dataSourceId: dataSource.id,
            type
          }
        }
      });

      return { dataSource };
    }
  });

  server.route({
    method: "POST",
    url: "/",
    config: {
      rateLimit: writeLimit
    },
    schema: {
      hide: false,
      tags: [ApiDocsTags.SecretScanning],
      description: `Create ${
        startsWithVowel(sourceType) ? "an" : "a"
      } ${sourceType} Data Source for the specified project.`,
      body: createSchema,
      response: {
        200: z.object({ dataSource: responseSchema })
      }
    },
    onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
    handler: async (req) => {
      const dataSource = (await server.services.secretScanningV2.createSecretScanningDataSource(
        { ...req.body, type },
        req.permission
      )) as T;

      await server.services.auditLog.createAuditLog({
        ...req.auditLogInfo,
        projectId: dataSource.projectId,
        event: {
          type: EventType.SECRET_SCANNING_DATA_SOURCE_CREATE,
          metadata: {
            dataSourceId: dataSource.id,
            type,
            ...req.body
          }
        }
      });

      return { dataSource };
    }
  });

  server.route({
    method: "PATCH",
    url: "/:dataSourceId",
    config: {
      rateLimit: writeLimit
    },
    schema: {
      hide: false,
      tags: [ApiDocsTags.SecretScanning],
      description: `Update the specified ${sourceType} Data Source.`,
      params: z.object({
        dataSourceId: z.string().uuid().describe(SecretScanningDataSources.UPDATE(type).dataSourceId)
      }),
      body: updateSchema,
      response: {
        200: z.object({ dataSource: responseSchema })
      }
    },
    onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
    handler: async (req) => {
      const { dataSourceId } = req.params;

      const dataSource = (await server.services.secretScanningV2.updateSecretScanningDataSource(
        { ...req.body, dataSourceId, type },
        req.permission
      )) as T;

      await server.services.auditLog.createAuditLog({
        ...req.auditLogInfo,
        projectId: dataSource.projectId,
        event: {
          type: EventType.SECRET_SCANNING_DATA_SOURCE_UPDATE,
          metadata: {
            dataSourceId,
            type,
            ...req.body
          }
        }
      });

      return { dataSource };
    }
  });

  server.route({
    method: "DELETE",
    url: `/:dataSourceId`,
    config: {
      rateLimit: writeLimit
    },
    schema: {
      hide: false,
      tags: [ApiDocsTags.SecretScanning],
      description: `Delete the specified ${sourceType} Data Source.`,
      params: z.object({
        dataSourceId: z.string().uuid().describe(SecretScanningDataSources.DELETE(type).dataSourceId)
      }),
      response: {
        200: z.object({ dataSource: responseSchema })
      }
    },
    onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
    handler: async (req) => {
      const { dataSourceId } = req.params;

      const dataSource = (await server.services.secretScanningV2.deleteSecretScanningDataSource(
        { type, dataSourceId },
        req.permission
      )) as T;

      await server.services.auditLog.createAuditLog({
        ...req.auditLogInfo,
        projectId: dataSource.projectId,
        event: {
          type: EventType.SECRET_SCANNING_DATA_SOURCE_DELETE,
          metadata: {
            type,
            dataSourceId
          }
        }
      });

      return { dataSource };
    }
  });

  server.route({
    method: "POST",
    url: `/:dataSourceId/scan`,
    config: {
      rateLimit: writeLimit
    },
    schema: {
      hide: false,
      tags: [ApiDocsTags.SecretScanning],
      description: `Trigger a scan for the specified ${sourceType} Data Source.`,
      params: z.object({
        dataSourceId: z.string().uuid().describe(SecretScanningDataSources.SCAN(type).dataSourceId)
      }),
      response: {
        200: z.object({ dataSource: responseSchema })
      }
    },
    onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
    handler: async (req) => {
      const { dataSourceId } = req.params;

      const dataSource = (await server.services.secretScanningV2.triggerSecretScanningDataSourceScan(
        { type, dataSourceId },
        req.permission
      )) as T;

      await server.services.auditLog.createAuditLog({
        ...req.auditLogInfo,
        projectId: dataSource.projectId,
        event: {
          type: EventType.SECRET_SCANNING_DATA_SOURCE_TRIGGER_SCAN,
          metadata: {
            type,
            dataSourceId
          }
        }
      });

      return { dataSource };
    }
  });

  server.route({
    method: "POST",
    url: `/:dataSourceId/resources/:resourceId/scan`,
    config: {
      rateLimit: writeLimit
    },
    schema: {
      hide: false,
      tags: [ApiDocsTags.SecretScanning],
      description: `Trigger a scan for the specified ${sourceType} Data Source resource.`,
      params: z.object({
        dataSourceId: z.string().uuid().describe(SecretScanningDataSources.SCAN(type).dataSourceId),
        resourceId: z.string().uuid().describe(SecretScanningDataSources.SCAN(type).resourceId)
      }),
      response: {
        200: z.object({ dataSource: responseSchema })
      }
    },
    onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
    handler: async (req) => {
      const { dataSourceId, resourceId } = req.params;

      const dataSource = (await server.services.secretScanningV2.triggerSecretScanningDataSourceScan(
        { type, dataSourceId, resourceId },
        req.permission
      )) as T;

      await server.services.auditLog.createAuditLog({
        ...req.auditLogInfo,
        projectId: dataSource.projectId,
        event: {
          type: EventType.SECRET_SCANNING_DATA_SOURCE_TRIGGER_SCAN,
          metadata: {
            type,
            dataSourceId,
            resourceId
          }
        }
      });

      return { dataSource };
    }
  });

  server.route({
    method: "GET",
    url: "/:dataSourceId/resources",
    config: {
      rateLimit: readLimit
    },
    schema: {
      hide: false,
      tags: [ApiDocsTags.SecretScanning],
      description: `Get the resources associated with the specified ${sourceType} Data Source by ID.`,
      params: z.object({
        dataSourceId: z.string().uuid().describe(SecretScanningDataSources.LIST_RESOURCES(type).dataSourceId)
      }),
      response: {
        200: z.object({ resources: SecretScanningResourcesSchema.array() })
      }
    },
    onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
    handler: async (req) => {
      const { dataSourceId } = req.params;

      const { resources, projectId } = await server.services.secretScanningV2.listSecretScanningResourcesByDataSourceId(
        { dataSourceId, type },
        req.permission
      );

      await server.services.auditLog.createAuditLog({
        ...req.auditLogInfo,
        projectId,
        event: {
          type: EventType.SECRET_SCANNING_RESOURCE_LIST,
          metadata: {
            dataSourceId,
            type,
            resourceIds: resources.map((resource) => resource.id),
            count: resources.length
          }
        }
      });

      return { resources };
    }
  });

  server.route({
    method: "GET",
    url: "/:dataSourceId/scans",
    config: {
      rateLimit: readLimit
    },
    schema: {
      hide: false,
      tags: [ApiDocsTags.SecretScanning],
      description: `Get the scans associated with the specified ${sourceType} Data Source by ID.`,
      params: z.object({
        dataSourceId: z.string().uuid().describe(SecretScanningDataSources.LIST_SCANS(type).dataSourceId)
      }),
      response: {
        200: z.object({ scans: SecretScanningScansSchema.array() })
      }
    },
    onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
    handler: async (req) => {
      const { dataSourceId } = req.params;

      const { scans, projectId } = await server.services.secretScanningV2.listSecretScanningScansByDataSourceId(
        { dataSourceId, type },
        req.permission
      );

      await server.services.auditLog.createAuditLog({
        ...req.auditLogInfo,
        projectId,
        event: {
          type: EventType.SECRET_SCANNING_SCAN_LIST,
          metadata: {
            dataSourceId,
            type,
            count: scans.length
          }
        }
      });

      return { scans };
    }
  });

  // not exposed, for UI only
  server.route({
    method: "GET",
    url: "/:dataSourceId/resources-dashboard",
    config: {
      rateLimit: readLimit
    },
    schema: {
      tags: [ApiDocsTags.SecretScanning],
      params: z.object({
        dataSourceId: z.string().uuid()
      }),
      response: {
        200: z.object({
          resources: SecretScanningResourcesSchema.extend({
            lastScannedAt: z.date().nullish(),
            lastScanStatus: z.nativeEnum(SecretScanningScanStatus).nullish(),
            lastScanStatusMessage: z.string().nullish(),
            unresolvedFindings: z.number()
          }).array()
        })
      }
    },
    onRequest: verifyAuth([AuthMode.JWT]),
    handler: async (req) => {
      const { dataSourceId } = req.params;

      const { resources, projectId } =
        await server.services.secretScanningV2.listSecretScanningResourcesWithDetailsByDataSourceId(
          { dataSourceId, type },
          req.permission
        );

      await server.services.auditLog.createAuditLog({
        ...req.auditLogInfo,
        projectId,
        event: {
          type: EventType.SECRET_SCANNING_RESOURCE_LIST,
          metadata: {
            dataSourceId,
            type,
            resourceIds: resources.map((resource) => resource.id),
            count: resources.length
          }
        }
      });

      return { resources };
    }
  });

  server.route({
    method: "GET",
    url: "/:dataSourceId/scans-dashboard",
    config: {
      rateLimit: readLimit
    },
    schema: {
      tags: [ApiDocsTags.SecretScanning],
      params: z.object({
        dataSourceId: z.string().uuid()
      }),
      response: {
        200: z.object({
          scans: SecretScanningScansSchema.extend({
            unresolvedFindings: z.number(),
            resolvedFindings: z.number(),
            resourceName: z.string()
          }).array()
        })
      }
    },
    onRequest: verifyAuth([AuthMode.JWT]),
    handler: async (req) => {
      const { dataSourceId } = req.params;

      const { scans, projectId } =
        await server.services.secretScanningV2.listSecretScanningScansWithDetailsByDataSourceId(
          { dataSourceId, type },
          req.permission
        );

      await server.services.auditLog.createAuditLog({
        ...req.auditLogInfo,
        projectId,
        event: {
          type: EventType.SECRET_SCANNING_SCAN_LIST,
          metadata: {
            dataSourceId,
            type,
            count: scans.length
          }
        }
      });

      return { scans };
    }
  });
};
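Combined with the data-sources/${type} prefix from the route index, the factory above produces concrete per-provider endpoints; a minimal sketch of triggering a scan over HTTP (the full API prefix, ID, and token are placeholders):

    await fetch(`/api/v2/secret-scanning/data-sources/github/${dataSourceId}/scan`, {
      method: "POST",
      headers: { Authorization: `Bearer ${accessToken}` }
    });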
@@ -0,0 +1,366 @@
import { z } from "zod";

import { SecretScanningConfigsSchema } from "@app/db/schemas";
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import { GitHubDataSourceListItemSchema } from "@app/ee/services/secret-scanning-v2/github";
import {
  SecretScanningFindingStatus,
  SecretScanningScanStatus
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
import {
  SecretScanningDataSourceSchema,
  SecretScanningFindingSchema
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-union-schemas";
import {
  ApiDocsTags,
  SecretScanningConfigs,
  SecretScanningDataSources,
  SecretScanningFindings
} from "@app/lib/api-docs";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";

const SecretScanningDataSourceOptionsSchema = z.discriminatedUnion("type", [GitHubDataSourceListItemSchema]);

export const registerSecretScanningV2Router = async (server: FastifyZodProvider) => {
  server.route({
    method: "GET",
    url: "/data-sources/options",
    config: {
      rateLimit: readLimit
    },
    schema: {
      hide: false,
      tags: [ApiDocsTags.SecretScanning],
      description: "List the available Secret Scanning Data Source Options.",
      response: {
        200: z.object({
          dataSourceOptions: SecretScanningDataSourceOptionsSchema.array()
        })
      }
    },
    onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
    handler: () => {
      const dataSourceOptions = server.services.secretScanningV2.listSecretScanningDataSourceOptions();
      return { dataSourceOptions };
    }
  });

  server.route({
    method: "GET",
    url: "/data-sources",
    config: {
      rateLimit: readLimit
    },
    schema: {
      hide: false,
      tags: [ApiDocsTags.SecretScanning],
      description: "List all the Secret Scanning Data Sources for the specified project.",
      querystring: z.object({
        projectId: z.string().trim().min(1, "Project ID required").describe(SecretScanningDataSources.LIST().projectId)
      }),
      response: {
        200: z.object({ dataSources: SecretScanningDataSourceSchema.array() })
      }
    },
    onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
    handler: async (req) => {
      const {
        query: { projectId },
        permission
      } = req;

      const dataSources = await server.services.secretScanningV2.listSecretScanningDataSourcesByProjectId(
        { projectId },
        permission
      );

      await server.services.auditLog.createAuditLog({
        ...req.auditLogInfo,
        projectId,
        event: {
          type: EventType.SECRET_SCANNING_DATA_SOURCE_LIST,
          metadata: {
            dataSourceIds: dataSources.map((dataSource) => dataSource.id),
            count: dataSources.length
          }
        }
      });

      return { dataSources };
    }
  });

  server.route({
    method: "GET",
    url: "/findings",
    config: {
      rateLimit: readLimit
    },
    schema: {
      hide: false,
      tags: [ApiDocsTags.SecretScanning],
      description: "List all the Secret Scanning Findings for the specified project.",
      querystring: z.object({
        projectId: z.string().trim().min(1, "Project ID required").describe(SecretScanningFindings.LIST.projectId)
      }),
      response: {
        200: z.object({ findings: SecretScanningFindingSchema.array() })
      }
    },
    onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
    handler: async (req) => {
      const {
        query: { projectId },
        permission
      } = req;

      const findings = await server.services.secretScanningV2.listSecretScanningFindingsByProjectId(
        projectId,
        permission
      );

      await server.services.auditLog.createAuditLog({
        ...req.auditLogInfo,
        projectId,
        event: {
          type: EventType.SECRET_SCANNING_FINDING_LIST,
          metadata: {
            findingIds: findings.map((finding) => finding.id),
            count: findings.length
          }
        }
      });

      return { findings };
    }
  });

  server.route({
    method: "PATCH",
    url: "/findings/:findingId",
    config: {
      rateLimit: writeLimit
    },
    schema: {
      hide: false,
      tags: [ApiDocsTags.SecretScanning],
      description: "Update the specified Secret Scanning Finding.",
      params: z.object({
        findingId: z.string().trim().min(1, "Finding ID required").describe(SecretScanningFindings.UPDATE.findingId)
      }),
      body: z.object({
        status: z.nativeEnum(SecretScanningFindingStatus).optional().describe(SecretScanningFindings.UPDATE.status),
        remarks: z.string().nullish().describe(SecretScanningFindings.UPDATE.remarks)
      }),
      response: {
        200: z.object({ finding: SecretScanningFindingSchema })
      }
    },
    onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
    handler: async (req) => {
      const {
        params: { findingId },
        body,
        permission
      } = req;

      const { finding, projectId } = await server.services.secretScanningV2.updateSecretScanningFindingById(
        { findingId, ...body },
        permission
      );

      await server.services.auditLog.createAuditLog({
        ...req.auditLogInfo,
        projectId,
        event: {
          type: EventType.SECRET_SCANNING_FINDING_UPDATE,
          metadata: {
            findingId,
            ...body
          }
        }
      });

      return { finding };
    }
  });

  server.route({
    method: "GET",
    url: "/configs",
    config: {
      rateLimit: readLimit
    },
    schema: {
      hide: false,
      tags: [ApiDocsTags.SecretScanning],
      description: "Get the Secret Scanning Config for the specified project.",
      querystring: z.object({
        projectId: z
          .string()
          .trim()
          .min(1, "Project ID required")
          .describe(SecretScanningConfigs.GET_BY_PROJECT_ID.projectId)
      }),
      response: {
        200: z.object({
          config: z.object({ content: z.string().nullish(), projectId: z.string(), updatedAt: z.date().nullish() })
        })
      }
    },
    onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
    handler: async (req) => {
      const {
        query: { projectId },
        permission
      } = req;

      const config = await server.services.secretScanningV2.findSecretScanningConfigByProjectId(projectId, permission);

      await server.services.auditLog.createAuditLog({
        ...req.auditLogInfo,
        projectId,
        event: {
          type: EventType.SECRET_SCANNING_CONFIG_GET
        }
      });

      return { config };
    }
  });

  server.route({
    method: "PATCH",
    url: "/configs",
    config: {
      rateLimit: writeLimit
    },
    schema: {
      hide: false,
      tags: [ApiDocsTags.SecretScanning],
      description: "Update the specified Secret Scanning Configuration.",
      querystring: z.object({
        projectId: z.string().trim().min(1, "Project ID required").describe(SecretScanningConfigs.UPDATE.projectId)
      }),
      body: z.object({
        content: z.string().nullable().describe(SecretScanningConfigs.UPDATE.content)
      }),
      response: {
        200: z.object({ config: SecretScanningConfigsSchema })
      }
    },
    onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
    handler: async (req) => {
      const {
        query: { projectId },
        body,
        permission
      } = req;

      const config = await server.services.secretScanningV2.upsertSecretScanningConfig(
        { projectId, ...body },
        permission
      );

      await server.services.auditLog.createAuditLog({
        ...req.auditLogInfo,
        projectId,
        event: {
          type: EventType.SECRET_SCANNING_CONFIG_UPDATE,
          metadata: body
        }
      });

      return { config };
    }
  });

  // not exposed, for UI only
  server.route({
    method: "GET",
    url: "/data-sources-dashboard",
    config: {
      rateLimit: readLimit
    },
    schema: {
      querystring: z.object({
        projectId: z.string().trim().min(1, "Project ID required")
      }),
      response: {
        200: z.object({
          dataSources: z
            .intersection(
              SecretScanningDataSourceSchema,
              z.object({
                lastScannedAt: z.date().nullish(),
                lastScanStatus: z.nativeEnum(SecretScanningScanStatus).nullish(),
                lastScanStatusMessage: z.string().nullish(),
                unresolvedFindings: z.number().nullish()
              })
            )
            .array()
        })
      }
    },
    onRequest: verifyAuth([AuthMode.JWT]),
    handler: async (req) => {
      const {
        query: { projectId },
        permission
      } = req;

      const dataSources = await server.services.secretScanningV2.listSecretScanningDataSourcesWithDetailsByProjectId(
        { projectId },
        permission
      );

      await server.services.auditLog.createAuditLog({
        ...req.auditLogInfo,
        projectId,
        event: {
          type: EventType.SECRET_SCANNING_DATA_SOURCE_LIST,
          metadata: {
            dataSourceIds: dataSources.map((dataSource) => dataSource.id),
            count: dataSources.length
          }
        }
      });

      return { dataSources };
    }
  });

  server.route({
    method: "GET",
    url: "/unresolved-findings-count",
    config: {
      rateLimit: readLimit
    },
    schema: {
      tags: [ApiDocsTags.SecretScanning],
      querystring: z.object({
        projectId: z.string().trim().min(1, "Project ID required").describe(SecretScanningFindings.LIST.projectId)
      }),
      response: {
        200: z.object({ unresolvedFindings: z.number() })
      }
    },
    onRequest: verifyAuth([AuthMode.JWT]),
    handler: async (req) => {
      const {
        query: { projectId },
        permission
      } = req;

      const unresolvedFindings =
        await server.services.secretScanningV2.getSecretScanningUnresolvedFindingsCountByProjectId(
          projectId,
          permission
        );

      return { unresolvedFindings };
    }
  });
};
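A minimal sketch of resolving a finding through the PATCH /findings/:findingId route above (the fetch usage and "resolved" status value are illustrative; accepted statuses come from SecretScanningFindingStatus, which is not expanded in this diff):

    await fetch(`/api/v2/secret-scanning/findings/${findingId}`, {
      method: "PATCH",
      headers: { "Content-Type": "application/json", Authorization: `Bearer ${accessToken}` },
      body: JSON.stringify({ status: "resolved", remarks: "credential rotated" })
    });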
@@ -10,6 +10,18 @@ import {
|
||||
TSecretRotationV2Raw,
|
||||
TUpdateSecretRotationV2DTO
|
||||
} from "@app/ee/services/secret-rotation-v2/secret-rotation-v2-types";
|
||||
import {
|
||||
SecretScanningDataSource,
|
||||
SecretScanningScanStatus,
|
||||
SecretScanningScanType
|
||||
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
|
||||
import {
|
||||
TCreateSecretScanningDataSourceDTO,
|
||||
TDeleteSecretScanningDataSourceDTO,
|
||||
TTriggerSecretScanningDataSourceDTO,
|
||||
TUpdateSecretScanningDataSourceDTO,
|
||||
TUpdateSecretScanningFindingDTO
|
||||
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-types";
|
||||
import { SshCaStatus, SshCertType } from "@app/ee/services/ssh/ssh-certificate-authority-types";
import { SshCertKeyAlgorithm } from "@app/ee/services/ssh-certificate/ssh-certificate-types";
import { SshCertTemplateStatus } from "@app/ee/services/ssh-certificate-template/ssh-certificate-template-types";

@@ -381,6 +393,20 @@ export enum EventType {
  PROJECT_ASSUME_PRIVILEGE_SESSION_START = "project-assume-privileges-session-start",
  PROJECT_ASSUME_PRIVILEGE_SESSION_END = "project-assume-privileges-session-end",

  SECRET_SCANNING_DATA_SOURCE_LIST = "secret-scanning-data-source-list",
  SECRET_SCANNING_DATA_SOURCE_CREATE = "secret-scanning-data-source-create",
  SECRET_SCANNING_DATA_SOURCE_UPDATE = "secret-scanning-data-source-update",
  SECRET_SCANNING_DATA_SOURCE_DELETE = "secret-scanning-data-source-delete",
  SECRET_SCANNING_DATA_SOURCE_GET = "secret-scanning-data-source-get",
  SECRET_SCANNING_DATA_SOURCE_TRIGGER_SCAN = "secret-scanning-data-source-trigger-scan",
  SECRET_SCANNING_DATA_SOURCE_SCAN = "secret-scanning-data-source-scan",
  SECRET_SCANNING_RESOURCE_LIST = "secret-scanning-resource-list",
  SECRET_SCANNING_SCAN_LIST = "secret-scanning-scan-list",
  SECRET_SCANNING_FINDING_LIST = "secret-scanning-finding-list",
  SECRET_SCANNING_FINDING_UPDATE = "secret-scanning-finding-update",
  SECRET_SCANNING_CONFIG_GET = "secret-scanning-config-get",
  SECRET_SCANNING_CONFIG_UPDATE = "secret-scanning-config-update",

  UPDATE_ORG = "update-org",

  CREATE_PROJECT = "create-project",

@@ -2953,6 +2979,101 @@ interface MicrosoftTeamsWorkflowIntegrationUpdateEvent {
  };
}

interface SecretScanningDataSourceListEvent {
  type: EventType.SECRET_SCANNING_DATA_SOURCE_LIST;
  metadata: {
    type?: SecretScanningDataSource;
    count: number;
    dataSourceIds: string[];
  };
}

interface SecretScanningDataSourceGetEvent {
  type: EventType.SECRET_SCANNING_DATA_SOURCE_GET;
  metadata: {
    type: SecretScanningDataSource;
    dataSourceId: string;
  };
}

interface SecretScanningDataSourceCreateEvent {
  type: EventType.SECRET_SCANNING_DATA_SOURCE_CREATE;
  metadata: Omit<TCreateSecretScanningDataSourceDTO, "projectId"> & { dataSourceId: string };
}

interface SecretScanningDataSourceUpdateEvent {
  type: EventType.SECRET_SCANNING_DATA_SOURCE_UPDATE;
  metadata: TUpdateSecretScanningDataSourceDTO;
}

interface SecretScanningDataSourceDeleteEvent {
  type: EventType.SECRET_SCANNING_DATA_SOURCE_DELETE;
  metadata: TDeleteSecretScanningDataSourceDTO;
}

interface SecretScanningDataSourceTriggerScanEvent {
  type: EventType.SECRET_SCANNING_DATA_SOURCE_TRIGGER_SCAN;
  metadata: TTriggerSecretScanningDataSourceDTO;
}

interface SecretScanningDataSourceScanEvent {
  type: EventType.SECRET_SCANNING_DATA_SOURCE_SCAN;
  metadata: {
    scanId: string;
    resourceId: string;
    resourceType: string;
    dataSourceId: string;
    dataSourceType: string;
    scanStatus: SecretScanningScanStatus;
    scanType: SecretScanningScanType;
    numberOfSecretsDetected?: number;
  };
}

interface SecretScanningResourceListEvent {
  type: EventType.SECRET_SCANNING_RESOURCE_LIST;
  metadata: {
    type: SecretScanningDataSource;
    dataSourceId: string;
    resourceIds: string[];
    count: number;
  };
}

interface SecretScanningScanListEvent {
  type: EventType.SECRET_SCANNING_SCAN_LIST;
  metadata: {
    type: SecretScanningDataSource;
    dataSourceId: string;
    count: number;
  };
}

interface SecretScanningFindingListEvent {
  type: EventType.SECRET_SCANNING_FINDING_LIST;
  metadata: {
    findingIds: string[];
    count: number;
  };
}

interface SecretScanningFindingUpdateEvent {
  type: EventType.SECRET_SCANNING_FINDING_UPDATE;
  metadata: TUpdateSecretScanningFindingDTO;
}

interface SecretScanningConfigUpdateEvent {
  type: EventType.SECRET_SCANNING_CONFIG_UPDATE;
  metadata: {
    content: string | null;
  };
}

interface SecretScanningConfigReadEvent {
  type: EventType.SECRET_SCANNING_CONFIG_GET;
  metadata?: Record<string, never>; // not needed, based off projectId
}

interface OrgUpdateEvent {
  type: EventType.UPDATE_ORG;
  metadata: {

@@ -3276,6 +3397,19 @@ export type Event =
  | MicrosoftTeamsWorkflowIntegrationGetEvent
  | MicrosoftTeamsWorkflowIntegrationListEvent
  | MicrosoftTeamsWorkflowIntegrationUpdateEvent
  | SecretScanningDataSourceListEvent
  | SecretScanningDataSourceGetEvent
  | SecretScanningDataSourceCreateEvent
  | SecretScanningDataSourceUpdateEvent
  | SecretScanningDataSourceDeleteEvent
  | SecretScanningDataSourceTriggerScanEvent
  | SecretScanningDataSourceScanEvent
  | SecretScanningResourceListEvent
  | SecretScanningScanListEvent
  | SecretScanningFindingListEvent
  | SecretScanningFindingUpdateEvent
  | SecretScanningConfigUpdateEvent
  | SecretScanningConfigReadEvent
  | OrgUpdateEvent
  | ProjectCreateEvent
  | ProjectUpdateEvent

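For reference, a minimal sketch of how one of the new scan events might be constructed (the ID values and the SecretScanningScanStatus/SecretScanningScanType member names shown are illustrative assumptions, not taken from this diff):

const scanEvent: Event = {
  type: EventType.SECRET_SCANNING_DATA_SOURCE_SCAN,
  metadata: {
    scanId: "scan-1", // illustrative IDs
    resourceId: "resource-1",
    resourceType: "repository",
    dataSourceId: "data-source-1",
    dataSourceType: "github",
    scanStatus: SecretScanningScanStatus.Completed, // assumed member name
    scanType: SecretScanningScanType.DiffScan, // assumed member name
    numberOfSecretsDetected: 2
  }
};
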
@@ -17,6 +17,7 @@ import { SapAseProvider } from "./sap-ase";
import { SapHanaProvider } from "./sap-hana";
import { SqlDatabaseProvider } from "./sql-database";
import { TotpProvider } from "./totp";
+import { VerticaProvider } from "./vertica";

type TBuildDynamicSecretProviderDTO = {
  gatewayService: Pick<TGatewayServiceFactory, "fnGetGatewayClientTlsByGatewayId">;
@@ -40,5 +41,6 @@ export const buildDynamicSecretProviders = ({
  [DynamicSecretProviders.Snowflake]: SnowflakeProvider(),
  [DynamicSecretProviders.Totp]: TotpProvider(),
  [DynamicSecretProviders.SapAse]: SapAseProvider(),
- [DynamicSecretProviders.Kubernetes]: KubernetesProvider({ gatewayService })
+ [DynamicSecretProviders.Kubernetes]: KubernetesProvider({ gatewayService }),
+ [DynamicSecretProviders.Vertica]: VerticaProvider({ gatewayService })
});

@@ -2,7 +2,7 @@ import axios from "axios";
import https from "https";

import { InternalServerError } from "@app/lib/errors";
-import { withGatewayProxy } from "@app/lib/gateway";
+import { GatewayProxyProtocol, withGatewayProxy } from "@app/lib/gateway";
import { blockLocalAndPrivateIpAddresses } from "@app/lib/validator";
import { TKubernetesTokenRequest } from "@app/services/identity-kubernetes-auth/identity-kubernetes-auth-types";

@@ -43,6 +43,7 @@ export const KubernetesProvider = ({ gatewayService }: TKubernetesProviderDTO):
      return res;
    },
    {
+     protocol: GatewayProxyProtocol.Tcp,
      targetHost: inputs.targetHost,
      targetPort: inputs.targetPort,
      relayHost,

@@ -16,7 +16,8 @@ export enum SqlProviders {
  MySQL = "mysql2",
  Oracle = "oracledb",
  MsSQL = "mssql",
- SapAse = "sap-ase"
+ SapAse = "sap-ase",
+ Vertica = "vertica"
}

export enum ElasticSearchAuthTypes {
@@ -293,6 +294,39 @@ export const DynamicSecretKubernetesSchema = z.object({
  audiences: z.array(z.string().trim().min(1))
});

export const DynamicSecretVerticaSchema = z.object({
  host: z.string().trim().toLowerCase(),
  port: z.number(),
  username: z.string().trim(),
  password: z.string().trim(),
  database: z.string().trim(),
  gatewayId: z.string().nullable().optional(),
  creationStatement: z.string().trim(),
  revocationStatement: z.string().trim(),
  passwordRequirements: z
    .object({
      length: z.number().min(1).max(250),
      required: z
        .object({
          lowercase: z.number().min(0),
          uppercase: z.number().min(0),
          digits: z.number().min(0),
          symbols: z.number().min(0)
        })
        .refine((data) => {
          const total = Object.values(data).reduce((sum, count) => sum + count, 0);
          return total <= 250;
        }, "Sum of required characters cannot exceed 250"),
      allowedSymbols: z.string().optional()
    })
    .refine((data) => {
      const total = Object.values(data.required).reduce((sum, count) => sum + count, 0);
      return total <= data.length;
    }, "Sum of required characters cannot exceed the total length")
    .optional()
    .describe("Password generation requirements")
});

export const DynamicSecretTotpSchema = z.discriminatedUnion("configType", [
  z.object({
    configType: z.literal(TotpConfigType.URL),
@@ -337,7 +371,8 @@ export enum DynamicSecretProviders {
  Snowflake = "snowflake",
  Totp = "totp",
  SapAse = "sap-ase",
- Kubernetes = "kubernetes"
+ Kubernetes = "kubernetes",
+ Vertica = "vertica"
}

export const DynamicSecretProviderSchema = z.discriminatedUnion("type", [
@@ -356,7 +391,8 @@ export const DynamicSecretProviderSchema = z.discriminatedUnion("type", [
  z.object({ type: z.literal(DynamicSecretProviders.Ldap), inputs: LdapSchema }),
  z.object({ type: z.literal(DynamicSecretProviders.Snowflake), inputs: DynamicSecretSnowflakeSchema }),
  z.object({ type: z.literal(DynamicSecretProviders.Totp), inputs: DynamicSecretTotpSchema }),
- z.object({ type: z.literal(DynamicSecretProviders.Kubernetes), inputs: DynamicSecretKubernetesSchema })
+ z.object({ type: z.literal(DynamicSecretProviders.Kubernetes), inputs: DynamicSecretKubernetesSchema }),
+ z.object({ type: z.literal(DynamicSecretProviders.Vertica), inputs: DynamicSecretVerticaSchema })
]);

export type TDynamicProviderFns = {

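For context, a minimal sketch of an input that satisfies the new Vertica schema (the host, statements, and password shown are illustrative; 5433 is Vertica's default port). Note the refine checks: the sum of the required character counts must not exceed the total length:

const verticaInputs = DynamicSecretVerticaSchema.parse({
  host: "vertica.internal",
  port: 5433,
  username: "dbadmin",
  password: "example-password",
  database: "warehouse",
  creationStatement: "CREATE USER \"{{username}}\" IDENTIFIED BY '{{password}}';",
  revocationStatement: "DROP USER \"{{username}}\" CASCADE;",
  passwordRequirements: {
    length: 48,
    required: { lowercase: 1, uppercase: 1, digits: 1, symbols: 0 }, // sum (3) <= length (48)
    allowedSymbols: "-_.~!*"
  }
});
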
@@ -3,7 +3,7 @@ import handlebars from "handlebars";
import knex from "knex";
import { z } from "zod";

-import { withGatewayProxy } from "@app/lib/gateway";
+import { GatewayProxyProtocol, withGatewayProxy } from "@app/lib/gateway";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { validateHandlebarTemplate } from "@app/lib/template/validate-handlebars";

@@ -185,6 +185,7 @@ export const SqlDatabaseProvider = ({ gatewayService }: TSqlDatabaseProviderDTO)
      await gatewayCallback("localhost", port);
    },
    {
+     protocol: GatewayProxyProtocol.Tcp,
      targetHost: providerInputs.host,
      targetPort: providerInputs.port,
      relayHost,

backend/src/ee/services/dynamic-secret/providers/vertica.ts (Normal file, 368 lines)
@@ -0,0 +1,368 @@
import { randomInt } from "crypto";
import handlebars from "handlebars";
import knex, { Knex } from "knex";
import { z } from "zod";

import { BadRequestError } from "@app/lib/errors";
import { GatewayProxyProtocol, withGatewayProxy } from "@app/lib/gateway";
import { logger } from "@app/lib/logger";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { validateHandlebarTemplate } from "@app/lib/template/validate-handlebars";

import { TGatewayServiceFactory } from "../../gateway/gateway-service";
import { verifyHostInputValidity } from "../dynamic-secret-fns";
import { DynamicSecretVerticaSchema, PasswordRequirements, TDynamicProviderFns } from "./models";

const EXTERNAL_REQUEST_TIMEOUT = 10 * 1000;

interface VersionResult {
  version: string;
}

interface SessionResult {
  session_id?: string;
}

interface DatabaseQueryResult {
  rows?: Array<Record<string, unknown>>;
}

// Extended Knex client interface to handle Vertica-specific overrides
interface VerticaKnexClient extends Knex {
  client: {
    parseVersion?: () => string;
  };
}

const DEFAULT_PASSWORD_REQUIREMENTS = {
  length: 48,
  required: {
    lowercase: 1,
    uppercase: 1,
    digits: 1,
    symbols: 0
  },
  allowedSymbols: "-_.~!*"
};

const generatePassword = (requirements?: PasswordRequirements) => {
  const finalReqs = requirements || DEFAULT_PASSWORD_REQUIREMENTS;

  try {
    const { length, required, allowedSymbols } = finalReqs;

    const chars = {
      lowercase: "abcdefghijklmnopqrstuvwxyz",
      uppercase: "ABCDEFGHIJKLMNOPQRSTUVWXYZ",
      digits: "0123456789",
      symbols: allowedSymbols || "-_.~!*"
    };

    const parts: string[] = [];

    if (required.lowercase > 0) {
      parts.push(
        ...Array(required.lowercase)
          .fill(0)
          .map(() => chars.lowercase[randomInt(chars.lowercase.length)])
      );
    }

    if (required.uppercase > 0) {
      parts.push(
        ...Array(required.uppercase)
          .fill(0)
          .map(() => chars.uppercase[randomInt(chars.uppercase.length)])
      );
    }

    if (required.digits > 0) {
      parts.push(
        ...Array(required.digits)
          .fill(0)
          .map(() => chars.digits[randomInt(chars.digits.length)])
      );
    }

    if (required.symbols > 0) {
      parts.push(
        ...Array(required.symbols)
          .fill(0)
          .map(() => chars.symbols[randomInt(chars.symbols.length)])
      );
    }

    const requiredTotal = Object.values(required).reduce<number>((a, b) => a + b, 0);
    const remainingLength = Math.max(length - requiredTotal, 0);

    const allowedChars = Object.entries(chars)
      .filter(([key]) => required[key as keyof typeof required] > 0)
      .map(([, value]) => value)
      .join("");

    parts.push(
      ...Array(remainingLength)
        .fill(0)
        .map(() => allowedChars[randomInt(allowedChars.length)])
    );

    // shuffle the array to mix up the characters
    for (let i = parts.length - 1; i > 0; i -= 1) {
      const j = randomInt(i + 1);
      [parts[i], parts[j]] = [parts[j], parts[i]];
    }

    return parts.join("");
  } catch (error: unknown) {
    const message = error instanceof Error ? error.message : "Unknown error";
    throw new Error(`Failed to generate password: ${message}`);
  }
};

const generateUsername = (usernameTemplate?: string | null) => {
  const randomUsername = `inf_${alphaNumericNanoId(25)}`; // Username must start with an ASCII letter, so we prepend the username with "inf_"
  if (!usernameTemplate) return randomUsername;

  return handlebars.compile(usernameTemplate)({
    randomUsername,
    unixTimestamp: Math.floor(Date.now() / 1000)
  });
};

type TVerticaProviderDTO = {
  gatewayService: Pick<TGatewayServiceFactory, "fnGetGatewayClientTlsByGatewayId">;
};

export const VerticaProvider = ({ gatewayService }: TVerticaProviderDTO): TDynamicProviderFns => {
  const validateProviderInputs = async (inputs: unknown) => {
    const providerInputs = await DynamicSecretVerticaSchema.parseAsync(inputs);

    const [hostIp] = await verifyHostInputValidity(providerInputs.host, Boolean(providerInputs.gatewayId));
    validateHandlebarTemplate("Vertica creation", providerInputs.creationStatement, {
      allowedExpressions: (val) => ["username", "password"].includes(val)
    });
    if (providerInputs.revocationStatement) {
      validateHandlebarTemplate("Vertica revoke", providerInputs.revocationStatement, {
        allowedExpressions: (val) => ["username"].includes(val)
      });
    }
    return { ...providerInputs, hostIp };
  };

  const $getClient = async (providerInputs: z.infer<typeof DynamicSecretVerticaSchema> & { hostIp: string }) => {
    const config = {
      client: "pg",
      connection: {
        host: providerInputs.hostIp,
        port: providerInputs.port,
        database: providerInputs.database,
        user: providerInputs.username,
        password: providerInputs.password,
        ssl: false
      },
      acquireConnectionTimeout: EXTERNAL_REQUEST_TIMEOUT,
      pool: {
        min: 0,
        max: 1,
        acquireTimeoutMillis: 30000,
        createTimeoutMillis: 30000,
        destroyTimeoutMillis: 5000,
        idleTimeoutMillis: 30000,
        reapIntervalMillis: 1000,
        createRetryIntervalMillis: 100
      },
      // Disable version checking for Vertica compatibility
      version: "9.6.0" // Fake a compatible PostgreSQL version
    };

    const client = knex(config) as VerticaKnexClient;

    // Override the version parsing to prevent errors with Vertica
    if (client.client && typeof client.client.parseVersion !== "undefined") {
      client.client.parseVersion = () => "9.6.0";
    }

    return client;
  };

  const gatewayProxyWrapper = async (
    providerInputs: z.infer<typeof DynamicSecretVerticaSchema>,
    gatewayCallback: (host: string, port: number) => Promise<void>
  ) => {
    const relayDetails = await gatewayService.fnGetGatewayClientTlsByGatewayId(providerInputs.gatewayId as string);
    const [relayHost, relayPort] = relayDetails.relayAddress.split(":");
    await withGatewayProxy(
      async (port) => {
        await gatewayCallback("localhost", port);
      },
      {
        protocol: GatewayProxyProtocol.Tcp,
        targetHost: providerInputs.host,
        targetPort: providerInputs.port,
        relayHost,
        relayPort: Number(relayPort),
        identityId: relayDetails.identityId,
        orgId: relayDetails.orgId,
        tlsOptions: {
          ca: relayDetails.certChain,
          cert: relayDetails.certificate,
          key: relayDetails.privateKey.toString()
        }
      }
    );
  };

  const validateConnection = async (inputs: unknown) => {
    const providerInputs = await validateProviderInputs(inputs);
    let isConnected = false;

    const gatewayCallback = async (host = providerInputs.hostIp, port = providerInputs.port) => {
      let client: VerticaKnexClient | null = null;

      try {
        client = await $getClient({ ...providerInputs, hostIp: host, port });

        const clientResult: DatabaseQueryResult = await client.raw("SELECT version() AS version");

        const resultFromSelectedDatabase = clientResult.rows?.[0] as VersionResult | undefined;

        if (!resultFromSelectedDatabase?.version) {
          throw new BadRequestError({
            message: "Failed to validate Vertica connection, version query failed"
          });
        }

        isConnected = true;
      } finally {
        if (client) await client.destroy();
      }
    };

    if (providerInputs.gatewayId) {
      await gatewayProxyWrapper(providerInputs, gatewayCallback);
    } else {
      await gatewayCallback();
    }

    return isConnected;
  };

  const create = async (data: { inputs: unknown; usernameTemplate?: string | null }) => {
    const { inputs, usernameTemplate } = data;
    const providerInputs = await validateProviderInputs(inputs);

    const username = generateUsername(usernameTemplate);
    const password = generatePassword(providerInputs.passwordRequirements);

    const gatewayCallback = async (host = providerInputs.host, port = providerInputs.port) => {
      let client: VerticaKnexClient | null = null;

      try {
        client = await $getClient({ ...providerInputs, hostIp: host, port });

        const creationStatement = handlebars.compile(providerInputs.creationStatement, { noEscape: true })({
          username,
          password
        });

        const queries = creationStatement.trim().replaceAll("\n", "").split(";").filter(Boolean);

        // Execute queries sequentially to maintain transaction integrity
        for (const query of queries) {
          const trimmedQuery = query.trim();
          if (trimmedQuery) {
            // eslint-disable-next-line no-await-in-loop
            await client.raw(trimmedQuery);
          }
        }
      } finally {
        if (client) await client.destroy();
      }
    };

    if (providerInputs.gatewayId) {
      await gatewayProxyWrapper(providerInputs, gatewayCallback);
    } else {
      await gatewayCallback();
    }

    return { entityId: username, data: { DB_USERNAME: username, DB_PASSWORD: password } };
  };

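  // Illustrative note (not part of the committed file): a creation statement template for this
  // provider might look like the following; the exact SQL is an assumption, not taken from this diff:
  //
  //   CREATE USER "{{username}}" IDENTIFIED BY '{{password}}';
  //   GRANT USAGE ON SCHEMA public TO "{{username}}";
  //
  // After handlebars substitution the string is split on ";" (see `queries` above), so each
  // statement runs as its own client.raw(...) call, in order.
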
  const revoke = async (inputs: unknown, username: string) => {
    const providerInputs = await validateProviderInputs(inputs);

    const gatewayCallback = async (host = providerInputs.host, port = providerInputs.port) => {
      let client: VerticaKnexClient | null = null;

      try {
        client = await $getClient({ ...providerInputs, hostIp: host, port });

        const revokeStatement = handlebars.compile(providerInputs.revocationStatement, { noEscape: true })({
          username
        });

        const queries = revokeStatement.trim().replaceAll("\n", "").split(";").filter(Boolean);

        // Check for active sessions and close them
        try {
          const sessionResult: DatabaseQueryResult = await client.raw(
            "SELECT session_id FROM sessions WHERE user_name = ?",
            [username]
          );

          const activeSessions = (sessionResult.rows || []) as SessionResult[];

          // Close all sessions in parallel since they're independent operations
          if (activeSessions.length > 0) {
            const sessionClosePromises = activeSessions.map(async (session) => {
              try {
                await client!.raw("SELECT close_session(?)", [session.session_id]);
              } catch (error) {
                // Continue if session is already closed
                logger.error(error, `Failed to close session ${session.session_id}`);
              }
            });

            await Promise.allSettled(sessionClosePromises);
          }
        } catch (error) {
          // Continue if we can't query sessions (permissions, etc.)
          logger.error(error, "Could not query/close active sessions");
        }

        // Execute revocation queries sequentially to maintain transaction integrity
        for (const query of queries) {
          const trimmedQuery = query.trim();
          if (trimmedQuery) {
            // eslint-disable-next-line no-await-in-loop
            await client.raw(trimmedQuery);
          }
        }
      } finally {
        if (client) await client.destroy();
      }
    };

    if (providerInputs.gatewayId) {
      await gatewayProxyWrapper(providerInputs, gatewayCallback);
    } else {
      await gatewayCallback();
    }

    return { entityId: username };
  };

  const renew = async (_: unknown, username: string) => {
    // No need for renewal
    return { entityId: username };
  };

  return {
    validateProviderInputs,
    validateConnection,
    create,
    revoke,
    renew
  };
};

@@ -42,6 +42,10 @@ export type TListGroupUsersDTO = {
  filter?: EFilterReturnedUsers;
} & TGenericPermission;

export type TListProjectGroupUsersDTO = TListGroupUsersDTO & {
  projectId: string;
};

export type TAddUserToGroupDTO = {
  id: string;
  username: string;

@@ -56,6 +56,7 @@ export const getDefaultOnPremFeatures = (): TFeatureSet => ({
  kmip: false,
  gateway: false,
  sshHostGroups: false,
  secretScanning: false,
  enterpriseSecretSyncs: false,
  enterpriseAppConnections: false
});

@@ -72,6 +72,7 @@ export type TFeatureSet = {
  kmip: false;
  gateway: false;
  sshHostGroups: false;
  secretScanning: false;
  enterpriseSecretSyncs: false;
  enterpriseAppConnections: false;
};

@@ -13,6 +13,9 @@ import {
  ProjectPermissionPkiTemplateActions,
  ProjectPermissionSecretActions,
  ProjectPermissionSecretRotationActions,
+ ProjectPermissionSecretScanningConfigActions,
+ ProjectPermissionSecretScanningDataSourceActions,
+ ProjectPermissionSecretScanningFindingActions,
  ProjectPermissionSecretSyncActions,
  ProjectPermissionSet,
  ProjectPermissionSshHostActions,
@@ -148,6 +151,7 @@ const buildAdminPermissionRules = () => {
  can(
    [
      ProjectPermissionSecretActions.DescribeSecret,
      ProjectPermissionSecretActions.DescribeAndReadValue,
      ProjectPermissionSecretActions.ReadValue,
      ProjectPermissionSecretActions.Create,
      ProjectPermissionSecretActions.Edit,
@@ -219,6 +223,29 @@ const buildAdminPermissionRules = () => {
    ProjectPermissionSub.SecretRotation
  );

  can(
    [
      ProjectPermissionSecretScanningDataSourceActions.Create,
      ProjectPermissionSecretScanningDataSourceActions.Edit,
      ProjectPermissionSecretScanningDataSourceActions.Delete,
      ProjectPermissionSecretScanningDataSourceActions.Read,
      ProjectPermissionSecretScanningDataSourceActions.TriggerScans,
      ProjectPermissionSecretScanningDataSourceActions.ReadScans,
      ProjectPermissionSecretScanningDataSourceActions.ReadResources
    ],
    ProjectPermissionSub.SecretScanningDataSources
  );

  can(
    [ProjectPermissionSecretScanningFindingActions.Read, ProjectPermissionSecretScanningFindingActions.Update],
    ProjectPermissionSub.SecretScanningFindings
  );

  can(
    [ProjectPermissionSecretScanningConfigActions.Read, ProjectPermissionSecretScanningConfigActions.Update],
    ProjectPermissionSub.SecretScanningConfigs
  );

  return rules;
};

@@ -228,6 +255,7 @@ const buildMemberPermissionRules = () => {
  can(
    [
      ProjectPermissionSecretActions.DescribeSecret,
      ProjectPermissionSecretActions.DescribeAndReadValue,
      ProjectPermissionSecretActions.ReadValue,
      ProjectPermissionSecretActions.Edit,
      ProjectPermissionSecretActions.Create,
@@ -399,6 +427,23 @@ const buildMemberPermissionRules = () => {
    ProjectPermissionSub.SecretSyncs
  );

  can(
    [
      ProjectPermissionSecretScanningDataSourceActions.Read,
      ProjectPermissionSecretScanningDataSourceActions.TriggerScans,
      ProjectPermissionSecretScanningDataSourceActions.ReadScans,
      ProjectPermissionSecretScanningDataSourceActions.ReadResources
    ],
    ProjectPermissionSub.SecretScanningDataSources
  );

  can(
    [ProjectPermissionSecretScanningFindingActions.Read, ProjectPermissionSecretScanningFindingActions.Update],
    ProjectPermissionSub.SecretScanningFindings
  );

  can([ProjectPermissionSecretScanningConfigActions.Read], ProjectPermissionSub.SecretScanningConfigs);

  return rules;
};

@@ -435,6 +480,19 @@ const buildViewerPermissionRules = () => {
  can(ProjectPermissionActions.Read, ProjectPermissionSub.SshCertificateTemplates);
  can(ProjectPermissionSecretSyncActions.Read, ProjectPermissionSub.SecretSyncs);

  can(
    [
      ProjectPermissionSecretScanningDataSourceActions.Read,
      ProjectPermissionSecretScanningDataSourceActions.ReadScans,
      ProjectPermissionSecretScanningDataSourceActions.ReadResources
    ],
    ProjectPermissionSub.SecretScanningDataSources
  );

  can([ProjectPermissionSecretScanningFindingActions.Read], ProjectPermissionSub.SecretScanningFindings);

  can([ProjectPermissionSecretScanningConfigActions.Read], ProjectPermissionSub.SecretScanningConfigs);

  return rules;
};

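In effect, admins get full control over scanning data sources, members can operate but not manage them, and viewers are read-only. A minimal sketch of how these rules would be checked (this assumes a CASL ability built from the rule sets with createMongoAbility, the usual CASL pattern; the wiring shown is illustrative, not from this diff):

import { createMongoAbility } from "@casl/ability";

const memberAbility = createMongoAbility<ProjectPermissionSet>(buildMemberPermissionRules());

memberAbility.can(
  ProjectPermissionSecretScanningDataSourceActions.TriggerScans,
  ProjectPermissionSub.SecretScanningDataSources
); // true: members may trigger scans

memberAbility.can(
  ProjectPermissionSecretScanningDataSourceActions.Delete,
  ProjectPermissionSub.SecretScanningDataSources
); // false: deleting data sources is admin-only
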
@@ -132,6 +132,26 @@ export enum ProjectPermissionKmipActions {
  GenerateClientCertificates = "generate-client-certificates"
}

export enum ProjectPermissionSecretScanningDataSourceActions {
  Read = "read-data-sources",
  Create = "create-data-sources",
  Edit = "edit-data-sources",
  Delete = "delete-data-sources",
  TriggerScans = "trigger-data-source-scans",
  ReadScans = "read-data-source-scans",
  ReadResources = "read-data-source-resources"
}

export enum ProjectPermissionSecretScanningFindingActions {
  Read = "read-findings",
  Update = "update-findings"
}

export enum ProjectPermissionSecretScanningConfigActions {
  Read = "read-configs",
  Update = "update-configs"
}

export enum ProjectPermissionSub {
  Role = "role",
  Member = "member",
@@ -167,7 +187,10 @@ export enum ProjectPermissionSub {
  Kms = "kms",
  Cmek = "cmek",
  SecretSyncs = "secret-syncs",
- Kmip = "kmip"
+ Kmip = "kmip",
+ SecretScanningDataSources = "secret-scanning-data-sources",
+ SecretScanningFindings = "secret-scanning-findings",
+ SecretScanningConfigs = "secret-scanning-configs"
}

export type SecretSubjectFields = {
@@ -301,7 +324,10 @@ export type ProjectPermissionSet =
  | [ProjectPermissionActions.Edit, ProjectPermissionSub.Project]
  | [ProjectPermissionActions.Read, ProjectPermissionSub.SecretRollback]
  | [ProjectPermissionActions.Create, ProjectPermissionSub.SecretRollback]
- | [ProjectPermissionActions.Edit, ProjectPermissionSub.Kms];
+ | [ProjectPermissionActions.Edit, ProjectPermissionSub.Kms]
+ | [ProjectPermissionSecretScanningDataSourceActions, ProjectPermissionSub.SecretScanningDataSources]
+ | [ProjectPermissionSecretScanningFindingActions, ProjectPermissionSub.SecretScanningFindings]
+ | [ProjectPermissionSecretScanningConfigActions, ProjectPermissionSub.SecretScanningConfigs];

const SECRET_PATH_MISSING_SLASH_ERR_MSG = "Invalid Secret Path; it must start with a '/'";
const SECRET_PATH_PERMISSION_OPERATOR_SCHEMA = z.union([
@@ -631,6 +657,26 @@ const GeneralPermissionSchema = [
    action: CASL_ACTION_SCHEMA_NATIVE_ENUM(ProjectPermissionKmipActions).describe(
      "Describe what action an entity can take."
    )
  }),
  z.object({
    subject: z
      .literal(ProjectPermissionSub.SecretScanningDataSources)
      .describe("The entity this permission pertains to."),
    action: CASL_ACTION_SCHEMA_NATIVE_ENUM(ProjectPermissionSecretScanningDataSourceActions).describe(
      "Describe what action an entity can take."
    )
  }),
  z.object({
    subject: z.literal(ProjectPermissionSub.SecretScanningFindings).describe("The entity this permission pertains to."),
    action: CASL_ACTION_SCHEMA_NATIVE_ENUM(ProjectPermissionSecretScanningFindingActions).describe(
      "Describe what action an entity can take."
    )
  }),
  z.object({
    subject: z.literal(ProjectPermissionSub.SecretScanningConfigs).describe("The entity this permission pertains to."),
    action: CASL_ACTION_SCHEMA_NATIVE_ENUM(ProjectPermissionSecretScanningConfigActions).describe(
      "Describe what action an entity can take."
    )
  })
];

@@ -0,0 +1,9 @@
import { SecretScanningDataSource } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
import { TSecretScanningDataSourceListItem } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-types";
import { AppConnection } from "@app/services/app-connection/app-connection-enums";

export const GITHUB_SECRET_SCANNING_DATA_SOURCE_LIST_OPTION: TSecretScanningDataSourceListItem = {
  name: "GitHub",
  type: SecretScanningDataSource.GitHub,
  connection: AppConnection.GitHubRadar
};

@@ -0,0 +1,230 @@
import { join } from "path";
import { ProbotOctokit } from "probot";

import { scanContentAndGetFindings } from "@app/ee/services/secret-scanning/secret-scanning-queue/secret-scanning-fns";
import { SecretMatch } from "@app/ee/services/secret-scanning/secret-scanning-queue/secret-scanning-queue-types";
import {
  SecretScanningDataSource,
  SecretScanningFindingSeverity,
  SecretScanningResource
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
import {
  cloneRepository,
  convertPatchLineToFileLineNumber,
  replaceNonChangesWithNewlines
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-fns";
import {
  TSecretScanningFactoryGetDiffScanFindingsPayload,
  TSecretScanningFactoryGetDiffScanResourcePayload,
  TSecretScanningFactoryGetFullScanPath,
  TSecretScanningFactoryInitialize,
  TSecretScanningFactoryListRawResources,
  TSecretScanningFactoryPostInitialization
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-types";
import { getConfig } from "@app/lib/config/env";
import { BadRequestError } from "@app/lib/errors";
import { titleCaseToCamelCase } from "@app/lib/fn";
import { GitHubRepositoryRegex } from "@app/lib/regex";
import { listGitHubRadarRepositories, TGitHubRadarConnection } from "@app/services/app-connection/github-radar";

import { TGitHubDataSourceWithConnection, TQueueGitHubResourceDiffScan } from "./github-secret-scanning-types";

export const GitHubSecretScanningFactory = () => {
  const initialize: TSecretScanningFactoryInitialize<TGitHubRadarConnection> = async (
    { connection, secretScanningV2DAL },
    callback
  ) => {
    const externalId = connection.credentials.installationId;

    const existingDataSource = await secretScanningV2DAL.dataSources.findOne({
      externalId,
      type: SecretScanningDataSource.GitHub
    });

    if (existingDataSource)
      throw new BadRequestError({
        message: `A Data Source already exists for this GitHub Radar Connection in the Project with ID "${existingDataSource.projectId}"`
      });

    return callback({
      externalId
    });
  };

  const postInitialization: TSecretScanningFactoryPostInitialization<TGitHubRadarConnection> = async () => {
    // no post-initialization required
  };

  const listRawResources: TSecretScanningFactoryListRawResources<TGitHubDataSourceWithConnection> = async (
    dataSource
  ) => {
    const {
      connection,
      config: { includeRepos }
    } = dataSource;

    const repos = await listGitHubRadarRepositories(connection);

    const filteredRepos: typeof repos = [];
    if (includeRepos.includes("*")) {
      filteredRepos.push(...repos);
    } else {
      filteredRepos.push(...repos.filter((repo) => includeRepos.includes(repo.full_name)));
    }

    return filteredRepos.map(({ id, full_name }) => ({
      name: full_name,
      externalId: id.toString(),
      type: SecretScanningResource.Repository
    }));
  };

  const getFullScanPath: TSecretScanningFactoryGetFullScanPath<TGitHubDataSourceWithConnection> = async ({
    dataSource,
    resourceName,
    tempFolder
  }) => {
    const appCfg = getConfig();
    const {
      connection: {
        credentials: { installationId }
      }
    } = dataSource;

    const octokit = new ProbotOctokit({
      auth: {
        appId: appCfg.INF_APP_CONNECTION_GITHUB_RADAR_APP_ID,
        privateKey: appCfg.INF_APP_CONNECTION_GITHUB_RADAR_APP_PRIVATE_KEY,
        installationId
      }
    });

    const {
      data: { token }
    } = await octokit.apps.createInstallationAccessToken({
      installation_id: Number(installationId)
    });

    const repoPath = join(tempFolder, "repo.git");

    if (!GitHubRepositoryRegex.test(resourceName)) {
      throw new Error("Invalid GitHub repository name");
    }

    await cloneRepository({
      cloneUrl: `https://x-access-token:${token}@github.com/${resourceName}.git`,
      repoPath
    });

    return repoPath;
  };

  const getDiffScanResourcePayload: TSecretScanningFactoryGetDiffScanResourcePayload<
    TQueueGitHubResourceDiffScan["payload"]
  > = ({ repository }) => {
    return {
      name: repository.full_name,
      externalId: repository.id.toString(),
      type: SecretScanningResource.Repository
    };
  };

  const getDiffScanFindingsPayload: TSecretScanningFactoryGetDiffScanFindingsPayload<
    TGitHubDataSourceWithConnection,
    TQueueGitHubResourceDiffScan["payload"]
  > = async ({ dataSource, payload, resourceName, configPath }) => {
    const appCfg = getConfig();
    const {
      connection: {
        credentials: { installationId }
      }
    } = dataSource;

    const octokit = new ProbotOctokit({
      auth: {
        appId: appCfg.INF_APP_CONNECTION_GITHUB_RADAR_APP_ID,
        privateKey: appCfg.INF_APP_CONNECTION_GITHUB_RADAR_APP_PRIVATE_KEY,
        installationId
      }
    });

    const { commits, repository } = payload;

    const [owner, repo] = repository.full_name.split("/");

    const allFindings: SecretMatch[] = [];

    for (const commit of commits) {
      // eslint-disable-next-line no-await-in-loop
      const commitData = await octokit.repos.getCommit({
        owner,
        repo,
        ref: commit.id
      });

      // eslint-disable-next-line no-continue
      if (!commitData.data.files) continue;

      for (const file of commitData.data.files) {
        if ((file.status === "added" || file.status === "modified") && file.patch) {
          // eslint-disable-next-line
          const findings = await scanContentAndGetFindings(
            replaceNonChangesWithNewlines(`\n${file.patch}`),
            configPath
          );

          const adjustedFindings = findings.map((finding) => {
            const startLine = convertPatchLineToFileLineNumber(file.patch!, finding.StartLine);
            const endLine =
              finding.StartLine === finding.EndLine
                ? startLine
                : convertPatchLineToFileLineNumber(file.patch!, finding.EndLine);
            const startColumn = finding.StartColumn - 1; // subtract 1 for +
            const endColumn = finding.EndColumn - 1; // subtract 1 for +

            return {
              ...finding,
              StartLine: startLine,
              EndLine: endLine,
              StartColumn: startColumn,
              EndColumn: endColumn,
              File: file.filename,
              Commit: commit.id,
              Author: commit.author.name,
              Email: commit.author.email ?? "",
              Message: commit.message,
              Fingerprint: `${commit.id}:${file.filename}:${finding.RuleID}:${startLine}:${startColumn}`,
              Date: commit.timestamp,
              Link: `https://github.com/${resourceName}/blob/${commit.id}/${file.filename}#L${startLine}`
            };
          });

          allFindings.push(...adjustedFindings);
        }
      }
    }

    return allFindings.map(
      ({
        // discard match and secret as we don't want to store
        Match,
        Secret,
        ...finding
      }) => ({
        details: titleCaseToCamelCase(finding),
        fingerprint: finding.Fingerprint,
        severity: SecretScanningFindingSeverity.High,
        rule: finding.RuleID
      })
    );
  };

  return {
    initialize,
    postInitialization,
    listRawResources,
    getFullScanPath,
    getDiffScanResourcePayload,
    getDiffScanFindingsPayload
  };
};

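A note on the fingerprint assembled above, for reference (the concrete values are illustrative):

// "<commitSha>:<filePath>:<ruleId>:<startLine>:<startColumn>", e.g.
// "d6f32e1a:src/config.ts:generic-api-key:42:17"
// This gives each finding a stable identity across rescans of the same commit. The line and
// column adjustments above translate positions in the unified diff patch back into positions
// in the file as of that commit, dropping the leading "+" that prefixes added patch lines.
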
@@ -0,0 +1,85 @@
import { z } from "zod";

import {
  SecretScanningDataSource,
  SecretScanningResource
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
import {
  BaseCreateSecretScanningDataSourceSchema,
  BaseSecretScanningDataSourceSchema,
  BaseSecretScanningFindingSchema,
  BaseUpdateSecretScanningDataSourceSchema,
  GitRepositoryScanFindingDetailsSchema
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-schemas";
import { SecretScanningDataSources } from "@app/lib/api-docs";
import { GitHubRepositoryRegex } from "@app/lib/regex";
import { AppConnection } from "@app/services/app-connection/app-connection-enums";

export const GitHubDataSourceConfigSchema = z.object({
  includeRepos: z
    .array(
      z
        .string()
        .min(1)
        .max(256)
        .refine((value) => value === "*" || GitHubRepositoryRegex.test(value), "Invalid repository name format")
    )
    .nonempty("One or more repositories required")
    .max(100, "Cannot configure more than 100 repositories")
    .default(["*"])
    .describe(SecretScanningDataSources.CONFIG.GITHUB.includeRepos)
});

export const GitHubDataSourceSchema = BaseSecretScanningDataSourceSchema({
  type: SecretScanningDataSource.GitHub,
  isConnectionRequired: true
})
  .extend({
    config: GitHubDataSourceConfigSchema
  })
  .describe(
    JSON.stringify({
      title: "GitHub"
    })
  );

export const CreateGitHubDataSourceSchema = BaseCreateSecretScanningDataSourceSchema({
  type: SecretScanningDataSource.GitHub,
  isConnectionRequired: true
})
  .extend({
    config: GitHubDataSourceConfigSchema
  })
  .describe(
    JSON.stringify({
      title: "GitHub"
    })
  );

export const UpdateGitHubDataSourceSchema = BaseUpdateSecretScanningDataSourceSchema(SecretScanningDataSource.GitHub)
  .extend({
    config: GitHubDataSourceConfigSchema.optional()
  })
  .describe(
    JSON.stringify({
      title: "GitHub"
    })
  );

export const GitHubDataSourceListItemSchema = z
  .object({
    name: z.literal("GitHub"),
    connection: z.literal(AppConnection.GitHubRadar),
    type: z.literal(SecretScanningDataSource.GitHub)
  })
  .describe(
    JSON.stringify({
      title: "GitHub"
    })
  );

export const GitHubFindingSchema = BaseSecretScanningFindingSchema.extend({
  resourceType: z.literal(SecretScanningResource.Repository),
  dataSourceType: z.literal(SecretScanningDataSource.GitHub),
  details: GitRepositoryScanFindingDetailsSchema
});

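A minimal sketch of how the config schema behaves (repository names illustrative):

// Defaults to the wildcard when no config is provided
GitHubDataSourceConfigSchema.parse({});
// -> { includeRepos: ["*"] }

// Each entry must be "*" or satisfy GitHubRepositoryRegex (an "owner/repo" style name)
GitHubDataSourceConfigSchema.parse({
  includeRepos: ["infisical/infisical", "infisical/docs"]
});
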
@@ -0,0 +1,87 @@
import { PushEvent } from "@octokit/webhooks-types";

import { TSecretScanningV2DALFactory } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-dal";
import { SecretScanningDataSource } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
import { TSecretScanningV2QueueServiceFactory } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-queue";
import { logger } from "@app/lib/logger";

import { TGitHubDataSource } from "./github-secret-scanning-types";

export const githubSecretScanningService = (
  secretScanningV2DAL: TSecretScanningV2DALFactory,
  secretScanningV2Queue: Pick<TSecretScanningV2QueueServiceFactory, "queueResourceDiffScan">
) => {
  const handleInstallationDeletedEvent = async (installationId: number) => {
    const dataSource = await secretScanningV2DAL.dataSources.findOne({
      externalId: String(installationId),
      type: SecretScanningDataSource.GitHub
    });

    if (!dataSource) {
      logger.error(
        `secretScanningV2RemoveEvent: GitHub - Could not find data source [installationId=${installationId}]`
      );
      return;
    }

    logger.info(
      `secretScanningV2RemoveEvent: GitHub - installation deleted [installationId=${installationId}] [dataSourceId=${dataSource.id}]`
    );

    await secretScanningV2DAL.dataSources.updateById(dataSource.id, {
      isDisconnected: true
    });
  };

  const handlePushEvent = async (payload: PushEvent) => {
    const { commits, repository, installation } = payload;

    if (!commits || !repository || !installation) {
      logger.warn(
        `secretScanningV2PushEvent: GitHub - Insufficient data [commits=${commits?.length ?? 0}] [repository=${repository.name}] [installationId=${installation?.id}]`
      );
      return;
    }

    const dataSource = (await secretScanningV2DAL.dataSources.findOne({
      externalId: String(installation.id),
      type: SecretScanningDataSource.GitHub
    })) as TGitHubDataSource | undefined;

    if (!dataSource) {
      logger.error(
        `secretScanningV2PushEvent: GitHub - Could not find data source [installationId=${installation.id}]`
      );
      return;
    }

    const {
      isAutoScanEnabled,
      config: { includeRepos }
    } = dataSource;

    if (!isAutoScanEnabled) {
      logger.info(
        `secretScanningV2PushEvent: GitHub - ignoring due to auto scan disabled [dataSourceId=${dataSource.id}] [installationId=${installation.id}]`
      );
      return;
    }

    if (includeRepos.includes("*") || includeRepos.includes(repository.full_name)) {
      await secretScanningV2Queue.queueResourceDiffScan({
        dataSourceType: SecretScanningDataSource.GitHub,
        payload,
        dataSourceId: dataSource.id
      });
    } else {
      logger.info(
        `secretScanningV2PushEvent: GitHub - ignoring due to repository not being present in config [installationId=${installation.id}] [dataSourceId=${dataSource.id}]`
      );
    }
  };

  return {
    handlePushEvent,
    handleInstallationDeletedEvent
  };
};

|
||||
import { PushEvent } from "@octokit/webhooks-types";
|
||||
import { z } from "zod";
|
||||
|
||||
import { SecretScanningDataSource } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
|
||||
import { TGitHubRadarConnection } from "@app/services/app-connection/github-radar";
|
||||
|
||||
import {
|
||||
CreateGitHubDataSourceSchema,
|
||||
GitHubDataSourceListItemSchema,
|
||||
GitHubDataSourceSchema,
|
||||
GitHubFindingSchema
|
||||
} from "./github-secret-scanning-schemas";
|
||||
|
||||
export type TGitHubDataSource = z.infer<typeof GitHubDataSourceSchema>;
|
||||
|
||||
export type TGitHubDataSourceInput = z.infer<typeof CreateGitHubDataSourceSchema>;
|
||||
|
||||
export type TGitHubDataSourceListItem = z.infer<typeof GitHubDataSourceListItemSchema>;
|
||||
|
||||
export type TGitHubFinding = z.infer<typeof GitHubFindingSchema>;
|
||||
|
||||
export type TGitHubDataSourceWithConnection = TGitHubDataSource & {
|
||||
connection: TGitHubRadarConnection;
|
||||
};
|
||||
|
||||
export type TQueueGitHubResourceDiffScan = {
|
||||
dataSourceType: SecretScanningDataSource.GitHub;
|
||||
payload: PushEvent;
|
||||
dataSourceId: string;
|
||||
resourceId: string;
|
||||
scanId: string;
|
||||
};
|
@@ -0,0 +1,3 @@
export * from "./github-secret-scanning-constants";
export * from "./github-secret-scanning-schemas";
export * from "./github-secret-scanning-types";

@@ -0,0 +1,460 @@
import { Knex } from "knex";

import { TDbClient } from "@app/db";
import {
  SecretScanningResourcesSchema,
  SecretScanningScansSchema,
  TableName,
  TSecretScanningDataSources
} from "@app/db/schemas";
import { SecretScanningFindingStatus } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
import { DatabaseError } from "@app/lib/errors";
import {
  buildFindFilter,
  ormify,
  prependTableNameToFindFilter,
  selectAllTableCols,
  sqlNestRelationships,
  TFindOpt
} from "@app/lib/knex";

export type TSecretScanningV2DALFactory = ReturnType<typeof secretScanningV2DALFactory>;

type TSecretScanningDataSourceFindFilter = Parameters<typeof buildFindFilter<TSecretScanningDataSources>>[0];
type TSecretScanningDataSourceFindOptions = TFindOpt<TSecretScanningDataSources, true, "name">;

const baseSecretScanningDataSourceQuery = ({
  filter = {},
  db,
  tx
}: {
  db: TDbClient;
  filter?: TSecretScanningDataSourceFindFilter;
  options?: TSecretScanningDataSourceFindOptions;
  tx?: Knex;
}) => {
  const query = (tx || db.replicaNode())(TableName.SecretScanningDataSource)
    .join(
      TableName.AppConnection,
      `${TableName.SecretScanningDataSource}.connectionId`,
      `${TableName.AppConnection}.id`
    )
    .select(selectAllTableCols(TableName.SecretScanningDataSource))
    .select(
      // entire connection
      db.ref("name").withSchema(TableName.AppConnection).as("connectionName"),
      db.ref("method").withSchema(TableName.AppConnection).as("connectionMethod"),
      db.ref("app").withSchema(TableName.AppConnection).as("connectionApp"),
      db.ref("orgId").withSchema(TableName.AppConnection).as("connectionOrgId"),
      db.ref("encryptedCredentials").withSchema(TableName.AppConnection).as("connectionEncryptedCredentials"),
      db.ref("description").withSchema(TableName.AppConnection).as("connectionDescription"),
      db.ref("version").withSchema(TableName.AppConnection).as("connectionVersion"),
      db.ref("createdAt").withSchema(TableName.AppConnection).as("connectionCreatedAt"),
      db.ref("updatedAt").withSchema(TableName.AppConnection).as("connectionUpdatedAt"),
      db
        .ref("isPlatformManagedCredentials")
        .withSchema(TableName.AppConnection)
        .as("connectionIsPlatformManagedCredentials")
    );

  if (filter) {
    /* eslint-disable @typescript-eslint/no-misused-promises */
    void query.where(buildFindFilter(prependTableNameToFindFilter(TableName.SecretScanningDataSource, filter)));
  }

  return query;
};

const expandSecretScanningDataSource = <
  T extends Awaited<ReturnType<typeof baseSecretScanningDataSourceQuery>>[number]
>(
  dataSource: T
) => {
  const {
    connectionApp,
    connectionName,
    connectionId,
    connectionOrgId,
    connectionEncryptedCredentials,
    connectionMethod,
    connectionDescription,
    connectionCreatedAt,
    connectionUpdatedAt,
    connectionVersion,
    connectionIsPlatformManagedCredentials,
    ...el
  } = dataSource;

  return {
    ...el,
    connectionId,
    connection: connectionId
      ? {
          app: connectionApp,
          id: connectionId,
          name: connectionName,
          orgId: connectionOrgId,
          encryptedCredentials: connectionEncryptedCredentials,
          method: connectionMethod,
          description: connectionDescription,
          createdAt: connectionCreatedAt,
          updatedAt: connectionUpdatedAt,
          version: connectionVersion,
          isPlatformManagedCredentials: connectionIsPlatformManagedCredentials
        }
      : undefined
  };
};

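// Illustrative note (not part of the committed file): a flat row produced by the query above,
// e.g. { id, ..., connectionName: "my-radar-conn", connectionApp: "github-radar", ... },
// is folded by expandSecretScanningDataSource into
// { id, ..., connectionId, connection: { name: "my-radar-conn", app: "github-radar", ... } }.
// The field values shown are placeholders.
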
export const secretScanningV2DALFactory = (db: TDbClient) => {
  const dataSourceOrm = ormify(db, TableName.SecretScanningDataSource);
  const resourceOrm = ormify(db, TableName.SecretScanningResource);
  const scanOrm = ormify(db, TableName.SecretScanningScan);
  const findingOrm = ormify(db, TableName.SecretScanningFinding);
  const configOrm = ormify(db, TableName.SecretScanningConfig);

  const findDataSource = async (filter: Parameters<(typeof dataSourceOrm)["find"]>[0], tx?: Knex) => {
    try {
      const dataSources = await baseSecretScanningDataSourceQuery({ filter, db, tx });

      if (!dataSources.length) return [];

      return dataSources.map(expandSecretScanningDataSource);
    } catch (error) {
      throw new DatabaseError({ error, name: "Find - Secret Scanning Data Source" });
    }
  };

  const findDataSourceById = async (id: string, tx?: Knex) => {
    try {
      const dataSource = await baseSecretScanningDataSourceQuery({ filter: { id }, db, tx }).first();

      if (dataSource) return expandSecretScanningDataSource(dataSource);
    } catch (error) {
      throw new DatabaseError({ error, name: "Find By ID - Secret Scanning Data Source" });
    }
  };

  const createDataSource = async (data: Parameters<(typeof dataSourceOrm)["create"]>[0], tx?: Knex) => {
    const source = await dataSourceOrm.create(data, tx);

    const dataSource = (await baseSecretScanningDataSourceQuery({
      filter: { id: source.id },
      db,
      tx
    }).first())!;

    return expandSecretScanningDataSource(dataSource);
  };

  const updateDataSourceById = async (
    dataSourceId: string,
    data: Parameters<(typeof dataSourceOrm)["updateById"]>[1],
    tx?: Knex
  ) => {
    const source = await dataSourceOrm.updateById(dataSourceId, data, tx);

    const dataSource = (await baseSecretScanningDataSourceQuery({
      filter: { id: source.id },
      db,
      tx
    }).first())!;

    return expandSecretScanningDataSource(dataSource);
  };

  const deleteDataSourceById = async (dataSourceId: string, tx?: Knex) => {
    const dataSource = (await baseSecretScanningDataSourceQuery({
      filter: { id: dataSourceId },
      db,
      tx
    }).first())!;

    await dataSourceOrm.deleteById(dataSourceId, tx);

    return expandSecretScanningDataSource(dataSource);
  };

  const findOneDataSource = async (filter: Parameters<(typeof dataSourceOrm)["findOne"]>[0], tx?: Knex) => {
    try {
      const dataSource = await baseSecretScanningDataSourceQuery({ filter, db, tx }).first();

      if (dataSource) {
        return expandSecretScanningDataSource(dataSource);
      }
    } catch (error) {
      throw new DatabaseError({ error, name: "Find One - Secret Scanning Data Source" });
    }
  };

const findDataSourceWithDetails = async (filter: Parameters<(typeof dataSourceOrm)["find"]>[0], tx?: Knex) => {
|
||||
try {
|
||||
// TODO (scott): this query will probably need to be optimized
|
||||
|
||||
const dataSources = await baseSecretScanningDataSourceQuery({ filter, db, tx })
|
||||
.leftJoin(
|
||||
TableName.SecretScanningResource,
|
||||
`${TableName.SecretScanningResource}.dataSourceId`,
|
||||
`${TableName.SecretScanningDataSource}.id`
|
||||
)
|
||||
.leftJoin(
|
||||
TableName.SecretScanningScan,
|
||||
`${TableName.SecretScanningScan}.resourceId`,
|
||||
`${TableName.SecretScanningResource}.id`
|
||||
)
|
||||
.leftJoin(
|
||||
TableName.SecretScanningFinding,
|
||||
`${TableName.SecretScanningFinding}.scanId`,
|
||||
`${TableName.SecretScanningScan}.id`
|
||||
)
|
||||
.where((qb) => {
|
||||
void qb
|
||||
.where(`${TableName.SecretScanningFinding}.status`, SecretScanningFindingStatus.Unresolved)
|
||||
.orWhereNull(`${TableName.SecretScanningFinding}.status`);
|
||||
})
|
||||
.select(
|
||||
db.ref("id").withSchema(TableName.SecretScanningScan).as("scanId"),
|
||||
db.ref("status").withSchema(TableName.SecretScanningScan).as("scanStatus"),
|
||||
db.ref("statusMessage").withSchema(TableName.SecretScanningScan).as("scanStatusMessage"),
|
||||
db.ref("createdAt").withSchema(TableName.SecretScanningScan).as("scanCreatedAt"),
|
||||
db.ref("status").withSchema(TableName.SecretScanningFinding).as("findingStatus"),
|
||||
db.ref("id").withSchema(TableName.SecretScanningFinding).as("findingId")
|
||||
);
|
||||
|
||||
if (!dataSources.length) return [];
|
||||
|
||||
const results = sqlNestRelationships({
|
||||
data: dataSources,
|
||||
key: "id",
|
||||
parentMapper: (dataSource) => expandSecretScanningDataSource(dataSource),
|
||||
childrenMapper: [
|
||||
{
|
||||
key: "scanId",
|
||||
label: "scans" as const,
|
||||
mapper: ({ scanId, scanCreatedAt, scanStatus, scanStatusMessage }) => ({
|
||||
id: scanId,
|
||||
createdAt: scanCreatedAt,
|
||||
status: scanStatus,
|
||||
statusMessage: scanStatusMessage
|
||||
})
|
||||
},
|
||||
{
|
||||
key: "findingId",
|
||||
label: "findings" as const,
|
||||
mapper: ({ findingId }) => ({
|
||||
id: findingId
|
||||
})
|
||||
}
|
||||
]
|
||||
});
|
||||
|
||||
return results.map(({ scans, findings, ...dataSource }) => {
|
||||
const lastScan =
|
||||
scans && scans.length
|
||||
? scans.reduce((latest, current) => {
|
||||
return new Date(current.createdAt) > new Date(latest.createdAt) ? current : latest;
|
||||
})
|
||||
: null;
|
||||
|
||||
return {
|
||||
...dataSource,
|
||||
lastScanStatus: lastScan?.status ?? null,
|
||||
lastScanStatusMessage: lastScan?.statusMessage ?? null,
|
||||
lastScannedAt: lastScan?.createdAt ?? null,
|
||||
unresolvedFindings: scans.length ? findings.length : null
|
||||
};
|
||||
});
|
||||
} catch (error) {
|
||||
throw new DatabaseError({ error, name: "Find with Details - Secret Scanning Data Source" });
|
||||
}
|
||||
};

  const findResourcesWithDetails = async (filter: Parameters<(typeof resourceOrm)["find"]>[0], tx?: Knex) => {
    try {
      // TODO (scott): this query will probably need to be optimized

      const resources = await (tx || db.replicaNode())(TableName.SecretScanningResource)
        .where((qb) => {
          if (filter)
            void qb.where(buildFindFilter(prependTableNameToFindFilter(TableName.SecretScanningResource, filter)));
        })
        .leftJoin(
          TableName.SecretScanningScan,
          `${TableName.SecretScanningScan}.resourceId`,
          `${TableName.SecretScanningResource}.id`
        )
        .leftJoin(
          TableName.SecretScanningFinding,
          `${TableName.SecretScanningFinding}.scanId`,
          `${TableName.SecretScanningScan}.id`
        )
        .where((qb) => {
          void qb
            .where(`${TableName.SecretScanningFinding}.status`, SecretScanningFindingStatus.Unresolved)
            .orWhereNull(`${TableName.SecretScanningFinding}.status`);
        })
        .select(selectAllTableCols(TableName.SecretScanningResource))
        .select(
          db.ref("id").withSchema(TableName.SecretScanningScan).as("scanId"),
          db.ref("status").withSchema(TableName.SecretScanningScan).as("scanStatus"),
          db.ref("type").withSchema(TableName.SecretScanningScan).as("scanType"),
          db.ref("statusMessage").withSchema(TableName.SecretScanningScan).as("scanStatusMessage"),
          db.ref("createdAt").withSchema(TableName.SecretScanningScan).as("scanCreatedAt"),
          db.ref("status").withSchema(TableName.SecretScanningFinding).as("findingStatus"),
          db.ref("id").withSchema(TableName.SecretScanningFinding).as("findingId")
        );

      if (!resources.length) return [];

      const results = sqlNestRelationships({
        data: resources,
        key: "id",
        parentMapper: (resource) => SecretScanningResourcesSchema.parse(resource),
        childrenMapper: [
          {
            key: "scanId",
            label: "scans" as const,
            mapper: ({ scanId, scanCreatedAt, scanStatus, scanStatusMessage, scanType }) => ({
              id: scanId,
              type: scanType,
              createdAt: scanCreatedAt,
              status: scanStatus,
              statusMessage: scanStatusMessage
            })
          },
          {
            key: "findingId",
            label: "findings" as const,
            mapper: ({ findingId }) => ({
              id: findingId
            })
          }
        ]
      });

      return results.map(({ scans, findings, ...resource }) => {
        const lastScan =
          scans && scans.length
            ? scans.reduce((latest, current) => {
                return new Date(current.createdAt) > new Date(latest.createdAt) ? current : latest;
              })
            : null;

        return {
          ...resource,
          lastScanStatus: lastScan?.status ?? null,
          lastScanStatusMessage: lastScan?.statusMessage ?? null,
          lastScannedAt: lastScan?.createdAt ?? null,
          unresolvedFindings: findings?.length ?? 0
        };
      });
    } catch (error) {
      throw new DatabaseError({ error, name: "Find with Details - Secret Scanning Resource" });
    }
  };

  const findScansWithDetailsByDataSourceId = async (dataSourceId: string, tx?: Knex) => {
    try {
      // TODO (scott): this query will probably need to be optimized

      const scans = await (tx || db.replicaNode())(TableName.SecretScanningScan)
        .leftJoin(
          TableName.SecretScanningResource,
          `${TableName.SecretScanningResource}.id`,
          `${TableName.SecretScanningScan}.resourceId`
        )
        .where(`${TableName.SecretScanningResource}.dataSourceId`, dataSourceId)
        .leftJoin(
          TableName.SecretScanningFinding,
          `${TableName.SecretScanningFinding}.scanId`,
          `${TableName.SecretScanningScan}.id`
        )
        .select(selectAllTableCols(TableName.SecretScanningScan))
        .select(
          db.ref("status").withSchema(TableName.SecretScanningFinding).as("findingStatus"),
          db.ref("id").withSchema(TableName.SecretScanningFinding).as("findingId"),
          db.ref("name").withSchema(TableName.SecretScanningResource).as("resourceName")
        );

      if (!scans.length) return [];

      const results = sqlNestRelationships({
        data: scans,
        key: "id",
        parentMapper: (scan) => SecretScanningScansSchema.parse(scan),
        childrenMapper: [
          {
            key: "findingId",
            label: "findings" as const,
            mapper: ({ findingId, findingStatus }) => ({
              id: findingId,
              status: findingStatus
            })
          },
          {
            key: "resourceId",
            label: "resources" as const,
            mapper: ({ resourceName }) => ({
              name: resourceName
            })
          }
        ]
      });

      return results.map(({ findings, resources, ...scan }) => {
        return {
          ...scan,
          unresolvedFindings:
            findings?.filter((finding) => finding.status === SecretScanningFindingStatus.Unresolved).length ?? 0,
          resolvedFindings:
            findings?.filter((finding) => finding.status !== SecretScanningFindingStatus.Unresolved).length ?? 0,
          resourceName: resources[0].name
        };
      });
    } catch (error) {
      throw new DatabaseError({ error, name: "Find with Details By Data Source ID - Secret Scanning Scan" });
    }
  };

  const findScansByDataSourceId = async (dataSourceId: string, tx?: Knex) => {
    try {
      const scans = await (tx || db.replicaNode())(TableName.SecretScanningScan)
        .leftJoin(
          TableName.SecretScanningResource,
          `${TableName.SecretScanningResource}.id`,
          `${TableName.SecretScanningScan}.resourceId`
        )
        .where(`${TableName.SecretScanningResource}.dataSourceId`, dataSourceId)
        .select(selectAllTableCols(TableName.SecretScanningScan));

      return scans;
    } catch (error) {
      throw new DatabaseError({ error, name: "Find By Data Source ID - Secret Scanning Scan" });
    }
  };

  return {
    dataSources: {
      ...dataSourceOrm,
      find: findDataSource,
      findById: findDataSourceById,
      findOne: findOneDataSource,
      create: createDataSource,
      updateById: updateDataSourceById,
      deleteById: deleteDataSourceById,
      findWithDetails: findDataSourceWithDetails
    },
    resources: {
      ...resourceOrm,
      findWithDetails: findResourcesWithDetails
    },
    scans: {
      ...scanOrm,
      findWithDetailsByDataSourceId: findScansWithDetailsByDataSourceId,
      findByDataSourceId: findScansByDataSourceId
    },
    findings: findingOrm,
    configs: configOrm
  };
};
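
// Illustrative usage sketch (editorial, not part of the diff): the factory layers the
// detail-enriched helpers over the base ORMs, which is how the service layer below consumes it:
//
//   const dataSources = await secretScanningV2DAL.dataSources.findWithDetails({ projectId });
//   // each row carries lastScanStatus, lastScannedAt and unresolvedFindings alongside the base columns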
@@ -0,0 +1,33 @@
export enum SecretScanningDataSource {
  GitHub = "github"
}

export enum SecretScanningScanStatus {
  Completed = "completed",
  Failed = "failed",
  Queued = "queued",
  Scanning = "scanning"
}

export enum SecretScanningScanType {
  FullScan = "full-scan",
  DiffScan = "diff-scan"
}

export enum SecretScanningFindingStatus {
  Resolved = "resolved",
  Unresolved = "unresolved",
  FalsePositive = "false-positive",
  Ignore = "ignore"
}

export enum SecretScanningResource {
  Repository = "repository",
  Project = "project"
}

export enum SecretScanningFindingSeverity {
  High = "high",
  Medium = "medium",
  Low = "low"
}
@@ -0,0 +1,19 @@
import { GitHubSecretScanningFactory } from "@app/ee/services/secret-scanning-v2/github/github-secret-scanning-factory";

import { SecretScanningDataSource } from "./secret-scanning-v2-enums";
import {
  TQueueSecretScanningResourceDiffScan,
  TSecretScanningDataSourceCredentials,
  TSecretScanningDataSourceWithConnection,
  TSecretScanningFactory
} from "./secret-scanning-v2-types";

type TSecretScanningFactoryImplementation = TSecretScanningFactory<
  TSecretScanningDataSourceWithConnection,
  TSecretScanningDataSourceCredentials,
  TQueueSecretScanningResourceDiffScan["payload"]
>;

export const SECRET_SCANNING_FACTORY_MAP: Record<SecretScanningDataSource, TSecretScanningFactoryImplementation> = {
  [SecretScanningDataSource.GitHub]: GitHubSecretScanningFactory as TSecretScanningFactoryImplementation
};
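
// Illustrative dispatch sketch (editorial, not part of the diff): callers resolve the
// provider-specific implementation by data source type, exactly as the queue and service
// layers later in this diff do.
//
//   const factory = SECRET_SCANNING_FACTORY_MAP[SecretScanningDataSource.GitHub]();
//   const rawResources = await factory.listRawResources(dataSourceWithConnection);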
@@ -0,0 +1,140 @@
import { AxiosError } from "axios";
import { exec } from "child_process";
import RE2 from "re2";

import { readFindingsFile } from "@app/ee/services/secret-scanning/secret-scanning-queue/secret-scanning-fns";
import { SecretMatch } from "@app/ee/services/secret-scanning/secret-scanning-queue/secret-scanning-queue-types";
import { GITHUB_SECRET_SCANNING_DATA_SOURCE_LIST_OPTION } from "@app/ee/services/secret-scanning-v2/github";
import { titleCaseToCamelCase } from "@app/lib/fn";

import { SecretScanningDataSource, SecretScanningFindingSeverity } from "./secret-scanning-v2-enums";
import { TCloneRepository, TGetFindingsPayload, TSecretScanningDataSourceListItem } from "./secret-scanning-v2-types";

const SECRET_SCANNING_SOURCE_LIST_OPTIONS: Record<SecretScanningDataSource, TSecretScanningDataSourceListItem> = {
  [SecretScanningDataSource.GitHub]: GITHUB_SECRET_SCANNING_DATA_SOURCE_LIST_OPTION
};

export const listSecretScanningDataSourceOptions = () => {
  return Object.values(SECRET_SCANNING_SOURCE_LIST_OPTIONS).sort((a, b) => a.name.localeCompare(b.name));
};

export const cloneRepository = async ({ cloneUrl, repoPath }: TCloneRepository): Promise<void> => {
  const command = `git clone ${cloneUrl} ${repoPath} --bare`;
  return new Promise((resolve, reject) => {
    exec(command, (error) => {
      if (error) {
        reject(error);
      } else {
        resolve();
      }
    });
  });
};

export function scanDirectory(inputPath: string, outputPath: string, configPath?: string): Promise<void> {
  return new Promise((resolve, reject) => {
    const command = `cd ${inputPath} && infisical scan --exit-code=77 -r "${outputPath}" ${configPath ? `-c ${configPath}` : ""}`;
    exec(command, (error) => {
      if (error && error.code !== 77) {
        reject(error);
      } else {
        resolve();
      }
    });
  });
}
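
// Editorial note (hedged): the CLI is invoked with `--exit-code=77` so that "findings present"
// exits with a sentinel code distinct from ordinary failures; the handler above treats 77 as
// success and relies on the report file written via `-r` for the actual results. The paths
// below are hypothetical.
//
//   await scanDirectory("/tmp/repo.git", "/tmp/findings.json"); // resolves even when leaks are found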

export const scanGitRepositoryAndGetFindings = async (
  scanPath: string,
  findingsPath: string,
  configPath?: string
): TGetFindingsPayload => {
  await scanDirectory(scanPath, findingsPath, configPath);

  const findingsData = JSON.parse(await readFindingsFile(findingsPath)) as SecretMatch[];

  return findingsData.map(
    ({
      // discard match and secret as we don't want to store
      Match,
      Secret,
      ...finding
    }) => ({
      details: titleCaseToCamelCase(finding),
      fingerprint: `${finding.Fingerprint}:${finding.StartColumn}`,
      severity: SecretScanningFindingSeverity.High,
      rule: finding.RuleID
    })
  );
};

export const replaceNonChangesWithNewlines = (patch: string) => {
  return patch
    .split("\n")
    .map((line) => {
      // Keep added lines (remove the + prefix)
      if (line.startsWith("+") && !line.startsWith("+++")) {
        return line.substring(1);
      }

      // Replace everything else with newlines to maintain line positioning
      return "";
    })
    .join("\n");
};
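
// Worked example (editorial, not part of the diff): only added lines survive, and every other
// patch line collapses to an empty line, so scanner line numbers still match patch positions.
//
//   replaceNonChangesWithNewlines("@@ -1,2 +1,3 @@\n context\n+token=abc\n-removed");
//   // => "\n\ntoken=abc\n"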

const HunkHeaderRegex = new RE2(/^@@ -\d+(?:,\d+)? \+(\d+)(?:,(\d+))? @@/);

export const convertPatchLineToFileLineNumber = (patch: string, patchLineNumber: number) => {
  const lines = patch.split("\n");
  let currentPatchLine = 0;
  let currentNewLine = 0;

  for (const line of lines) {
    currentPatchLine += 1;

    // Hunk header: @@ -a,b +c,d @@
    const hunkHeaderMatch = HunkHeaderRegex.match(line);
    if (hunkHeaderMatch) {
      const startLine = parseInt(hunkHeaderMatch[1], 10);
      currentNewLine = startLine;
      // eslint-disable-next-line no-continue
      continue;
    }

    if (currentPatchLine === patchLineNumber) {
      return currentNewLine;
    }

    if (line.startsWith("+++")) {
      // eslint-disable-next-line no-continue
      continue; // skip file metadata lines
    }

    // Advance only if the line exists in the new file
    if (line.startsWith("+") || line.startsWith(" ")) {
      currentNewLine += 1;
    }
  }

  return currentNewLine;
};
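
// Worked example (editorial, not part of the diff): for the hunk header `@@ -10,3 +12,4 @@`,
// content after the header starts at new-file line 12, so a finding reported on patch line 2
// resolves to file line 12.
//
//   convertPatchLineToFileLineNumber("@@ -10,3 +12,4 @@\n+const apiKey = getSecret();", 2); // => 12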

const MAX_MESSAGE_LENGTH = 1024;

export const parseScanErrorMessage = (err: unknown): string => {
  let errorMessage: string;

  if (err instanceof AxiosError) {
    errorMessage = err?.response?.data
      ? JSON.stringify(err?.response?.data)
      : (err?.message ?? "An unknown error occurred.");
  } else {
    errorMessage = (err as Error)?.message || "An unknown error occurred.";
  }

  return errorMessage.length <= MAX_MESSAGE_LENGTH
    ? errorMessage
    : `${errorMessage.substring(0, MAX_MESSAGE_LENGTH - 3)}...`;
};
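
// Behavior sketch (editorial, not part of the diff): Axios errors are serialized from the
// response body when present; anything else falls back to `message`. Output is capped at
// 1024 characters, with the last three reserved for the ellipsis.
//
//   parseScanErrorMessage(new Error("x".repeat(2000))).length; // => 1024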
@@ -0,0 +1,14 @@
import { SecretScanningDataSource } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
import { AppConnection } from "@app/services/app-connection/app-connection-enums";

export const SECRET_SCANNING_DATA_SOURCE_NAME_MAP: Record<SecretScanningDataSource, string> = {
  [SecretScanningDataSource.GitHub]: "GitHub"
};

export const SECRET_SCANNING_DATA_SOURCE_CONNECTION_MAP: Record<SecretScanningDataSource, AppConnection> = {
  [SecretScanningDataSource.GitHub]: AppConnection.GitHubRadar
};

export const AUTO_SYNC_DESCRIPTION_HELPER: Record<SecretScanningDataSource, { verb: string; noun: string }> = {
  [SecretScanningDataSource.GitHub]: { verb: "push", noun: "repositories" }
};
@@ -0,0 +1,626 @@
import { join } from "path";

import { ProjectMembershipRole, TSecretScanningFindings } from "@app/db/schemas";
import { TAuditLogServiceFactory } from "@app/ee/services/audit-log/audit-log-service";
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import {
  createTempFolder,
  deleteTempFolder,
  writeTextToFile
} from "@app/ee/services/secret-scanning/secret-scanning-queue/secret-scanning-fns";
import {
  parseScanErrorMessage,
  scanGitRepositoryAndGetFindings
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-fns";
import { KeyStorePrefixes, TKeyStoreFactory } from "@app/keystore/keystore";
import { getConfig } from "@app/lib/config/env";
import { BadRequestError, InternalServerError } from "@app/lib/errors";
import { logger } from "@app/lib/logger";
import { QueueJobs, QueueName, TQueueServiceFactory } from "@app/queue";
import { decryptAppConnection } from "@app/services/app-connection/app-connection-fns";
import { TAppConnection } from "@app/services/app-connection/app-connection-types";
import { ActorType } from "@app/services/auth/auth-type";
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
import { TProjectDALFactory } from "@app/services/project/project-dal";
import { TProjectMembershipDALFactory } from "@app/services/project-membership/project-membership-dal";
import { SmtpTemplates, TSmtpService } from "@app/services/smtp/smtp-service";

import { TSecretScanningV2DALFactory } from "./secret-scanning-v2-dal";
import {
  SecretScanningDataSource,
  SecretScanningResource,
  SecretScanningScanStatus,
  SecretScanningScanType
} from "./secret-scanning-v2-enums";
import { SECRET_SCANNING_FACTORY_MAP } from "./secret-scanning-v2-factory";
import {
  TFindingsPayload,
  TQueueSecretScanningDataSourceFullScan,
  TQueueSecretScanningResourceDiffScan,
  TQueueSecretScanningSendNotification,
  TSecretScanningDataSourceWithConnection
} from "./secret-scanning-v2-types";

type TSecretScanningV2QueueServiceFactoryDep = {
  queueService: TQueueServiceFactory;
  secretScanningV2DAL: TSecretScanningV2DALFactory;
  smtpService: Pick<TSmtpService, "sendMail">;
  projectMembershipDAL: Pick<TProjectMembershipDALFactory, "findAllProjectMembers">;
  projectDAL: Pick<TProjectDALFactory, "findById">;
  kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">;
  auditLogService: Pick<TAuditLogServiceFactory, "createAuditLog">;
  keyStore: Pick<TKeyStoreFactory, "acquireLock" | "getItem">;
};

export type TSecretScanningV2QueueServiceFactory = Awaited<ReturnType<typeof secretScanningV2QueueServiceFactory>>;

export const secretScanningV2QueueServiceFactory = async ({
  queueService,
  secretScanningV2DAL,
  projectMembershipDAL,
  projectDAL,
  smtpService,
  kmsService,
  auditLogService,
  keyStore
}: TSecretScanningV2QueueServiceFactoryDep) => {
  const queueDataSourceFullScan = async (
    dataSource: TSecretScanningDataSourceWithConnection,
    resourceExternalId?: string
  ) => {
    try {
      const { type } = dataSource;

      const factory = SECRET_SCANNING_FACTORY_MAP[type]();

      const rawResources = await factory.listRawResources(dataSource);

      let filteredRawResources = rawResources;

      // TODO: should add individual resource fetch to factory
      if (resourceExternalId) {
        filteredRawResources = rawResources.filter((resource) => resource.externalId === resourceExternalId);
      }

      if (!filteredRawResources.length) {
        throw new BadRequestError({
          message: `${resourceExternalId ? `Resource with ID "${resourceExternalId}" could not be found` : "Data source has no resources to scan"}. Ensure your data source config is correct and not filtering out scanning resources.`
        });
      }

      for (const resource of filteredRawResources) {
        // eslint-disable-next-line no-await-in-loop
        if (await keyStore.getItem(KeyStorePrefixes.SecretScanningLock(dataSource.id, resource.externalId))) {
          throw new BadRequestError({ message: `A scan is already in progress for resource "${resource.name}"` });
        }
      }

      await secretScanningV2DAL.resources.transaction(async (tx) => {
        const resources = await secretScanningV2DAL.resources.upsert(
          filteredRawResources.map((rawResource) => ({
            ...rawResource,
            dataSourceId: dataSource.id
          })),
          ["externalId", "dataSourceId"],
          tx
        );

        const scans = await secretScanningV2DAL.scans.insertMany(
          resources.map((resource) => ({
            resourceId: resource.id,
            type: SecretScanningScanType.FullScan
          })),
          tx
        );

        for (const scan of scans) {
          // eslint-disable-next-line no-await-in-loop
          await queueService.queuePg(QueueJobs.SecretScanningV2FullScan, {
            scanId: scan.id,
            resourceId: scan.resourceId,
            dataSourceId: dataSource.id
          });
        }
      });
    } catch (error) {
      logger.error(error, `Failed to queue full-scan for data source with ID "${dataSource.id}"`);

      if (error instanceof BadRequestError) throw error;

      throw new InternalServerError({ message: `Failed to queue scan: ${(error as Error).message}` });
    }
  };
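
  // Illustrative call sketch (editorial, not part of the diff): passing a resource external ID
  // narrows the full scan to a single resource; omitting it scans everything the data source
  // lists. The "org/repo" identifier below is an assumed example.
  //
  //   await queueDataSourceFullScan(dataSourceWithConnection);              // scan all resources
  //   await queueDataSourceFullScan(dataSourceWithConnection, "org/repo");  // scan one resource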

  await queueService.startPg<QueueName.SecretScanningV2>(
    QueueJobs.SecretScanningV2FullScan,
    async ([job]) => {
      const { scanId, resourceId, dataSourceId } = job.data as TQueueSecretScanningDataSourceFullScan;
      const { retryCount, retryLimit } = job;

      const logDetails = `[scanId=${scanId}] [resourceId=${resourceId}] [dataSourceId=${dataSourceId}] [jobId=${job.id}] retryCount=[${retryCount}/${retryLimit}]`;

      const tempFolder = await createTempFolder();

      const dataSource = await secretScanningV2DAL.dataSources.findById(dataSourceId);

      if (!dataSource) throw new Error(`Data source with ID "${dataSourceId}" not found`);

      const resource = await secretScanningV2DAL.resources.findById(resourceId);

      if (!resource) throw new Error(`Resource with ID "${resourceId}" not found`);

      let lock: Awaited<ReturnType<typeof keyStore.acquireLock>> | undefined;

      try {
        try {
          lock = await keyStore.acquireLock(
            [KeyStorePrefixes.SecretScanningLock(dataSource.id, resource.externalId)],
            60 * 1000 * 5
          );
        } catch (e) {
          throw new Error("Failed to acquire scanning lock.");
        }

        await secretScanningV2DAL.scans.update(
          { id: scanId },
          {
            status: SecretScanningScanStatus.Scanning
          }
        );

        let connection: TAppConnection | null = null;
        if (dataSource.connection) connection = await decryptAppConnection(dataSource.connection, kmsService);

        const factory = SECRET_SCANNING_FACTORY_MAP[dataSource.type as SecretScanningDataSource]();

        const findingsPath = join(tempFolder, "findings.json");

        const scanPath = await factory.getFullScanPath({
          dataSource: {
            ...dataSource,
            connection
          } as TSecretScanningDataSourceWithConnection,
          resourceName: resource.name,
          tempFolder
        });

        const config = await secretScanningV2DAL.configs.findOne({
          projectId: dataSource.projectId
        });

        let configPath: string | undefined;

        if (config && config.content) {
          configPath = join(tempFolder, "infisical-scan.toml");
          await writeTextToFile(configPath, config.content);
        }

        let findingsPayload: TFindingsPayload;
        switch (resource.type) {
          case SecretScanningResource.Repository:
          case SecretScanningResource.Project:
            findingsPayload = await scanGitRepositoryAndGetFindings(scanPath, findingsPath, configPath);
            break;
          default:
            throw new Error("Unhandled resource type");
        }

        const allFindings = await secretScanningV2DAL.findings.transaction(async (tx) => {
          let findings: TSecretScanningFindings[] = [];
          if (findingsPayload.length) {
            findings = await secretScanningV2DAL.findings.upsert(
              findingsPayload.map((finding) => ({
                ...finding,
                projectId: dataSource.projectId,
                dataSourceName: dataSource.name,
                dataSourceType: dataSource.type,
                resourceName: resource.name,
                resourceType: resource.type,
                scanId
              })),
              ["projectId", "fingerprint"],
              tx,
              ["resourceName", "dataSourceName"]
            );
          }

          await secretScanningV2DAL.scans.update(
            { id: scanId },
            {
              status: SecretScanningScanStatus.Completed,
              statusMessage: null
            }
          );

          return findings;
        });

        const newFindings = allFindings.filter((finding) => finding.scanId === scanId);

        if (newFindings.length) {
          await queueService.queuePg(QueueJobs.SecretScanningV2SendNotification, {
            status: SecretScanningScanStatus.Completed,
            resourceName: resource.name,
            isDiffScan: false,
            dataSource,
            numberOfSecrets: newFindings.length,
            scanId
          });
        }

        await auditLogService.createAuditLog({
          projectId: dataSource.projectId,
          actor: {
            type: ActorType.PLATFORM,
            metadata: {}
          },
          event: {
            type: EventType.SECRET_SCANNING_DATA_SOURCE_SCAN,
            metadata: {
              dataSourceId: dataSource.id,
              dataSourceType: dataSource.type,
              resourceId: resource.id,
              resourceType: resource.type,
              scanId,
              scanStatus: SecretScanningScanStatus.Completed,
              scanType: SecretScanningScanType.FullScan,
              numberOfSecretsDetected: findingsPayload.length
            }
          }
        });

        logger.info(`secretScanningV2Queue: Full Scan Complete ${logDetails} findings=[${findingsPayload.length}]`);
      } catch (error) {
        if (retryCount === retryLimit) {
          const errorMessage = parseScanErrorMessage(error);

          await secretScanningV2DAL.scans.update(
            { id: scanId },
            {
              status: SecretScanningScanStatus.Failed,
              statusMessage: errorMessage
            }
          );

          await queueService.queuePg(QueueJobs.SecretScanningV2SendNotification, {
            status: SecretScanningScanStatus.Failed,
            resourceName: resource.name,
            dataSource,
            errorMessage
          });

          await auditLogService.createAuditLog({
            projectId: dataSource.projectId,
            actor: {
              type: ActorType.PLATFORM,
              metadata: {}
            },
            event: {
              type: EventType.SECRET_SCANNING_DATA_SOURCE_SCAN,
              metadata: {
                dataSourceId: dataSource.id,
                dataSourceType: dataSource.type,
                resourceId: resource.id,
                resourceType: resource.type,
                scanId,
                scanStatus: SecretScanningScanStatus.Failed,
                scanType: SecretScanningScanType.FullScan
              }
            }
          });
        }

        logger.error(error, `secretScanningV2Queue: Full Scan Failed ${logDetails}`);
        throw error;
      } finally {
        await deleteTempFolder(tempFolder);
        await lock?.release();
      }
    },
    {
      batchSize: 1,
      workerCount: 20,
      pollingIntervalSeconds: 1
    }
  );

  const queueResourceDiffScan = async ({
    payload,
    dataSourceId,
    dataSourceType
  }: Pick<TQueueSecretScanningResourceDiffScan, "payload" | "dataSourceId" | "dataSourceType">) => {
    const factory = SECRET_SCANNING_FACTORY_MAP[dataSourceType as SecretScanningDataSource]();

    const resourcePayload = factory.getDiffScanResourcePayload(payload);

    try {
      const { resourceId, scanId } = await secretScanningV2DAL.resources.transaction(async (tx) => {
        const [resource] = await secretScanningV2DAL.resources.upsert(
          [
            {
              ...resourcePayload,
              dataSourceId
            }
          ],
          ["externalId", "dataSourceId"],
          tx
        );

        const scan = await secretScanningV2DAL.scans.create(
          {
            resourceId: resource.id,
            type: SecretScanningScanType.DiffScan
          },
          tx
        );

        return {
          resourceId: resource.id,
          scanId: scan.id
        };
      });

      await queueService.queuePg(QueueJobs.SecretScanningV2DiffScan, {
        payload,
        dataSourceId,
        dataSourceType,
        scanId,
        resourceId
      });
    } catch (error) {
      logger.error(
        error,
        `secretScanningV2Queue: Failed to queue diff scan [dataSourceId=${dataSourceId}] [resourceExternalId=${resourcePayload.externalId}]`
      );
    }
  };

  await queueService.startPg<QueueName.SecretScanningV2>(
    QueueJobs.SecretScanningV2DiffScan,
    async ([job]) => {
      const { payload, dataSourceId, resourceId, scanId } = job.data as TQueueSecretScanningResourceDiffScan;
      const { retryCount, retryLimit } = job;

      const logDetails = `[dataSourceId=${dataSourceId}] [scanId=${scanId}] [resourceId=${resourceId}] [jobId=${job.id}] retryCount=[${retryCount}/${retryLimit}]`;

      const dataSource = await secretScanningV2DAL.dataSources.findById(dataSourceId);

      if (!dataSource) throw new Error(`Data source with ID "${dataSourceId}" not found`);

      const resource = await secretScanningV2DAL.resources.findById(resourceId);

      if (!resource) throw new Error(`Resource with ID "${resourceId}" not found`);

      const factory = SECRET_SCANNING_FACTORY_MAP[dataSource.type as SecretScanningDataSource]();

      const tempFolder = await createTempFolder();

      try {
        await secretScanningV2DAL.scans.update(
          { id: scanId },
          {
            status: SecretScanningScanStatus.Scanning
          }
        );

        let connection: TAppConnection | null = null;
        if (dataSource.connection) connection = await decryptAppConnection(dataSource.connection, kmsService);

        const config = await secretScanningV2DAL.configs.findOne({
          projectId: dataSource.projectId
        });

        let configPath: string | undefined;

        if (config && config.content) {
          configPath = join(tempFolder, "infisical-scan.toml");
          await writeTextToFile(configPath, config.content);
        }

        const findingsPayload = await factory.getDiffScanFindingsPayload({
          dataSource: {
            ...dataSource,
            connection
          } as TSecretScanningDataSourceWithConnection,
          resourceName: resource.name,
          payload,
          configPath
        });

        const allFindings = await secretScanningV2DAL.findings.transaction(async (tx) => {
          let findings: TSecretScanningFindings[] = [];

          if (findingsPayload.length) {
            findings = await secretScanningV2DAL.findings.upsert(
              findingsPayload.map((finding) => ({
                ...finding,
                projectId: dataSource.projectId,
                dataSourceName: dataSource.name,
                dataSourceType: dataSource.type,
                resourceName: resource.name,
                resourceType: resource.type,
                scanId
              })),
              ["projectId", "fingerprint"],
              tx,
              ["resourceName", "dataSourceName"]
            );
          }

          await secretScanningV2DAL.scans.update(
            { id: scanId },
            {
              status: SecretScanningScanStatus.Completed
            }
          );

          return findings;
        });

        const newFindings = allFindings.filter((finding) => finding.scanId === scanId);

        if (newFindings.length) {
          await queueService.queuePg(QueueJobs.SecretScanningV2SendNotification, {
            status: SecretScanningScanStatus.Completed,
            resourceName: resource.name,
            isDiffScan: true,
            dataSource,
            numberOfSecrets: newFindings.length,
            scanId
          });
        }

        await auditLogService.createAuditLog({
          projectId: dataSource.projectId,
          actor: {
            type: ActorType.PLATFORM,
            metadata: {}
          },
          event: {
            type: EventType.SECRET_SCANNING_DATA_SOURCE_SCAN,
            metadata: {
              dataSourceId: dataSource.id,
              dataSourceType: dataSource.type,
              resourceId,
              resourceType: resource.type,
              scanId,
              scanStatus: SecretScanningScanStatus.Completed,
              scanType: SecretScanningScanType.DiffScan,
              numberOfSecretsDetected: findingsPayload.length
            }
          }
        });

        logger.info(`secretScanningV2Queue: Diff Scan Complete ${logDetails}`);
      } catch (error) {
        if (retryCount === retryLimit) {
          const errorMessage = parseScanErrorMessage(error);

          await secretScanningV2DAL.scans.update(
            { id: scanId },
            {
              status: SecretScanningScanStatus.Failed,
              statusMessage: errorMessage
            }
          );

          await queueService.queuePg(QueueJobs.SecretScanningV2SendNotification, {
            status: SecretScanningScanStatus.Failed,
            resourceName: resource.name,
            dataSource,
            errorMessage
          });

          await auditLogService.createAuditLog({
            projectId: dataSource.projectId,
            actor: {
              type: ActorType.PLATFORM,
              metadata: {}
            },
            event: {
              type: EventType.SECRET_SCANNING_DATA_SOURCE_SCAN,
              metadata: {
                dataSourceId: dataSource.id,
                dataSourceType: dataSource.type,
                resourceId: resource.id,
                resourceType: resource.type,
                scanId,
                scanStatus: SecretScanningScanStatus.Failed,
                scanType: SecretScanningScanType.DiffScan
              }
            }
          });
        }

        logger.error(error, `secretScanningV2Queue: Diff Scan Failed ${logDetails}`);
        throw error;
      } finally {
        await deleteTempFolder(tempFolder);
      }
    },
    {
      batchSize: 1,
      workerCount: 20,
      pollingIntervalSeconds: 1
    }
  );

  await queueService.startPg<QueueName.SecretScanningV2>(
    QueueJobs.SecretScanningV2SendNotification,
    async ([job]) => {
      const { dataSource, resourceName, ...payload } = job.data as TQueueSecretScanningSendNotification;

      const appCfg = getConfig();

      if (!appCfg.isSmtpConfigured) return;

      try {
        const { projectId } = dataSource;

        logger.info(
          `secretScanningV2Queue: Sending Status Notification [dataSourceId=${dataSource.id}] [resourceName=${resourceName}] [status=${payload.status}]`
        );

        const projectMembers = await projectMembershipDAL.findAllProjectMembers(projectId);
        const project = await projectDAL.findById(projectId);

        const projectAdmins = projectMembers.filter((member) =>
          member.roles.some((role) => role.role === ProjectMembershipRole.Admin)
        );

        const timestamp = new Date().toISOString();

        await smtpService.sendMail({
          recipients: projectAdmins.map((member) => member.user.email!).filter(Boolean),
          template:
            payload.status === SecretScanningScanStatus.Completed
              ? SmtpTemplates.SecretScanningV2SecretsDetected
              : SmtpTemplates.SecretScanningV2ScanFailed,
          subjectLine:
            payload.status === SecretScanningScanStatus.Completed
              ? "Incident Alert: Secret(s) Leaked"
              : "Secret Scanning Failed",
          substitutions:
            payload.status === SecretScanningScanStatus.Completed
              ? {
                  authorName: "Jim",
                  authorEmail: "jim@infisical.com",
                  resourceName,
                  numberOfSecrets: payload.numberOfSecrets,
                  isDiffScan: payload.isDiffScan,
                  url: encodeURI(
                    `${appCfg.SITE_URL}/secret-scanning/${projectId}/findings?search=scanId:${payload.scanId}`
                  ),
                  timestamp
                }
              : {
                  dataSourceName: dataSource.name,
                  resourceName,
                  projectName: project.name,
                  timestamp,
                  errorMessage: payload.errorMessage,
                  url: encodeURI(
                    `${appCfg.SITE_URL}/secret-scanning/${projectId}/data-sources/${dataSource.type}/${dataSource.id}`
                  )
                }
        });
      } catch (error) {
        logger.error(
          error,
          `secretScanningV2Queue: Failed to Send Status Notification [dataSourceId=${dataSource.id}] [resourceName=${resourceName}] [status=${payload.status}]`
        );
        throw error;
      }
    },
    {
      batchSize: 1,
      workerCount: 5,
      pollingIntervalSeconds: 1
    }
  );

  return {
    queueDataSourceFullScan,
    queueResourceDiffScan
  };
};
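
// Illustrative wiring sketch (editorial, not part of the diff): a provider webhook handler is
// the expected producer for diff scans; the `webhookEvent` variable and calling route here are
// hypothetical, but the argument shape matches the signature above.
//
//   await secretScanningV2Queue.queueResourceDiffScan({
//     payload: webhookEvent, // provider push payload
//     dataSourceId: dataSource.id,
//     dataSourceType: SecretScanningDataSource.GitHub
//   });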
@@ -0,0 +1,99 @@
import { z } from "zod";

import { SecretScanningDataSourcesSchema, SecretScanningFindingsSchema } from "@app/db/schemas";
import { SecretScanningDataSource } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
import { SECRET_SCANNING_DATA_SOURCE_CONNECTION_MAP } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-maps";
import { SecretScanningDataSources } from "@app/lib/api-docs";
import { slugSchema } from "@app/server/lib/schemas";

type SecretScanningDataSourceSchemaOpts = {
  type: SecretScanningDataSource;
  isConnectionRequired: boolean;
};

export const BaseSecretScanningDataSourceSchema = ({
  type,
  isConnectionRequired
}: SecretScanningDataSourceSchemaOpts) =>
  SecretScanningDataSourcesSchema.omit({
    // unique to provider
    type: true,
    connectionId: true,
    config: true,
    encryptedCredentials: true
  }).extend({
    type: z.literal(type),
    connectionId: isConnectionRequired ? z.string().uuid() : z.null(),
    connection: isConnectionRequired
      ? z.object({
          app: z.literal(SECRET_SCANNING_DATA_SOURCE_CONNECTION_MAP[type]),
          name: z.string(),
          id: z.string().uuid()
        })
      : z.null()
  });
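
// Illustrative derivation sketch (editorial, not part of the diff): a provider module builds
// its concrete schema from the base builder; the GitHub variant and its name shown here are an
// assumed example, though the type/connection pairing follows the maps above.
//
//   const GitHubDataSourceSchema = BaseSecretScanningDataSourceSchema({
//     type: SecretScanningDataSource.GitHub,
//     isConnectionRequired: true
//   });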

export const BaseCreateSecretScanningDataSourceSchema = ({
  type,
  isConnectionRequired
}: SecretScanningDataSourceSchemaOpts) =>
  z.object({
    name: slugSchema({ field: "name" }).describe(SecretScanningDataSources.CREATE(type).name),
    projectId: z
      .string()
      .trim()
      .min(1, "Project ID required")
      .describe(SecretScanningDataSources.CREATE(type).projectId),
    description: z
      .string()
      .trim()
      .max(256, "Description cannot exceed 256 characters")
      .nullish()
      .describe(SecretScanningDataSources.CREATE(type).description),
    connectionId: isConnectionRequired
      ? z.string().uuid().describe(SecretScanningDataSources.CREATE(type).connectionId)
      : z.undefined(),
    isAutoScanEnabled: z
      .boolean()
      .optional()
      .default(true)
      .describe(SecretScanningDataSources.CREATE(type).isAutoScanEnabled)
  });

export const BaseUpdateSecretScanningDataSourceSchema = (type: SecretScanningDataSource) =>
  z.object({
    name: slugSchema({ field: "name" }).describe(SecretScanningDataSources.UPDATE(type).name).optional(),
    description: z
      .string()
      .trim()
      .max(256, "Description cannot exceed 256 characters")
      .nullish()
      .describe(SecretScanningDataSources.UPDATE(type).description),
    isAutoScanEnabled: z.boolean().optional().describe(SecretScanningDataSources.UPDATE(type).isAutoScanEnabled)
  });

export const GitRepositoryScanFindingDetailsSchema = z.object({
  description: z.string(),
  startLine: z.number(),
  endLine: z.number(),
  startColumn: z.number(),
  endColumn: z.number(),
  file: z.string(),
  link: z.string(),
  symlinkFile: z.string(),
  commit: z.string(),
  entropy: z.number(),
  author: z.string(),
  email: z.string(),
  date: z.string(),
  message: z.string(),
  tags: z.string().array(),
  ruleID: z.string(),
  fingerprint: z.string()
});

export const BaseSecretScanningFindingSchema = SecretScanningFindingsSchema.omit({
  dataSourceType: true,
  resourceType: true,
  details: true
});
@@ -0,0 +1,875 @@
import { ForbiddenError } from "@casl/ability";
import { join } from "path";

import { ActionProjectType } from "@app/db/schemas";
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
import {
  ProjectPermissionSecretScanningConfigActions,
  ProjectPermissionSecretScanningDataSourceActions,
  ProjectPermissionSecretScanningFindingActions,
  ProjectPermissionSub
} from "@app/ee/services/permission/project-permission";
import {
  createTempFolder,
  deleteTempFolder,
  scanContentAndGetFindings,
  writeTextToFile
} from "@app/ee/services/secret-scanning/secret-scanning-queue/secret-scanning-fns";
import { githubSecretScanningService } from "@app/ee/services/secret-scanning-v2/github/github-secret-scanning-service";
import { SecretScanningFindingStatus } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
import { SECRET_SCANNING_FACTORY_MAP } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-factory";
import { listSecretScanningDataSourceOptions } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-fns";
import {
  SECRET_SCANNING_DATA_SOURCE_CONNECTION_MAP,
  SECRET_SCANNING_DATA_SOURCE_NAME_MAP
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-maps";
import {
  TCreateSecretScanningDataSourceDTO,
  TDeleteSecretScanningDataSourceDTO,
  TFindSecretScanningDataSourceByIdDTO,
  TFindSecretScanningDataSourceByNameDTO,
  TListSecretScanningDataSourcesByProjectId,
  TSecretScanningDataSource,
  TSecretScanningDataSourceWithConnection,
  TSecretScanningDataSourceWithDetails,
  TSecretScanningFinding,
  TSecretScanningResourceWithDetails,
  TSecretScanningScanWithDetails,
  TTriggerSecretScanningDataSourceDTO,
  TUpdateSecretScanningDataSourceDTO,
  TUpdateSecretScanningFindingDTO,
  TUpsertSecretScanningConfigDTO
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-types";
import { DatabaseErrorCode } from "@app/lib/error-codes";
import { BadRequestError, DatabaseError, NotFoundError } from "@app/lib/errors";
import { OrgServiceActor } from "@app/lib/types";
import { decryptAppConnection } from "@app/services/app-connection/app-connection-fns";
import { TAppConnectionServiceFactory } from "@app/services/app-connection/app-connection-service";
import { TAppConnection } from "@app/services/app-connection/app-connection-types";
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
import { KmsDataKey } from "@app/services/kms/kms-types";

import { TSecretScanningV2DALFactory } from "./secret-scanning-v2-dal";
import { TSecretScanningV2QueueServiceFactory } from "./secret-scanning-v2-queue";

export type TSecretScanningV2ServiceFactoryDep = {
  secretScanningV2DAL: TSecretScanningV2DALFactory;
  appConnectionService: Pick<TAppConnectionServiceFactory, "connectAppConnectionById">;
  permissionService: Pick<TPermissionServiceFactory, "getProjectPermission" | "getOrgPermission">;
  licenseService: Pick<TLicenseServiceFactory, "getPlan">;
  secretScanningV2Queue: Pick<
    TSecretScanningV2QueueServiceFactory,
    "queueDataSourceFullScan" | "queueResourceDiffScan"
  >;
  kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">;
};

export type TSecretScanningV2ServiceFactory = ReturnType<typeof secretScanningV2ServiceFactory>;

export const secretScanningV2ServiceFactory = ({
  secretScanningV2DAL,
  permissionService,
  appConnectionService,
  licenseService,
  secretScanningV2Queue,
  kmsService
}: TSecretScanningV2ServiceFactoryDep) => {
  const $checkListSecretScanningDataSourcesByProjectIdPermissions = async (
    projectId: string,
    actor: OrgServiceActor
  ) => {
    const plan = await licenseService.getPlan(actor.orgId);

    if (!plan.secretScanning)
      throw new BadRequestError({
        message:
          "Failed to access Secret Scanning Data Sources due to plan restriction. Upgrade plan to enable Secret Scanning."
      });

    const { permission } = await permissionService.getProjectPermission({
      actor: actor.type,
      actorId: actor.id,
      actorAuthMethod: actor.authMethod,
      actorOrgId: actor.orgId,
      actionProjectType: ActionProjectType.SecretScanning,
      projectId
    });

    ForbiddenError.from(permission).throwUnlessCan(
      ProjectPermissionSecretScanningDataSourceActions.Read,
      ProjectPermissionSub.SecretScanningDataSources
    );
  };

  const listSecretScanningDataSourcesByProjectId = async (
    { projectId, type }: TListSecretScanningDataSourcesByProjectId,
    actor: OrgServiceActor
  ) => {
    await $checkListSecretScanningDataSourcesByProjectIdPermissions(projectId, actor);

    const dataSources = await secretScanningV2DAL.dataSources.find({
      ...(type && { type }),
      projectId
    });

    return dataSources as TSecretScanningDataSource[];
  };

  const listSecretScanningDataSourcesWithDetailsByProjectId = async (
    { projectId, type }: TListSecretScanningDataSourcesByProjectId,
    actor: OrgServiceActor
  ) => {
    await $checkListSecretScanningDataSourcesByProjectIdPermissions(projectId, actor);

    const dataSources = await secretScanningV2DAL.dataSources.findWithDetails({
      ...(type && { type }),
      projectId
    });

    return dataSources as TSecretScanningDataSourceWithDetails[];
  };

  const findSecretScanningDataSourceById = async (
    { type, dataSourceId }: TFindSecretScanningDataSourceByIdDTO,
    actor: OrgServiceActor
  ) => {
    const plan = await licenseService.getPlan(actor.orgId);

    if (!plan.secretScanning)
      throw new BadRequestError({
        message:
          "Failed to access Secret Scanning Data Source due to plan restriction. Upgrade plan to enable Secret Scanning."
      });

    const dataSource = await secretScanningV2DAL.dataSources.findById(dataSourceId);

    if (!dataSource)
      throw new NotFoundError({
        message: `Could not find ${SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type]} Data Source with ID "${dataSourceId}"`
      });

    const { permission } = await permissionService.getProjectPermission({
      actor: actor.type,
      actorId: actor.id,
      actorAuthMethod: actor.authMethod,
      actorOrgId: actor.orgId,
      actionProjectType: ActionProjectType.SecretScanning,
      projectId: dataSource.projectId
    });

    ForbiddenError.from(permission).throwUnlessCan(
      ProjectPermissionSecretScanningDataSourceActions.Read,
      ProjectPermissionSub.SecretScanningDataSources
    );

    if (type !== dataSource.type)
      throw new BadRequestError({
        message: `Secret Scanning Data Source with ID "${dataSourceId}" is not configured for ${SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type]}`
      });

    return dataSource as TSecretScanningDataSource;
  };

  const findSecretScanningDataSourceByName = async (
    { type, sourceName, projectId }: TFindSecretScanningDataSourceByNameDTO,
    actor: OrgServiceActor
  ) => {
    const plan = await licenseService.getPlan(actor.orgId);

    if (!plan.secretScanning)
      throw new BadRequestError({
        message:
          "Failed to access Secret Scanning Data Source due to plan restriction. Upgrade plan to enable Secret Scanning."
      });

    // we prevent conflicting names within a project
    const dataSource = await secretScanningV2DAL.dataSources.findOne({
      name: sourceName,
      projectId
    });

    if (!dataSource)
      throw new NotFoundError({
        message: `Could not find ${SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type]} Data Source with name "${sourceName}"`
      });

    const { permission } = await permissionService.getProjectPermission({
      actor: actor.type,
      actorId: actor.id,
      actorAuthMethod: actor.authMethod,
      actorOrgId: actor.orgId,
      actionProjectType: ActionProjectType.SecretScanning,
      projectId
    });

    ForbiddenError.from(permission).throwUnlessCan(
      ProjectPermissionSecretScanningDataSourceActions.Read,
      ProjectPermissionSub.SecretScanningDataSources
    );

    if (type !== dataSource.type)
      throw new BadRequestError({
        message: `Secret Scanning Data Source with ID "${dataSource.id}" is not configured for ${SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type]}`
      });

    return dataSource as TSecretScanningDataSource;
  };

  const createSecretScanningDataSource = async (
    payload: TCreateSecretScanningDataSourceDTO,
    actor: OrgServiceActor
  ) => {
    const plan = await licenseService.getPlan(actor.orgId);

    if (!plan.secretScanning)
      throw new BadRequestError({
        message:
          "Failed to create Secret Scanning Data Source due to plan restriction. Upgrade plan to enable Secret Scanning."
      });

    const { permission } = await permissionService.getProjectPermission({
      actor: actor.type,
      actorId: actor.id,
      actorAuthMethod: actor.authMethod,
      actorOrgId: actor.orgId,
      actionProjectType: ActionProjectType.SecretScanning,
      projectId: payload.projectId
    });

    ForbiddenError.from(permission).throwUnlessCan(
      ProjectPermissionSecretScanningDataSourceActions.Create,
      ProjectPermissionSub.SecretScanningDataSources
    );

    let connection: TAppConnection | null = null;
    if (payload.connectionId) {
      // validates permission to connect and that the app is valid for this data source
      connection = await appConnectionService.connectAppConnectionById(
        SECRET_SCANNING_DATA_SOURCE_CONNECTION_MAP[payload.type],
        payload.connectionId,
        actor
      );
    }

    const factory = SECRET_SCANNING_FACTORY_MAP[payload.type]();

    try {
      const createdDataSource = await factory.initialize(
        {
          payload,
          connection: connection as TSecretScanningDataSourceWithConnection["connection"],
          secretScanningV2DAL
        },
        async ({ credentials, externalId }) => {
          let encryptedCredentials: Buffer | null = null;

          if (credentials) {
            const { encryptor } = await kmsService.createCipherPairWithDataKey({
              type: KmsDataKey.SecretManager,
              projectId: payload.projectId
            });

            const { cipherTextBlob } = encryptor({
              plainText: Buffer.from(JSON.stringify(credentials))
            });

            encryptedCredentials = cipherTextBlob;
          }

          return secretScanningV2DAL.dataSources.transaction(async (tx) => {
            const dataSource = await secretScanningV2DAL.dataSources.create(
              {
                encryptedCredentials,
                externalId,
                ...payload
              },
              tx
            );

            await factory.postInitialization({
              payload,
              connection: connection as TSecretScanningDataSourceWithConnection["connection"],
              dataSourceId: dataSource.id,
              credentials
            });

            return dataSource;
          });
        }
      );

      if (payload.isAutoScanEnabled) {
        try {
          await secretScanningV2Queue.queueDataSourceFullScan({
            ...createdDataSource,
            connection
          } as TSecretScanningDataSourceWithConnection);
        } catch {
          // fail silently so creation isn't blocked; users will trigger a scan manually when no results appear and surface the error then
        }
      }

      return createdDataSource as TSecretScanningDataSource;
    } catch (err) {
      if (err instanceof DatabaseError && (err.error as { code: string })?.code === DatabaseErrorCode.UniqueViolation) {
        throw new BadRequestError({
          message: `A Secret Scanning Data Source with the name "${payload.name}" already exists for the project with ID "${payload.projectId}"`
        });
      }

      throw err;
    }
  };

  const updateSecretScanningDataSource = async (
    { type, dataSourceId, ...payload }: TUpdateSecretScanningDataSourceDTO,
    actor: OrgServiceActor
  ) => {
    const plan = await licenseService.getPlan(actor.orgId);

    if (!plan.secretScanning)
      throw new BadRequestError({
        message:
          "Failed to update Secret Scanning Data Source due to plan restriction. Upgrade plan to enable Secret Scanning."
      });

    const dataSource = await secretScanningV2DAL.dataSources.findById(dataSourceId);

    if (!dataSource)
      throw new NotFoundError({
        message: `Could not find ${SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type]} Data Source with ID "${dataSourceId}"`
      });

    const { permission } = await permissionService.getProjectPermission({
      actor: actor.type,
      actorId: actor.id,
      actorAuthMethod: actor.authMethod,
      actorOrgId: actor.orgId,
      actionProjectType: ActionProjectType.SecretScanning,
      projectId: dataSource.projectId
    });

    ForbiddenError.from(permission).throwUnlessCan(
      ProjectPermissionSecretScanningDataSourceActions.Edit,
      ProjectPermissionSub.SecretScanningDataSources
    );

    if (type !== dataSource.type)
      throw new BadRequestError({
        message: `Secret Scanning Data Source with ID "${dataSourceId}" is not configured for ${SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type]}`
      });

    try {
      const updatedDataSource = await secretScanningV2DAL.dataSources.updateById(dataSourceId, payload);

      return updatedDataSource as TSecretScanningDataSource;
    } catch (err) {
      if (err instanceof DatabaseError && (err.error as { code: string })?.code === DatabaseErrorCode.UniqueViolation) {
        throw new BadRequestError({
          message: `A Secret Scanning Data Source with the name "${payload.name}" already exists for the project with ID "${dataSource.projectId}"`
        });
      }

      throw err;
    }
  };

  const deleteSecretScanningDataSource = async (
    { type, dataSourceId }: TDeleteSecretScanningDataSourceDTO,
    actor: OrgServiceActor
  ) => {
    const plan = await licenseService.getPlan(actor.orgId);

    if (!plan.secretScanning)
      throw new BadRequestError({
        message:
          "Failed to delete Secret Scanning Data Source due to plan restriction. Upgrade plan to enable Secret Scanning."
      });

    const dataSource = await secretScanningV2DAL.dataSources.findById(dataSourceId);

    if (!dataSource)
      throw new NotFoundError({
        message: `Could not find ${SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type]} Data Source with ID "${dataSourceId}"`
      });

    const { permission } = await permissionService.getProjectPermission({
      actor: actor.type,
      actorId: actor.id,
      actorAuthMethod: actor.authMethod,
      actorOrgId: actor.orgId,
      actionProjectType: ActionProjectType.SecretScanning,
      projectId: dataSource.projectId
    });

    ForbiddenError.from(permission).throwUnlessCan(
      ProjectPermissionSecretScanningDataSourceActions.Delete,
      ProjectPermissionSub.SecretScanningDataSources
    );

    if (type !== dataSource.type)
      throw new BadRequestError({
        message: `Secret Scanning Data Source with ID "${dataSourceId}" is not configured for ${SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type]}`
      });

    // TODO: clean up webhooks

    await secretScanningV2DAL.dataSources.deleteById(dataSourceId);

    return dataSource as TSecretScanningDataSource;
  };

  const triggerSecretScanningDataSourceScan = async (
    { type, dataSourceId, resourceId }: TTriggerSecretScanningDataSourceDTO,
    actor: OrgServiceActor
  ) => {
    const plan = await licenseService.getPlan(actor.orgId);

    if (!plan.secretScanning)
      throw new BadRequestError({
        message:
          "Failed to trigger scan for Secret Scanning Data Source due to plan restriction. Upgrade plan to enable Secret Scanning."
      });

    const dataSource = await secretScanningV2DAL.dataSources.findById(dataSourceId);

    if (!dataSource)
      throw new NotFoundError({
        message: `Could not find ${SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type]} Data Source with ID "${dataSourceId}"`
      });

    const { permission } = await permissionService.getProjectPermission({
      actor: actor.type,
      actorId: actor.id,
      actorAuthMethod: actor.authMethod,
      actorOrgId: actor.orgId,
      actionProjectType: ActionProjectType.SecretScanning,
      projectId: dataSource.projectId
    });

    ForbiddenError.from(permission).throwUnlessCan(
      ProjectPermissionSecretScanningDataSourceActions.TriggerScans,
      ProjectPermissionSub.SecretScanningDataSources
    );

    if (type !== dataSource.type)
      throw new BadRequestError({
        message: `Secret Scanning Data Source with ID "${dataSourceId}" is not configured for ${SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type]}`
      });

    let connection: TAppConnection | null = null;
    if (dataSource.connection) connection = await decryptAppConnection(dataSource.connection, kmsService);

    let resourceExternalId: string | undefined;

    if (resourceId) {
      const resource = await secretScanningV2DAL.resources.findOne({ id: resourceId, dataSourceId });
      if (!resource) {
        throw new NotFoundError({
          message: `Could not find Secret Scanning Resource with ID "${resourceId}" for Data Source with ID "${dataSourceId}"`
        });
      }
      resourceExternalId = resource.externalId;
    }

    await secretScanningV2Queue.queueDataSourceFullScan(
      {
        ...dataSource,
        connection
      } as TSecretScanningDataSourceWithConnection,
      resourceExternalId
    );

    return dataSource as TSecretScanningDataSource;
  };
|
||||
|
||||
const listSecretScanningResourcesByDataSourceId = async (
|
||||
{ type, dataSourceId }: TFindSecretScanningDataSourceByIdDTO,
|
||||
actor: OrgServiceActor
|
||||
) => {
|
||||
const plan = await licenseService.getPlan(actor.orgId);
|
||||
|
||||
if (!plan.secretScanning)
|
||||
throw new BadRequestError({
|
||||
message:
|
||||
"Failed to access Secret Scanning Resources due to plan restriction. Upgrade plan to enable Secret Scanning."
|
||||
});
|
||||
|
||||
const dataSource = await secretScanningV2DAL.dataSources.findById(dataSourceId);
|
||||
|
||||
if (!dataSource)
|
||||
throw new NotFoundError({
|
||||
message: `Could not find ${SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type]} Data Source with ID "${dataSourceId}"`
|
||||
});
|
||||
|
||||
const { permission } = await permissionService.getProjectPermission({
|
||||
actor: actor.type,
|
||||
actorId: actor.id,
|
||||
actorAuthMethod: actor.authMethod,
|
||||
actorOrgId: actor.orgId,
|
||||
actionProjectType: ActionProjectType.SecretScanning,
|
||||
projectId: dataSource.projectId
|
||||
});
|
||||
|
||||
ForbiddenError.from(permission).throwUnlessCan(
|
||||
ProjectPermissionSecretScanningDataSourceActions.ReadResources,
|
||||
ProjectPermissionSub.SecretScanningDataSources
|
||||
);
|
||||
|
||||
if (type !== dataSource.type)
|
||||
throw new BadRequestError({
|
||||
message: `Secret Scanning Data Source with ID "${dataSourceId}" is not configured for ${SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type]}`
|
||||
});
|
||||
|
||||
const resources = await secretScanningV2DAL.resources.find({
|
||||
dataSourceId
|
||||
});
|
||||
|
||||
return { resources, projectId: dataSource.projectId };
|
||||
};
|
||||
|
const listSecretScanningScansByDataSourceId = async (
  { type, dataSourceId }: TFindSecretScanningDataSourceByIdDTO,
  actor: OrgServiceActor
) => {
  const plan = await licenseService.getPlan(actor.orgId);

  if (!plan.secretScanning)
    throw new BadRequestError({
      message:
        "Failed to access Secret Scanning Scans due to plan restriction. Upgrade plan to enable Secret Scanning."
    });

  const dataSource = await secretScanningV2DAL.dataSources.findById(dataSourceId);

  if (!dataSource)
    throw new NotFoundError({
      message: `Could not find ${SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type]} Data Source with ID "${dataSourceId}"`
    });

  const { permission } = await permissionService.getProjectPermission({
    actor: actor.type,
    actorId: actor.id,
    actorAuthMethod: actor.authMethod,
    actorOrgId: actor.orgId,
    actionProjectType: ActionProjectType.SecretScanning,
    projectId: dataSource.projectId
  });

  ForbiddenError.from(permission).throwUnlessCan(
    ProjectPermissionSecretScanningDataSourceActions.ReadScans,
    ProjectPermissionSub.SecretScanningDataSources
  );

  if (type !== dataSource.type)
    throw new BadRequestError({
      message: `Secret Scanning Data Source with ID "${dataSourceId}" is not configured for ${SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type]}`
    });

  const scans = await secretScanningV2DAL.scans.findByDataSourceId(dataSourceId);

  return { scans, projectId: dataSource.projectId };
};
const listSecretScanningResourcesWithDetailsByDataSourceId = async (
  { type, dataSourceId }: TFindSecretScanningDataSourceByIdDTO,
  actor: OrgServiceActor
) => {
  const plan = await licenseService.getPlan(actor.orgId);

  if (!plan.secretScanning)
    throw new BadRequestError({
      message:
        "Failed to access Secret Scanning Resources due to plan restriction. Upgrade plan to enable Secret Scanning."
    });

  const dataSource = await secretScanningV2DAL.dataSources.findById(dataSourceId);

  if (!dataSource)
    throw new NotFoundError({
      message: `Could not find ${SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type]} Data Source with ID "${dataSourceId}"`
    });

  const { permission } = await permissionService.getProjectPermission({
    actor: actor.type,
    actorId: actor.id,
    actorAuthMethod: actor.authMethod,
    actorOrgId: actor.orgId,
    actionProjectType: ActionProjectType.SecretScanning,
    projectId: dataSource.projectId
  });

  ForbiddenError.from(permission).throwUnlessCan(
    ProjectPermissionSecretScanningDataSourceActions.ReadResources,
    ProjectPermissionSub.SecretScanningDataSources
  );

  if (type !== dataSource.type)
    throw new BadRequestError({
      message: `Secret Scanning Data Source with ID "${dataSourceId}" is not configured for ${SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type]}`
    });

  const resources = await secretScanningV2DAL.resources.findWithDetails({ dataSourceId });

  return { resources: resources as TSecretScanningResourceWithDetails[], projectId: dataSource.projectId };
};

const listSecretScanningScansWithDetailsByDataSourceId = async (
  { type, dataSourceId }: TFindSecretScanningDataSourceByIdDTO,
  actor: OrgServiceActor
) => {
  const plan = await licenseService.getPlan(actor.orgId);

  if (!plan.secretScanning)
    throw new BadRequestError({
      message:
        "Failed to access Secret Scanning Scans due to plan restriction. Upgrade plan to enable Secret Scanning."
    });

  const dataSource = await secretScanningV2DAL.dataSources.findById(dataSourceId);

  if (!dataSource)
    throw new NotFoundError({
      message: `Could not find ${SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type]} Data Source with ID "${dataSourceId}"`
    });

  const { permission } = await permissionService.getProjectPermission({
    actor: actor.type,
    actorId: actor.id,
    actorAuthMethod: actor.authMethod,
    actorOrgId: actor.orgId,
    actionProjectType: ActionProjectType.SecretScanning,
    projectId: dataSource.projectId
  });

  ForbiddenError.from(permission).throwUnlessCan(
    ProjectPermissionSecretScanningDataSourceActions.ReadScans,
    ProjectPermissionSub.SecretScanningDataSources
  );

  if (type !== dataSource.type)
    throw new BadRequestError({
      message: `Secret Scanning Data Source with ID "${dataSourceId}" is not configured for ${SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type]}`
    });

  const scans = await secretScanningV2DAL.scans.findWithDetailsByDataSourceId(dataSourceId);

  return { scans: scans as TSecretScanningScanWithDetails[], projectId: dataSource.projectId };
};

const getSecretScanningUnresolvedFindingsCountByProjectId = async (projectId: string, actor: OrgServiceActor) => {
  const plan = await licenseService.getPlan(actor.orgId);

  if (!plan.secretScanning)
    throw new BadRequestError({
      message:
        "Failed to access Secret Scanning Findings due to plan restriction. Upgrade plan to enable Secret Scanning."
    });

  const { permission } = await permissionService.getProjectPermission({
    actor: actor.type,
    actorId: actor.id,
    actorAuthMethod: actor.authMethod,
    actorOrgId: actor.orgId,
    actionProjectType: ActionProjectType.SecretScanning,
    projectId
  });

  ForbiddenError.from(permission).throwUnlessCan(
    ProjectPermissionSecretScanningFindingActions.Read,
    ProjectPermissionSub.SecretScanningFindings
  );

  const [finding] = await secretScanningV2DAL.findings.find(
    {
      projectId,
      status: SecretScanningFindingStatus.Unresolved
    },
    { count: true }
  );

  return Number(finding?.count ?? 0);
};

const listSecretScanningFindingsByProjectId = async (projectId: string, actor: OrgServiceActor) => {
  const plan = await licenseService.getPlan(actor.orgId);

  if (!plan.secretScanning)
    throw new BadRequestError({
      message:
        "Failed to access Secret Scanning Findings due to plan restriction. Upgrade plan to enable Secret Scanning."
    });

  const { permission } = await permissionService.getProjectPermission({
    actor: actor.type,
    actorId: actor.id,
    actorAuthMethod: actor.authMethod,
    actorOrgId: actor.orgId,
    actionProjectType: ActionProjectType.SecretScanning,
    projectId
  });

  ForbiddenError.from(permission).throwUnlessCan(
    ProjectPermissionSecretScanningFindingActions.Read,
    ProjectPermissionSub.SecretScanningFindings
  );

  const findings = await secretScanningV2DAL.findings.find({
    projectId
  });

  return findings as TSecretScanningFinding[];
};

const updateSecretScanningFindingById = async (
  { findingId, remarks, status }: TUpdateSecretScanningFindingDTO,
  actor: OrgServiceActor
) => {
  const plan = await licenseService.getPlan(actor.orgId);

  if (!plan.secretScanning)
    throw new BadRequestError({
      message:
        "Failed to access Secret Scanning Findings due to plan restriction. Upgrade plan to enable Secret Scanning."
    });

  const finding = await secretScanningV2DAL.findings.findById(findingId);

  if (!finding)
    throw new NotFoundError({
      message: `Could not find Secret Scanning Finding with ID "${findingId}"`
    });

  const { permission } = await permissionService.getProjectPermission({
    actor: actor.type,
    actorId: actor.id,
    actorAuthMethod: actor.authMethod,
    actorOrgId: actor.orgId,
    actionProjectType: ActionProjectType.SecretScanning,
    projectId: finding.projectId
  });

  ForbiddenError.from(permission).throwUnlessCan(
    ProjectPermissionSecretScanningFindingActions.Update,
    ProjectPermissionSub.SecretScanningFindings
  );

  const updatedFinding = await secretScanningV2DAL.findings.updateById(findingId, {
    remarks,
    status
  });

  return { finding: updatedFinding as TSecretScanningFinding, projectId: finding.projectId };
};

const findSecretScanningConfigByProjectId = async (projectId: string, actor: OrgServiceActor) => {
  const plan = await licenseService.getPlan(actor.orgId);

  if (!plan.secretScanning)
    throw new BadRequestError({
      message:
        "Failed to access Secret Scanning Configuration due to plan restriction. Upgrade plan to enable Secret Scanning."
    });

  const { permission } = await permissionService.getProjectPermission({
    actor: actor.type,
    actorId: actor.id,
    actorAuthMethod: actor.authMethod,
    actorOrgId: actor.orgId,
    actionProjectType: ActionProjectType.SecretScanning,
    projectId
  });

  ForbiddenError.from(permission).throwUnlessCan(
    ProjectPermissionSecretScanningConfigActions.Read,
    ProjectPermissionSub.SecretScanningConfigs
  );

  const config = await secretScanningV2DAL.configs.findOne({
    projectId
  });

  return (
    config ?? { content: null, projectId, updatedAt: null } // using default config
  );
};

const upsertSecretScanningConfig = async (
  { projectId, content }: TUpsertSecretScanningConfigDTO,
  actor: OrgServiceActor
) => {
  const plan = await licenseService.getPlan(actor.orgId);

  if (!plan.secretScanning)
    throw new BadRequestError({
      message:
        "Failed to access Secret Scanning Configuration due to plan restriction. Upgrade plan to enable Secret Scanning."
    });

  const { permission } = await permissionService.getProjectPermission({
    actor: actor.type,
    actorId: actor.id,
    actorAuthMethod: actor.authMethod,
    actorOrgId: actor.orgId,
    actionProjectType: ActionProjectType.SecretScanning,
    projectId
  });

  ForbiddenError.from(permission).throwUnlessCan(
    ProjectPermissionSecretScanningConfigActions.Update,
    ProjectPermissionSub.SecretScanningConfigs
  );

  if (content) {
    const tempFolder = await createTempFolder();
    try {
      const configPath = join(tempFolder, "infisical-scan.toml");
      await writeTextToFile(configPath, content);

      // just checking if config parses
      await scanContentAndGetFindings("", configPath);
    } catch (e) {
      throw new BadRequestError({
        message: "Unable to parse configuration: Check syntax and formatting."
      });
    } finally {
      await deleteTempFolder(tempFolder);
    }
  }

  const [config] = await secretScanningV2DAL.configs.upsert(
    [
      {
        projectId,
        content
      }
    ],
    "projectId"
  );

  return config;
};

return {
  listSecretScanningDataSourceOptions,
  listSecretScanningDataSourcesByProjectId,
  listSecretScanningDataSourcesWithDetailsByProjectId,
  findSecretScanningDataSourceById,
  findSecretScanningDataSourceByName,
  createSecretScanningDataSource,
  updateSecretScanningDataSource,
  deleteSecretScanningDataSource,
  triggerSecretScanningDataSourceScan,
  listSecretScanningResourcesByDataSourceId,
  listSecretScanningScansByDataSourceId,
  listSecretScanningResourcesWithDetailsByDataSourceId,
  listSecretScanningScansWithDetailsByDataSourceId,
  getSecretScanningUnresolvedFindingsCountByProjectId,
  listSecretScanningFindingsByProjectId,
  updateSecretScanningFindingById,
  findSecretScanningConfigByProjectId,
  upsertSecretScanningConfig,
  github: githubSecretScanningService(secretScanningV2DAL, secretScanningV2Queue)
};
};
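For illustration, not part of the diff: each data source method above repeats the same four-step preamble (plan gate, lookup, permission check, type check). A minimal refactor sketch, assuming the factory's injected services, could centralize it as a shared guard; the helper name is hypothetical:

const getDataSourceWithAccessCheck = async (
  {
    type,
    dataSourceId,
    action
  }: {
    type: SecretScanningDataSource;
    dataSourceId: string;
    action: ProjectPermissionSecretScanningDataSourceActions;
  },
  actor: OrgServiceActor
) => {
  // 1. plan gate
  const plan = await licenseService.getPlan(actor.orgId);
  if (!plan.secretScanning)
    throw new BadRequestError({
      message: "Failed to access Secret Scanning due to plan restriction. Upgrade plan to enable Secret Scanning."
    });

  // 2. lookup
  const dataSource = await secretScanningV2DAL.dataSources.findById(dataSourceId);
  if (!dataSource)
    throw new NotFoundError({
      message: `Could not find ${SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type]} Data Source with ID "${dataSourceId}"`
    });

  // 3. permission check scoped to the data source's project
  const { permission } = await permissionService.getProjectPermission({
    actor: actor.type,
    actorId: actor.id,
    actorAuthMethod: actor.authMethod,
    actorOrgId: actor.orgId,
    actionProjectType: ActionProjectType.SecretScanning,
    projectId: dataSource.projectId
  });
  ForbiddenError.from(permission).throwUnlessCan(action, ProjectPermissionSub.SecretScanningDataSources);

  // 4. type check
  if (type !== dataSource.type)
    throw new BadRequestError({
      message: `Secret Scanning Data Source with ID "${dataSourceId}" is not configured for ${SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type]}`
    });

  return dataSource;
};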
@@ -0,0 +1,189 @@
import {
  TSecretScanningDataSources,
  TSecretScanningFindingsInsert,
  TSecretScanningResources,
  TSecretScanningScans
} from "@app/db/schemas";
import {
  TGitHubDataSource,
  TGitHubDataSourceInput,
  TGitHubDataSourceListItem,
  TGitHubDataSourceWithConnection,
  TGitHubFinding,
  TQueueGitHubResourceDiffScan
} from "@app/ee/services/secret-scanning-v2/github";
import { TSecretScanningV2DALFactory } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-dal";
import {
  SecretScanningDataSource,
  SecretScanningFindingStatus,
  SecretScanningScanStatus
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";

export type TSecretScanningDataSource = TGitHubDataSource;

export type TSecretScanningDataSourceWithDetails = TSecretScanningDataSource & {
  lastScannedAt?: Date | null;
  lastScanStatus?: SecretScanningScanStatus | null;
  lastScanStatusMessage?: string | null;
  unresolvedFindings: number;
};

export type TSecretScanningResourceWithDetails = TSecretScanningResources & {
  lastScannedAt?: Date | null;
  lastScanStatus?: SecretScanningScanStatus | null;
  lastScanStatusMessage?: string | null;
  unresolvedFindings: number;
};

export type TSecretScanningScanWithDetails = TSecretScanningScans & {
  unresolvedFindings: number;
  resolvedFindings: number;
  resourceName: string;
};

export type TSecretScanningDataSourceWithConnection = TGitHubDataSourceWithConnection;

export type TSecretScanningDataSourceInput = TGitHubDataSourceInput;

export type TSecretScanningDataSourceListItem = TGitHubDataSourceListItem;

export type TSecretScanningFinding = TGitHubFinding;

export type TListSecretScanningDataSourcesByProjectId = {
  projectId: string;
  type?: SecretScanningDataSource;
};

export type TFindSecretScanningDataSourceByIdDTO = {
  dataSourceId: string;
  type: SecretScanningDataSource;
};

export type TFindSecretScanningDataSourceByNameDTO = {
  sourceName: string;
  projectId: string;
  type: SecretScanningDataSource;
};

export type TCreateSecretScanningDataSourceDTO = Pick<
  TSecretScanningDataSource,
  "description" | "name" | "projectId"
> & {
  connectionId?: string;
  type: SecretScanningDataSource;
  isAutoScanEnabled?: boolean;
  config: Partial<TSecretScanningDataSourceInput["config"]>;
};

export type TUpdateSecretScanningDataSourceDTO = Partial<
  Omit<TCreateSecretScanningDataSourceDTO, "projectId" | "connectionId">
> & {
  dataSourceId: string;
  type: SecretScanningDataSource;
};

export type TDeleteSecretScanningDataSourceDTO = {
  type: SecretScanningDataSource;
  dataSourceId: string;
};

export type TTriggerSecretScanningDataSourceDTO = {
  type: SecretScanningDataSource;
  dataSourceId: string;
  resourceId?: string;
};

export type TQueueSecretScanningDataSourceFullScan = {
  dataSourceId: string;
  resourceId: string;
  scanId: string;
};

export type TQueueSecretScanningResourceDiffScan = TQueueGitHubResourceDiffScan;

export type TQueueSecretScanningSendNotification = {
  dataSource: TSecretScanningDataSources;
  resourceName: string;
} & (
  | { status: SecretScanningScanStatus.Failed; errorMessage: string }
  | { status: SecretScanningScanStatus.Completed; numberOfSecrets: number; scanId: string; isDiffScan: boolean }
);

export type TCloneRepository = {
  cloneUrl: string;
  repoPath: string;
};

export type TSecretScanningFactoryListRawResources<T extends TSecretScanningDataSourceWithConnection> = (
  dataSource: T
) => Promise<Pick<TSecretScanningResources, "externalId" | "name" | "type">[]>;

export type TSecretScanningFactoryGetDiffScanResourcePayload<
  P extends TQueueSecretScanningResourceDiffScan["payload"]
> = (payload: P) => Pick<TSecretScanningResources, "externalId" | "name" | "type">;

export type TSecretScanningFactoryGetFullScanPath<T extends TSecretScanningDataSourceWithConnection> = (parameters: {
  dataSource: T;
  resourceName: string;
  tempFolder: string;
}) => Promise<string>;

export type TSecretScanningFactoryGetDiffScanFindingsPayload<
  T extends TSecretScanningDataSourceWithConnection,
  P extends TQueueSecretScanningResourceDiffScan["payload"]
> = (parameters: { dataSource: T; resourceName: string; payload: P; configPath?: string }) => Promise<TFindingsPayload>;

export type TSecretScanningDataSourceRaw = NonNullable<
  Awaited<ReturnType<TSecretScanningV2DALFactory["dataSources"]["findById"]>>
>;

export type TSecretScanningFactoryInitialize<
  T extends TSecretScanningDataSourceWithConnection["connection"] | undefined = undefined,
  C extends TSecretScanningDataSourceCredentials = undefined
> = (
  params: {
    payload: TCreateSecretScanningDataSourceDTO;
    connection: T;
    secretScanningV2DAL: TSecretScanningV2DALFactory;
  },
  callback: (parameters: { credentials?: C; externalId?: string }) => Promise<TSecretScanningDataSourceRaw>
) => Promise<TSecretScanningDataSourceRaw>;

export type TSecretScanningFactoryPostInitialization<
  T extends TSecretScanningDataSourceWithConnection["connection"] | undefined = undefined,
  C extends TSecretScanningDataSourceCredentials = undefined
> = (params: {
  payload: TCreateSecretScanningDataSourceDTO;
  connection: T;
  credentials: C;
  dataSourceId: string;
}) => Promise<void>;

export type TSecretScanningFactory<
  T extends TSecretScanningDataSourceWithConnection,
  C extends TSecretScanningDataSourceCredentials,
  P extends TQueueSecretScanningResourceDiffScan["payload"]
> = () => {
  listRawResources: TSecretScanningFactoryListRawResources<T>;
  getFullScanPath: TSecretScanningFactoryGetFullScanPath<T>;
  initialize: TSecretScanningFactoryInitialize<T["connection"] | undefined, C>;
  postInitialization: TSecretScanningFactoryPostInitialization<T["connection"] | undefined, C>;
  getDiffScanResourcePayload: TSecretScanningFactoryGetDiffScanResourcePayload<P>;
  getDiffScanFindingsPayload: TSecretScanningFactoryGetDiffScanFindingsPayload<T, P>;
};

export type TFindingsPayload = Pick<TSecretScanningFindingsInsert, "details" | "fingerprint" | "severity" | "rule">[];
export type TGetFindingsPayload = Promise<TFindingsPayload>;

export type TUpdateSecretScanningFindingDTO = {
  status?: SecretScanningFindingStatus;
  remarks?: string | null;
  findingId: string;
};

export type TUpsertSecretScanningConfigDTO = {
  projectId: string;
  content: string | null;
};

export type TSecretScanningDataSourceCredentials = undefined;
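For illustration, not part of the diff: the notification payload above is a discriminated union on `status`, so narrowing on it exposes the branch-specific fields. A small sketch with a hypothetical helper:

const describeScanNotification = (notification: TQueueSecretScanningSendNotification): string => {
  // narrowing on `status` gives access to the fields of that branch only
  if (notification.status === SecretScanningScanStatus.Failed) {
    return `Scan failed for ${notification.resourceName}: ${notification.errorMessage}`;
  }

  return `${notification.isDiffScan ? "Diff" : "Full"} scan ${notification.scanId} completed for ${
    notification.resourceName
  } with ${notification.numberOfSecrets} leaked secret(s)`;
};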
@@ -0,0 +1,7 @@
import { z } from "zod";

import { GitHubDataSourceSchema, GitHubFindingSchema } from "@app/ee/services/secret-scanning-v2/github";

export const SecretScanningDataSourceSchema = z.discriminatedUnion("type", [GitHubDataSourceSchema]);

export const SecretScanningFindingSchema = z.discriminatedUnion("resourceType", [GitHubFindingSchema]);
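For illustration, not part of the diff: new data source types extend the arrays above, and an unrecognized discriminator fails validation. A brief sketch of validating an untrusted payload with standard zod:

const parseDataSource = (payload: unknown) => {
  // `safeParse` rejects payloads whose `type` matches no union member
  const result = SecretScanningDataSourceSchema.safeParse(payload);
  if (!result.success) {
    throw new Error(`Invalid Data Source payload: ${result.error.message}`);
  }
  return result.data; // typed as the union member matching `type`
};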
@@ -65,9 +65,9 @@ export function runInfisicalScanOnRepo(repoPath: string, outputPath: string): Pr
  });
}

-export function runInfisicalScan(inputPath: string, outputPath: string): Promise<void> {
+export function runInfisicalScan(inputPath: string, outputPath: string, configPath?: string): Promise<void> {
  return new Promise((resolve, reject) => {
-    const command = `cat "${inputPath}" | infisical scan --exit-code=77 --pipe -r "${outputPath}"`;
+    const command = `cat "${inputPath}" | infisical scan --exit-code=77 --pipe -r "${outputPath}" ${configPath ? `-c "${configPath}"` : ""}`;
    exec(command, (error) => {
      if (error && error.code !== 77) {
        reject(error);
@@ -138,14 +138,14 @@ export async function scanFullRepoContentAndGetFindings(
  }
}

-export async function scanContentAndGetFindings(textContent: string): Promise<SecretMatch[]> {
+export async function scanContentAndGetFindings(textContent: string, configPath?: string): Promise<SecretMatch[]> {
  const tempFolder = await createTempFolder();
  const filePath = join(tempFolder, "content.txt");
  const findingsPath = join(tempFolder, "findings.json");

  try {
    await writeTextToFile(filePath, textContent);
-    await runInfisicalScan(filePath, findingsPath);
+    await runInfisicalScan(filePath, findingsPath, configPath);
    const findingsData = await readFindingsFile(findingsPath);
    return JSON.parse(findingsData) as SecretMatch[];
  } finally {
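For illustration, not part of the diff: the new `configPath` parameter lets callers dry-run a custom scanner config, which is exactly how `upsertSecretScanningConfig` validates user-supplied TOML before persisting it. A sketch, where `userSuppliedToml` is an assumed string input:

const tempFolder = await createTempFolder();
try {
  const configPath = join(tempFolder, "infisical-scan.toml");
  await writeTextToFile(configPath, userSuppliedToml);
  // scanning empty content surfaces parse errors without reporting findings
  await scanContentAndGetFindings("", configPath);
} finally {
  await deleteTempFolder(tempFolder);
}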
@@ -9,6 +9,7 @@ export type SecretMatch = {
  Match: string;
  Secret: string;
  File: string;
  Link: string;
  SymlinkFile: string;
  Commit: string;
  Entropy: number;
@@ -117,6 +117,7 @@ export const OCIVaultSyncFns = {
  syncSecrets: async (secretSync: TOCIVaultSyncWithCredentials, secretMap: TSecretMap) => {
    const {
      connection,
      environment,
      destinationConfig: { compartmentOcid, vaultOcid, keyOcid }
    } = secretSync;
@@ -213,7 +214,7 @@ export const OCIVaultSyncFns = {
    // Update and delete secrets
    for await (const [key, variable] of Object.entries(variables)) {
      // eslint-disable-next-line no-continue
-      if (!matchesSchema(key, secretSync.syncOptions.keySchema)) continue;
+      if (!matchesSchema(key, environment?.slug || "", secretSync.syncOptions.keySchema)) continue;

      // Only update / delete active secrets
      if (variable.lifecycleState === vault.models.SecretSummary.LifecycleState.Active) {
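For illustration, not part of the diff: the change threads the environment slug into the key-schema check. The real `matchesSchema` lives elsewhere in the codebase; a hypothetical sketch of such a matcher, assuming a schema template with {{secretKey}} and {{environment}} placeholders:

const matchesSchemaSketch = (key: string, environmentSlug: string, keySchema?: string): boolean => {
  if (!keySchema) return true; // no schema configured: every key matches
  // escape regex metacharacters, then substitute the placeholders
  const escaped = keySchema.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
  const pattern = escaped
    .replace("\\{\\{secretKey\\}\\}", ".+")
    .replace("\\{\\{environment\\}\\}", environmentSlug);
  return new RegExp(`^${pattern}$`).test(key);
};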
@@ -10,7 +10,8 @@ export const PgSqlLock = {
  KmsRootKeyInit: 2025,
  OrgGatewayRootCaInit: (orgId: string) => pgAdvisoryLockHashText(`org-gateway-root-ca:${orgId}`),
  OrgGatewayCertExchange: (orgId: string) => pgAdvisoryLockHashText(`org-gateway-cert-exchange:${orgId}`),
-  SecretRotationV2Creation: (folderId: string) => pgAdvisoryLockHashText(`secret-rotation-v2-creation:${folderId}`)
+  SecretRotationV2Creation: (folderId: string) => pgAdvisoryLockHashText(`secret-rotation-v2-creation:${folderId}`),
+  CreateProject: (orgId: string) => pgAdvisoryLockHashText(`create-project:${orgId}`)
} as const;

export type TKeyStoreFactory = ReturnType<typeof keyStoreFactory>;
@@ -36,6 +37,8 @@ export const KeyStorePrefixes = {
    `sync-integration-last-run-${projectId}-${environmentSlug}-${secretPath}` as const,
  SecretSyncLock: (syncId: string) => `secret-sync-mutex-${syncId}` as const,
  SecretRotationLock: (rotationId: string) => `secret-rotation-v2-mutex-${rotationId}` as const,
  SecretScanningLock: (dataSourceId: string, resourceExternalId: string) =>
    `secret-scanning-v2-mutex-${dataSourceId}-${resourceExternalId}` as const,
  CaOrderCertificateForSubscriberLock: (subscriberId: string) =>
    `ca-order-certificate-for-subscriber-lock-${subscriberId}` as const,
  SecretSyncLastRunTimestamp: (syncId: string) => `secret-sync-last-run-${syncId}` as const,
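For illustration, not part of the diff: the new prefix helper composes a per-resource mutex key used to serialize concurrent scans of the same resource across workers. From the template above:

const lockKey = KeyStorePrefixes.SecretScanningLock("ds-123", "github-repo-456");
// -> "secret-scanning-v2-mutex-ds-123-github-repo-456"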
@@ -3,6 +3,12 @@ import {
  SECRET_ROTATION_CONNECTION_MAP,
  SECRET_ROTATION_NAME_MAP
} from "@app/ee/services/secret-rotation-v2/secret-rotation-v2-maps";
import { SecretScanningDataSource } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
import {
  AUTO_SYNC_DESCRIPTION_HELPER,
  SECRET_SCANNING_DATA_SOURCE_CONNECTION_MAP,
  SECRET_SCANNING_DATA_SOURCE_NAME_MAP
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-maps";
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
import { APP_CONNECTION_NAME_MAP } from "@app/services/app-connection/app-connection-maps";
import { CaType } from "@app/services/certificate-authority/certificate-authority-enums";
@@ -57,7 +63,8 @@ export enum ApiDocsTags {
  SshHostGroups = "SSH Host Groups",
  KmsKeys = "KMS Keys",
  KmsEncryption = "KMS Encryption",
-  KmsSigning = "KMS Signing"
+  KmsSigning = "KMS Signing",
+  SecretScanning = "Secret Scanning"
}

export const GROUPS = {
@@ -82,6 +89,7 @@ export const GROUPS = {
    limit: "The number of users to return.",
    username: "The username to search for.",
    search: "The text string that user email or name will be filtered by.",
    projectId: "The ID of the project the group belongs to.",
    filterUsers:
      "Whether to filter the list of returned users. 'existingMembers' will only return existing users in the group, 'nonMembers' will only return users not in the group, undefined will return all users in the organization."
  },
@@ -393,6 +401,8 @@ export const KUBERNETES_AUTH = {
    caCert: "The PEM-encoded CA cert for the Kubernetes API server.",
    tokenReviewerJwt:
      "Optional JWT token for accessing Kubernetes TokenReview API. If provided, this long-lived token will be used to validate service account tokens during authentication. If omitted, the client's own JWT will be used instead, which requires the client to have the system:auth-delegator ClusterRole binding.",
    tokenReviewMode:
      "The mode to use for token review. Must be one of: 'api', 'gateway'. If gateway is selected, the gateway must be deployed in Kubernetes, and the gateway must have the system:auth-delegator ClusterRole binding.",
    allowedNamespaces:
      "The comma-separated list of trusted namespaces that service accounts must belong to authenticate with Infisical.",
    allowedNames: "The comma-separated list of trusted service account names that can authenticate with Infisical.",
@@ -410,6 +420,8 @@ export const KUBERNETES_AUTH = {
    caCert: "The new PEM-encoded CA cert for the Kubernetes API server.",
    tokenReviewerJwt:
      "Optional JWT token for accessing Kubernetes TokenReview API. If provided, this long-lived token will be used to validate service account tokens during authentication. If omitted, the client's own JWT will be used instead, which requires the client to have the system:auth-delegator ClusterRole binding.",
    tokenReviewMode:
      "The mode to use for token review. Must be one of: 'api', 'gateway'. If gateway is selected, the gateway must be deployed in Kubernetes, and the gateway must have the system:auth-delegator ClusterRole binding.",
    allowedNamespaces:
      "The new comma-separated list of trusted namespaces that service accounts must belong to authenticate with Infisical.",
    allowedNames: "The new comma-separated list of trusted service account names that can authenticate with Infisical.",
@@ -2432,3 +2444,81 @@ export const SecretRotations = {
    }
  }
};

export const SecretScanningDataSources = {
  LIST: (type?: SecretScanningDataSource) => ({
    projectId: `The ID of the project to list ${type ? SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type] : "Scanning"} Data Sources from.`
  }),
  GET_BY_ID: (type: SecretScanningDataSource) => ({
    dataSourceId: `The ID of the ${SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type]} Data Source to retrieve.`
  }),
  GET_BY_NAME: (type: SecretScanningDataSource) => ({
    sourceName: `The name of the ${SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type]} Data Source to retrieve.`,
    projectId: `The ID of the project the ${SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type]} Data Source is located in.`
  }),
  CREATE: (type: SecretScanningDataSource) => {
    const sourceType = SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type];
    const autoScanDescription = AUTO_SYNC_DESCRIPTION_HELPER[type];
    return {
      name: `The name of the ${sourceType} Data Source to create. Must be slug-friendly.`,
      description: `An optional description for the ${sourceType} Data Source.`,
      projectId: `The ID of the project to create the ${sourceType} Data Source in.`,
      connectionId: `The ID of the ${
        APP_CONNECTION_NAME_MAP[SECRET_SCANNING_DATA_SOURCE_CONNECTION_MAP[type]]
      } Connection to use for this Data Source.`,
      isAutoScanEnabled: `Whether scans should be automatically performed when a ${autoScanDescription.verb} occurs to ${autoScanDescription.noun} associated with this Data Source.`,
      config: `The configuration parameters to use for this Data Source.`
    };
  },
  UPDATE: (type: SecretScanningDataSource) => {
    const typeName = SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type];
    const autoScanDescription = AUTO_SYNC_DESCRIPTION_HELPER[type];

    return {
      dataSourceId: `The ID of the ${typeName} Data Source to be updated.`,
      name: `The updated name of the ${typeName} Data Source. Must be slug-friendly.`,
      description: `The updated description of the ${typeName} Data Source.`,
      isAutoScanEnabled: `Whether scans should be automatically performed when a ${autoScanDescription.verb} occurs to ${autoScanDescription.noun} associated with this Data Source.`,
      config: `The updated configuration parameters to use for this Data Source.`
    };
  },
  DELETE: (type: SecretScanningDataSource) => ({
    dataSourceId: `The ID of the ${SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type]} Data Source to be deleted.`
  }),
  SCAN: (type: SecretScanningDataSource) => ({
    dataSourceId: `The ID of the ${SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type]} Data Source to trigger a scan for.`,
    resourceId: `The ID of the individual Data Source resource to trigger a scan for.`
  }),
  LIST_RESOURCES: (type: SecretScanningDataSource) => ({
    dataSourceId: `The ID of the ${SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type]} Data Source to list resources from.`
  }),
  LIST_SCANS: (type: SecretScanningDataSource) => ({
    dataSourceId: `The ID of the ${SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type]} Data Source to list scans for.`
  }),
  CONFIG: {
    GITHUB: {
      includeRepos: 'The repositories to include when scanning. Defaults to all repositories (["*"]).'
    }
  }
};

export const SecretScanningFindings = {
  LIST: {
    projectId: `The ID of the project to list Secret Scanning Findings from.`
  },
  UPDATE: {
    findingId: "The ID of the Secret Scanning Finding to update.",
    status: "The updated status of the specified Secret Scanning Finding.",
    remarks: "Remarks pertaining to the status of this finding."
  }
};

export const SecretScanningConfigs = {
  GET_BY_PROJECT_ID: {
    projectId: `The ID of the project to retrieve the Secret Scanning Configuration for.`
  },
  UPDATE: {
    projectId: "The ID of the project to update the Secret Scanning Configuration for.",
    content: "The contents of the Secret Scanning Configuration file."
  }
};
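For illustration, not part of the diff: these description constants are typically consumed in route schemas via zod's .describe. A hedged sketch, assuming a GitHub member on the SecretScanningDataSource enum:

const querystring = z.object({
  // the LIST helper interpolates the source type name into the description
  projectId: z.string().uuid().describe(SecretScanningDataSources.LIST(SecretScanningDataSource.GitHub).projectId)
});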
@@ -1,5 +1,7 @@
import { z } from "zod";

import { QueueWorkerProfile } from "@app/lib/types";

import { removeTrailingSlash } from "../fn";
import { CustomLogger } from "../logger/logger";
import { zpStr } from "../zod";
@@ -69,6 +71,7 @@ const envSchema = z
    ENCRYPTION_KEY: zpStr(z.string().optional()),
    ROOT_ENCRYPTION_KEY: zpStr(z.string().optional()),
    QUEUE_WORKERS_ENABLED: zodStrBool.default("true"),
    QUEUE_WORKER_PROFILE: z.nativeEnum(QueueWorkerProfile).default(QueueWorkerProfile.All),
    HTTPS_ENABLED: zodStrBool,
    ROTATION_DEVELOPMENT_MODE: zodStrBool.default("false").optional(),
    // smtp options
@@ -230,6 +233,14 @@ const envSchema = z
    INF_APP_CONNECTION_GITHUB_APP_SLUG: zpStr(z.string().optional()),
    INF_APP_CONNECTION_GITHUB_APP_ID: zpStr(z.string().optional()),

    // github radar app
    INF_APP_CONNECTION_GITHUB_RADAR_APP_CLIENT_ID: zpStr(z.string().optional()),
    INF_APP_CONNECTION_GITHUB_RADAR_APP_CLIENT_SECRET: zpStr(z.string().optional()),
    INF_APP_CONNECTION_GITHUB_RADAR_APP_PRIVATE_KEY: zpStr(z.string().optional()),
    INF_APP_CONNECTION_GITHUB_RADAR_APP_SLUG: zpStr(z.string().optional()),
    INF_APP_CONNECTION_GITHUB_RADAR_APP_ID: zpStr(z.string().optional()),
    INF_APP_CONNECTION_GITHUB_RADAR_APP_WEBHOOK_SECRET: zpStr(z.string().optional()),

    // gcp app
    INF_APP_CONNECTION_GCP_SERVICE_ACCOUNT_CREDENTIAL: zpStr(z.string().optional()),
@@ -298,6 +309,13 @@ const envSchema = z
      Boolean(data.SECRET_SCANNING_GIT_APP_ID) &&
      Boolean(data.SECRET_SCANNING_PRIVATE_KEY) &&
      Boolean(data.SECRET_SCANNING_WEBHOOK_SECRET),
    isSecretScanningV2Configured:
      Boolean(data.INF_APP_CONNECTION_GITHUB_RADAR_APP_ID) &&
      Boolean(data.INF_APP_CONNECTION_GITHUB_RADAR_APP_PRIVATE_KEY) &&
      Boolean(data.INF_APP_CONNECTION_GITHUB_RADAR_APP_SLUG) &&
      Boolean(data.INF_APP_CONNECTION_GITHUB_RADAR_APP_CLIENT_ID) &&
      Boolean(data.INF_APP_CONNECTION_GITHUB_RADAR_APP_CLIENT_SECRET) &&
      Boolean(data.INF_APP_CONNECTION_GITHUB_RADAR_APP_WEBHOOK_SECRET),
    isHsmConfigured:
      Boolean(data.HSM_LIB_PATH) && Boolean(data.HSM_PIN) && Boolean(data.HSM_KEY_LABEL) && data.HSM_SLOT !== undefined,
    samlDefaultOrgSlug: data.DEFAULT_SAML_ORG_SLUG,
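For illustration, not part of the diff: the derived flag is only true when all six GitHub Radar variables are set, so callers can gate Radar-dependent features on it. A hedged sketch, assuming this module's getConfig accessor:

const appCfg = getConfig();
if (!appCfg.isSecretScanningV2Configured) {
  // mirrors how isSecretScanningConfigured gates the v1 integration
  throw new BadRequestError({ message: "GitHub Radar App connection is not configured" });
}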
@@ -32,3 +32,24 @@ export const shake = <RemovedKeys extends string, T = object>(
    return acc;
  }, {} as T);
};

export const titleCaseToCamelCase = (obj: unknown): unknown => {
  if (typeof obj !== "object" || obj === null) {
    return obj;
  }

  if (Array.isArray(obj)) {
    return obj.map((item: object) => titleCaseToCamelCase(item));
  }

  const result: Record<string, unknown> = {};

  for (const key in obj) {
    if (Object.prototype.hasOwnProperty.call(obj, key)) {
      const camelKey = key.charAt(0).toLowerCase() + key.slice(1);
      result[camelKey] = titleCaseToCamelCase((obj as Record<string, unknown>)[key]);
    }
  }

  return result;
};
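For illustration, not part of the diff: the helper recursively lowercases the first letter of every key, which normalizes TitleCase scanner output such as SecretMatch above into camelCase records. A usage sketch:

const finding = titleCaseToCamelCase({
  File: "src/app.ts",
  Secret: "hunter2",
  Commit: "ea5a5e0",
  Entropy: 3.2
});
// -> { file: "src/app.ts", secret: "hunter2", commit: "ea5a5e0", entropy: 3.2 }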
backend/src/lib/gateway/gateway.ts (new file, 411 lines)
@@ -0,0 +1,411 @@
/* eslint-disable no-await-in-loop */
import crypto from "node:crypto";
import net from "node:net";

import quicDefault, * as quicModule from "@infisical/quic";
import axios from "axios";
import https from "https";

import { BadRequestError } from "../errors";
import { logger } from "../logger";
import {
  GatewayProxyProtocol,
  IGatewayProxyOptions,
  IGatewayProxyServer,
  TGatewayTlsOptions,
  TPingGatewayAndVerifyDTO
} from "./types";

const DEFAULT_MAX_RETRIES = 3;
const DEFAULT_RETRY_DELAY = 1000; // 1 second

const quic = quicDefault || quicModule;

const parseSubjectDetails = (data: string) => {
  const values: Record<string, string> = {};
  data.split("\n").forEach((el) => {
    const [key, value] = el.split("=");
    values[key.trim()] = value.trim();
  });
  return values;
};

const createQuicConnection = async (
  relayHost: string,
  relayPort: number,
  tlsOptions: TGatewayTlsOptions,
  identityId: string,
  orgId: string
) => {
  const client = await quic.QUICClient.createQUICClient({
    host: relayHost,
    port: relayPort,
    config: {
      ca: tlsOptions.ca,
      cert: tlsOptions.cert,
      key: tlsOptions.key,
      applicationProtos: ["infisical-gateway"],
      verifyPeer: true,
      verifyCallback: async (certs) => {
        if (!certs || certs.length === 0) return quic.native.CryptoError.CertificateRequired;
        const serverCertificate = new crypto.X509Certificate(Buffer.from(certs[0]));
        const caCertificate = new crypto.X509Certificate(tlsOptions.ca);
        const isValidServerCertificate = serverCertificate.verify(caCertificate.publicKey);
        if (!isValidServerCertificate) return quic.native.CryptoError.BadCertificate;

        const subjectDetails = parseSubjectDetails(serverCertificate.subject);
        if (subjectDetails.OU !== "Gateway" || subjectDetails.CN !== identityId || subjectDetails.O !== orgId) {
          return quic.native.CryptoError.CertificateUnknown;
        }

        if (new Date() > new Date(serverCertificate.validTo) || new Date() < new Date(serverCertificate.validFrom)) {
          return quic.native.CryptoError.CertificateExpired;
        }

        const formatedRelayHost =
          process.env.NODE_ENV === "development" ? relayHost.replace("host.docker.internal", "127.0.0.1") : relayHost;
        if (!serverCertificate.checkIP(formatedRelayHost)) return quic.native.CryptoError.BadCertificate;
      },
      maxIdleTimeout: 90000,
      keepAliveIntervalTime: 30000
    },
    crypto: {
      ops: {
        randomBytes: async (data) => {
          crypto.getRandomValues(new Uint8Array(data));
        }
      }
    }
  });
  return client;
};

export const pingGatewayAndVerify = async ({
  relayHost,
  relayPort,
  tlsOptions,
  maxRetries = DEFAULT_MAX_RETRIES,
  identityId,
  orgId
}: TPingGatewayAndVerifyDTO) => {
  let lastError: Error | null = null;
  const quicClient = await createQuicConnection(relayHost, relayPort, tlsOptions, identityId, orgId).catch((err) => {
    throw new BadRequestError({
      message: (err as Error)?.message,
      error: err as Error
    });
  });

  for (let attempt = 1; attempt <= maxRetries; attempt += 1) {
    try {
      const stream = quicClient.connection.newStream("bidi");
      const pingWriter = stream.writable.getWriter();
      await pingWriter.write(Buffer.from("PING\n"));
      pingWriter.releaseLock();

      // Read PONG response
      const reader = stream.readable.getReader();
      const { value, done } = await reader.read();

      if (done) {
        throw new Error("Gateway closed before receiving PONG");
      }

      const response = Buffer.from(value).toString();

      if (response !== "PONG\n" && response !== "PONG") {
        throw new Error(`Failed to Ping. Unexpected response: ${response}`);
      }

      reader.releaseLock();
      return;
    } catch (err) {
      lastError = err as Error;

      if (attempt < maxRetries) {
        await new Promise((resolve) => {
          setTimeout(resolve, DEFAULT_RETRY_DELAY);
        });
      }
    } finally {
      await quicClient.destroy();
    }
  }

  logger.error(lastError);
  throw new BadRequestError({
    message: `Failed to ping gateway after ${maxRetries} attempts. Last error: ${lastError?.message}`
  });
};

const setupProxyServer = async ({
  targetPort,
  targetHost,
  tlsOptions,
  relayHost,
  relayPort,
  identityId,
  orgId,
  protocol = GatewayProxyProtocol.Tcp,
  httpsAgent
}: {
  targetHost: string;
  targetPort: number;
  relayPort: number;
  relayHost: string;
  tlsOptions: TGatewayTlsOptions;
  identityId: string;
  orgId: string;
  protocol?: GatewayProxyProtocol;
  httpsAgent?: https.Agent;
}): Promise<IGatewayProxyServer> => {
  const quicClient = await createQuicConnection(relayHost, relayPort, tlsOptions, identityId, orgId).catch((err) => {
    throw new BadRequestError({
      error: err as Error
    });
  });
  const proxyErrorMsg = [""];

  return new Promise((resolve, reject) => {
    const server = net.createServer();

    let streamClosed = false;

    // eslint-disable-next-line @typescript-eslint/no-misused-promises
    server.on("connection", async (clientConn) => {
      try {
        clientConn.setKeepAlive(true, 30000); // 30 seconds
        clientConn.setNoDelay(true);

        const stream = quicClient.connection.newStream("bidi");

        const forwardWriter = stream.writable.getWriter();
        let command: string;

        if (protocol === GatewayProxyProtocol.Http) {
          const targetUrl = `${targetHost}:${targetPort}`; // note(daniel): targetHost MUST include the scheme (https|http)
          command = `FORWARD-HTTP ${targetUrl}`;
          logger.debug(`Using HTTP proxy mode: ${command.trim()}`);

          // extract ca certificate from httpsAgent if present
          if (httpsAgent && targetHost.startsWith("https://")) {
            const agentOptions = httpsAgent.options;
            if (agentOptions && agentOptions.ca) {
              const caCert = Array.isArray(agentOptions.ca) ? agentOptions.ca.join("\n") : agentOptions.ca;
              const caB64 = Buffer.from(caCert as string).toString("base64");
              command += ` ca=${caB64}`;

              const rejectUnauthorized = agentOptions.rejectUnauthorized !== false;
              command += ` verify=${rejectUnauthorized}`;

              logger.debug(`Using HTTP proxy mode [command=${command.trim()}]`);
            }
          }

          command += "\n";
        } else if (protocol === GatewayProxyProtocol.Tcp) {
          // For TCP mode, send FORWARD-TCP with host:port
          command = `FORWARD-TCP ${targetHost}:${targetPort}\n`;
          logger.debug(`Using TCP proxy mode: ${command.trim()}`);
        } else {
          throw new BadRequestError({
            message: `Invalid protocol: ${protocol as string}`
          });
        }

        await forwardWriter.write(Buffer.from(command));
        forwardWriter.releaseLock();

        // Set up bidirectional copy
        const setupCopy = () => {
          // Client to QUIC
          // eslint-disable-next-line
          (async () => {
            const writer = stream.writable.getWriter();

            // Create a handler for client data
            clientConn.on("data", (chunk) => {
              writer.write(chunk).catch((err) => {
                proxyErrorMsg.push((err as Error)?.message);
              });
            });

            // Handle client connection close
            clientConn.on("end", () => {
              if (!streamClosed) {
                try {
                  writer.close().catch((err) => {
                    logger.debug(err, "Error closing writer (already closed)");
                  });
                } catch (error) {
                  logger.debug(error, "Error in writer close");
                }
              }
            });

            clientConn.on("error", (clientConnErr) => {
              writer.abort(clientConnErr?.message).catch((err) => {
                proxyErrorMsg.push((err as Error)?.message);
              });
            });
          })();

          // QUIC to Client
          void (async () => {
            try {
              const reader = stream.readable.getReader();

              let reading = true;
              while (reading) {
                const { value, done } = await reader.read();

                if (done) {
                  reading = false;
                  clientConn.end(); // Close client connection when QUIC stream ends
                  break;
                }

                // Write data to TCP client
                const canContinue = clientConn.write(Buffer.from(value));

                // Handle backpressure
                if (!canContinue) {
                  await new Promise((res) => {
                    clientConn.once("drain", res);
                  });
                }
              }
            } catch (err) {
              proxyErrorMsg.push((err as Error)?.message);
              clientConn.destroy();
            }
          })();
        };

        setupCopy();
        // Handle connection closure
        clientConn.on("close", () => {
          if (!streamClosed) {
            streamClosed = true;
            stream.destroy().catch((err) => {
              logger.debug(err, "Stream already destroyed during close event");
            });
          }
        });

        const cleanup = async () => {
          try {
            clientConn?.destroy();
          } catch (err) {
            logger.debug(err, "Error destroying client connection");
          }

          if (!streamClosed) {
            streamClosed = true;
            try {
              await stream.destroy();
            } catch (err) {
              logger.debug(err, "Error destroying stream (might be already closed)");
            }
          }
        };

        clientConn.on("error", (clientConnErr) => {
          logger.error(clientConnErr, "Client socket error");
          cleanup().catch((err) => {
            logger.error(err, "Client conn cleanup");
          });
        });

        clientConn.on("end", () => {
          cleanup().catch((err) => {
            logger.error(err, "Client conn end");
          });
        });
      } catch (err) {
        logger.error(err, "Failed to establish target connection:");
        clientConn.end();
        reject(err);
      }
    });

    server.on("error", (err) => {
      reject(err);
    });

    server.on("close", () => {
      quicClient?.destroy().catch((err) => {
        logger.error(err, "Failed to destroy quic client");
      });
    });

    server.listen(0, () => {
      const address = server.address();
      if (!address || typeof address === "string") {
        server.close();
        reject(new Error("Failed to get server port"));
        return;
      }

      logger.info(`Gateway proxy started on port ${address.port} (${protocol} mode)`);
      resolve({
        server,
        port: address.port,
        cleanup: async () => {
          try {
            server.close();
          } catch (err) {
            logger.debug(err, "Error closing server");
          }

          try {
            await quicClient?.destroy();
          } catch (err) {
            logger.debug(err, "Error destroying QUIC client");
          }
        },
        getProxyError: () => proxyErrorMsg.join(",")
      });
    });
  });
};

export const withGatewayProxy = async <T>(
  callback: (port: number, httpsAgent?: https.Agent) => Promise<T>,
  options: IGatewayProxyOptions
): Promise<T> => {
  const { relayHost, relayPort, targetHost, targetPort, tlsOptions, identityId, orgId, protocol, httpsAgent } = options;

  // Setup the proxy server
  const { port, cleanup, getProxyError } = await setupProxyServer({
    targetHost,
    targetPort,
    relayPort,
    relayHost,
    tlsOptions,
    identityId,
    orgId,
    protocol,
    httpsAgent
  });

  try {
    // Execute the callback with the allocated port
    return await callback(port, httpsAgent);
  } catch (err) {
    const proxyErrorMessage = getProxyError();
    if (proxyErrorMessage) {
      logger.error(new Error(proxyErrorMessage), "Failed to proxy");
    }
    logger.error(err, "Failed to do gateway");
    let errorMessage = proxyErrorMessage || (err as Error)?.message;
    if (axios.isAxiosError(err) && (err.response?.data as { message?: string })?.message) {
      errorMessage = (err.response?.data as { message: string }).message;
    }

    throw new BadRequestError({ message: errorMessage });
  } finally {
    // Ensure cleanup happens regardless of success or failure
    await cleanup();
  }
};
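For illustration, not part of the diff: a usage sketch of the new HTTP proxy mode, where relayHost, relayPort, tlsOptions, identityId, orgId, and httpsAgent are assumed to be in scope:

const response = await withGatewayProxy(
  async (port, agent) => {
    // the local proxy listens on an ephemeral localhost port; the gateway
    // terminates TLS toward the target, so the client speaks plain HTTP here
    return axios.get(`http://localhost:${port}/api/health`, { httpsAgent: agent });
  },
  {
    relayHost,
    relayPort,
    targetHost: "https://10.0.0.5", // in HTTP mode targetHost must include the scheme
    targetPort: 8443,
    tlsOptions,
    identityId,
    orgId,
    protocol: GatewayProxyProtocol.Http,
    httpsAgent
  }
);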
@@ -1,392 +1,2 @@
/* eslint-disable no-await-in-loop */
import crypto from "node:crypto";
import net from "node:net";

import quicDefault, * as quicModule from "@infisical/quic";
import axios from "axios";

import { BadRequestError } from "../errors";
import { logger } from "../logger";

const DEFAULT_MAX_RETRIES = 3;
const DEFAULT_RETRY_DELAY = 1000; // 1 second

const quic = quicDefault || quicModule;

const parseSubjectDetails = (data: string) => {
  const values: Record<string, string> = {};
  data.split("\n").forEach((el) => {
    const [key, value] = el.split("=");
    values[key.trim()] = value.trim();
  });
  return values;
};

type TTlsOption = { ca: string; cert: string; key: string };

const createQuicConnection = async (
  relayHost: string,
  relayPort: number,
  tlsOptions: TTlsOption,
  identityId: string,
  orgId: string
) => {
  const client = await quic.QUICClient.createQUICClient({
    host: relayHost,
    port: relayPort,
    config: {
      ca: tlsOptions.ca,
      cert: tlsOptions.cert,
      key: tlsOptions.key,
      applicationProtos: ["infisical-gateway"],
      verifyPeer: true,
      verifyCallback: async (certs) => {
        if (!certs || certs.length === 0) return quic.native.CryptoError.CertificateRequired;
        const serverCertificate = new crypto.X509Certificate(Buffer.from(certs[0]));
        const caCertificate = new crypto.X509Certificate(tlsOptions.ca);
        const isValidServerCertificate = serverCertificate.verify(caCertificate.publicKey);
        if (!isValidServerCertificate) return quic.native.CryptoError.BadCertificate;

        const subjectDetails = parseSubjectDetails(serverCertificate.subject);
        if (subjectDetails.OU !== "Gateway" || subjectDetails.CN !== identityId || subjectDetails.O !== orgId) {
          return quic.native.CryptoError.CertificateUnknown;
        }

        if (new Date() > new Date(serverCertificate.validTo) || new Date() < new Date(serverCertificate.validFrom)) {
          return quic.native.CryptoError.CertificateExpired;
        }

        const formatedRelayHost =
          process.env.NODE_ENV === "development" ? relayHost.replace("host.docker.internal", "127.0.0.1") : relayHost;
        if (!serverCertificate.checkIP(formatedRelayHost)) return quic.native.CryptoError.BadCertificate;
      },
      maxIdleTimeout: 90000,
      keepAliveIntervalTime: 30000
    },
    crypto: {
      ops: {
        randomBytes: async (data) => {
          crypto.getRandomValues(new Uint8Array(data));
        }
      }
    }
  });
  return client;
};

type TPingGatewayAndVerifyDTO = {
  relayHost: string;
  relayPort: number;
  tlsOptions: TTlsOption;
  maxRetries?: number;
  identityId: string;
  orgId: string;
};

export const pingGatewayAndVerify = async ({
  relayHost,
  relayPort,
  tlsOptions,
  maxRetries = DEFAULT_MAX_RETRIES,
  identityId,
  orgId
}: TPingGatewayAndVerifyDTO) => {
  let lastError: Error | null = null;
  const quicClient = await createQuicConnection(relayHost, relayPort, tlsOptions, identityId, orgId).catch((err) => {
    throw new BadRequestError({
      message: (err as Error)?.message,
      error: err as Error
    });
  });

  for (let attempt = 1; attempt <= maxRetries; attempt += 1) {
    try {
      const stream = quicClient.connection.newStream("bidi");
      const pingWriter = stream.writable.getWriter();
      await pingWriter.write(Buffer.from("PING\n"));
      pingWriter.releaseLock();

      // Read PONG response
      const reader = stream.readable.getReader();
      const { value, done } = await reader.read();

      if (done) {
        throw new Error("Gateway closed before receiving PONG");
      }

      const response = Buffer.from(value).toString();

      if (response !== "PONG\n" && response !== "PONG") {
        throw new Error(`Failed to Ping. Unexpected response: ${response}`);
      }

      reader.releaseLock();
      return;
    } catch (err) {
      lastError = err as Error;

      if (attempt < maxRetries) {
        await new Promise((resolve) => {
          setTimeout(resolve, DEFAULT_RETRY_DELAY);
        });
      }
    } finally {
      await quicClient.destroy();
    }
  }

  logger.error(lastError);
  throw new BadRequestError({
    message: `Failed to ping gateway after ${maxRetries} attempts. Last error: ${lastError?.message}`
  });
};

interface TProxyServer {
  server: net.Server;
  port: number;
  cleanup: () => Promise<void>;
  getProxyError: () => string;
}

const setupProxyServer = async ({
  targetPort,
  targetHost,
  tlsOptions,
  relayHost,
  relayPort,
  identityId,
  orgId
}: {
  targetHost: string;
  targetPort: number;
  relayPort: number;
  relayHost: string;
  tlsOptions: TTlsOption;
  identityId: string;
  orgId: string;
}): Promise<TProxyServer> => {
  const quicClient = await createQuicConnection(relayHost, relayPort, tlsOptions, identityId, orgId).catch((err) => {
    throw new BadRequestError({
      error: err as Error
    });
  });
  const proxyErrorMsg = [""];

  return new Promise((resolve, reject) => {
    const server = net.createServer();

    let streamClosed = false;

    // eslint-disable-next-line @typescript-eslint/no-misused-promises
    server.on("connection", async (clientConn) => {
      try {
        clientConn.setKeepAlive(true, 30000); // 30 seconds
        clientConn.setNoDelay(true);

        const stream = quicClient.connection.newStream("bidi");
        // Send FORWARD-TCP command
        const forwardWriter = stream.writable.getWriter();
        await forwardWriter.write(Buffer.from(`FORWARD-TCP ${targetHost}:${targetPort}\n`));
        forwardWriter.releaseLock();

        // Set up bidirectional copy
        const setupCopy = () => {
          // Client to QUIC
          // eslint-disable-next-line
          (async () => {
            const writer = stream.writable.getWriter();

            // Create a handler for client data
            clientConn.on("data", (chunk) => {
              writer.write(chunk).catch((err) => {
                proxyErrorMsg.push((err as Error)?.message);
              });
            });

            // Handle client connection close
            clientConn.on("end", () => {
              if (!streamClosed) {
                try {
                  writer.close().catch((err) => {
                    logger.debug(err, "Error closing writer (already closed)");
                  });
                } catch (error) {
                  logger.debug(error, "Error in writer close");
                }
              }
            });

            clientConn.on("error", (clientConnErr) => {
              writer.abort(clientConnErr?.message).catch((err) => {
                proxyErrorMsg.push((err as Error)?.message);
              });
            });
          })();

          // QUIC to Client
          void (async () => {
            try {
              const reader = stream.readable.getReader();

              let reading = true;
              while (reading) {
                const { value, done } = await reader.read();

                if (done) {
                  reading = false;
                  clientConn.end(); // Close client connection when QUIC stream ends
                  break;
                }

                // Write data to TCP client
                const canContinue = clientConn.write(Buffer.from(value));

                // Handle backpressure
                if (!canContinue) {
                  await new Promise((res) => {
                    clientConn.once("drain", res);
                  });
                }
              }
            } catch (err) {
              proxyErrorMsg.push((err as Error)?.message);
              clientConn.destroy();
            }
          })();
        };

        setupCopy();
        // Handle connection closure
        clientConn.on("close", () => {
          if (!streamClosed) {
            streamClosed = true;
            stream.destroy().catch((err) => {
              logger.debug(err, "Stream already destroyed during close event");
            });
          }
        });

        const cleanup = async () => {
          try {
            clientConn?.destroy();
          } catch (err) {
            logger.debug(err, "Error destroying client connection");
          }

          if (!streamClosed) {
            streamClosed = true;
            try {
              await stream.destroy();
            } catch (err) {
              logger.debug(err, "Error destroying stream (might be already closed)");
            }
          }
        };

        clientConn.on("error", (clientConnErr) => {
          logger.error(clientConnErr, "Client socket error");
          cleanup().catch((err) => {
            logger.error(err, "Client conn cleanup");
          });
        });

        clientConn.on("end", () => {
          cleanup().catch((err) => {
            logger.error(err, "Client conn end");
          });
        });
      } catch (err) {
        logger.error(err, "Failed to establish target connection:");
        clientConn.end();
        reject(err);
      }
    });

    server.on("error", (err) => {
      reject(err);
    });

    server.on("close", () => {
      quicClient?.destroy().catch((err) => {
        logger.error(err, "Failed to destroy quic client");
      });
    });

    server.listen(0, () => {
      const address = server.address();
      if (!address || typeof address === "string") {
        server.close();
        reject(new Error("Failed to get server port"));
        return;
      }

      logger.info("Gateway proxy started");
      resolve({
        server,
        port: address.port,
        cleanup: async () => {
          try {
            server.close();
          } catch (err) {
            logger.debug(err, "Error closing server");
          }

          try {
            await quicClient?.destroy();
          } catch (err) {
            logger.debug(err, "Error destroying QUIC client");
          }
        },
        getProxyError: () => proxyErrorMsg.join(",")
      });
    });
  });
};

interface ProxyOptions {
  targetHost: string;
  targetPort: number;
  relayHost: string;
  relayPort: number;
  tlsOptions: TTlsOption;
  identityId: string;
  orgId: string;
}

export const withGatewayProxy = async <T>(
  callback: (port: number) => Promise<T>,
  options: ProxyOptions
): Promise<T> => {
  const { relayHost, relayPort, targetHost, targetPort, tlsOptions, identityId, orgId } = options;

  // Setup the proxy server
  const { port, cleanup, getProxyError } = await setupProxyServer({
    targetHost,
    targetPort,
    relayPort,
    relayHost,
    tlsOptions,
    identityId,
    orgId
  });

  try {
    // Execute the callback with the allocated port
    return await callback(port);
  } catch (err) {
    const proxyErrorMessage = getProxyError();
|
||||
if (proxyErrorMessage) {
|
||||
logger.error(new Error(proxyErrorMessage), "Failed to proxy");
|
||||
}
|
||||
logger.error(err, "Failed to do gateway");
|
||||
let errorMessage = proxyErrorMessage || (err as Error)?.message;
|
||||
if (axios.isAxiosError(err) && (err.response?.data as { message?: string })?.message) {
|
||||
errorMessage = (err.response?.data as { message: string }).message;
|
||||
}
|
||||
|
||||
throw new BadRequestError({ message: errorMessage });
|
||||
} finally {
|
||||
// Ensure cleanup happens regardless of success or failure
|
||||
await cleanup();
|
||||
}
|
||||
};
|
||||
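A minimal usage sketch (not part of the diff; all option values are placeholders): withGatewayProxy allocates a local TCP port that tunnels to targetHost:targetPort through the QUIC relay, runs the callback against that port, and always tears the proxy down afterwards.

const tlsOptions = { ca: "<ca-pem>", cert: "<cert-pem>", key: "<key-pem>" };

const status = await withGatewayProxy(
  async (port) => {
    // the callback only sees a local ephemeral port; the relay is transparent
    return fetch(`http://localhost:${port}/healthz`).then((res) => res.status);
  },
  {
    targetHost: "10.0.0.5",
    targetPort: 8080,
    relayHost: "relay.example.com",
    relayPort: 8443,
    tlsOptions,
    identityId: "<identity-id>",
    orgId: "<org-id>"
  }
);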
export { pingGatewayAndVerify, withGatewayProxy } from "./gateway";
export { GatewayHttpProxyActions, GatewayProxyProtocol } from "./types";
42
backend/src/lib/gateway/types.ts
Normal file
@@ -0,0 +1,42 @@
import net from "node:net";

import https from "https";

export type TGatewayTlsOptions = { ca: string; cert: string; key: string };

export enum GatewayProxyProtocol {
  Http = "http",
  Tcp = "tcp"
}

export enum GatewayHttpProxyActions {
  InjectGatewayK8sServiceAccountToken = "inject-k8s-sa-auth-token"
}

export interface IGatewayProxyOptions {
  targetHost: string;
  targetPort: number;
  relayHost: string;
  relayPort: number;
  tlsOptions: TGatewayTlsOptions;
  identityId: string;
  orgId: string;
  protocol: GatewayProxyProtocol;
  httpsAgent?: https.Agent;
}

export type TPingGatewayAndVerifyDTO = {
  relayHost: string;
  relayPort: number;
  tlsOptions: TGatewayTlsOptions;
  maxRetries?: number;
  identityId: string;
  orgId: string;
};

export interface IGatewayProxyServer {
  server: net.Server;
  port: number;
  cleanup: () => Promise<void>;
  getProxyError: () => string;
}
@@ -179,13 +179,18 @@ export const ormify = <DbOps extends object, Tname extends keyof Tables>(db: Kne
        throw new DatabaseError({ error, name: "batchInsert" });
      }
    },
    upsert: async (data: readonly Tables[Tname]["insert"][], onConflictField: keyof Tables[Tname]["base"], tx?: Knex) => {
    upsert: async (
      data: readonly Tables[Tname]["insert"][],
      onConflictField: keyof Tables[Tname]["base"] | Array<keyof Tables[Tname]["base"]>,
      tx?: Knex,
      mergeColumns?: (keyof Knex.ResolveTableType<Knex.TableType<Tname>, "update">)[] | undefined
    ) => {
      try {
        if (!data.length) return [];
        const res = await (tx || db)(tableName)
          .insert(data as never)
          .onConflict(onConflictField as never)
          .merge()
          .merge(mergeColumns)
          .returning("*");
        return res;
      } catch (error) {
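A hedged call-site sketch (the DAL instance and column names are invented for illustration): the widened signature accepts a composite conflict target and an optional allow-list of columns to merge, so an upsert can update only the mutable fields. When mergeColumns is omitted, Knex's .merge() falls back to merging every inserted column.

// e.g. conflict on a (projectId, name) unique index, merging only "value":
await secretDAL.upsert(rows, ["projectId", "name"], tx, ["value"]);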
@@ -9,3 +9,5 @@ export const DistinguishedNameRegex =
export const UserPrincipalNameRegex = new RE2(/^[a-zA-Z0-9._%+-]+@[a-zA-Z0-9._-]+\.[a-zA-Z]{2,}$/);

export const LdapUrlRegex = new RE2(/^ldaps?:\/\//);

export const GitHubRepositoryRegex = new RE2(/^[a-zA-Z0-9._-]+\/[a-zA-Z0-9._-]+$/);
@@ -78,3 +78,9 @@ export type OrgServiceActor = {
  authMethod: ActorAuthMethod;
  orgId: string;
};

export enum QueueWorkerProfile {
  All = "all",
  Standard = "standard",
  SecretScanning = "secret-scanning"
}
@@ -11,9 +11,15 @@ import {
  TScanFullRepoEventPayload,
  TScanPushEventPayload
} from "@app/ee/services/secret-scanning/secret-scanning-queue/secret-scanning-queue-types";
import {
  TQueueSecretScanningDataSourceFullScan,
  TQueueSecretScanningResourceDiffScan,
  TQueueSecretScanningSendNotification
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-types";
import { getConfig } from "@app/lib/config/env";
import { buildRedisFromConfig, TRedisConfigKeys } from "@app/lib/config/redis";
import { logger } from "@app/lib/logger";
import { QueueWorkerProfile } from "@app/lib/types";
import { CaType } from "@app/services/certificate-authority/certificate-authority-enums";
import {
  TFailedIntegrationSyncEmailsPayload,
@@ -54,7 +60,8 @@ export enum QueueName {
  ImportSecretsFromExternalSource = "import-secrets-from-external-source",
  AppConnectionSecretSync = "app-connection-secret-sync",
  SecretRotationV2 = "secret-rotation-v2",
  InvalidateCache = "invalidate-cache"
  InvalidateCache = "invalidate-cache",
  SecretScanningV2 = "secret-scanning-v2"
}

export enum QueueJobs {
@@ -88,6 +95,9 @@ export enum QueueJobs {
  SecretRotationV2RotateSecrets = "secret-rotation-v2-rotate-secrets",
  SecretRotationV2SendNotification = "secret-rotation-v2-send-notification",
  InvalidateCache = "invalidate-cache",
  SecretScanningV2FullScan = "secret-scanning-v2-full-scan",
  SecretScanningV2DiffScan = "secret-scanning-v2-diff-scan",
  SecretScanningV2SendNotification = "secret-scanning-v2-notification",
  CaOrderCertificateForSubscriber = "ca-order-certificate-for-subscriber",
  PkiSubscriberDailyAutoRenewal = "pki-subscriber-daily-auto-renewal"
}
@@ -250,6 +260,19 @@ export type TQueueJobTypes = {
      };
    };
  };
  [QueueName.SecretScanningV2]:
    | {
        name: QueueJobs.SecretScanningV2FullScan;
        payload: TQueueSecretScanningDataSourceFullScan;
      }
    | {
        name: QueueJobs.SecretScanningV2DiffScan;
        payload: TQueueSecretScanningResourceDiffScan;
      }
    | {
        name: QueueJobs.SecretScanningV2SendNotification;
        payload: TQueueSecretScanningSendNotification;
      };
  [QueueName.CaLifecycle]: {
    name: QueueJobs.CaOrderCertificateForSubscriber;
    payload: {
@@ -263,6 +286,37 @@ export type TQueueJobTypes = {
  };
};

const SECRET_SCANNING_JOBS = [
  QueueJobs.SecretScanningV2FullScan,
  QueueJobs.SecretScanningV2DiffScan,
  QueueJobs.SecretScanningV2SendNotification,
  QueueJobs.SecretScan
];

const NON_STANDARD_JOBS = [...SECRET_SCANNING_JOBS];

const SECRET_SCANNING_QUEUES = [
  QueueName.SecretScanningV2,
  QueueName.SecretFullRepoScan,
  QueueName.SecretPushEventScan
];

const NON_STANDARD_QUEUES = [...SECRET_SCANNING_QUEUES];

const isQueueEnabled = (name: QueueName) => {
  const appCfg = getConfig();
  switch (appCfg.QUEUE_WORKER_PROFILE) {
    case QueueWorkerProfile.Standard:
      return !NON_STANDARD_QUEUES.includes(name);
    case QueueWorkerProfile.SecretScanning:
      return SECRET_SCANNING_QUEUES.includes(name);
    case QueueWorkerProfile.All:
    default:
      // allow all
      return true;
  }
};
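A standalone sketch of the routing rule above (queue-name strings are abbreviated guesses, not the repo's actual values): each deployment sets QUEUE_WORKER_PROFILE so its workers attach only to the queues that profile owns, letting secret-scanning load run on dedicated instances while standard workers skip it.

enum Profile { All = "all", Standard = "standard", SecretScanning = "secret-scanning" }

const scanningQueues = new Set(["secret-scanning-v2", "secret-full-repo-scan", "secret-push-event-scan"]);

const enabled = (profile: Profile, queue: string): boolean => {
  if (profile === Profile.Standard) return !scanningQueues.has(queue); // everything except scanning
  if (profile === Profile.SecretScanning) return scanningQueues.has(queue); // only scanning
  return true; // Profile.All processes everything
};

console.log(enabled(Profile.Standard, "secret-scanning-v2")); // false
console.log(enabled(Profile.SecretScanning, "secret-scanning-v2")); // true
console.log(enabled(Profile.All, "invalidate-cache")); // true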
export type TQueueServiceFactory = ReturnType<typeof queueServiceFactory>;
export const queueServiceFactory = (
  redisCfg: TRedisConfigKeys,
@@ -319,7 +373,7 @@ export const queueServiceFactory = (
    });

    const appCfg = getConfig();
    if (appCfg.QUEUE_WORKERS_ENABLED) {
    if (appCfg.QUEUE_WORKERS_ENABLED && isQueueEnabled(name)) {
      workerContainer[name] = new Worker<TQueueJobTypes[T]["payload"], void, TQueueJobTypes[T]["name"]>(name, jobFn, {
        ...queueSettings,
        connection
@@ -338,6 +392,30 @@ export const queueServiceFactory = (
      throw new Error(`${jobName} queue is already initialized`);
    }

    const appCfg = getConfig();

    if (!appCfg.QUEUE_WORKERS_ENABLED) return;

    switch (appCfg.QUEUE_WORKER_PROFILE) {
      case QueueWorkerProfile.Standard:
        if (NON_STANDARD_JOBS.includes(jobName)) {
          // only process standard jobs
          return;
        }

        break;
      case QueueWorkerProfile.SecretScanning:
        if (!SECRET_SCANNING_JOBS.includes(jobName)) {
          // only process secret scanning jobs
          return;
        }

        break;
      case QueueWorkerProfile.All:
      default:
        // allow all
    }

    await pgBoss.createQueue(jobName);
    queueContainerPg[jobName] = true;

@@ -357,7 +435,7 @@ export const queueServiceFactory = (
    listener: WorkerListener<TQueueJobTypes[T]["payload"], void, TQueueJobTypes[T]["name"]>[U]
  ) => {
    const appCfg = getConfig();
    if (!appCfg.QUEUE_WORKERS_ENABLED) {
    if (!appCfg.QUEUE_WORKERS_ENABLED || !isQueueEnabled(name)) {
      return;
    }
@@ -11,7 +11,7 @@ export const globalRateLimiterCfg = (): RateLimitPluginOptions => {
  return {
    errorResponseBuilder: (_, context) => {
      throw new RateLimitError({
        message: `Rate limit exceeded. Please try again in ${context.after}`
        message: `Rate limit exceeded. Please try again in ${Math.ceil(context.ttl / 1000)} seconds`
      });
    },
    timeWindow: 60 * 1000,
@@ -113,3 +113,12 @@ export const requestAccessLimit: RateLimitOptions = {
  max: 10,
  keyGenerator: (req) => req.realIp
};

export const smtpRateLimit = ({
  keyGenerator = (req) => req.realIp
}: Pick<RateLimitOptions, "keyGenerator"> = {}): RateLimitOptions => ({
  timeWindow: 40 * 1000,
  hook: "preValidation",
  max: 2,
  keyGenerator
});
66
backend/src/server/plugins/secret-scanner-v2.ts
Normal file
@@ -0,0 +1,66 @@
import type { EmitterWebhookEventName } from "@octokit/webhooks/dist-types/types";
import { PushEvent } from "@octokit/webhooks-types";
import { Probot } from "probot";

import { getConfig } from "@app/lib/config/env";
import { logger } from "@app/lib/logger";
import { writeLimit } from "@app/server/config/rateLimiter";

export const registerSecretScanningV2Webhooks = async (server: FastifyZodProvider) => {
  const probotApp = (app: Probot) => {
    app.on("installation.deleted", async (context) => {
      const { payload } = context;
      const { installation } = payload;

      await server.services.secretScanningV2.github.handleInstallationDeletedEvent(installation.id);
    });

    app.on("installation", async (context) => {
      const { payload } = context;
      logger.info({ repositories: payload.repositories }, "Installed secret scanner to");
    });

    app.on("push", async (context) => {
      const { payload } = context;
      await server.services.secretScanningV2.github.handlePushEvent(payload as PushEvent);
    });
  };

  const appCfg = getConfig();

  if (!appCfg.isSecretScanningV2Configured) {
    logger.info("Secret Scanning V2 is not configured. Skipping registration of secret scanning v2 webhooks.");
    return;
  }

  const probot = new Probot({
    appId: appCfg.INF_APP_CONNECTION_GITHUB_RADAR_APP_ID as string,
    privateKey: appCfg.INF_APP_CONNECTION_GITHUB_RADAR_APP_PRIVATE_KEY as string,
    secret: appCfg.INF_APP_CONNECTION_GITHUB_RADAR_APP_WEBHOOK_SECRET as string
  });

  await probot.load(probotApp);

  // github push event webhook
  server.route({
    method: "POST",
    url: "/github",
    config: {
      rateLimit: writeLimit
    },
    handler: async (req, res) => {
      const eventName = req.headers["x-github-event"] as EmitterWebhookEventName;
      const signatureSHA256 = req.headers["x-hub-signature-256"] as string;
      const id = req.headers["x-github-delivery"] as string;

      await probot.webhooks.verifyAndReceive({
        id,
        name: eventName,
        payload: JSON.stringify(req.body),
        signature: signatureSHA256
      });

      return res.send("ok");
    }
  });
};
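A test-only sketch (base URL and port are assumptions; the /secret-scanning/webhooks prefix comes from the route registration further below): GitHub signs the raw delivery body with HMAC-SHA256 using the app's webhook secret, so a simulated push must carry a matching x-hub-signature-256 header for verifyAndReceive to accept it.

import crypto from "node:crypto";

const body = JSON.stringify({ ref: "refs/heads/main", commits: [] });
const secret = process.env.INF_APP_CONNECTION_GITHUB_RADAR_APP_WEBHOOK_SECRET ?? "";
// GitHub's signature format: "sha256=" + hex HMAC of the raw body
const signature = `sha256=${crypto.createHmac("sha256", secret).update(body).digest("hex")}`;

await fetch("http://localhost:8080/secret-scanning/webhooks/github", {
  method: "POST",
  headers: {
    "content-type": "application/json",
    "x-github-event": "push",
    "x-github-delivery": crypto.randomUUID(),
    "x-hub-signature-256": signature
  },
  body
});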
@@ -92,6 +92,9 @@ import { gitAppInstallSessionDALFactory } from "@app/ee/services/secret-scanning
import { secretScanningDALFactory } from "@app/ee/services/secret-scanning/secret-scanning-dal";
import { secretScanningQueueFactory } from "@app/ee/services/secret-scanning/secret-scanning-queue";
import { secretScanningServiceFactory } from "@app/ee/services/secret-scanning/secret-scanning-service";
import { secretScanningV2DALFactory } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-dal";
import { secretScanningV2QueueServiceFactory } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-queue";
import { secretScanningV2ServiceFactory } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-service";
import { secretSnapshotServiceFactory } from "@app/ee/services/secret-snapshot/secret-snapshot-service";
import { snapshotDALFactory } from "@app/ee/services/secret-snapshot/snapshot-dal";
import { snapshotFolderDALFactory } from "@app/ee/services/secret-snapshot/snapshot-folder-dal";
@@ -118,6 +121,7 @@ import { getConfig, TEnvConfig } from "@app/lib/config/env";
import { logger } from "@app/lib/logger";
import { TQueueServiceFactory } from "@app/queue";
import { readLimit } from "@app/server/config/rateLimiter";
import { registerSecretScanningV2Webhooks } from "@app/server/plugins/secret-scanner-v2";
import { accessTokenQueueServiceFactory } from "@app/services/access-token-queue/access-token-queue";
import { apiKeyDALFactory } from "@app/services/api-key/api-key-dal";
import { apiKeyServiceFactory } from "@app/services/api-key/api-key-service";
@@ -312,6 +316,9 @@ export const registerRoutes = async (
) => {
  const appCfg = getConfig();
  await server.register(registerSecretScannerGhApp, { prefix: "/ss-webhook" });
  await server.register(registerSecretScanningV2Webhooks, {
    prefix: "/secret-scanning/webhooks"
  });

  // db layers
  const userDAL = userDALFactory(db);
@@ -459,6 +466,7 @@ export const registerRoutes = async (
  const secretRotationV2DAL = secretRotationV2DALFactory(db, folderDAL);
  const microsoftTeamsIntegrationDAL = microsoftTeamsIntegrationDALFactory(db);
  const projectMicrosoftTeamsConfigDAL = projectMicrosoftTeamsConfigDALFactory(db);
  const secretScanningV2DAL = secretScanningV2DALFactory(db);

  const permissionService = permissionServiceFactory({
    permissionDAL,
@@ -1784,6 +1792,26 @@ export const registerRoutes = async (
    smtpService
  });

  const secretScanningV2Queue = await secretScanningV2QueueServiceFactory({
    auditLogService,
    secretScanningV2DAL,
    queueService,
    projectDAL,
    projectMembershipDAL,
    smtpService,
    kmsService,
    keyStore
  });

  const secretScanningV2Service = secretScanningV2ServiceFactory({
    permissionService,
    appConnectionService,
    licenseService,
    secretScanningV2DAL,
    secretScanningV2Queue,
    kmsService
  });

  await superAdminService.initServerCfg();

  // setup the communication with license key server
@@ -1898,7 +1926,8 @@ export const registerRoutes = async (
    secretRotationV2: secretRotationV2Service,
    microsoftTeams: microsoftTeamsService,
    assumePrivileges: assumePrivilegeService,
    githubOrgSync: githubOrgSyncConfigService
    githubOrgSync: githubOrgSyncConfigService,
    secretScanningV2: secretScanningV2Service
  });

  const cronJobs: CronJob[] = [];
@@ -33,6 +33,10 @@ import {
} from "@app/services/app-connection/databricks";
import { GcpConnectionListItemSchema, SanitizedGcpConnectionSchema } from "@app/services/app-connection/gcp";
import { GitHubConnectionListItemSchema, SanitizedGitHubConnectionSchema } from "@app/services/app-connection/github";
import {
  GitHubRadarConnectionListItemSchema,
  SanitizedGitHubRadarConnectionSchema
} from "@app/services/app-connection/github-radar";
import {
  HCVaultConnectionListItemSchema,
  SanitizedHCVaultConnectionSchema
@@ -67,6 +71,7 @@ import { AuthMode } from "@app/services/auth/auth-type";
const SanitizedAppConnectionSchema = z.union([
  ...SanitizedAwsConnectionSchema.options,
  ...SanitizedGitHubConnectionSchema.options,
  ...SanitizedGitHubRadarConnectionSchema.options,
  ...SanitizedGcpConnectionSchema.options,
  ...SanitizedAzureKeyVaultConnectionSchema.options,
  ...SanitizedAzureAppConfigurationConnectionSchema.options,
@@ -91,6 +96,7 @@ const SanitizedAppConnectionSchema = z.union([
const AppConnectionOptionsSchema = z.discriminatedUnion("app", [
  AwsConnectionListItemSchema,
  GitHubConnectionListItemSchema,
  GitHubRadarConnectionListItemSchema,
  GcpConnectionListItemSchema,
  AzureKeyVaultConnectionListItemSchema,
  AzureAppConfigurationConnectionListItemSchema,
@@ -0,0 +1,54 @@
import { z } from "zod";

import { readLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
import {
  CreateGitHubRadarConnectionSchema,
  SanitizedGitHubRadarConnectionSchema,
  UpdateGitHubRadarConnectionSchema
} from "@app/services/app-connection/github-radar";
import { AuthMode } from "@app/services/auth/auth-type";

import { registerAppConnectionEndpoints } from "./app-connection-endpoints";

export const registerGitHubRadarConnectionRouter = async (server: FastifyZodProvider) => {
  registerAppConnectionEndpoints({
    app: AppConnection.GitHubRadar,
    server,
    sanitizedResponseSchema: SanitizedGitHubRadarConnectionSchema,
    createSchema: CreateGitHubRadarConnectionSchema,
    updateSchema: UpdateGitHubRadarConnectionSchema
  });

  // The below endpoints are not exposed and for Infisical App use

  server.route({
    method: "GET",
    url: `/:connectionId/repositories`,
    config: {
      rateLimit: readLimit
    },
    schema: {
      params: z.object({
        connectionId: z.string().uuid()
      }),
      response: {
        200: z.object({
          repositories: z.object({ id: z.number(), name: z.string() }).array()
        })
      }
    },
    onRequest: verifyAuth([AuthMode.JWT]),
    handler: async (req) => {
      const { connectionId } = req.params;

      const repositories = await server.services.appConnection.githubRadar.listRepositories(
        connectionId,
        req.permission
      );

      return { repositories };
    }
  });
};
@@ -11,6 +11,7 @@ import { registerCamundaConnectionRouter } from "./camunda-connection-router";
import { registerDatabricksConnectionRouter } from "./databricks-connection-router";
import { registerGcpConnectionRouter } from "./gcp-connection-router";
import { registerGitHubConnectionRouter } from "./github-connection-router";
import { registerGitHubRadarConnectionRouter } from "./github-radar-connection-router";
import { registerHCVaultConnectionRouter } from "./hc-vault-connection-router";
import { registerHumanitecConnectionRouter } from "./humanitec-connection-router";
import { registerLdapConnectionRouter } from "./ldap-connection-router";
@@ -28,6 +29,7 @@ export const APP_CONNECTION_REGISTER_ROUTER_MAP: Record<AppConnection, (server:
  {
    [AppConnection.AWS]: registerAwsConnectionRouter,
    [AppConnection.GitHub]: registerGitHubConnectionRouter,
    [AppConnection.GitHubRadar]: registerGitHubRadarConnectionRouter,
    [AppConnection.GCP]: registerGcpConnectionRouter,
    [AppConnection.AzureKeyVault]: registerAzureKeyVaultConnectionRouter,
    [AppConnection.AzureAppConfiguration]: registerAzureAppConfigurationConnectionRouter,
@@ -8,6 +8,7 @@ import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
import { TIdentityTrustedIp } from "@app/services/identity/identity-types";
import { IdentityKubernetesAuthTokenReviewMode } from "@app/services/identity-kubernetes-auth/identity-kubernetes-auth-types";
import { isSuperAdmin } from "@app/services/super-admin/super-admin-fns";

const IdentityKubernetesAuthResponseSchema = IdentityKubernetesAuthsSchema.pick({
@@ -18,6 +19,7 @@ const IdentityKubernetesAuthResponseSchema = IdentityKubernetesAuthsSchema.pick(
  accessTokenTrustedIps: true,
  createdAt: true,
  updatedAt: true,
  tokenReviewMode: true,
  identityId: true,
  kubernetesHost: true,
  allowedNamespaces: true,
@@ -124,6 +126,10 @@ export const registerIdentityKubernetesRouter = async (server: FastifyZodProvide
          ),
          caCert: z.string().trim().default("").describe(KUBERNETES_AUTH.ATTACH.caCert),
          tokenReviewerJwt: z.string().trim().optional().describe(KUBERNETES_AUTH.ATTACH.tokenReviewerJwt),
          tokenReviewMode: z
            .nativeEnum(IdentityKubernetesAuthTokenReviewMode)
            .default(IdentityKubernetesAuthTokenReviewMode.Api)
            .describe(KUBERNETES_AUTH.ATTACH.tokenReviewMode),
          allowedNamespaces: z.string().describe(KUBERNETES_AUTH.ATTACH.allowedNamespaces), // TODO: validation
          allowedNames: z.string().describe(KUBERNETES_AUTH.ATTACH.allowedNames),
          allowedAudience: z.string().describe(KUBERNETES_AUTH.ATTACH.allowedAudience),
@@ -157,10 +163,22 @@ export const registerIdentityKubernetesRouter = async (server: FastifyZodProvide
            .default(0)
            .describe(KUBERNETES_AUTH.ATTACH.accessTokenNumUsesLimit)
        })
        .refine(
          (val) => val.accessTokenTTL <= val.accessTokenMaxTTL,
          "Access Token TTL cannot be greater than Access Token Max TTL."
        ),
        .superRefine((data, ctx) => {
          if (data.tokenReviewMode === IdentityKubernetesAuthTokenReviewMode.Gateway && !data.gatewayId) {
            ctx.addIssue({
              path: ["gatewayId"],
              code: z.ZodIssueCode.custom,
              message: "When token review mode is set to Gateway, a gateway must be selected"
            });
          }
          if (data.accessTokenTTL > data.accessTokenMaxTTL) {
            ctx.addIssue({
              path: ["accessTokenTTL"],
              code: z.ZodIssueCode.custom,
              message: "Access Token TTL cannot be greater than Access Token Max TTL."
            });
          }
        }),
      response: {
        200: z.object({
          identityKubernetesAuth: IdentityKubernetesAuthResponseSchema
@@ -247,6 +265,10 @@ export const registerIdentityKubernetesRouter = async (server: FastifyZodProvide
          ),
          caCert: z.string().trim().optional().describe(KUBERNETES_AUTH.UPDATE.caCert),
          tokenReviewerJwt: z.string().trim().nullable().optional().describe(KUBERNETES_AUTH.UPDATE.tokenReviewerJwt),
          tokenReviewMode: z
            .nativeEnum(IdentityKubernetesAuthTokenReviewMode)
            .optional()
            .describe(KUBERNETES_AUTH.UPDATE.tokenReviewMode),
          allowedNamespaces: z.string().optional().describe(KUBERNETES_AUTH.UPDATE.allowedNamespaces), // TODO: validation
          allowedNames: z.string().optional().describe(KUBERNETES_AUTH.UPDATE.allowedNames),
          allowedAudience: z.string().optional().describe(KUBERNETES_AUTH.UPDATE.allowedAudience),
@@ -280,10 +302,26 @@ export const registerIdentityKubernetesRouter = async (server: FastifyZodProvide
            .optional()
            .describe(KUBERNETES_AUTH.UPDATE.accessTokenMaxTTL)
        })
        .refine(
          (val) => (val.accessTokenMaxTTL && val.accessTokenTTL ? val.accessTokenTTL <= val.accessTokenMaxTTL : true),
          "Access Token TTL cannot be greater than Access Token Max TTL."
        ),
        .superRefine((data, ctx) => {
          if (
            data.tokenReviewMode &&
            data.tokenReviewMode === IdentityKubernetesAuthTokenReviewMode.Gateway &&
            !data.gatewayId
          ) {
            ctx.addIssue({
              path: ["gatewayId"],
              code: z.ZodIssueCode.custom,
              message: "When token review mode is set to Gateway, a gateway must be selected"
            });
          }
          if (data.accessTokenMaxTTL && data.accessTokenTTL ? data.accessTokenTTL > data.accessTokenMaxTTL : false) {
            ctx.addIssue({
              path: ["accessTokenTTL"],
              code: z.ZodIssueCode.custom,
              message: "Access Token TTL cannot be greater than Access Token Max TTL."
            });
          }
        }),
      response: {
        200: z.object({
          identityKubernetesAuth: IdentityKubernetesAuthResponseSchema
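A standalone sketch of the pattern being introduced above: .superRefine replaces a single .refine when a schema needs several cross-field checks, each reported against its own field path instead of as one schema-level error.

import { z } from "zod";

const schema = z
  .object({ accessTokenTTL: z.number(), accessTokenMaxTTL: z.number() })
  .superRefine((data, ctx) => {
    // each failed check adds its own issue, attributed to a specific field
    if (data.accessTokenTTL > data.accessTokenMaxTTL) {
      ctx.addIssue({
        path: ["accessTokenTTL"],
        code: z.ZodIssueCode.custom,
        message: "Access Token TTL cannot be greater than Access Token Max TTL."
      });
    }
  });

console.log(schema.safeParse({ accessTokenTTL: 100, accessTokenMaxTTL: 10 }).success); // false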
@@ -1,7 +1,7 @@
import { z } from "zod";

import { OrgMembershipRole, ProjectMembershipRole, UsersSchema } from "@app/db/schemas";
import { inviteUserRateLimit } from "@app/server/config/rateLimiter";
import { inviteUserRateLimit, smtpRateLimit } from "@app/server/config/rateLimiter";
import { getTelemetryDistinctId } from "@app/server/lib/telemetry";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { ActorType, AuthMode } from "@app/services/auth/auth-type";
@@ -11,7 +11,7 @@ export const registerInviteOrgRouter = async (server: FastifyZodProvider) => {
  server.route({
    url: "/signup",
    config: {
      rateLimit: inviteUserRateLimit
      rateLimit: smtpRateLimit()
    },
    method: "POST",
    schema: {
@@ -81,7 +81,10 @@ export const registerInviteOrgRouter = async (server: FastifyZodProvider) => {
  server.route({
    url: "/signup-resend",
    config: {
      rateLimit: inviteUserRateLimit
      rateLimit: smtpRateLimit({
        keyGenerator: (req) =>
          (req.body as { membershipId?: string })?.membershipId?.trim().substring(0, 100) ?? req.realIp
      })
    },
    method: "POST",
    schema: {
@@ -2,9 +2,9 @@ import { z } from "zod";

import { ProjectMembershipsSchema } from "@app/db/schemas";
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { readLimit, smtpRateLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
import { ActorType, AuthMode } from "@app/services/auth/auth-type";

import { SanitizedProjectSchema } from "../sanitizedSchemas";

@@ -47,7 +47,9 @@ export const registerOrgAdminRouter = async (server: FastifyZodProvider) => {
    method: "POST",
    url: "/projects/:projectId/grant-admin-access",
    config: {
      rateLimit: writeLimit
      rateLimit: smtpRateLimit({
        keyGenerator: (req) => (req.auth.actor === ActorType.USER ? req.auth.userId : req.realIp)
      })
    },
    schema: {
      params: z.object({
@@ -2,10 +2,10 @@ import { z } from "zod";

import { BackupPrivateKeySchema, UsersSchema } from "@app/db/schemas";
import { getConfig } from "@app/lib/config/env";
import { authRateLimit } from "@app/server/config/rateLimiter";
import { authRateLimit, smtpRateLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { validateSignUpAuthorization } from "@app/services/auth/auth-fns";
import { AuthMode } from "@app/services/auth/auth-type";
import { ActorType, AuthMode } from "@app/services/auth/auth-type";
import { UserEncryption } from "@app/services/user/user-types";

export const registerPasswordRouter = async (server: FastifyZodProvider) => {
@@ -80,7 +80,9 @@ export const registerPasswordRouter = async (server: FastifyZodProvider) => {
    method: "POST",
    url: "/email/password-reset",
    config: {
      rateLimit: authRateLimit
      rateLimit: smtpRateLimit({
        keyGenerator: (req) => (req.body as { email?: string })?.email?.trim().substring(0, 100) ?? req.realIp
      })
    },
    schema: {
      body: z.object({
@@ -224,7 +226,9 @@ export const registerPasswordRouter = async (server: FastifyZodProvider) => {
    method: "POST",
    url: "/email/password-setup",
    config: {
      rateLimit: authRateLimit
      rateLimit: smtpRateLimit({
        keyGenerator: (req) => (req.auth.actor === ActorType.USER ? req.auth.userId : req.realIp)
      })
    },
    schema: {
      response: {
@@ -233,6 +237,7 @@ export const registerPasswordRouter = async (server: FastifyZodProvider) => {
        })
      }
    },
    onRequest: verifyAuth([AuthMode.JWT]),
    handler: async (req) => {
      await server.services.password.sendPasswordSetupEmail(req.permission);

@@ -267,6 +272,7 @@ export const registerPasswordRouter = async (server: FastifyZodProvider) => {
        })
      }
    },
    onRequest: verifyAuth([AuthMode.JWT]),
    handler: async (req, res) => {
      await server.services.password.setupPassword(req.body, req.permission);
@@ -160,7 +160,14 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
          .default("false")
          .transform((value) => value === "true"),
        type: z
          .enum([ProjectType.SecretManager, ProjectType.KMS, ProjectType.CertificateManager, ProjectType.SSH, "all"])
          .enum([
            ProjectType.SecretManager,
            ProjectType.KMS,
            ProjectType.CertificateManager,
            ProjectType.SSH,
            ProjectType.SecretScanning,
            "all"
          ])
          .optional()
      }),
      response: {
@@ -173,7 +180,7 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
        })
      }
    },
    onRequest: verifyAuth([AuthMode.JWT, AuthMode.API_KEY]),
    onRequest: verifyAuth([AuthMode.JWT, AuthMode.API_KEY, AuthMode.IDENTITY_ACCESS_TOKEN]),
    handler: async (req) => {
      const workspaces = await server.services.project.getProjects({
        includeRoles: req.query.includeRoles,
@@ -4,9 +4,11 @@ import {
  GroupProjectMembershipsSchema,
  GroupsSchema,
  ProjectMembershipRole,
  ProjectUserMembershipRolesSchema
  ProjectUserMembershipRolesSchema,
  UsersSchema
} from "@app/db/schemas";
import { ApiDocsTags, PROJECTS } from "@app/lib/api-docs";
import { EFilterReturnedUsers } from "@app/ee/services/group/group-types";
import { ApiDocsTags, GROUPS, PROJECTS } from "@app/lib/api-docs";
import { ms } from "@app/lib/ms";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
@@ -301,4 +303,61 @@ export const registerGroupProjectRouter = async (server: FastifyZodProvider) =>
      return { groupMembership };
    }
  });

  server.route({
    method: "GET",
    url: "/:projectId/groups/:groupId/users",
    onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
    config: {
      rateLimit: readLimit
    },
    schema: {
      hide: false,
      tags: [ApiDocsTags.ProjectGroups],
      description: "Return project group users",
      params: z.object({
        projectId: z.string().trim().describe(GROUPS.LIST_USERS.projectId),
        groupId: z.string().trim().describe(GROUPS.LIST_USERS.id)
      }),
      querystring: z.object({
        offset: z.coerce.number().min(0).max(100).default(0).describe(GROUPS.LIST_USERS.offset),
        limit: z.coerce.number().min(1).max(100).default(10).describe(GROUPS.LIST_USERS.limit),
        username: z.string().trim().optional().describe(GROUPS.LIST_USERS.username),
        search: z.string().trim().optional().describe(GROUPS.LIST_USERS.search),
        filter: z.nativeEnum(EFilterReturnedUsers).optional().describe(GROUPS.LIST_USERS.filterUsers)
      }),
      response: {
        200: z.object({
          users: UsersSchema.pick({
            email: true,
            username: true,
            firstName: true,
            lastName: true,
            id: true
          })
            .merge(
              z.object({
                isPartOfGroup: z.boolean(),
                joinedGroupAt: z.date().nullable()
              })
            )
            .array(),
          totalCount: z.number()
        })
      }
    },
    handler: async (req) => {
      const { users, totalCount } = await server.services.groupProject.listProjectGroupUsers({
        id: req.params.groupId,
        projectId: req.params.projectId,
        actor: req.permission.type,
        actorId: req.permission.id,
        actorAuthMethod: req.permission.authMethod,
        actorOrgId: req.permission.orgId,
        ...req.query
      });

      return { users, totalCount };
    }
  });
};
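A hedged client sketch (the host, API prefix, and token are assumptions; only the trailing route shape /:projectId/groups/:groupId/users and the query parameters come from the diff): listing the users of a project group with pagination.

const baseUrl = "https://app.infisical.com/api/v2/workspace"; // prefix assumed
const res = await fetch(`${baseUrl}/<project-id>/groups/<group-id>/users?offset=0&limit=10`, {
  headers: { authorization: "Bearer <access-token>" }
});
const { users, totalCount } = await res.json();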
@@ -2,7 +2,7 @@ import { z } from "zod";

import { AuthTokenSessionsSchema, UserEncryptionKeysSchema, UsersSchema } from "@app/db/schemas";
import { ApiKeysSchema } from "@app/db/schemas/api-keys";
import { authRateLimit, readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { authRateLimit, readLimit, smtpRateLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMethod, AuthMode, MfaMethod } from "@app/services/auth/auth-type";
import { sanitizedOrganizationSchema } from "@app/services/org/org-schema";
@@ -12,7 +12,9 @@ export const registerUserRouter = async (server: FastifyZodProvider) => {
    method: "POST",
    url: "/me/emails/code",
    config: {
      rateLimit: authRateLimit
      rateLimit: smtpRateLimit({
        keyGenerator: (req) => (req.body as { username?: string })?.username?.trim().substring(0, 100) ?? req.realIp
      })
    },
    schema: {
      body: z.object({
@@ -3,7 +3,7 @@ import { z } from "zod";
import { UsersSchema } from "@app/db/schemas";
import { getConfig } from "@app/lib/config/env";
import { ForbiddenRequestError } from "@app/lib/errors";
import { authRateLimit } from "@app/server/config/rateLimiter";
import { authRateLimit, smtpRateLimit } from "@app/server/config/rateLimiter";
import { GenericResourceNameSchema } from "@app/server/lib/schemas";
import { getServerCfg } from "@app/services/super-admin/super-admin-service";
import { PostHogEventTypes } from "@app/services/telemetry/telemetry-types";
@@ -13,7 +13,9 @@ export const registerSignupRouter = async (server: FastifyZodProvider) => {
    url: "/email/signup",
    method: "POST",
    config: {
      rateLimit: authRateLimit
      rateLimit: smtpRateLimit({
        keyGenerator: (req) => (req.body as { email?: string })?.email?.trim().substring(0, 100) ?? req.realIp
      })
    },
    schema: {
      body: z.object({
@@ -1,5 +1,6 @@
export enum AppConnection {
  GitHub = "github",
  GitHubRadar = "github-radar",
  AWS = "aws",
  Databricks = "databricks",
  GCP = "gcp",
@@ -52,6 +52,11 @@ import {
} from "./databricks";
import { GcpConnectionMethod, getGcpConnectionListItem, validateGcpConnectionCredentials } from "./gcp";
import { getGitHubConnectionListItem, GitHubConnectionMethod, validateGitHubConnectionCredentials } from "./github";
import {
  getGitHubRadarConnectionListItem,
  GitHubRadarConnectionMethod,
  validateGitHubRadarConnectionCredentials
} from "./github-radar";
import {
  getHCVaultConnectionListItem,
  HCVaultConnectionMethod,
@@ -89,6 +94,7 @@ export const listAppConnectionOptions = () => {
  return [
    getAwsConnectionListItem(),
    getGitHubConnectionListItem(),
    getGitHubRadarConnectionListItem(),
    getGcpConnectionListItem(),
    getAzureKeyVaultConnectionListItem(),
    getAzureAppConfigurationConnectionListItem(),
@@ -160,6 +166,7 @@ export const validateAppConnectionCredentials = async (
  [AppConnection.AWS]: validateAwsConnectionCredentials as TAppConnectionCredentialsValidator,
  [AppConnection.Databricks]: validateDatabricksConnectionCredentials as TAppConnectionCredentialsValidator,
  [AppConnection.GitHub]: validateGitHubConnectionCredentials as TAppConnectionCredentialsValidator,
  [AppConnection.GitHubRadar]: validateGitHubRadarConnectionCredentials as TAppConnectionCredentialsValidator,
  [AppConnection.GCP]: validateGcpConnectionCredentials as TAppConnectionCredentialsValidator,
  [AppConnection.AzureKeyVault]: validateAzureKeyVaultConnectionCredentials as TAppConnectionCredentialsValidator,
  [AppConnection.AzureAppConfiguration]:
@@ -188,6 +195,7 @@ export const validateAppConnectionCredentials = async (
export const getAppConnectionMethodName = (method: TAppConnection["method"]) => {
  switch (method) {
    case GitHubConnectionMethod.App:
    case GitHubRadarConnectionMethod.App:
      return "GitHub App";
    case AzureKeyVaultConnectionMethod.OAuth:
    case AzureAppConfigurationConnectionMethod.OAuth:
@@ -258,6 +266,7 @@ export const TRANSITION_CONNECTION_CREDENTIALS_TO_PLATFORM: Record<
  [AppConnection.AWS]: platformManagedCredentialsNotSupported,
  [AppConnection.Databricks]: platformManagedCredentialsNotSupported,
  [AppConnection.GitHub]: platformManagedCredentialsNotSupported,
  [AppConnection.GitHubRadar]: platformManagedCredentialsNotSupported,
  [AppConnection.GCP]: platformManagedCredentialsNotSupported,
  [AppConnection.AzureKeyVault]: platformManagedCredentialsNotSupported,
  [AppConnection.AzureAppConfiguration]: platformManagedCredentialsNotSupported,
@@ -3,6 +3,7 @@ import { AppConnection, AppConnectionPlanType } from "./app-connection-enums";
export const APP_CONNECTION_NAME_MAP: Record<AppConnection, string> = {
  [AppConnection.AWS]: "AWS",
  [AppConnection.GitHub]: "GitHub",
  [AppConnection.GitHubRadar]: "GitHub Radar",
  [AppConnection.GCP]: "GCP",
  [AppConnection.AzureKeyVault]: "Azure Key Vault",
  [AppConnection.AzureAppConfiguration]: "Azure App Configuration",
@@ -27,6 +28,7 @@ export const APP_CONNECTION_NAME_MAP: Record<AppConnection, string> = {
export const APP_CONNECTION_PLAN_MAP: Record<AppConnection, AppConnectionPlanType> = {
  [AppConnection.AWS]: AppConnectionPlanType.Regular,
  [AppConnection.GitHub]: AppConnectionPlanType.Regular,
  [AppConnection.GitHubRadar]: AppConnectionPlanType.Regular,
  [AppConnection.GCP]: AppConnectionPlanType.Regular,
  [AppConnection.AzureKeyVault]: AppConnectionPlanType.Regular,
  [AppConnection.AzureAppConfiguration]: AppConnectionPlanType.Regular,
@@ -19,6 +19,7 @@ import {
  validateAppConnectionCredentials
} from "@app/services/app-connection/app-connection-fns";
import { auth0ConnectionService } from "@app/services/app-connection/auth0/auth0-connection-service";
import { githubRadarConnectionService } from "@app/services/app-connection/github-radar/github-radar-connection-service";
import { TKmsServiceFactory } from "@app/services/kms/kms-service";

import { ValidateOnePassConnectionCredentialsSchema } from "./1password";
@@ -49,6 +50,7 @@ import { ValidateGcpConnectionCredentialsSchema } from "./gcp";
import { gcpConnectionService } from "./gcp/gcp-connection-service";
import { ValidateGitHubConnectionCredentialsSchema } from "./github";
import { githubConnectionService } from "./github/github-connection-service";
import { ValidateGitHubRadarConnectionCredentialsSchema } from "./github-radar";
import { ValidateHCVaultConnectionCredentialsSchema } from "./hc-vault";
import { hcVaultConnectionService } from "./hc-vault/hc-vault-connection-service";
import { ValidateHumanitecConnectionCredentialsSchema } from "./humanitec";
@@ -78,6 +80,7 @@ export type TAppConnectionServiceFactory = ReturnType<typeof appConnectionServic
const VALIDATE_APP_CONNECTION_CREDENTIALS_MAP: Record<AppConnection, TValidateAppConnectionCredentialsSchema> = {
  [AppConnection.AWS]: ValidateAwsConnectionCredentialsSchema,
  [AppConnection.GitHub]: ValidateGitHubConnectionCredentialsSchema,
  [AppConnection.GitHubRadar]: ValidateGitHubRadarConnectionCredentialsSchema,
  [AppConnection.GCP]: ValidateGcpConnectionCredentialsSchema,
  [AppConnection.AzureKeyVault]: ValidateAzureKeyVaultConnectionCredentialsSchema,
  [AppConnection.AzureAppConfiguration]: ValidateAzureAppConfigurationConnectionCredentialsSchema,
@@ -486,6 +489,7 @@ export const appConnectionServiceFactory = ({
  connectAppConnectionById,
  listAvailableAppConnectionsForUser,
  github: githubConnectionService(connectAppConnectionById),
  githubRadar: githubRadarConnectionService(connectAppConnectionById),
  gcp: gcpConnectionService(connectAppConnectionById),
  databricks: databricksConnectionService(connectAppConnectionById, appConnectionDAL, kmsService),
  aws: awsConnectionService(connectAppConnectionById),
@@ -69,6 +69,12 @@ import {
  TGitHubConnectionInput,
  TValidateGitHubConnectionCredentialsSchema
} from "./github";
import {
  TGitHubRadarConnection,
  TGitHubRadarConnectionConfig,
  TGitHubRadarConnectionInput,
  TValidateGitHubRadarConnectionCredentialsSchema
} from "./github-radar";
import {
  THCVaultConnection,
  THCVaultConnectionConfig,
@@ -122,6 +128,7 @@ import {
export type TAppConnection = { id: string } & (
  | TAwsConnection
  | TGitHubConnection
  | TGitHubRadarConnection
  | TGcpConnection
  | TAzureKeyVaultConnection
  | TAzureAppConfigurationConnection
@@ -150,6 +157,7 @@ export type TSqlConnection = TPostgresConnection | TMsSqlConnection | TMySqlConn
export type TAppConnectionInput = { id: string } & (
  | TAwsConnectionInput
  | TGitHubConnectionInput
  | TGitHubRadarConnectionInput
  | TGcpConnectionInput
  | TAzureKeyVaultConnectionInput
  | TAzureAppConfigurationConnectionInput
@@ -185,6 +193,7 @@ export type TUpdateAppConnectionDTO = Partial<Omit<TCreateAppConnectionDTO, "met
export type TAppConnectionConfig =
  | TAwsConnectionConfig
  | TGitHubConnectionConfig
  | TGitHubRadarConnectionConfig
  | TGcpConnectionConfig
  | TAzureKeyVaultConnectionConfig
  | TAzureAppConfigurationConnectionConfig
@@ -206,6 +215,7 @@ export type TAppConnectionConfig =
export type TValidateAppConnectionCredentialsSchema =
  | TValidateAwsConnectionCredentialsSchema
  | TValidateGitHubConnectionCredentialsSchema
  | TValidateGitHubRadarConnectionCredentialsSchema
  | TValidateGcpConnectionCredentialsSchema
  | TValidateAzureKeyVaultConnectionCredentialsSchema
  | TValidateAzureAppConfigurationConnectionCredentialsSchema
@@ -0,0 +1,3 @@
export enum GitHubRadarConnectionMethod {
  App = "github-app"
}
@@ -0,0 +1,166 @@
import { createAppAuth } from "@octokit/auth-app";
import { Octokit } from "@octokit/rest";
import { AxiosResponse } from "axios";

import { getConfig } from "@app/lib/config/env";
import { request } from "@app/lib/config/request";
import { BadRequestError, ForbiddenRequestError, InternalServerError } from "@app/lib/errors";
import { getAppConnectionMethodName } from "@app/services/app-connection/app-connection-fns";
import { IntegrationUrls } from "@app/services/integration-auth/integration-list";

import { AppConnection } from "../app-connection-enums";
import { GitHubRadarConnectionMethod } from "./github-radar-connection-enums";
import {
  TGitHubRadarConnection,
  TGitHubRadarConnectionConfig,
  TGitHubRadarRepository
} from "./github-radar-connection-types";

export const getGitHubRadarConnectionListItem = () => {
  const { INF_APP_CONNECTION_GITHUB_RADAR_APP_SLUG } = getConfig();

  return {
    name: "GitHub Radar" as const,
    app: AppConnection.GitHubRadar as const,
    methods: Object.values(GitHubRadarConnectionMethod) as [GitHubRadarConnectionMethod.App],
    appClientSlug: INF_APP_CONNECTION_GITHUB_RADAR_APP_SLUG
  };
};

export const getGitHubRadarClient = (appConnection: TGitHubRadarConnection) => {
  const appCfg = getConfig();

  const { method, credentials } = appConnection;

  let client: Octokit;

  switch (method) {
    case GitHubRadarConnectionMethod.App:
      if (!appCfg.INF_APP_CONNECTION_GITHUB_RADAR_APP_ID || !appCfg.INF_APP_CONNECTION_GITHUB_RADAR_APP_PRIVATE_KEY) {
        throw new InternalServerError({
          message: `GitHub ${getAppConnectionMethodName(method).replace(
            "GitHub",
            ""
          )} environment variables have not been configured`
        });
      }

      client = new Octokit({
        authStrategy: createAppAuth,
        auth: {
          appId: appCfg.INF_APP_CONNECTION_GITHUB_RADAR_APP_ID,
          privateKey: appCfg.INF_APP_CONNECTION_GITHUB_RADAR_APP_PRIVATE_KEY,
          installationId: credentials.installationId
        }
      });
      break;
    default:
      throw new InternalServerError({
        message: `Unhandled GitHub Radar connection method: ${method as GitHubRadarConnectionMethod}`
      });
  }

  return client;
};

export const listGitHubRadarRepositories = async (appConnection: TGitHubRadarConnection) => {
  const client = getGitHubRadarClient(appConnection);

  const repositories: TGitHubRadarRepository[] = await client.paginate("GET /installation/repositories");

  return repositories;
};

type TokenRespData = {
  access_token: string;
  scope: string;
  token_type: string;
  error?: string;
};

export const validateGitHubRadarConnectionCredentials = async (config: TGitHubRadarConnectionConfig) => {
  const { credentials, method } = config;

  const { INF_APP_CONNECTION_GITHUB_RADAR_APP_CLIENT_ID, INF_APP_CONNECTION_GITHUB_RADAR_APP_CLIENT_SECRET, SITE_URL } =
    getConfig();

  if (!INF_APP_CONNECTION_GITHUB_RADAR_APP_CLIENT_ID || !INF_APP_CONNECTION_GITHUB_RADAR_APP_CLIENT_SECRET) {
    throw new InternalServerError({
      message: `GitHub ${getAppConnectionMethodName(method).replace(
        "GitHub",
        ""
      )} environment variables have not been configured`
    });
  }

  let tokenResp: AxiosResponse<TokenRespData>;

  try {
    tokenResp = await request.get<TokenRespData>("https://github.com/login/oauth/access_token", {
      params: {
        client_id: INF_APP_CONNECTION_GITHUB_RADAR_APP_CLIENT_ID,
        client_secret: INF_APP_CONNECTION_GITHUB_RADAR_APP_CLIENT_SECRET,
        code: credentials.code,
        redirect_uri: `${SITE_URL}/organization/app-connections/github-radar/oauth/callback`
      },
      headers: {
        Accept: "application/json",
        "Accept-Encoding": "application/json"
      }
    });
  } catch (e: unknown) {
    throw new BadRequestError({
      message: `Unable to validate connection: verify credentials`
    });
  }

  if (tokenResp.status !== 200) {
    throw new BadRequestError({
      message: `Unable to validate credentials: GitHub responded with a status code of ${tokenResp.status} (${tokenResp.statusText}). Verify credentials and try again.`
    });
  }

  if (method === GitHubRadarConnectionMethod.App) {
    const installationsResp = await request.get<{
      installations: {
        id: number;
        account: {
          login: string;
          type: string;
          id: number;
        };
      }[];
    }>(IntegrationUrls.GITHUB_USER_INSTALLATIONS, {
      headers: {
        Accept: "application/json",
        Authorization: `Bearer ${tokenResp.data.access_token}`,
        "Accept-Encoding": "application/json"
      }
    });

    const matchingInstallation = installationsResp.data.installations.find(
      (installation) => installation.id === +credentials.installationId
    );

    if (!matchingInstallation) {
      throw new ForbiddenRequestError({
        message: "User does not have access to the provided installation"
      });
    }
  }

  if (!tokenResp.data.access_token) {
    throw new InternalServerError({ message: `Missing access token: ${tokenResp.data.error}` });
  }

  switch (method) {
    case GitHubRadarConnectionMethod.App:
      return {
        installationId: credentials.installationId
      };
    default:
      throw new InternalServerError({
        message: `Unhandled GitHub connection method: ${method as GitHubRadarConnectionMethod}`
      });
  }
};
@@ -0,0 +1,66 @@
import { z } from "zod";

import { AppConnections } from "@app/lib/api-docs";
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
import {
  BaseAppConnectionSchema,
  GenericCreateAppConnectionFieldsSchema,
  GenericUpdateAppConnectionFieldsSchema
} from "@app/services/app-connection/app-connection-schemas";

import { GitHubRadarConnectionMethod } from "./github-radar-connection-enums";

export const GitHubRadarConnectionInputCredentialsSchema = z.object({
  code: z.string().trim().min(1, "GitHub Radar App code required"),
  installationId: z.string().min(1, "GitHub Radar App Installation ID required")
});

export const GitHubRadarConnectionOutputCredentialsSchema = z.object({
  installationId: z.string()
});

export const ValidateGitHubRadarConnectionCredentialsSchema = z.discriminatedUnion("method", [
  z.object({
    method: z
      .literal(GitHubRadarConnectionMethod.App)
      .describe(AppConnections.CREATE(AppConnection.GitHubRadar).method),
    credentials: GitHubRadarConnectionInputCredentialsSchema.describe(
      AppConnections.CREATE(AppConnection.GitHubRadar).credentials
    )
  })
]);

export const CreateGitHubRadarConnectionSchema = ValidateGitHubRadarConnectionCredentialsSchema.and(
  GenericCreateAppConnectionFieldsSchema(AppConnection.GitHubRadar)
);

export const UpdateGitHubRadarConnectionSchema = z
  .object({
    credentials: GitHubRadarConnectionInputCredentialsSchema.optional().describe(
      AppConnections.UPDATE(AppConnection.GitHubRadar).credentials
    )
  })
  .and(GenericUpdateAppConnectionFieldsSchema(AppConnection.GitHubRadar));

const BaseGitHubRadarConnectionSchema = BaseAppConnectionSchema.extend({ app: z.literal(AppConnection.GitHubRadar) });

export const GitHubRadarConnectionSchema = BaseGitHubRadarConnectionSchema.extend({
  method: z.literal(GitHubRadarConnectionMethod.App),
  credentials: GitHubRadarConnectionOutputCredentialsSchema
});

export const SanitizedGitHubRadarConnectionSchema = z.discriminatedUnion("method", [
  BaseGitHubRadarConnectionSchema.extend({
    method: z.literal(GitHubRadarConnectionMethod.App),
    credentials: GitHubRadarConnectionOutputCredentialsSchema.pick({})
  })
]);

export const GitHubRadarConnectionListItemSchema = z.object({
  name: z.literal("GitHub Radar"),
  app: z.literal(AppConnection.GitHubRadar),
  // the below is preferable but currently breaks with our zod to json schema parser
  // methods: z.tuple([z.literal(GitHubConnectionMethod.App), z.literal(GitHubConnectionMethod.OAuth)]),
  methods: z.nativeEnum(GitHubRadarConnectionMethod).array(),
  appClientSlug: z.string().optional()
});
@@ -0,0 +1,24 @@
+import { OrgServiceActor } from "@app/lib/types";
+import { AppConnection } from "@app/services/app-connection/app-connection-enums";
+import { listGitHubRadarRepositories } from "@app/services/app-connection/github-radar/github-radar-connection-fns";
+import { TGitHubRadarConnection } from "@app/services/app-connection/github-radar/github-radar-connection-types";
+
+type TGetAppConnectionFunc = (
+  app: AppConnection,
+  connectionId: string,
+  actor: OrgServiceActor
+) => Promise<TGitHubRadarConnection>;
+
+export const githubRadarConnectionService = (getAppConnection: TGetAppConnectionFunc) => {
+  const listRepositories = async (connectionId: string, actor: OrgServiceActor) => {
+    const appConnection = await getAppConnection(AppConnection.GitHubRadar, connectionId, actor);
+
+    const repositories = await listGitHubRadarRepositories(appConnection);
+
+    return repositories.map((repo) => ({ id: repo.id, name: repo.full_name }));
+  };
+
+  return {
+    listRepositories
+  };
+};
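The service is a factory that takes its connection resolver as a plain function. A sketch of wiring it up with a hypothetical stub resolver (in the codebase the real resolver is injected by the app-connection service; the stub below is illustrative only):

```ts
import { githubRadarConnectionService } from "./github-radar-connection-service";

// Hypothetical stand-in for the injected resolver, which would normally
// fetch + decrypt the connection and enforce permissions for `actor`.
const getAppConnection: Parameters<typeof githubRadarConnectionService>[0] = async (app, connectionId, _actor) => {
  throw new Error(`stub: resolve ${app} connection ${connectionId}`);
};

const service = githubRadarConnectionService(getAppConnection);
// const repos = await service.listRepositories("conn_123", actor);
// -> [{ id: 1296269, name: "octocat/Hello-World" }, ...]
```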
@@ -0,0 +1,28 @@
+import { z } from "zod";
+
+import { DiscriminativePick } from "@app/lib/types";
+
+import { AppConnection } from "../app-connection-enums";
+import {
+  CreateGitHubRadarConnectionSchema,
+  GitHubRadarConnectionSchema,
+  ValidateGitHubRadarConnectionCredentialsSchema
+} from "./github-radar-connection-schemas";
+
+export type TGitHubRadarConnection = z.infer<typeof GitHubRadarConnectionSchema>;
+
+export type TGitHubRadarConnectionInput = z.infer<typeof CreateGitHubRadarConnectionSchema> & {
+  app: AppConnection.GitHubRadar;
+};
+
+export type TValidateGitHubRadarConnectionCredentialsSchema = typeof ValidateGitHubRadarConnectionCredentialsSchema;
+
+export type TGitHubRadarConnectionConfig = DiscriminativePick<
+  TGitHubRadarConnectionInput,
+  "method" | "app" | "credentials"
+>;
+
+export type TGitHubRadarRepository = {
+  id: number;
+  full_name: string;
+};
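`DiscriminativePick` is imported from `@app/lib/types` but not defined in this diff. A plausible sketch, assuming it is a `Pick` that distributes over each member of a discriminated union instead of collapsing the union first (the real definition may differ):

```ts
// Assumed shape; shown here only to explain the name.
type DiscriminativePick<T, K extends keyof T> = T extends unknown ? Pick<T, K> : never;

// Given U = { method: "a"; x: 1 } | { method: "b"; y: 2 },
// DiscriminativePick<U, "method"> yields { method: "a" } | { method: "b" },
// preserving the discriminant on every branch of the union.
```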
@@ -0,0 +1,4 @@
+export * from "./github-radar-connection-enums";
+export * from "./github-radar-connection-fns";
+export * from "./github-radar-connection-schemas";
+export * from "./github-radar-connection-types";
@@ -13,7 +13,12 @@ import { LdapConnectionMethod, LdapProvider } from "./ldap-connection-enums";

 export const LdapConnectionSimpleBindCredentialsSchema = z.object({
   provider: z.nativeEnum(LdapProvider).describe(AppConnections.CREDENTIALS.LDAP.provider),
-  url: z.string().trim().min(1, "URL required").regex(LdapUrlRegex).describe(AppConnections.CREDENTIALS.LDAP.url),
+  url: z
+    .string()
+    .trim()
+    .min(1, "URL required")
+    .refine((value) => LdapUrlRegex.test(value), "Invalid LDAP URL")
+    .describe(AppConnections.CREDENTIALS.LDAP.url),
   dn: z
     .string()
     .trim()
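A side note on this change: zod's `.regex()` expects a native `RegExp` and emits a generic error, while `.refine()` accepts any boolean predicate (including the `.test` method of an RE2 instance, which this codebase uses elsewhere) and carries an explicit message. A minimal sketch of the difference, with a placeholder pattern standing in for the real `LdapUrlRegex`:

```ts
import { z } from "zod";

// Placeholder pattern; the actual LdapUrlRegex lives elsewhere in the codebase.
const LdapUrlRegex = /^ldaps?:\/\/\S+$/;

// .refine() takes any predicate plus an explicit failure message.
const UrlSchema = z
  .string()
  .trim()
  .min(1, "URL required")
  .refine((value) => LdapUrlRegex.test(value), "Invalid LDAP URL");

UrlSchema.parse("ldaps://ad.example.com:636"); // ok
// UrlSchema.parse("not-a-url"); // throws ZodError with "Invalid LDAP URL"
```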
@@ -1,6 +1,7 @@
 import { ForbiddenError } from "@casl/ability";

 import { ActionProjectType, ProjectMembershipRole, SecretKeyEncoding, TGroups } from "@app/db/schemas";
+import { TListProjectGroupUsersDTO } from "@app/ee/services/group/group-types";
 import {
   constructPermissionErrorMessage,
   validatePrivilegeChangeOperation
@@ -42,7 +43,7 @@ type TGroupProjectServiceFactoryDep = {
   projectKeyDAL: Pick<TProjectKeyDALFactory, "findLatestProjectKey" | "delete" | "insertMany" | "transaction">;
   projectRoleDAL: Pick<TProjectRoleDALFactory, "find">;
   projectBotDAL: TProjectBotDALFactory;
-  groupDAL: Pick<TGroupDALFactory, "findOne">;
+  groupDAL: Pick<TGroupDALFactory, "findOne" | "findAllGroupPossibleMembers">;
   permissionService: Pick<TPermissionServiceFactory, "getProjectPermission" | "getProjectPermissionByRole">;
 };

@@ -471,11 +472,54 @@ export const groupProjectServiceFactory = ({
     return groupMembership;
   };

+  const listProjectGroupUsers = async ({
+    id,
+    projectId,
+    offset,
+    limit,
+    username,
+    actor,
+    actorId,
+    actorAuthMethod,
+    actorOrgId,
+    search,
+    filter
+  }: TListProjectGroupUsersDTO) => {
+    const project = await projectDAL.findById(projectId);
+
+    if (!project) {
+      throw new NotFoundError({ message: `Failed to find project with ID ${projectId}` });
+    }
+
+    const { permission } = await permissionService.getProjectPermission({
+      actor,
+      actorId,
+      projectId,
+      actorAuthMethod,
+      actorOrgId,
+      actionProjectType: ActionProjectType.Any
+    });
+    ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionGroupActions.Read, ProjectPermissionSub.Groups);
+
+    const { members, totalCount } = await groupDAL.findAllGroupPossibleMembers({
+      orgId: project.orgId,
+      groupId: id,
+      offset,
+      limit,
+      username,
+      search,
+      filter
+    });
+
+    return { users: members, totalCount };
+  };
+
   return {
     addGroupToProject,
     updateGroupInProject,
     removeGroupFromProject,
     listGroupsInProject,
-    getGroupInProject
+    getGroupInProject,
+    listProjectGroupUsers
   };
 };
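A sketch of how a route handler might call the new method. The ids are placeholders, the `req.permission` shape mirrors the auth context this codebase attaches to requests, and the optional `username`/`filter` fields are omitted; all of this is illustrative, not from the diff:

```ts
// Hypothetical call site inside a route handler.
const { users, totalCount } = await groupProjectService.listProjectGroupUsers({
  id: "group-uuid",          // group whose users to list
  projectId: "project-uuid",
  offset: 0,
  limit: 20,
  search: "alice",           // optional free-text filter
  actor: req.permission.type,
  actorId: req.permission.id,
  actorAuthMethod: req.permission.authMethod,
  actorOrgId: req.permission.orgId
});
```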
@@ -20,8 +20,9 @@ import {
 import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
 import { getConfig } from "@app/lib/config/env";
 import { BadRequestError, NotFoundError, PermissionBoundaryError, UnauthorizedError } from "@app/lib/errors";
-import { withGatewayProxy } from "@app/lib/gateway";
+import { GatewayHttpProxyActions, GatewayProxyProtocol, withGatewayProxy } from "@app/lib/gateway";
 import { extractIPDetails, isValidIpOrCidr } from "@app/lib/ip";
 import { logger } from "@app/lib/logger";

 import { ActorType, AuthTokenType } from "../auth/auth-type";
 import { TIdentityOrgDALFactory } from "../identity/identity-org-dal";
@@ -33,6 +34,7 @@ import { validateIdentityUpdateForSuperAdminPrivileges } from "../super-admin/su
 import { TIdentityKubernetesAuthDALFactory } from "./identity-kubernetes-auth-dal";
 import { extractK8sUsername } from "./identity-kubernetes-auth-fns";
 import {
+  IdentityKubernetesAuthTokenReviewMode,
   TAttachKubernetesAuthDTO,
   TCreateTokenReviewResponse,
   TGetKubernetesAuthDTO,
@@ -72,19 +74,25 @@ export const identityKubernetesAuthServiceFactory = ({
       gatewayId: string;
       targetHost: string;
       targetPort: number;
+      caCert?: string;
+      reviewTokenThroughGateway: boolean;
     },
-    gatewayCallback: (host: string, port: number) => Promise<T>
+    gatewayCallback: (host: string, port: number, httpsAgent?: https.Agent) => Promise<T>
   ): Promise<T> => {
     const relayDetails = await gatewayService.fnGetGatewayClientTlsByGatewayId(inputs.gatewayId);
     const [relayHost, relayPort] = relayDetails.relayAddress.split(":");

     const callbackResult = await withGatewayProxy(
-      async (port) => {
-        // Needs to be https protocol or the kubernetes API server will fail with "Client sent an HTTP request to an HTTPS server"
-        const res = await gatewayCallback("https://localhost", port);
+      async (port, httpsAgent) => {
+        const res = await gatewayCallback(
+          inputs.reviewTokenThroughGateway ? "http://localhost" : "https://localhost",
+          port,
+          httpsAgent
+        );
         return res;
       },
       {
+        protocol: inputs.reviewTokenThroughGateway ? GatewayProxyProtocol.Http : GatewayProxyProtocol.Tcp,
         targetHost: inputs.targetHost,
         targetPort: inputs.targetPort,
         relayHost,
@@ -95,7 +103,12 @@ export const identityKubernetesAuthServiceFactory = ({
         ca: relayDetails.certChain,
         cert: relayDetails.certificate,
         key: relayDetails.privateKey.toString()
-      }
+      },
+      // we always pass this because it's needed for both the TCP and HTTP protocols
+      httpsAgent: new https.Agent({
+        ca: inputs.caCert,
+        rejectUnauthorized: Boolean(inputs.caCert)
+      })
     }
   );

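Worth noting on the `rejectUnauthorized: Boolean(inputs.caCert)` pattern used here and again below: certificate verification is enabled only when a CA bundle was supplied, and skipped otherwise. A standalone sketch of that behavior (the CA value is a placeholder):

```ts
import https from "https";

// If a CA cert is provided, pin verification to it; if not, fall back to
// accepting the server certificate unverified (e.g. self-signed clusters).
const makeAgent = (caCert?: string) =>
  new https.Agent({
    ca: caCert,
    rejectUnauthorized: Boolean(caCert)
  });

const verifying = makeAgent("-----BEGIN CERTIFICATE-----\n..."); // verifies against the CA
const permissive = makeAgent(); // rejectUnauthorized: false — trusts any cert
```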
@@ -129,17 +142,29 @@ export const identityKubernetesAuthServiceFactory = ({
       caCert = decryptor({ cipherTextBlob: identityKubernetesAuth.encryptedKubernetesCaCertificate }).toString();
     }

-    let tokenReviewerJwt = "";
-    if (identityKubernetesAuth.encryptedKubernetesTokenReviewerJwt) {
-      tokenReviewerJwt = decryptor({
-        cipherTextBlob: identityKubernetesAuth.encryptedKubernetesTokenReviewerJwt
-      }).toString();
-    } else {
-      // if no token reviewer is provided means the incoming token has to act as reviewer
-      tokenReviewerJwt = serviceAccountJwt;
-    }
+    const tokenReviewCallbackRaw = async (host: string = identityKubernetesAuth.kubernetesHost, port?: number) => {
+      logger.info({ host, port }, "tokenReviewCallbackRaw: Processing kubernetes token review using raw API");
+      let tokenReviewerJwt = "";
+      if (identityKubernetesAuth.encryptedKubernetesTokenReviewerJwt) {
+        tokenReviewerJwt = decryptor({
+          cipherTextBlob: identityKubernetesAuth.encryptedKubernetesTokenReviewerJwt
+        }).toString();
+      } else {
+        // if no token reviewer is provided, the incoming token has to act as the reviewer
+        tokenReviewerJwt = serviceAccountJwt;
+      }
+
+      let servername = identityKubernetesAuth.kubernetesHost;
+      if (servername.startsWith("https://") || servername.startsWith("http://")) {
+        servername = new RE2("^https?:\\/\\/").replace(servername, "");
+      }
+
+      // find the last colon index; if the host carries a port, strip it, including the colon
+      const lastColonIndex = servername.lastIndexOf(":");
+      if (lastColonIndex !== -1) {
+        servername = servername.substring(0, lastColonIndex);
+      }

-    const tokenReviewCallback = async (host: string = identityKubernetesAuth.kubernetesHost, port?: number) => {
       const baseUrl = port ? `${host}:${port}` : host;

       const res = await axios
@@ -160,10 +185,10 @@ export const identityKubernetesAuthServiceFactory = ({
         },
         signal: AbortSignal.timeout(10000),
         timeout: 10000,
-        // if ca cert, rejectUnauthorized: true
         httpsAgent: new https.Agent({
           ca: caCert,
-          rejectUnauthorized: !!caCert
+          rejectUnauthorized: Boolean(caCert),
+          servername
         })
       }
     )
@@ -186,24 +211,119 @@ export const identityKubernetesAuthServiceFactory = ({
       return res.data;
     };

-    let { kubernetesHost } = identityKubernetesAuth;
+    const tokenReviewCallbackThroughGateway = async (
+      host: string = identityKubernetesAuth.kubernetesHost,
+      port?: number,
+      httpsAgent?: https.Agent
+    ) => {
+      logger.info(
+        {
+          host,
+          port
+        },
+        "tokenReviewCallbackThroughGateway: Processing kubernetes token review using gateway"
+      );

-    if (kubernetesHost.startsWith("https://") || kubernetesHost.startsWith("http://")) {
-      kubernetesHost = new RE2("^https?:\\/\\/").replace(kubernetesHost, "");
-    }
+      const baseUrl = port ? `${host}:${port}` : host;
+
+      const res = await axios
+        .post<TCreateTokenReviewResponse>(
+          `${baseUrl}/apis/authentication.k8s.io/v1/tokenreviews`,
+          {
+            apiVersion: "authentication.k8s.io/v1",
+            kind: "TokenReview",
+            spec: {
+              token: serviceAccountJwt,
+              ...(identityKubernetesAuth.allowedAudience ? { audiences: [identityKubernetesAuth.allowedAudience] } : {})
+            }
+          },
+          {
+            headers: {
+              "Content-Type": "application/json",
+              "x-infisical-action": GatewayHttpProxyActions.InjectGatewayK8sServiceAccountToken
+            },
+            signal: AbortSignal.timeout(10000),
+            timeout: 10000,
+            ...(httpsAgent ? { httpsAgent } : {})
+          }
+        )
+        .catch((err) => {
+          if (err instanceof AxiosError) {
+            if (err.response) {
+              let { message } = err?.response?.data as unknown as { message?: string };
+
+              if (!message && typeof err.response.data === "string") {
+                message = err.response.data;
+              }
+
+              if (message) {
+                throw new UnauthorizedError({
+                  message,
+                  name: "KubernetesTokenReviewRequestError"
+                });
+              }
+            }
+          }
+          throw err;
+        });
+
+      return res.data;
+    };
+
+    let data: TCreateTokenReviewResponse | undefined;
+
+    if (identityKubernetesAuth.tokenReviewMode === IdentityKubernetesAuthTokenReviewMode.Gateway) {
+      const { kubernetesHost } = identityKubernetesAuth;
+      const lastColonIndex = kubernetesHost.lastIndexOf(":");
+      const k8sHost = kubernetesHost.substring(0, lastColonIndex);
+      const k8sPort = kubernetesHost.substring(lastColonIndex + 1);
+
+      if (!identityKubernetesAuth.gatewayId) {
+        throw new BadRequestError({
+          message: "Gateway ID is required when token review mode is set to Gateway"
+        });
+      }
+
+      data = await $gatewayProxyWrapper(
+        {
+          gatewayId: identityKubernetesAuth.gatewayId,
+          targetHost: k8sHost, // note(daniel): must include the protocol (https|http)
+          targetPort: k8sPort ? Number(k8sPort) : 443,
+          caCert,
+          reviewTokenThroughGateway: true
+        },
+        tokenReviewCallbackThroughGateway
+      );
+    } else if (identityKubernetesAuth.tokenReviewMode === IdentityKubernetesAuthTokenReviewMode.Api) {
+      let { kubernetesHost } = identityKubernetesAuth;
+      if (kubernetesHost.startsWith("https://") || kubernetesHost.startsWith("http://")) {
+        kubernetesHost = new RE2("^https?:\\/\\/").replace(kubernetesHost, "");
+      }
+
+      const [k8sHost, k8sPort] = kubernetesHost.split(":");
+
+      data = identityKubernetesAuth.gatewayId
+        ? await $gatewayProxyWrapper(
+            {
+              gatewayId: identityKubernetesAuth.gatewayId,
+              targetHost: k8sHost,
+              targetPort: k8sPort ? Number(k8sPort) : 443,
+              reviewTokenThroughGateway: false
+            },
+            tokenReviewCallbackRaw
+          )
+        : await tokenReviewCallbackRaw();
+    } else {
+      throw new BadRequestError({
+        message: `Invalid token review mode: ${identityKubernetesAuth.tokenReviewMode}`
+      });
+    }

-    const [k8sHost, k8sPort] = kubernetesHost.split(":");
-
-    const data = identityKubernetesAuth.gatewayId
-      ? await $gatewayProxyWrapper(
-          {
-            gatewayId: identityKubernetesAuth.gatewayId,
-            targetHost: k8sHost,
-            targetPort: k8sPort ? Number(k8sPort) : 443
-          },
-          tokenReviewCallback
-        )
-      : await tokenReviewCallback();
+    if (!data) {
+      throw new BadRequestError({
+        message: "Failed to review token"
+      });
+    }

     if ("error" in data.status)
       throw new UnauthorizedError({ message: data.status.error, name: "KubernetesTokenReviewError" });
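One subtlety in the branching above: the Gateway path keeps the protocol on the host (per the `note(daniel)` comment), so the port is split off with `lastIndexOf(":")` rather than `split(":")`, which would break on the colon inside `https://`. A minimal standalone sketch of the two parses (the host value is hypothetical):

```ts
const kubernetesHost = "https://kube.example.com:6443";

// Gateway path: keep the protocol, split on the LAST colon only.
const lastColonIndex = kubernetesHost.lastIndexOf(":");
const gwHost = kubernetesHost.substring(0, lastColonIndex);  // "https://kube.example.com"
const gwPort = kubernetesHost.substring(lastColonIndex + 1); // "6443"

// Api path: strip the protocol first, after which a plain split is safe.
const bare = kubernetesHost.replace(/^https?:\/\//, "");     // "kube.example.com:6443"
const [apiHost, apiPort] = bare.split(":");                  // ["kube.example.com", "6443"]
```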
@@ -298,6 +418,7 @@ export const identityKubernetesAuthServiceFactory = ({
       kubernetesHost,
       caCert,
       tokenReviewerJwt,
+      tokenReviewMode,
       allowedNamespaces,
       allowedNames,
       allowedAudience,
@@ -384,6 +505,7 @@ export const identityKubernetesAuthServiceFactory = ({
     {
       identityId: identityMembershipOrg.identityId,
       kubernetesHost,
+      tokenReviewMode,
       allowedNamespaces,
       allowedNames,
       allowedAudience,
@@ -410,6 +532,7 @@ export const identityKubernetesAuthServiceFactory = ({
       kubernetesHost,
       caCert,
       tokenReviewerJwt,
+      tokenReviewMode,
       allowedNamespaces,
       allowedNames,
       allowedAudience,
@@ -492,6 +615,7 @@ export const identityKubernetesAuthServiceFactory = ({

     const updateQuery: TIdentityKubernetesAuthsUpdate = {
       kubernetesHost,
+      tokenReviewMode,
       allowedNamespaces,
       allowedNames,
       allowedAudience,
@@ -5,11 +5,17 @@ export type TLoginKubernetesAuthDTO = {
   jwt: string;
 };

+export enum IdentityKubernetesAuthTokenReviewMode {
+  Api = "api",
+  Gateway = "gateway"
+}
+
 export type TAttachKubernetesAuthDTO = {
   identityId: string;
   kubernetesHost: string;
   caCert: string;
   tokenReviewerJwt?: string;
+  tokenReviewMode: IdentityKubernetesAuthTokenReviewMode;
   allowedNamespaces: string;
   allowedNames: string;
   allowedAudience: string;
@@ -26,6 +32,7 @@ export type TUpdateKubernetesAuthDTO = {
   kubernetesHost?: string;
   caCert?: string;
   tokenReviewerJwt?: string | null;
+  tokenReviewMode?: IdentityKubernetesAuthTokenReviewMode;
   allowedNamespaces?: string;
   allowedNames?: string;
   allowedAudience?: string;
@@ -412,7 +412,15 @@ export const identityProjectDALFactory = (db: TDbClient) => {
         }
       ]
     });
-    return members;
+
+    return members.map((el) => ({
+      ...el,
+      roles: el.roles.sort((a, b) => {
+        const roleA = (a.customRoleName || a.role).toLowerCase();
+        const roleB = (b.customRoleName || b.role).toLowerCase();
+        return roleA.localeCompare(roleB);
+      })
+    }));
   } catch (error) {
     throw new DatabaseError({ error, name: "FindByProjectId" });
   }
@@ -92,7 +92,8 @@ export const projectMembershipDALFactory = (db: TDbClient) => {
         db.ref("temporaryAccessEndTime").withSchema(TableName.ProjectUserMembershipRole),
         db.ref("name").as("projectName").withSchema(TableName.Project)
       )
-      .where({ isGhost: false });
+      .where({ isGhost: false })
+      .orderBy(`${TableName.Users}.username` as "username");

     const members = sqlNestRelationships({
       data: docs,
@@ -149,7 +150,14 @@ export const projectMembershipDALFactory = (db: TDbClient) => {
         }
       ]
     });
-    return members;
+    return members.map((el) => ({
+      ...el,
+      roles: el.roles.sort((a, b) => {
+        const roleA = (a.customRoleName || a.role).toLowerCase();
+        const roleB = (b.customRoleName || b.role).toLowerCase();
+        return roleA.localeCompare(roleB);
+      })
+    }));
   } catch (error) {
     throw new DatabaseError({ error, name: "Find all project members" });
   }
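The same in-memory sort now appears in both the identity-project and project-membership DALs: roles are ordered case-insensitively by custom role name when present, falling back to the built-in role slug. A small illustration with made-up rows:

```ts
type Role = { role: string; customRoleName?: string | null };

const roles: Role[] = [
  { role: "custom", customRoleName: "Viewer" },
  { role: "admin" },
  { role: "custom", customRoleName: "billing" }
];

roles.sort((a, b) =>
  (a.customRoleName || a.role).toLowerCase().localeCompare((b.customRoleName || b.role).toLowerCase())
);
// -> admin, billing, Viewer (case-insensitive, locale-aware ordering)
```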
@@ -22,6 +22,56 @@ export type TProjectDALFactory = ReturnType<typeof projectDALFactory>;
 export const projectDALFactory = (db: TDbClient) => {
   const projectOrm = ormify(db, TableName.Project);

+  const findIdentityProjects = async (identityId: string, orgId: string, projectType: ProjectType | "all") => {
+    try {
+      const workspaces = await db(TableName.IdentityProjectMembership)
+        .where({ identityId })
+        .join(TableName.Project, `${TableName.IdentityProjectMembership}.projectId`, `${TableName.Project}.id`)
+        .where(`${TableName.Project}.orgId`, orgId)
+        .andWhere((qb) => {
+          if (projectType !== "all") {
+            void qb.where(`${TableName.Project}.type`, projectType);
+          }
+        })
+        .leftJoin(TableName.Environment, `${TableName.Environment}.projectId`, `${TableName.Project}.id`)
+        .select(
+          selectAllTableCols(TableName.Project),
+          db.ref("id").withSchema(TableName.Project).as("_id"),
+          db.ref("id").withSchema(TableName.Environment).as("envId"),
+          db.ref("slug").withSchema(TableName.Environment).as("envSlug"),
+          db.ref("name").withSchema(TableName.Environment).as("envName")
+        )
+        .orderBy([
+          { column: `${TableName.Project}.name`, order: "asc" },
+          { column: `${TableName.Environment}.position`, order: "asc" }
+        ]);
+
+      const nestedWorkspaces = sqlNestRelationships({
+        data: workspaces,
+        key: "id",
+        parentMapper: ({ _id, ...el }) => ({ _id, ...ProjectsSchema.parse(el) }),
+        childrenMapper: [
+          {
+            key: "envId",
+            label: "environments" as const,
+            mapper: ({ envId: id, envSlug: slug, envName: name }) => ({
+              id,
+              slug,
+              name
+            })
+          }
+        ]
+      });
+
+      return nestedWorkspaces.map((workspace) => ({
+        ...workspace,
+        organization: workspace.orgId
+      }));
+    } catch (error) {
+      throw new DatabaseError({ error, name: "Find identity projects" });
+    }
+  };
+
   const findUserProjects = async (userId: string, orgId: string, projectType: ProjectType | "all") => {
     try {
       const workspaces = await db
@@ -443,6 +493,7 @@ export const projectDALFactory = (db: TDbClient) => {
   return {
     ...projectOrm,
     findUserProjects,
+    findIdentityProjects,
     setProjectUpgradeStatus,
     findAllProjectsByIdentity,
     findProjectGhostUser,
@@ -30,7 +30,7 @@ import { TSshCertificateDALFactory } from "@app/ee/services/ssh-certificate/ssh-
 import { TSshCertificateTemplateDALFactory } from "@app/ee/services/ssh-certificate-template/ssh-certificate-template-dal";
 import { TSshHostDALFactory } from "@app/ee/services/ssh-host/ssh-host-dal";
 import { TSshHostGroupDALFactory } from "@app/ee/services/ssh-host-group/ssh-host-group-dal";
-import { TKeyStoreFactory } from "@app/keystore/keystore";
+import { PgSqlLock, TKeyStoreFactory } from "@app/keystore/keystore";
 import { getConfig } from "@app/lib/config/env";
 import { infisicalSymmetricEncypt } from "@app/lib/crypto/encryption";
 import { BadRequestError, ForbiddenRequestError, NotFoundError } from "@app/lib/errors";
@@ -259,16 +259,17 @@ export const projectServiceFactory = ({
     );
     ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Create, OrgPermissionSubjects.Workspace);

-    const plan = await licenseService.getPlan(organization.id);
-    if (plan.workspaceLimit !== null && plan.workspacesUsed >= plan.workspaceLimit) {
-      // case: limit imposed on number of workspaces allowed
-      // case: number of workspaces used exceeds the number of workspaces allowed
-      throw new BadRequestError({
-        message: "Failed to create workspace due to plan limit reached. Upgrade plan to add more workspaces."
-      });
-    }
-
     const results = await (trx || projectDAL).transaction(async (tx) => {
+      await tx.raw("SELECT pg_advisory_xact_lock(?)", [PgSqlLock.CreateProject(organization.id)]);
+
+      const plan = await licenseService.getPlan(organization.id);
+      if (plan.workspaceLimit !== null && plan.workspacesUsed >= plan.workspaceLimit) {
+        // case: limit imposed on number of workspaces allowed
+        // case: number of workspaces used exceeds the number of workspaces allowed
+        throw new BadRequestError({
+          message: "Failed to create workspace due to plan limit reached. Upgrade plan to add more workspaces."
+        });
+      }
       const ghostUser = await orgService.addGhostUser(organization.id, tx);

       if (kmsKeyId) {
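Moving the plan check inside the transaction and taking `pg_advisory_xact_lock` serializes concurrent project creation per organization, so two simultaneous requests can no longer both pass the limit check before either insert lands. A minimal sketch of the idea with knex; the table name, key derivation, and limit are illustrative, not the codebase's:

```ts
import crypto from "crypto";
import knex from "knex";

const db = knex({ client: "pg", connection: process.env.DATABASE_URL });

// Placeholder inputs for illustration.
const orgId = "org-uuid";
const workspaceLimit = 3;

// Derive a stable bigint lock key from the org id (any deterministic mapping works).
const lockKey = crypto.createHash("md5").update(orgId).digest().readBigInt64BE(0);

await db.transaction(async (tx) => {
  // Held until this transaction commits or rolls back; a second transaction
  // requesting the same key blocks here, making check-then-insert race-free.
  await tx.raw("SELECT pg_advisory_xact_lock(?)", [lockKey]);

  const [{ count }] = await tx("projects").where({ orgId }).count();
  if (Number(count) >= workspaceLimit) throw new Error("plan limit reached");
  await tx("projects").insert({ orgId, name: "new-project" });
});
```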
@@ -573,12 +574,16 @@ export const projectServiceFactory = ({

   const getProjects = async ({
     actorId,
+    actor,
     includeRoles,
     actorAuthMethod,
     actorOrgId,
     type = ProjectType.SecretManager
   }: TListProjectsDTO) => {
-    const workspaces = await projectDAL.findUserProjects(actorId, actorOrgId, type);
+    const workspaces =
+      actor === ActorType.IDENTITY
+        ? await projectDAL.findIdentityProjects(actorId, actorOrgId, type)
+        : await projectDAL.findUserProjects(actorId, actorOrgId, type);

     if (includeRoles) {
       const { permission } = await permissionService.getUserOrgPermission(
@@ -127,6 +127,7 @@ export const OnePassSyncFns = {
   syncSecrets: async (secretSync: TOnePassSyncWithCredentials, secretMap: TSecretMap) => {
     const {
       connection,
+      environment,
       destinationConfig: { vaultId }
     } = secretSync;

@@ -164,7 +165,7 @@ export const OnePassSyncFns = {

     for await (const [key, variable] of Object.entries(items)) {
       // eslint-disable-next-line no-continue
-      if (!matchesSchema(key, secretSync.syncOptions.keySchema)) continue;
+      if (!matchesSchema(key, environment?.slug || "", secretSync.syncOptions.keySchema)) continue;

       if (!(key in secretMap)) {
         try {
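This same signature change repeats across every sync provider below (AWS Parameter Store, AWS Secrets Manager, Azure App Configuration, Azure Key Vault, Camunda, Databricks): `matchesSchema` now receives the environment slug as its second argument, presumably so key schemas can interpolate an environment placeholder alongside the secret-key one. A hedged sketch of what such a matcher could look like; the placeholder syntax and logic are assumptions for illustration, not the real implementation:

```ts
// Assumed placeholder syntax: "{{secretKey}}" and "{{environment}}".
// The real matchesSchema lives in the secret-sync utils and may differ.
const matchesSchema = (key: string, environment: string, keySchema?: string): boolean => {
  if (!keySchema) return true; // no schema -> every key is in scope

  const pattern = keySchema
    .replace(/[.*+?^${}()|[\]\\]/g, "\\$&") // escape regex metacharacters
    .replace(/\\\{\\\{secretKey\\\}\\\}/g, ".+")
    .replace(/\\\{\\\{environment\\\}\\\}/g, environment);

  return new RegExp(`^${pattern}$`).test(key);
};

matchesSchema("INFISICAL_dev_DB_URL", "dev", "INFISICAL_{{environment}}_{{secretKey}}"); // true
matchesSchema("OTHER_KEY", "dev", "INFISICAL_{{environment}}_{{secretKey}}"); // false
```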
@@ -294,7 +294,7 @@ const deleteParametersBatch = async (

 export const AwsParameterStoreSyncFns = {
   syncSecrets: async (secretSync: TAwsParameterStoreSyncWithCredentials, secretMap: TSecretMap) => {
-    const { destinationConfig, syncOptions } = secretSync;
+    const { destinationConfig, syncOptions, environment } = secretSync;

     const ssm = await getSSM(secretSync);

@@ -391,7 +391,7 @@ export const AwsParameterStoreSyncFns = {
       const [key, parameter] = entry;

       // eslint-disable-next-line no-continue
-      if (!matchesSchema(key, syncOptions.keySchema)) continue;
+      if (!matchesSchema(key, environment?.slug || "", syncOptions.keySchema)) continue;

       if (!(key in secretMap) || !secretMap[key].value) {
         parametersToDelete.push(parameter);
@@ -57,7 +57,11 @@ const sleep = async () =>
     setTimeout(resolve, 1000);
   });

-const getSecretsRecord = async (client: SecretsManagerClient, keySchema?: string): Promise<TAwsSecretsRecord> => {
+const getSecretsRecord = async (
+  client: SecretsManagerClient,
+  environment: string,
+  keySchema?: string
+): Promise<TAwsSecretsRecord> => {
   const awsSecretsRecord: TAwsSecretsRecord = {};
   let hasNext = true;
   let nextToken: string | undefined;
@@ -72,7 +76,7 @@ const getSecretsRecord = async (client: SecretsManagerClient, keySchema?: string

     if (output.SecretList) {
       output.SecretList.forEach((secretEntry) => {
-        if (secretEntry.Name && matchesSchema(secretEntry.Name, keySchema)) {
+        if (secretEntry.Name && matchesSchema(secretEntry.Name, environment, keySchema)) {
           awsSecretsRecord[secretEntry.Name] = secretEntry;
         }
       });
@@ -307,11 +311,11 @@ const processTags = ({

 export const AwsSecretsManagerSyncFns = {
   syncSecrets: async (secretSync: TAwsSecretsManagerSyncWithCredentials, secretMap: TSecretMap) => {
-    const { destinationConfig, syncOptions } = secretSync;
+    const { destinationConfig, syncOptions, environment } = secretSync;

     const client = await getSecretsManagerClient(secretSync);

-    const awsSecretsRecord = await getSecretsRecord(client, syncOptions.keySchema);
+    const awsSecretsRecord = await getSecretsRecord(client, environment?.slug || "", syncOptions.keySchema);

     const awsValuesRecord = await getSecretValuesRecord(client, awsSecretsRecord);

@@ -401,7 +405,7 @@ export const AwsSecretsManagerSyncFns = {

     for await (const secretKey of Object.keys(awsSecretsRecord)) {
       // eslint-disable-next-line no-continue
-      if (!matchesSchema(secretKey, syncOptions.keySchema)) continue;
+      if (!matchesSchema(secretKey, environment?.slug || "", syncOptions.keySchema)) continue;

       if (!(secretKey in secretMap) || !secretMap[secretKey].value) {
         try {
@@ -468,7 +472,11 @@ export const AwsSecretsManagerSyncFns = {
   getSecrets: async (secretSync: TAwsSecretsManagerSyncWithCredentials): Promise<TSecretMap> => {
     const client = await getSecretsManagerClient(secretSync);

-    const awsSecretsRecord = await getSecretsRecord(client, secretSync.syncOptions.keySchema);
+    const awsSecretsRecord = await getSecretsRecord(
+      client,
+      secretSync.environment?.slug || "",
+      secretSync.syncOptions.keySchema
+    );
     const awsValuesRecord = await getSecretValuesRecord(client, awsSecretsRecord);

     const { destinationConfig } = secretSync;
@@ -503,11 +511,11 @@ export const AwsSecretsManagerSyncFns = {
     }
   },
   removeSecrets: async (secretSync: TAwsSecretsManagerSyncWithCredentials, secretMap: TSecretMap) => {
-    const { destinationConfig, syncOptions } = secretSync;
+    const { destinationConfig, syncOptions, environment } = secretSync;

     const client = await getSecretsManagerClient(secretSync);

-    const awsSecretsRecord = await getSecretsRecord(client, syncOptions.keySchema);
+    const awsSecretsRecord = await getSecretsRecord(client, environment?.slug || "", syncOptions.keySchema);

     if (destinationConfig.mappingBehavior === AwsSecretsManagerSyncMappingBehavior.OneToOne) {
       for await (const secretKey of Object.keys(awsSecretsRecord)) {
@@ -141,7 +141,7 @@ export const azureAppConfigurationSyncFactory = ({

     for await (const key of Object.keys(azureAppConfigSecrets)) {
       // eslint-disable-next-line no-continue
-      if (!matchesSchema(key, secretSync.syncOptions.keySchema)) continue;
+      if (!matchesSchema(key, secretSync.environment?.slug || "", secretSync.syncOptions.keySchema)) continue;

       const azureSecret = azureAppConfigSecrets[key];
       if (
@@ -194,7 +194,7 @@ export const azureKeyVaultSyncFactory = ({ kmsService, appConnectionDAL }: TAzur

     for await (const deleteSecretKey of deleteSecrets.filter(
       (secret) =>
-        matchesSchema(secret, secretSync.syncOptions.keySchema) &&
+        matchesSchema(secret, secretSync.environment?.slug || "", secretSync.syncOptions.keySchema) &&
         !setSecrets.find((setSecret) => setSecret.key === secret)
     )) {
       await request.delete(`${secretSync.destinationConfig.vaultBaseUrl}/secrets/${deleteSecretKey}?api-version=7.3`, {
@@ -118,7 +118,7 @@ export const camundaSyncFactory = ({ kmsService, appConnectionDAL }: TCamundaSec

     for await (const secret of Object.keys(camundaSecrets)) {
       // eslint-disable-next-line no-continue
-      if (!matchesSchema(secret, secretSync.syncOptions.keySchema)) continue;
+      if (!matchesSchema(secret, secretSync.environment?.slug || "", secretSync.syncOptions.keySchema)) continue;

       if (!(secret in secretMap) || !secretMap[secret].value) {
         try {
@@ -117,7 +117,7 @@ export const databricksSyncFactory = ({ kmsService, appConnectionDAL }: TDatabri

     for await (const secret of databricksSecretKeys) {
       // eslint-disable-next-line no-continue
-      if (!matchesSchema(secret.key, secretSync.syncOptions.keySchema)) continue;
+      if (!matchesSchema(secret.key, secretSync.environment?.slug || "", secretSync.syncOptions.keySchema)) continue;

       if (!(secret.key in secretMap)) {
         await deleteDatabricksSecrets({
Some files were not shown because too many files have changed in this diff.