Mirror of https://github.com/Infisical/infisical.git (synced 2025-07-25 14:07:47 +00:00)

Compare commits: misc/updat...gateway-ne (54 commits)
| Author | SHA1 | Date |
| --- | --- | --- |
|  | 9cc17452fa |  |
|  | 93ba6f7b58 |  |
|  | 6fddecdf82 |  |
|  | 99e2c85f8f |  |
|  | 6e1504dc73 |  |
|  | 07d930f608 |  |
|  | 696bbcb072 |  |
|  | 6c52847dec |  |
|  | caeda09b21 |  |
|  | 1201baf35c |  |
|  | 5d5f843a9f |  |
|  | caca23b56c |  |
|  | 01ea22f167 |  |
|  | 83c53b9d5a |  |
|  | 8cc457d49a |  |
|  | 540374f543 |  |
|  | 4edb90d644 |  |
|  | 1a7151aba7 |  |
|  | 80d2d9d2cf |  |
|  | 4268fdea44 |  |
|  | 781965767d |  |
|  | fef7e43869 |  |
|  | 9e651a58e3 |  |
|  | 0fbf8efd3a |  |
|  | dcb77bbdd4 |  |
|  | 36f7e7d81b |  |
|  | 8f97b3ad87 |  |
|  | be80444ec2 |  |
|  | 6f2043dc26 |  |
|  | 95fcf560a5 |  |
|  | d8ee05bfba |  |
|  | 274952544f |  |
|  | d23beaedf1 |  |
|  | 817e762e6b |  |
|  | ce5712606f |  |
|  | ce67e5f137 |  |
|  | 440c45fd42 |  |
|  | 893a042c25 |  |
|  | f3fb65fcc3 |  |
|  | c0add863be |  |
|  | 98ab969356 |  |
|  | d4523b0ca4 |  |
|  | 2be8c47ae8 |  |
|  | 8730d14104 |  |
|  | d924580599 |  |
|  | 22f32e060b |  |
|  | b4f26aac25 |  |
|  | b634a6c371 |  |
|  | 080ae5ce6f |  |
|  | dfcf613023 |  |
|  | 3ae2ec1f51 |  |
|  | ce4e35e908 |  |
|  | 4773336a04 |  |
|  | e6c97510ca |  |
@@ -107,6 +107,14 @@ INF_APP_CONNECTION_GITHUB_APP_PRIVATE_KEY=
INF_APP_CONNECTION_GITHUB_APP_SLUG=
INF_APP_CONNECTION_GITHUB_APP_ID=

#github radar app connection
INF_APP_CONNECTION_GITHUB_RADAR_APP_CLIENT_ID=
INF_APP_CONNECTION_GITHUB_RADAR_APP_CLIENT_SECRET=
INF_APP_CONNECTION_GITHUB_RADAR_APP_PRIVATE_KEY=
INF_APP_CONNECTION_GITHUB_RADAR_APP_SLUG=
INF_APP_CONNECTION_GITHUB_RADAR_APP_ID=
INF_APP_CONNECTION_GITHUB_RADAR_APP_WEBHOOK_SECRET=

#gcp app connection
INF_APP_CONNECTION_GCP_SERVICE_ACCOUNT_CREDENTIAL=
backend/src/@types/fastify.d.ts (vendored, 2 changes)
@@ -37,6 +37,7 @@ import { TSecretApprovalRequestServiceFactory } from "@app/ee/services/secret-ap
import { TSecretRotationServiceFactory } from "@app/ee/services/secret-rotation/secret-rotation-service";
import { TSecretRotationV2ServiceFactory } from "@app/ee/services/secret-rotation-v2/secret-rotation-v2-service";
import { TSecretScanningServiceFactory } from "@app/ee/services/secret-scanning/secret-scanning-service";
import { TSecretScanningV2ServiceFactory } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-service";
import { TSecretSnapshotServiceFactory } from "@app/ee/services/secret-snapshot/secret-snapshot-service";
import { TSshCertificateAuthorityServiceFactory } from "@app/ee/services/ssh/ssh-certificate-authority-service";
import { TSshCertificateTemplateServiceFactory } from "@app/ee/services/ssh-certificate-template/ssh-certificate-template-service";
@@ -271,6 +272,7 @@ declare module "fastify" {
    microsoftTeams: TMicrosoftTeamsServiceFactory;
    assumePrivileges: TAssumePrivilegeServiceFactory;
    githubOrgSync: TGithubOrgSyncServiceFactory;
    secretScanningV2: TSecretScanningV2ServiceFactory;
    internalCertificateAuthority: TInternalCertificateAuthorityServiceFactory;
    pkiTemplate: TPkiTemplatesServiceFactory;
  };
backend/src/@types/knex.d.ts (vendored, 40 changes)
@@ -336,9 +336,24 @@ import {
  TSecretRotationV2SecretMappingsInsert,
  TSecretRotationV2SecretMappingsUpdate,
  TSecrets,
  TSecretScanningConfigs,
  TSecretScanningConfigsInsert,
  TSecretScanningConfigsUpdate,
  TSecretScanningDataSources,
  TSecretScanningDataSourcesInsert,
  TSecretScanningDataSourcesUpdate,
  TSecretScanningFindings,
  TSecretScanningFindingsInsert,
  TSecretScanningFindingsUpdate,
  TSecretScanningGitRisks,
  TSecretScanningGitRisksInsert,
  TSecretScanningGitRisksUpdate,
  TSecretScanningResources,
  TSecretScanningResourcesInsert,
  TSecretScanningResourcesUpdate,
  TSecretScanningScans,
  TSecretScanningScansInsert,
  TSecretScanningScansUpdate,
  TSecretSharing,
  TSecretSharingInsert,
  TSecretSharingUpdate,
@@ -1107,5 +1122,30 @@ declare module "knex/types/tables" {
      TGithubOrgSyncConfigsInsert,
      TGithubOrgSyncConfigsUpdate
    >;
    [TableName.SecretScanningDataSource]: KnexOriginal.CompositeTableType<
      TSecretScanningDataSources,
      TSecretScanningDataSourcesInsert,
      TSecretScanningDataSourcesUpdate
    >;
    [TableName.SecretScanningResource]: KnexOriginal.CompositeTableType<
      TSecretScanningResources,
      TSecretScanningResourcesInsert,
      TSecretScanningResourcesUpdate
    >;
    [TableName.SecretScanningScan]: KnexOriginal.CompositeTableType<
      TSecretScanningScans,
      TSecretScanningScansInsert,
      TSecretScanningScansUpdate
    >;
    [TableName.SecretScanningFinding]: KnexOriginal.CompositeTableType<
      TSecretScanningFindings,
      TSecretScanningFindingsInsert,
      TSecretScanningFindingsUpdate
    >;
    [TableName.SecretScanningConfig]: KnexOriginal.CompositeTableType<
      TSecretScanningConfigs,
      TSecretScanningConfigsInsert,
      TSecretScanningConfigsUpdate
    >;
  }
}
backend/src/db/migrations/20250517002225_secret-scanning-v2.ts (new file, 107 lines)
@@ -0,0 +1,107 @@
import { Knex } from "knex";

import { TableName } from "@app/db/schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "@app/db/utils";
import {
  SecretScanningFindingStatus,
  SecretScanningScanStatus
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";

export async function up(knex: Knex): Promise<void> {
  if (!(await knex.schema.hasTable(TableName.SecretScanningDataSource))) {
    await knex.schema.createTable(TableName.SecretScanningDataSource, (t) => {
      t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
      t.string("externalId").index(); // if we need a unique way of identifying this data source from an external resource
      t.string("name", 48).notNullable();
      t.string("description");
      t.string("type").notNullable();
      t.jsonb("config").notNullable();
      t.binary("encryptedCredentials"); // webhook credentials, etc.
      t.uuid("connectionId");
      t.boolean("isAutoScanEnabled").defaultTo(true);
      t.foreign("connectionId").references("id").inTable(TableName.AppConnection);
      t.string("projectId").notNullable();
      t.foreign("projectId").references("id").inTable(TableName.Project).onDelete("CASCADE");
      t.timestamps(true, true, true);
      t.boolean("isDisconnected").notNullable().defaultTo(false);
      t.unique(["projectId", "name"]);
    });
    await createOnUpdateTrigger(knex, TableName.SecretScanningDataSource);
  }

  if (!(await knex.schema.hasTable(TableName.SecretScanningResource))) {
    await knex.schema.createTable(TableName.SecretScanningResource, (t) => {
      t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
      t.string("externalId").notNullable();
      t.string("name").notNullable();
      t.string("type").notNullable();
      t.uuid("dataSourceId").notNullable();
      t.foreign("dataSourceId").references("id").inTable(TableName.SecretScanningDataSource).onDelete("CASCADE");
      t.timestamps(true, true, true);
      t.unique(["dataSourceId", "externalId"]);
    });
    await createOnUpdateTrigger(knex, TableName.SecretScanningResource);
  }

  if (!(await knex.schema.hasTable(TableName.SecretScanningScan))) {
    await knex.schema.createTable(TableName.SecretScanningScan, (t) => {
      t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
      t.string("status").notNullable().defaultTo(SecretScanningScanStatus.Queued);
      t.string("statusMessage", 1024);
      t.string("type").notNullable();
      t.uuid("resourceId").notNullable();
      t.foreign("resourceId").references("id").inTable(TableName.SecretScanningResource).onDelete("CASCADE");
      t.timestamp("createdAt").defaultTo(knex.fn.now());
    });
  }

  if (!(await knex.schema.hasTable(TableName.SecretScanningFinding))) {
    await knex.schema.createTable(TableName.SecretScanningFinding, (t) => {
      t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
      t.string("dataSourceName").notNullable();
      t.string("dataSourceType").notNullable();
      t.string("resourceName").notNullable();
      t.string("resourceType").notNullable();
      t.string("rule").notNullable();
      t.string("severity").notNullable();
      t.string("status").notNullable().defaultTo(SecretScanningFindingStatus.Unresolved);
      t.string("remarks");
      t.string("fingerprint").notNullable();
      t.jsonb("details").notNullable();
      t.string("projectId").notNullable();
      t.foreign("projectId").references("id").inTable(TableName.Project).onDelete("CASCADE");
      t.uuid("scanId");
      t.foreign("scanId").references("id").inTable(TableName.SecretScanningScan).onDelete("SET NULL");
      t.timestamps(true, true, true);
      t.unique(["projectId", "fingerprint"]);
    });
    await createOnUpdateTrigger(knex, TableName.SecretScanningFinding);
  }

  if (!(await knex.schema.hasTable(TableName.SecretScanningConfig))) {
    await knex.schema.createTable(TableName.SecretScanningConfig, (t) => {
      t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
      t.string("projectId").notNullable().unique();
      t.foreign("projectId").references("id").inTable(TableName.Project).onDelete("CASCADE");
      t.string("content", 5000);
      t.timestamps(true, true, true);
    });
    await createOnUpdateTrigger(knex, TableName.SecretScanningConfig);
  }
}

export async function down(knex: Knex): Promise<void> {
  await knex.schema.dropTableIfExists(TableName.SecretScanningFinding);

  await dropOnUpdateTrigger(knex, TableName.SecretScanningFinding);
  await knex.schema.dropTableIfExists(TableName.SecretScanningScan);

  await knex.schema.dropTableIfExists(TableName.SecretScanningResource);
  await dropOnUpdateTrigger(knex, TableName.SecretScanningResource);

  await knex.schema.dropTableIfExists(TableName.SecretScanningDataSource);
  await dropOnUpdateTrigger(knex, TableName.SecretScanningDataSource);

  await knex.schema.dropTableIfExists(TableName.SecretScanningConfig);
  await dropOnUpdateTrigger(knex, TableName.SecretScanningConfig);
}
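The migration above only defines the new secret-scanning tables. As a minimal sketch of applying it outside the repo's own tooling (the connection variable name and migrations directory below are assumptions, not taken from this changeset), Knex's programmatic migrator could be used like this:

```ts
// Sketch only: apply pending migrations with Knex's migrator API.
// DB_CONNECTION_URI and the directory path are illustrative assumptions.
import knex from "knex";

const db = knex({ client: "pg", connection: process.env.DB_CONNECTION_URI });

void db.migrate
  .latest({ directory: "./src/db/migrations", extension: "ts" })
  .then(() => db.destroy());
```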
@@ -111,7 +111,12 @@ export * from "./secret-rotation-outputs";
export * from "./secret-rotation-v2-secret-mappings";
export * from "./secret-rotations";
export * from "./secret-rotations-v2";
export * from "./secret-scanning-configs";
export * from "./secret-scanning-data-sources";
export * from "./secret-scanning-findings";
export * from "./secret-scanning-git-risks";
export * from "./secret-scanning-resources";
export * from "./secret-scanning-scans";
export * from "./secret-sharing";
export * from "./secret-snapshot-folders";
export * from "./secret-snapshot-secrets";
@@ -159,7 +159,12 @@ export enum TableName {
  MicrosoftTeamsIntegrations = "microsoft_teams_integrations",
  ProjectMicrosoftTeamsConfigs = "project_microsoft_teams_configs",
  SecretReminderRecipients = "secret_reminder_recipients",
  GithubOrgSyncConfig = "github_org_sync_configs"
  GithubOrgSyncConfig = "github_org_sync_configs",
  SecretScanningDataSource = "secret_scanning_data_sources",
  SecretScanningResource = "secret_scanning_resources",
  SecretScanningScan = "secret_scanning_scans",
  SecretScanningFinding = "secret_scanning_findings",
  SecretScanningConfig = "secret_scanning_configs"
}

export type TImmutableDBKeys = "id" | "createdAt" | "updatedAt";
@@ -248,7 +253,8 @@ export enum ProjectType {
  SecretManager = "secret-manager",
  CertificateManager = "cert-manager",
  KMS = "kms",
  SSH = "ssh"
  SSH = "ssh",
  SecretScanning = "secret-scanning"
}

export enum ActionProjectType {
@@ -256,6 +262,7 @@ export enum ActionProjectType {
  CertificateManager = ProjectType.CertificateManager,
  KMS = ProjectType.KMS,
  SSH = ProjectType.SSH,
  SecretScanning = ProjectType.SecretScanning,
  // project operations that happen on all types
  Any = "any"
}
backend/src/db/schemas/secret-scanning-configs.ts (new file, 20 lines)
@@ -0,0 +1,20 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.

import { z } from "zod";

import { TImmutableDBKeys } from "./models";

export const SecretScanningConfigsSchema = z.object({
  id: z.string().uuid(),
  projectId: z.string(),
  content: z.string().nullable().optional(),
  createdAt: z.date(),
  updatedAt: z.date()
});

export type TSecretScanningConfigs = z.infer<typeof SecretScanningConfigsSchema>;
export type TSecretScanningConfigsInsert = Omit<z.input<typeof SecretScanningConfigsSchema>, TImmutableDBKeys>;
export type TSecretScanningConfigsUpdate = Partial<Omit<z.input<typeof SecretScanningConfigsSchema>, TImmutableDBKeys>>;
backend/src/db/schemas/secret-scanning-data-sources.ts (new file, 32 lines)
@@ -0,0 +1,32 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.

import { z } from "zod";

import { zodBuffer } from "@app/lib/zod";

import { TImmutableDBKeys } from "./models";

export const SecretScanningDataSourcesSchema = z.object({
  id: z.string().uuid(),
  externalId: z.string().nullable().optional(),
  name: z.string(),
  description: z.string().nullable().optional(),
  type: z.string(),
  config: z.unknown(),
  encryptedCredentials: zodBuffer.nullable().optional(),
  connectionId: z.string().uuid().nullable().optional(),
  isAutoScanEnabled: z.boolean().default(true).nullable().optional(),
  projectId: z.string(),
  createdAt: z.date(),
  updatedAt: z.date(),
  isDisconnected: z.boolean().default(false)
});

export type TSecretScanningDataSources = z.infer<typeof SecretScanningDataSourcesSchema>;
export type TSecretScanningDataSourcesInsert = Omit<z.input<typeof SecretScanningDataSourcesSchema>, TImmutableDBKeys>;
export type TSecretScanningDataSourcesUpdate = Partial<
  Omit<z.input<typeof SecretScanningDataSourcesSchema>, TImmutableDBKeys>
>;
backend/src/db/schemas/secret-scanning-findings.ts (new file, 32 lines)
@@ -0,0 +1,32 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.

import { z } from "zod";

import { TImmutableDBKeys } from "./models";

export const SecretScanningFindingsSchema = z.object({
  id: z.string().uuid(),
  dataSourceName: z.string(),
  dataSourceType: z.string(),
  resourceName: z.string(),
  resourceType: z.string(),
  rule: z.string(),
  severity: z.string(),
  status: z.string().default("unresolved"),
  remarks: z.string().nullable().optional(),
  fingerprint: z.string(),
  details: z.unknown(),
  projectId: z.string(),
  scanId: z.string().uuid().nullable().optional(),
  createdAt: z.date(),
  updatedAt: z.date()
});

export type TSecretScanningFindings = z.infer<typeof SecretScanningFindingsSchema>;
export type TSecretScanningFindingsInsert = Omit<z.input<typeof SecretScanningFindingsSchema>, TImmutableDBKeys>;
export type TSecretScanningFindingsUpdate = Partial<
  Omit<z.input<typeof SecretScanningFindingsSchema>, TImmutableDBKeys>
>;
backend/src/db/schemas/secret-scanning-resources.ts (new file, 24 lines)
@@ -0,0 +1,24 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.

import { z } from "zod";

import { TImmutableDBKeys } from "./models";

export const SecretScanningResourcesSchema = z.object({
  id: z.string().uuid(),
  externalId: z.string(),
  name: z.string(),
  type: z.string(),
  dataSourceId: z.string().uuid(),
  createdAt: z.date(),
  updatedAt: z.date()
});

export type TSecretScanningResources = z.infer<typeof SecretScanningResourcesSchema>;
export type TSecretScanningResourcesInsert = Omit<z.input<typeof SecretScanningResourcesSchema>, TImmutableDBKeys>;
export type TSecretScanningResourcesUpdate = Partial<
  Omit<z.input<typeof SecretScanningResourcesSchema>, TImmutableDBKeys>
>;
backend/src/db/schemas/secret-scanning-scans.ts (new file, 21 lines)
@@ -0,0 +1,21 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.

import { z } from "zod";

import { TImmutableDBKeys } from "./models";

export const SecretScanningScansSchema = z.object({
  id: z.string().uuid(),
  status: z.string().default("queued"),
  statusMessage: z.string().nullable().optional(),
  type: z.string(),
  resourceId: z.string().uuid(),
  createdAt: z.date().nullable().optional()
});

export type TSecretScanningScans = z.infer<typeof SecretScanningScansSchema>;
export type TSecretScanningScansInsert = Omit<z.input<typeof SecretScanningScansSchema>, TImmutableDBKeys>;
export type TSecretScanningScansUpdate = Partial<Omit<z.input<typeof SecretScanningScansSchema>, TImmutableDBKeys>>;
@@ -2,6 +2,10 @@ import {
  registerSecretRotationV2Router,
  SECRET_ROTATION_REGISTER_ROUTER_MAP
} from "@app/ee/routes/v2/secret-rotation-v2-routers";
import {
  registerSecretScanningV2Router,
  SECRET_SCANNING_REGISTER_ROUTER_MAP
} from "@app/ee/routes/v2/secret-scanning-v2-routers";

import { registerIdentityProjectAdditionalPrivilegeRouter } from "./identity-project-additional-privilege-router";
import { registerProjectRoleRouter } from "./project-role-router";
@@ -31,4 +35,17 @@ export const registerV2EERoutes = async (server: FastifyZodProvider) => {
    },
    { prefix: "/secret-rotations" }
  );

  await server.register(
    async (secretScanningV2Router) => {
      // register generic secret scanning endpoints
      await secretScanningV2Router.register(registerSecretScanningV2Router);

      // register service-specific secret scanning endpoints (gitlab/github, etc.)
      for await (const [type, router] of Object.entries(SECRET_SCANNING_REGISTER_ROUTER_MAP)) {
        await secretScanningV2Router.register(router, { prefix: `data-sources/${type}` });
      }
    },
    { prefix: "/secret-scanning" }
  );
};
@@ -0,0 +1,16 @@
import { registerSecretScanningEndpoints } from "@app/ee/routes/v2/secret-scanning-v2-routers/secret-scanning-v2-endpoints";
import {
  CreateGitHubDataSourceSchema,
  GitHubDataSourceSchema,
  UpdateGitHubDataSourceSchema
} from "@app/ee/services/secret-scanning-v2/github";
import { SecretScanningDataSource } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";

export const registerGitHubSecretScanningRouter = async (server: FastifyZodProvider) =>
  registerSecretScanningEndpoints({
    type: SecretScanningDataSource.GitHub,
    server,
    responseSchema: GitHubDataSourceSchema,
    createSchema: CreateGitHubDataSourceSchema,
    updateSchema: UpdateGitHubDataSourceSchema
  });
backend/src/ee/routes/v2/secret-scanning-v2-routers/index.ts (new file, 12 lines)
@@ -0,0 +1,12 @@
import { SecretScanningDataSource } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";

import { registerGitHubSecretScanningRouter } from "./github-secret-scanning-router";

export * from "./secret-scanning-v2-router";

export const SECRET_SCANNING_REGISTER_ROUTER_MAP: Record<
  SecretScanningDataSource,
  (server: FastifyZodProvider) => Promise<void>
> = {
  [SecretScanningDataSource.GitHub]: registerGitHubSecretScanningRouter
};
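The map above is keyed by every SecretScanningDataSource value, so each source type gets its own router built from the shared endpoint factory. As a rough sketch of how another source would plug in (GitLab is only mentioned in a comment in this changeset; the enum member, schemas, and router below are hypothetical and not part of the diff):

```ts
// Hypothetical sketch: SecretScanningDataSource.GitLab and the GitLab schemas do not exist in this changeset.
export const registerGitLabSecretScanningRouter = async (server: FastifyZodProvider) =>
  registerSecretScanningEndpoints({
    type: SecretScanningDataSource.GitLab, // hypothetical enum member
    server,
    responseSchema: GitLabDataSourceSchema, // hypothetical schemas
    createSchema: CreateGitLabDataSourceSchema,
    updateSchema: UpdateGitLabDataSourceSchema
  });

export const SECRET_SCANNING_REGISTER_ROUTER_MAP: Record<
  SecretScanningDataSource,
  (server: FastifyZodProvider) => Promise<void>
> = {
  [SecretScanningDataSource.GitHub]: registerGitHubSecretScanningRouter,
  [SecretScanningDataSource.GitLab]: registerGitLabSecretScanningRouter
};
```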
@@ -0,0 +1,593 @@
import { z } from "zod";

import { SecretScanningResourcesSchema, SecretScanningScansSchema } from "@app/db/schemas";
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import {
  SecretScanningDataSource,
  SecretScanningScanStatus
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
import { SECRET_SCANNING_DATA_SOURCE_NAME_MAP } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-maps";
import {
  TSecretScanningDataSource,
  TSecretScanningDataSourceInput
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-types";
import { ApiDocsTags, SecretScanningDataSources } from "@app/lib/api-docs";
import { startsWithVowel } from "@app/lib/fn";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";

export const registerSecretScanningEndpoints = <
  T extends TSecretScanningDataSource,
  I extends TSecretScanningDataSourceInput
>({
  server,
  type,
  createSchema,
  updateSchema,
  responseSchema
}: {
  type: SecretScanningDataSource;
  server: FastifyZodProvider;
  createSchema: z.ZodType<{
    name: string;
    projectId: string;
    connectionId?: string;
    config: Partial<I["config"]>;
    description?: string | null;
    isAutoScanEnabled?: boolean;
  }>;
  updateSchema: z.ZodType<{
    name?: string;
    config?: Partial<I["config"]>;
    description?: string | null;
    isAutoScanEnabled?: boolean;
  }>;
  responseSchema: z.ZodTypeAny;
}) => {
  const sourceType = SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type];

  server.route({
    method: "GET",
    url: `/`,
    config: {
      rateLimit: readLimit
    },
    schema: {
      hide: false,
      tags: [ApiDocsTags.SecretScanning],
      description: `List the ${sourceType} Data Sources for the specified project.`,
      querystring: z.object({
        projectId: z
          .string()
          .trim()
          .min(1, "Project ID required")
          .describe(SecretScanningDataSources.LIST(type).projectId)
      }),
      response: {
        200: z.object({ dataSources: responseSchema.array() })
      }
    },
    onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
    handler: async (req) => {
      const {
        query: { projectId }
      } = req;

      const dataSources = (await server.services.secretScanningV2.listSecretScanningDataSourcesByProjectId(
        { projectId, type },
        req.permission
      )) as T[];

      await server.services.auditLog.createAuditLog({
        ...req.auditLogInfo,
        projectId,
        event: {
          type: EventType.SECRET_SCANNING_DATA_SOURCE_LIST,
          metadata: {
            type,
            count: dataSources.length,
            dataSourceIds: dataSources.map((source) => source.id)
          }
        }
      });

      return { dataSources };
    }
  });

  server.route({
    method: "GET",
    url: "/:dataSourceId",
    config: {
      rateLimit: readLimit
    },
    schema: {
      hide: false,
      tags: [ApiDocsTags.SecretScanning],
      description: `Get the specified ${sourceType} Data Source by ID.`,
      params: z.object({
        dataSourceId: z.string().uuid().describe(SecretScanningDataSources.GET_BY_ID(type).dataSourceId)
      }),
      response: {
        200: z.object({ dataSource: responseSchema })
      }
    },
    onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
    handler: async (req) => {
      const { dataSourceId } = req.params;

      const dataSource = (await server.services.secretScanningV2.findSecretScanningDataSourceById(
        { dataSourceId, type },
        req.permission
      )) as T;

      await server.services.auditLog.createAuditLog({
        ...req.auditLogInfo,
        projectId: dataSource.projectId,
        event: {
          type: EventType.SECRET_SCANNING_DATA_SOURCE_GET,
          metadata: {
            dataSourceId,
            type
          }
        }
      });

      return { dataSource };
    }
  });

  server.route({
    method: "GET",
    url: `/data-source-name/:dataSourceName`,
    config: {
      rateLimit: readLimit
    },
    schema: {
      hide: false,
      tags: [ApiDocsTags.SecretScanning],
      description: `Get the specified ${sourceType} Data Source by name and project ID.`,
      params: z.object({
        sourceName: z
          .string()
          .trim()
          .min(1, "Data Source name required")
          .describe(SecretScanningDataSources.GET_BY_NAME(type).sourceName)
      }),
      querystring: z.object({
        projectId: z
          .string()
          .trim()
          .min(1, "Project ID required")
          .describe(SecretScanningDataSources.GET_BY_NAME(type).projectId)
      }),
      response: {
        200: z.object({ dataSource: responseSchema })
      }
    },
    onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
    handler: async (req) => {
      const { sourceName } = req.params;
      const { projectId } = req.query;

      const dataSource = (await server.services.secretScanningV2.findSecretScanningDataSourceByName(
        { sourceName, projectId, type },
        req.permission
      )) as T;

      await server.services.auditLog.createAuditLog({
        ...req.auditLogInfo,
        projectId,
        event: {
          type: EventType.SECRET_SCANNING_DATA_SOURCE_GET,
          metadata: {
            dataSourceId: dataSource.id,
            type
          }
        }
      });

      return { dataSource };
    }
  });

  server.route({
    method: "POST",
    url: "/",
    config: {
      rateLimit: writeLimit
    },
    schema: {
      hide: false,
      tags: [ApiDocsTags.SecretScanning],
      description: `Create ${
        startsWithVowel(sourceType) ? "an" : "a"
      } ${sourceType} Data Source for the specified project.`,
      body: createSchema,
      response: {
        200: z.object({ dataSource: responseSchema })
      }
    },
    onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
    handler: async (req) => {
      const dataSource = (await server.services.secretScanningV2.createSecretScanningDataSource(
        { ...req.body, type },
        req.permission
      )) as T;

      await server.services.auditLog.createAuditLog({
        ...req.auditLogInfo,
        projectId: dataSource.projectId,
        event: {
          type: EventType.SECRET_SCANNING_DATA_SOURCE_CREATE,
          metadata: {
            dataSourceId: dataSource.id,
            type,
            ...req.body
          }
        }
      });

      return { dataSource };
    }
  });

  server.route({
    method: "PATCH",
    url: "/:dataSourceId",
    config: {
      rateLimit: writeLimit
    },
    schema: {
      hide: false,
      tags: [ApiDocsTags.SecretScanning],
      description: `Update the specified ${sourceType} Data Source.`,
      params: z.object({
        dataSourceId: z.string().uuid().describe(SecretScanningDataSources.UPDATE(type).dataSourceId)
      }),
      body: updateSchema,
      response: {
        200: z.object({ dataSource: responseSchema })
      }
    },
    onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
    handler: async (req) => {
      const { dataSourceId } = req.params;

      const dataSource = (await server.services.secretScanningV2.updateSecretScanningDataSource(
        { ...req.body, dataSourceId, type },
        req.permission
      )) as T;

      await server.services.auditLog.createAuditLog({
        ...req.auditLogInfo,
        projectId: dataSource.projectId,
        event: {
          type: EventType.SECRET_SCANNING_DATA_SOURCE_UPDATE,
          metadata: {
            dataSourceId,
            type,
            ...req.body
          }
        }
      });

      return { dataSource };
    }
  });

  server.route({
    method: "DELETE",
    url: `/:dataSourceId`,
    config: {
      rateLimit: writeLimit
    },
    schema: {
      hide: false,
      tags: [ApiDocsTags.SecretScanning],
      description: `Delete the specified ${sourceType} Data Source.`,
      params: z.object({
        dataSourceId: z.string().uuid().describe(SecretScanningDataSources.DELETE(type).dataSourceId)
      }),
      response: {
        200: z.object({ dataSource: responseSchema })
      }
    },
    onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
    handler: async (req) => {
      const { dataSourceId } = req.params;

      const dataSource = (await server.services.secretScanningV2.deleteSecretScanningDataSource(
        { type, dataSourceId },
        req.permission
      )) as T;

      await server.services.auditLog.createAuditLog({
        ...req.auditLogInfo,
        projectId: dataSource.projectId,
        event: {
          type: EventType.SECRET_SCANNING_DATA_SOURCE_DELETE,
          metadata: {
            type,
            dataSourceId
          }
        }
      });

      return { dataSource };
    }
  });

  server.route({
    method: "POST",
    url: `/:dataSourceId/scan`,
    config: {
      rateLimit: writeLimit
    },
    schema: {
      hide: false,
      tags: [ApiDocsTags.SecretScanning],
      description: `Trigger a scan for the specified ${sourceType} Data Source.`,
      params: z.object({
        dataSourceId: z.string().uuid().describe(SecretScanningDataSources.SCAN(type).dataSourceId)
      }),
      response: {
        200: z.object({ dataSource: responseSchema })
      }
    },
    onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
    handler: async (req) => {
      const { dataSourceId } = req.params;

      const dataSource = (await server.services.secretScanningV2.triggerSecretScanningDataSourceScan(
        { type, dataSourceId },
        req.permission
      )) as T;

      await server.services.auditLog.createAuditLog({
        ...req.auditLogInfo,
        projectId: dataSource.projectId,
        event: {
          type: EventType.SECRET_SCANNING_DATA_SOURCE_TRIGGER_SCAN,
          metadata: {
            type,
            dataSourceId
          }
        }
      });

      return { dataSource };
    }
  });

  server.route({
    method: "POST",
    url: `/:dataSourceId/resources/:resourceId/scan`,
    config: {
      rateLimit: writeLimit
    },
    schema: {
      hide: false,
      tags: [ApiDocsTags.SecretScanning],
      description: `Trigger a scan for the specified ${sourceType} Data Source resource.`,
      params: z.object({
        dataSourceId: z.string().uuid().describe(SecretScanningDataSources.SCAN(type).dataSourceId),
        resourceId: z.string().uuid().describe(SecretScanningDataSources.SCAN(type).resourceId)
      }),
      response: {
        200: z.object({ dataSource: responseSchema })
      }
    },
    onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
    handler: async (req) => {
      const { dataSourceId, resourceId } = req.params;

      const dataSource = (await server.services.secretScanningV2.triggerSecretScanningDataSourceScan(
        { type, dataSourceId, resourceId },
        req.permission
      )) as T;

      await server.services.auditLog.createAuditLog({
        ...req.auditLogInfo,
        projectId: dataSource.projectId,
        event: {
          type: EventType.SECRET_SCANNING_DATA_SOURCE_TRIGGER_SCAN,
          metadata: {
            type,
            dataSourceId,
            resourceId
          }
        }
      });

      return { dataSource };
    }
  });

  server.route({
    method: "GET",
    url: "/:dataSourceId/resources",
    config: {
      rateLimit: readLimit
    },
    schema: {
      hide: false,
      tags: [ApiDocsTags.SecretScanning],
      description: `Get the resources associated with the specified ${sourceType} Data Source by ID.`,
      params: z.object({
        dataSourceId: z.string().uuid().describe(SecretScanningDataSources.LIST_RESOURCES(type).dataSourceId)
      }),
      response: {
        200: z.object({ resources: SecretScanningResourcesSchema.array() })
      }
    },
    onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
    handler: async (req) => {
      const { dataSourceId } = req.params;

      const { resources, projectId } = await server.services.secretScanningV2.listSecretScanningResourcesByDataSourceId(
        { dataSourceId, type },
        req.permission
      );

      await server.services.auditLog.createAuditLog({
        ...req.auditLogInfo,
        projectId,
        event: {
          type: EventType.SECRET_SCANNING_RESOURCE_LIST,
          metadata: {
            dataSourceId,
            type,
            resourceIds: resources.map((resource) => resource.id),
            count: resources.length
          }
        }
      });

      return { resources };
    }
  });

  server.route({
    method: "GET",
    url: "/:dataSourceId/scans",
    config: {
      rateLimit: readLimit
    },
    schema: {
      hide: false,
      tags: [ApiDocsTags.SecretScanning],
      description: `Get the scans associated with the specified ${sourceType} Data Source by ID.`,
      params: z.object({
        dataSourceId: z.string().uuid().describe(SecretScanningDataSources.LIST_SCANS(type).dataSourceId)
      }),
      response: {
        200: z.object({ scans: SecretScanningScansSchema.array() })
      }
    },
    onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
    handler: async (req) => {
      const { dataSourceId } = req.params;

      const { scans, projectId } = await server.services.secretScanningV2.listSecretScanningScansByDataSourceId(
        { dataSourceId, type },
        req.permission
      );

      await server.services.auditLog.createAuditLog({
        ...req.auditLogInfo,
        projectId,
        event: {
          type: EventType.SECRET_SCANNING_SCAN_LIST,
          metadata: {
            dataSourceId,
            type,
            count: scans.length
          }
        }
      });

      return { scans };
    }
  });

  // not exposed, for UI only
  server.route({
    method: "GET",
    url: "/:dataSourceId/resources-dashboard",
    config: {
      rateLimit: readLimit
    },
    schema: {
      tags: [ApiDocsTags.SecretScanning],
      params: z.object({
        dataSourceId: z.string().uuid()
      }),
      response: {
        200: z.object({
          resources: SecretScanningResourcesSchema.extend({
            lastScannedAt: z.date().nullish(),
            lastScanStatus: z.nativeEnum(SecretScanningScanStatus).nullish(),
            lastScanStatusMessage: z.string().nullish(),
            unresolvedFindings: z.number()
          }).array()
        })
      }
    },
    onRequest: verifyAuth([AuthMode.JWT]),
    handler: async (req) => {
      const { dataSourceId } = req.params;

      const { resources, projectId } =
        await server.services.secretScanningV2.listSecretScanningResourcesWithDetailsByDataSourceId(
          { dataSourceId, type },
          req.permission
        );

      await server.services.auditLog.createAuditLog({
        ...req.auditLogInfo,
        projectId,
        event: {
          type: EventType.SECRET_SCANNING_RESOURCE_LIST,
          metadata: {
            dataSourceId,
            type,
            resourceIds: resources.map((resource) => resource.id),
            count: resources.length
          }
        }
      });

      return { resources };
    }
  });

  server.route({
    method: "GET",
    url: "/:dataSourceId/scans-dashboard",
    config: {
      rateLimit: readLimit
    },
    schema: {
      tags: [ApiDocsTags.SecretScanning],
      params: z.object({
        dataSourceId: z.string().uuid()
      }),
      response: {
        200: z.object({
          scans: SecretScanningScansSchema.extend({
            unresolvedFindings: z.number(),
            resolvedFindings: z.number(),
            resourceName: z.string()
          }).array()
        })
      }
    },
    onRequest: verifyAuth([AuthMode.JWT]),
    handler: async (req) => {
      const { dataSourceId } = req.params;

      const { scans, projectId } =
        await server.services.secretScanningV2.listSecretScanningScansWithDetailsByDataSourceId(
          { dataSourceId, type },
          req.permission
        );

      await server.services.auditLog.createAuditLog({
        ...req.auditLogInfo,
        projectId,
        event: {
          type: EventType.SECRET_SCANNING_SCAN_LIST,
          metadata: {
            dataSourceId,
            type,
            count: scans.length
          }
        }
      });

      return { scans };
    }
  });
};
@@ -0,0 +1,366 @@
import { z } from "zod";

import { SecretScanningConfigsSchema } from "@app/db/schemas";
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import { GitHubDataSourceListItemSchema } from "@app/ee/services/secret-scanning-v2/github";
import {
  SecretScanningFindingStatus,
  SecretScanningScanStatus
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
import {
  SecretScanningDataSourceSchema,
  SecretScanningFindingSchema
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-union-schemas";
import {
  ApiDocsTags,
  SecretScanningConfigs,
  SecretScanningDataSources,
  SecretScanningFindings
} from "@app/lib/api-docs";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";

const SecretScanningDataSourceOptionsSchema = z.discriminatedUnion("type", [GitHubDataSourceListItemSchema]);

export const registerSecretScanningV2Router = async (server: FastifyZodProvider) => {
  server.route({
    method: "GET",
    url: "/data-sources/options",
    config: {
      rateLimit: readLimit
    },
    schema: {
      hide: false,
      tags: [ApiDocsTags.SecretScanning],
      description: "List the available Secret Scanning Data Source Options.",
      response: {
        200: z.object({
          dataSourceOptions: SecretScanningDataSourceOptionsSchema.array()
        })
      }
    },
    onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
    handler: () => {
      const dataSourceOptions = server.services.secretScanningV2.listSecretScanningDataSourceOptions();
      return { dataSourceOptions };
    }
  });

  server.route({
    method: "GET",
    url: "/data-sources",
    config: {
      rateLimit: readLimit
    },
    schema: {
      hide: false,
      tags: [ApiDocsTags.SecretScanning],
      description: "List all the Secret Scanning Data Sources for the specified project.",
      querystring: z.object({
        projectId: z.string().trim().min(1, "Project ID required").describe(SecretScanningDataSources.LIST().projectId)
      }),
      response: {
        200: z.object({ dataSources: SecretScanningDataSourceSchema.array() })
      }
    },
    onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
    handler: async (req) => {
      const {
        query: { projectId },
        permission
      } = req;

      const dataSources = await server.services.secretScanningV2.listSecretScanningDataSourcesByProjectId(
        { projectId },
        permission
      );

      await server.services.auditLog.createAuditLog({
        ...req.auditLogInfo,
        projectId,
        event: {
          type: EventType.SECRET_SCANNING_DATA_SOURCE_LIST,
          metadata: {
            dataSourceIds: dataSources.map((dataSource) => dataSource.id),
            count: dataSources.length
          }
        }
      });

      return { dataSources };
    }
  });

  server.route({
    method: "GET",
    url: "/findings",
    config: {
      rateLimit: readLimit
    },
    schema: {
      hide: false,
      tags: [ApiDocsTags.SecretScanning],
      description: "List all the Secret Scanning Findings for the specified project.",
      querystring: z.object({
        projectId: z.string().trim().min(1, "Project ID required").describe(SecretScanningFindings.LIST.projectId)
      }),
      response: {
        200: z.object({ findings: SecretScanningFindingSchema.array() })
      }
    },
    onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
    handler: async (req) => {
      const {
        query: { projectId },
        permission
      } = req;

      const findings = await server.services.secretScanningV2.listSecretScanningFindingsByProjectId(
        projectId,
        permission
      );

      await server.services.auditLog.createAuditLog({
        ...req.auditLogInfo,
        projectId,
        event: {
          type: EventType.SECRET_SCANNING_FINDING_LIST,
          metadata: {
            findingIds: findings.map((finding) => finding.id),
            count: findings.length
          }
        }
      });

      return { findings };
    }
  });

  server.route({
    method: "PATCH",
    url: "/findings/:findingId",
    config: {
      rateLimit: writeLimit
    },
    schema: {
      hide: false,
      tags: [ApiDocsTags.SecretScanning],
      description: "Update the specified Secret Scanning Finding.",
      params: z.object({
        findingId: z.string().trim().min(1, "Finding ID required").describe(SecretScanningFindings.UPDATE.findingId)
      }),
      body: z.object({
        status: z.nativeEnum(SecretScanningFindingStatus).optional().describe(SecretScanningFindings.UPDATE.status),
        remarks: z.string().nullish().describe(SecretScanningFindings.UPDATE.remarks)
      }),
      response: {
        200: z.object({ finding: SecretScanningFindingSchema })
      }
    },
    onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
    handler: async (req) => {
      const {
        params: { findingId },
        body,
        permission
      } = req;

      const { finding, projectId } = await server.services.secretScanningV2.updateSecretScanningFindingById(
        { findingId, ...body },
        permission
      );

      await server.services.auditLog.createAuditLog({
        ...req.auditLogInfo,
        projectId,
        event: {
          type: EventType.SECRET_SCANNING_FINDING_UPDATE,
          metadata: {
            findingId,
            ...body
          }
        }
      });

      return { finding };
    }
  });

  server.route({
    method: "GET",
    url: "/configs",
    config: {
      rateLimit: readLimit
    },
    schema: {
      hide: false,
      tags: [ApiDocsTags.SecretScanning],
      description: "Get the Secret Scanning Config for the specified project.",
      querystring: z.object({
        projectId: z
          .string()
          .trim()
          .min(1, "Project ID required")
          .describe(SecretScanningConfigs.GET_BY_PROJECT_ID.projectId)
      }),
      response: {
        200: z.object({
          config: z.object({ content: z.string().nullish(), projectId: z.string(), updatedAt: z.date().nullish() })
        })
      }
    },
    onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
    handler: async (req) => {
      const {
        query: { projectId },
        permission
      } = req;

      const config = await server.services.secretScanningV2.findSecretScanningConfigByProjectId(projectId, permission);

      await server.services.auditLog.createAuditLog({
        ...req.auditLogInfo,
        projectId,
        event: {
          type: EventType.SECRET_SCANNING_CONFIG_GET
        }
      });

      return { config };
    }
  });

  server.route({
    method: "PATCH",
    url: "/configs",
    config: {
      rateLimit: writeLimit
    },
    schema: {
      hide: false,
      tags: [ApiDocsTags.SecretScanning],
      description: "Update the specified Secret Scanning Configuration.",
      querystring: z.object({
        projectId: z.string().trim().min(1, "Project ID required").describe(SecretScanningConfigs.UPDATE.projectId)
      }),
      body: z.object({
        content: z.string().nullable().describe(SecretScanningConfigs.UPDATE.content)
      }),
      response: {
        200: z.object({ config: SecretScanningConfigsSchema })
      }
    },
    onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
    handler: async (req) => {
      const {
        query: { projectId },
        body,
        permission
      } = req;

      const config = await server.services.secretScanningV2.upsertSecretScanningConfig(
        { projectId, ...body },
        permission
      );

      await server.services.auditLog.createAuditLog({
        ...req.auditLogInfo,
        projectId,
        event: {
          type: EventType.SECRET_SCANNING_CONFIG_UPDATE,
          metadata: body
        }
      });

      return { config };
    }
  });

  // not exposed, for UI only
  server.route({
    method: "GET",
    url: "/data-sources-dashboard",
    config: {
      rateLimit: readLimit
    },
    schema: {
      querystring: z.object({
        projectId: z.string().trim().min(1, "Project ID required")
      }),
      response: {
        200: z.object({
          dataSources: z
            .intersection(
              SecretScanningDataSourceSchema,
              z.object({
                lastScannedAt: z.date().nullish(),
                lastScanStatus: z.nativeEnum(SecretScanningScanStatus).nullish(),
                lastScanStatusMessage: z.string().nullish(),
                unresolvedFindings: z.number().nullish()
              })
            )
            .array()
        })
      }
    },
    onRequest: verifyAuth([AuthMode.JWT]),
    handler: async (req) => {
      const {
        query: { projectId },
        permission
      } = req;

      const dataSources = await server.services.secretScanningV2.listSecretScanningDataSourcesWithDetailsByProjectId(
        { projectId },
        permission
      );

      await server.services.auditLog.createAuditLog({
        ...req.auditLogInfo,
        projectId,
        event: {
          type: EventType.SECRET_SCANNING_DATA_SOURCE_LIST,
          metadata: {
            dataSourceIds: dataSources.map((dataSource) => dataSource.id),
            count: dataSources.length
          }
        }
      });

      return { dataSources };
    }
  });

  server.route({
    method: "GET",
    url: "/unresolved-findings-count",
    config: {
      rateLimit: readLimit
    },
    schema: {
      tags: [ApiDocsTags.SecretScanning],
      querystring: z.object({
        projectId: z.string().trim().min(1, "Project ID required").describe(SecretScanningFindings.LIST.projectId)
      }),
      response: {
        200: z.object({ unresolvedFindings: z.number() })
      }
    },
    onRequest: verifyAuth([AuthMode.JWT]),
    handler: async (req) => {
      const {
        query: { projectId },
        permission
      } = req;

      const unresolvedFindings =
        await server.services.secretScanningV2.getSecretScanningUnresolvedFindingsCountByProjectId(
          projectId,
          permission
        );

      return { unresolvedFindings };
    }
  });
};
@@ -10,6 +10,18 @@ import {
  TSecretRotationV2Raw,
  TUpdateSecretRotationV2DTO
} from "@app/ee/services/secret-rotation-v2/secret-rotation-v2-types";
import {
  SecretScanningDataSource,
  SecretScanningScanStatus,
  SecretScanningScanType
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
import {
  TCreateSecretScanningDataSourceDTO,
  TDeleteSecretScanningDataSourceDTO,
  TTriggerSecretScanningDataSourceDTO,
  TUpdateSecretScanningDataSourceDTO,
  TUpdateSecretScanningFindingDTO
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-types";
import { SshCaStatus, SshCertType } from "@app/ee/services/ssh/ssh-certificate-authority-types";
import { SshCertKeyAlgorithm } from "@app/ee/services/ssh-certificate/ssh-certificate-types";
import { SshCertTemplateStatus } from "@app/ee/services/ssh-certificate-template/ssh-certificate-template-types";
@@ -381,6 +393,20 @@ export enum EventType {
  PROJECT_ASSUME_PRIVILEGE_SESSION_START = "project-assume-privileges-session-start",
  PROJECT_ASSUME_PRIVILEGE_SESSION_END = "project-assume-privileges-session-end",

  SECRET_SCANNING_DATA_SOURCE_LIST = "secret-scanning-data-source-list",
  SECRET_SCANNING_DATA_SOURCE_CREATE = "secret-scanning-data-source-create",
  SECRET_SCANNING_DATA_SOURCE_UPDATE = "secret-scanning-data-source-update",
  SECRET_SCANNING_DATA_SOURCE_DELETE = "secret-scanning-data-source-delete",
  SECRET_SCANNING_DATA_SOURCE_GET = "secret-scanning-data-source-get",
  SECRET_SCANNING_DATA_SOURCE_TRIGGER_SCAN = "secret-scanning-data-source-trigger-scan",
  SECRET_SCANNING_DATA_SOURCE_SCAN = "secret-scanning-data-source-scan",
  SECRET_SCANNING_RESOURCE_LIST = "secret-scanning-resource-list",
  SECRET_SCANNING_SCAN_LIST = "secret-scanning-scan-list",
  SECRET_SCANNING_FINDING_LIST = "secret-scanning-finding-list",
  SECRET_SCANNING_FINDING_UPDATE = "secret-scanning-finding-update",
  SECRET_SCANNING_CONFIG_GET = "secret-scanning-config-get",
  SECRET_SCANNING_CONFIG_UPDATE = "secret-scanning-config-update",

  UPDATE_ORG = "update-org",

  CREATE_PROJECT = "create-project",
@@ -2953,6 +2979,101 @@ interface MicrosoftTeamsWorkflowIntegrationUpdateEvent {
  };
}

interface SecretScanningDataSourceListEvent {
  type: EventType.SECRET_SCANNING_DATA_SOURCE_LIST;
  metadata: {
    type?: SecretScanningDataSource;
    count: number;
    dataSourceIds: string[];
  };
}

interface SecretScanningDataSourceGetEvent {
  type: EventType.SECRET_SCANNING_DATA_SOURCE_GET;
  metadata: {
    type: SecretScanningDataSource;
    dataSourceId: string;
  };
}

interface SecretScanningDataSourceCreateEvent {
  type: EventType.SECRET_SCANNING_DATA_SOURCE_CREATE;
  metadata: Omit<TCreateSecretScanningDataSourceDTO, "projectId"> & { dataSourceId: string };
}

interface SecretScanningDataSourceUpdateEvent {
  type: EventType.SECRET_SCANNING_DATA_SOURCE_UPDATE;
  metadata: TUpdateSecretScanningDataSourceDTO;
}

interface SecretScanningDataSourceDeleteEvent {
  type: EventType.SECRET_SCANNING_DATA_SOURCE_DELETE;
  metadata: TDeleteSecretScanningDataSourceDTO;
}

interface SecretScanningDataSourceTriggerScanEvent {
  type: EventType.SECRET_SCANNING_DATA_SOURCE_TRIGGER_SCAN;
  metadata: TTriggerSecretScanningDataSourceDTO;
}

interface SecretScanningDataSourceScanEvent {
  type: EventType.SECRET_SCANNING_DATA_SOURCE_SCAN;
  metadata: {
    scanId: string;
    resourceId: string;
    resourceType: string;
    dataSourceId: string;
    dataSourceType: string;
    scanStatus: SecretScanningScanStatus;
    scanType: SecretScanningScanType;
    numberOfSecretsDetected?: number;
  };
}

interface SecretScanningResourceListEvent {
  type: EventType.SECRET_SCANNING_RESOURCE_LIST;
  metadata: {
    type: SecretScanningDataSource;
    dataSourceId: string;
    resourceIds: string[];
    count: number;
  };
}

interface SecretScanningScanListEvent {
  type: EventType.SECRET_SCANNING_SCAN_LIST;
  metadata: {
    type: SecretScanningDataSource;
    dataSourceId: string;
    count: number;
  };
}

interface SecretScanningFindingListEvent {
  type: EventType.SECRET_SCANNING_FINDING_LIST;
  metadata: {
    findingIds: string[];
    count: number;
  };
}

interface SecretScanningFindingUpdateEvent {
  type: EventType.SECRET_SCANNING_FINDING_UPDATE;
  metadata: TUpdateSecretScanningFindingDTO;
}

interface SecretScanningConfigUpdateEvent {
  type: EventType.SECRET_SCANNING_CONFIG_UPDATE;
  metadata: {
    content: string | null;
  };
}

interface SecretScanningConfigReadEvent {
  type: EventType.SECRET_SCANNING_CONFIG_GET;
  metadata?: Record<string, never>; // not needed, based off projectId
}

interface OrgUpdateEvent {
  type: EventType.UPDATE_ORG;
  metadata: {
@@ -3276,6 +3397,19 @@ export type Event =
  | MicrosoftTeamsWorkflowIntegrationGetEvent
  | MicrosoftTeamsWorkflowIntegrationListEvent
  | MicrosoftTeamsWorkflowIntegrationUpdateEvent
  | SecretScanningDataSourceListEvent
  | SecretScanningDataSourceGetEvent
  | SecretScanningDataSourceCreateEvent
  | SecretScanningDataSourceUpdateEvent
  | SecretScanningDataSourceDeleteEvent
  | SecretScanningDataSourceTriggerScanEvent
  | SecretScanningDataSourceScanEvent
  | SecretScanningResourceListEvent
  | SecretScanningScanListEvent
  | SecretScanningFindingListEvent
  | SecretScanningFindingUpdateEvent
  | SecretScanningConfigUpdateEvent
  | SecretScanningConfigReadEvent
  | OrgUpdateEvent
  | ProjectCreateEvent
  | ProjectUpdateEvent
@@ -17,6 +17,7 @@ import { SapAseProvider } from "./sap-ase";
import { SapHanaProvider } from "./sap-hana";
import { SqlDatabaseProvider } from "./sql-database";
import { TotpProvider } from "./totp";
import { VerticaProvider } from "./vertica";

type TBuildDynamicSecretProviderDTO = {
  gatewayService: Pick<TGatewayServiceFactory, "fnGetGatewayClientTlsByGatewayId">;
@@ -40,5 +41,6 @@ export const buildDynamicSecretProviders = ({
  [DynamicSecretProviders.Snowflake]: SnowflakeProvider(),
  [DynamicSecretProviders.Totp]: TotpProvider(),
  [DynamicSecretProviders.SapAse]: SapAseProvider(),
  [DynamicSecretProviders.Kubernetes]: KubernetesProvider({ gatewayService })
  [DynamicSecretProviders.Kubernetes]: KubernetesProvider({ gatewayService }),
  [DynamicSecretProviders.Vertica]: VerticaProvider({ gatewayService })
});
@@ -16,7 +16,8 @@ export enum SqlProviders {
  MySQL = "mysql2",
  Oracle = "oracledb",
  MsSQL = "mssql",
  SapAse = "sap-ase"
  SapAse = "sap-ase",
  Vertica = "vertica"
}

export enum ElasticSearchAuthTypes {
@@ -293,6 +294,39 @@ export const DynamicSecretKubernetesSchema = z.object({
  audiences: z.array(z.string().trim().min(1))
});

export const DynamicSecretVerticaSchema = z.object({
  host: z.string().trim().toLowerCase(),
  port: z.number(),
  username: z.string().trim(),
  password: z.string().trim(),
  database: z.string().trim(),
  gatewayId: z.string().nullable().optional(),
  creationStatement: z.string().trim(),
  revocationStatement: z.string().trim(),
  passwordRequirements: z
    .object({
      length: z.number().min(1).max(250),
      required: z
        .object({
          lowercase: z.number().min(0),
          uppercase: z.number().min(0),
          digits: z.number().min(0),
          symbols: z.number().min(0)
        })
        .refine((data) => {
          const total = Object.values(data).reduce((sum, count) => sum + count, 0);
          return total <= 250;
        }, "Sum of required characters cannot exceed 250"),
      allowedSymbols: z.string().optional()
    })
    .refine((data) => {
      const total = Object.values(data.required).reduce((sum, count) => sum + count, 0);
      return total <= data.length;
    }, "Sum of required characters cannot exceed the total length")
    .optional()
    .describe("Password generation requirements")
});

export const DynamicSecretTotpSchema = z.discriminatedUnion("configType", [
  z.object({
    configType: z.literal(TotpConfigType.URL),
@@ -337,7 +371,8 @@ export enum DynamicSecretProviders {
  Snowflake = "snowflake",
  Totp = "totp",
  SapAse = "sap-ase",
  Kubernetes = "kubernetes"
  Kubernetes = "kubernetes",
  Vertica = "vertica"
}

export const DynamicSecretProviderSchema = z.discriminatedUnion("type", [
@@ -356,7 +391,8 @@ export const DynamicSecretProviderSchema = z.discriminatedUnion("type", [
  z.object({ type: z.literal(DynamicSecretProviders.Ldap), inputs: LdapSchema }),
  z.object({ type: z.literal(DynamicSecretProviders.Snowflake), inputs: DynamicSecretSnowflakeSchema }),
  z.object({ type: z.literal(DynamicSecretProviders.Totp), inputs: DynamicSecretTotpSchema }),
  z.object({ type: z.literal(DynamicSecretProviders.Kubernetes), inputs: DynamicSecretKubernetesSchema })
  z.object({ type: z.literal(DynamicSecretProviders.Kubernetes), inputs: DynamicSecretKubernetesSchema }),
  z.object({ type: z.literal(DynamicSecretProviders.Vertica), inputs: DynamicSecretVerticaSchema })
]);

export type TDynamicProviderFns = {
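As a quick check of the new union member, a minimal sketch of validating a Vertica dynamic secret configuration against DynamicSecretProviderSchema; all sample values below are illustrative placeholders.

// Illustrative input; an invalid shape would throw a ZodError on parse.
const verticaConfig = DynamicSecretProviderSchema.parse({
  type: DynamicSecretProviders.Vertica,
  inputs: {
    host: "vertica.internal.example.com", // placeholder host
    port: 5433,
    username: "dbadmin",
    password: "example-password",
    database: "analytics",
    gatewayId: null,
    creationStatement: `CREATE USER "{{username}}" IDENTIFIED BY '{{password}}';`,
    revocationStatement: `DROP USER "{{username}}" CASCADE;`
  }
});
// verticaConfig.type === "vertica"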
backend/src/ee/services/dynamic-secret/providers/vertica.ts (new file, 367 lines)
@@ -0,0 +1,367 @@
|
||||
import { randomInt } from "crypto";
|
||||
import handlebars from "handlebars";
|
||||
import knex, { Knex } from "knex";
|
||||
import { z } from "zod";
|
||||
|
||||
import { BadRequestError } from "@app/lib/errors";
|
||||
import { withGatewayProxy } from "@app/lib/gateway";
|
||||
import { logger } from "@app/lib/logger";
|
||||
import { alphaNumericNanoId } from "@app/lib/nanoid";
|
||||
import { validateHandlebarTemplate } from "@app/lib/template/validate-handlebars";
|
||||
|
||||
import { TGatewayServiceFactory } from "../../gateway/gateway-service";
|
||||
import { verifyHostInputValidity } from "../dynamic-secret-fns";
|
||||
import { DynamicSecretVerticaSchema, PasswordRequirements, TDynamicProviderFns } from "./models";
|
||||
|
||||
const EXTERNAL_REQUEST_TIMEOUT = 10 * 1000;
|
||||
|
||||
interface VersionResult {
|
||||
version: string;
|
||||
}
|
||||
|
||||
interface SessionResult {
|
||||
session_id?: string;
|
||||
}
|
||||
|
||||
interface DatabaseQueryResult {
|
||||
rows?: Array<Record<string, unknown>>;
|
||||
}
|
||||
|
||||
// Extended Knex client interface to handle Vertica-specific overrides
|
||||
interface VerticaKnexClient extends Knex {
|
||||
client: {
|
||||
parseVersion?: () => string;
|
||||
};
|
||||
}
|
||||
|
||||
const DEFAULT_PASSWORD_REQUIREMENTS = {
|
||||
length: 48,
|
||||
required: {
|
||||
lowercase: 1,
|
||||
uppercase: 1,
|
||||
digits: 1,
|
||||
symbols: 0
|
||||
},
|
||||
allowedSymbols: "-_.~!*"
|
||||
};
|
||||
|
||||
const generatePassword = (requirements?: PasswordRequirements) => {
|
||||
const finalReqs = requirements || DEFAULT_PASSWORD_REQUIREMENTS;
|
||||
|
||||
try {
|
||||
const { length, required, allowedSymbols } = finalReqs;
|
||||
|
||||
const chars = {
|
||||
lowercase: "abcdefghijklmnopqrstuvwxyz",
|
||||
uppercase: "ABCDEFGHIJKLMNOPQRSTUVWXYZ",
|
||||
digits: "0123456789",
|
||||
symbols: allowedSymbols || "-_.~!*"
|
||||
};
|
||||
|
||||
const parts: string[] = [];
|
||||
|
||||
if (required.lowercase > 0) {
|
||||
parts.push(
|
||||
...Array(required.lowercase)
|
||||
.fill(0)
|
||||
.map(() => chars.lowercase[randomInt(chars.lowercase.length)])
|
||||
);
|
||||
}
|
||||
|
||||
if (required.uppercase > 0) {
|
||||
parts.push(
|
||||
...Array(required.uppercase)
|
||||
.fill(0)
|
||||
.map(() => chars.uppercase[randomInt(chars.uppercase.length)])
|
||||
);
|
||||
}
|
||||
|
||||
if (required.digits > 0) {
|
||||
parts.push(
|
||||
...Array(required.digits)
|
||||
.fill(0)
|
||||
.map(() => chars.digits[randomInt(chars.digits.length)])
|
||||
);
|
||||
}
|
||||
|
||||
if (required.symbols > 0) {
|
||||
parts.push(
|
||||
...Array(required.symbols)
|
||||
.fill(0)
|
||||
.map(() => chars.symbols[randomInt(chars.symbols.length)])
|
||||
);
|
||||
}
|
||||
|
||||
const requiredTotal = Object.values(required).reduce<number>((a, b) => a + b, 0);
|
||||
const remainingLength = Math.max(length - requiredTotal, 0);
|
||||
|
||||
const allowedChars = Object.entries(chars)
|
||||
.filter(([key]) => required[key as keyof typeof required] > 0)
|
||||
.map(([, value]) => value)
|
||||
.join("");
|
||||
|
||||
parts.push(
|
||||
...Array(remainingLength)
|
||||
.fill(0)
|
||||
.map(() => allowedChars[randomInt(allowedChars.length)])
|
||||
);
|
||||
|
||||
// shuffle the array to mix up the characters
|
||||
for (let i = parts.length - 1; i > 0; i -= 1) {
|
||||
const j = randomInt(i + 1);
|
||||
[parts[i], parts[j]] = [parts[j], parts[i]];
|
||||
}
|
||||
|
||||
return parts.join("");
|
||||
} catch (error: unknown) {
|
||||
const message = error instanceof Error ? error.message : "Unknown error";
|
||||
throw new Error(`Failed to generate password: ${message}`);
|
||||
}
|
||||
};
|
||||
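A brief usage sketch of the generator above; the explicit requirements object is illustrative.

// With no argument the defaults above apply: 48 characters with at least one
// lowercase letter, one uppercase letter and one digit (no symbols required).
const generated = generatePassword();

// Or with explicit, illustrative requirements:
const strict = generatePassword({
  length: 32,
  required: { lowercase: 2, uppercase: 2, digits: 2, symbols: 1 },
  allowedSymbols: "-_."
});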
|
||||
const generateUsername = (usernameTemplate?: string | null) => {
|
||||
const randomUsername = `inf_${alphaNumericNanoId(25)}`; // Username must start with an ascii letter, so we prepend the username with "inf_"
|
||||
if (!usernameTemplate) return randomUsername;
|
||||
|
||||
return handlebars.compile(usernameTemplate)({
|
||||
randomUsername,
|
||||
unixTimestamp: Math.floor(Date.now() / 100)
|
||||
});
|
||||
};
|
||||
|
||||
type TVerticaProviderDTO = {
|
||||
gatewayService: Pick<TGatewayServiceFactory, "fnGetGatewayClientTlsByGatewayId">;
|
||||
};
|
||||
|
||||
export const VerticaProvider = ({ gatewayService }: TVerticaProviderDTO): TDynamicProviderFns => {
|
||||
const validateProviderInputs = async (inputs: unknown) => {
|
||||
const providerInputs = await DynamicSecretVerticaSchema.parseAsync(inputs);
|
||||
|
||||
const [hostIp] = await verifyHostInputValidity(providerInputs.host, Boolean(providerInputs.gatewayId));
|
||||
validateHandlebarTemplate("Vertica creation", providerInputs.creationStatement, {
|
||||
allowedExpressions: (val) => ["username", "password"].includes(val)
|
||||
});
|
||||
if (providerInputs.revocationStatement) {
|
||||
validateHandlebarTemplate("Vertica revoke", providerInputs.revocationStatement, {
|
||||
allowedExpressions: (val) => ["username"].includes(val)
|
||||
});
|
||||
}
|
||||
return { ...providerInputs, hostIp };
|
||||
};
|
||||
|
||||
const $getClient = async (providerInputs: z.infer<typeof DynamicSecretVerticaSchema> & { hostIp: string }) => {
|
||||
const config = {
|
||||
client: "pg",
|
||||
connection: {
|
||||
host: providerInputs.hostIp,
|
||||
port: providerInputs.port,
|
||||
database: providerInputs.database,
|
||||
user: providerInputs.username,
|
||||
password: providerInputs.password,
|
||||
ssl: false
|
||||
},
|
||||
acquireConnectionTimeout: EXTERNAL_REQUEST_TIMEOUT,
|
||||
pool: {
|
||||
min: 0,
|
||||
max: 1,
|
||||
acquireTimeoutMillis: 30000,
|
||||
createTimeoutMillis: 30000,
|
||||
destroyTimeoutMillis: 5000,
|
||||
idleTimeoutMillis: 30000,
|
||||
reapIntervalMillis: 1000,
|
||||
createRetryIntervalMillis: 100
|
||||
},
|
||||
// Disable version checking for Vertica compatibility
|
||||
version: "9.6.0" // Fake a compatible PostgreSQL version
|
||||
};
|
||||
|
||||
const client = knex(config) as VerticaKnexClient;
|
||||
|
||||
// Override the version parsing to prevent errors with Vertica
|
||||
if (client.client && typeof client.client.parseVersion !== "undefined") {
|
||||
client.client.parseVersion = () => "9.6.0";
|
||||
}
|
||||
|
||||
return client;
|
||||
};
|
||||
|
||||
const gatewayProxyWrapper = async (
|
||||
providerInputs: z.infer<typeof DynamicSecretVerticaSchema>,
|
||||
gatewayCallback: (host: string, port: number) => Promise<void>
|
||||
) => {
|
||||
const relayDetails = await gatewayService.fnGetGatewayClientTlsByGatewayId(providerInputs.gatewayId as string);
|
||||
const [relayHost, relayPort] = relayDetails.relayAddress.split(":");
|
||||
await withGatewayProxy(
|
||||
async (port) => {
|
||||
await gatewayCallback("localhost", port);
|
||||
},
|
||||
{
|
||||
targetHost: providerInputs.host,
|
||||
targetPort: providerInputs.port,
|
||||
relayHost,
|
||||
relayPort: Number(relayPort),
|
||||
identityId: relayDetails.identityId,
|
||||
orgId: relayDetails.orgId,
|
||||
tlsOptions: {
|
||||
ca: relayDetails.certChain,
|
||||
cert: relayDetails.certificate,
|
||||
key: relayDetails.privateKey.toString()
|
||||
}
|
||||
}
|
||||
);
|
||||
};
|
||||
|
||||
const validateConnection = async (inputs: unknown) => {
|
||||
const providerInputs = await validateProviderInputs(inputs);
|
||||
let isConnected = false;
|
||||
|
||||
const gatewayCallback = async (host = providerInputs.hostIp, port = providerInputs.port) => {
|
||||
let client: VerticaKnexClient | null = null;
|
||||
|
||||
try {
|
||||
client = await $getClient({ ...providerInputs, hostIp: host, port });
|
||||
|
||||
const clientResult: DatabaseQueryResult = await client.raw("SELECT version() AS version");
|
||||
|
||||
const resultFromSelectedDatabase = clientResult.rows?.[0] as VersionResult | undefined;
|
||||
|
||||
if (!resultFromSelectedDatabase?.version) {
|
||||
throw new BadRequestError({
|
||||
message: "Failed to validate Vertica connection, version query failed"
|
||||
});
|
||||
}
|
||||
|
||||
isConnected = true;
|
||||
} finally {
|
||||
if (client) await client.destroy();
|
||||
}
|
||||
};
|
||||
|
||||
if (providerInputs.gatewayId) {
|
||||
await gatewayProxyWrapper(providerInputs, gatewayCallback);
|
||||
} else {
|
||||
await gatewayCallback();
|
||||
}
|
||||
|
||||
return isConnected;
|
||||
};
|
||||
|
||||
const create = async (data: { inputs: unknown; usernameTemplate?: string | null }) => {
|
||||
const { inputs, usernameTemplate } = data;
|
||||
const providerInputs = await validateProviderInputs(inputs);
|
||||
|
||||
const username = generateUsername(usernameTemplate);
|
||||
const password = generatePassword(providerInputs.passwordRequirements);
|
||||
|
||||
const gatewayCallback = async (host = providerInputs.host, port = providerInputs.port) => {
|
||||
let client: VerticaKnexClient | null = null;
|
||||
|
||||
try {
|
||||
client = await $getClient({ ...providerInputs, hostIp: host, port });
|
||||
|
||||
const creationStatement = handlebars.compile(providerInputs.creationStatement, { noEscape: true })({
|
||||
username,
|
||||
password
|
||||
});
|
||||
|
||||
const queries = creationStatement.trim().replaceAll("\n", "").split(";").filter(Boolean);
|
||||
|
||||
// Execute queries sequentially to maintain transaction integrity
|
||||
for (const query of queries) {
|
||||
const trimmedQuery = query.trim();
|
||||
if (trimmedQuery) {
|
||||
// eslint-disable-next-line no-await-in-loop
|
||||
await client.raw(trimmedQuery);
|
||||
}
|
||||
}
|
||||
} finally {
|
||||
if (client) await client.destroy();
|
||||
}
|
||||
};
|
||||
|
||||
if (providerInputs.gatewayId) {
|
||||
await gatewayProxyWrapper(providerInputs, gatewayCallback);
|
||||
} else {
|
||||
await gatewayCallback();
|
||||
}
|
||||
|
||||
return { entityId: username, data: { DB_USERNAME: username, DB_PASSWORD: password } };
|
||||
};
|
||||
|
||||
const revoke = async (inputs: unknown, username: string) => {
|
||||
const providerInputs = await validateProviderInputs(inputs);
|
||||
|
||||
const gatewayCallback = async (host = providerInputs.host, port = providerInputs.port) => {
|
||||
let client: VerticaKnexClient | null = null;
|
||||
|
||||
try {
|
||||
client = await $getClient({ ...providerInputs, hostIp: host, port });
|
||||
|
||||
const revokeStatement = handlebars.compile(providerInputs.revocationStatement, { noEscape: true })({
|
||||
username
|
||||
});
|
||||
|
||||
const queries = revokeStatement.trim().replaceAll("\n", "").split(";").filter(Boolean);
|
||||
|
||||
// Check for active sessions and close them
|
||||
try {
|
||||
const sessionResult: DatabaseQueryResult = await client.raw(
|
||||
"SELECT session_id FROM sessions WHERE user_name = ?",
|
||||
[username]
|
||||
);
|
||||
|
||||
const activeSessions = (sessionResult.rows || []) as SessionResult[];
|
||||
|
||||
// Close all sessions in parallel since they're independent operations
|
||||
if (activeSessions.length > 0) {
|
||||
const sessionClosePromises = activeSessions.map(async (session) => {
|
||||
try {
|
||||
await client!.raw("SELECT close_session(?)", [session.session_id]);
|
||||
} catch (error) {
|
||||
// Continue if session is already closed
|
||||
logger.error(error, `Failed to close session ${session.session_id}`);
|
||||
}
|
||||
});
|
||||
|
||||
await Promise.allSettled(sessionClosePromises);
|
||||
}
|
||||
} catch (error) {
|
||||
// Continue if we can't query sessions (permissions, etc.)
|
||||
logger.error(error, "Could not query/close active sessions");
|
||||
}
|
||||
|
||||
// Execute revocation queries sequentially to maintain transaction integrity
|
||||
for (const query of queries) {
|
||||
const trimmedQuery = query.trim();
|
||||
if (trimmedQuery) {
|
||||
// eslint-disable-next-line no-await-in-loop
|
||||
await client.raw(trimmedQuery);
|
||||
}
|
||||
}
|
||||
} finally {
|
||||
if (client) await client.destroy();
|
||||
}
|
||||
};
|
||||
|
||||
if (providerInputs.gatewayId) {
|
||||
await gatewayProxyWrapper(providerInputs, gatewayCallback);
|
||||
} else {
|
||||
await gatewayCallback();
|
||||
}
|
||||
|
||||
return { entityId: username };
|
||||
};
|
||||
|
||||
const renew = async (_: unknown, username: string) => {
|
||||
// No need for renewal
|
||||
return { entityId: username };
|
||||
};
|
||||
|
||||
return {
|
||||
validateProviderInputs,
|
||||
validateConnection,
|
||||
create,
|
||||
revoke,
|
||||
renew
|
||||
};
|
||||
};
|
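To tie the pieces together, a hedged sketch of the provider lifecycle; the gatewayService dependency and all input values are placeholders, not taken from this diff.

const provider = VerticaProvider({ gatewayService });

const inputs = {
  host: "vertica.internal.example.com", // placeholder
  port: 5433,
  username: "dbadmin",
  password: "example-password",
  database: "analytics",
  gatewayId: null,
  creationStatement: `CREATE USER "{{username}}" IDENTIFIED BY '{{password}}';`,
  revocationStatement: `DROP USER "{{username}}" CASCADE;`
};

if (await provider.validateConnection(inputs)) {
  const lease = await provider.create({ inputs });
  // lease.data.DB_USERNAME / lease.data.DB_PASSWORD are handed to the consumer
  await provider.revoke(inputs, lease.entityId);
}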
@@ -56,6 +56,7 @@ export const getDefaultOnPremFeatures = (): TFeatureSet => ({
  kmip: false,
  gateway: false,
  sshHostGroups: false,
  secretScanning: false,
  enterpriseSecretSyncs: false,
  enterpriseAppConnections: false
});
@@ -72,6 +72,7 @@ export type TFeatureSet = {
  kmip: false;
  gateway: false;
  sshHostGroups: false;
  secretScanning: false;
  enterpriseSecretSyncs: false;
  enterpriseAppConnections: false;
};
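For context, a hedged sketch of gating the feature on this flag; the licenseService name and getPlan method are assumptions for illustration.

// Illustrative license gate for secret scanning functionality.
const plan = await licenseService.getPlan(orgId); // assumed service and method name
if (!plan.secretScanning) {
  throw new BadRequestError({
    message: "Secret scanning is not available under the current license plan."
  });
}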
@@ -13,6 +13,9 @@ import {
|
||||
ProjectPermissionPkiTemplateActions,
|
||||
ProjectPermissionSecretActions,
|
||||
ProjectPermissionSecretRotationActions,
|
||||
ProjectPermissionSecretScanningConfigActions,
|
||||
ProjectPermissionSecretScanningDataSourceActions,
|
||||
ProjectPermissionSecretScanningFindingActions,
|
||||
ProjectPermissionSecretSyncActions,
|
||||
ProjectPermissionSet,
|
||||
ProjectPermissionSshHostActions,
|
||||
@@ -148,6 +151,7 @@ const buildAdminPermissionRules = () => {
|
||||
can(
|
||||
[
|
||||
ProjectPermissionSecretActions.DescribeSecret,
|
||||
ProjectPermissionSecretActions.DescribeAndReadValue,
|
||||
ProjectPermissionSecretActions.ReadValue,
|
||||
ProjectPermissionSecretActions.Create,
|
||||
ProjectPermissionSecretActions.Edit,
|
||||
@@ -219,6 +223,29 @@ const buildAdminPermissionRules = () => {
|
||||
ProjectPermissionSub.SecretRotation
|
||||
);
|
||||
|
||||
can(
|
||||
[
|
||||
ProjectPermissionSecretScanningDataSourceActions.Create,
|
||||
ProjectPermissionSecretScanningDataSourceActions.Edit,
|
||||
ProjectPermissionSecretScanningDataSourceActions.Delete,
|
||||
ProjectPermissionSecretScanningDataSourceActions.Read,
|
||||
ProjectPermissionSecretScanningDataSourceActions.TriggerScans,
|
||||
ProjectPermissionSecretScanningDataSourceActions.ReadScans,
|
||||
ProjectPermissionSecretScanningDataSourceActions.ReadResources
|
||||
],
|
||||
ProjectPermissionSub.SecretScanningDataSources
|
||||
);
|
||||
|
||||
can(
|
||||
[ProjectPermissionSecretScanningFindingActions.Read, ProjectPermissionSecretScanningFindingActions.Update],
|
||||
ProjectPermissionSub.SecretScanningFindings
|
||||
);
|
||||
|
||||
can(
|
||||
[ProjectPermissionSecretScanningConfigActions.Read, ProjectPermissionSecretScanningConfigActions.Update],
|
||||
ProjectPermissionSub.SecretScanningConfigs
|
||||
);
|
||||
|
||||
return rules;
|
||||
};
|
||||
|
||||
@@ -228,6 +255,7 @@ const buildMemberPermissionRules = () => {
|
||||
can(
|
||||
[
|
||||
ProjectPermissionSecretActions.DescribeSecret,
|
||||
ProjectPermissionSecretActions.DescribeAndReadValue,
|
||||
ProjectPermissionSecretActions.ReadValue,
|
||||
ProjectPermissionSecretActions.Edit,
|
||||
ProjectPermissionSecretActions.Create,
|
||||
@@ -399,6 +427,23 @@ const buildMemberPermissionRules = () => {
|
||||
ProjectPermissionSub.SecretSyncs
|
||||
);
|
||||
|
||||
can(
|
||||
[
|
||||
ProjectPermissionSecretScanningDataSourceActions.Read,
|
||||
ProjectPermissionSecretScanningDataSourceActions.TriggerScans,
|
||||
ProjectPermissionSecretScanningDataSourceActions.ReadScans,
|
||||
ProjectPermissionSecretScanningDataSourceActions.ReadResources
|
||||
],
|
||||
ProjectPermissionSub.SecretScanningDataSources
|
||||
);
|
||||
|
||||
can(
|
||||
[ProjectPermissionSecretScanningFindingActions.Read, ProjectPermissionSecretScanningFindingActions.Update],
|
||||
ProjectPermissionSub.SecretScanningFindings
|
||||
);
|
||||
|
||||
can([ProjectPermissionSecretScanningConfigActions.Read], ProjectPermissionSub.SecretScanningConfigs);
|
||||
|
||||
return rules;
|
||||
};
|
||||
|
||||
@@ -435,6 +480,19 @@ const buildViewerPermissionRules = () => {
|
||||
can(ProjectPermissionActions.Read, ProjectPermissionSub.SshCertificateTemplates);
|
||||
can(ProjectPermissionSecretSyncActions.Read, ProjectPermissionSub.SecretSyncs);
|
||||
|
||||
can(
|
||||
[
|
||||
ProjectPermissionSecretScanningDataSourceActions.Read,
|
||||
ProjectPermissionSecretScanningDataSourceActions.ReadScans,
|
||||
ProjectPermissionSecretScanningDataSourceActions.ReadResources
|
||||
],
|
||||
ProjectPermissionSub.SecretScanningDataSources
|
||||
);
|
||||
|
||||
can([ProjectPermissionSecretScanningFindingActions.Read], ProjectPermissionSub.SecretScanningFindings);
|
||||
|
||||
can([ProjectPermissionSecretScanningConfigActions.Read], ProjectPermissionSub.SecretScanningConfigs);
|
||||
|
||||
return rules;
|
||||
};
|
||||
|
||||
|
@@ -132,6 +132,26 @@ export enum ProjectPermissionKmipActions {
  GenerateClientCertificates = "generate-client-certificates"
}

export enum ProjectPermissionSecretScanningDataSourceActions {
  Read = "read-data-sources",
  Create = "create-data-sources",
  Edit = "edit-data-sources",
  Delete = "delete-data-sources",
  TriggerScans = "trigger-data-source-scans",
  ReadScans = "read-data-source-scans",
  ReadResources = "read-data-source-resources"
}

export enum ProjectPermissionSecretScanningFindingActions {
  Read = "read-findings",
  Update = "update-findings"
}

export enum ProjectPermissionSecretScanningConfigActions {
  Read = "read-configs",
  Update = "update-configs"
}

export enum ProjectPermissionSub {
  Role = "role",
  Member = "member",
@@ -167,7 +187,10 @@ export enum ProjectPermissionSub {
  Kms = "kms",
  Cmek = "cmek",
  SecretSyncs = "secret-syncs",
  Kmip = "kmip"
  Kmip = "kmip",
  SecretScanningDataSources = "secret-scanning-data-sources",
  SecretScanningFindings = "secret-scanning-findings",
  SecretScanningConfigs = "secret-scanning-configs"
}

export type SecretSubjectFields = {
@@ -301,7 +324,10 @@ export type ProjectPermissionSet =
  | [ProjectPermissionActions.Edit, ProjectPermissionSub.Project]
  | [ProjectPermissionActions.Read, ProjectPermissionSub.SecretRollback]
  | [ProjectPermissionActions.Create, ProjectPermissionSub.SecretRollback]
  | [ProjectPermissionActions.Edit, ProjectPermissionSub.Kms];
  | [ProjectPermissionActions.Edit, ProjectPermissionSub.Kms]
  | [ProjectPermissionSecretScanningDataSourceActions, ProjectPermissionSub.SecretScanningDataSources]
  | [ProjectPermissionSecretScanningFindingActions, ProjectPermissionSub.SecretScanningFindings]
  | [ProjectPermissionSecretScanningConfigActions, ProjectPermissionSub.SecretScanningConfigs];
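As a usage sketch, the new subjects plug into the same CASL checks used elsewhere in the permission layer; `permission` below is assumed to be the ability built from the rules above.

import { ForbiddenError } from "@casl/ability";

// Throws unless the caller may trigger scans on data sources in this project.
ForbiddenError.from(permission).throwUnlessCan(
  ProjectPermissionSecretScanningDataSourceActions.TriggerScans,
  ProjectPermissionSub.SecretScanningDataSources
);

// Boolean check, e.g. for shaping UI responses.
const canResolveFindings = permission.can(
  ProjectPermissionSecretScanningFindingActions.Update,
  ProjectPermissionSub.SecretScanningFindings
);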
const SECRET_PATH_MISSING_SLASH_ERR_MSG = "Invalid Secret Path; it must start with a '/'";
|
||||
const SECRET_PATH_PERMISSION_OPERATOR_SCHEMA = z.union([
|
||||
@@ -631,6 +657,26 @@ const GeneralPermissionSchema = [
|
||||
action: CASL_ACTION_SCHEMA_NATIVE_ENUM(ProjectPermissionKmipActions).describe(
|
||||
"Describe what action an entity can take."
|
||||
)
|
||||
}),
|
||||
z.object({
|
||||
subject: z
|
||||
.literal(ProjectPermissionSub.SecretScanningDataSources)
|
||||
.describe("The entity this permission pertains to."),
|
||||
action: CASL_ACTION_SCHEMA_NATIVE_ENUM(ProjectPermissionSecretScanningDataSourceActions).describe(
|
||||
"Describe what action an entity can take."
|
||||
)
|
||||
}),
|
||||
z.object({
|
||||
subject: z.literal(ProjectPermissionSub.SecretScanningFindings).describe("The entity this permission pertains to."),
|
||||
action: CASL_ACTION_SCHEMA_NATIVE_ENUM(ProjectPermissionSecretScanningFindingActions).describe(
|
||||
"Describe what action an entity can take."
|
||||
)
|
||||
}),
|
||||
z.object({
|
||||
subject: z.literal(ProjectPermissionSub.SecretScanningConfigs).describe("The entity this permission pertains to."),
|
||||
action: CASL_ACTION_SCHEMA_NATIVE_ENUM(ProjectPermissionSecretScanningConfigActions).describe(
|
||||
"Describe what action an entity can take."
|
||||
)
|
||||
})
|
||||
];
|
||||
|
||||
|
@@ -0,0 +1,9 @@
import { SecretScanningDataSource } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
import { TSecretScanningDataSourceListItem } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-types";
import { AppConnection } from "@app/services/app-connection/app-connection-enums";

export const GITHUB_SECRET_SCANNING_DATA_SOURCE_LIST_OPTION: TSecretScanningDataSourceListItem = {
  name: "GitHub",
  type: SecretScanningDataSource.GitHub,
  connection: AppConnection.GitHubRadar
};
@@ -0,0 +1,230 @@
|
||||
import { join } from "path";
|
||||
import { ProbotOctokit } from "probot";
|
||||
|
||||
import { scanContentAndGetFindings } from "@app/ee/services/secret-scanning/secret-scanning-queue/secret-scanning-fns";
|
||||
import { SecretMatch } from "@app/ee/services/secret-scanning/secret-scanning-queue/secret-scanning-queue-types";
|
||||
import {
|
||||
SecretScanningDataSource,
|
||||
SecretScanningFindingSeverity,
|
||||
SecretScanningResource
|
||||
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
|
||||
import {
|
||||
cloneRepository,
|
||||
convertPatchLineToFileLineNumber,
|
||||
replaceNonChangesWithNewlines
|
||||
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-fns";
|
||||
import {
|
||||
TSecretScanningFactoryGetDiffScanFindingsPayload,
|
||||
TSecretScanningFactoryGetDiffScanResourcePayload,
|
||||
TSecretScanningFactoryGetFullScanPath,
|
||||
TSecretScanningFactoryInitialize,
|
||||
TSecretScanningFactoryListRawResources,
|
||||
TSecretScanningFactoryPostInitialization
|
||||
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-types";
|
||||
import { getConfig } from "@app/lib/config/env";
|
||||
import { BadRequestError } from "@app/lib/errors";
|
||||
import { titleCaseToCamelCase } from "@app/lib/fn";
|
||||
import { GitHubRepositoryRegex } from "@app/lib/regex";
|
||||
import { listGitHubRadarRepositories, TGitHubRadarConnection } from "@app/services/app-connection/github-radar";
|
||||
|
||||
import { TGitHubDataSourceWithConnection, TQueueGitHubResourceDiffScan } from "./github-secret-scanning-types";
|
||||
|
||||
export const GitHubSecretScanningFactory = () => {
|
||||
const initialize: TSecretScanningFactoryInitialize<TGitHubRadarConnection> = async (
|
||||
{ connection, secretScanningV2DAL },
|
||||
callback
|
||||
) => {
|
||||
const externalId = connection.credentials.installationId;
|
||||
|
||||
const existingDataSource = await secretScanningV2DAL.dataSources.findOne({
|
||||
externalId,
|
||||
type: SecretScanningDataSource.GitHub
|
||||
});
|
||||
|
||||
if (existingDataSource)
|
||||
throw new BadRequestError({
|
||||
message: `A Data Source already exists for this GitHub Radar Connection in the Project with ID "${existingDataSource.projectId}"`
|
||||
});
|
||||
|
||||
return callback({
|
||||
externalId
|
||||
});
|
||||
};
|
||||
|
||||
const postInitialization: TSecretScanningFactoryPostInitialization<TGitHubRadarConnection> = async () => {
|
||||
// no post-initialization required
|
||||
};
|
||||
|
||||
const listRawResources: TSecretScanningFactoryListRawResources<TGitHubDataSourceWithConnection> = async (
|
||||
dataSource
|
||||
) => {
|
||||
const {
|
||||
connection,
|
||||
config: { includeRepos }
|
||||
} = dataSource;
|
||||
|
||||
const repos = await listGitHubRadarRepositories(connection);
|
||||
|
||||
const filteredRepos: typeof repos = [];
|
||||
if (includeRepos.includes("*")) {
|
||||
filteredRepos.push(...repos);
|
||||
} else {
|
||||
filteredRepos.push(...repos.filter((repo) => includeRepos.includes(repo.full_name)));
|
||||
}
|
||||
|
||||
return filteredRepos.map(({ id, full_name }) => ({
|
||||
name: full_name,
|
||||
externalId: id.toString(),
|
||||
type: SecretScanningResource.Repository
|
||||
}));
|
||||
};
|
||||
|
||||
const getFullScanPath: TSecretScanningFactoryGetFullScanPath<TGitHubDataSourceWithConnection> = async ({
|
||||
dataSource,
|
||||
resourceName,
|
||||
tempFolder
|
||||
}) => {
|
||||
const appCfg = getConfig();
|
||||
const {
|
||||
connection: {
|
||||
credentials: { installationId }
|
||||
}
|
||||
} = dataSource;
|
||||
|
||||
const octokit = new ProbotOctokit({
|
||||
auth: {
|
||||
appId: appCfg.INF_APP_CONNECTION_GITHUB_RADAR_APP_ID,
|
||||
privateKey: appCfg.INF_APP_CONNECTION_GITHUB_RADAR_APP_PRIVATE_KEY,
|
||||
installationId
|
||||
}
|
||||
});
|
||||
|
||||
const {
|
||||
data: { token }
|
||||
} = await octokit.apps.createInstallationAccessToken({
|
||||
installation_id: Number(installationId)
|
||||
});
|
||||
|
||||
const repoPath = join(tempFolder, "repo.git");
|
||||
|
||||
if (!GitHubRepositoryRegex.test(resourceName)) {
|
||||
throw new Error("Invalid GitHub repository name");
|
||||
}
|
||||
|
||||
await cloneRepository({
|
||||
cloneUrl: `https://x-access-token:${token}@github.com/${resourceName}.git`,
|
||||
repoPath
|
||||
});
|
||||
|
||||
return repoPath;
|
||||
};
|
||||
|
||||
const getDiffScanResourcePayload: TSecretScanningFactoryGetDiffScanResourcePayload<
|
||||
TQueueGitHubResourceDiffScan["payload"]
|
||||
> = ({ repository }) => {
|
||||
return {
|
||||
name: repository.full_name,
|
||||
externalId: repository.id.toString(),
|
||||
type: SecretScanningResource.Repository
|
||||
};
|
||||
};
|
||||
|
||||
const getDiffScanFindingsPayload: TSecretScanningFactoryGetDiffScanFindingsPayload<
|
||||
TGitHubDataSourceWithConnection,
|
||||
TQueueGitHubResourceDiffScan["payload"]
|
||||
> = async ({ dataSource, payload, resourceName, configPath }) => {
|
||||
const appCfg = getConfig();
|
||||
const {
|
||||
connection: {
|
||||
credentials: { installationId }
|
||||
}
|
||||
} = dataSource;
|
||||
|
||||
const octokit = new ProbotOctokit({
|
||||
auth: {
|
||||
appId: appCfg.INF_APP_CONNECTION_GITHUB_RADAR_APP_ID,
|
||||
privateKey: appCfg.INF_APP_CONNECTION_GITHUB_RADAR_APP_PRIVATE_KEY,
|
||||
installationId
|
||||
}
|
||||
});
|
||||
|
||||
const { commits, repository } = payload;
|
||||
|
||||
const [owner, repo] = repository.full_name.split("/");
|
||||
|
||||
const allFindings: SecretMatch[] = [];
|
||||
|
||||
for (const commit of commits) {
|
||||
// eslint-disable-next-line no-await-in-loop
|
||||
const commitData = await octokit.repos.getCommit({
|
||||
owner,
|
||||
repo,
|
||||
ref: commit.id
|
||||
});
|
||||
|
||||
// eslint-disable-next-line no-continue
|
||||
if (!commitData.data.files) continue;
|
||||
|
||||
for (const file of commitData.data.files) {
|
||||
if ((file.status === "added" || file.status === "modified") && file.patch) {
|
||||
// eslint-disable-next-line
|
||||
const findings = await scanContentAndGetFindings(
|
||||
replaceNonChangesWithNewlines(`\n${file.patch}`),
|
||||
configPath
|
||||
);
|
||||
|
||||
const adjustedFindings = findings.map((finding) => {
|
||||
const startLine = convertPatchLineToFileLineNumber(file.patch!, finding.StartLine);
|
||||
const endLine =
|
||||
finding.StartLine === finding.EndLine
|
||||
? startLine
|
||||
: convertPatchLineToFileLineNumber(file.patch!, finding.EndLine);
|
||||
const startColumn = finding.StartColumn - 1; // subtract 1 for +
|
||||
const endColumn = finding.EndColumn - 1; // subtract 1 for +
|
||||
|
||||
return {
|
||||
...finding,
|
||||
StartLine: startLine,
|
||||
EndLine: endLine,
|
||||
StartColumn: startColumn,
|
||||
EndColumn: endColumn,
|
||||
File: file.filename,
|
||||
Commit: commit.id,
|
||||
Author: commit.author.name,
|
||||
Email: commit.author.email ?? "",
|
||||
Message: commit.message,
|
||||
Fingerprint: `${commit.id}:${file.filename}:${finding.RuleID}:${startLine}:${startColumn}`,
|
||||
Date: commit.timestamp,
|
||||
Link: `https://github.com/${resourceName}/blob/${commit.id}/${file.filename}#L${startLine}`
|
||||
};
|
||||
});
|
||||
|
||||
allFindings.push(...adjustedFindings);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return allFindings.map(
|
||||
({
|
||||
// discard match and secret as we don't want to store
|
||||
Match,
|
||||
Secret,
|
||||
...finding
|
||||
}) => ({
|
||||
details: titleCaseToCamelCase(finding),
|
||||
fingerprint: finding.Fingerprint,
|
||||
severity: SecretScanningFindingSeverity.High,
|
||||
rule: finding.RuleID
|
||||
})
|
||||
);
|
||||
};
|
||||
|
||||
return {
|
||||
initialize,
|
||||
postInitialization,
|
||||
listRawResources,
|
||||
getFullScanPath,
|
||||
getDiffScanResourcePayload,
|
||||
getDiffScanFindingsPayload
|
||||
};
|
||||
};
|
@@ -0,0 +1,85 @@
|
||||
import { z } from "zod";
|
||||
|
||||
import {
|
||||
SecretScanningDataSource,
|
||||
SecretScanningResource
|
||||
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
|
||||
import {
|
||||
BaseCreateSecretScanningDataSourceSchema,
|
||||
BaseSecretScanningDataSourceSchema,
|
||||
BaseSecretScanningFindingSchema,
|
||||
BaseUpdateSecretScanningDataSourceSchema,
|
||||
GitRepositoryScanFindingDetailsSchema
|
||||
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-schemas";
|
||||
import { SecretScanningDataSources } from "@app/lib/api-docs";
|
||||
import { GitHubRepositoryRegex } from "@app/lib/regex";
|
||||
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
|
||||
|
||||
export const GitHubDataSourceConfigSchema = z.object({
|
||||
includeRepos: z
|
||||
.array(
|
||||
z
|
||||
.string()
|
||||
.min(1)
|
||||
.max(256)
|
||||
.refine((value) => value === "*" || GitHubRepositoryRegex.test(value), "Invalid repository name format")
|
||||
)
|
||||
.nonempty("One or more repositories required")
|
||||
.max(100, "Cannot configure more than 100 repositories")
|
||||
.default(["*"])
|
||||
.describe(SecretScanningDataSources.CONFIG.GITHUB.includeRepos)
|
||||
});
|
||||
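A minimal sketch of what this config accepts; the repository names are placeholders.

// Scan only the listed repositories (must match the "owner/repo" format).
const scoped = GitHubDataSourceConfigSchema.parse({
  includeRepos: ["acme-org/payments-api", "acme-org/web"]
});

// Omitting the field falls back to the default and scans everything accessible.
const everything = GitHubDataSourceConfigSchema.parse({}); // { includeRepos: ["*"] }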
|
||||
export const GitHubDataSourceSchema = BaseSecretScanningDataSourceSchema({
|
||||
type: SecretScanningDataSource.GitHub,
|
||||
isConnectionRequired: true
|
||||
})
|
||||
.extend({
|
||||
config: GitHubDataSourceConfigSchema
|
||||
})
|
||||
.describe(
|
||||
JSON.stringify({
|
||||
title: "GitHub"
|
||||
})
|
||||
);
|
||||
|
||||
export const CreateGitHubDataSourceSchema = BaseCreateSecretScanningDataSourceSchema({
|
||||
type: SecretScanningDataSource.GitHub,
|
||||
isConnectionRequired: true
|
||||
})
|
||||
.extend({
|
||||
config: GitHubDataSourceConfigSchema
|
||||
})
|
||||
.describe(
|
||||
JSON.stringify({
|
||||
title: "GitHub"
|
||||
})
|
||||
);
|
||||
|
||||
export const UpdateGitHubDataSourceSchema = BaseUpdateSecretScanningDataSourceSchema(SecretScanningDataSource.GitHub)
|
||||
.extend({
|
||||
config: GitHubDataSourceConfigSchema.optional()
|
||||
})
|
||||
.describe(
|
||||
JSON.stringify({
|
||||
title: "GitHub"
|
||||
})
|
||||
);
|
||||
|
||||
export const GitHubDataSourceListItemSchema = z
|
||||
.object({
|
||||
name: z.literal("GitHub"),
|
||||
connection: z.literal(AppConnection.GitHubRadar),
|
||||
type: z.literal(SecretScanningDataSource.GitHub)
|
||||
})
|
||||
.describe(
|
||||
JSON.stringify({
|
||||
title: "GitHub"
|
||||
})
|
||||
);
|
||||
|
||||
export const GitHubFindingSchema = BaseSecretScanningFindingSchema.extend({
|
||||
resourceType: z.literal(SecretScanningResource.Repository),
|
||||
dataSourceType: z.literal(SecretScanningDataSource.GitHub),
|
||||
details: GitRepositoryScanFindingDetailsSchema
|
||||
});
|
@@ -0,0 +1,87 @@
|
||||
import { PushEvent } from "@octokit/webhooks-types";
|
||||
|
||||
import { TSecretScanningV2DALFactory } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-dal";
|
||||
import { SecretScanningDataSource } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
|
||||
import { TSecretScanningV2QueueServiceFactory } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-queue";
|
||||
import { logger } from "@app/lib/logger";
|
||||
|
||||
import { TGitHubDataSource } from "./github-secret-scanning-types";
|
||||
|
||||
export const githubSecretScanningService = (
|
||||
secretScanningV2DAL: TSecretScanningV2DALFactory,
|
||||
secretScanningV2Queue: Pick<TSecretScanningV2QueueServiceFactory, "queueResourceDiffScan">
|
||||
) => {
|
||||
const handleInstallationDeletedEvent = async (installationId: number) => {
|
||||
const dataSource = await secretScanningV2DAL.dataSources.findOne({
|
||||
externalId: String(installationId),
|
||||
type: SecretScanningDataSource.GitHub
|
||||
});
|
||||
|
||||
if (!dataSource) {
|
||||
logger.error(
|
||||
`secretScanningV2RemoveEvent: GitHub - Could not find data source [installationId=${installationId}]`
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
logger.info(
|
||||
`secretScanningV2RemoveEvent: GitHub - installation deleted [installationId=${installationId}] [dataSourceId=${dataSource.id}]`
|
||||
);
|
||||
|
||||
await secretScanningV2DAL.dataSources.updateById(dataSource.id, {
|
||||
isDisconnected: true
|
||||
});
|
||||
};
|
||||
|
||||
const handlePushEvent = async (payload: PushEvent) => {
|
||||
const { commits, repository, installation } = payload;
|
||||
|
||||
if (!commits || !repository || !installation) {
|
||||
logger.warn(
|
||||
`secretScanningV2PushEvent: GitHub - Insufficient data [commits=${commits?.length ?? 0}] [repository=${repository?.name}] [installationId=${installation?.id}]`
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
const dataSource = (await secretScanningV2DAL.dataSources.findOne({
|
||||
externalId: String(installation.id),
|
||||
type: SecretScanningDataSource.GitHub
|
||||
})) as TGitHubDataSource | undefined;
|
||||
|
||||
if (!dataSource) {
|
||||
logger.error(
|
||||
`secretScanningV2PushEvent: GitHub - Could not find data source [installationId=${installation.id}]`
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
const {
|
||||
isAutoScanEnabled,
|
||||
config: { includeRepos }
|
||||
} = dataSource;
|
||||
|
||||
if (!isAutoScanEnabled) {
|
||||
logger.info(
|
||||
`secretScanningV2PushEvent: GitHub - ignoring due to auto scan disabled [dataSourceId=${dataSource.id}] [installationId=${installation.id}]`
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
if (includeRepos.includes("*") || includeRepos.includes(repository.full_name)) {
|
||||
await secretScanningV2Queue.queueResourceDiffScan({
|
||||
dataSourceType: SecretScanningDataSource.GitHub,
|
||||
payload,
|
||||
dataSourceId: dataSource.id
|
||||
});
|
||||
} else {
|
||||
logger.info(
|
||||
`secretScanningV2PushEvent: GitHub - ignoring due to repository not being present in config [installationId=${installation.id}] [dataSourceId=${dataSource.id}]`
|
||||
);
|
||||
}
|
||||
};
|
||||
|
||||
return {
|
||||
handlePushEvent,
|
||||
handleInstallationDeletedEvent
|
||||
};
|
||||
};
|
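For context, a hedged sketch of how these handlers might be driven from a webhook route; the route path, server instance and signature verification are assumptions and are not defined in this diff.

// Assumed wiring: a Fastify-style route receiving GitHub webhook deliveries.
const githubScanningService = githubSecretScanningService(secretScanningV2DAL, secretScanningV2Queue);

server.post("/webhooks/github-radar", async (req, reply) => {
  const eventName = req.headers["x-github-event"];

  if (eventName === "push") {
    await githubScanningService.handlePushEvent(req.body as PushEvent);
  } else if (eventName === "installation") {
    const body = req.body as { action?: string; installation?: { id: number } };
    if (body.action === "deleted" && body.installation) {
      await githubScanningService.handleInstallationDeletedEvent(body.installation.id);
    }
  }

  return reply.send({ ok: true });
});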
@@ -0,0 +1,32 @@
import { PushEvent } from "@octokit/webhooks-types";
import { z } from "zod";

import { SecretScanningDataSource } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
import { TGitHubRadarConnection } from "@app/services/app-connection/github-radar";

import {
  CreateGitHubDataSourceSchema,
  GitHubDataSourceListItemSchema,
  GitHubDataSourceSchema,
  GitHubFindingSchema
} from "./github-secret-scanning-schemas";

export type TGitHubDataSource = z.infer<typeof GitHubDataSourceSchema>;

export type TGitHubDataSourceInput = z.infer<typeof CreateGitHubDataSourceSchema>;

export type TGitHubDataSourceListItem = z.infer<typeof GitHubDataSourceListItemSchema>;

export type TGitHubFinding = z.infer<typeof GitHubFindingSchema>;

export type TGitHubDataSourceWithConnection = TGitHubDataSource & {
  connection: TGitHubRadarConnection;
};

export type TQueueGitHubResourceDiffScan = {
  dataSourceType: SecretScanningDataSource.GitHub;
  payload: PushEvent;
  dataSourceId: string;
  resourceId: string;
  scanId: string;
};
@@ -0,0 +1,3 @@
export * from "./github-secret-scanning-constants";
export * from "./github-secret-scanning-schemas";
export * from "./github-secret-scanning-types";
@@ -0,0 +1,460 @@
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { TDbClient } from "@app/db";
|
||||
import {
|
||||
SecretScanningResourcesSchema,
|
||||
SecretScanningScansSchema,
|
||||
TableName,
|
||||
TSecretScanningDataSources
|
||||
} from "@app/db/schemas";
|
||||
import { SecretScanningFindingStatus } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
|
||||
import { DatabaseError } from "@app/lib/errors";
|
||||
import {
|
||||
buildFindFilter,
|
||||
ormify,
|
||||
prependTableNameToFindFilter,
|
||||
selectAllTableCols,
|
||||
sqlNestRelationships,
|
||||
TFindOpt
|
||||
} from "@app/lib/knex";
|
||||
|
||||
export type TSecretScanningV2DALFactory = ReturnType<typeof secretScanningV2DALFactory>;
|
||||
|
||||
type TSecretScanningDataSourceFindFilter = Parameters<typeof buildFindFilter<TSecretScanningDataSources>>[0];
|
||||
type TSecretScanningDataSourceFindOptions = TFindOpt<TSecretScanningDataSources, true, "name">;
|
||||
|
||||
const baseSecretScanningDataSourceQuery = ({
|
||||
filter = {},
|
||||
db,
|
||||
tx
|
||||
}: {
|
||||
db: TDbClient;
|
||||
filter?: TSecretScanningDataSourceFindFilter;
|
||||
options?: TSecretScanningDataSourceFindOptions;
|
||||
tx?: Knex;
|
||||
}) => {
|
||||
const query = (tx || db.replicaNode())(TableName.SecretScanningDataSource)
|
||||
.join(
|
||||
TableName.AppConnection,
|
||||
`${TableName.SecretScanningDataSource}.connectionId`,
|
||||
`${TableName.AppConnection}.id`
|
||||
)
|
||||
.select(selectAllTableCols(TableName.SecretScanningDataSource))
|
||||
.select(
|
||||
// entire connection
|
||||
db.ref("name").withSchema(TableName.AppConnection).as("connectionName"),
|
||||
db.ref("method").withSchema(TableName.AppConnection).as("connectionMethod"),
|
||||
db.ref("app").withSchema(TableName.AppConnection).as("connectionApp"),
|
||||
db.ref("orgId").withSchema(TableName.AppConnection).as("connectionOrgId"),
|
||||
db.ref("encryptedCredentials").withSchema(TableName.AppConnection).as("connectionEncryptedCredentials"),
|
||||
db.ref("description").withSchema(TableName.AppConnection).as("connectionDescription"),
|
||||
db.ref("version").withSchema(TableName.AppConnection).as("connectionVersion"),
|
||||
db.ref("createdAt").withSchema(TableName.AppConnection).as("connectionCreatedAt"),
|
||||
db.ref("updatedAt").withSchema(TableName.AppConnection).as("connectionUpdatedAt"),
|
||||
db
|
||||
.ref("isPlatformManagedCredentials")
|
||||
.withSchema(TableName.AppConnection)
|
||||
.as("connectionIsPlatformManagedCredentials")
|
||||
);
|
||||
|
||||
if (filter) {
|
||||
/* eslint-disable @typescript-eslint/no-misused-promises */
|
||||
void query.where(buildFindFilter(prependTableNameToFindFilter(TableName.SecretScanningDataSource, filter)));
|
||||
}
|
||||
|
||||
return query;
|
||||
};
|
||||
|
||||
const expandSecretScanningDataSource = <
|
||||
T extends Awaited<ReturnType<typeof baseSecretScanningDataSourceQuery>>[number]
|
||||
>(
|
||||
dataSource: T
|
||||
) => {
|
||||
const {
|
||||
connectionApp,
|
||||
connectionName,
|
||||
connectionId,
|
||||
connectionOrgId,
|
||||
connectionEncryptedCredentials,
|
||||
connectionMethod,
|
||||
connectionDescription,
|
||||
connectionCreatedAt,
|
||||
connectionUpdatedAt,
|
||||
connectionVersion,
|
||||
connectionIsPlatformManagedCredentials,
|
||||
...el
|
||||
} = dataSource;
|
||||
|
||||
return {
|
||||
...el,
|
||||
connectionId,
|
||||
connection: connectionId
|
||||
? {
|
||||
app: connectionApp,
|
||||
id: connectionId,
|
||||
name: connectionName,
|
||||
orgId: connectionOrgId,
|
||||
encryptedCredentials: connectionEncryptedCredentials,
|
||||
method: connectionMethod,
|
||||
description: connectionDescription,
|
||||
createdAt: connectionCreatedAt,
|
||||
updatedAt: connectionUpdatedAt,
|
||||
version: connectionVersion,
|
||||
isPlatformManagedCredentials: connectionIsPlatformManagedCredentials
|
||||
}
|
||||
: undefined
|
||||
};
|
||||
};
|
||||
|
||||
export const secretScanningV2DALFactory = (db: TDbClient) => {
|
||||
const dataSourceOrm = ormify(db, TableName.SecretScanningDataSource);
|
||||
const resourceOrm = ormify(db, TableName.SecretScanningResource);
|
||||
const scanOrm = ormify(db, TableName.SecretScanningScan);
|
||||
const findingOrm = ormify(db, TableName.SecretScanningFinding);
|
||||
const configOrm = ormify(db, TableName.SecretScanningConfig);
|
||||
|
||||
const findDataSource = async (filter: Parameters<(typeof dataSourceOrm)["find"]>[0], tx?: Knex) => {
|
||||
try {
|
||||
const dataSources = await baseSecretScanningDataSourceQuery({ filter, db, tx });
|
||||
|
||||
if (!dataSources.length) return [];
|
||||
|
||||
return dataSources.map(expandSecretScanningDataSource);
|
||||
} catch (error) {
|
||||
throw new DatabaseError({ error, name: "Find - Secret Scanning Data Source" });
|
||||
}
|
||||
};
|
||||
|
||||
const findDataSourceById = async (id: string, tx?: Knex) => {
|
||||
try {
|
||||
const dataSource = await baseSecretScanningDataSourceQuery({ filter: { id }, db, tx }).first();
|
||||
|
||||
if (dataSource) return expandSecretScanningDataSource(dataSource);
|
||||
} catch (error) {
|
||||
throw new DatabaseError({ error, name: "Find By ID - Secret Scanning Data Source" });
|
||||
}
|
||||
};
|
||||
|
||||
const createDataSource = async (data: Parameters<(typeof dataSourceOrm)["create"]>[0], tx?: Knex) => {
|
||||
const source = await dataSourceOrm.create(data, tx);
|
||||
|
||||
const dataSource = (await baseSecretScanningDataSourceQuery({
|
||||
filter: { id: source.id },
|
||||
db,
|
||||
tx
|
||||
}).first())!;
|
||||
|
||||
return expandSecretScanningDataSource(dataSource);
|
||||
};
|
||||
|
||||
const updateDataSourceById = async (
|
||||
dataSourceId: string,
|
||||
data: Parameters<(typeof dataSourceOrm)["updateById"]>[1],
|
||||
tx?: Knex
|
||||
) => {
|
||||
const source = await dataSourceOrm.updateById(dataSourceId, data, tx);
|
||||
|
||||
const dataSource = (await baseSecretScanningDataSourceQuery({
|
||||
filter: { id: source.id },
|
||||
db,
|
||||
tx
|
||||
}).first())!;
|
||||
|
||||
return expandSecretScanningDataSource(dataSource);
|
||||
};
|
||||
|
||||
const deleteDataSourceById = async (dataSourceId: string, tx?: Knex) => {
|
||||
const dataSource = (await baseSecretScanningDataSourceQuery({
|
||||
filter: { id: dataSourceId },
|
||||
db,
|
||||
tx
|
||||
}).first())!;
|
||||
|
||||
await dataSourceOrm.deleteById(dataSourceId, tx);
|
||||
|
||||
return expandSecretScanningDataSource(dataSource);
|
||||
};
|
||||
|
||||
const findOneDataSource = async (filter: Parameters<(typeof dataSourceOrm)["findOne"]>[0], tx?: Knex) => {
|
||||
try {
|
||||
const dataSource = await baseSecretScanningDataSourceQuery({ filter, db, tx }).first();
|
||||
|
||||
if (dataSource) {
|
||||
return expandSecretScanningDataSource(dataSource);
|
||||
}
|
||||
} catch (error) {
|
||||
throw new DatabaseError({ error, name: "Find One - Secret Scanning Data Source" });
|
||||
}
|
||||
};
|
||||
|
||||
const findDataSourceWithDetails = async (filter: Parameters<(typeof dataSourceOrm)["find"]>[0], tx?: Knex) => {
|
||||
try {
|
||||
// TODO (scott): this query will probably need to be optimized
|
||||
|
||||
const dataSources = await baseSecretScanningDataSourceQuery({ filter, db, tx })
|
||||
.leftJoin(
|
||||
TableName.SecretScanningResource,
|
||||
`${TableName.SecretScanningResource}.dataSourceId`,
|
||||
`${TableName.SecretScanningDataSource}.id`
|
||||
)
|
||||
.leftJoin(
|
||||
TableName.SecretScanningScan,
|
||||
`${TableName.SecretScanningScan}.resourceId`,
|
||||
`${TableName.SecretScanningResource}.id`
|
||||
)
|
||||
.leftJoin(
|
||||
TableName.SecretScanningFinding,
|
||||
`${TableName.SecretScanningFinding}.scanId`,
|
||||
`${TableName.SecretScanningScan}.id`
|
||||
)
|
||||
.where((qb) => {
|
||||
void qb
|
||||
.where(`${TableName.SecretScanningFinding}.status`, SecretScanningFindingStatus.Unresolved)
|
||||
.orWhereNull(`${TableName.SecretScanningFinding}.status`);
|
||||
})
|
||||
.select(
|
||||
db.ref("id").withSchema(TableName.SecretScanningScan).as("scanId"),
|
||||
db.ref("status").withSchema(TableName.SecretScanningScan).as("scanStatus"),
|
||||
db.ref("statusMessage").withSchema(TableName.SecretScanningScan).as("scanStatusMessage"),
|
||||
db.ref("createdAt").withSchema(TableName.SecretScanningScan).as("scanCreatedAt"),
|
||||
db.ref("status").withSchema(TableName.SecretScanningFinding).as("findingStatus"),
|
||||
db.ref("id").withSchema(TableName.SecretScanningFinding).as("findingId")
|
||||
);
|
||||
|
||||
if (!dataSources.length) return [];
|
||||
|
||||
const results = sqlNestRelationships({
|
||||
data: dataSources,
|
||||
key: "id",
|
||||
parentMapper: (dataSource) => expandSecretScanningDataSource(dataSource),
|
||||
childrenMapper: [
|
||||
{
|
||||
key: "scanId",
|
||||
label: "scans" as const,
|
||||
mapper: ({ scanId, scanCreatedAt, scanStatus, scanStatusMessage }) => ({
|
||||
id: scanId,
|
||||
createdAt: scanCreatedAt,
|
||||
status: scanStatus,
|
||||
statusMessage: scanStatusMessage
|
||||
})
|
||||
},
|
||||
{
|
||||
key: "findingId",
|
||||
label: "findings" as const,
|
||||
mapper: ({ findingId }) => ({
|
||||
id: findingId
|
||||
})
|
||||
}
|
||||
]
|
||||
});
|
||||
|
||||
return results.map(({ scans, findings, ...dataSource }) => {
|
||||
const lastScan =
|
||||
scans && scans.length
|
||||
? scans.reduce((latest, current) => {
|
||||
return new Date(current.createdAt) > new Date(latest.createdAt) ? current : latest;
|
||||
})
|
||||
: null;
|
||||
|
||||
return {
|
||||
...dataSource,
|
||||
lastScanStatus: lastScan?.status ?? null,
|
||||
lastScanStatusMessage: lastScan?.statusMessage ?? null,
|
||||
lastScannedAt: lastScan?.createdAt ?? null,
|
||||
unresolvedFindings: scans.length ? findings.length : null
|
||||
};
|
||||
});
|
||||
} catch (error) {
|
||||
throw new DatabaseError({ error, name: "Find with Details - Secret Scanning Data Source" });
|
||||
}
|
||||
};
|
||||
|
||||
const findResourcesWithDetails = async (filter: Parameters<(typeof resourceOrm)["find"]>[0], tx?: Knex) => {
|
||||
try {
|
||||
// TODO (scott): this query will probably need to be optimized
|
||||
|
||||
const resources = await (tx || db.replicaNode())(TableName.SecretScanningResource)
|
||||
.where((qb) => {
|
||||
if (filter)
|
||||
void qb.where(buildFindFilter(prependTableNameToFindFilter(TableName.SecretScanningResource, filter)));
|
||||
})
|
||||
.leftJoin(
|
||||
TableName.SecretScanningScan,
|
||||
`${TableName.SecretScanningScan}.resourceId`,
|
||||
`${TableName.SecretScanningResource}.id`
|
||||
)
|
||||
.leftJoin(
|
||||
TableName.SecretScanningFinding,
|
||||
`${TableName.SecretScanningFinding}.scanId`,
|
||||
`${TableName.SecretScanningScan}.id`
|
||||
)
|
||||
.where((qb) => {
|
||||
void qb
|
||||
.where(`${TableName.SecretScanningFinding}.status`, SecretScanningFindingStatus.Unresolved)
|
||||
.orWhereNull(`${TableName.SecretScanningFinding}.status`);
|
||||
})
|
||||
.select(selectAllTableCols(TableName.SecretScanningResource))
|
||||
.select(
|
||||
db.ref("id").withSchema(TableName.SecretScanningScan).as("scanId"),
|
||||
db.ref("status").withSchema(TableName.SecretScanningScan).as("scanStatus"),
|
||||
db.ref("type").withSchema(TableName.SecretScanningScan).as("scanType"),
|
||||
db.ref("statusMessage").withSchema(TableName.SecretScanningScan).as("scanStatusMessage"),
|
||||
db.ref("createdAt").withSchema(TableName.SecretScanningScan).as("scanCreatedAt"),
|
||||
db.ref("status").withSchema(TableName.SecretScanningFinding).as("findingStatus"),
|
||||
db.ref("id").withSchema(TableName.SecretScanningFinding).as("findingId")
|
||||
);
|
||||
|
||||
if (!resources.length) return [];
|
||||
|
||||
const results = sqlNestRelationships({
|
||||
data: resources,
|
||||
key: "id",
|
||||
parentMapper: (resource) => SecretScanningResourcesSchema.parse(resource),
|
||||
childrenMapper: [
|
||||
{
|
||||
key: "scanId",
|
||||
label: "scans" as const,
|
||||
mapper: ({ scanId, scanCreatedAt, scanStatus, scanStatusMessage, scanType }) => ({
|
||||
id: scanId,
|
||||
type: scanType,
|
||||
createdAt: scanCreatedAt,
|
||||
status: scanStatus,
|
||||
statusMessage: scanStatusMessage
|
||||
})
|
||||
},
|
||||
{
|
||||
key: "findingId",
|
||||
label: "findings" as const,
|
||||
mapper: ({ findingId }) => ({
|
||||
id: findingId
|
||||
})
|
||||
}
|
||||
]
|
||||
});
|
||||
|
||||
return results.map(({ scans, findings, ...resource }) => {
|
||||
const lastScan =
|
||||
scans && scans.length
|
||||
? scans.reduce((latest, current) => {
|
||||
return new Date(current.createdAt) > new Date(latest.createdAt) ? current : latest;
|
||||
})
|
||||
: null;
|
||||
|
||||
return {
|
||||
...resource,
|
||||
lastScanStatus: lastScan?.status ?? null,
|
||||
lastScanStatusMessage: lastScan?.statusMessage ?? null,
|
||||
lastScannedAt: lastScan?.createdAt ?? null,
|
||||
unresolvedFindings: findings?.length ?? 0
|
||||
};
|
||||
});
|
||||
} catch (error) {
|
||||
throw new DatabaseError({ error, name: "Find with Details - Secret Scanning Resource" });
|
||||
}
|
||||
};
|
||||
|
||||
const findScansWithDetailsByDataSourceId = async (dataSourceId: string, tx?: Knex) => {
|
||||
try {
|
||||
// TODO (scott): this query will probably need to be optimized
|
||||
|
||||
const scans = await (tx || db.replicaNode())(TableName.SecretScanningScan)
|
||||
.leftJoin(
|
||||
TableName.SecretScanningResource,
|
||||
`${TableName.SecretScanningResource}.id`,
|
||||
`${TableName.SecretScanningScan}.resourceId`
|
||||
)
|
||||
.where(`${TableName.SecretScanningResource}.dataSourceId`, dataSourceId)
|
||||
.leftJoin(
|
||||
TableName.SecretScanningFinding,
|
||||
`${TableName.SecretScanningFinding}.scanId`,
|
||||
`${TableName.SecretScanningScan}.id`
|
||||
)
|
||||
.select(selectAllTableCols(TableName.SecretScanningScan))
|
||||
.select(
|
||||
db.ref("status").withSchema(TableName.SecretScanningFinding).as("findingStatus"),
|
||||
db.ref("id").withSchema(TableName.SecretScanningFinding).as("findingId"),
|
||||
db.ref("name").withSchema(TableName.SecretScanningResource).as("resourceName")
|
||||
);
|
||||
|
||||
if (!scans.length) return [];
|
||||
|
||||
const results = sqlNestRelationships({
|
||||
data: scans,
|
||||
key: "id",
|
||||
parentMapper: (scan) => SecretScanningScansSchema.parse(scan),
|
||||
childrenMapper: [
|
||||
{
|
||||
key: "findingId",
|
||||
label: "findings" as const,
|
||||
mapper: ({ findingId, findingStatus }) => ({
|
||||
id: findingId,
|
||||
status: findingStatus
|
||||
})
|
||||
},
|
||||
{
|
||||
key: "resourceId",
|
||||
label: "resources" as const,
|
||||
mapper: ({ resourceName }) => ({
|
||||
name: resourceName
|
||||
})
|
||||
}
|
||||
]
|
||||
});
|
||||
|
||||
return results.map(({ findings, resources, ...scan }) => {
|
||||
return {
|
||||
...scan,
|
||||
unresolvedFindings:
|
||||
findings?.filter((finding) => finding.status === SecretScanningFindingStatus.Unresolved).length ?? 0,
|
||||
resolvedFindings:
|
||||
findings?.filter((finding) => finding.status !== SecretScanningFindingStatus.Unresolved).length ?? 0,
|
||||
resourceName: resources[0].name
|
||||
};
|
||||
});
|
||||
} catch (error) {
|
||||
throw new DatabaseError({ error, name: "Find with Details By Data Source ID - Secret Scanning Scan" });
|
||||
}
|
||||
};
|
||||
|
||||
const findScansByDataSourceId = async (dataSourceId: string, tx?: Knex) => {
|
||||
try {
|
||||
const scans = await (tx || db.replicaNode())(TableName.SecretScanningScan)
|
||||
.leftJoin(
|
||||
TableName.SecretScanningResource,
|
||||
`${TableName.SecretScanningResource}.id`,
|
||||
`${TableName.SecretScanningScan}.resourceId`
|
||||
)
|
||||
.where(`${TableName.SecretScanningResource}.dataSourceId`, dataSourceId)
|
||||
|
||||
.select(selectAllTableCols(TableName.SecretScanningScan));
|
||||
|
||||
return scans;
|
||||
} catch (error) {
|
||||
throw new DatabaseError({ error, name: "Find By Data Source ID - Secret Scanning Scan" });
|
||||
}
|
||||
};
|
||||
|
||||
return {
|
||||
dataSources: {
|
||||
...dataSourceOrm,
|
||||
find: findDataSource,
|
||||
findById: findDataSourceById,
|
||||
findOne: findOneDataSource,
|
||||
create: createDataSource,
|
||||
updateById: updateDataSourceById,
|
||||
deleteById: deleteDataSourceById,
|
||||
findWithDetails: findDataSourceWithDetails
|
||||
},
|
||||
resources: {
|
||||
...resourceOrm,
|
||||
findWithDetails: findResourcesWithDetails
|
||||
},
|
||||
scans: {
|
||||
...scanOrm,
|
||||
findWithDetailsByDataSourceId: findScansWithDetailsByDataSourceId,
|
||||
findByDataSourceId: findScansByDataSourceId
|
||||
},
|
||||
findings: findingOrm,
|
||||
configs: configOrm
|
||||
};
|
||||
};
|
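A short usage sketch of the DAL factory; `db` is the application's TDbClient and `projectId` is a placeholder.

const secretScanningV2DAL = secretScanningV2DALFactory(db);

// Data sources enriched with lastScanStatus, lastScannedAt and an unresolvedFindings count.
const dataSources = await secretScanningV2DAL.dataSources.findWithDetails({ projectId });

// Scan history for a single data source, including per-scan finding counts.
const scans = await secretScanningV2DAL.scans.findWithDetailsByDataSourceId(dataSources[0].id);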
@@ -0,0 +1,33 @@
export enum SecretScanningDataSource {
  GitHub = "github"
}

export enum SecretScanningScanStatus {
  Completed = "completed",
  Failed = "failed",
  Queued = "queued",
  Scanning = "scanning"
}

export enum SecretScanningScanType {
  FullScan = "full-scan",
  DiffScan = "diff-scan"
}

export enum SecretScanningFindingStatus {
  Resolved = "resolved",
  Unresolved = "unresolved",
  FalsePositive = "false-positive",
  Ignore = "ignore"
}

export enum SecretScanningResource {
  Repository = "repository",
  Project = "project"
}

export enum SecretScanningFindingSeverity {
  High = "high",
  Medium = "medium",
  Low = "low"
}
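// A minimal helper sketch built on the enums above (the helper name is hypothetical): only
// Unresolved is treated as an open finding; Resolved, FalsePositive and Ignore all count as
// closed, which is how the DAL earlier in this changeset derives its unresolved/resolved counts.
const isFindingOpen = (status: SecretScanningFindingStatus) =>
  status === SecretScanningFindingStatus.Unresolved;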
@@ -0,0 +1,19 @@
import { GitHubSecretScanningFactory } from "@app/ee/services/secret-scanning-v2/github/github-secret-scanning-factory";

import { SecretScanningDataSource } from "./secret-scanning-v2-enums";
import {
  TQueueSecretScanningResourceDiffScan,
  TSecretScanningDataSourceCredentials,
  TSecretScanningDataSourceWithConnection,
  TSecretScanningFactory
} from "./secret-scanning-v2-types";

type TSecretScanningFactoryImplementation = TSecretScanningFactory<
  TSecretScanningDataSourceWithConnection,
  TSecretScanningDataSourceCredentials,
  TQueueSecretScanningResourceDiffScan["payload"]
>;

export const SECRET_SCANNING_FACTORY_MAP: Record<SecretScanningDataSource, TSecretScanningFactoryImplementation> = {
  [SecretScanningDataSource.GitHub]: GitHubSecretScanningFactory as TSecretScanningFactoryImplementation
};
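// Usage sketch: each map entry is a factory function, so callers instantiate it per data source
// type and then call the provider-specific hooks it returns (the call below mirrors how the queue
// and service layers in this changeset consume the map).
const githubFactory = SECRET_SCANNING_FACTORY_MAP[SecretScanningDataSource.GitHub]();
// e.g. const rawResources = await githubFactory.listRawResources(dataSourceWithConnection);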
@@ -0,0 +1,140 @@
import { AxiosError } from "axios";
import { exec } from "child_process";
import RE2 from "re2";

import { readFindingsFile } from "@app/ee/services/secret-scanning/secret-scanning-queue/secret-scanning-fns";
import { SecretMatch } from "@app/ee/services/secret-scanning/secret-scanning-queue/secret-scanning-queue-types";
import { GITHUB_SECRET_SCANNING_DATA_SOURCE_LIST_OPTION } from "@app/ee/services/secret-scanning-v2/github";
import { titleCaseToCamelCase } from "@app/lib/fn";

import { SecretScanningDataSource, SecretScanningFindingSeverity } from "./secret-scanning-v2-enums";
import { TCloneRepository, TGetFindingsPayload, TSecretScanningDataSourceListItem } from "./secret-scanning-v2-types";

const SECRET_SCANNING_SOURCE_LIST_OPTIONS: Record<SecretScanningDataSource, TSecretScanningDataSourceListItem> = {
  [SecretScanningDataSource.GitHub]: GITHUB_SECRET_SCANNING_DATA_SOURCE_LIST_OPTION
};

export const listSecretScanningDataSourceOptions = () => {
  return Object.values(SECRET_SCANNING_SOURCE_LIST_OPTIONS).sort((a, b) => a.name.localeCompare(b.name));
};

export const cloneRepository = async ({ cloneUrl, repoPath }: TCloneRepository): Promise<void> => {
  const command = `git clone ${cloneUrl} ${repoPath} --bare`;
  return new Promise((resolve, reject) => {
    exec(command, (error) => {
      if (error) {
        reject(error);
      } else {
        resolve();
      }
    });
  });
};

export function scanDirectory(inputPath: string, outputPath: string, configPath?: string): Promise<void> {
  return new Promise((resolve, reject) => {
    const command = `cd ${inputPath} && infisical scan --exit-code=77 -r "${outputPath}" ${configPath ? `-c ${configPath}` : ""}`;
    exec(command, (error) => {
      if (error && error.code !== 77) {
        reject(error);
      } else {
        resolve();
      }
    });
  });
}

export const scanGitRepositoryAndGetFindings = async (
  scanPath: string,
  findingsPath: string,
  configPath?: string
): TGetFindingsPayload => {
  await scanDirectory(scanPath, findingsPath, configPath);

  const findingsData = JSON.parse(await readFindingsFile(findingsPath)) as SecretMatch[];

  return findingsData.map(
    ({
      // discard match and secret as we don't want to store
      Match,
      Secret,
      ...finding
    }) => ({
      details: titleCaseToCamelCase(finding),
      fingerprint: `${finding.Fingerprint}:${finding.StartColumn}`,
      severity: SecretScanningFindingSeverity.High,
      rule: finding.RuleID
    })
  );
};

export const replaceNonChangesWithNewlines = (patch: string) => {
  return patch
    .split("\n")
    .map((line) => {
      // Keep added lines (remove the + prefix)
      if (line.startsWith("+") && !line.startsWith("+++")) {
        return line.substring(1);
      }

      // Replace everything else with newlines to maintain line positioning
      return "";
    })
    .join("\n");
};

const HunkHeaderRegex = new RE2(/^@@ -\d+(?:,\d+)? \+(\d+)(?:,(\d+))? @@/);

export const convertPatchLineToFileLineNumber = (patch: string, patchLineNumber: number) => {
  const lines = patch.split("\n");
  let currentPatchLine = 0;
  let currentNewLine = 0;

  for (const line of lines) {
    currentPatchLine += 1;

    // Hunk header: @@ -a,b +c,d @@
    const hunkHeaderMatch = HunkHeaderRegex.match(line);
    if (hunkHeaderMatch) {
      const startLine = parseInt(hunkHeaderMatch[1], 10);
      currentNewLine = startLine;
      // eslint-disable-next-line no-continue
      continue;
    }

    if (currentPatchLine === patchLineNumber) {
      return currentNewLine;
    }

    if (line.startsWith("+++")) {
      // eslint-disable-next-line no-continue
      continue; // skip file metadata lines
    }

    // Advance only if the line exists in the new file
    if (line.startsWith("+") || line.startsWith(" ")) {
      currentNewLine += 1;
    }
  }

  return currentNewLine;
};

const MAX_MESSAGE_LENGTH = 1024;

export const parseScanErrorMessage = (err: unknown): string => {
  let errorMessage: string;

  if (err instanceof AxiosError) {
    errorMessage = err?.response?.data
      ? JSON.stringify(err?.response?.data)
      : (err?.message ?? "An unknown error occurred.");
  } else {
    errorMessage = (err as Error)?.message || "An unknown error occurred.";
  }

  return errorMessage.length <= MAX_MESSAGE_LENGTH
    ? errorMessage
    : `${errorMessage.substring(0, MAX_MESSAGE_LENGTH - 3)}...`;
};
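// Worked example for the patch helpers above (the sample patch is illustrative). The hunk header
// says the new file content starts at line 10; the context line advances the counter to 11, so the
// added line `+const b = 2;` (patch line 3) maps to line 11 of the new file. In the same way,
// replaceNonChangesWithNewlines blanks out everything except added lines, preserving each added
// line's position within the patch so scanner hits can be fed to convertPatchLineToFileLineNumber.
const samplePatch = ["@@ -10,2 +10,3 @@", " const a = 1;", "+const b = 2;", " export default a;"].join("\n");

// convertPatchLineToFileLineNumber(samplePatch, 3) === 11
// replaceNonChangesWithNewlines(samplePatch) === "\n\nconst b = 2;\n"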
@@ -0,0 +1,14 @@
import { SecretScanningDataSource } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
import { AppConnection } from "@app/services/app-connection/app-connection-enums";

export const SECRET_SCANNING_DATA_SOURCE_NAME_MAP: Record<SecretScanningDataSource, string> = {
  [SecretScanningDataSource.GitHub]: "GitHub"
};

export const SECRET_SCANNING_DATA_SOURCE_CONNECTION_MAP: Record<SecretScanningDataSource, AppConnection> = {
  [SecretScanningDataSource.GitHub]: AppConnection.GitHubRadar
};

export const AUTO_SYNC_DESCRIPTION_HELPER: Record<SecretScanningDataSource, { verb: string; noun: string }> = {
  [SecretScanningDataSource.GitHub]: { verb: "push", noun: "repositories" }
};
@@ -0,0 +1,626 @@
|
||||
import { join } from "path";
|
||||
|
||||
import { ProjectMembershipRole, TSecretScanningFindings } from "@app/db/schemas";
|
||||
import { TAuditLogServiceFactory } from "@app/ee/services/audit-log/audit-log-service";
|
||||
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
|
||||
import {
|
||||
createTempFolder,
|
||||
deleteTempFolder,
|
||||
writeTextToFile
|
||||
} from "@app/ee/services/secret-scanning/secret-scanning-queue/secret-scanning-fns";
|
||||
import {
|
||||
parseScanErrorMessage,
|
||||
scanGitRepositoryAndGetFindings
|
||||
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-fns";
|
||||
import { KeyStorePrefixes, TKeyStoreFactory } from "@app/keystore/keystore";
|
||||
import { getConfig } from "@app/lib/config/env";
|
||||
import { BadRequestError, InternalServerError } from "@app/lib/errors";
|
||||
import { logger } from "@app/lib/logger";
|
||||
import { QueueJobs, QueueName, TQueueServiceFactory } from "@app/queue";
|
||||
import { decryptAppConnection } from "@app/services/app-connection/app-connection-fns";
|
||||
import { TAppConnection } from "@app/services/app-connection/app-connection-types";
|
||||
import { ActorType } from "@app/services/auth/auth-type";
|
||||
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
|
||||
import { TProjectDALFactory } from "@app/services/project/project-dal";
|
||||
import { TProjectMembershipDALFactory } from "@app/services/project-membership/project-membership-dal";
|
||||
import { SmtpTemplates, TSmtpService } from "@app/services/smtp/smtp-service";
|
||||
|
||||
import { TSecretScanningV2DALFactory } from "./secret-scanning-v2-dal";
|
||||
import {
|
||||
SecretScanningDataSource,
|
||||
SecretScanningResource,
|
||||
SecretScanningScanStatus,
|
||||
SecretScanningScanType
|
||||
} from "./secret-scanning-v2-enums";
|
||||
import { SECRET_SCANNING_FACTORY_MAP } from "./secret-scanning-v2-factory";
|
||||
import {
|
||||
TFindingsPayload,
|
||||
TQueueSecretScanningDataSourceFullScan,
|
||||
TQueueSecretScanningResourceDiffScan,
|
||||
TQueueSecretScanningSendNotification,
|
||||
TSecretScanningDataSourceWithConnection
|
||||
} from "./secret-scanning-v2-types";
|
||||
|
||||
type TSecretScanningV2QueueServiceFactoryDep = {
|
||||
queueService: TQueueServiceFactory;
|
||||
secretScanningV2DAL: TSecretScanningV2DALFactory;
|
||||
smtpService: Pick<TSmtpService, "sendMail">;
|
||||
projectMembershipDAL: Pick<TProjectMembershipDALFactory, "findAllProjectMembers">;
|
||||
projectDAL: Pick<TProjectDALFactory, "findById">;
|
||||
kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">;
|
||||
auditLogService: Pick<TAuditLogServiceFactory, "createAuditLog">;
|
||||
keyStore: Pick<TKeyStoreFactory, "acquireLock" | "getItem">;
|
||||
};
|
||||
|
||||
export type TSecretScanningV2QueueServiceFactory = Awaited<ReturnType<typeof secretScanningV2QueueServiceFactory>>;
|
||||
|
||||
export const secretScanningV2QueueServiceFactory = async ({
|
||||
queueService,
|
||||
secretScanningV2DAL,
|
||||
projectMembershipDAL,
|
||||
projectDAL,
|
||||
smtpService,
|
||||
kmsService,
|
||||
auditLogService,
|
||||
keyStore
|
||||
}: TSecretScanningV2QueueServiceFactoryDep) => {
|
||||
const queueDataSourceFullScan = async (
|
||||
dataSource: TSecretScanningDataSourceWithConnection,
|
||||
resourceExternalId?: string
|
||||
) => {
|
||||
try {
|
||||
const { type } = dataSource;
|
||||
|
||||
const factory = SECRET_SCANNING_FACTORY_MAP[type]();
|
||||
|
||||
const rawResources = await factory.listRawResources(dataSource);
|
||||
|
||||
let filteredRawResources = rawResources;
|
||||
|
||||
// TODO: should add individual resource fetch to factory
|
||||
if (resourceExternalId) {
|
||||
filteredRawResources = rawResources.filter((resource) => resource.externalId === resourceExternalId);
|
||||
}
|
||||
|
||||
if (!filteredRawResources.length) {
|
||||
throw new BadRequestError({
|
||||
message: `${resourceExternalId ? `Resource with ID "${resourceExternalId}" could not be found` : "Data source has no resources to scan"}. Ensure your data source config is correct and not filtering out scanning resources.`
|
||||
});
|
||||
}
|
||||
|
||||
for (const resource of filteredRawResources) {
|
||||
// eslint-disable-next-line no-await-in-loop
|
||||
if (await keyStore.getItem(KeyStorePrefixes.SecretScanningLock(dataSource.id, resource.externalId))) {
|
||||
throw new BadRequestError({ message: `A scan is already in progress for resource "${resource.name}"` });
|
||||
}
|
||||
}
|
||||
|
||||
await secretScanningV2DAL.resources.transaction(async (tx) => {
|
||||
const resources = await secretScanningV2DAL.resources.upsert(
|
||||
filteredRawResources.map((rawResource) => ({
|
||||
...rawResource,
|
||||
dataSourceId: dataSource.id
|
||||
})),
|
||||
["externalId", "dataSourceId"],
|
||||
tx
|
||||
);
|
||||
|
||||
const scans = await secretScanningV2DAL.scans.insertMany(
|
||||
resources.map((resource) => ({
|
||||
resourceId: resource.id,
|
||||
type: SecretScanningScanType.FullScan
|
||||
})),
|
||||
tx
|
||||
);
|
||||
|
||||
for (const scan of scans) {
|
||||
// eslint-disable-next-line no-await-in-loop
|
||||
await queueService.queuePg(QueueJobs.SecretScanningV2FullScan, {
|
||||
scanId: scan.id,
|
||||
resourceId: scan.resourceId,
|
||||
dataSourceId: dataSource.id
|
||||
});
|
||||
}
|
||||
});
|
||||
} catch (error) {
|
||||
logger.error(error, `Failed to queue full-scan for data source with ID "${dataSource.id}"`);
|
||||
|
||||
if (error instanceof BadRequestError) throw error;
|
||||
|
||||
throw new InternalServerError({ message: `Failed to queue scan: ${(error as Error).message}` });
|
||||
}
|
||||
};
|
||||
|
||||
await queueService.startPg<QueueName.SecretScanningV2>(
|
||||
QueueJobs.SecretScanningV2FullScan,
|
||||
async ([job]) => {
|
||||
const { scanId, resourceId, dataSourceId } = job.data as TQueueSecretScanningDataSourceFullScan;
|
||||
const { retryCount, retryLimit } = job;
|
||||
|
||||
const logDetails = `[scanId=${scanId}] [resourceId=${resourceId}] [dataSourceId=${dataSourceId}] [jobId=${job.id}] retryCount=[${retryCount}/${retryLimit}]`;
|
||||
|
||||
const tempFolder = await createTempFolder();
|
||||
|
||||
const dataSource = await secretScanningV2DAL.dataSources.findById(dataSourceId);
|
||||
|
||||
if (!dataSource) throw new Error(`Data source with ID "${dataSourceId}" not found`);
|
||||
|
||||
const resource = await secretScanningV2DAL.resources.findById(resourceId);
|
||||
|
||||
if (!resource) throw new Error(`Resource with ID "${resourceId}" not found`);
|
||||
|
||||
let lock: Awaited<ReturnType<typeof keyStore.acquireLock>> | undefined;
|
||||
|
||||
try {
|
||||
try {
|
||||
lock = await keyStore.acquireLock(
|
||||
[KeyStorePrefixes.SecretScanningLock(dataSource.id, resource.externalId)],
|
||||
60 * 1000 * 5
|
||||
);
|
||||
} catch (e) {
|
||||
throw new Error("Failed to acquire scanning lock.");
|
||||
}
|
||||
|
||||
await secretScanningV2DAL.scans.update(
|
||||
{ id: scanId },
|
||||
{
|
||||
status: SecretScanningScanStatus.Scanning
|
||||
}
|
||||
);
|
||||
|
||||
let connection: TAppConnection | null = null;
|
||||
if (dataSource.connection) connection = await decryptAppConnection(dataSource.connection, kmsService);
|
||||
|
||||
const factory = SECRET_SCANNING_FACTORY_MAP[dataSource.type as SecretScanningDataSource]();
|
||||
|
||||
const findingsPath = join(tempFolder, "findings.json");
|
||||
|
||||
const scanPath = await factory.getFullScanPath({
|
||||
dataSource: {
|
||||
...dataSource,
|
||||
connection
|
||||
} as TSecretScanningDataSourceWithConnection,
|
||||
resourceName: resource.name,
|
||||
tempFolder
|
||||
});
|
||||
|
||||
const config = await secretScanningV2DAL.configs.findOne({
|
||||
projectId: dataSource.projectId
|
||||
});
|
||||
|
||||
let configPath: string | undefined;
|
||||
|
||||
if (config && config.content) {
|
||||
configPath = join(tempFolder, "infisical-scan.toml");
|
||||
await writeTextToFile(configPath, config.content);
|
||||
}
|
||||
|
||||
let findingsPayload: TFindingsPayload;
|
||||
switch (resource.type) {
|
||||
case SecretScanningResource.Repository:
|
||||
case SecretScanningResource.Project:
|
||||
findingsPayload = await scanGitRepositoryAndGetFindings(scanPath, findingsPath, configPath);
|
||||
break;
|
||||
default:
|
||||
throw new Error("Unhandled resource type");
|
||||
}
|
||||
|
||||
const allFindings = await secretScanningV2DAL.findings.transaction(async (tx) => {
|
||||
let findings: TSecretScanningFindings[] = [];
|
||||
if (findingsPayload.length) {
|
||||
findings = await secretScanningV2DAL.findings.upsert(
|
||||
findingsPayload.map((finding) => ({
|
||||
...finding,
|
||||
projectId: dataSource.projectId,
|
||||
dataSourceName: dataSource.name,
|
||||
dataSourceType: dataSource.type,
|
||||
resourceName: resource.name,
|
||||
resourceType: resource.type,
|
||||
scanId
|
||||
})),
|
||||
["projectId", "fingerprint"],
|
||||
tx,
|
||||
["resourceName", "dataSourceName"]
|
||||
);
|
||||
}
|
||||
|
||||
await secretScanningV2DAL.scans.update(
|
||||
{ id: scanId },
|
||||
{
|
||||
status: SecretScanningScanStatus.Completed,
|
||||
statusMessage: null
|
||||
}
|
||||
);
|
||||
|
||||
return findings;
|
||||
});
|
||||
|
||||
const newFindings = allFindings.filter((finding) => finding.scanId === scanId);
|
||||
|
||||
if (newFindings.length) {
|
||||
await queueService.queuePg(QueueJobs.SecretScanningV2SendNotification, {
|
||||
status: SecretScanningScanStatus.Completed,
|
||||
resourceName: resource.name,
|
||||
isDiffScan: false,
|
||||
dataSource,
|
||||
numberOfSecrets: newFindings.length,
|
||||
scanId
|
||||
});
|
||||
}
|
||||
|
||||
await auditLogService.createAuditLog({
|
||||
projectId: dataSource.projectId,
|
||||
actor: {
|
||||
type: ActorType.PLATFORM,
|
||||
metadata: {}
|
||||
},
|
||||
event: {
|
||||
type: EventType.SECRET_SCANNING_DATA_SOURCE_SCAN,
|
||||
metadata: {
|
||||
dataSourceId: dataSource.id,
|
||||
dataSourceType: dataSource.type,
|
||||
resourceId: resource.id,
|
||||
resourceType: resource.type,
|
||||
scanId,
|
||||
scanStatus: SecretScanningScanStatus.Completed,
|
||||
scanType: SecretScanningScanType.FullScan,
|
||||
numberOfSecretsDetected: findingsPayload.length
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
logger.info(`secretScanningV2Queue: Full Scan Complete ${logDetails} findings=[${findingsPayload.length}]`);
|
||||
} catch (error) {
|
||||
if (retryCount === retryLimit) {
|
||||
const errorMessage = parseScanErrorMessage(error);
|
||||
|
||||
await secretScanningV2DAL.scans.update(
|
||||
{ id: scanId },
|
||||
{
|
||||
status: SecretScanningScanStatus.Failed,
|
||||
statusMessage: errorMessage
|
||||
}
|
||||
);
|
||||
|
||||
await queueService.queuePg(QueueJobs.SecretScanningV2SendNotification, {
|
||||
status: SecretScanningScanStatus.Failed,
|
||||
resourceName: resource.name,
|
||||
dataSource,
|
||||
errorMessage
|
||||
});
|
||||
|
||||
await auditLogService.createAuditLog({
|
||||
projectId: dataSource.projectId,
|
||||
actor: {
|
||||
type: ActorType.PLATFORM,
|
||||
metadata: {}
|
||||
},
|
||||
event: {
|
||||
type: EventType.SECRET_SCANNING_DATA_SOURCE_SCAN,
|
||||
metadata: {
|
||||
dataSourceId: dataSource.id,
|
||||
dataSourceType: dataSource.type,
|
||||
resourceId: resource.id,
|
||||
resourceType: resource.type,
|
||||
scanId,
|
||||
scanStatus: SecretScanningScanStatus.Failed,
|
||||
scanType: SecretScanningScanType.FullScan
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
logger.error(error, `secretScanningV2Queue: Full Scan Failed ${logDetails}`);
|
||||
throw error;
|
||||
} finally {
|
||||
await deleteTempFolder(tempFolder);
|
||||
await lock?.release();
|
||||
}
|
||||
},
|
||||
{
|
||||
batchSize: 1,
|
||||
workerCount: 20,
|
||||
pollingIntervalSeconds: 1
|
||||
}
|
||||
);
|
||||
|
||||
const queueResourceDiffScan = async ({
|
||||
payload,
|
||||
dataSourceId,
|
||||
dataSourceType
|
||||
}: Pick<TQueueSecretScanningResourceDiffScan, "payload" | "dataSourceId" | "dataSourceType">) => {
|
||||
const factory = SECRET_SCANNING_FACTORY_MAP[dataSourceType as SecretScanningDataSource]();
|
||||
|
||||
const resourcePayload = factory.getDiffScanResourcePayload(payload);
|
||||
|
||||
try {
|
||||
const { resourceId, scanId } = await secretScanningV2DAL.resources.transaction(async (tx) => {
|
||||
const [resource] = await secretScanningV2DAL.resources.upsert(
|
||||
[
|
||||
{
|
||||
...resourcePayload,
|
||||
dataSourceId
|
||||
}
|
||||
],
|
||||
["externalId", "dataSourceId"],
|
||||
tx
|
||||
);
|
||||
|
||||
const scan = await secretScanningV2DAL.scans.create(
|
||||
{
|
||||
resourceId: resource.id,
|
||||
type: SecretScanningScanType.DiffScan
|
||||
},
|
||||
tx
|
||||
);
|
||||
|
||||
return {
|
||||
resourceId: resource.id,
|
||||
scanId: scan.id
|
||||
};
|
||||
});
|
||||
|
||||
await queueService.queuePg(QueueJobs.SecretScanningV2DiffScan, {
|
||||
payload,
|
||||
dataSourceId,
|
||||
dataSourceType,
|
||||
scanId,
|
||||
resourceId
|
||||
});
|
||||
} catch (error) {
|
||||
logger.error(
|
||||
error,
|
||||
`secretScanningV2Queue: Failed to queue diff scan [dataSourceId=${dataSourceId}] [resourceExternalId=${resourcePayload.externalId}]`
|
||||
);
|
||||
}
|
||||
};
|
||||
|
||||
await queueService.startPg<QueueName.SecretScanningV2>(
|
||||
QueueJobs.SecretScanningV2DiffScan,
|
||||
async ([job]) => {
|
||||
const { payload, dataSourceId, resourceId, scanId } = job.data as TQueueSecretScanningResourceDiffScan;
|
||||
const { retryCount, retryLimit } = job;
|
||||
|
||||
const logDetails = `[dataSourceId=${dataSourceId}] [scanId=${scanId}] [resourceId=${resourceId}] [jobId=${job.id}] retryCount=[${retryCount}/${retryLimit}]`;
|
||||
|
||||
const dataSource = await secretScanningV2DAL.dataSources.findById(dataSourceId);
|
||||
|
||||
if (!dataSource) throw new Error(`Data source with ID "${dataSourceId}" not found`);
|
||||
|
||||
const resource = await secretScanningV2DAL.resources.findById(resourceId);
|
||||
|
||||
if (!resource) throw new Error(`Resource with ID "${resourceId}" not found`);
|
||||
|
||||
const factory = SECRET_SCANNING_FACTORY_MAP[dataSource.type as SecretScanningDataSource]();
|
||||
|
||||
const tempFolder = await createTempFolder();
|
||||
|
||||
try {
|
||||
await secretScanningV2DAL.scans.update(
|
||||
{ id: scanId },
|
||||
{
|
||||
status: SecretScanningScanStatus.Scanning
|
||||
}
|
||||
);
|
||||
|
||||
let connection: TAppConnection | null = null;
|
||||
if (dataSource.connection) connection = await decryptAppConnection(dataSource.connection, kmsService);
|
||||
|
||||
const config = await secretScanningV2DAL.configs.findOne({
|
||||
projectId: dataSource.projectId
|
||||
});
|
||||
|
||||
let configPath: string | undefined;
|
||||
|
||||
if (config && config.content) {
|
||||
configPath = join(tempFolder, "infisical-scan.toml");
|
||||
await writeTextToFile(configPath, config.content);
|
||||
}
|
||||
|
||||
const findingsPayload = await factory.getDiffScanFindingsPayload({
|
||||
dataSource: {
|
||||
...dataSource,
|
||||
connection
|
||||
} as TSecretScanningDataSourceWithConnection,
|
||||
resourceName: resource.name,
|
||||
payload,
|
||||
configPath
|
||||
});
|
||||
|
||||
const allFindings = await secretScanningV2DAL.findings.transaction(async (tx) => {
|
||||
let findings: TSecretScanningFindings[] = [];
|
||||
|
||||
if (findingsPayload.length) {
|
||||
findings = await secretScanningV2DAL.findings.upsert(
|
||||
findingsPayload.map((finding) => ({
|
||||
...finding,
|
||||
projectId: dataSource.projectId,
|
||||
dataSourceName: dataSource.name,
|
||||
dataSourceType: dataSource.type,
|
||||
resourceName: resource.name,
|
||||
resourceType: resource.type,
|
||||
scanId
|
||||
})),
|
||||
["projectId", "fingerprint"],
|
||||
tx,
|
||||
["resourceName", "dataSourceName"]
|
||||
);
|
||||
}
|
||||
|
||||
await secretScanningV2DAL.scans.update(
|
||||
{ id: scanId },
|
||||
{
|
||||
status: SecretScanningScanStatus.Completed
|
||||
}
|
||||
);
|
||||
|
||||
return findings;
|
||||
});
|
||||
|
||||
const newFindings = allFindings.filter((finding) => finding.scanId === scanId);
|
||||
|
||||
if (newFindings.length) {
|
||||
await queueService.queuePg(QueueJobs.SecretScanningV2SendNotification, {
|
||||
status: SecretScanningScanStatus.Completed,
|
||||
resourceName: resource.name,
|
||||
isDiffScan: true,
|
||||
dataSource,
|
||||
numberOfSecrets: newFindings.length,
|
||||
scanId
|
||||
});
|
||||
}
|
||||
|
||||
await auditLogService.createAuditLog({
|
||||
projectId: dataSource.projectId,
|
||||
actor: {
|
||||
type: ActorType.PLATFORM,
|
||||
metadata: {}
|
||||
},
|
||||
event: {
|
||||
type: EventType.SECRET_SCANNING_DATA_SOURCE_SCAN,
|
||||
metadata: {
|
||||
dataSourceId: dataSource.id,
|
||||
dataSourceType: dataSource.type,
|
||||
resourceId,
|
||||
resourceType: resource.type,
|
||||
scanId,
|
||||
scanStatus: SecretScanningScanStatus.Completed,
|
||||
scanType: SecretScanningScanType.DiffScan,
|
||||
numberOfSecretsDetected: findingsPayload.length
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
logger.info(`secretScanningV2Queue: Diff Scan Complete ${logDetails}`);
|
||||
} catch (error) {
|
||||
if (retryCount === retryLimit) {
|
||||
const errorMessage = parseScanErrorMessage(error);
|
||||
|
||||
await secretScanningV2DAL.scans.update(
|
||||
{ id: scanId },
|
||||
{
|
||||
status: SecretScanningScanStatus.Failed,
|
||||
statusMessage: errorMessage
|
||||
}
|
||||
);
|
||||
|
||||
await queueService.queuePg(QueueJobs.SecretScanningV2SendNotification, {
|
||||
status: SecretScanningScanStatus.Failed,
|
||||
resourceName: resource.name,
|
||||
dataSource,
|
||||
errorMessage
|
||||
});
|
||||
|
||||
await auditLogService.createAuditLog({
|
||||
projectId: dataSource.projectId,
|
||||
actor: {
|
||||
type: ActorType.PLATFORM,
|
||||
metadata: {}
|
||||
},
|
||||
event: {
|
||||
type: EventType.SECRET_SCANNING_DATA_SOURCE_SCAN,
|
||||
metadata: {
|
||||
dataSourceId: dataSource.id,
|
||||
dataSourceType: dataSource.type,
|
||||
resourceId: resource.id,
|
||||
resourceType: resource.type,
|
||||
scanId,
|
||||
scanStatus: SecretScanningScanStatus.Failed,
|
||||
scanType: SecretScanningScanType.DiffScan
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
logger.error(error, `secretScanningV2Queue: Diff Scan Failed ${logDetails}`);
|
||||
throw error;
|
||||
} finally {
|
||||
await deleteTempFolder(tempFolder);
|
||||
}
|
||||
},
|
||||
{
|
||||
batchSize: 1,
|
||||
workerCount: 20,
|
||||
pollingIntervalSeconds: 1
|
||||
}
|
||||
);
|
||||
|
||||
await queueService.startPg<QueueName.SecretScanningV2>(
|
||||
QueueJobs.SecretScanningV2SendNotification,
|
||||
async ([job]) => {
|
||||
const { dataSource, resourceName, ...payload } = job.data as TQueueSecretScanningSendNotification;
|
||||
|
||||
const appCfg = getConfig();
|
||||
|
||||
if (!appCfg.isSmtpConfigured) return;
|
||||
|
||||
try {
|
||||
const { projectId } = dataSource;
|
||||
|
||||
logger.info(
|
||||
`secretScanningV2Queue: Sending Status Notification [dataSourceId=${dataSource.id}] [resourceName=${resourceName}] [status=${payload.status}]`
|
||||
);
|
||||
|
||||
const projectMembers = await projectMembershipDAL.findAllProjectMembers(projectId);
|
||||
const project = await projectDAL.findById(projectId);
|
||||
|
||||
const projectAdmins = projectMembers.filter((member) =>
|
||||
member.roles.some((role) => role.role === ProjectMembershipRole.Admin)
|
||||
);
|
||||
|
||||
const timestamp = new Date().toISOString();
|
||||
|
||||
await smtpService.sendMail({
|
||||
recipients: projectAdmins.map((member) => member.user.email!).filter(Boolean),
|
||||
template:
|
||||
payload.status === SecretScanningScanStatus.Completed
|
||||
? SmtpTemplates.SecretScanningV2SecretsDetected
|
||||
: SmtpTemplates.SecretScanningV2ScanFailed,
|
||||
subjectLine:
|
||||
payload.status === SecretScanningScanStatus.Completed
|
||||
? "Incident Alert: Secret(s) Leaked"
|
||||
: `Secret Scanning Failed`,
|
||||
substitutions:
|
||||
payload.status === SecretScanningScanStatus.Completed
|
||||
? {
|
||||
authorName: "Jim",
|
||||
authorEmail: "jim@infisical.com",
|
||||
resourceName,
|
||||
numberOfSecrets: payload.numberOfSecrets,
|
||||
isDiffScan: payload.isDiffScan,
|
||||
url: encodeURI(
|
||||
`${appCfg.SITE_URL}/secret-scanning/${projectId}/findings?search=scanId:${payload.scanId}`
|
||||
),
|
||||
timestamp
|
||||
}
|
||||
: {
|
||||
dataSourceName: dataSource.name,
|
||||
resourceName,
|
||||
projectName: project.name,
|
||||
timestamp,
|
||||
errorMessage: payload.errorMessage,
|
||||
url: encodeURI(
|
||||
`${appCfg.SITE_URL}/secret-scanning/${projectId}/data-sources/${dataSource.type}/${dataSource.id}`
|
||||
)
|
||||
}
|
||||
});
|
||||
} catch (error) {
|
||||
logger.error(
|
||||
error,
|
||||
`secretScanningV2Queue: Failed to Send Status Notification [dataSourceId=${dataSource.id}] [resourceName=${resourceName}] [status=${payload.status}]`
|
||||
);
|
||||
throw error;
|
||||
}
|
||||
},
|
||||
{
|
||||
batchSize: 1,
|
||||
workerCount: 5,
|
||||
pollingIntervalSeconds: 1
|
||||
}
|
||||
);
|
||||
|
||||
return {
|
||||
queueDataSourceFullScan,
|
||||
queueResourceDiffScan
|
||||
};
|
||||
};
|
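// A minimal sketch of how the two queue helpers exported above are meant to be invoked from an
// async context (the argument objects mirror the types referenced in this file; the webhook payload
// variable is an assumption for illustration):
//
//   await secretScanningV2Queue.queueDataSourceFullScan(dataSourceWithConnection);
//   await secretScanningV2Queue.queueResourceDiffScan({
//     dataSourceId: dataSource.id,
//     dataSourceType: dataSource.type,
//     payload: pushEventPayload // provider-specific diff-scan payload
//   });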
@@ -0,0 +1,99 @@
import { z } from "zod";

import { SecretScanningDataSourcesSchema, SecretScanningFindingsSchema } from "@app/db/schemas";
import { SecretScanningDataSource } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
import { SECRET_SCANNING_DATA_SOURCE_CONNECTION_MAP } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-maps";
import { SecretScanningDataSources } from "@app/lib/api-docs";
import { slugSchema } from "@app/server/lib/schemas";

type SecretScanningDataSourceSchemaOpts = {
  type: SecretScanningDataSource;
  isConnectionRequired: boolean;
};

export const BaseSecretScanningDataSourceSchema = ({
  type,
  isConnectionRequired
}: SecretScanningDataSourceSchemaOpts) =>
  SecretScanningDataSourcesSchema.omit({
    // unique to provider
    type: true,
    connectionId: true,
    config: true,
    encryptedCredentials: true
  }).extend({
    type: z.literal(type),
    connectionId: isConnectionRequired ? z.string().uuid() : z.null(),
    connection: isConnectionRequired
      ? z.object({
          app: z.literal(SECRET_SCANNING_DATA_SOURCE_CONNECTION_MAP[type]),
          name: z.string(),
          id: z.string().uuid()
        })
      : z.null()
  });

export const BaseCreateSecretScanningDataSourceSchema = ({
  type,
  isConnectionRequired
}: SecretScanningDataSourceSchemaOpts) =>
  z.object({
    name: slugSchema({ field: "name" }).describe(SecretScanningDataSources.CREATE(type).name),
    projectId: z
      .string()
      .trim()
      .min(1, "Project ID required")
      .describe(SecretScanningDataSources.CREATE(type).projectId),
    description: z
      .string()
      .trim()
      .max(256, "Description cannot exceed 256 characters")
      .nullish()
      .describe(SecretScanningDataSources.CREATE(type).description),
    connectionId: isConnectionRequired
      ? z.string().uuid().describe(SecretScanningDataSources.CREATE(type).connectionId)
      : z.undefined(),
    isAutoScanEnabled: z
      .boolean()
      .optional()
      .default(true)
      .describe(SecretScanningDataSources.CREATE(type).isAutoScanEnabled)
  });

export const BaseUpdateSecretScanningDataSourceSchema = (type: SecretScanningDataSource) =>
  z.object({
    name: slugSchema({ field: "name" }).describe(SecretScanningDataSources.UPDATE(type).name).optional(),
    description: z
      .string()
      .trim()
      .max(256, "Description cannot exceed 256 characters")
      .nullish()
      .describe(SecretScanningDataSources.UPDATE(type).description),
    isAutoScanEnabled: z.boolean().optional().describe(SecretScanningDataSources.UPDATE(type).isAutoScanEnabled)
  });

export const GitRepositoryScanFindingDetailsSchema = z.object({
  description: z.string(),
  startLine: z.number(),
  endLine: z.number(),
  startColumn: z.number(),
  endColumn: z.number(),
  file: z.string(),
  link: z.string(),
  symlinkFile: z.string(),
  commit: z.string(),
  entropy: z.number(),
  author: z.string(),
  email: z.string(),
  date: z.string(),
  message: z.string(),
  tags: z.string().array(),
  ruleID: z.string(),
  fingerprint: z.string()
});

export const BaseSecretScanningFindingSchema = SecretScanningFindingsSchema.omit({
  dataSourceType: true,
  resourceType: true,
  details: true
});
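// Composition sketch for the base schema builders above (identifier names and the config shape here
// are assumptions for illustration, not the actual GitHub implementation): provider-specific
// schemas supply their own type literal and re-introduce the provider config the base schema omits.
const GitHubDataSourceSchemaSketch = BaseSecretScanningDataSourceSchema({
  type: SecretScanningDataSource.GitHub,
  isConnectionRequired: true
}).extend({
  config: z.object({ includeRepos: z.string().array() }) // assumed provider-specific config
});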
@@ -0,0 +1,875 @@
|
||||
import { ForbiddenError } from "@casl/ability";
|
||||
import { join } from "path";
|
||||
|
||||
import { ActionProjectType } from "@app/db/schemas";
|
||||
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
|
||||
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
|
||||
import {
|
||||
ProjectPermissionSecretScanningConfigActions,
|
||||
ProjectPermissionSecretScanningDataSourceActions,
|
||||
ProjectPermissionSecretScanningFindingActions,
|
||||
ProjectPermissionSub
|
||||
} from "@app/ee/services/permission/project-permission";
|
||||
import {
|
||||
createTempFolder,
|
||||
deleteTempFolder,
|
||||
scanContentAndGetFindings,
|
||||
writeTextToFile
|
||||
} from "@app/ee/services/secret-scanning/secret-scanning-queue/secret-scanning-fns";
|
||||
import { githubSecretScanningService } from "@app/ee/services/secret-scanning-v2/github/github-secret-scanning-service";
|
||||
import { SecretScanningFindingStatus } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
|
||||
import { SECRET_SCANNING_FACTORY_MAP } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-factory";
|
||||
import { listSecretScanningDataSourceOptions } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-fns";
|
||||
import {
|
||||
SECRET_SCANNING_DATA_SOURCE_CONNECTION_MAP,
|
||||
SECRET_SCANNING_DATA_SOURCE_NAME_MAP
|
||||
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-maps";
|
||||
import {
|
||||
TCreateSecretScanningDataSourceDTO,
|
||||
TDeleteSecretScanningDataSourceDTO,
|
||||
TFindSecretScanningDataSourceByIdDTO,
|
||||
TFindSecretScanningDataSourceByNameDTO,
|
||||
TListSecretScanningDataSourcesByProjectId,
|
||||
TSecretScanningDataSource,
|
||||
TSecretScanningDataSourceWithConnection,
|
||||
TSecretScanningDataSourceWithDetails,
|
||||
TSecretScanningFinding,
|
||||
TSecretScanningResourceWithDetails,
|
||||
TSecretScanningScanWithDetails,
|
||||
TTriggerSecretScanningDataSourceDTO,
|
||||
TUpdateSecretScanningDataSourceDTO,
|
||||
TUpdateSecretScanningFindingDTO,
|
||||
TUpsertSecretScanningConfigDTO
|
||||
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-types";
|
||||
import { DatabaseErrorCode } from "@app/lib/error-codes";
|
||||
import { BadRequestError, DatabaseError, NotFoundError } from "@app/lib/errors";
|
||||
import { OrgServiceActor } from "@app/lib/types";
|
||||
import { decryptAppConnection } from "@app/services/app-connection/app-connection-fns";
|
||||
import { TAppConnectionServiceFactory } from "@app/services/app-connection/app-connection-service";
|
||||
import { TAppConnection } from "@app/services/app-connection/app-connection-types";
|
||||
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
|
||||
import { KmsDataKey } from "@app/services/kms/kms-types";
|
||||
|
||||
import { TSecretScanningV2DALFactory } from "./secret-scanning-v2-dal";
|
||||
import { TSecretScanningV2QueueServiceFactory } from "./secret-scanning-v2-queue";
|
||||
|
||||
export type TSecretScanningV2ServiceFactoryDep = {
|
||||
secretScanningV2DAL: TSecretScanningV2DALFactory;
|
||||
appConnectionService: Pick<TAppConnectionServiceFactory, "connectAppConnectionById">;
|
||||
permissionService: Pick<TPermissionServiceFactory, "getProjectPermission" | "getOrgPermission">;
|
||||
licenseService: Pick<TLicenseServiceFactory, "getPlan">;
|
||||
secretScanningV2Queue: Pick<
|
||||
TSecretScanningV2QueueServiceFactory,
|
||||
"queueDataSourceFullScan" | "queueResourceDiffScan"
|
||||
>;
|
||||
kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">;
|
||||
};
|
||||
|
||||
export type TSecretScanningV2ServiceFactory = ReturnType<typeof secretScanningV2ServiceFactory>;
|
||||
|
||||
export const secretScanningV2ServiceFactory = ({
|
||||
secretScanningV2DAL,
|
||||
permissionService,
|
||||
appConnectionService,
|
||||
licenseService,
|
||||
secretScanningV2Queue,
|
||||
kmsService
|
||||
}: TSecretScanningV2ServiceFactoryDep) => {
|
||||
const $checkListSecretScanningDataSourcesByProjectIdPermissions = async (
|
||||
projectId: string,
|
||||
actor: OrgServiceActor
|
||||
) => {
|
||||
const plan = await licenseService.getPlan(actor.orgId);
|
||||
|
||||
if (!plan.secretScanning)
|
||||
throw new BadRequestError({
|
||||
message:
|
||||
"Failed to access Secret Scanning Data Sources due to plan restriction. Upgrade plan to enable Secret Scanning."
|
||||
});
|
||||
|
||||
const { permission } = await permissionService.getProjectPermission({
|
||||
actor: actor.type,
|
||||
actorId: actor.id,
|
||||
actorAuthMethod: actor.authMethod,
|
||||
actorOrgId: actor.orgId,
|
||||
actionProjectType: ActionProjectType.SecretScanning,
|
||||
projectId
|
||||
});
|
||||
|
||||
ForbiddenError.from(permission).throwUnlessCan(
|
||||
ProjectPermissionSecretScanningDataSourceActions.Read,
|
||||
ProjectPermissionSub.SecretScanningDataSources
|
||||
);
|
||||
};
|
||||
|
||||
const listSecretScanningDataSourcesByProjectId = async (
|
||||
{ projectId, type }: TListSecretScanningDataSourcesByProjectId,
|
||||
actor: OrgServiceActor
|
||||
) => {
|
||||
await $checkListSecretScanningDataSourcesByProjectIdPermissions(projectId, actor);
|
||||
|
||||
const dataSources = await secretScanningV2DAL.dataSources.find({
|
||||
...(type && { type }),
|
||||
projectId
|
||||
});
|
||||
|
||||
return dataSources as TSecretScanningDataSource[];
|
||||
};
|
||||
|
||||
const listSecretScanningDataSourcesWithDetailsByProjectId = async (
|
||||
{ projectId, type }: TListSecretScanningDataSourcesByProjectId,
|
||||
actor: OrgServiceActor
|
||||
) => {
|
||||
await $checkListSecretScanningDataSourcesByProjectIdPermissions(projectId, actor);
|
||||
|
||||
const dataSources = await secretScanningV2DAL.dataSources.findWithDetails({
|
||||
...(type && { type }),
|
||||
projectId
|
||||
});
|
||||
|
||||
return dataSources as TSecretScanningDataSourceWithDetails[];
|
||||
};
|
||||
|
||||
const findSecretScanningDataSourceById = async (
|
||||
{ type, dataSourceId }: TFindSecretScanningDataSourceByIdDTO,
|
||||
actor: OrgServiceActor
|
||||
) => {
|
||||
const plan = await licenseService.getPlan(actor.orgId);
|
||||
|
||||
if (!plan.secretScanning)
|
||||
throw new BadRequestError({
|
||||
message:
|
||||
"Failed to access Secret Scanning Data Source due to plan restriction. Upgrade plan to enable Secret Scanning."
|
||||
});
|
||||
|
||||
const dataSource = await secretScanningV2DAL.dataSources.findById(dataSourceId);
|
||||
|
||||
if (!dataSource)
|
||||
throw new NotFoundError({
|
||||
message: `Could not find ${SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type]} Data Source with ID "${dataSourceId}"`
|
||||
});
|
||||
|
||||
const { permission } = await permissionService.getProjectPermission({
|
||||
actor: actor.type,
|
||||
actorId: actor.id,
|
||||
actorAuthMethod: actor.authMethod,
|
||||
actorOrgId: actor.orgId,
|
||||
actionProjectType: ActionProjectType.SecretScanning,
|
||||
projectId: dataSource.projectId
|
||||
});
|
||||
|
||||
ForbiddenError.from(permission).throwUnlessCan(
|
||||
ProjectPermissionSecretScanningDataSourceActions.Read,
|
||||
ProjectPermissionSub.SecretScanningDataSources
|
||||
);
|
||||
|
||||
if (type !== dataSource.type)
|
||||
throw new BadRequestError({
|
||||
message: `Secret Scanning Data Source with ID "${dataSourceId}" is not configured for ${SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type]}`
|
||||
});
|
||||
|
||||
return dataSource as TSecretScanningDataSource;
|
||||
};
|
||||
|
||||
const findSecretScanningDataSourceByName = async (
|
||||
{ type, sourceName, projectId }: TFindSecretScanningDataSourceByNameDTO,
|
||||
actor: OrgServiceActor
|
||||
) => {
|
||||
const plan = await licenseService.getPlan(actor.orgId);
|
||||
|
||||
if (!plan.secretScanning)
|
||||
throw new BadRequestError({
|
||||
message:
|
||||
"Failed to access Secret Scanning Data Source due to plan restriction. Upgrade plan to enable Secret Scanning."
|
||||
});
|
||||
|
||||
// we prevent conflicting names within a folder
|
||||
const dataSource = await secretScanningV2DAL.dataSources.findOne({
|
||||
name: sourceName,
|
||||
projectId
|
||||
});
|
||||
|
||||
if (!dataSource)
|
||||
throw new NotFoundError({
|
||||
message: `Could not find ${SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type]} Data Source with name "${sourceName}"`
|
||||
});
|
||||
|
||||
const { permission } = await permissionService.getProjectPermission({
|
||||
actor: actor.type,
|
||||
actorId: actor.id,
|
||||
actorAuthMethod: actor.authMethod,
|
||||
actorOrgId: actor.orgId,
|
||||
actionProjectType: ActionProjectType.SecretScanning,
|
||||
projectId
|
||||
});
|
||||
|
||||
ForbiddenError.from(permission).throwUnlessCan(
|
||||
ProjectPermissionSecretScanningDataSourceActions.Read,
|
||||
ProjectPermissionSub.SecretScanningDataSources
|
||||
);
|
||||
|
||||
if (type !== dataSource.type)
|
||||
throw new BadRequestError({
|
||||
message: `Secret Scanning Data Source with ID "${dataSource.id}" is not configured for ${SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type]}`
|
||||
});
|
||||
|
||||
return dataSource as TSecretScanningDataSource;
|
||||
};
|
||||
|
||||
const createSecretScanningDataSource = async (
|
||||
payload: TCreateSecretScanningDataSourceDTO,
|
||||
actor: OrgServiceActor
|
||||
) => {
|
||||
const plan = await licenseService.getPlan(actor.orgId);
|
||||
|
||||
if (!plan.secretScanning)
|
||||
throw new BadRequestError({
|
||||
message:
|
||||
"Failed to create Secret Scanning Data Source due to plan restriction. Upgrade plan to enable Secret Scanning."
|
||||
});
|
||||
|
||||
const { permission } = await permissionService.getProjectPermission({
|
||||
actor: actor.type,
|
||||
actorId: actor.id,
|
||||
actorAuthMethod: actor.authMethod,
|
||||
actorOrgId: actor.orgId,
|
||||
actionProjectType: ActionProjectType.SecretScanning,
|
||||
projectId: payload.projectId
|
||||
});
|
||||
|
||||
ForbiddenError.from(permission).throwUnlessCan(
|
||||
ProjectPermissionSecretScanningDataSourceActions.Create,
|
||||
ProjectPermissionSub.SecretScanningDataSources
|
||||
);
|
||||
|
||||
let connection: TAppConnection | null = null;
|
||||
if (payload.connectionId) {
|
||||
// validates permission to connect and app is valid for data source
|
||||
connection = await appConnectionService.connectAppConnectionById(
|
||||
SECRET_SCANNING_DATA_SOURCE_CONNECTION_MAP[payload.type],
|
||||
payload.connectionId,
|
||||
actor
|
||||
);
|
||||
}
|
||||
|
||||
const factory = SECRET_SCANNING_FACTORY_MAP[payload.type]();
|
||||
|
||||
try {
|
||||
const createdDataSource = await factory.initialize(
|
||||
{
|
||||
payload,
|
||||
connection: connection as TSecretScanningDataSourceWithConnection["connection"],
|
||||
secretScanningV2DAL
|
||||
},
|
||||
async ({ credentials, externalId }) => {
|
||||
let encryptedCredentials: Buffer | null = null;
|
||||
|
||||
if (credentials) {
|
||||
const { encryptor } = await kmsService.createCipherPairWithDataKey({
|
||||
type: KmsDataKey.SecretManager,
|
||||
projectId: payload.projectId
|
||||
});
|
||||
|
||||
const { cipherTextBlob } = encryptor({
|
||||
plainText: Buffer.from(JSON.stringify(credentials))
|
||||
});
|
||||
|
||||
encryptedCredentials = cipherTextBlob;
|
||||
}
|
||||
|
||||
return secretScanningV2DAL.dataSources.transaction(async (tx) => {
|
||||
const dataSource = await secretScanningV2DAL.dataSources.create(
|
||||
{
|
||||
encryptedCredentials,
|
||||
externalId,
|
||||
...payload
|
||||
},
|
||||
tx
|
||||
);
|
||||
|
||||
await factory.postInitialization({
|
||||
payload,
|
||||
connection: connection as TSecretScanningDataSourceWithConnection["connection"],
|
||||
dataSourceId: dataSource.id,
|
||||
credentials
|
||||
});
|
||||
|
||||
return dataSource;
|
||||
});
|
||||
}
|
||||
);
|
||||
|
||||
if (payload.isAutoScanEnabled) {
|
||||
try {
|
||||
await secretScanningV2Queue.queueDataSourceFullScan({
|
||||
...createdDataSource,
|
||||
connection
|
||||
} as TSecretScanningDataSourceWithConnection);
|
||||
} catch {
|
||||
// silently fail, don't want to block creation, they'll try scanning when they don't see anything and get the error
|
||||
}
|
||||
}
|
||||
|
||||
return createdDataSource as TSecretScanningDataSource;
|
||||
} catch (err) {
|
||||
if (err instanceof DatabaseError && (err.error as { code: string })?.code === DatabaseErrorCode.UniqueViolation) {
|
||||
throw new BadRequestError({
|
||||
message: `A Secret Scanning Data Source with the name "${payload.name}" already exists for the project with ID "${payload.projectId}"`
|
||||
});
|
||||
}
|
||||
|
||||
throw err;
|
||||
}
|
||||
};
|
||||
|
||||
const updateSecretScanningDataSource = async (
|
||||
{ type, dataSourceId, ...payload }: TUpdateSecretScanningDataSourceDTO,
|
||||
actor: OrgServiceActor
|
||||
) => {
|
||||
const plan = await licenseService.getPlan(actor.orgId);
|
||||
|
||||
if (!plan.secretScanning)
|
||||
throw new BadRequestError({
|
||||
message:
|
||||
"Failed to update Secret Scanning Data Source due to plan restriction. Upgrade plan to enable Secret Scanning."
|
||||
});
|
||||
|
||||
const dataSource = await secretScanningV2DAL.dataSources.findById(dataSourceId);
|
||||
|
||||
if (!dataSource)
|
||||
throw new NotFoundError({
|
||||
message: `Could not find ${SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type]} Data Source with ID "${dataSourceId}"`
|
||||
});
|
||||
|
||||
const { permission } = await permissionService.getProjectPermission({
|
||||
actor: actor.type,
|
||||
actorId: actor.id,
|
||||
actorAuthMethod: actor.authMethod,
|
||||
actorOrgId: actor.orgId,
|
||||
actionProjectType: ActionProjectType.SecretScanning,
|
||||
projectId: dataSource.projectId
|
||||
});
|
||||
|
||||
ForbiddenError.from(permission).throwUnlessCan(
|
||||
ProjectPermissionSecretScanningDataSourceActions.Edit,
|
||||
ProjectPermissionSub.SecretScanningDataSources
|
||||
);
|
||||
|
||||
if (type !== dataSource.type)
|
||||
throw new BadRequestError({
|
||||
message: `Secret Scanning Data Source with ID "${dataSourceId}" is not configured for ${SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type]}`
|
||||
});
|
||||
|
||||
try {
|
||||
const updatedDataSource = await secretScanningV2DAL.dataSources.updateById(dataSourceId, payload);
|
||||
|
||||
return updatedDataSource as TSecretScanningDataSource;
|
||||
} catch (err) {
|
||||
if (err instanceof DatabaseError && (err.error as { code: string })?.code === DatabaseErrorCode.UniqueViolation) {
|
||||
throw new BadRequestError({
|
||||
message: `A Secret Scanning Data Source with the name "${payload.name}" already exists for the project with ID "${dataSource.projectId}"`
|
||||
});
|
||||
}
|
||||
|
||||
throw err;
|
||||
}
|
||||
};
|
||||
|
||||
const deleteSecretScanningDataSource = async (
|
||||
{ type, dataSourceId }: TDeleteSecretScanningDataSourceDTO,
|
||||
actor: OrgServiceActor
|
||||
) => {
|
||||
const plan = await licenseService.getPlan(actor.orgId);
|
||||
|
||||
if (!plan.secretScanning)
|
||||
throw new BadRequestError({
|
||||
message:
|
||||
"Failed to delete Secret Scanning Data Source due to plan restriction. Upgrade plan to enable Secret Scanning."
|
||||
});
|
||||
|
||||
const dataSource = await secretScanningV2DAL.dataSources.findById(dataSourceId);
|
||||
|
||||
if (!dataSource)
|
||||
throw new NotFoundError({
|
||||
message: `Could not find ${SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type]} Data Source with ID "${dataSourceId}"`
|
||||
});
|
||||
|
||||
const { permission } = await permissionService.getProjectPermission({
|
||||
actor: actor.type,
|
||||
actorId: actor.id,
|
||||
actorAuthMethod: actor.authMethod,
|
||||
actorOrgId: actor.orgId,
|
||||
actionProjectType: ActionProjectType.SecretScanning,
|
||||
projectId: dataSource.projectId
|
||||
});
|
||||
|
||||
ForbiddenError.from(permission).throwUnlessCan(
|
||||
ProjectPermissionSecretScanningDataSourceActions.Delete,
|
||||
ProjectPermissionSub.SecretScanningDataSources
|
||||
);
|
||||
|
||||
if (type !== dataSource.type)
|
||||
throw new BadRequestError({
|
||||
message: `Secret Scanning Data Source with ID "${dataSourceId}" is not configured for ${SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type]}`
|
||||
});
|
||||
|
||||
// TODO: clean up webhooks
|
||||
|
||||
await secretScanningV2DAL.dataSources.deleteById(dataSourceId);
|
||||
|
||||
return dataSource as TSecretScanningDataSource;
|
||||
};
|
||||
|
||||
const triggerSecretScanningDataSourceScan = async (
|
||||
{ type, dataSourceId, resourceId }: TTriggerSecretScanningDataSourceDTO,
|
||||
actor: OrgServiceActor
|
||||
) => {
|
||||
const plan = await licenseService.getPlan(actor.orgId);
|
||||
|
||||
if (!plan.secretScanning)
|
||||
throw new BadRequestError({
|
||||
message:
|
||||
"Failed to trigger scan for Secret Scanning Data Source due to plan restriction. Upgrade plan to enable Secret Scanning."
|
||||
});
|
||||
|
||||
const dataSource = await secretScanningV2DAL.dataSources.findById(dataSourceId);
|
||||
|
||||
if (!dataSource)
|
||||
throw new NotFoundError({
|
||||
message: `Could not find ${SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type]} Data Source with ID "${dataSourceId}"`
|
||||
});
|
||||
|
||||
const { permission } = await permissionService.getProjectPermission({
|
||||
actor: actor.type,
|
||||
actorId: actor.id,
|
||||
actorAuthMethod: actor.authMethod,
|
||||
actorOrgId: actor.orgId,
|
||||
actionProjectType: ActionProjectType.SecretScanning,
|
||||
projectId: dataSource.projectId
|
||||
});
|
||||
|
||||
ForbiddenError.from(permission).throwUnlessCan(
|
||||
ProjectPermissionSecretScanningDataSourceActions.TriggerScans,
|
||||
ProjectPermissionSub.SecretScanningDataSources
|
||||
);
|
||||
|
||||
if (type !== dataSource.type)
|
||||
throw new BadRequestError({
|
||||
message: `Secret Scanning Data Source with ID "${dataSourceId}" is not configured for ${SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type]}`
|
||||
});
|
||||
|
||||
let connection: TAppConnection | null = null;
|
||||
if (dataSource.connection) connection = await decryptAppConnection(dataSource.connection, kmsService);
|
||||
|
||||
let resourceExternalId: string | undefined;
|
||||
|
||||
if (resourceId) {
|
||||
const resource = await secretScanningV2DAL.resources.findOne({ id: resourceId, dataSourceId });
|
||||
if (!resource) {
|
||||
throw new NotFoundError({
|
||||
message: `Could not find Secret Scanning Resource with ID "${resourceId}" for Data Source with ID "${dataSourceId}"`
|
||||
});
|
||||
}
|
||||
resourceExternalId = resource.externalId;
|
||||
}
|
||||
|
||||
await secretScanningV2Queue.queueDataSourceFullScan(
|
||||
{
|
||||
...dataSource,
|
||||
connection
|
||||
} as TSecretScanningDataSourceWithConnection,
|
||||
resourceExternalId
|
||||
);
|
||||
|
||||
return dataSource as TSecretScanningDataSource;
|
||||
};
|
||||
|
||||
const listSecretScanningResourcesByDataSourceId = async (
|
||||
{ type, dataSourceId }: TFindSecretScanningDataSourceByIdDTO,
|
||||
actor: OrgServiceActor
|
||||
) => {
|
||||
const plan = await licenseService.getPlan(actor.orgId);
|
||||
|
||||
if (!plan.secretScanning)
|
||||
throw new BadRequestError({
|
||||
message:
|
||||
"Failed to access Secret Scanning Resources due to plan restriction. Upgrade plan to enable Secret Scanning."
|
||||
});
|
||||
|
||||
const dataSource = await secretScanningV2DAL.dataSources.findById(dataSourceId);
|
||||
|
||||
if (!dataSource)
|
||||
throw new NotFoundError({
|
||||
message: `Could not find ${SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type]} Data Source with ID "${dataSourceId}"`
|
||||
});
|
||||
|
||||
const { permission } = await permissionService.getProjectPermission({
|
||||
actor: actor.type,
|
||||
actorId: actor.id,
|
||||
actorAuthMethod: actor.authMethod,
|
||||
actorOrgId: actor.orgId,
|
||||
actionProjectType: ActionProjectType.SecretScanning,
|
||||
projectId: dataSource.projectId
|
||||
});
|
||||
|
||||
ForbiddenError.from(permission).throwUnlessCan(
|
||||
ProjectPermissionSecretScanningDataSourceActions.ReadResources,
|
||||
ProjectPermissionSub.SecretScanningDataSources
|
||||
);
|
||||
|
||||
if (type !== dataSource.type)
|
||||
throw new BadRequestError({
|
||||
message: `Secret Scanning Data Source with ID "${dataSourceId}" is not configured for ${SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type]}`
|
||||
});
|
||||
|
||||
const resources = await secretScanningV2DAL.resources.find({
|
||||
dataSourceId
|
||||
});
|
||||
|
||||
return { resources, projectId: dataSource.projectId };
|
||||
};
|
||||
|
||||
const listSecretScanningScansByDataSourceId = async (
|
||||
{ type, dataSourceId }: TFindSecretScanningDataSourceByIdDTO,
|
||||
actor: OrgServiceActor
|
||||
) => {
|
||||
const plan = await licenseService.getPlan(actor.orgId);
|
||||
|
||||
if (!plan.secretScanning)
|
||||
throw new BadRequestError({
|
||||
message:
|
||||
"Failed to access Secret Scanning Resources due to plan restriction. Upgrade plan to enable Secret Scanning."
});

const dataSource = await secretScanningV2DAL.dataSources.findById(dataSourceId);

if (!dataSource)
throw new NotFoundError({
message: `Could not find ${SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type]} Data Source with ID "${dataSourceId}"`
});

const { permission } = await permissionService.getProjectPermission({
actor: actor.type,
actorId: actor.id,
actorAuthMethod: actor.authMethod,
actorOrgId: actor.orgId,
actionProjectType: ActionProjectType.SecretScanning,
projectId: dataSource.projectId
});

ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionSecretScanningDataSourceActions.ReadScans,
ProjectPermissionSub.SecretScanningDataSources
);

if (type !== dataSource.type)
throw new BadRequestError({
message: `Secret Scanning Data Source with ID "${dataSourceId}" is not configured for ${SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type]}`
});

const scans = await secretScanningV2DAL.scans.findByDataSourceId(dataSourceId);

return { scans, projectId: dataSource.projectId };
};

const listSecretScanningResourcesWithDetailsByDataSourceId = async (
{ type, dataSourceId }: TFindSecretScanningDataSourceByIdDTO,
actor: OrgServiceActor
) => {
const plan = await licenseService.getPlan(actor.orgId);

if (!plan.secretScanning)
throw new BadRequestError({
message:
"Failed to access Secret Scanning Resources due to plan restriction. Upgrade plan to enable Secret Scanning."
});

const dataSource = await secretScanningV2DAL.dataSources.findById(dataSourceId);

if (!dataSource)
throw new NotFoundError({
message: `Could not find ${SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type]} Data Source with ID "${dataSourceId}"`
});

const { permission } = await permissionService.getProjectPermission({
actor: actor.type,
actorId: actor.id,
actorAuthMethod: actor.authMethod,
actorOrgId: actor.orgId,
actionProjectType: ActionProjectType.SecretScanning,
projectId: dataSource.projectId
});

ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionSecretScanningDataSourceActions.ReadResources,
ProjectPermissionSub.SecretScanningDataSources
);

if (type !== dataSource.type)
throw new BadRequestError({
message: `Secret Scanning Data Source with ID "${dataSourceId}" is not configured for ${SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type]}`
});

const resources = await secretScanningV2DAL.resources.findWithDetails({ dataSourceId });

return { resources: resources as TSecretScanningResourceWithDetails[], projectId: dataSource.projectId };
};

const listSecretScanningScansWithDetailsByDataSourceId = async (
{ type, dataSourceId }: TFindSecretScanningDataSourceByIdDTO,
actor: OrgServiceActor
) => {
const plan = await licenseService.getPlan(actor.orgId);

if (!plan.secretScanning)
throw new BadRequestError({
message:
"Failed to access Secret Scanning Scans due to plan restriction. Upgrade plan to enable Secret Scanning."
});

const dataSource = await secretScanningV2DAL.dataSources.findById(dataSourceId);

if (!dataSource)
throw new NotFoundError({
message: `Could not find ${SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type]} Data Source with ID "${dataSourceId}"`
});

const { permission } = await permissionService.getProjectPermission({
actor: actor.type,
actorId: actor.id,
actorAuthMethod: actor.authMethod,
actorOrgId: actor.orgId,
actionProjectType: ActionProjectType.SecretScanning,
projectId: dataSource.projectId
});

ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionSecretScanningDataSourceActions.ReadScans,
ProjectPermissionSub.SecretScanningDataSources
);

if (type !== dataSource.type)
throw new BadRequestError({
message: `Secret Scanning Data Source with ID "${dataSourceId}" is not configured for ${SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type]}`
});

const scans = await secretScanningV2DAL.scans.findWithDetailsByDataSourceId(dataSourceId);

return { scans: scans as TSecretScanningScanWithDetails[], projectId: dataSource.projectId };
};

const getSecretScanningUnresolvedFindingsCountByProjectId = async (projectId: string, actor: OrgServiceActor) => {
const plan = await licenseService.getPlan(actor.orgId);

if (!plan.secretScanning)
throw new BadRequestError({
message:
"Failed to access Secret Scanning Findings due to plan restriction. Upgrade plan to enable Secret Scanning."
});

const { permission } = await permissionService.getProjectPermission({
actor: actor.type,
actorId: actor.id,
actorAuthMethod: actor.authMethod,
actorOrgId: actor.orgId,
actionProjectType: ActionProjectType.SecretScanning,
projectId
});

ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionSecretScanningFindingActions.Read,
ProjectPermissionSub.SecretScanningFindings
);

const [finding] = await secretScanningV2DAL.findings.find(
{
projectId,
status: SecretScanningFindingStatus.Unresolved
},
{ count: true }
);

return Number(finding?.count ?? 0);
};

const listSecretScanningFindingsByProjectId = async (projectId: string, actor: OrgServiceActor) => {
const plan = await licenseService.getPlan(actor.orgId);

if (!plan.secretScanning)
throw new BadRequestError({
message:
"Failed to access Secret Scanning Findings due to plan restriction. Upgrade plan to enable Secret Scanning."
});

const { permission } = await permissionService.getProjectPermission({
actor: actor.type,
actorId: actor.id,
actorAuthMethod: actor.authMethod,
actorOrgId: actor.orgId,
actionProjectType: ActionProjectType.SecretScanning,
projectId
});

ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionSecretScanningFindingActions.Read,
ProjectPermissionSub.SecretScanningFindings
);

const findings = await secretScanningV2DAL.findings.find({
projectId
});

return findings as TSecretScanningFinding[];
};

const updateSecretScanningFindingById = async (
{ findingId, remarks, status }: TUpdateSecretScanningFindingDTO,
actor: OrgServiceActor
) => {
const plan = await licenseService.getPlan(actor.orgId);

if (!plan.secretScanning)
throw new BadRequestError({
message:
"Failed to access Secret Scanning Findings due to plan restriction. Upgrade plan to enable Secret Scanning."
});

const finding = await secretScanningV2DAL.findings.findById(findingId);

if (!finding)
throw new NotFoundError({
message: `Could not find Secret Scanning Finding with ID "${findingId}"`
});

const { permission } = await permissionService.getProjectPermission({
actor: actor.type,
actorId: actor.id,
actorAuthMethod: actor.authMethod,
actorOrgId: actor.orgId,
actionProjectType: ActionProjectType.SecretScanning,
projectId: finding.projectId
});

ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionSecretScanningFindingActions.Update,
ProjectPermissionSub.SecretScanningFindings
);

const updatedFinding = await secretScanningV2DAL.findings.updateById(findingId, {
remarks,
status
});

return { finding: updatedFinding as TSecretScanningFinding, projectId: finding.projectId };
};

const findSecretScanningConfigByProjectId = async (projectId: string, actor: OrgServiceActor) => {
const plan = await licenseService.getPlan(actor.orgId);

if (!plan.secretScanning)
throw new BadRequestError({
message:
"Failed to access Secret Scanning Configuration due to plan restriction. Upgrade plan to enable Secret Scanning."
});

const { permission } = await permissionService.getProjectPermission({
actor: actor.type,
actorId: actor.id,
actorAuthMethod: actor.authMethod,
actorOrgId: actor.orgId,
actionProjectType: ActionProjectType.SecretScanning,
projectId
});

ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionSecretScanningConfigActions.Read,
ProjectPermissionSub.SecretScanningConfigs
);

const config = await secretScanningV2DAL.configs.findOne({
projectId
});

return (
config ?? { content: null, projectId, updatedAt: null } // using default config
);
};

const upsertSecretScanningConfig = async (
{ projectId, content }: TUpsertSecretScanningConfigDTO,
actor: OrgServiceActor
) => {
const plan = await licenseService.getPlan(actor.orgId);

if (!plan.secretScanning)
throw new BadRequestError({
message:
"Failed to access Secret Scanning Configuration due to plan restriction. Upgrade plan to enable Secret Scanning."
});

const { permission } = await permissionService.getProjectPermission({
actor: actor.type,
actorId: actor.id,
actorAuthMethod: actor.authMethod,
actorOrgId: actor.orgId,
actionProjectType: ActionProjectType.SecretScanning,
projectId
});

ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionSecretScanningConfigActions.Update,
ProjectPermissionSub.SecretScanningConfigs
);

if (content) {
const tempFolder = await createTempFolder();
try {
const configPath = join(tempFolder, "infisical-scan.toml");
await writeTextToFile(configPath, content);

// just checking if config parses
await scanContentAndGetFindings("", configPath);
} catch (e) {
throw new BadRequestError({
message: "Unable to parse configuration: Check syntax and formatting."
});
} finally {
await deleteTempFolder(tempFolder);
}
}

const [config] = await secretScanningV2DAL.configs.upsert(
[
{
projectId,
content
}
],
"projectId"
);

return config;
};

return {
listSecretScanningDataSourceOptions,
listSecretScanningDataSourcesByProjectId,
listSecretScanningDataSourcesWithDetailsByProjectId,
findSecretScanningDataSourceById,
findSecretScanningDataSourceByName,
createSecretScanningDataSource,
updateSecretScanningDataSource,
deleteSecretScanningDataSource,
triggerSecretScanningDataSourceScan,
listSecretScanningResourcesByDataSourceId,
listSecretScanningScansByDataSourceId,
listSecretScanningResourcesWithDetailsByDataSourceId,
listSecretScanningScansWithDetailsByDataSourceId,
getSecretScanningUnresolvedFindingsCountByProjectId,
listSecretScanningFindingsByProjectId,
updateSecretScanningFindingById,
findSecretScanningConfigByProjectId,
upsertSecretScanningConfig,
github: githubSecretScanningService(secretScanningV2DAL, secretScanningV2Queue)
};
};
@@ -0,0 +1,189 @@
import {
TSecretScanningDataSources,
TSecretScanningFindingsInsert,
TSecretScanningResources,
TSecretScanningScans
} from "@app/db/schemas";
import {
TGitHubDataSource,
TGitHubDataSourceInput,
TGitHubDataSourceListItem,
TGitHubDataSourceWithConnection,
TGitHubFinding,
TQueueGitHubResourceDiffScan
} from "@app/ee/services/secret-scanning-v2/github";
import { TSecretScanningV2DALFactory } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-dal";
import {
SecretScanningDataSource,
SecretScanningFindingStatus,
SecretScanningScanStatus
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";

export type TSecretScanningDataSource = TGitHubDataSource;

export type TSecretScanningDataSourceWithDetails = TSecretScanningDataSource & {
lastScannedAt?: Date | null;
lastScanStatus?: SecretScanningScanStatus | null;
lastScanStatusMessage?: string | null;
unresolvedFindings: number;
};

export type TSecretScanningResourceWithDetails = TSecretScanningResources & {
lastScannedAt?: Date | null;
lastScanStatus?: SecretScanningScanStatus | null;
lastScanStatusMessage?: string | null;
unresolvedFindings: number;
};

export type TSecretScanningScanWithDetails = TSecretScanningScans & {
unresolvedFindings: number;
resolvedFindings: number;
resourceName: string;
};

export type TSecretScanningDataSourceWithConnection = TGitHubDataSourceWithConnection;

export type TSecretScanningDataSourceInput = TGitHubDataSourceInput;

export type TSecretScanningDataSourceListItem = TGitHubDataSourceListItem;

export type TSecretScanningFinding = TGitHubFinding;

export type TListSecretScanningDataSourcesByProjectId = {
projectId: string;
type?: SecretScanningDataSource;
};

export type TFindSecretScanningDataSourceByIdDTO = {
dataSourceId: string;
type: SecretScanningDataSource;
};

export type TFindSecretScanningDataSourceByNameDTO = {
sourceName: string;
projectId: string;
type: SecretScanningDataSource;
};

export type TCreateSecretScanningDataSourceDTO = Pick<
TSecretScanningDataSource,
"description" | "name" | "projectId"
> & {
connectionId?: string;
type: SecretScanningDataSource;
isAutoScanEnabled?: boolean;
config: Partial<TSecretScanningDataSourceInput["config"]>;
};

export type TUpdateSecretScanningDataSourceDTO = Partial<
Omit<TCreateSecretScanningDataSourceDTO, "projectId" | "connectionId">
> & {
dataSourceId: string;
type: SecretScanningDataSource;
};

export type TDeleteSecretScanningDataSourceDTO = {
type: SecretScanningDataSource;
dataSourceId: string;
};

export type TTriggerSecretScanningDataSourceDTO = {
type: SecretScanningDataSource;
dataSourceId: string;
resourceId?: string;
};

export type TQueueSecretScanningDataSourceFullScan = {
dataSourceId: string;
resourceId: string;
scanId: string;
};

export type TQueueSecretScanningResourceDiffScan = TQueueGitHubResourceDiffScan;

export type TQueueSecretScanningSendNotification = {
dataSource: TSecretScanningDataSources;
resourceName: string;
} & (
| { status: SecretScanningScanStatus.Failed; errorMessage: string }
| { status: SecretScanningScanStatus.Completed; numberOfSecrets: number; scanId: string; isDiffScan: boolean }
);

export type TCloneRepository = {
cloneUrl: string;
repoPath: string;
};

export type TSecretScanningFactoryListRawResources<T extends TSecretScanningDataSourceWithConnection> = (
dataSource: T
) => Promise<Pick<TSecretScanningResources, "externalId" | "name" | "type">[]>;

export type TSecretScanningFactoryGetDiffScanResourcePayload<
P extends TQueueSecretScanningResourceDiffScan["payload"]
> = (payload: P) => Pick<TSecretScanningResources, "externalId" | "name" | "type">;

export type TSecretScanningFactoryGetFullScanPath<T extends TSecretScanningDataSourceWithConnection> = (parameters: {
dataSource: T;
resourceName: string;
tempFolder: string;
}) => Promise<string>;

export type TSecretScanningFactoryGetDiffScanFindingsPayload<
T extends TSecretScanningDataSourceWithConnection,
P extends TQueueSecretScanningResourceDiffScan["payload"]
> = (parameters: { dataSource: T; resourceName: string; payload: P; configPath?: string }) => Promise<TFindingsPayload>;

export type TSecretScanningDataSourceRaw = NonNullable<
Awaited<ReturnType<TSecretScanningV2DALFactory["dataSources"]["findById"]>>
>;

export type TSecretScanningFactoryInitialize<
T extends TSecretScanningDataSourceWithConnection["connection"] | undefined = undefined,
C extends TSecretScanningDataSourceCredentials = undefined
> = (
params: {
payload: TCreateSecretScanningDataSourceDTO;
connection: T;
secretScanningV2DAL: TSecretScanningV2DALFactory;
},
callback: (parameters: { credentials?: C; externalId?: string }) => Promise<TSecretScanningDataSourceRaw>
) => Promise<TSecretScanningDataSourceRaw>;

export type TSecretScanningFactoryPostInitialization<
T extends TSecretScanningDataSourceWithConnection["connection"] | undefined = undefined,
C extends TSecretScanningDataSourceCredentials = undefined
> = (params: {
payload: TCreateSecretScanningDataSourceDTO;
connection: T;
credentials: C;
dataSourceId: string;
}) => Promise<void>;

export type TSecretScanningFactory<
T extends TSecretScanningDataSourceWithConnection,
C extends TSecretScanningDataSourceCredentials,
P extends TQueueSecretScanningResourceDiffScan["payload"]
> = () => {
listRawResources: TSecretScanningFactoryListRawResources<T>;
getFullScanPath: TSecretScanningFactoryGetFullScanPath<T>;
initialize: TSecretScanningFactoryInitialize<T["connection"] | undefined, C>;
postInitialization: TSecretScanningFactoryPostInitialization<T["connection"] | undefined, C>;
getDiffScanResourcePayload: TSecretScanningFactoryGetDiffScanResourcePayload<P>;
getDiffScanFindingsPayload: TSecretScanningFactoryGetDiffScanFindingsPayload<T, P>;
};

export type TFindingsPayload = Pick<TSecretScanningFindingsInsert, "details" | "fingerprint" | "severity" | "rule">[];
export type TGetFindingsPayload = Promise<TFindingsPayload>;

export type TUpdateSecretScanningFindingDTO = {
status?: SecretScanningFindingStatus;
remarks?: string | null;
findingId: string;
};

export type TUpsertSecretScanningConfigDTO = {
projectId: string;
content: string | null;
};

export type TSecretScanningDataSourceCredentials = undefined;
@@ -0,0 +1,7 @@
import { z } from "zod";

import { GitHubDataSourceSchema, GitHubFindingSchema } from "@app/ee/services/secret-scanning-v2/github";

export const SecretScanningDataSourceSchema = z.discriminatedUnion("type", [GitHubDataSourceSchema]);

export const SecretScanningFindingSchema = z.discriminatedUnion("resourceType", [GitHubFindingSchema]);
@@ -65,9 +65,9 @@ export function runInfisicalScanOnRepo(repoPath: string, outputPath: string): Pr
});
}

export function runInfisicalScan(inputPath: string, outputPath: string): Promise<void> {
export function runInfisicalScan(inputPath: string, outputPath: string, configPath?: string): Promise<void> {
return new Promise((resolve, reject) => {
const command = `cat "${inputPath}" | infisical scan --exit-code=77 --pipe -r "${outputPath}"`;
const command = `cat "${inputPath}" | infisical scan --exit-code=77 --pipe -r "${outputPath}" ${configPath ? `-c "${configPath}"` : ""}`;
exec(command, (error) => {
if (error && error.code !== 77) {
reject(error);
@@ -138,14 +138,14 @@ export async function scanFullRepoContentAndGetFindings(
}
}

export async function scanContentAndGetFindings(textContent: string): Promise<SecretMatch[]> {
export async function scanContentAndGetFindings(textContent: string, configPath?: string): Promise<SecretMatch[]> {
const tempFolder = await createTempFolder();
const filePath = join(tempFolder, "content.txt");
const findingsPath = join(tempFolder, "findings.json");

try {
await writeTextToFile(filePath, textContent);
await runInfisicalScan(filePath, findingsPath);
await runInfisicalScan(filePath, findingsPath, configPath);
const findingsData = await readFindingsFile(findingsPath);
return JSON.parse(findingsData) as SecretMatch[];
} finally {
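Note: the optional configPath parameter above only changes the shell command that gets assembled; with no config path the command is identical to the previous behavior. A minimal sketch of the assembled command, using made-up paths rather than anything from this diff:

// Hedged illustration of the command built by runInfisicalScan (all paths are hypothetical).
const inputPath = "/tmp/scan/content.txt";
const outputPath = "/tmp/scan/findings.json";
const configPath = "/tmp/scan/infisical-scan.toml"; // custom scanner config, e.g. from upsertSecretScanningConfig
// Without a config: cat "/tmp/scan/content.txt" | infisical scan --exit-code=77 --pipe -r "/tmp/scan/findings.json"
// With a config:    cat "/tmp/scan/content.txt" | infisical scan --exit-code=77 --pipe -r "/tmp/scan/findings.json" -c "/tmp/scan/infisical-scan.toml"
const command = `cat "${inputPath}" | infisical scan --exit-code=77 --pipe -r "${outputPath}" ${configPath ? `-c "${configPath}"` : ""}`;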
@@ -9,6 +9,7 @@ export type SecretMatch = {
Match: string;
Secret: string;
File: string;
Link: string;
SymlinkFile: string;
Commit: string;
Entropy: number;
@@ -36,6 +36,8 @@ export const KeyStorePrefixes = {
`sync-integration-last-run-${projectId}-${environmentSlug}-${secretPath}` as const,
SecretSyncLock: (syncId: string) => `secret-sync-mutex-${syncId}` as const,
SecretRotationLock: (rotationId: string) => `secret-rotation-v2-mutex-${rotationId}` as const,
SecretScanningLock: (dataSourceId: string, resourceExternalId: string) =>
`secret-scanning-v2-mutex-${dataSourceId}-${resourceExternalId}` as const,
CaOrderCertificateForSubscriberLock: (subscriberId: string) =>
`ca-order-certificate-for-subscriber-lock-${subscriberId}` as const,
SecretSyncLastRunTimestamp: (syncId: string) => `secret-sync-last-run-${syncId}` as const,
@@ -3,6 +3,12 @@ import {
SECRET_ROTATION_CONNECTION_MAP,
SECRET_ROTATION_NAME_MAP
} from "@app/ee/services/secret-rotation-v2/secret-rotation-v2-maps";
import { SecretScanningDataSource } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
import {
AUTO_SYNC_DESCRIPTION_HELPER,
SECRET_SCANNING_DATA_SOURCE_CONNECTION_MAP,
SECRET_SCANNING_DATA_SOURCE_NAME_MAP
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-maps";
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
import { APP_CONNECTION_NAME_MAP } from "@app/services/app-connection/app-connection-maps";
import { CaType } from "@app/services/certificate-authority/certificate-authority-enums";
@@ -57,7 +63,8 @@ export enum ApiDocsTags {
SshHostGroups = "SSH Host Groups",
KmsKeys = "KMS Keys",
KmsEncryption = "KMS Encryption",
KmsSigning = "KMS Signing"
KmsSigning = "KMS Signing",
SecretScanning = "Secret Scanning"
}

export const GROUPS = {
@@ -2432,3 +2439,81 @@ export const SecretRotations = {
}
}
};

export const SecretScanningDataSources = {
LIST: (type?: SecretScanningDataSource) => ({
projectId: `The ID of the project to list ${type ? SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type] : "Scanning"} Data Sources from.`
}),
GET_BY_ID: (type: SecretScanningDataSource) => ({
dataSourceId: `The ID of the ${SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type]} Data Source to retrieve.`
}),
GET_BY_NAME: (type: SecretScanningDataSource) => ({
sourceName: `The name of the ${SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type]} Data Source to retrieve.`,
projectId: `The ID of the project the ${SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type]} Data Source is located in.`
}),
CREATE: (type: SecretScanningDataSource) => {
const sourceType = SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type];
const autoScanDescription = AUTO_SYNC_DESCRIPTION_HELPER[type];
return {
name: `The name of the ${sourceType} Data Source to create. Must be slug-friendly.`,
description: `An optional description for the ${sourceType} Data Source.`,
projectId: `The ID of the project to create the ${sourceType} Data Source in.`,
connectionId: `The ID of the ${
APP_CONNECTION_NAME_MAP[SECRET_SCANNING_DATA_SOURCE_CONNECTION_MAP[type]]
} Connection to use for this Data Source.`,
isAutoScanEnabled: `Whether scans should be automatically performed when a ${autoScanDescription.verb} occurs to ${autoScanDescription.noun} associated with this Data Source.`,
config: `The configuration parameters to use for this Data Source.`
};
},
UPDATE: (type: SecretScanningDataSource) => {
const typeName = SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type];
const autoScanDescription = AUTO_SYNC_DESCRIPTION_HELPER[type];

return {
dataSourceId: `The ID of the ${typeName} Data Source to be updated.`,
name: `The updated name of the ${typeName} Data Source. Must be slug-friendly.`,
description: `The updated description of the ${typeName} Data Source.`,
isAutoScanEnabled: `Whether scans should be automatically performed when a ${autoScanDescription.verb} occurs to ${autoScanDescription.noun} associated with this Data Source.`,
config: `The updated configuration parameters to use for this Data Source.`
};
},
DELETE: (type: SecretScanningDataSource) => ({
dataSourceId: `The ID of the ${SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type]} Data Source to be deleted.`
}),
SCAN: (type: SecretScanningDataSource) => ({
dataSourceId: `The ID of the ${SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type]} Data Source to trigger a scan for.`,
resourceId: `The ID of the individual Data Source resource to trigger a scan for.`
}),
LIST_RESOURCES: (type: SecretScanningDataSource) => ({
dataSourceId: `The ID of the ${SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type]} Data Source to list resources from.`
}),
LIST_SCANS: (type: SecretScanningDataSource) => ({
dataSourceId: `The ID of the ${SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type]} Data Source to list scans for.`
}),
CONFIG: {
GITHUB: {
includeRepos: 'The repositories to include when scanning. Defaults to all repositories (["*"]).'
}
}
};

export const SecretScanningFindings = {
LIST: {
projectId: `The ID of the project to list Secret Scanning Findings from.`
},
UPDATE: {
findingId: "The ID of the Secret Scanning Finding to update.",
status: "The updated status of the specified Secret Scanning Finding.",
remarks: "Remarks pertaining to the status of this finding."
}
};

export const SecretScanningConfigs = {
GET_BY_PROJECT_ID: {
projectId: `The ID of the project to retrieve the Secret Scanning Configuration for.`
},
UPDATE: {
projectId: "The ID of the project to update the Secret Scanning Configuration for.",
content: "The contents of the Secret Scanning Configuration file."
}
};
@@ -1,5 +1,7 @@
import { z } from "zod";

import { QueueWorkerProfile } from "@app/lib/types";

import { removeTrailingSlash } from "../fn";
import { CustomLogger } from "../logger/logger";
import { zpStr } from "../zod";
@@ -69,6 +71,7 @@ const envSchema = z
ENCRYPTION_KEY: zpStr(z.string().optional()),
ROOT_ENCRYPTION_KEY: zpStr(z.string().optional()),
QUEUE_WORKERS_ENABLED: zodStrBool.default("true"),
QUEUE_WORKER_PROFILE: z.nativeEnum(QueueWorkerProfile).default(QueueWorkerProfile.All),
HTTPS_ENABLED: zodStrBool,
ROTATION_DEVELOPMENT_MODE: zodStrBool.default("false").optional(),
// smtp options
@@ -230,6 +233,14 @@ const envSchema = z
INF_APP_CONNECTION_GITHUB_APP_SLUG: zpStr(z.string().optional()),
INF_APP_CONNECTION_GITHUB_APP_ID: zpStr(z.string().optional()),

// github radar app
INF_APP_CONNECTION_GITHUB_RADAR_APP_CLIENT_ID: zpStr(z.string().optional()),
INF_APP_CONNECTION_GITHUB_RADAR_APP_CLIENT_SECRET: zpStr(z.string().optional()),
INF_APP_CONNECTION_GITHUB_RADAR_APP_PRIVATE_KEY: zpStr(z.string().optional()),
INF_APP_CONNECTION_GITHUB_RADAR_APP_SLUG: zpStr(z.string().optional()),
INF_APP_CONNECTION_GITHUB_RADAR_APP_ID: zpStr(z.string().optional()),
INF_APP_CONNECTION_GITHUB_RADAR_APP_WEBHOOK_SECRET: zpStr(z.string().optional()),

// gcp app
INF_APP_CONNECTION_GCP_SERVICE_ACCOUNT_CREDENTIAL: zpStr(z.string().optional()),

@@ -298,6 +309,13 @@ const envSchema = z
Boolean(data.SECRET_SCANNING_GIT_APP_ID) &&
Boolean(data.SECRET_SCANNING_PRIVATE_KEY) &&
Boolean(data.SECRET_SCANNING_WEBHOOK_SECRET),
isSecretScanningV2Configured:
Boolean(data.INF_APP_CONNECTION_GITHUB_RADAR_APP_ID) &&
Boolean(data.INF_APP_CONNECTION_GITHUB_RADAR_APP_PRIVATE_KEY) &&
Boolean(data.INF_APP_CONNECTION_GITHUB_RADAR_APP_SLUG) &&
Boolean(data.INF_APP_CONNECTION_GITHUB_RADAR_APP_CLIENT_ID) &&
Boolean(data.INF_APP_CONNECTION_GITHUB_RADAR_APP_CLIENT_SECRET) &&
Boolean(data.INF_APP_CONNECTION_GITHUB_RADAR_APP_WEBHOOK_SECRET),
isHsmConfigured:
Boolean(data.HSM_LIB_PATH) && Boolean(data.HSM_PIN) && Boolean(data.HSM_KEY_LABEL) && data.HSM_SLOT !== undefined,
samlDefaultOrgSlug: data.DEFAULT_SAML_ORG_SLUG,
@@ -32,3 +32,24 @@ export const shake = <RemovedKeys extends string, T = object>(
return acc;
}, {} as T);
};

export const titleCaseToCamelCase = (obj: unknown): unknown => {
if (typeof obj !== "object" || obj === null) {
return obj;
}

if (Array.isArray(obj)) {
return obj.map((item: object) => titleCaseToCamelCase(item));
}

const result: Record<string, unknown> = {};

for (const key in obj) {
if (Object.prototype.hasOwnProperty.call(obj, key)) {
const camelKey = key.charAt(0).toLowerCase() + key.slice(1);
result[camelKey] = titleCaseToCamelCase((obj as Record<string, unknown>)[key]);
}
}

return result;
};
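Note: a small usage sketch of titleCaseToCamelCase (the sample object is illustrative, not taken from the diff). The helper recursively lower-cases the first character of every key, which is how Title-cased scanner output such as the SecretMatch shape above can be mapped onto camelCase fields:

// Illustrative only: { Commit: "abc123", Entropy: 3.2, File: "main.ts" } becomes
// { commit: "abc123", entropy: 3.2, file: "main.ts" }; nested objects and arrays are converted too.
const sample = { Commit: "abc123", Entropy: 3.2, File: "main.ts", Tags: [{ Name: "rule" }] };
const camel = titleCaseToCamelCase(sample) as Record<string, unknown>;
// camel.commit === "abc123"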
@@ -179,13 +179,18 @@ export const ormify = <DbOps extends object, Tname extends keyof Tables>(db: Kne
throw new DatabaseError({ error, name: "batchInsert" });
}
},
upsert: async (data: readonly Tables[Tname]["insert"][], onConflictField: keyof Tables[Tname]["base"], tx?: Knex) => {
upsert: async (
data: readonly Tables[Tname]["insert"][],
onConflictField: keyof Tables[Tname]["base"] | Array<keyof Tables[Tname]["base"]>,
tx?: Knex,
mergeColumns?: (keyof Knex.ResolveTableType<Knex.TableType<Tname>, "update">)[] | undefined
) => {
try {
if (!data.length) return [];
const res = await (tx || db)(tableName)
.insert(data as never)
.onConflict(onConflictField as never)
.merge()
.merge(mergeColumns)
.returning("*");
return res;
} catch (error) {
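Note: the widened upsert signature stays backward compatible; the existing call shape used in the service factory earlier in this diff keeps working, while the new parameters allow a composite conflict target and an explicit merge-column list. A hedged sketch (the second call's DAL, table, and column names are hypothetical):

// Existing call shape (unchanged), taken from the service factory above:
//   await secretScanningV2DAL.configs.upsert([{ projectId, content }], "projectId");
// Hypothetical call shape enabled by the new parameters: conflict on a composite key, and
// only the listed columns are overwritten on conflict (other stored values are preserved):
//   await someDal.upsert(rows, ["projectId", "externalId"], tx, ["status", "statusMessage"]);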
@@ -9,3 +9,5 @@ export const DistinguishedNameRegex =
export const UserPrincipalNameRegex = new RE2(/^[a-zA-Z0-9._%+-]+@[a-zA-Z0-9._-]+\.[a-zA-Z]{2,}$/);

export const LdapUrlRegex = new RE2(/^ldaps?:\/\//);

export const GitHubRepositoryRegex = new RE2(/^[a-zA-Z0-9._-]+\/[a-zA-Z0-9._-]+$/);
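Note: a quick illustration of what the new GitHubRepositoryRegex accepts; the sample strings are made up, and RE2 mirrors the standard RegExp API here:

GitHubRepositoryRegex.test("octocat/hello-world");      // true  - the usual "owner/repo" shape
GitHubRepositoryRegex.test("my-org/repo.name_v2");      // true  - dots, underscores and dashes are allowed
GitHubRepositoryRegex.test("owner/repo/extra-segment"); // false - only a single slash is allowed
GitHubRepositoryRegex.test("owner");                    // false - both owner and repo parts are required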
@@ -78,3 +78,9 @@ export type OrgServiceActor = {
|
||||
authMethod: ActorAuthMethod;
|
||||
orgId: string;
|
||||
};
|
||||
|
||||
export enum QueueWorkerProfile {
|
||||
All = "all",
|
||||
Standard = "standard",
|
||||
SecretScanning = "secret-scanning"
|
||||
}
|
||||
|
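Note: these enum values are what the new QUEUE_WORKER_PROFILE environment variable (added to the env schema above) is parsed into, and the queue service below uses them to decide which queues and jobs a given worker processes. A hedged sketch of the intended deployment-level configuration, not an excerpt from the diff:

// One instance handles everything (the default):
//   QUEUE_WORKER_PROFILE=all
// A "standard" instance skips the secret-scanning queues and jobs:
//   QUEUE_WORKER_PROFILE=standard
// A dedicated scanner instance processes only the secret-scanning queues and jobs:
//   QUEUE_WORKER_PROFILE=secret-scanning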
@@ -11,9 +11,15 @@ import {
TScanFullRepoEventPayload,
TScanPushEventPayload
} from "@app/ee/services/secret-scanning/secret-scanning-queue/secret-scanning-queue-types";
import {
TQueueSecretScanningDataSourceFullScan,
TQueueSecretScanningResourceDiffScan,
TQueueSecretScanningSendNotification
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-types";
import { getConfig } from "@app/lib/config/env";
import { buildRedisFromConfig, TRedisConfigKeys } from "@app/lib/config/redis";
import { logger } from "@app/lib/logger";
import { QueueWorkerProfile } from "@app/lib/types";
import { CaType } from "@app/services/certificate-authority/certificate-authority-enums";
import {
TFailedIntegrationSyncEmailsPayload,
@@ -54,7 +60,8 @@ export enum QueueName {
ImportSecretsFromExternalSource = "import-secrets-from-external-source",
AppConnectionSecretSync = "app-connection-secret-sync",
SecretRotationV2 = "secret-rotation-v2",
InvalidateCache = "invalidate-cache"
InvalidateCache = "invalidate-cache",
SecretScanningV2 = "secret-scanning-v2"
}

export enum QueueJobs {
@@ -88,6 +95,9 @@ export enum QueueJobs {
SecretRotationV2RotateSecrets = "secret-rotation-v2-rotate-secrets",
SecretRotationV2SendNotification = "secret-rotation-v2-send-notification",
InvalidateCache = "invalidate-cache",
SecretScanningV2FullScan = "secret-scanning-v2-full-scan",
SecretScanningV2DiffScan = "secret-scanning-v2-diff-scan",
SecretScanningV2SendNotification = "secret-scanning-v2-notification",
CaOrderCertificateForSubscriber = "ca-order-certificate-for-subscriber",
PkiSubscriberDailyAutoRenewal = "pki-subscriber-daily-auto-renewal"
}
@@ -250,6 +260,19 @@ export type TQueueJobTypes = {
};
};
};
[QueueName.SecretScanningV2]:
| {
name: QueueJobs.SecretScanningV2FullScan;
payload: TQueueSecretScanningDataSourceFullScan;
}
| {
name: QueueJobs.SecretScanningV2DiffScan;
payload: TQueueSecretScanningResourceDiffScan;
}
| {
name: QueueJobs.SecretScanningV2SendNotification;
payload: TQueueSecretScanningSendNotification;
};
[QueueName.CaLifecycle]: {
name: QueueJobs.CaOrderCertificateForSubscriber;
payload: {
@@ -263,6 +286,37 @@ export type TQueueJobTypes = {
};
};

const SECRET_SCANNING_JOBS = [
QueueJobs.SecretScanningV2FullScan,
QueueJobs.SecretScanningV2DiffScan,
QueueJobs.SecretScanningV2SendNotification,
QueueJobs.SecretScan
];

const NON_STANDARD_JOBS = [...SECRET_SCANNING_JOBS];

const SECRET_SCANNING_QUEUES = [
QueueName.SecretScanningV2,
QueueName.SecretFullRepoScan,
QueueName.SecretPushEventScan
];

const NON_STANDARD_QUEUES = [...SECRET_SCANNING_QUEUES];

const isQueueEnabled = (name: QueueName) => {
const appCfg = getConfig();
switch (appCfg.QUEUE_WORKER_PROFILE) {
case QueueWorkerProfile.Standard:
return !NON_STANDARD_QUEUES.includes(name);
case QueueWorkerProfile.SecretScanning:
return SECRET_SCANNING_QUEUES.includes(name);
case QueueWorkerProfile.All:
default:
// allow all
return true;
}
};

export type TQueueServiceFactory = ReturnType<typeof queueServiceFactory>;
export const queueServiceFactory = (
redisCfg: TRedisConfigKeys,
@@ -319,7 +373,7 @@ export const queueServiceFactory = (
});

const appCfg = getConfig();
if (appCfg.QUEUE_WORKERS_ENABLED) {
if (appCfg.QUEUE_WORKERS_ENABLED && isQueueEnabled(name)) {
workerContainer[name] = new Worker<TQueueJobTypes[T]["payload"], void, TQueueJobTypes[T]["name"]>(name, jobFn, {
...queueSettings,
connection
@@ -338,6 +392,30 @@ export const queueServiceFactory = (
throw new Error(`${jobName} queue is already initialized`);
}

const appCfg = getConfig();

if (!appCfg.QUEUE_WORKERS_ENABLED) return;

switch (appCfg.QUEUE_WORKER_PROFILE) {
case QueueWorkerProfile.Standard:
if (NON_STANDARD_JOBS.includes(jobName)) {
// only process standard jobs
return;
}

break;
case QueueWorkerProfile.SecretScanning:
if (!SECRET_SCANNING_JOBS.includes(jobName)) {
// only process secret scanning jobs
return;
}

break;
case QueueWorkerProfile.All:
default:
// allow all
}

await pgBoss.createQueue(jobName);
queueContainerPg[jobName] = true;

@@ -357,7 +435,7 @@ export const queueServiceFactory = (
listener: WorkerListener<TQueueJobTypes[T]["payload"], void, TQueueJobTypes[T]["name"]>[U]
) => {
const appCfg = getConfig();
if (!appCfg.QUEUE_WORKERS_ENABLED) {
if (!appCfg.QUEUE_WORKERS_ENABLED || !isQueueEnabled(name)) {
return;
}

66  backend/src/server/plugins/secret-scanner-v2.ts  Normal file
@@ -0,0 +1,66 @@
import type { EmitterWebhookEventName } from "@octokit/webhooks/dist-types/types";
import { PushEvent } from "@octokit/webhooks-types";
import { Probot } from "probot";

import { getConfig } from "@app/lib/config/env";
import { logger } from "@app/lib/logger";
import { writeLimit } from "@app/server/config/rateLimiter";

export const registerSecretScanningV2Webhooks = async (server: FastifyZodProvider) => {
const probotApp = (app: Probot) => {
app.on("installation.deleted", async (context) => {
const { payload } = context;
const { installation } = payload;

await server.services.secretScanningV2.github.handleInstallationDeletedEvent(installation.id);
});

app.on("installation", async (context) => {
const { payload } = context;
logger.info({ repositories: payload.repositories }, "Installed secret scanner to");
});

app.on("push", async (context) => {
const { payload } = context;
await server.services.secretScanningV2.github.handlePushEvent(payload as PushEvent);
});
};

const appCfg = getConfig();

if (!appCfg.isSecretScanningV2Configured) {
logger.info("Secret Scanning V2 is not configured. Skipping registration of secret scanning v2 webhooks.");
return;
}

const probot = new Probot({
appId: appCfg.INF_APP_CONNECTION_GITHUB_RADAR_APP_ID as string,
privateKey: appCfg.INF_APP_CONNECTION_GITHUB_RADAR_APP_PRIVATE_KEY as string,
secret: appCfg.INF_APP_CONNECTION_GITHUB_RADAR_APP_WEBHOOK_SECRET as string
});

await probot.load(probotApp);

// github push event webhook
server.route({
method: "POST",
url: "/github",
config: {
rateLimit: writeLimit
},
handler: async (req, res) => {
const eventName = req.headers["x-github-event"] as EmitterWebhookEventName;
const signatureSHA256 = req.headers["x-hub-signature-256"] as string;
const id = req.headers["x-github-delivery"] as string;

await probot.webhooks.verifyAndReceive({
id,
name: eventName,
payload: JSON.stringify(req.body),
signature: signatureSHA256
});

return res.send("ok");
}
});
};
@@ -92,6 +92,9 @@ import { gitAppInstallSessionDALFactory } from "@app/ee/services/secret-scanning
import { secretScanningDALFactory } from "@app/ee/services/secret-scanning/secret-scanning-dal";
import { secretScanningQueueFactory } from "@app/ee/services/secret-scanning/secret-scanning-queue";
import { secretScanningServiceFactory } from "@app/ee/services/secret-scanning/secret-scanning-service";
import { secretScanningV2DALFactory } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-dal";
import { secretScanningV2QueueServiceFactory } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-queue";
import { secretScanningV2ServiceFactory } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-service";
import { secretSnapshotServiceFactory } from "@app/ee/services/secret-snapshot/secret-snapshot-service";
import { snapshotDALFactory } from "@app/ee/services/secret-snapshot/snapshot-dal";
import { snapshotFolderDALFactory } from "@app/ee/services/secret-snapshot/snapshot-folder-dal";
@@ -118,6 +121,7 @@ import { getConfig, TEnvConfig } from "@app/lib/config/env";
import { logger } from "@app/lib/logger";
import { TQueueServiceFactory } from "@app/queue";
import { readLimit } from "@app/server/config/rateLimiter";
import { registerSecretScanningV2Webhooks } from "@app/server/plugins/secret-scanner-v2";
import { accessTokenQueueServiceFactory } from "@app/services/access-token-queue/access-token-queue";
import { apiKeyDALFactory } from "@app/services/api-key/api-key-dal";
import { apiKeyServiceFactory } from "@app/services/api-key/api-key-service";
@@ -312,6 +316,9 @@ export const registerRoutes = async (
) => {
const appCfg = getConfig();
await server.register(registerSecretScannerGhApp, { prefix: "/ss-webhook" });
await server.register(registerSecretScanningV2Webhooks, {
prefix: "/secret-scanning/webhooks"
});

// db layers
const userDAL = userDALFactory(db);
@@ -459,6 +466,7 @@ export const registerRoutes = async (
const secretRotationV2DAL = secretRotationV2DALFactory(db, folderDAL);
const microsoftTeamsIntegrationDAL = microsoftTeamsIntegrationDALFactory(db);
const projectMicrosoftTeamsConfigDAL = projectMicrosoftTeamsConfigDALFactory(db);
const secretScanningV2DAL = secretScanningV2DALFactory(db);

const permissionService = permissionServiceFactory({
permissionDAL,
@@ -1784,6 +1792,26 @@ export const registerRoutes = async (
smtpService
});

const secretScanningV2Queue = await secretScanningV2QueueServiceFactory({
auditLogService,
secretScanningV2DAL,
queueService,
projectDAL,
projectMembershipDAL,
smtpService,
kmsService,
keyStore
});

const secretScanningV2Service = secretScanningV2ServiceFactory({
permissionService,
appConnectionService,
licenseService,
secretScanningV2DAL,
secretScanningV2Queue,
kmsService
});

await superAdminService.initServerCfg();

// setup the communication with license key server
@@ -1898,7 +1926,8 @@ export const registerRoutes = async (
secretRotationV2: secretRotationV2Service,
microsoftTeams: microsoftTeamsService,
assumePrivileges: assumePrivilegeService,
githubOrgSync: githubOrgSyncConfigService
githubOrgSync: githubOrgSyncConfigService,
secretScanningV2: secretScanningV2Service
});

const cronJobs: CronJob[] = [];
@@ -33,6 +33,10 @@ import {
} from "@app/services/app-connection/databricks";
import { GcpConnectionListItemSchema, SanitizedGcpConnectionSchema } from "@app/services/app-connection/gcp";
import { GitHubConnectionListItemSchema, SanitizedGitHubConnectionSchema } from "@app/services/app-connection/github";
import {
GitHubRadarConnectionListItemSchema,
SanitizedGitHubRadarConnectionSchema
} from "@app/services/app-connection/github-radar";
import {
HCVaultConnectionListItemSchema,
SanitizedHCVaultConnectionSchema
@@ -67,6 +71,7 @@ import { AuthMode } from "@app/services/auth/auth-type";
const SanitizedAppConnectionSchema = z.union([
...SanitizedAwsConnectionSchema.options,
...SanitizedGitHubConnectionSchema.options,
...SanitizedGitHubRadarConnectionSchema.options,
...SanitizedGcpConnectionSchema.options,
...SanitizedAzureKeyVaultConnectionSchema.options,
...SanitizedAzureAppConfigurationConnectionSchema.options,
@@ -91,6 +96,7 @@ const SanitizedAppConnectionSchema = z.union([
const AppConnectionOptionsSchema = z.discriminatedUnion("app", [
AwsConnectionListItemSchema,
GitHubConnectionListItemSchema,
GitHubRadarConnectionListItemSchema,
GcpConnectionListItemSchema,
AzureKeyVaultConnectionListItemSchema,
AzureAppConfigurationConnectionListItemSchema,
@@ -0,0 +1,54 @@
import { z } from "zod";

import { readLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
import {
CreateGitHubRadarConnectionSchema,
SanitizedGitHubRadarConnectionSchema,
UpdateGitHubRadarConnectionSchema
} from "@app/services/app-connection/github-radar";
import { AuthMode } from "@app/services/auth/auth-type";

import { registerAppConnectionEndpoints } from "./app-connection-endpoints";

export const registerGitHubRadarConnectionRouter = async (server: FastifyZodProvider) => {
registerAppConnectionEndpoints({
app: AppConnection.GitHubRadar,
server,
sanitizedResponseSchema: SanitizedGitHubRadarConnectionSchema,
createSchema: CreateGitHubRadarConnectionSchema,
updateSchema: UpdateGitHubRadarConnectionSchema
});

// The below endpoints are not exposed and for Infisical App use

server.route({
method: "GET",
url: `/:connectionId/repositories`,
config: {
rateLimit: readLimit
},
schema: {
params: z.object({
connectionId: z.string().uuid()
}),
response: {
200: z.object({
repositories: z.object({ id: z.number(), name: z.string() }).array()
})
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const { connectionId } = req.params;

const repositories = await server.services.appConnection.githubRadar.listRepositories(
connectionId,
req.permission
);

return { repositories };
}
});
};
@@ -11,6 +11,7 @@ import { registerCamundaConnectionRouter } from "./camunda-connection-router";
import { registerDatabricksConnectionRouter } from "./databricks-connection-router";
import { registerGcpConnectionRouter } from "./gcp-connection-router";
import { registerGitHubConnectionRouter } from "./github-connection-router";
import { registerGitHubRadarConnectionRouter } from "./github-radar-connection-router";
import { registerHCVaultConnectionRouter } from "./hc-vault-connection-router";
import { registerHumanitecConnectionRouter } from "./humanitec-connection-router";
import { registerLdapConnectionRouter } from "./ldap-connection-router";
@@ -28,6 +29,7 @@ export const APP_CONNECTION_REGISTER_ROUTER_MAP: Record<AppConnection, (server:
{
[AppConnection.AWS]: registerAwsConnectionRouter,
[AppConnection.GitHub]: registerGitHubConnectionRouter,
[AppConnection.GitHubRadar]: registerGitHubRadarConnectionRouter,
[AppConnection.GCP]: registerGcpConnectionRouter,
[AppConnection.AzureKeyVault]: registerAzureKeyVaultConnectionRouter,
[AppConnection.AzureAppConfiguration]: registerAzureAppConfigurationConnectionRouter,
@@ -160,7 +160,14 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
.default("false")
.transform((value) => value === "true"),
type: z
.enum([ProjectType.SecretManager, ProjectType.KMS, ProjectType.CertificateManager, ProjectType.SSH, "all"])
.enum([
ProjectType.SecretManager,
ProjectType.KMS,
ProjectType.CertificateManager,
ProjectType.SSH,
ProjectType.SecretScanning,
"all"
])
.optional()
}),
response: {
@@ -173,7 +180,7 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
})
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.API_KEY]),
onRequest: verifyAuth([AuthMode.JWT, AuthMode.API_KEY, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const workspaces = await server.services.project.getProjects({
includeRoles: req.query.includeRoles,
@@ -1,5 +1,6 @@
export enum AppConnection {
GitHub = "github",
GitHubRadar = "github-radar",
AWS = "aws",
Databricks = "databricks",
GCP = "gcp",
@@ -52,6 +52,11 @@ import {
} from "./databricks";
import { GcpConnectionMethod, getGcpConnectionListItem, validateGcpConnectionCredentials } from "./gcp";
import { getGitHubConnectionListItem, GitHubConnectionMethod, validateGitHubConnectionCredentials } from "./github";
import {
getGitHubRadarConnectionListItem,
GitHubRadarConnectionMethod,
validateGitHubRadarConnectionCredentials
} from "./github-radar";
import {
getHCVaultConnectionListItem,
HCVaultConnectionMethod,
@@ -89,6 +94,7 @@ export const listAppConnectionOptions = () => {
return [
getAwsConnectionListItem(),
getGitHubConnectionListItem(),
getGitHubRadarConnectionListItem(),
getGcpConnectionListItem(),
getAzureKeyVaultConnectionListItem(),
getAzureAppConfigurationConnectionListItem(),
@@ -160,6 +166,7 @@ export const validateAppConnectionCredentials = async (
[AppConnection.AWS]: validateAwsConnectionCredentials as TAppConnectionCredentialsValidator,
[AppConnection.Databricks]: validateDatabricksConnectionCredentials as TAppConnectionCredentialsValidator,
[AppConnection.GitHub]: validateGitHubConnectionCredentials as TAppConnectionCredentialsValidator,
[AppConnection.GitHubRadar]: validateGitHubRadarConnectionCredentials as TAppConnectionCredentialsValidator,
[AppConnection.GCP]: validateGcpConnectionCredentials as TAppConnectionCredentialsValidator,
[AppConnection.AzureKeyVault]: validateAzureKeyVaultConnectionCredentials as TAppConnectionCredentialsValidator,
[AppConnection.AzureAppConfiguration]:
@@ -188,6 +195,7 @@ export const validateAppConnectionCredentials = async (
export const getAppConnectionMethodName = (method: TAppConnection["method"]) => {
switch (method) {
case GitHubConnectionMethod.App:
case GitHubRadarConnectionMethod.App:
return "GitHub App";
case AzureKeyVaultConnectionMethod.OAuth:
case AzureAppConfigurationConnectionMethod.OAuth:
@@ -258,6 +266,7 @@ export const TRANSITION_CONNECTION_CREDENTIALS_TO_PLATFORM: Record<
[AppConnection.AWS]: platformManagedCredentialsNotSupported,
[AppConnection.Databricks]: platformManagedCredentialsNotSupported,
[AppConnection.GitHub]: platformManagedCredentialsNotSupported,
[AppConnection.GitHubRadar]: platformManagedCredentialsNotSupported,
[AppConnection.GCP]: platformManagedCredentialsNotSupported,
[AppConnection.AzureKeyVault]: platformManagedCredentialsNotSupported,
[AppConnection.AzureAppConfiguration]: platformManagedCredentialsNotSupported,
@@ -3,6 +3,7 @@ import { AppConnection, AppConnectionPlanType } from "./app-connection-enums";
export const APP_CONNECTION_NAME_MAP: Record<AppConnection, string> = {
[AppConnection.AWS]: "AWS",
[AppConnection.GitHub]: "GitHub",
[AppConnection.GitHubRadar]: "GitHub Radar",
[AppConnection.GCP]: "GCP",
[AppConnection.AzureKeyVault]: "Azure Key Vault",
[AppConnection.AzureAppConfiguration]: "Azure App Configuration",
@@ -27,6 +28,7 @@ export const APP_CONNECTION_NAME_MAP: Record<AppConnection, string> = {
export const APP_CONNECTION_PLAN_MAP: Record<AppConnection, AppConnectionPlanType> = {
[AppConnection.AWS]: AppConnectionPlanType.Regular,
[AppConnection.GitHub]: AppConnectionPlanType.Regular,
[AppConnection.GitHubRadar]: AppConnectionPlanType.Regular,
[AppConnection.GCP]: AppConnectionPlanType.Regular,
[AppConnection.AzureKeyVault]: AppConnectionPlanType.Regular,
[AppConnection.AzureAppConfiguration]: AppConnectionPlanType.Regular,
@@ -19,6 +19,7 @@ import {
validateAppConnectionCredentials
} from "@app/services/app-connection/app-connection-fns";
import { auth0ConnectionService } from "@app/services/app-connection/auth0/auth0-connection-service";
import { githubRadarConnectionService } from "@app/services/app-connection/github-radar/github-radar-connection-service";
import { TKmsServiceFactory } from "@app/services/kms/kms-service";

import { ValidateOnePassConnectionCredentialsSchema } from "./1password";
@@ -49,6 +50,7 @@ import { ValidateGcpConnectionCredentialsSchema } from "./gcp";
import { gcpConnectionService } from "./gcp/gcp-connection-service";
import { ValidateGitHubConnectionCredentialsSchema } from "./github";
import { githubConnectionService } from "./github/github-connection-service";
import { ValidateGitHubRadarConnectionCredentialsSchema } from "./github-radar";
import { ValidateHCVaultConnectionCredentialsSchema } from "./hc-vault";
import { hcVaultConnectionService } from "./hc-vault/hc-vault-connection-service";
import { ValidateHumanitecConnectionCredentialsSchema } from "./humanitec";
@@ -78,6 +80,7 @@ export type TAppConnectionServiceFactory = ReturnType<typeof appConnectionServic
const VALIDATE_APP_CONNECTION_CREDENTIALS_MAP: Record<AppConnection, TValidateAppConnectionCredentialsSchema> = {
[AppConnection.AWS]: ValidateAwsConnectionCredentialsSchema,
[AppConnection.GitHub]: ValidateGitHubConnectionCredentialsSchema,
[AppConnection.GitHubRadar]: ValidateGitHubRadarConnectionCredentialsSchema,
[AppConnection.GCP]: ValidateGcpConnectionCredentialsSchema,
[AppConnection.AzureKeyVault]: ValidateAzureKeyVaultConnectionCredentialsSchema,
[AppConnection.AzureAppConfiguration]: ValidateAzureAppConfigurationConnectionCredentialsSchema,
@@ -486,6 +489,7 @@ export const appConnectionServiceFactory = ({
connectAppConnectionById,
listAvailableAppConnectionsForUser,
github: githubConnectionService(connectAppConnectionById),
githubRadar: githubRadarConnectionService(connectAppConnectionById),
gcp: gcpConnectionService(connectAppConnectionById),
databricks: databricksConnectionService(connectAppConnectionById, appConnectionDAL, kmsService),
aws: awsConnectionService(connectAppConnectionById),
@@ -69,6 +69,12 @@ import {
TGitHubConnectionInput,
TValidateGitHubConnectionCredentialsSchema
} from "./github";
import {
TGitHubRadarConnection,
TGitHubRadarConnectionConfig,
TGitHubRadarConnectionInput,
TValidateGitHubRadarConnectionCredentialsSchema
} from "./github-radar";
import {
THCVaultConnection,
THCVaultConnectionConfig,
@@ -122,6 +128,7 @@ import {
export type TAppConnection = { id: string } & (
| TAwsConnection
| TGitHubConnection
| TGitHubRadarConnection
| TGcpConnection
| TAzureKeyVaultConnection
| TAzureAppConfigurationConnection
@@ -150,6 +157,7 @@ export type TSqlConnection = TPostgresConnection | TMsSqlConnection | TMySqlConn
export type TAppConnectionInput = { id: string } & (
| TAwsConnectionInput
| TGitHubConnectionInput
| TGitHubRadarConnectionInput
| TGcpConnectionInput
| TAzureKeyVaultConnectionInput
| TAzureAppConfigurationConnectionInput
@@ -185,6 +193,7 @@ export type TUpdateAppConnectionDTO = Partial<Omit<TCreateAppConnectionDTO, "met
export type TAppConnectionConfig =
| TAwsConnectionConfig
| TGitHubConnectionConfig
| TGitHubRadarConnectionConfig
| TGcpConnectionConfig
| TAzureKeyVaultConnectionConfig
| TAzureAppConfigurationConnectionConfig
@@ -206,6 +215,7 @@ export type TAppConnectionConfig =
export type TValidateAppConnectionCredentialsSchema =
| TValidateAwsConnectionCredentialsSchema
| TValidateGitHubConnectionCredentialsSchema
| TValidateGitHubRadarConnectionCredentialsSchema
| TValidateGcpConnectionCredentialsSchema
| TValidateAzureKeyVaultConnectionCredentialsSchema
| TValidateAzureAppConfigurationConnectionCredentialsSchema
@@ -0,0 +1,3 @@
export enum GitHubRadarConnectionMethod {
App = "github-app"
}
@@ -0,0 +1,166 @@
import { createAppAuth } from "@octokit/auth-app";
import { Octokit } from "@octokit/rest";
import { AxiosResponse } from "axios";

import { getConfig } from "@app/lib/config/env";
import { request } from "@app/lib/config/request";
import { BadRequestError, ForbiddenRequestError, InternalServerError } from "@app/lib/errors";
import { getAppConnectionMethodName } from "@app/services/app-connection/app-connection-fns";
import { IntegrationUrls } from "@app/services/integration-auth/integration-list";

import { AppConnection } from "../app-connection-enums";
import { GitHubRadarConnectionMethod } from "./github-radar-connection-enums";
import {
  TGitHubRadarConnection,
  TGitHubRadarConnectionConfig,
  TGitHubRadarRepository
} from "./github-radar-connection-types";

export const getGitHubRadarConnectionListItem = () => {
  const { INF_APP_CONNECTION_GITHUB_RADAR_APP_SLUG } = getConfig();

  return {
    name: "GitHub Radar" as const,
    app: AppConnection.GitHubRadar as const,
    methods: Object.values(GitHubRadarConnectionMethod) as [GitHubRadarConnectionMethod.App],
    appClientSlug: INF_APP_CONNECTION_GITHUB_RADAR_APP_SLUG
  };
};

export const getGitHubRadarClient = (appConnection: TGitHubRadarConnection) => {
  const appCfg = getConfig();

  const { method, credentials } = appConnection;

  let client: Octokit;

  switch (method) {
    case GitHubRadarConnectionMethod.App:
      if (!appCfg.INF_APP_CONNECTION_GITHUB_RADAR_APP_ID || !appCfg.INF_APP_CONNECTION_GITHUB_RADAR_APP_PRIVATE_KEY) {
        throw new InternalServerError({
          message: `GitHub ${getAppConnectionMethodName(method).replace(
            "GitHub",
            ""
          )} environment variables have not been configured`
        });
      }

      client = new Octokit({
        authStrategy: createAppAuth,
        auth: {
          appId: appCfg.INF_APP_CONNECTION_GITHUB_RADAR_APP_ID,
          privateKey: appCfg.INF_APP_CONNECTION_GITHUB_RADAR_APP_PRIVATE_KEY,
          installationId: credentials.installationId
        }
      });
      break;
    default:
      throw new InternalServerError({
        message: `Unhandled GitHub Radar connection method: ${method as GitHubRadarConnectionMethod}`
      });
  }

  return client;
};

export const listGitHubRadarRepositories = async (appConnection: TGitHubRadarConnection) => {
  const client = getGitHubRadarClient(appConnection);

  const repositories: TGitHubRadarRepository[] = await client.paginate("GET /installation/repositories");

  return repositories;
};

type TokenRespData = {
  access_token: string;
  scope: string;
  token_type: string;
  error?: string;
};

export const validateGitHubRadarConnectionCredentials = async (config: TGitHubRadarConnectionConfig) => {
  const { credentials, method } = config;

  const { INF_APP_CONNECTION_GITHUB_RADAR_APP_CLIENT_ID, INF_APP_CONNECTION_GITHUB_RADAR_APP_CLIENT_SECRET, SITE_URL } =
    getConfig();

  if (!INF_APP_CONNECTION_GITHUB_RADAR_APP_CLIENT_ID || !INF_APP_CONNECTION_GITHUB_RADAR_APP_CLIENT_SECRET) {
    throw new InternalServerError({
      message: `GitHub ${getAppConnectionMethodName(method).replace(
        "GitHub",
        ""
      )} environment variables have not been configured`
    });
  }

  let tokenResp: AxiosResponse<TokenRespData>;

  try {
    tokenResp = await request.get<TokenRespData>("https://github.com/login/oauth/access_token", {
      params: {
        client_id: INF_APP_CONNECTION_GITHUB_RADAR_APP_CLIENT_ID,
        client_secret: INF_APP_CONNECTION_GITHUB_RADAR_APP_CLIENT_SECRET,
        code: credentials.code,
        redirect_uri: `${SITE_URL}/organization/app-connections/github-radar/oauth/callback`
      },
      headers: {
        Accept: "application/json",
        "Accept-Encoding": "application/json"
      }
    });
  } catch (e: unknown) {
    throw new BadRequestError({
      message: `Unable to validate connection: verify credentials`
    });
  }

  if (tokenResp.status !== 200) {
    throw new BadRequestError({
      message: `Unable to validate credentials: GitHub responded with a status code of ${tokenResp.status} (${tokenResp.statusText}). Verify credentials and try again.`
    });
  }

  if (method === GitHubRadarConnectionMethod.App) {
    const installationsResp = await request.get<{
      installations: {
        id: number;
        account: {
          login: string;
          type: string;
          id: number;
        };
      }[];
    }>(IntegrationUrls.GITHUB_USER_INSTALLATIONS, {
      headers: {
        Accept: "application/json",
        Authorization: `Bearer ${tokenResp.data.access_token}`,
        "Accept-Encoding": "application/json"
      }
    });

    const matchingInstallation = installationsResp.data.installations.find(
      (installation) => installation.id === +credentials.installationId
    );

    if (!matchingInstallation) {
      throw new ForbiddenRequestError({
        message: "User does not have access to the provided installation"
      });
    }
  }

  if (!tokenResp.data.access_token) {
    throw new InternalServerError({ message: `Missing access token: ${tokenResp.data.error}` });
  }

  switch (method) {
    case GitHubRadarConnectionMethod.App:
      return {
        installationId: credentials.installationId
      };
    default:
      throw new InternalServerError({
        message: `Unhandled GitHub connection method: ${method as GitHubRadarConnectionMethod}`
      });
  }
};
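Aside: a hypothetical usage sketch of the helpers above, not part of the commit. The connection object is a placeholder with only the fields the client builder reads, hence the cast.

```ts
import { listGitHubRadarRepositories } from "./github-radar-connection-fns";
import { GitHubRadarConnectionMethod } from "./github-radar-connection-enums";
import { AppConnection } from "../app-connection-enums";
import { TGitHubRadarConnection } from "./github-radar-connection-types";

// Placeholder connection: base connection fields (id, org, timestamps, ...) are omitted here.
const demoConnection = {
  app: AppConnection.GitHubRadar,
  method: GitHubRadarConnectionMethod.App,
  credentials: { installationId: "12345678" } // assumed installation id
} as unknown as TGitHubRadarConnection;

const printRepositories = async () => {
  const repositories = await listGitHubRadarRepositories(demoConnection);
  for (const repo of repositories) {
    console.log(`${repo.id}\t${repo.full_name}`);
  }
};

void printRepositories();
```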
@@ -0,0 +1,66 @@
import { z } from "zod";

import { AppConnections } from "@app/lib/api-docs";
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
import {
  BaseAppConnectionSchema,
  GenericCreateAppConnectionFieldsSchema,
  GenericUpdateAppConnectionFieldsSchema
} from "@app/services/app-connection/app-connection-schemas";

import { GitHubRadarConnectionMethod } from "./github-radar-connection-enums";

export const GitHubRadarConnectionInputCredentialsSchema = z.object({
  code: z.string().trim().min(1, "GitHub Radar App code required"),
  installationId: z.string().min(1, "GitHub Radar App Installation ID required")
});

export const GitHubRadarConnectionOutputCredentialsSchema = z.object({
  installationId: z.string()
});

export const ValidateGitHubRadarConnectionCredentialsSchema = z.discriminatedUnion("method", [
  z.object({
    method: z
      .literal(GitHubRadarConnectionMethod.App)
      .describe(AppConnections.CREATE(AppConnection.GitHubRadar).method),
    credentials: GitHubRadarConnectionInputCredentialsSchema.describe(
      AppConnections.CREATE(AppConnection.GitHubRadar).credentials
    )
  })
]);

export const CreateGitHubRadarConnectionSchema = ValidateGitHubRadarConnectionCredentialsSchema.and(
  GenericCreateAppConnectionFieldsSchema(AppConnection.GitHubRadar)
);

export const UpdateGitHubRadarConnectionSchema = z
  .object({
    credentials: GitHubRadarConnectionInputCredentialsSchema.optional().describe(
      AppConnections.UPDATE(AppConnection.GitHubRadar).credentials
    )
  })
  .and(GenericUpdateAppConnectionFieldsSchema(AppConnection.GitHubRadar));

const BaseGitHubRadarConnectionSchema = BaseAppConnectionSchema.extend({ app: z.literal(AppConnection.GitHubRadar) });

export const GitHubRadarConnectionSchema = BaseGitHubRadarConnectionSchema.extend({
  method: z.literal(GitHubRadarConnectionMethod.App),
  credentials: GitHubRadarConnectionOutputCredentialsSchema
});

export const SanitizedGitHubRadarConnectionSchema = z.discriminatedUnion("method", [
  BaseGitHubRadarConnectionSchema.extend({
    method: z.literal(GitHubRadarConnectionMethod.App),
    credentials: GitHubRadarConnectionOutputCredentialsSchema.pick({})
  })
]);

export const GitHubRadarConnectionListItemSchema = z.object({
  name: z.literal("GitHub Radar"),
  app: z.literal(AppConnection.GitHubRadar),
  // the below is preferable but currently breaks with our zod to json schema parser
  // methods: z.tuple([z.literal(GitHubConnectionMethod.App), z.literal(GitHubConnectionMethod.OAuth)]),
  methods: z.nativeEnum(GitHubRadarConnectionMethod).array(),
  appClientSlug: z.string().optional()
});
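Aside: a sketch of how the discriminated-union schema above validates an incoming payload. The payload values are placeholders, not part of the commit.

```ts
import { ValidateGitHubRadarConnectionCredentialsSchema } from "./github-radar-connection-schemas";
import { GitHubRadarConnectionMethod } from "./github-radar-connection-enums";

const result = ValidateGitHubRadarConnectionCredentialsSchema.safeParse({
  method: GitHubRadarConnectionMethod.App,
  credentials: { code: "gho_example", installationId: "12345678" } // placeholder values
});

if (result.success) {
  console.log(result.data.credentials.installationId); // "12345678"
} else {
  console.error(result.error.flatten()); // e.g. missing code / installation ID messages
}
```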
@@ -0,0 +1,24 @@
import { OrgServiceActor } from "@app/lib/types";
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
import { listGitHubRadarRepositories } from "@app/services/app-connection/github-radar/github-radar-connection-fns";
import { TGitHubRadarConnection } from "@app/services/app-connection/github-radar/github-radar-connection-types";

type TGetAppConnectionFunc = (
  app: AppConnection,
  connectionId: string,
  actor: OrgServiceActor
) => Promise<TGitHubRadarConnection>;

export const githubRadarConnectionService = (getAppConnection: TGetAppConnectionFunc) => {
  const listRepositories = async (connectionId: string, actor: OrgServiceActor) => {
    const appConnection = await getAppConnection(AppConnection.GitHubRadar, connectionId, actor);

    const repositories = await listGitHubRadarRepositories(appConnection);

    return repositories.map((repo) => ({ id: repo.id, name: repo.full_name }));
  };

  return {
    listRepositories
  };
};
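Aside: a hypothetical wiring sketch for the factory above, not part of the commit. The real `getAppConnection` loader lives in the app-connection service and also enforces permissions; here it is faked with an in-memory map, and the service file path in the import is an assumption.

```ts
import { OrgServiceActor } from "@app/lib/types";
import { AppConnection } from "../app-connection-enums";
import { githubRadarConnectionService } from "./github-radar-connection-service"; // assumed filename
import { TGitHubRadarConnection } from "./github-radar-connection-types";

const fakeStore = new Map<string, TGitHubRadarConnection>(); // stand-in for the DAL and permission checks

const service = githubRadarConnectionService(async (app, connectionId, _actor: OrgServiceActor) => {
  const connection = fakeStore.get(connectionId);
  if (!connection || connection.app !== app) throw new Error("Connection not found"); // simplified error handling
  return connection;
});

// service.listRepositories(connectionId, actor) then resolves to [{ id, name }] entries.
```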
@@ -0,0 +1,28 @@
import { z } from "zod";

import { DiscriminativePick } from "@app/lib/types";

import { AppConnection } from "../app-connection-enums";
import {
  CreateGitHubRadarConnectionSchema,
  GitHubRadarConnectionSchema,
  ValidateGitHubRadarConnectionCredentialsSchema
} from "./github-radar-connection-schemas";

export type TGitHubRadarConnection = z.infer<typeof GitHubRadarConnectionSchema>;

export type TGitHubRadarConnectionInput = z.infer<typeof CreateGitHubRadarConnectionSchema> & {
  app: AppConnection.GitHubRadar;
};

export type TValidateGitHubRadarConnectionCredentialsSchema = typeof ValidateGitHubRadarConnectionCredentialsSchema;

export type TGitHubRadarConnectionConfig = DiscriminativePick<
  TGitHubRadarConnectionInput,
  "method" | "app" | "credentials"
>;

export type TGitHubRadarRepository = {
  id: number;
  full_name: string;
};
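Aside: `DiscriminativePick` comes from `@app/lib/types` and its definition is not shown in this diff. The assumption below is that it behaves like a Pick that distributes over each union member, which is what keeps `app`, `method`, and `credentials` paired per member in `TGitHubRadarConnectionConfig`.

```ts
// Local stand-in for the internal helper; illustrative only.
type DistributivePickDemo<T, K extends keyof T> = T extends unknown ? Pick<T, Extract<keyof T, K>> : never;

type DemoInput =
  | { app: "github-radar"; method: "github-app"; credentials: { installationId: string }; name: string }
  | { app: "github"; method: "oauth"; credentials: { code: string }; name: string };

// Keeps method/app/credentials together per member, so narrowing on `method` still narrows `credentials`.
type DemoConfig = DistributivePickDemo<DemoInput, "method" | "app" | "credentials">;

const describe = (config: DemoConfig) =>
  config.method === "github-app" ? `installation ${config.credentials.installationId}` : `oauth ${config.credentials.code}`;

console.log(describe({ app: "github-radar", method: "github-app", credentials: { installationId: "42" } }));
```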
@@ -0,0 +1,4 @@
export * from "./github-radar-connection-enums";
export * from "./github-radar-connection-fns";
export * from "./github-radar-connection-schemas";
export * from "./github-radar-connection-types";
@@ -13,7 +13,12 @@ import { LdapConnectionMethod, LdapProvider } from "./ldap-connection-enums";

export const LdapConnectionSimpleBindCredentialsSchema = z.object({
  provider: z.nativeEnum(LdapProvider).describe(AppConnections.CREDENTIALS.LDAP.provider),
  url: z.string().trim().min(1, "URL required").regex(LdapUrlRegex).describe(AppConnections.CREDENTIALS.LDAP.url),
  url: z
    .string()
    .trim()
    .min(1, "URL required")
    .refine((value) => LdapUrlRegex.test(value), "Invalid LDAP URL")
    .describe(AppConnections.CREDENTIALS.LDAP.url),
  dn: z
    .string()
    .trim()
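Aside: a standalone sketch of the validation style the hunk switches to. `.refine` with an explicit message yields a readable error and works with any object exposing `.test` (for example an RE2 instance), whereas `.regex` expects a native RegExp; the diff does not state which of these motivated the change. `LdapUrlRegexDemo` is a local stand-in, not the real `LdapUrlRegex`.

```ts
import { z } from "zod";

const LdapUrlRegexDemo = /^ldaps?:\/\/\S+$/; // assumed shape, for illustration only

const UrlSchema = z
  .string()
  .trim()
  .min(1, "URL required")
  .refine((value) => LdapUrlRegexDemo.test(value), "Invalid LDAP URL");

console.log(UrlSchema.safeParse("ldaps://ad.example.com").success); // true

const bad = UrlSchema.safeParse("not-a-url");
if (!bad.success) console.log(bad.error.issues[0].message); // "Invalid LDAP URL"
```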
@@ -139,6 +139,11 @@ export const identityKubernetesAuthServiceFactory = ({
      tokenReviewerJwt = serviceAccountJwt;
    }

    let { kubernetesHost } = identityKubernetesAuth;
    if (kubernetesHost.startsWith("https://") || kubernetesHost.startsWith("http://")) {
      kubernetesHost = new RE2("^https?:\\/\\/").replace(kubernetesHost, "");
    }

    const tokenReviewCallback = async (host: string = identityKubernetesAuth.kubernetesHost, port?: number) => {
      const baseUrl = port ? `${host}:${port}` : host;

@@ -163,7 +168,8 @@ export const identityKubernetesAuthServiceFactory = ({
          // if ca cert, rejectUnauthorized: true
          httpsAgent: new https.Agent({
            ca: caCert,
            rejectUnauthorized: !!caCert
            rejectUnauthorized: Boolean(caCert),
            servername: kubernetesHost
          })
        }
      )
@@ -186,12 +192,6 @@ export const identityKubernetesAuthServiceFactory = ({
      return res.data;
    };

    let { kubernetesHost } = identityKubernetesAuth;

    if (kubernetesHost.startsWith("https://") || kubernetesHost.startsWith("http://")) {
      kubernetesHost = new RE2("^https?:\\/\\/").replace(kubernetesHost, "");
    }

    const [k8sHost, k8sPort] = kubernetesHost.split(":");

    const data = identityKubernetesAuth.gatewayId
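Aside: a standalone sketch of the host normalization moved up in these hunks (strip an http(s) scheme before splitting host and port). It mirrors the hunk's own `re2` usage; the default import assumes `esModuleInterop`.

```ts
import RE2 from "re2";

const normalizeKubernetesHost = (rawHost: string): { host: string; port?: number } => {
  let host = rawHost;
  if (host.startsWith("https://") || host.startsWith("http://")) {
    host = new RE2("^https?:\\/\\/").replace(host, "");
  }
  const [h, p] = host.split(":");
  return { host: h, port: p ? Number(p) : undefined };
};

console.log(normalizeKubernetesHost("https://10.0.0.1:6443")); // { host: "10.0.0.1", port: 6443 }
console.log(normalizeKubernetesHost("kube.internal")); // { host: "kube.internal", port: undefined }
```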
@@ -412,7 +412,15 @@ export const identityProjectDALFactory = (db: TDbClient) => {
          }
        ]
      });
      return members;
      return members.map((el) => ({
        ...el,
        roles: el.roles.sort((a, b) => {
          const roleA = (a.customRoleName || a.role).toLowerCase();
          const roleB = (b.customRoleName || b.role).toLowerCase();
          return roleA.localeCompare(roleB);
        })
      }));
    } catch (error) {
      throw new DatabaseError({ error, name: "FindByProjectId" });
    }
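Aside: the same case-insensitive role ordering is applied here and in the project membership DAL hunk that follows. A standalone sketch (the hunk sorts the array in place; this copy does not):

```ts
type RoleRow = { role: string; customRoleName?: string | null };

const sortRoles = <T extends RoleRow>(roles: T[]): T[] =>
  [...roles].sort((a, b) => {
    const roleA = (a.customRoleName || a.role).toLowerCase();
    const roleB = (b.customRoleName || b.role).toLowerCase();
    return roleA.localeCompare(roleB);
  });

console.log(sortRoles([{ role: "viewer" }, { role: "admin", customRoleName: "Billing" }, { role: "admin" }]));
// -> [{ role: "admin" }, { role: "admin", customRoleName: "Billing" }, { role: "viewer" }]
```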
@@ -92,7 +92,8 @@ export const projectMembershipDALFactory = (db: TDbClient) => {
        db.ref("temporaryAccessEndTime").withSchema(TableName.ProjectUserMembershipRole),
        db.ref("name").as("projectName").withSchema(TableName.Project)
      )
      .where({ isGhost: false });
      .where({ isGhost: false })
      .orderBy(`${TableName.Users}.username` as "username");

    const members = sqlNestRelationships({
      data: docs,
@@ -149,7 +150,14 @@ export const projectMembershipDALFactory = (db: TDbClient) => {
        }
      ]
    });
    return members;
    return members.map((el) => ({
      ...el,
      roles: el.roles.sort((a, b) => {
        const roleA = (a.customRoleName || a.role).toLowerCase();
        const roleB = (b.customRoleName || b.role).toLowerCase();
        return roleA.localeCompare(roleB);
      })
    }));
  } catch (error) {
    throw new DatabaseError({ error, name: "Find all project members" });
  }
@@ -22,6 +22,56 @@ export type TProjectDALFactory = ReturnType<typeof projectDALFactory>;
export const projectDALFactory = (db: TDbClient) => {
  const projectOrm = ormify(db, TableName.Project);

  const findIdentityProjects = async (identityId: string, orgId: string, projectType: ProjectType | "all") => {
    try {
      const workspaces = await db(TableName.IdentityProjectMembership)
        .where({ identityId })
        .join(TableName.Project, `${TableName.IdentityProjectMembership}.projectId`, `${TableName.Project}.id`)
        .where(`${TableName.Project}.orgId`, orgId)
        .andWhere((qb) => {
          if (projectType !== "all") {
            void qb.where(`${TableName.Project}.type`, projectType);
          }
        })
        .leftJoin(TableName.Environment, `${TableName.Environment}.projectId`, `${TableName.Project}.id`)
        .select(
          selectAllTableCols(TableName.Project),
          db.ref("id").withSchema(TableName.Project).as("_id"),
          db.ref("id").withSchema(TableName.Environment).as("envId"),
          db.ref("slug").withSchema(TableName.Environment).as("envSlug"),
          db.ref("name").withSchema(TableName.Environment).as("envName")
        )
        .orderBy([
          { column: `${TableName.Project}.name`, order: "asc" },
          { column: `${TableName.Environment}.position`, order: "asc" }
        ]);

      const nestedWorkspaces = sqlNestRelationships({
        data: workspaces,
        key: "id",
        parentMapper: ({ _id, ...el }) => ({ _id, ...ProjectsSchema.parse(el) }),
        childrenMapper: [
          {
            key: "envId",
            label: "environments" as const,
            mapper: ({ envId: id, envSlug: slug, envName: name }) => ({
              id,
              slug,
              name
            })
          }
        ]
      });

      return nestedWorkspaces.map((workspace) => ({
        ...workspace,
        organization: workspace.orgId
      }));
    } catch (error) {
      throw new DatabaseError({ error, name: "Find identity projects" });
    }
  };

  const findUserProjects = async (userId: string, orgId: string, projectType: ProjectType | "all") => {
    try {
      const workspaces = await db
@@ -443,6 +493,7 @@ export const projectDALFactory = (db: TDbClient) => {
  return {
    ...projectOrm,
    findUserProjects,
    findIdentityProjects,
    setProjectUpgradeStatus,
    findAllProjectsByIdentity,
    findProjectGhostUser,
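Aside: a standalone sketch of the conditional type filter used by `findIdentityProjects`: passing "all" adds no type predicate, anything else narrows the join. Table and column names below are placeholders, not the real schema.

```ts
import { knex } from "knex";

const db = knex({ client: "pg", connection: process.env.DATABASE_URL }); // assumed connection string

const findProjectsForIdentity = (identityId: string, orgId: string, projectType: string | "all") =>
  db("identity_project_memberships") // placeholder table names
    .where({ identityId })
    .join("projects", "identity_project_memberships.projectId", "projects.id")
    .where("projects.orgId", orgId)
    .andWhere((qb) => {
      if (projectType !== "all") {
        void qb.where("projects.type", projectType);
      }
    })
    .select("projects.*");
```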
@@ -573,12 +573,16 @@ export const projectServiceFactory = ({

  const getProjects = async ({
    actorId,
    actor,
    includeRoles,
    actorAuthMethod,
    actorOrgId,
    type = ProjectType.SecretManager
  }: TListProjectsDTO) => {
    const workspaces = await projectDAL.findUserProjects(actorId, actorOrgId, type);
    const workspaces =
      actor === ActorType.IDENTITY
        ? await projectDAL.findIdentityProjects(actorId, actorOrgId, type)
        : await projectDAL.findUserProjects(actorId, actorOrgId, type);

    if (includeRoles) {
      const { permission } = await permissionService.getUserOrgPermission(
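Aside: a minimal sketch of the branch introduced above: machine identities resolve their project list through `findIdentityProjects`, users through `findUserProjects`. The `ActorType` string values are assumed to match the enum used elsewhere in the codebase.

```ts
enum ActorTypeDemo {
  USER = "user", // assumed string values
  IDENTITY = "identity"
}

type ProjectLister = {
  findUserProjects: (actorId: string, orgId: string, type: string) => Promise<unknown[]>;
  findIdentityProjects: (actorId: string, orgId: string, type: string) => Promise<unknown[]>;
};

const listProjects = (dal: ProjectLister, actor: ActorTypeDemo, actorId: string, orgId: string, type: string) =>
  actor === ActorTypeDemo.IDENTITY
    ? dal.findIdentityProjects(actorId, orgId, type)
    : dal.findUserProjects(actorId, orgId, type);
```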
@@ -13,7 +13,7 @@ export const BaseEmailWrapper = ({ title, preview, children, siteUrl }: BaseEmai
    <Html>
      <Head title={title} />
      <Tailwind>
        <Body className="bg-gray-300 my-auto mx-auto font-sans px-[8px]">
        <Body className="bg-gray-300 my-auto mx-auto font-sans px-[8px] py-[4px]">
          <Preview>{preview}</Preview>
          <Container className="bg-white rounded-xl my-[40px] mx-auto pb-[0px] max-w-[500px]">
            <Section className="border-0 border-b border-[#d1e309] border-solid bg-[#EBF852] mb-[44px] h-[10px] rounded-t-xl" />
@@ -0,0 +1,67 @@
|
||||
import { Button, Heading, Section, Text } from "@react-email/components";
|
||||
import React from "react";
|
||||
|
||||
import { BaseEmailWrapper, BaseEmailWrapperProps } from "./BaseEmailWrapper";
|
||||
|
||||
interface SecretScanningScanFailedTemplateProps extends Omit<BaseEmailWrapperProps, "title" | "preview" | "children"> {
|
||||
dataSourceName: string;
|
||||
resourceName: string;
|
||||
projectName: string;
|
||||
timestamp: string;
|
||||
url: string;
|
||||
errorMessage: string;
|
||||
}
|
||||
|
||||
export const SecretScanningScanFailedTemplate = ({
|
||||
dataSourceName,
|
||||
resourceName,
|
||||
projectName,
|
||||
siteUrl,
|
||||
errorMessage,
|
||||
url,
|
||||
timestamp
|
||||
}: SecretScanningScanFailedTemplateProps) => {
|
||||
return (
|
||||
<BaseEmailWrapper
|
||||
title="Secret Scanning Failed"
|
||||
preview="Infisical encountered an error while attempting to scan for secret leaks."
|
||||
siteUrl={siteUrl}
|
||||
>
|
||||
<Heading className="text-black text-[18px] leading-[28px] text-center font-normal p-0 mx-0">
|
||||
Infisical encountered an error while attempting to scan the resource <strong>{resourceName}</strong>
|
||||
</Heading>
|
||||
<Section className="px-[24px] mt-[36px] pt-[26px] pb-[4px] text-[14px] border border-solid border-gray-200 rounded-md bg-gray-50">
|
||||
<strong>Resource</strong>
|
||||
<Text className="text-[14px] mt-[4px]">{resourceName}</Text>
|
||||
<strong>Data Source</strong>
|
||||
<Text className="text-[14px] mt-[4px]">{dataSourceName}</Text>
|
||||
<strong>Project</strong>
|
||||
<Text className="text-[14px] mt-[4px]">{projectName}</Text>
|
||||
<strong>Timestamp</strong>
|
||||
<Text className="text-[14px] mt-[4px]">{timestamp}</Text>
|
||||
<strong>Error</strong>
|
||||
<Text className="text-[14px] text-red-500 mt-[4px]">{errorMessage}</Text>
|
||||
</Section>
|
||||
<Section className="text-center mt-[28px]">
|
||||
<Button
|
||||
href={url}
|
||||
className="rounded-md p-3 px-[28px] my-[8px] text-center text-[16px] bg-[#EBF852] border-solid border border-[#d1e309] text-black font-medium"
|
||||
>
|
||||
View in Infisical
|
||||
</Button>
|
||||
</Section>
|
||||
</BaseEmailWrapper>
|
||||
);
|
||||
};
|
||||
|
||||
export default SecretScanningScanFailedTemplate;
|
||||
|
||||
SecretScanningScanFailedTemplate.PreviewProps = {
|
||||
dataSourceName: "my-data-source",
|
||||
resourceName: "my-resource",
|
||||
projectName: "my-project",
|
||||
timestamp: "May 3rd 2025, 5:42 pm",
|
||||
url: "https://infisical.com",
|
||||
errorMessage: "401 Unauthorized",
|
||||
siteUrl: "https://infisical.com"
|
||||
} as SecretScanningScanFailedTemplateProps;
|
@@ -0,0 +1,101 @@
|
||||
import { Button, Heading, Link, Section, Text } from "@react-email/components";
|
||||
import React from "react";
|
||||
|
||||
import { BaseEmailWrapper, BaseEmailWrapperProps } from "./BaseEmailWrapper";
|
||||
|
||||
interface SecretScanningSecretsDetectedTemplateProps
|
||||
extends Omit<BaseEmailWrapperProps, "title" | "preview" | "children"> {
|
||||
numberOfSecrets: number;
|
||||
isDiffScan: boolean;
|
||||
authorName?: string;
|
||||
authorEmail?: string;
|
||||
resourceName: string;
|
||||
url: string;
|
||||
}
|
||||
|
||||
export const SecretScanningSecretsDetectedTemplate = ({
|
||||
numberOfSecrets,
|
||||
siteUrl,
|
||||
authorName,
|
||||
authorEmail,
|
||||
isDiffScan,
|
||||
resourceName,
|
||||
url
|
||||
}: SecretScanningSecretsDetectedTemplateProps) => {
|
||||
return (
|
||||
<BaseEmailWrapper
|
||||
title="Incident Alert: Secret(s) Leaked"
|
||||
preview="Infisical uncovered one or more leaked secrets."
|
||||
siteUrl={siteUrl}
|
||||
>
|
||||
<Heading className="text-black text-[18px] leading-[28px] text-center font-normal p-0 mx-0">
|
||||
Infisical has uncovered <strong>{numberOfSecrets}</strong> secret(s)
|
||||
{isDiffScan ? " from a recent commit to" : " in"} <strong>{resourceName}</strong>
|
||||
</Heading>
|
||||
<Section className="px-[24px] mt-[36px] pt-[8px] pb-[8px] text-[14px] border border-solid border-gray-200 rounded-md bg-gray-50">
|
||||
<Text className="text-[14px]">
|
||||
You are receiving this notification because one or more leaked secrets have been detected
|
||||
{isDiffScan && " in a recent commit"}
|
||||
{isDiffScan ? (
|
||||
(authorName || authorEmail) && (
|
||||
<>
|
||||
{" "}
|
||||
pushed by <strong>{authorName ?? "Unknown Pusher"}</strong>{" "}
|
||||
{authorEmail && (
|
||||
<>
|
||||
(
|
||||
<Link href={`mailto:${authorEmail}`} className="text-slate-700 no-underline">
|
||||
{authorEmail}
|
||||
</Link>
|
||||
)
|
||||
</>
|
||||
)}
|
||||
</>
|
||||
)
|
||||
) : (
|
||||
<>
|
||||
{" "}
|
||||
in your resource <strong>{resourceName}</strong>
|
||||
</>
|
||||
)}
|
||||
.
|
||||
</Text>
|
||||
<Text className="text-[14px]">
|
||||
If these are test secrets, please add `infisical-scan:ignore` at the end of the line containing the secret as
|
||||
a comment in the given programming language. This will prevent future notifications from being sent out for
|
||||
these secrets.
|
||||
</Text>
|
||||
<Text className="text-[14px] text-red-500">
|
||||
If these are production secrets, please rotate them immediately.
|
||||
</Text>
|
||||
<Text className="text-[14px]">
|
||||
Once you have taken action, be sure to update the finding status in the{" "}
|
||||
<Link href={url} className="text-slate-700 no-underline">
|
||||
Infisical Dashboard
|
||||
</Link>
|
||||
.
|
||||
</Text>
|
||||
</Section>
|
||||
<Section className="text-center mt-[28px]">
|
||||
<Button
|
||||
href={url}
|
||||
className="rounded-md p-3 px-[28px] my-[8px] text-center text-[16px] bg-[#EBF852] border-solid border border-[#d1e309] text-black font-medium"
|
||||
>
|
||||
View Leaked Secrets
|
||||
</Button>
|
||||
</Section>
|
||||
</BaseEmailWrapper>
|
||||
);
|
||||
};
|
||||
|
||||
export default SecretScanningSecretsDetectedTemplate;
|
||||
|
||||
SecretScanningSecretsDetectedTemplate.PreviewProps = {
|
||||
authorName: "Jim",
|
||||
authorEmail: "jim@infisical.com",
|
||||
resourceName: "my-resource",
|
||||
numberOfSecrets: 3,
|
||||
url: "https://infisical.com",
|
||||
isDiffScan: true,
|
||||
siteUrl: "https://infisical.com"
|
||||
} as SecretScanningSecretsDetectedTemplateProps;
|
@@ -21,6 +21,8 @@ export * from "./SecretLeakIncidentTemplate";
export * from "./SecretReminderTemplate";
export * from "./SecretRequestCompletedTemplate";
export * from "./SecretRotationFailedTemplate";
export * from "./SecretScanningScanFailedTemplate";
export * from "./SecretScanningSecretsDetectedTemplate";
export * from "./SecretSyncFailedTemplate";
export * from "./ServiceTokenExpiryNoticeTemplate";
export * from "./SignupEmailVerificationTemplate";
@@ -30,6 +30,8 @@ import {
  SecretReminderTemplate,
  SecretRequestCompletedTemplate,
  SecretRotationFailedTemplate,
  SecretScanningScanFailedTemplate,
  SecretScanningSecretsDetectedTemplate,
  SecretSyncFailedTemplate,
  ServiceTokenExpiryNoticeTemplate,
  SignupEmailVerificationTemplate,
@@ -73,7 +75,9 @@ export enum SmtpTemplates {
  ProjectAccessRequest = "projectAccess",
  OrgAdminProjectDirectAccess = "orgAdminProjectGrantAccess",
  OrgAdminBreakglassAccess = "orgAdminBreakglassAccess",
  ServiceTokenExpired = "serviceTokenExpired"
  ServiceTokenExpired = "serviceTokenExpired",
  SecretScanningV2ScanFailed = "secretScanningV2ScanFailed",
  SecretScanningV2SecretsDetected = "secretScanningV2SecretsDetected"
}

export enum SmtpHost {
@@ -113,7 +117,9 @@ const EmailTemplateMap: Record<SmtpTemplates, React.FC<any>> = {
  [SmtpTemplates.SecretApprovalRequestNeedsReview]: SecretApprovalRequestNeedsReviewTemplate,
  [SmtpTemplates.ResetPassword]: PasswordResetTemplate,
  [SmtpTemplates.SetupPassword]: PasswordSetupTemplate,
  [SmtpTemplates.PkiExpirationAlert]: PkiExpirationAlertTemplate
  [SmtpTemplates.PkiExpirationAlert]: PkiExpirationAlertTemplate,
  [SmtpTemplates.SecretScanningV2ScanFailed]: SecretScanningScanFailedTemplate,
  [SmtpTemplates.SecretScanningV2SecretsDetected]: SecretScanningSecretsDetectedTemplate
};

export const smtpServiceFactory = (cfg: TSmtpConfig) => {
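Aside: a minimal sketch of the registration pattern above. Because the map is typed `Record<SmtpTemplates, React.FC<any>>`, adding the two secret-scanning enum members without matching map entries (or vice versa) fails to compile. The components below are trivial stand-ins, not the real templates.

```ts
import React from "react";

enum DemoTemplates {
  ScanFailed = "secretScanningV2ScanFailed",
  SecretsDetected = "secretScanningV2SecretsDetected"
}

const ScanFailed: React.FC<{ resourceName: string }> = ({ resourceName }) =>
  React.createElement("p", null, `Scan failed for ${resourceName}`);
const SecretsDetected: React.FC<{ numberOfSecrets: number }> = ({ numberOfSecrets }) =>
  React.createElement("p", null, `${numberOfSecrets} secret(s) detected`);

// Record over the enum enforces exhaustiveness at compile time.
const DemoTemplateMap: Record<DemoTemplates, React.FC<any>> = {
  [DemoTemplates.ScanFailed]: ScanFailed,
  [DemoTemplates.SecretsDetected]: SecretsDetected
};

void DemoTemplateMap;
```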
@@ -12,6 +12,35 @@ import (
|
||||
|
||||
const USER_AGENT = "cli"
|
||||
|
||||
const (
|
||||
operationCallGetRawSecretsV3 = "CallGetRawSecretsV3"
|
||||
operationCallGetEncryptedWorkspaceKey = "CallGetEncryptedWorkspaceKey"
|
||||
operationCallGetServiceTokenDetails = "CallGetServiceTokenDetails"
|
||||
operationCallLogin1V3 = "CallLogin1V3"
|
||||
operationCallVerifyMfaToken = "CallVerifyMfaToken"
|
||||
operationCallLogin2V3 = "CallLogin2V3"
|
||||
operationCallGetAllOrganizations = "CallGetAllOrganizations"
|
||||
operationCallSelectOrganization = "CallSelectOrganization"
|
||||
operationCallGetAllWorkSpacesUserBelongsTo = "CallGetAllWorkSpacesUserBelongsTo"
|
||||
operationCallGetProjectById = "CallGetProjectById"
|
||||
operationCallIsAuthenticated = "CallIsAuthenticated"
|
||||
operationCallGetNewAccessTokenWithRefreshToken = "CallGetNewAccessTokenWithRefreshToken"
|
||||
operationCallGetFoldersV1 = "CallGetFoldersV1"
|
||||
operationCallCreateFolderV1 = "CallCreateFolderV1"
|
||||
operationCallDeleteFolderV1 = "CallDeleteFolderV1"
|
||||
operationCallDeleteSecretsV3 = "CallDeleteSecretsV3"
|
||||
operationCallCreateServiceToken = "CallCreateServiceToken"
|
||||
operationCallUniversalAuthLogin = "CallUniversalAuthLogin"
|
||||
operationCallMachineIdentityRefreshAccessToken = "CallMachineIdentityRefreshAccessToken"
|
||||
operationCallFetchSingleSecretByName = "CallFetchSingleSecretByName"
|
||||
operationCallCreateRawSecretsV3 = "CallCreateRawSecretsV3"
|
||||
operationCallUpdateRawSecretsV3 = "CallUpdateRawSecretsV3"
|
||||
operationCallRegisterGatewayIdentityV1 = "CallRegisterGatewayIdentityV1"
|
||||
operationCallExchangeRelayCertV1 = "CallExchangeRelayCertV1"
|
||||
operationCallGatewayHeartBeatV1 = "CallGatewayHeartBeatV1"
|
||||
operationCallBootstrapInstance = "CallBootstrapInstance"
|
||||
)
|
||||
|
||||
func CallGetEncryptedWorkspaceKey(httpClient *resty.Client, request GetEncryptedWorkspaceKeyRequest) (GetEncryptedWorkspaceKeyResponse, error) {
|
||||
endpoint := fmt.Sprintf("%v/v2/workspace/%v/encrypted-key", config.INFISICAL_URL, request.WorkspaceId)
|
||||
var result GetEncryptedWorkspaceKeyResponse
|
||||
@@ -22,11 +51,11 @@ func CallGetEncryptedWorkspaceKey(httpClient *resty.Client, request GetEncrypted
|
||||
Get(endpoint)
|
||||
|
||||
if err != nil {
|
||||
return GetEncryptedWorkspaceKeyResponse{}, fmt.Errorf("CallGetEncryptedWorkspaceKey: Unable to complete api request [err=%s]", err)
|
||||
return GetEncryptedWorkspaceKeyResponse{}, NewGenericRequestError(operationCallGetEncryptedWorkspaceKey, err)
|
||||
}
|
||||
|
||||
if response.IsError() {
|
||||
return GetEncryptedWorkspaceKeyResponse{}, fmt.Errorf("CallGetEncryptedWorkspaceKey: Unsuccessful response [%v %v] [status-code=%v]", response.Request.Method, response.Request.URL, response.StatusCode())
|
||||
return GetEncryptedWorkspaceKeyResponse{}, NewAPIErrorWithResponse(operationCallGetEncryptedWorkspaceKey, response, nil)
|
||||
}
|
||||
|
||||
return result, nil
|
||||
@@ -41,11 +70,11 @@ func CallGetServiceTokenDetailsV2(httpClient *resty.Client) (GetServiceTokenDeta
|
||||
Get(fmt.Sprintf("%v/v2/service-token", config.INFISICAL_URL))
|
||||
|
||||
if err != nil {
|
||||
return GetServiceTokenDetailsResponse{}, fmt.Errorf("CallGetServiceTokenDetails: Unable to complete api request [err=%s]", err)
|
||||
return GetServiceTokenDetailsResponse{}, NewGenericRequestError(operationCallGetServiceTokenDetails, err)
|
||||
}
|
||||
|
||||
if response.IsError() {
|
||||
return GetServiceTokenDetailsResponse{}, fmt.Errorf("CallGetServiceTokenDetails: Unsuccessful response: [response=%s]", response)
|
||||
return GetServiceTokenDetailsResponse{}, NewAPIErrorWithResponse(operationCallGetServiceTokenDetails, response, nil)
|
||||
}
|
||||
|
||||
return tokenDetailsResponse, nil
|
||||
@@ -61,11 +90,11 @@ func CallLogin1V2(httpClient *resty.Client, request GetLoginOneV2Request) (GetLo
|
||||
Post(fmt.Sprintf("%v/v3/auth/login1", config.INFISICAL_URL))
|
||||
|
||||
if err != nil {
|
||||
return GetLoginOneV2Response{}, fmt.Errorf("CallLogin1V3: Unable to complete api request [err=%s]", err)
|
||||
return GetLoginOneV2Response{}, NewGenericRequestError(operationCallLogin1V3, err)
|
||||
}
|
||||
|
||||
if response.IsError() {
|
||||
return GetLoginOneV2Response{}, fmt.Errorf("CallLogin1V3: Unsuccessful response: [response=%s]", response)
|
||||
return GetLoginOneV2Response{}, NewAPIErrorWithResponse(operationCallLogin1V3, response, nil)
|
||||
}
|
||||
|
||||
return loginOneV2Response, nil
|
||||
@@ -99,7 +128,7 @@ func CallVerifyMfaToken(httpClient *resty.Client, request VerifyMfaTokenRequest)
|
||||
}
|
||||
|
||||
if err != nil {
|
||||
return nil, nil, fmt.Errorf("CallVerifyMfaToken: Unable to complete api request [err=%s]", err)
|
||||
return nil, nil, NewGenericRequestError(operationCallVerifyMfaToken, err)
|
||||
}
|
||||
|
||||
if response.IsError() {
|
||||
@@ -135,11 +164,11 @@ func CallLogin2V2(httpClient *resty.Client, request GetLoginTwoV2Request) (GetLo
|
||||
}
|
||||
|
||||
if err != nil {
|
||||
return GetLoginTwoV2Response{}, fmt.Errorf("CallLogin2V3: Unable to complete api request [err=%s]", err)
|
||||
return GetLoginTwoV2Response{}, NewGenericRequestError(operationCallLogin2V3, err)
|
||||
}
|
||||
|
||||
if response.IsError() {
|
||||
return GetLoginTwoV2Response{}, fmt.Errorf("CallLogin2V3: Unsuccessful response: [response=%s]", response)
|
||||
return GetLoginTwoV2Response{}, NewAPIErrorWithResponse(operationCallLogin2V3, response, nil)
|
||||
}
|
||||
|
||||
return loginTwoV2Response, nil
|
||||
@@ -154,11 +183,11 @@ func CallGetAllOrganizations(httpClient *resty.Client) (GetOrganizationsResponse
|
||||
Get(fmt.Sprintf("%v/v1/organization", config.INFISICAL_URL))
|
||||
|
||||
if err != nil {
|
||||
return GetOrganizationsResponse{}, err
|
||||
return GetOrganizationsResponse{}, NewGenericRequestError(operationCallGetAllOrganizations, err)
|
||||
}
|
||||
|
||||
if response.IsError() {
|
||||
return GetOrganizationsResponse{}, fmt.Errorf("CallGetAllOrganizations: Unsuccessful response: [response=%v]", response)
|
||||
return GetOrganizationsResponse{}, NewAPIErrorWithResponse(operationCallGetAllOrganizations, response, nil)
|
||||
}
|
||||
|
||||
return orgResponse, nil
|
||||
@@ -175,11 +204,11 @@ func CallSelectOrganization(httpClient *resty.Client, request SelectOrganization
|
||||
Post(fmt.Sprintf("%v/v3/auth/select-organization", config.INFISICAL_URL))
|
||||
|
||||
if err != nil {
|
||||
return SelectOrganizationResponse{}, err
|
||||
return SelectOrganizationResponse{}, NewGenericRequestError(operationCallSelectOrganization, err)
|
||||
}
|
||||
|
||||
if response.IsError() {
|
||||
return SelectOrganizationResponse{}, fmt.Errorf("CallSelectOrganization: Unsuccessful response: [response=%v]", response)
|
||||
return SelectOrganizationResponse{}, NewAPIErrorWithResponse(operationCallSelectOrganization, response, nil)
|
||||
}
|
||||
|
||||
return selectOrgResponse, nil
|
||||
@@ -214,11 +243,11 @@ func CallGetProjectById(httpClient *resty.Client, id string) (Project, error) {
|
||||
Get(fmt.Sprintf("%v/v1/workspace/%s", config.INFISICAL_URL, id))
|
||||
|
||||
if err != nil {
|
||||
return Project{}, err
|
||||
return Project{}, NewGenericRequestError(operationCallGetProjectById, err)
|
||||
}
|
||||
|
||||
if response.IsError() {
|
||||
return Project{}, fmt.Errorf("CallGetProjectById: Unsuccessful response: [response=%v]", response)
|
||||
return Project{}, NewAPIErrorWithResponse(operationCallGetProjectById, response, nil)
|
||||
}
|
||||
|
||||
return projectResponse.Project, nil
|
||||
@@ -237,7 +266,7 @@ func CallIsAuthenticated(httpClient *resty.Client) bool {
|
||||
}
|
||||
|
||||
if response.IsError() {
|
||||
log.Debug().Msgf("CallIsAuthenticated: Unsuccessful response: [response=%v]", response)
|
||||
log.Debug().Msgf("%s: Unsuccessful response: [response=%v]", operationCallIsAuthenticated, response)
|
||||
return false
|
||||
}
|
||||
|
||||
@@ -257,11 +286,11 @@ func CallGetNewAccessTokenWithRefreshToken(httpClient *resty.Client, refreshToke
|
||||
Post(fmt.Sprintf("%v/v1/auth/token", config.INFISICAL_URL))
|
||||
|
||||
if err != nil {
|
||||
return GetNewAccessTokenWithRefreshTokenResponse{}, err
|
||||
return GetNewAccessTokenWithRefreshTokenResponse{}, NewGenericRequestError(operationCallGetNewAccessTokenWithRefreshToken, err)
|
||||
}
|
||||
|
||||
if response.IsError() {
|
||||
return GetNewAccessTokenWithRefreshTokenResponse{}, fmt.Errorf("CallGetNewAccessTokenWithRefreshToken: Unsuccessful response: [response=%v]", response)
|
||||
return GetNewAccessTokenWithRefreshTokenResponse{}, NewAPIErrorWithResponse(operationCallGetNewAccessTokenWithRefreshToken, response, nil)
|
||||
}
|
||||
|
||||
return newAccessToken, nil
|
||||
@@ -280,11 +309,11 @@ func CallGetFoldersV1(httpClient *resty.Client, request GetFoldersV1Request) (Ge
|
||||
response, err := httpRequest.Get(fmt.Sprintf("%v/v1/folders", config.INFISICAL_URL))
|
||||
|
||||
if err != nil {
|
||||
return GetFoldersV1Response{}, fmt.Errorf("CallGetFoldersV1: Unable to complete api request [err=%v]", err)
|
||||
return GetFoldersV1Response{}, NewGenericRequestError(operationCallGetFoldersV1, err)
|
||||
}
|
||||
|
||||
if response.IsError() {
|
||||
return GetFoldersV1Response{}, fmt.Errorf("CallGetFoldersV1: Unsuccessful [response=%s]", response)
|
||||
return GetFoldersV1Response{}, NewAPIErrorWithResponse(operationCallGetFoldersV1, response, nil)
|
||||
}
|
||||
|
||||
return foldersResponse, nil
|
||||
@@ -300,11 +329,11 @@ func CallCreateFolderV1(httpClient *resty.Client, request CreateFolderV1Request)
|
||||
|
||||
response, err := httpRequest.Post(fmt.Sprintf("%v/v1/folders", config.INFISICAL_URL))
|
||||
if err != nil {
|
||||
return CreateFolderV1Response{}, fmt.Errorf("CallCreateFolderV1: Unable to complete api request [err=%s]", err)
|
||||
return CreateFolderV1Response{}, NewGenericRequestError(operationCallCreateFolderV1, err)
|
||||
}
|
||||
|
||||
if response.IsError() {
|
||||
return CreateFolderV1Response{}, fmt.Errorf("CallCreateFolderV1: Unsuccessful [response=%s]", response.String())
|
||||
return CreateFolderV1Response{}, NewAPIErrorWithResponse(operationCallCreateFolderV1, response, nil)
|
||||
}
|
||||
|
||||
return folderResponse, nil
|
||||
@@ -321,11 +350,11 @@ func CallDeleteFolderV1(httpClient *resty.Client, request DeleteFolderV1Request)
|
||||
|
||||
response, err := httpRequest.Delete(fmt.Sprintf("%v/v1/folders/%v", config.INFISICAL_URL, request.FolderName))
|
||||
if err != nil {
|
||||
return DeleteFolderV1Response{}, fmt.Errorf("CallDeleteFolderV1: Unable to complete api request [err=%s]", err)
|
||||
return DeleteFolderV1Response{}, NewGenericRequestError(operationCallDeleteFolderV1, err)
|
||||
}
|
||||
|
||||
if response.IsError() {
|
||||
return DeleteFolderV1Response{}, fmt.Errorf("CallDeleteFolderV1: Unsuccessful [response=%s]", response.String())
|
||||
return DeleteFolderV1Response{}, NewAPIErrorWithResponse(operationCallDeleteFolderV1, response, nil)
|
||||
}
|
||||
|
||||
return folderResponse, nil
|
||||
@@ -342,11 +371,12 @@ func CallDeleteSecretsRawV3(httpClient *resty.Client, request DeleteSecretV3Requ
|
||||
Delete(fmt.Sprintf("%v/v3/secrets/raw/%s", config.INFISICAL_URL, request.SecretName))
|
||||
|
||||
if err != nil {
|
||||
return fmt.Errorf("CallDeleteSecretsV3: Unable to complete api request [err=%s]", err)
|
||||
return NewGenericRequestError(operationCallDeleteSecretsV3, err)
|
||||
}
|
||||
|
||||
if response.IsError() {
|
||||
return fmt.Errorf("CallDeleteSecretsV3: Unsuccessful response. Please make sure your secret path, workspace and environment name are all correct [response=%s]", response)
|
||||
additionalContext := "Please make sure your secret path, workspace and environment name are all correct."
|
||||
return NewAPIErrorWithResponse(operationCallDeleteSecretsV3, response, &additionalContext)
|
||||
}
|
||||
|
||||
return nil
|
||||
@@ -362,11 +392,11 @@ func CallCreateServiceToken(httpClient *resty.Client, request CreateServiceToken
|
||||
Post(fmt.Sprintf("%v/v2/service-token/", config.INFISICAL_URL))
|
||||
|
||||
if err != nil {
|
||||
return CreateServiceTokenResponse{}, fmt.Errorf("CallCreateServiceToken: Unable to complete api request [err=%s]", err)
|
||||
return CreateServiceTokenResponse{}, NewGenericRequestError(operationCallCreateServiceToken, err)
|
||||
}
|
||||
|
||||
if response.IsError() {
|
||||
return CreateServiceTokenResponse{}, fmt.Errorf("CallCreateServiceToken: Unsuccessful response [%v %v] [status-code=%v]", response.Request.Method, response.Request.URL, response.StatusCode())
|
||||
return CreateServiceTokenResponse{}, NewAPIErrorWithResponse(operationCallCreateServiceToken, response, nil)
|
||||
}
|
||||
|
||||
return createServiceTokenResponse, nil
|
||||
@@ -382,11 +412,11 @@ func CallUniversalAuthLogin(httpClient *resty.Client, request UniversalAuthLogin
|
||||
Post(fmt.Sprintf("%v/v1/auth/universal-auth/login/", config.INFISICAL_URL))
|
||||
|
||||
if err != nil {
|
||||
return UniversalAuthLoginResponse{}, fmt.Errorf("CallUniversalAuthLogin: Unable to complete api request [err=%s]", err)
|
||||
return UniversalAuthLoginResponse{}, NewGenericRequestError(operationCallUniversalAuthLogin, err)
|
||||
}
|
||||
|
||||
if response.IsError() {
|
||||
return UniversalAuthLoginResponse{}, fmt.Errorf("CallUniversalAuthLogin: Unsuccessful response [%v %v] [status-code=%v] [response=%v]", response.Request.Method, response.Request.URL, response.StatusCode(), response.String())
|
||||
return UniversalAuthLoginResponse{}, NewAPIErrorWithResponse(operationCallUniversalAuthLogin, response, nil)
|
||||
}
|
||||
|
||||
return universalAuthLoginResponse, nil
|
||||
@@ -402,11 +432,11 @@ func CallMachineIdentityRefreshAccessToken(httpClient *resty.Client, request Uni
|
||||
Post(fmt.Sprintf("%v/v1/auth/token/renew", config.INFISICAL_URL))
|
||||
|
||||
if err != nil {
|
||||
return UniversalAuthRefreshResponse{}, fmt.Errorf("CallMachineIdentityRefreshAccessToken: Unable to complete api request [err=%s]", err)
|
||||
return UniversalAuthRefreshResponse{}, NewGenericRequestError(operationCallMachineIdentityRefreshAccessToken, err)
|
||||
}
|
||||
|
||||
if response.IsError() {
|
||||
return UniversalAuthRefreshResponse{}, fmt.Errorf("CallMachineIdentityRefreshAccessToken: Unsuccessful response [%v %v] [status-code=%v] [response=%v]", response.Request.Method, response.Request.URL, response.StatusCode(), response.String())
|
||||
return UniversalAuthRefreshResponse{}, NewAPIErrorWithResponse(operationCallMachineIdentityRefreshAccessToken, response, nil)
|
||||
}
|
||||
|
||||
return universalAuthRefreshResponse, nil
|
||||
@@ -441,19 +471,19 @@ func CallGetRawSecretsV3(httpClient *resty.Client, request GetRawSecretsV3Reques
|
||||
response, err := req.Get(fmt.Sprintf("%v/v3/secrets/raw", config.INFISICAL_URL))
|
||||
|
||||
if err != nil {
|
||||
return GetRawSecretsV3Response{}, fmt.Errorf("CallGetRawSecretsV3: Unable to complete api request [err=%w]", err)
|
||||
return GetRawSecretsV3Response{}, NewGenericRequestError(operationCallGetRawSecretsV3, err)
|
||||
}
|
||||
|
||||
if response.IsError() &&
|
||||
(strings.Contains(response.String(), "bot_not_found_error") ||
|
||||
strings.Contains(strings.ToLower(response.String()), "failed to find bot key") ||
|
||||
strings.Contains(strings.ToLower(response.String()), "bot is not active")) {
|
||||
return GetRawSecretsV3Response{}, fmt.Errorf(`Project with id %s is incompatible with your current CLI version. Upgrade your project by visiting the project settings page. If you're self-hosting and project upgrade option isn't yet available, contact your administrator to upgrade your Infisical instance to the latest release.
|
||||
`, request.WorkspaceId)
|
||||
additionalContext := fmt.Sprintf(`Project with id %s is incompatible with your current CLI version. Upgrade your project by visiting the project settings page. If you're self-hosting and project upgrade option isn't yet available, contact your administrator to upgrade your Infisical instance to the latest release.`, request.WorkspaceId)
|
||||
return GetRawSecretsV3Response{}, NewAPIErrorWithResponse(operationCallGetRawSecretsV3, response, &additionalContext)
|
||||
}
|
||||
|
||||
if response.IsError() {
|
||||
return GetRawSecretsV3Response{}, fmt.Errorf("CallGetRawSecretsV3: Unsuccessful response [%v %v] [status-code=%v] [response=%v]", response.Request.Method, response.Request.URL, response.StatusCode(), response.String())
|
||||
return GetRawSecretsV3Response{}, NewAPIErrorWithResponse(operationCallGetRawSecretsV3, response, nil)
|
||||
}
|
||||
|
||||
getRawSecretsV3Response.ETag = response.Header().Get(("etag"))
|
||||
@@ -477,11 +507,11 @@ func CallFetchSingleSecretByName(httpClient *resty.Client, request GetRawSecretV
|
||||
Get(fmt.Sprintf("%v/v3/secrets/raw/%s", config.INFISICAL_URL, request.SecretName))
|
||||
|
||||
if err != nil {
|
||||
return GetRawSecretV3ByNameResponse{}, fmt.Errorf("CallFetchSingleSecretByName: Unable to complete api request [err=%w]", err)
|
||||
return GetRawSecretV3ByNameResponse{}, NewGenericRequestError(operationCallFetchSingleSecretByName, err)
|
||||
}
|
||||
|
||||
if response.IsError() {
|
||||
return GetRawSecretV3ByNameResponse{}, fmt.Errorf("CallFetchSingleSecretByName: Unsuccessful response [%v %v] [status-code=%v] [response=%v]", response.Request.Method, response.Request.URL, response.StatusCode(), response.String())
|
||||
return GetRawSecretV3ByNameResponse{}, NewAPIErrorWithResponse(operationCallFetchSingleSecretByName, response, nil)
|
||||
}
|
||||
|
||||
getRawSecretV3ByNameResponse.ETag = response.Header().Get(("etag"))
|
||||
@@ -517,11 +547,11 @@ func CallCreateRawSecretsV3(httpClient *resty.Client, request CreateRawSecretV3R
|
||||
Post(fmt.Sprintf("%v/v3/secrets/raw/%s", config.INFISICAL_URL, request.SecretName))
|
||||
|
||||
if err != nil {
|
||||
return fmt.Errorf("CallCreateRawSecretsV3: Unable to complete api request [err=%w]", err)
|
||||
return NewGenericRequestError(operationCallCreateRawSecretsV3, err)
|
||||
}
|
||||
|
||||
if response.IsError() {
|
||||
return fmt.Errorf("CallCreateRawSecretsV3: Unsuccessful response [%v %v] [status-code=%v] [response=%v]", response.Request.Method, response.Request.URL, response.StatusCode(), response.String())
|
||||
return NewAPIErrorWithResponse(operationCallCreateRawSecretsV3, response, nil)
|
||||
}
|
||||
|
||||
return nil
|
||||
@@ -535,11 +565,11 @@ func CallUpdateRawSecretsV3(httpClient *resty.Client, request UpdateRawSecretByN
|
||||
Patch(fmt.Sprintf("%v/v3/secrets/raw/%s", config.INFISICAL_URL, request.SecretName))
|
||||
|
||||
if err != nil {
|
||||
return fmt.Errorf("CallUpdateRawSecretsV3: Unable to complete api request [err=%w]", err)
|
||||
return NewGenericRequestError(operationCallUpdateRawSecretsV3, err)
|
||||
}
|
||||
|
||||
if response.IsError() {
|
||||
return fmt.Errorf("CallUpdateRawSecretsV3: Unsuccessful response [%v %v] [status-code=%v] [response=%v]", response.Request.Method, response.Request.URL, response.StatusCode(), response.String())
|
||||
return NewAPIErrorWithResponse(operationCallUpdateRawSecretsV3, response, nil)
|
||||
}
|
||||
|
||||
return nil
|
||||
@@ -554,11 +584,11 @@ func CallRegisterGatewayIdentityV1(httpClient *resty.Client) (*GetRelayCredentia
|
||||
Post(fmt.Sprintf("%v/v1/gateways/register-identity", config.INFISICAL_URL))
|
||||
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("CallRegisterGatewayIdentityV1: Unable to complete api request [err=%w]", err)
|
||||
return nil, NewGenericRequestError(operationCallRegisterGatewayIdentityV1, err)
|
||||
}
|
||||
|
||||
if response.IsError() {
|
||||
return nil, fmt.Errorf("CallRegisterGatewayIdentityV1: Unsuccessful response [%v %v] [status-code=%v] [response=%v]", response.Request.Method, response.Request.URL, response.StatusCode(), response.String())
|
||||
return nil, NewAPIErrorWithResponse(operationCallRegisterGatewayIdentityV1, response, nil)
|
||||
}
|
||||
|
||||
return &resBody, nil
|
||||
@@ -574,11 +604,11 @@ func CallExchangeRelayCertV1(httpClient *resty.Client, request ExchangeRelayCert
|
||||
Post(fmt.Sprintf("%v/v1/gateways/exchange-cert", config.INFISICAL_URL))
|
||||
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("CallExchangeRelayCertV1: Unable to complete api request [err=%w]", err)
|
||||
return nil, NewGenericRequestError(operationCallExchangeRelayCertV1, err)
|
||||
}
|
||||
|
||||
if response.IsError() {
|
||||
return nil, fmt.Errorf("CallExchangeRelayCertV1: Unsuccessful response [%v %v] [status-code=%v] [response=%v]", response.Request.Method, response.Request.URL, response.StatusCode(), response.String())
|
||||
return nil, NewAPIErrorWithResponse(operationCallExchangeRelayCertV1, response, nil)
|
||||
}
|
||||
|
||||
return &resBody, nil
|
||||
@@ -591,11 +621,11 @@ func CallGatewayHeartBeatV1(httpClient *resty.Client) error {
|
||||
Post(fmt.Sprintf("%v/v1/gateways/heartbeat", config.INFISICAL_URL))
|
||||
|
||||
if err != nil {
|
||||
return fmt.Errorf("CallGatewayHeartBeatV1: Unable to complete api request [err=%w]", err)
|
||||
return NewGenericRequestError(operationCallGatewayHeartBeatV1, err)
|
||||
}
|
||||
|
||||
if response.IsError() {
|
||||
return fmt.Errorf("CallGatewayHeartBeatV1: Unsuccessful response [%v %v] [status-code=%v] [response=%v]", response.Request.Method, response.Request.URL, response.StatusCode(), response.String())
|
||||
return NewAPIErrorWithResponse(operationCallGatewayHeartBeatV1, response, nil)
|
||||
}
|
||||
|
||||
return nil
|
||||
@@ -611,11 +641,11 @@ func CallBootstrapInstance(httpClient *resty.Client, request BootstrapInstanceRe
|
||||
Post(fmt.Sprintf("%v/v1/admin/bootstrap", request.Domain))
|
||||
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("CallBootstrapInstance: Unable to complete api request [err=%w]", err)
|
||||
return nil, NewGenericRequestError(operationCallBootstrapInstance, err)
|
||||
}
|
||||
|
||||
if response.IsError() {
|
||||
return nil, fmt.Errorf("CallBootstrapInstance: Unsuccessful response [%v %v] [status-code=%v] [response=%v]", response.Request.Method, response.Request.URL, response.StatusCode(), response.String())
|
||||
return nil, NewAPIErrorWithResponse(operationCallBootstrapInstance, response, nil)
|
||||
}
|
||||
|
||||
return resBody, nil
|
||||
|
cli/packages/api/errors.go (new file, 80 additions)
@@ -0,0 +1,80 @@
package api

import (
	"fmt"

	"github.com/go-resty/resty/v2"
	"github.com/infisical/go-sdk/packages/util"
)

type GenericRequestError struct {
	err       error
	operation string
}

func (e *GenericRequestError) Error() string {
	return fmt.Sprintf("%s: Unable to complete api request [err=%v]", e.operation, e.err)
}

func NewGenericRequestError(operation string, err error) *GenericRequestError {
	return &GenericRequestError{err: err, operation: operation}
}

// APIError represents an error response from the API
type APIError struct {
	AdditionalContext string `json:"additionalContext,omitempty"`
	Operation         string `json:"operation"`
	Method            string `json:"method"`
	URL               string `json:"url"`
	StatusCode        int    `json:"statusCode"`
	ErrorMessage      string `json:"message,omitempty"`
	ReqId             string `json:"reqId,omitempty"`
}

func (e *APIError) Error() string {
	msg := fmt.Sprintf(
		"%s Unsuccessful response [%v %v] [status-code=%v] [request-id=%v]",
		e.Operation,
		e.Method,
		e.URL,
		e.StatusCode,
		e.ReqId,
	)

	if e.ErrorMessage != "" {
		msg = fmt.Sprintf("%s [message=\"%s\"]", msg, e.ErrorMessage)
	}

	if e.AdditionalContext != "" {
		msg = fmt.Sprintf("%s [additional-context=\"%s\"]", msg, e.AdditionalContext)
	}

	return msg
}

func NewAPIErrorWithResponse(operation string, res *resty.Response, additionalContext *string) error {
	errorMessage := util.TryParseErrorBody(res)
	reqId := util.TryExtractReqId(res)

	if res == nil {
		return NewGenericRequestError(operation, fmt.Errorf("response is nil"))
	}

	apiError := &APIError{
		Operation:  operation,
		Method:     res.Request.Method,
		URL:        res.Request.URL,
		StatusCode: res.StatusCode(),
		ReqId:      reqId,
	}

	if additionalContext != nil && *additionalContext != "" {
		apiError.AdditionalContext = *additionalContext
	}

	if errorMessage != "" {
		apiError.ErrorMessage = errorMessage
	}

	return apiError
}
@@ -63,7 +63,7 @@ func getDynamicSecretList(cmd *cobra.Command, args []string) {
|
||||
if projectId == "" {
|
||||
workspaceFile, err := util.GetWorkSpaceFromFile()
|
||||
if err != nil {
|
||||
util.HandleError(err, "Unable to get local project details")
|
||||
util.PrintErrorMessageAndExit("Please either run infisical init to connect to a project or pass in project id with --projectId flag")
|
||||
}
|
||||
projectId = workspaceFile.WorkspaceId
|
||||
}
|
||||
@@ -72,7 +72,6 @@ func getDynamicSecretList(cmd *cobra.Command, args []string) {
|
||||
infisicalToken = token.Token
|
||||
} else {
|
||||
util.RequireLogin()
|
||||
util.RequireLocalWorkspaceFile()
|
||||
|
||||
loggedInUserDetails, err := util.GetCurrentLoggedInUserDetails(true)
|
||||
if err != nil {
|
||||
@@ -181,7 +180,7 @@ func createDynamicSecretLeaseByName(cmd *cobra.Command, args []string) {
|
||||
if projectId == "" {
|
||||
workspaceFile, err := util.GetWorkSpaceFromFile()
|
||||
if err != nil {
|
||||
util.HandleError(err, "Unable to get local project details")
|
||||
util.PrintErrorMessageAndExit("Please either run infisical init to connect to a project or pass in project id with --projectId flag")
|
||||
}
|
||||
projectId = workspaceFile.WorkspaceId
|
||||
}
|
||||
@@ -190,7 +189,6 @@ func createDynamicSecretLeaseByName(cmd *cobra.Command, args []string) {
|
||||
infisicalToken = token.Token
|
||||
} else {
|
||||
util.RequireLogin()
|
||||
util.RequireLocalWorkspaceFile()
|
||||
|
||||
loggedInUserDetails, err := util.GetCurrentLoggedInUserDetails(true)
|
||||
if err != nil {
|
||||
@@ -312,7 +310,7 @@ func renewDynamicSecretLeaseByName(cmd *cobra.Command, args []string) {
|
||||
if projectId == "" {
|
||||
workspaceFile, err := util.GetWorkSpaceFromFile()
|
||||
if err != nil {
|
||||
util.HandleError(err, "Unable to get local project details")
|
||||
util.PrintErrorMessageAndExit("Please either run infisical init to connect to a project or pass in project id with --projectId flag")
|
||||
}
|
||||
projectId = workspaceFile.WorkspaceId
|
||||
}
|
||||
@@ -321,7 +319,6 @@ func renewDynamicSecretLeaseByName(cmd *cobra.Command, args []string) {
|
||||
infisicalToken = token.Token
|
||||
} else {
|
||||
util.RequireLogin()
|
||||
util.RequireLocalWorkspaceFile()
|
||||
|
||||
loggedInUserDetails, err := util.GetCurrentLoggedInUserDetails(true)
|
||||
if err != nil {
|
||||
@@ -420,7 +417,7 @@ func revokeDynamicSecretLeaseByName(cmd *cobra.Command, args []string) {
|
||||
if projectId == "" {
|
||||
workspaceFile, err := util.GetWorkSpaceFromFile()
|
||||
if err != nil {
|
||||
util.HandleError(err, "Unable to get local project details")
|
||||
util.PrintErrorMessageAndExit("Please either run infisical init to connect to a project or pass in project id with --projectId flag")
|
||||
}
|
||||
projectId = workspaceFile.WorkspaceId
|
||||
}
|
||||
@@ -429,7 +426,6 @@ func revokeDynamicSecretLeaseByName(cmd *cobra.Command, args []string) {
|
||||
infisicalToken = token.Token
|
||||
} else {
|
||||
util.RequireLogin()
|
||||
util.RequireLocalWorkspaceFile()
|
||||
|
||||
loggedInUserDetails, err := util.GetCurrentLoggedInUserDetails(true)
|
||||
if err != nil {
|
||||
@@ -527,7 +523,7 @@ func listDynamicSecretLeaseByName(cmd *cobra.Command, args []string) {
|
||||
if projectId == "" {
|
||||
workspaceFile, err := util.GetWorkSpaceFromFile()
|
||||
if err != nil {
|
||||
util.HandleError(err, "Unable to get local project details")
|
||||
util.PrintErrorMessageAndExit("Please either run infisical init to connect to a project or pass in project id with --projectId flag")
|
||||
}
|
||||
projectId = workspaceFile.WorkspaceId
|
||||
}
|
||||
@@ -536,7 +532,6 @@ func listDynamicSecretLeaseByName(cmd *cobra.Command, args []string) {
|
||||
infisicalToken = token.Token
|
||||
} else {
|
||||
util.RequireLogin()
|
||||
util.RequireLocalWorkspaceFile()
|
||||
|
||||
loggedInUserDetails, err := util.GetCurrentLoggedInUserDetails(true)
|
||||
if err != nil {
|
||||
|
@@ -112,7 +112,7 @@ var createCmd = &cobra.Command{
|
||||
if projectId == "" {
|
||||
workspaceFile, err := util.GetWorkSpaceFromFile()
|
||||
if err != nil {
|
||||
util.HandleError(err, "Unable to get workspace file")
|
||||
util.PrintErrorMessageAndExit("Please either run infisical init to connect to a project or pass in project id with --projectId flag")
|
||||
}
|
||||
|
||||
projectId = workspaceFile.WorkspaceId
|
||||
@@ -180,7 +180,7 @@ var deleteCmd = &cobra.Command{
|
||||
if projectId == "" {
|
||||
workspaceFile, err := util.GetWorkSpaceFromFile()
|
||||
if err != nil {
|
||||
util.HandleError(err, "Unable to get workspace file")
|
||||
util.PrintErrorMessageAndExit("Please either run infisical init to connect to a project or pass in project id with --projectId flag")
|
||||
}
|
||||
|
||||
projectId = workspaceFile.WorkspaceId
|
||||
|
@@ -158,10 +158,6 @@ var secretsSetCmd = &cobra.Command{
|
||||
util.HandleError(err, "Unable to parse flag")
|
||||
}
|
||||
|
||||
if token == nil {
|
||||
util.RequireLocalWorkspaceFile()
|
||||
}
|
||||
|
||||
environmentName, _ := cmd.Flags().GetString("env")
|
||||
if !cmd.Flags().Changed("env") {
|
||||
environmentFromWorkspace := util.GetEnvFromWorkspaceFile()
|
||||
@@ -175,6 +171,13 @@ var secretsSetCmd = &cobra.Command{
|
||||
util.HandleError(err, "Unable to parse flag")
|
||||
}
|
||||
|
||||
if token == nil && projectId == "" {
|
||||
_, err := util.GetWorkSpaceFromFile()
|
||||
if err != nil {
|
||||
util.PrintErrorMessageAndExit("Please either run infisical init to connect to a project or pass in project id with --projectId flag")
|
||||
}
|
||||
}
|
||||
|
||||
secretsPath, err := cmd.Flags().GetString("path")
|
||||
if err != nil {
|
||||
util.HandleError(err, "Unable to parse flag")
|
||||
@@ -225,7 +228,7 @@ var secretsSetCmd = &cobra.Command{
|
||||
if projectId == "" {
|
||||
workspaceFile, err := util.GetWorkSpaceFromFile()
|
||||
if err != nil {
|
||||
util.HandleError(err, "unable to get your local config details [err=%v]")
|
||||
util.PrintErrorMessageAndExit("Please either run infisical init to connect to a project or pass in project id with --projectId flag")
|
||||
}
|
||||
|
||||
projectId = workspaceFile.WorkspaceId
|
||||
@@ -308,7 +311,7 @@ var secretsDeleteCmd = &cobra.Command{
|
||||
if projectId == "" {
|
||||
workspaceFile, err := util.GetWorkSpaceFromFile()
|
||||
if err != nil {
|
||||
util.HandleError(err, "Unable to get local project details")
|
||||
util.PrintErrorMessageAndExit("Please either run infisical init to connect to a project or pass in project id with --projectId flag")
|
||||
}
|
||||
projectId = workspaceFile.WorkspaceId
|
||||
}
|
||||
@@ -317,7 +320,6 @@ var secretsDeleteCmd = &cobra.Command{
|
||||
httpClient.SetAuthToken(token.Token)
|
||||
} else {
|
||||
util.RequireLogin()
|
||||
util.RequireLocalWorkspaceFile()
|
||||
|
||||
loggedInUserDetails, err := util.GetCurrentLoggedInUserDetails(true)
|
||||
if err != nil {
|
||||
|
@@ -15,7 +15,6 @@ func GetAllFolders(params models.GetAllFoldersParameters) ([]models.SingleFolder
var folderErr error
if params.InfisicalToken == "" && params.UniversalAuthAccessToken == "" {
RequireLogin()
- RequireLocalWorkspaceFile()

log.Debug().Msg("GetAllFolders: Trying to fetch folders using logged in details")

@@ -28,16 +27,15 @@ func GetAllFolders(params models.GetAllFoldersParameters) ([]models.SingleFolder
loggedInUserDetails = EstablishUserLoginSession()
}

- workspaceFile, err := GetWorkSpaceFromFile()
- if err != nil {
- return nil, err
+ if params.WorkspaceId == "" {
+ workspaceFile, err := GetWorkSpaceFromFile()
+ if err != nil {
+ PrintErrorMessageAndExit("Please either run infisical init to connect to a project or pass in project id with --projectId flag")
+ }
+ params.WorkspaceId = workspaceFile.WorkspaceId
}

- if params.WorkspaceId != "" {
- workspaceFile.WorkspaceId = params.WorkspaceId
- }
-
- folders, err := GetFoldersViaJTW(loggedInUserDetails.UserCredentials.JTWToken, workspaceFile.WorkspaceId, params.Environment, params.FoldersPath)
+ folders, err := GetFoldersViaJTW(loggedInUserDetails.UserCredentials.JTWToken, params.WorkspaceId, params.Environment, params.FoldersPath)
folderErr = err
foldersToReturn = folders
} else if params.InfisicalToken != "" {

@@ -186,7 +184,6 @@ func CreateFolder(params models.CreateFolderParameters) (models.SingleFolder, er
// If no token is provided, we will try to get the token from the current logged in user
if params.InfisicalToken == "" {
RequireLogin()
- RequireLocalWorkspaceFile()
loggedInUserDetails, err := GetCurrentLoggedInUserDetails(true)

if err != nil {

@@ -235,7 +232,6 @@ func DeleteFolder(params models.DeleteFolderParameters) ([]models.SingleFolder,
// If no token is provided, we will try to get the token from the current logged in user
if params.InfisicalToken == "" {
RequireLogin()
- RequireLocalWorkspaceFile()

loggedInUserDetails, err := GetCurrentLoggedInUserDetails(true)
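Taken together, the command hunks and the GetAllFolders/CreateFolder/DeleteFolder hunks above apply the same fallback: prefer an explicitly passed --projectId / WorkspaceId, fall back to the infisical.json read by GetWorkSpaceFromFile(), and exit with the "run infisical init or pass --projectId" message only when neither is available. A minimal, self-contained Go sketch of that resolution order follows; resolveWorkspaceId and the stubbed helpers are illustrative stand-ins, not the CLI's actual functions.

package main

import (
    "errors"
    "fmt"
    "os"
)

// workspaceConfig mirrors the shape of infisical.json for this sketch.
type workspaceConfig struct {
    WorkspaceId string
}

// getWorkSpaceFromFile is a stand-in for util.GetWorkSpaceFromFile().
func getWorkSpaceFromFile() (workspaceConfig, error) {
    return workspaceConfig{}, errors.New("no infisical.json found")
}

// resolveWorkspaceId applies the fallback order used throughout the diff:
// explicit flag value first, then the local workspace file, else a hard exit.
func resolveWorkspaceId(flagValue string) string {
    if flagValue != "" {
        return flagValue
    }
    cfg, err := getWorkSpaceFromFile()
    if err != nil {
        fmt.Println("Please either run infisical init to connect to a project or pass in project id with --projectId flag")
        os.Exit(1)
    }
    return cfg.WorkspaceId
}

func main() {
    // Explicit id wins; the workspace file is only read when the flag is empty.
    fmt.Println(resolveWorkspaceId("proj_123"))
}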
@@ -251,10 +251,15 @@ func GetAllEnvironmentVariables(params models.GetAllSecretsParameters, projectCo
var errorToReturn error

if params.InfisicalToken == "" && params.UniversalAuthAccessToken == "" {
- if projectConfigFilePath == "" {
- RequireLocalWorkspaceFile()
- } else {
- ValidateWorkspaceFile(projectConfigFilePath)
+ if params.WorkspaceId == "" {
+ if projectConfigFilePath == "" {
+ _, err := GetWorkSpaceFromFile()
+ if err != nil {
+ PrintErrorMessageAndExit("Please either run infisical init to connect to a project or pass in project id with --projectId flag")
+ }
+ } else {
+ ValidateWorkspaceFile(projectConfigFilePath)
+ }
}

RequireLogin()

@@ -276,29 +281,28 @@ func GetAllEnvironmentVariables(params models.GetAllSecretsParameters, projectCo
loggedInUserDetails = EstablishUserLoginSession()
}

- var infisicalDotJson models.WorkspaceConfigFile
+ if params.WorkspaceId == "" {
+ var infisicalDotJson models.WorkspaceConfigFile

- if projectConfigFilePath == "" {
- projectConfig, err := GetWorkSpaceFromFile()
- if err != nil {
- return nil, err
+ if projectConfigFilePath == "" {
+ projectConfig, err := GetWorkSpaceFromFile()
+ if err != nil {
+ PrintErrorMessageAndExit("Please either run infisical init to connect to a project or pass in project id with --projectId flag")
+ }
+
+ infisicalDotJson = projectConfig
+ } else {
+ projectConfig, err := GetWorkSpaceFromFilePath(projectConfigFilePath)
+ if err != nil {
+ return nil, err
+ }
+
+ infisicalDotJson = projectConfig
}

- infisicalDotJson = projectConfig
- } else {
- projectConfig, err := GetWorkSpaceFromFilePath(projectConfigFilePath)
- if err != nil {
- return nil, err
- }
-
- infisicalDotJson = projectConfig
+ params.WorkspaceId = infisicalDotJson.WorkspaceId
}

- if params.WorkspaceId != "" {
- infisicalDotJson.WorkspaceId = params.WorkspaceId
- }
-
- res, err := GetPlainTextSecretsV3(loggedInUserDetails.UserCredentials.JTWToken, infisicalDotJson.WorkspaceId,
+ res, err := GetPlainTextSecretsV3(loggedInUserDetails.UserCredentials.JTWToken, params.WorkspaceId,
params.Environment, params.SecretsPath, params.IncludeImport, params.Recursive, params.TagSlugs, true)
log.Debug().Msgf("GetAllEnvironmentVariables: Trying to fetch secrets JTW token [err=%s]", err)

@@ -307,7 +311,7 @@ func GetAllEnvironmentVariables(params models.GetAllSecretsParameters, projectCo
if err != nil {
return nil, err
}
- WriteBackupSecrets(infisicalDotJson.WorkspaceId, params.Environment, params.SecretsPath, backupEncryptionKey, res.Secrets)
+ WriteBackupSecrets(params.WorkspaceId, params.Environment, params.SecretsPath, backupEncryptionKey, res.Secrets)
}

secretsToReturn = res.Secrets

@@ -316,7 +320,7 @@ func GetAllEnvironmentVariables(params models.GetAllSecretsParameters, projectCo
if !isConnected {
backupEncryptionKey, _ := GetBackupEncryptionKey()
if backupEncryptionKey != nil {
- backedUpSecrets, err := ReadBackupSecrets(infisicalDotJson.WorkspaceId, params.Environment, params.SecretsPath, backupEncryptionKey)
+ backedUpSecrets, err := ReadBackupSecrets(params.WorkspaceId, params.Environment, params.SecretsPath, backupEncryptionKey)
if len(backedUpSecrets) > 0 {
PrintWarning("Unable to fetch the latest secret(s) due to connection error, serving secrets from last successful fetch. For more info, run with --debug")
secretsToReturn = backedUpSecrets
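The WriteBackupSecrets/ReadBackupSecrets hunks above switch the backup key from the workspace file's id to the resolved params.WorkspaceId, so the offline fallback still works when the project id comes from a flag instead of infisical.json. Below is a rough, hypothetical sketch of that keying and fallback flow; fallbackFetch and backupKey are invented names for illustration, not the CLI's identifiers.

package main

import (
    "errors"
    "fmt"
)

// backupKey mirrors how the diff keys backups: workspace id + environment + secrets path.
type backupKey struct {
    WorkspaceId string
    Environment string
    SecretsPath string
}

// fallbackFetch tries the live fetch first; on success it refreshes the backup
// store (the WriteBackupSecrets step), and on failure it serves the last backup.
func fallbackFetch(key backupKey, live func() (map[string]string, error), backups map[backupKey]map[string]string) (map[string]string, error) {
    secrets, err := live()
    if err == nil {
        backups[key] = secrets
        return secrets, nil
    }
    if cached, ok := backups[key]; ok && len(cached) > 0 {
        fmt.Println("Unable to fetch the latest secret(s), serving secrets from last successful fetch")
        return cached, nil
    }
    return nil, err
}

func main() {
    backups := map[backupKey]map[string]string{}
    key := backupKey{WorkspaceId: "proj_123", Environment: "dev", SecretsPath: "/"}
    backups[key] = map[string]string{"DB_URL": "postgres://localhost"} // pretend a previous run backed this up

    offline := func() (map[string]string, error) { return nil, errors.New("connection refused") }
    secrets, _ := fallbackFetch(key, offline, backups)
    fmt.Println(secrets["DB_URL"])
}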
@@ -1,4 +1,4 @@
- error: CallGetRawSecretsV3: Unsuccessful response [GET https://app.infisical.com/api/v3/secrets/raw?environment=invalid-env&expandSecretReferences=true&include_imports=true&recursive=true&secretPath=%2F&workspaceId=bef697d4-849b-4a75-b284-0922f87f8ba2] [status-code=404] [response={"error":"NotFound","message":"Environment with slug 'invalid-env' in project with ID bef697d4-849b-4a75-b284-0922f87f8ba2 not found","statusCode":404}]
+ error: CallGetRawSecretsV3 Unsuccessful response [GET https://app.infisical.com/api/v3/secrets/raw?environment=invalid-env&expandSecretReferences=true&include_imports=true&recursive=true&secretPath=%2F&workspaceId=bef697d4-849b-4a75-b284-0922f87f8ba2] [status-code=404] [request-id=<unknown-value>] [message="Environment with slug 'invalid-env' in project with ID bef697d4-849b-4a75-b284-0922f87f8ba2 not found"]


If this issue continues, get support at https://infisical.com/slack
@@ -6,6 +6,7 @@ import (
"log"
"os"
"os/exec"
+ "regexp"
"strings"
)

@@ -71,7 +72,11 @@ func SetupCli() {
}

func FilterRequestID(input string) string {
// Find the JSON part of the error message
+ requestIDPattern := regexp.MustCompile(`\[request-id=[^\]]+\]`)
+ reqIDPattern := regexp.MustCompile(`\[reqId=[^\]]+\]`)
+ input = requestIDPattern.ReplaceAllString(input, "[request-id=<unknown-value>]")
+ input = reqIDPattern.ReplaceAllString(input, "[reqId=<unknown-value>]")

start := strings.Index(input, "{")
end := strings.LastIndex(input, "}") + 1
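For context on why the snapshot above can pin [request-id=<unknown-value>]: the regex lines added to FilterRequestID replace whatever request id the API returned with a stable placeholder before the output is compared against the snapshot. A standalone sketch of the same normalization, assuming only the two patterns shown in the diff and using a made-up error string:

package main

import (
    "fmt"
    "regexp"
)

// normalize applies the same idea as FilterRequestID: replace volatile request ids
// with stable placeholders so snapshot output is deterministic between test runs.
func normalize(input string) string {
    requestIDPattern := regexp.MustCompile(`\[request-id=[^\]]+\]`)
    reqIDPattern := regexp.MustCompile(`\[reqId=[^\]]+\]`)
    input = requestIDPattern.ReplaceAllString(input, "[request-id=<unknown-value>]")
    return reqIDPattern.ReplaceAllString(input, "[reqId=<unknown-value>]")
}

func main() {
    raw := `error: CallGetRawSecretsV3 Unsuccessful response [status-code=404] [request-id=7f3c2a1b]`
    fmt.Println(normalize(raw))
    // prints: error: CallGetRawSecretsV3 Unsuccessful response [status-code=404] [request-id=<unknown-value>]
}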
@@ -0,0 +1,4 @@
---
title: "Available"
openapi: "GET /api/v1/app-connections/github-radar/available"
---

@@ -0,0 +1,10 @@
---
title: "Create"
openapi: "POST /api/v1/app-connections/github-radar"
---

<Note>
GitHub Radar Connections must be created through the Infisical UI.
Check out the configuration docs for [GitHub Radar Connections](/integrations/app-connections/github-radar) for a step-by-step
guide.
</Note>

@@ -0,0 +1,4 @@
---
title: "Delete"
openapi: "DELETE /api/v1/app-connections/github-radar/{connectionId}"
---

@@ -0,0 +1,4 @@
---
title: "Get by ID"
openapi: "GET /api/v1/app-connections/github-radar/{connectionId}"
---

@@ -0,0 +1,4 @@
---
title: "Get by Name"
openapi: "GET /api/v1/app-connections/github-radar/connection-name/{connectionName}"
---

@@ -0,0 +1,4 @@
---
title: "List"
openapi: "GET /api/v1/app-connections/github-radar"
---

@@ -0,0 +1,10 @@
---
title: "Update"
openapi: "PATCH /api/v1/app-connections/github-radar/{connectionId}"
---

<Note>
GitHub Radar Connections must be updated through the Infisical UI.
Check out the configuration docs for [GitHub Radar Connections](/integrations/app-connections/github-radar) for a step-by-step
guide.
</Note>

@@ -0,0 +1,4 @@
---
title: "Get by Project ID"
openapi: "GET /api/v2/secret-scanning/configs"
---

@@ -0,0 +1,8 @@
---
title: "Update"
openapi: "PATCH /api/v2/secret-scanning/configs"
---

<Note>
Check out the [Configuration Docs](/documentation/platform/secret-scanning/overview#configuration) for an in-depth guide on custom configurations.
</Note>

@@ -0,0 +1,4 @@
---
title: "Create"
openapi: "POST /api/v2/secret-scanning/data-sources/github"
---

@@ -0,0 +1,4 @@
---
title: "Delete"
openapi: "DELETE /api/v2/secret-scanning/data-sources/github/{dataSourceId}"
---

@@ -0,0 +1,4 @@
---
title: "Get by ID"
openapi: "GET /api/v2/secret-scanning/data-sources/github/{dataSourceId}"
---

@@ -0,0 +1,4 @@
---
title: "Get by Name"
openapi: "GET /api/v2/secret-scanning/data-sources/github/data-source-name/{dataSourceName}"
---
Some files were not shown because too many files have changed in this diff.