Mirror of https://github.com/Infisical/infisical.git (synced 2025-09-07 10:22:29 +00:00)

Compare commits: daniel/php...v0.148.0-n (88 commits)
Commits (88; the author and date columns were empty in this capture), in order:

05da013a3b, 306297e7f4, 55e89631aa, 57e2f4be55, e72a9606c2, 467d88c019, 9bc87f5ece, fd98d2d301,
490fdd5601, 2f7d22688b, 81183c7e7b, 3a75a50d0b, 17b6ab0db0, d274bc0112, 8981af6da3, f0ea9871d2,
4e2d1cd5ca, 485b4fcf7c, ab2146367c, 0c9ade33dd, 0f5e451f1a, d534e82646, b53c250ded, fbb243b2a0,
f9288a4d2b, 6c2ea93822, fcb931670a, ab2fae1516, 93a942c49e, e2be867c95, 0baa0dcfb7, 94027239e0,
0c26fcbb0f, 035156bcc3, c116eb9ed2, 8b84fc093f, 00a522f9d0, 839b27d5bf, 1909fae076, 5b09caa097,
d5e99d7fc6, 735ddc1138, 3b235e3668, 5c2dc32ded, d84572532a, 93341ef6e5, 3d78984320, 3dae165710,
a94635e5be, 912cd5d20a, e29a0e487e, 8aa270545d, 3c24132e97, 38a7cb896b, 6abd58ee21, c8275f41a3,
48283d2826, a6d8ca5a6b, c6b1af5737, e263c95a14, 4e16b0ac8f, 785262fb9a, ba1cd33e38, b26ca68fe1,
8467286aa3, cea43d497d, 3700597ba7, 65f0597bd8, 5b3cae7255, a4ff6340f8, bfb2486204, c29b5e37f3,
2b1a36a96d, e666409026, ecfc8b5f87, 435bcd03d3, 4d6e12d6b2, 88155576a2, 394538769b, f7828ed458,
b40bb72643, 4f1cd69bcc, 4d4b4c13c3, c8bf9049de, ab91863c77, 14473c742c, 4063cf5294, a7f33d669f
@@ -314,8 +314,8 @@ describe("Secret expansion", () => {
       expect(listSecrets.imports).toEqual(
         expect.arrayContaining([
           expect.objectContaining({
-            secretPath: `/__reserve_replication_${secretImportFromProdToDev.id}`,
-            environment: seedData1.environment.slug,
+            secretPath: "/deep/nested",
+            environment: "prod",
             secrets: expect.arrayContaining([
               expect.objectContaining({
                 secretKey: "NESTED_KEY_1",
backend/package-lock.json (generated; 44 changed lines)
@@ -25,6 +25,7 @@
     "@fastify/multipart": "8.3.1",
     "@fastify/passport": "^2.4.0",
     "@fastify/rate-limit": "^9.0.0",
+    "@fastify/reply-from": "^9.8.0",
     "@fastify/request-context": "^5.1.0",
     "@fastify/session": "^10.7.0",
     "@fastify/static": "^7.0.4",
@@ -8044,6 +8045,42 @@
         "toad-cache": "^3.3.0"
       }
     },
+    "node_modules/@fastify/reply-from": {
+      "version": "9.8.0",
+      "resolved": "https://registry.npmjs.org/@fastify/reply-from/-/reply-from-9.8.0.tgz",
+      "integrity": "sha512-bPNVaFhEeNI0Lyl6404YZaPFokudCplidE3QoOcr78yOy6H9sYw97p5KPYvY/NJNUHfFtvxOaSAHnK+YSiv/Mg==",
+      "license": "MIT",
+      "dependencies": {
+        "@fastify/error": "^3.0.0",
+        "end-of-stream": "^1.4.4",
+        "fast-content-type-parse": "^1.1.0",
+        "fast-querystring": "^1.0.0",
+        "fastify-plugin": "^4.0.0",
+        "toad-cache": "^3.7.0",
+        "undici": "^5.19.1"
+      }
+    },
+    "node_modules/@fastify/reply-from/node_modules/@fastify/busboy": {
+      "version": "2.1.1",
+      "resolved": "https://registry.npmjs.org/@fastify/busboy/-/busboy-2.1.1.tgz",
+      "integrity": "sha512-vBZP4NlzfOlerQTnba4aqZoMhE/a9HY7HRqoOPaETQcSQuWEIyZMHGfVu6w9wGtGK5fED5qRs2DteVCjOH60sA==",
+      "license": "MIT",
+      "engines": {
+        "node": ">=14"
+      }
+    },
+    "node_modules/@fastify/reply-from/node_modules/undici": {
+      "version": "5.29.0",
+      "resolved": "https://registry.npmjs.org/undici/-/undici-5.29.0.tgz",
+      "integrity": "sha512-raqeBD6NQK4SkWhQzeYKd1KmIG6dllBOTt55Rmkt4HtI9mwdWtJljnrXjAFUBLTSN67HWrOIZ3EPF4kjUw80Bg==",
+      "license": "MIT",
+      "dependencies": {
+        "@fastify/busboy": "^2.0.0"
+      },
+      "engines": {
+        "node": ">=14.0"
+      }
+    },
     "node_modules/@fastify/request-context": {
       "version": "5.1.0",
       "resolved": "https://registry.npmjs.org/@fastify/request-context/-/request-context-5.1.0.tgz",
@@ -29330,9 +29367,10 @@
       }
     },
     "node_modules/toad-cache": {
-      "version": "3.3.0",
-      "resolved": "https://registry.npmjs.org/toad-cache/-/toad-cache-3.3.0.tgz",
-      "integrity": "sha512-3oDzcogWGHZdkwrHyvJVpPjA7oNzY6ENOV3PsWJY9XYPZ6INo94Yd47s5may1U+nleBPwDhrRiTPMIvKaa3MQg==",
+      "version": "3.7.0",
+      "resolved": "https://registry.npmjs.org/toad-cache/-/toad-cache-3.7.0.tgz",
+      "integrity": "sha512-/m8M+2BJUpoJdgAHoG+baCwBT+tf2VraSfkBgl0Y00qIWt41DJ8R5B8nsEw0I58YwF5IZH6z24/2TobDKnqSWw==",
+      "license": "MIT",
       "engines": {
         "node": ">=12"
       }
backend/package.json (file header inferred from the hunk contents)

@@ -145,6 +145,7 @@
     "@fastify/multipart": "8.3.1",
     "@fastify/passport": "^2.4.0",
     "@fastify/rate-limit": "^9.0.0",
+    "@fastify/reply-from": "^9.8.0",
     "@fastify/request-context": "^5.1.0",
     "@fastify/session": "^10.7.0",
     "@fastify/static": "^7.0.4",
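@fastify/reply-from is an HTTP proxying plugin; taken together with the `shouldForwardWritesToPrimaryInstance` request flag added in the fastify.d.ts diff below, it suggests write requests can be proxied from a replica to a primary instance. A minimal sketch of how reply-from is typically wired; the base URL and the guard logic are assumptions, not taken from this diff:

```ts
import Fastify from "fastify";
import replyFrom from "@fastify/reply-from";

const app = Fastify();

// Hypothetical primary-instance URL; the real wiring is not shown in this diff.
await app.register(replyFrom, { base: "https://primary.internal.example.com" });

app.post("/api/v1/example", (req, reply) => {
  // `shouldForwardWritesToPrimaryInstance` is declared on FastifyRequest in the
  // fastify.d.ts hunk below; how it gets set is outside this diff.
  if (req.shouldForwardWritesToPrimaryInstance) {
    return reply.from(req.raw.url ?? "/"); // proxy the request to the primary
  }
  return reply.send({ ok: true });
});
```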
backend/src/@types/fastify.d.ts (vendored; 9 changed lines)
@@ -1,13 +1,13 @@
 import "fastify";

-import { Redis } from "ioredis";
+import { Cluster, Redis } from "ioredis";

 import { TUsers } from "@app/db/schemas";
 import { TAccessApprovalPolicyServiceFactory } from "@app/ee/services/access-approval-policy/access-approval-policy-types";
 import { TAccessApprovalRequestServiceFactory } from "@app/ee/services/access-approval-request/access-approval-request-types";
 import { TAssumePrivilegeServiceFactory } from "@app/ee/services/assume-privilege/assume-privilege-types";
 import { TAuditLogServiceFactory, TCreateAuditLogDTO } from "@app/ee/services/audit-log/audit-log-types";
-import { TAuditLogStreamServiceFactory } from "@app/ee/services/audit-log-stream/audit-log-stream-types";
+import { TAuditLogStreamServiceFactory } from "@app/ee/services/audit-log-stream/audit-log-stream-service";
 import { TCertificateAuthorityCrlServiceFactory } from "@app/ee/services/certificate-authority-crl/certificate-authority-crl-types";
 import { TCertificateEstServiceFactory } from "@app/ee/services/certificate-est/certificate-est-service";
 import { TDynamicSecretServiceFactory } from "@app/ee/services/dynamic-secret/dynamic-secret-types";
@@ -83,6 +83,7 @@ import { TIdentityUaServiceFactory } from "@app/services/identity-ua/identity-ua
 import { TIntegrationServiceFactory } from "@app/services/integration/integration-service";
 import { TIntegrationAuthServiceFactory } from "@app/services/integration-auth/integration-auth-service";
 import { TMicrosoftTeamsServiceFactory } from "@app/services/microsoft-teams/microsoft-teams-service";
+import { TOfflineUsageReportServiceFactory } from "@app/services/offline-usage-report/offline-usage-report-service";
 import { TOrgRoleServiceFactory } from "@app/services/org/org-role-service";
 import { TOrgServiceFactory } from "@app/services/org/org-service";
 import { TOrgAdminServiceFactory } from "@app/services/org-admin/org-admin-service";
@@ -161,6 +162,7 @@ declare module "fastify" {
     };
     // identity injection: depending on which kind of token is used, the information is filled into `auth`
     auth: TAuthMode;
+    shouldForwardWritesToPrimaryInstance: boolean;
     permission: {
       authMethod: ActorAuthMethod;
       type: ActorType;
@@ -194,7 +196,7 @@ declare module "fastify" {
   }

   interface FastifyInstance {
-    redis: Redis;
+    redis: Redis | Cluster;
     services: {
       login: TAuthLoginFactory;
       password: TAuthPasswordFactory;
@@ -303,6 +305,7 @@ declare module "fastify" {
       bus: TEventBusService;
       sse: TServerSentEventsService;
       identityAuthTemplate: TIdentityAuthTemplateServiceFactory;
+      offlineUsageReport: TOfflineUsageReportServiceFactory;
     };
     // this is exclusively for middlewares that need to inject data;
     // everywhere else, access it through the service layer
backend/src/db/migrations/20250903191434_audit-log-stream-v2.ts (new file; 221 lines)
@@ -0,0 +1,221 @@
import { Knex } from "knex";

import { inMemoryKeyStore } from "@app/keystore/memory";
import { crypto } from "@app/lib/crypto/cryptography";
import { KmsDataKey } from "@app/services/kms/kms-types";
import { superAdminDALFactory } from "@app/services/super-admin/super-admin-dal";

import { SecretKeyEncoding, TableName } from "../schemas";
import { getMigrationEnvConfig } from "./utils/env-config";
import { createCircularCache } from "./utils/ring-buffer";
import { getMigrationEncryptionServices } from "./utils/services";

const BATCH_SIZE = 500;

export async function up(knex: Knex): Promise<void> {
  if (await knex.schema.hasTable(TableName.AuditLogStream)) {
    const hasProvider = await knex.schema.hasColumn(TableName.AuditLogStream, "provider");
    const hasEncryptedCredentials = await knex.schema.hasColumn(TableName.AuditLogStream, "encryptedCredentials");

    await knex.schema.alterTable(TableName.AuditLogStream, (t) => {
      if (!hasProvider) t.string("provider").notNullable().defaultTo("custom");
      if (!hasEncryptedCredentials) t.binary("encryptedCredentials");

      // This column will no longer be used, but we keep it (rather than dropping it) as a backup in case the migration goes wrong
      t.string("url").nullable().alter();
    });

    if (!hasEncryptedCredentials) {
      const superAdminDAL = superAdminDALFactory(knex);
      const envConfig = await getMigrationEnvConfig(superAdminDAL);
      const keyStore = inMemoryKeyStore();

      const { kmsService } = await getMigrationEncryptionServices({ envConfig, keyStore, db: knex });

      const orgEncryptionRingBuffer =
        createCircularCache<Awaited<ReturnType<(typeof kmsService)["createCipherPairWithDataKey"]>>>(25);

      const logStreams = await knex(TableName.AuditLogStream).select(
        "id",
        "orgId",
        "url",
        "encryptedHeadersAlgorithm",
        "encryptedHeadersCiphertext",
        "encryptedHeadersIV",
        "encryptedHeadersKeyEncoding",
        "encryptedHeadersTag"
      );

      const updatedLogStreams = await Promise.all(
        logStreams.map(async (el) => {
          let orgKmsService = orgEncryptionRingBuffer.getItem(el.orgId);
          if (!orgKmsService) {
            orgKmsService = await kmsService.createCipherPairWithDataKey(
              {
                type: KmsDataKey.Organization,
                orgId: el.orgId
              },
              knex
            );
            orgEncryptionRingBuffer.push(el.orgId, orgKmsService);
          }

          const provider = "custom";
          let credentials;

          if (
            el.encryptedHeadersTag &&
            el.encryptedHeadersIV &&
            el.encryptedHeadersCiphertext &&
            el.encryptedHeadersKeyEncoding
          ) {
            const decryptedHeaders = crypto
              .encryption()
              .symmetric()
              .decryptWithRootEncryptionKey({
                tag: el.encryptedHeadersTag,
                iv: el.encryptedHeadersIV,
                ciphertext: el.encryptedHeadersCiphertext,
                keyEncoding: el.encryptedHeadersKeyEncoding as SecretKeyEncoding
              });

            credentials = {
              url: el.url,
              headers: JSON.parse(decryptedHeaders)
            };
          } else {
            credentials = {
              url: el.url,
              headers: []
            };
          }

          const encryptedCredentials = orgKmsService.encryptor({
            plainText: Buffer.from(JSON.stringify(credentials), "utf8")
          }).cipherTextBlob;

          return {
            id: el.id,
            orgId: el.orgId,
            url: el.url,
            provider,
            encryptedCredentials
          };
        })
      );

      for (let i = 0; i < updatedLogStreams.length; i += BATCH_SIZE) {
        // eslint-disable-next-line no-await-in-loop
        await knex(TableName.AuditLogStream)
          .insert(updatedLogStreams.slice(i, i + BATCH_SIZE))
          .onConflict("id")
          .merge();
      }

      await knex.schema.alterTable(TableName.AuditLogStream, (t) => {
        t.binary("encryptedCredentials").notNullable().alter();
      });
    }
  }
}

// IMPORTANT: The down migration does not use the existing "url" and encrypted-header columns;
// it takes the latest data from the credentials column and re-encrypts it back into those columns.
//
// If this down migration were to fail, you can fall back to the existing URL and encrypted-header
// columns to retrieve data that was created prior to this migration.

export async function down(knex: Knex): Promise<void> {
  if (await knex.schema.hasTable(TableName.AuditLogStream)) {
    const hasProvider = await knex.schema.hasColumn(TableName.AuditLogStream, "provider");
    const hasEncryptedCredentials = await knex.schema.hasColumn(TableName.AuditLogStream, "encryptedCredentials");

    if (hasEncryptedCredentials) {
      const superAdminDAL = superAdminDALFactory(knex);
      const envConfig = await getMigrationEnvConfig(superAdminDAL);
      const keyStore = inMemoryKeyStore();

      const { kmsService } = await getMigrationEncryptionServices({ envConfig, keyStore, db: knex });

      const orgEncryptionRingBuffer =
        createCircularCache<Awaited<ReturnType<(typeof kmsService)["createCipherPairWithDataKey"]>>>(25);

      const logStreamsToRevert = await knex(TableName.AuditLogStream)
        .select("id", "orgId", "encryptedCredentials")
        .where("provider", "custom")
        .whereNotNull("encryptedCredentials");

      const updatedLogStreams = await Promise.all(
        logStreamsToRevert.map(async (el) => {
          let orgKmsService = orgEncryptionRingBuffer.getItem(el.orgId);
          if (!orgKmsService) {
            orgKmsService = await kmsService.createCipherPairWithDataKey(
              {
                type: KmsDataKey.Organization,
                orgId: el.orgId
              },
              knex
            );
            orgEncryptionRingBuffer.push(el.orgId, orgKmsService);
          }

          const decryptedCredentials = orgKmsService
            .decryptor({
              cipherTextBlob: el.encryptedCredentials
            })
            .toString();

          const credentials: { url: string; headers: { key: string; value: string }[] } =
            JSON.parse(decryptedCredentials);

          const originalUrl: string = credentials.url;

          const encryptedHeadersResult = crypto
            .encryption()
            .symmetric()
            .encryptWithRootEncryptionKey(JSON.stringify(credentials.headers), envConfig);

          const encryptedHeadersAlgorithm: string = encryptedHeadersResult.algorithm;
          const encryptedHeadersCiphertext: string = encryptedHeadersResult.ciphertext;
          const encryptedHeadersIV: string = encryptedHeadersResult.iv;
          const encryptedHeadersKeyEncoding: string = encryptedHeadersResult.encoding;
          const encryptedHeadersTag: string = encryptedHeadersResult.tag;

          return {
            id: el.id,
            orgId: el.orgId,
            encryptedCredentials: el.encryptedCredentials,

            url: originalUrl,
            encryptedHeadersAlgorithm,
            encryptedHeadersCiphertext,
            encryptedHeadersIV,
            encryptedHeadersKeyEncoding,
            encryptedHeadersTag
          };
        })
      );

      for (let i = 0; i < updatedLogStreams.length; i += BATCH_SIZE) {
        // eslint-disable-next-line no-await-in-loop
        await knex(TableName.AuditLogStream)
          .insert(updatedLogStreams.slice(i, i + BATCH_SIZE))
          .onConflict("id")
          .merge();
      }

      await knex(TableName.AuditLogStream)
        .where((qb) => {
          void qb.whereNot("provider", "custom").orWhereNull("url");
        })
        .del();
    }

    await knex.schema.alterTable(TableName.AuditLogStream, (t) => {
      t.string("url").notNullable().alter();

      if (hasProvider) t.dropColumn("provider");
      if (hasEncryptedCredentials) t.dropColumn("encryptedCredentials");
    });
  }
}
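For reference, the JSON blob the up-migration encrypts into `encryptedCredentials` always has the same shape for migrated rows (derived directly from the migration code above; the example values are illustrative):

```ts
// Every migrated row gets provider "custom" and a credentials blob of this shape:
type TMigratedCustomCredentials = {
  url: string | null; // copied from the legacy "url" column
  headers: { key: string; value: string }[]; // [] when no encrypted headers existed
};

const example: TMigratedCustomCredentials = {
  url: "https://logs.example.com/ingest", // hypothetical endpoint
  headers: [{ key: "Authorization", value: "Bearer <token>" }]
};
```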
(audit log streams model in backend/src/db/schemas; exact filename not shown in this capture)

@@ -5,11 +5,13 @@
 import { z } from "zod";

+import { zodBuffer } from "@app/lib/zod";
+
 import { TImmutableDBKeys } from "./models";

 export const AuditLogStreamsSchema = z.object({
   id: z.string().uuid(),
-  url: z.string(),
+  url: z.string().nullable().optional(),
   encryptedHeadersCiphertext: z.string().nullable().optional(),
   encryptedHeadersIV: z.string().nullable().optional(),
   encryptedHeadersTag: z.string().nullable().optional(),
@@ -17,7 +19,9 @@ export const AuditLogStreamsSchema = z.object({
   encryptedHeadersKeyEncoding: z.string().nullable().optional(),
   orgId: z.string().uuid(),
   createdAt: z.date(),
-  updatedAt: z.date()
+  updatedAt: z.date(),
+  provider: z.string().default("custom"),
+  encryptedCredentials: zodBuffer
 });

 export type TAuditLogStreams = z.infer<typeof AuditLogStreamsSchema>;
backend/src/ee/routes/v1/audit-log-stream-router.ts (deleted; path inferred from the import removed in the v1 index diff below)

@@ -1,215 +0,0 @@
import { z } from "zod";

import { AUDIT_LOG_STREAMS } from "@app/lib/api-docs";
import { readLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { SanitizedAuditLogStreamSchema } from "@app/server/routes/sanitizedSchemas";
import { AuthMode } from "@app/services/auth/auth-type";

export const registerAuditLogStreamRouter = async (server: FastifyZodProvider) => {
  server.route({
    method: "POST",
    url: "/",
    config: {
      rateLimit: readLimit
    },
    schema: {
      description: "Create an Audit Log Stream.",
      security: [
        {
          bearerAuth: []
        }
      ],
      body: z.object({
        url: z.string().min(1).describe(AUDIT_LOG_STREAMS.CREATE.url),
        headers: z
          .object({
            key: z.string().min(1).trim().describe(AUDIT_LOG_STREAMS.CREATE.headers.key),
            value: z.string().min(1).trim().describe(AUDIT_LOG_STREAMS.CREATE.headers.value)
          })
          .describe(AUDIT_LOG_STREAMS.CREATE.headers.desc)
          .array()
          .optional()
      }),
      response: {
        200: z.object({
          auditLogStream: SanitizedAuditLogStreamSchema
        })
      }
    },
    onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
    handler: async (req) => {
      const auditLogStream = await server.services.auditLogStream.create({
        actorId: req.permission.id,
        actor: req.permission.type,
        actorOrgId: req.permission.orgId,
        actorAuthMethod: req.permission.authMethod,
        url: req.body.url,
        headers: req.body.headers
      });

      return { auditLogStream };
    }
  });

  server.route({
    method: "PATCH",
    url: "/:id",
    config: {
      rateLimit: readLimit
    },
    schema: {
      description: "Update an Audit Log Stream by ID.",
      security: [
        {
          bearerAuth: []
        }
      ],
      params: z.object({
        id: z.string().describe(AUDIT_LOG_STREAMS.UPDATE.id)
      }),
      body: z.object({
        url: z.string().optional().describe(AUDIT_LOG_STREAMS.UPDATE.url),
        headers: z
          .object({
            key: z.string().min(1).trim().describe(AUDIT_LOG_STREAMS.UPDATE.headers.key),
            value: z.string().min(1).trim().describe(AUDIT_LOG_STREAMS.UPDATE.headers.value)
          })
          .describe(AUDIT_LOG_STREAMS.UPDATE.headers.desc)
          .array()
          .optional()
      }),
      response: {
        200: z.object({
          auditLogStream: SanitizedAuditLogStreamSchema
        })
      }
    },
    onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
    handler: async (req) => {
      const auditLogStream = await server.services.auditLogStream.updateById({
        actorId: req.permission.id,
        actor: req.permission.type,
        actorOrgId: req.permission.orgId,
        actorAuthMethod: req.permission.authMethod,
        id: req.params.id,
        url: req.body.url,
        headers: req.body.headers
      });

      return { auditLogStream };
    }
  });

  server.route({
    method: "DELETE",
    url: "/:id",
    config: {
      rateLimit: readLimit
    },
    schema: {
      description: "Delete an Audit Log Stream by ID.",
      security: [
        {
          bearerAuth: []
        }
      ],
      params: z.object({
        id: z.string().describe(AUDIT_LOG_STREAMS.DELETE.id)
      }),
      response: {
        200: z.object({
          auditLogStream: SanitizedAuditLogStreamSchema
        })
      }
    },
    onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
    handler: async (req) => {
      const auditLogStream = await server.services.auditLogStream.deleteById({
        actorId: req.permission.id,
        actor: req.permission.type,
        actorOrgId: req.permission.orgId,
        actorAuthMethod: req.permission.authMethod,
        id: req.params.id
      });

      return { auditLogStream };
    }
  });

  server.route({
    method: "GET",
    url: "/:id",
    config: {
      rateLimit: readLimit
    },
    schema: {
      description: "Get an Audit Log Stream by ID.",
      security: [
        {
          bearerAuth: []
        }
      ],
      params: z.object({
        id: z.string().describe(AUDIT_LOG_STREAMS.GET_BY_ID.id)
      }),
      response: {
        200: z.object({
          auditLogStream: SanitizedAuditLogStreamSchema.extend({
            headers: z
              .object({
                key: z.string(),
                value: z.string()
              })
              .array()
              .optional()
          })
        })
      }
    },
    onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
    handler: async (req) => {
      const auditLogStream = await server.services.auditLogStream.getById({
        actorId: req.permission.id,
        actor: req.permission.type,
        actorOrgId: req.permission.orgId,
        actorAuthMethod: req.permission.authMethod,
        id: req.params.id
      });

      return { auditLogStream };
    }
  });

  server.route({
    method: "GET",
    url: "/",
    config: {
      rateLimit: readLimit
    },
    schema: {
      description: "List Audit Log Streams.",
      security: [
        {
          bearerAuth: []
        }
      ],
      response: {
        200: z.object({
          auditLogStreams: SanitizedAuditLogStreamSchema.array()
        })
      }
    },
    onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
    handler: async (req) => {
      const auditLogStreams = await server.services.auditLogStream.list({
        actorId: req.permission.id,
        actor: req.permission.type,
        actorOrgId: req.permission.orgId,
        actorAuthMethod: req.permission.authMethod
      });

      return { auditLogStreams };
    }
  });
};
backend/src/ee/routes/v1/audit-log-stream-routers/audit-log-stream-endpoints.ts (new file; path inferred from the relative import in index.ts below)

@@ -0,0 +1,142 @@
import { z } from "zod";

import { LogProvider } from "@app/ee/services/audit-log-stream/audit-log-stream-enums";
import { TAuditLogStream } from "@app/ee/services/audit-log-stream/audit-log-stream-types";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";

export const registerAuditLogStreamEndpoints = <T extends TAuditLogStream>({
  server,
  provider,
  createSchema,
  updateSchema,
  sanitizedResponseSchema
}: {
  server: FastifyZodProvider;
  provider: LogProvider;
  createSchema: z.ZodType<{
    credentials: T["credentials"];
  }>;
  updateSchema: z.ZodType<{
    credentials: T["credentials"];
  }>;
  sanitizedResponseSchema: z.ZodTypeAny;
}) => {
  server.route({
    method: "GET",
    url: "/:logStreamId",
    config: {
      rateLimit: readLimit
    },
    schema: {
      params: z.object({
        logStreamId: z.string().uuid()
      }),
      response: {
        200: z.object({
          auditLogStream: sanitizedResponseSchema
        })
      }
    },
    onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
    handler: async (req) => {
      const { logStreamId } = req.params;

      const auditLogStream = await server.services.auditLogStream.getById(logStreamId, provider, req.permission);

      return { auditLogStream };
    }
  });

  server.route({
    method: "POST",
    url: "/",
    config: {
      rateLimit: writeLimit
    },
    schema: {
      body: createSchema,
      response: {
        200: z.object({
          auditLogStream: sanitizedResponseSchema
        })
      }
    },
    onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
    handler: async (req) => {
      const { credentials } = req.body;

      const auditLogStream = await server.services.auditLogStream.create(
        {
          provider,
          credentials
        },
        req.permission
      );

      return { auditLogStream };
    }
  });

  server.route({
    method: "PATCH",
    url: "/:logStreamId",
    config: {
      rateLimit: writeLimit
    },
    schema: {
      params: z.object({
        logStreamId: z.string().uuid()
      }),
      body: updateSchema,
      response: {
        200: z.object({
          auditLogStream: sanitizedResponseSchema
        })
      }
    },
    onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
    handler: async (req) => {
      const { logStreamId } = req.params;
      const { credentials } = req.body;

      const auditLogStream = await server.services.auditLogStream.updateById(
        {
          logStreamId,
          provider,
          credentials
        },
        req.permission
      );

      return { auditLogStream };
    }
  });

  server.route({
    method: "DELETE",
    url: "/:logStreamId",
    config: {
      rateLimit: writeLimit
    },
    schema: {
      params: z.object({
        logStreamId: z.string().uuid()
      }),
      response: {
        200: z.object({
          auditLogStream: sanitizedResponseSchema
        })
      }
    },
    onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
    handler: async (req) => {
      const { logStreamId } = req.params;

      const auditLogStream = await server.services.auditLogStream.deleteById(logStreamId, provider, req.permission);

      return { auditLogStream };
    }
  });
};
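Because every provider reuses this registrar, the call shape is identical across providers; only the `credentials` schema differs. A hedged client example against the generated POST endpoint (host, token, and credential fields are illustrative; the real fields live in the per-provider create schemas, which are not shown in this diff):

```ts
// Hypothetical client call; the credential fields come from
// CreateDatadogProviderLogStreamSchema, which this diff does not include.
const token = process.env.INFISICAL_TOKEN;
const res = await fetch("https://app.infisical.com/api/v1/audit-log-streams/datadog", {
  method: "POST",
  headers: { "Content-Type": "application/json", Authorization: `Bearer ${token}` },
  body: JSON.stringify({ credentials: { /* provider-specific fields */ } })
});
const { auditLogStream } = await res.json();
```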
backend/src/ee/routes/v1/audit-log-stream-routers/audit-log-stream-router.ts (new file; path inferred from the `export * from "./audit-log-stream-router"` in index.ts below)

@@ -0,0 +1,73 @@
import { z } from "zod";

import {
  CustomProviderListItemSchema,
  SanitizedCustomProviderSchema
} from "@app/ee/services/audit-log-stream/custom/custom-provider-schemas";
import {
  DatadogProviderListItemSchema,
  SanitizedDatadogProviderSchema
} from "@app/ee/services/audit-log-stream/datadog/datadog-provider-schemas";
import {
  SanitizedSplunkProviderSchema,
  SplunkProviderListItemSchema
} from "@app/ee/services/audit-log-stream/splunk/splunk-provider-schemas";
import { readLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";

const SanitizedAuditLogStreamSchema = z.union([
  SanitizedCustomProviderSchema,
  SanitizedDatadogProviderSchema,
  SanitizedSplunkProviderSchema
]);

const ProviderOptionsSchema = z.discriminatedUnion("provider", [
  CustomProviderListItemSchema,
  DatadogProviderListItemSchema,
  SplunkProviderListItemSchema
]);

export const registerAuditLogStreamRouter = async (server: FastifyZodProvider) => {
  server.route({
    method: "GET",
    url: "/options",
    config: {
      rateLimit: readLimit
    },
    schema: {
      response: {
        200: z.object({
          providerOptions: ProviderOptionsSchema.array()
        })
      }
    },
    onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
    handler: () => {
      const providerOptions = server.services.auditLogStream.listProviderOptions();

      return { providerOptions };
    }
  });

  server.route({
    method: "GET",
    url: "/",
    config: {
      rateLimit: readLimit
    },
    schema: {
      response: {
        200: z.object({
          auditLogStreams: SanitizedAuditLogStreamSchema.array()
        })
      }
    },
    onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
    handler: async (req) => {
      const auditLogStreams = await server.services.auditLogStream.list(req.permission);

      return { auditLogStreams };
    }
  });
};
backend/src/ee/routes/v1/audit-log-stream-routers/index.ts (new file; 51 lines)
@@ -0,0 +1,51 @@
import { LogProvider } from "@app/ee/services/audit-log-stream/audit-log-stream-enums";
import {
  CreateCustomProviderLogStreamSchema,
  SanitizedCustomProviderSchema,
  UpdateCustomProviderLogStreamSchema
} from "@app/ee/services/audit-log-stream/custom/custom-provider-schemas";
import {
  CreateDatadogProviderLogStreamSchema,
  SanitizedDatadogProviderSchema,
  UpdateDatadogProviderLogStreamSchema
} from "@app/ee/services/audit-log-stream/datadog/datadog-provider-schemas";
import {
  CreateSplunkProviderLogStreamSchema,
  SanitizedSplunkProviderSchema,
  UpdateSplunkProviderLogStreamSchema
} from "@app/ee/services/audit-log-stream/splunk/splunk-provider-schemas";

import { registerAuditLogStreamEndpoints } from "./audit-log-stream-endpoints";

export * from "./audit-log-stream-router";

export const AUDIT_LOG_STREAM_REGISTER_ROUTER_MAP: Record<LogProvider, (server: FastifyZodProvider) => Promise<void>> =
  {
    [LogProvider.Custom]: async (server: FastifyZodProvider) => {
      registerAuditLogStreamEndpoints({
        server,
        provider: LogProvider.Custom,
        sanitizedResponseSchema: SanitizedCustomProviderSchema,
        createSchema: CreateCustomProviderLogStreamSchema,
        updateSchema: UpdateCustomProviderLogStreamSchema
      });
    },
    [LogProvider.Datadog]: async (server: FastifyZodProvider) => {
      registerAuditLogStreamEndpoints({
        server,
        provider: LogProvider.Datadog,
        sanitizedResponseSchema: SanitizedDatadogProviderSchema,
        createSchema: CreateDatadogProviderLogStreamSchema,
        updateSchema: UpdateDatadogProviderLogStreamSchema
      });
    },
    [LogProvider.Splunk]: async (server: FastifyZodProvider) => {
      registerAuditLogStreamEndpoints({
        server,
        provider: LogProvider.Splunk,
        sanitizedResponseSchema: SanitizedSplunkProviderSchema,
        createSchema: CreateSplunkProviderLogStreamSchema,
        updateSchema: UpdateSplunkProviderLogStreamSchema
      });
    }
  };
@@ -3,7 +3,7 @@ import { registerProjectTemplateRouter } from "@app/ee/routes/v1/project-templat
 import { registerAccessApprovalPolicyRouter } from "./access-approval-policy-router";
 import { registerAccessApprovalRequestRouter } from "./access-approval-request-router";
 import { registerAssumePrivilegeRouter } from "./assume-privilege-router";
-import { registerAuditLogStreamRouter } from "./audit-log-stream-router";
+import { AUDIT_LOG_STREAM_REGISTER_ROUTER_MAP, registerAuditLogStreamRouter } from "./audit-log-stream-routers";
 import { registerCaCrlRouter } from "./certificate-authority-crl-router";
 import { registerDynamicSecretLeaseRouter } from "./dynamic-secret-lease-router";
 import { registerKubernetesDynamicSecretLeaseRouter } from "./dynamic-secret-lease-routers/kubernetes-lease-router";
@@ -114,7 +114,21 @@ export const registerV1EERoutes = async (server: FastifyZodProvider) => {
   await server.register(registerSecretRouter, { prefix: "/secrets" });
   await server.register(registerSecretVersionRouter, { prefix: "/secret" });
   await server.register(registerGroupRouter, { prefix: "/groups" });
-  await server.register(registerAuditLogStreamRouter, { prefix: "/audit-log-streams" });
+
+  await server.register(
+    async (auditLogStreamRouter) => {
+      await auditLogStreamRouter.register(registerAuditLogStreamRouter);
+
+      // Provider-specific endpoints
+      await Promise.all(
+        Object.entries(AUDIT_LOG_STREAM_REGISTER_ROUTER_MAP).map(([provider, router]) =>
+          auditLogStreamRouter.register(router, { prefix: `/${provider}` })
+        )
+      );
+    },
+    { prefix: "/audit-log-streams" }
+  );
+
   await server.register(registerUserAdditionalPrivilegeRouter, { prefix: "/user-project-additional-privilege" });
   await server.register(
     async (privilegeRouter) => {
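Assuming the EE v1 routes mount under `/api/v1` (the mount point itself is outside this diff), the nested registration above yields one shared surface plus one CRUD surface per key of the provider map:

```text
GET    /api/v1/audit-log-streams                         list all streams (any provider)
GET    /api/v1/audit-log-streams/options                 provider options
POST   /api/v1/audit-log-streams/<provider>              create
GET    /api/v1/audit-log-streams/<provider>/:logStreamId
PATCH  /api/v1/audit-log-streams/<provider>/:logStreamId
DELETE /api/v1/audit-log-streams/<provider>/:logStreamId
```

where `<provider>` is one of `datadog`, `splunk`, or `custom`.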
backend/src/ee/services/audit-log-stream/audit-log-stream-enums.ts (new file; path inferred from the imports above)

@@ -0,0 +1,5 @@
export enum LogProvider {
  Datadog = "datadog",
  Splunk = "splunk",
  Custom = "custom"
}
backend/src/ee/services/audit-log-stream/audit-log-stream-factory.ts (new file; path inferred from the service import below)

@@ -0,0 +1,13 @@
import { LogProvider } from "./audit-log-stream-enums";
import { TAuditLogStreamCredentials, TLogStreamFactory } from "./audit-log-stream-types";
import { CustomProviderFactory } from "./custom/custom-provider-factory";
import { DatadogProviderFactory } from "./datadog/datadog-provider-factory";
import { SplunkProviderFactory } from "./splunk/splunk-provider-factory";

type TLogStreamFactoryImplementation = TLogStreamFactory<TAuditLogStreamCredentials>;

export const LOG_STREAM_FACTORY_MAP: Record<LogProvider, TLogStreamFactoryImplementation> = {
  [LogProvider.Datadog]: DatadogProviderFactory as TLogStreamFactoryImplementation,
  [LogProvider.Splunk]: SplunkProviderFactory as TLogStreamFactoryImplementation,
  [LogProvider.Custom]: CustomProviderFactory as TLogStreamFactoryImplementation
};
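The `as TLogStreamFactoryImplementation` casts deliberately erase the per-provider credential generics, so every caller works against the widened credentials union. A minimal dispatch sketch using only names defined in this diff:

```ts
import { TAuditLogs } from "@app/db/schemas";

import { LogProvider } from "./audit-log-stream-enums";
import { LOG_STREAM_FACTORY_MAP } from "./audit-log-stream-factory";
import { TAuditLogStreamCredentials } from "./audit-log-stream-types";

// Resolve the factory for a provider, validate the credentials, then ship one log.
async function dispatchLog(
  provider: LogProvider,
  credentials: TAuditLogStreamCredentials,
  auditLog: TAuditLogs
) {
  const factory = LOG_STREAM_FACTORY_MAP[provider]();
  const validated = await factory.validateCredentials({ credentials });
  await factory.streamLog({ credentials: validated, auditLog });
}
```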
backend/src/ee/services/audit-log-stream/audit-log-stream-fns.ts

@@ -1,21 +1,70 @@
-export function providerSpecificPayload(url: string) {
-  const { hostname } = new URL(url);
-
-  const payload: Record<string, string> = {};
-
-  switch (hostname) {
-    case "http-intake.logs.datadoghq.com":
-    case "http-intake.logs.us3.datadoghq.com":
-    case "http-intake.logs.us5.datadoghq.com":
-    case "http-intake.logs.datadoghq.eu":
-    case "http-intake.logs.ap1.datadoghq.com":
-    case "http-intake.logs.ddog-gov.com":
-      payload.ddsource = "infisical";
-      payload.service = "audit-logs";
-      break;
-    default:
-      break;
-  }
-
-  return payload;
-}
+import { TAuditLogStreams } from "@app/db/schemas";
+import { TKmsServiceFactory } from "@app/services/kms/kms-service";
+import { KmsDataKey } from "@app/services/kms/kms-types";
+
+import { TAuditLogStream, TAuditLogStreamCredentials } from "./audit-log-stream-types";
+import { getCustomProviderListItem } from "./custom/custom-provider-fns";
+import { getDatadogProviderListItem } from "./datadog/datadog-provider-fns";
+import { getSplunkProviderListItem } from "./splunk/splunk-provider-fns";
+
+export const listProviderOptions = () => {
+  return [getDatadogProviderListItem(), getSplunkProviderListItem(), getCustomProviderListItem()].sort((a, b) =>
+    a.name.localeCompare(b.name)
+  );
+};
+
+export const encryptLogStreamCredentials = async ({
+  orgId,
+  credentials,
+  kmsService
+}: {
+  orgId: string;
+  credentials: TAuditLogStreamCredentials;
+  kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">;
+}) => {
+  const { encryptor } = await kmsService.createCipherPairWithDataKey({
+    type: KmsDataKey.Organization,
+    orgId
+  });
+
+  const { cipherTextBlob: encryptedCredentialsBlob } = encryptor({
+    plainText: Buffer.from(JSON.stringify(credentials))
+  });
+
+  return encryptedCredentialsBlob;
+};
+
+export const decryptLogStreamCredentials = async ({
+  orgId,
+  encryptedCredentials,
+  kmsService
+}: {
+  orgId: string;
+  encryptedCredentials: Buffer;
+  kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">;
+}) => {
+  const { decryptor } = await kmsService.createCipherPairWithDataKey({
+    type: KmsDataKey.Organization,
+    orgId
+  });
+
+  const decryptedPlainTextBlob = decryptor({
+    cipherTextBlob: encryptedCredentials
+  });
+
+  return JSON.parse(decryptedPlainTextBlob.toString()) as TAuditLogStreamCredentials;
+};
+
+export const decryptLogStream = async (
+  logStream: TAuditLogStreams,
+  kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">
+) => {
+  return {
+    ...logStream,
+    credentials: await decryptLogStreamCredentials({
+      encryptedCredentials: logStream.encryptedCredentials,
+      orgId: logStream.orgId,
+      kmsService
+    })
+  } as TAuditLogStream;
+};
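The two helpers are strict inverses: the blob is just org-KMS-encrypted JSON. A round-trip sketch (the kmsService wiring is assumed to come from the caller, as in the service below):

```ts
import { TKmsServiceFactory } from "@app/services/kms/kms-service";

import { decryptLogStreamCredentials, encryptLogStreamCredentials } from "./audit-log-stream-fns";
import { TAuditLogStreamCredentials } from "./audit-log-stream-types";

async function roundTrip(
  orgId: string,
  credentials: TAuditLogStreamCredentials,
  kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">
) {
  const blob = await encryptLogStreamCredentials({ orgId, credentials, kmsService });
  const restored = await decryptLogStreamCredentials({ orgId, encryptedCredentials: blob, kmsService });
  return restored; // deep-equals `credentials`
}
```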
(new file: base provider schema; exact path not shown in this capture)

@@ -0,0 +1,14 @@
import { AuditLogStreamsSchema } from "@app/db/schemas";

export const BaseProviderSchema = AuditLogStreamsSchema.omit({
  encryptedCredentials: true,
  provider: true,

  // Old "archived" values
  encryptedHeadersAlgorithm: true,
  encryptedHeadersCiphertext: true,
  encryptedHeadersIV: true,
  encryptedHeadersKeyEncoding: true,
  encryptedHeadersTag: true,
  url: true
});
backend/src/ee/services/audit-log-stream/audit-log-stream-service.ts (removed/added markers reconstructed; the capture had lost them)

@@ -1,242 +1,252 @@
 import { ForbiddenError } from "@casl/ability";
-import { RawAxiosRequestHeaders } from "axios";
+import { AxiosError } from "axios";

-import { SecretKeyEncoding } from "@app/db/schemas";
-import { getConfig } from "@app/lib/config/env";
-import { request } from "@app/lib/config/request";
-import { crypto } from "@app/lib/crypto/cryptography";
-import { BadRequestError, NotFoundError, UnauthorizedError } from "@app/lib/errors";
-import { blockLocalAndPrivateIpAddresses } from "@app/lib/validator";
+import { TAuditLogs } from "@app/db/schemas";
+import {
+  decryptLogStream,
+  decryptLogStreamCredentials,
+  encryptLogStreamCredentials,
+  listProviderOptions
+} from "@app/ee/services/audit-log-stream/audit-log-stream-fns";
+import { BadRequestError, NotFoundError } from "@app/lib/errors";
+import { logger } from "@app/lib/logger";
+import { OrgServiceActor } from "@app/lib/types";
+import { TKmsServiceFactory } from "@app/services/kms/kms-service";

-import { AUDIT_LOG_STREAM_TIMEOUT } from "../audit-log/audit-log-queue";
 import { TLicenseServiceFactory } from "../license/license-service";
 import { OrgPermissionActions, OrgPermissionSubjects } from "../permission/org-permission";
 import { TPermissionServiceFactory } from "../permission/permission-service-types";
 import { TAuditLogStreamDALFactory } from "./audit-log-stream-dal";
-import { providerSpecificPayload } from "./audit-log-stream-fns";
-import { LogStreamHeaders, TAuditLogStreamServiceFactory } from "./audit-log-stream-types";
+import { LogProvider } from "./audit-log-stream-enums";
+import { LOG_STREAM_FACTORY_MAP } from "./audit-log-stream-factory";
+import { TAuditLogStream, TCreateAuditLogStreamDTO, TUpdateAuditLogStreamDTO } from "./audit-log-stream-types";
+import { TCustomProviderCredentials } from "./custom/custom-provider-types";

-type TAuditLogStreamServiceFactoryDep = {
+export type TAuditLogStreamServiceFactoryDep = {
   auditLogStreamDAL: TAuditLogStreamDALFactory;
   permissionService: Pick<TPermissionServiceFactory, "getOrgPermission">;
   licenseService: Pick<TLicenseServiceFactory, "getPlan">;
+  kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">;
 };

+export type TAuditLogStreamServiceFactory = ReturnType<typeof auditLogStreamServiceFactory>;
+
 export const auditLogStreamServiceFactory = ({
   auditLogStreamDAL,
   permissionService,
-  licenseService
-}: TAuditLogStreamServiceFactoryDep): TAuditLogStreamServiceFactory => {
-  const create: TAuditLogStreamServiceFactory["create"] = async ({
-    url,
-    actor,
-    headers = [],
-    actorId,
-    actorOrgId,
-    actorAuthMethod
-  }) => {
-    if (!actorOrgId) throw new UnauthorizedError({ message: "No organization ID attached to authentication token" });
-
-    const plan = await licenseService.getPlan(actorOrgId);
+  licenseService,
+  kmsService
+}: TAuditLogStreamServiceFactoryDep) => {
+  const create = async ({ provider, credentials }: TCreateAuditLogStreamDTO, actor: OrgServiceActor) => {
+    const plan = await licenseService.getPlan(actor.orgId);
     if (!plan.auditLogStreams) {
       throw new BadRequestError({
-        message: "Failed to create audit log streams due to plan restriction. Upgrade plan to create group."
+        message: "Failed to create Audit Log Stream: Plan restriction. Upgrade plan to continue."
       });
     }

     const { permission } = await permissionService.getOrgPermission(
-      actor,
-      actorId,
-      actorOrgId,
-      actorAuthMethod,
-      actorOrgId
+      actor.type,
+      actor.id,
+      actor.orgId,
+      actor.authMethod,
+      actor.orgId
     );

     ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Create, OrgPermissionSubjects.Settings);

-    const appCfg = getConfig();
-    if (appCfg.isCloud) await blockLocalAndPrivateIpAddresses(url);
-
-    const totalStreams = await auditLogStreamDAL.find({ orgId: actorOrgId });
+    const totalStreams = await auditLogStreamDAL.find({ orgId: actor.orgId });
     if (totalStreams.length >= plan.auditLogStreamLimit) {
       throw new BadRequestError({
-        message:
-          "Failed to create audit log streams due to plan limit reached. Kindly contact Infisical to add more streams."
+        message: "Failed to create Audit Log Stream: Plan limit reached. Contact Infisical to increase quota."
       });
     }

-    // testing connection first
-    const streamHeaders: RawAxiosRequestHeaders = { "Content-Type": "application/json" };
-    if (headers.length)
-      headers.forEach(({ key, value }) => {
-        streamHeaders[key] = value;
-      });
-
-    await request
-      .post(
-        url,
-        { ...providerSpecificPayload(url), ping: "ok" },
-        {
-          headers: streamHeaders,
-          // request timeout
-          timeout: AUDIT_LOG_STREAM_TIMEOUT,
-          // connection timeout
-          signal: AbortSignal.timeout(AUDIT_LOG_STREAM_TIMEOUT)
-        }
-      )
-      .catch((err) => {
-        throw new BadRequestError({ message: `Failed to connect with upstream source: ${(err as Error)?.message}` });
-      });
-
-    const encryptedHeaders = headers
-      ? crypto.encryption().symmetric().encryptWithRootEncryptionKey(JSON.stringify(headers))
-      : undefined;
+    const factory = LOG_STREAM_FACTORY_MAP[provider]();
+    const validatedCredentials = await factory.validateCredentials({ credentials });
+
+    const encryptedCredentials = await encryptLogStreamCredentials({
+      credentials: validatedCredentials,
+      orgId: actor.orgId,
+      kmsService
+    });
+
     const logStream = await auditLogStreamDAL.create({
-      orgId: actorOrgId,
-      url,
-      ...(encryptedHeaders
-        ? {
-            encryptedHeadersCiphertext: encryptedHeaders.ciphertext,
-            encryptedHeadersIV: encryptedHeaders.iv,
-            encryptedHeadersTag: encryptedHeaders.tag,
-            encryptedHeadersAlgorithm: encryptedHeaders.algorithm,
-            encryptedHeadersKeyEncoding: encryptedHeaders.encoding
-          }
-        : {})
+      orgId: actor.orgId,
+      provider,
+      encryptedCredentials
     });
-    return logStream;
+
+    return { ...logStream, credentials: validatedCredentials } as TAuditLogStream;
   };

-  const updateById: TAuditLogStreamServiceFactory["updateById"] = async ({
-    id,
-    url,
-    actor,
-    headers = [],
-    actorId,
-    actorOrgId,
-    actorAuthMethod
-  }) => {
-    if (!actorOrgId) throw new UnauthorizedError({ message: "No organization ID attached to authentication token" });
-
-    const plan = await licenseService.getPlan(actorOrgId);
-    if (!plan.auditLogStreams)
+  const updateById = async (
+    { logStreamId, provider, credentials }: TUpdateAuditLogStreamDTO,
+    actor: OrgServiceActor
+  ) => {
+    const plan = await licenseService.getPlan(actor.orgId);
+    if (!plan.auditLogStreams) {
       throw new BadRequestError({
-        message: "Failed to update audit log streams due to plan restriction. Upgrade plan to create group."
+        message: "Failed to update Audit Log Stream: Plan restriction. Upgrade plan to continue."
       });
+    }

-    const logStream = await auditLogStreamDAL.findById(id);
-    if (!logStream) throw new NotFoundError({ message: `Audit log stream with ID '${id}' not found` });
-
-    const { orgId } = logStream;
-    const { permission } = await permissionService.getOrgPermission(actor, actorId, orgId, actorAuthMethod, actorOrgId);
+    const logStream = await auditLogStreamDAL.findById(logStreamId);
+    if (!logStream) throw new NotFoundError({ message: `Audit Log Stream with ID '${logStreamId}' not found` });
+
+    const { permission } = await permissionService.getOrgPermission(
+      actor.type,
+      actor.id,
+      actor.orgId,
+      actor.authMethod,
+      logStream.orgId
+    );
+
     ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Edit, OrgPermissionSubjects.Settings);
-    const appCfg = getConfig();
-    if (url && appCfg.isCloud) await blockLocalAndPrivateIpAddresses(url);

-    // testing connection first
-    const streamHeaders: RawAxiosRequestHeaders = { "Content-Type": "application/json" };
-    if (headers.length)
-      headers.forEach(({ key, value }) => {
-        streamHeaders[key] = value;
-      });
-
-    await request
-      .post(
-        url || logStream.url,
-        { ...providerSpecificPayload(url || logStream.url), ping: "ok" },
-        {
-          headers: streamHeaders,
-          // request timeout
-          timeout: AUDIT_LOG_STREAM_TIMEOUT,
-          // connection timeout
-          signal: AbortSignal.timeout(AUDIT_LOG_STREAM_TIMEOUT)
-        }
-      )
-      .catch((err) => {
-        throw new Error(`Failed to connect with the source ${(err as Error)?.message}`);
-      });
-
-    const encryptedHeaders = headers
-      ? crypto.encryption().symmetric().encryptWithRootEncryptionKey(JSON.stringify(headers))
-      : undefined;
-    const updatedLogStream = await auditLogStreamDAL.updateById(id, {
-      url,
-      ...(encryptedHeaders
-        ? {
-            encryptedHeadersCiphertext: encryptedHeaders.ciphertext,
-            encryptedHeadersIV: encryptedHeaders.iv,
-            encryptedHeadersTag: encryptedHeaders.tag,
-            encryptedHeadersAlgorithm: encryptedHeaders.algorithm,
-            encryptedHeadersKeyEncoding: encryptedHeaders.encoding
-          }
-        : {})
-    });
-    return updatedLogStream;
+    const finalCredentials = { ...credentials };
+
+    // For the "Custom" provider, we must handle masked header values ('******').
+    // These are placeholders from the frontend for secrets that haven't been changed.
+    // We need to replace them with the original, unmasked values from the database.
+    if (
+      provider === LogProvider.Custom &&
+      "headers" in finalCredentials &&
+      Array.isArray(finalCredentials.headers) &&
+      finalCredentials.headers.some((header) => header.value === "******")
+    ) {
+      const decryptedOldCredentials = (await decryptLogStreamCredentials({
+        encryptedCredentials: logStream.encryptedCredentials,
+        orgId: logStream.orgId,
+        kmsService
+      })) as TCustomProviderCredentials;
+
+      const oldHeadersMap = decryptedOldCredentials.headers.reduce<Record<string, string>>((acc, header) => {
+        acc[header.key] = header.value;
+        return acc;
+      }, {});
+
+      const finalHeaders: { key: string; value: string }[] = [];
+      for (const header of finalCredentials.headers) {
+        if (header.value === "******") {
+          const oldValue = oldHeadersMap[header.key];
+          if (oldValue) {
+            finalHeaders.push({ key: header.key, value: oldValue });
+          }
+        } else {
+          finalHeaders.push(header);
+        }
+      }
+      finalCredentials.headers = finalHeaders;
+    }
+
+    const factory = LOG_STREAM_FACTORY_MAP[provider]();
+    const validatedCredentials = await factory.validateCredentials({ credentials: finalCredentials });
+
+    const encryptedCredentials = await encryptLogStreamCredentials({
+      credentials: validatedCredentials,
+      orgId: actor.orgId,
+      kmsService
+    });
+
+    const updatedLogStream = await auditLogStreamDAL.updateById(logStreamId, {
+      encryptedCredentials
+    });
+
+    return { ...updatedLogStream, credentials: validatedCredentials } as TAuditLogStream;
   };

-  const deleteById: TAuditLogStreamServiceFactory["deleteById"] = async ({
-    id,
-    actor,
-    actorId,
-    actorOrgId,
-    actorAuthMethod
-  }) => {
-    if (!actorOrgId) throw new UnauthorizedError({ message: "No organization ID attached to authentication token" });
-
-    const logStream = await auditLogStreamDAL.findById(id);
-    if (!logStream) throw new NotFoundError({ message: `Audit log stream with ID '${id}' not found` });
-
-    const { orgId } = logStream;
-    const { permission } = await permissionService.getOrgPermission(actor, actorId, orgId, actorAuthMethod, actorOrgId);
+  const deleteById = async (logStreamId: string, provider: LogProvider, actor: OrgServiceActor) => {
+    const logStream = await auditLogStreamDAL.findById(logStreamId);
+    if (!logStream) throw new NotFoundError({ message: `Audit Log Stream with ID '${logStreamId}' not found` });
+
+    const { permission } = await permissionService.getOrgPermission(
+      actor.type,
+      actor.id,
+      actor.orgId,
+      actor.authMethod,
+      logStream.orgId
+    );
+
     ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Delete, OrgPermissionSubjects.Settings);

-    const deletedLogStream = await auditLogStreamDAL.deleteById(id);
-    return deletedLogStream;
+    if (logStream.provider !== provider) {
+      throw new BadRequestError({
+        message: `Audit Log Stream with ID '${logStreamId}' is not for provider '${provider}'`
+      });
+    }
+
+    const deletedLogStream = await auditLogStreamDAL.deleteById(logStreamId);
+
+    return decryptLogStream(deletedLogStream, kmsService);
   };

-  const getById: TAuditLogStreamServiceFactory["getById"] = async ({
-    id,
-    actor,
-    actorId,
-    actorOrgId,
-    actorAuthMethod
-  }) => {
-    const logStream = await auditLogStreamDAL.findById(id);
-    if (!logStream) throw new NotFoundError({ message: `Audit log stream with ID '${id}' not found` });
-
-    const { orgId } = logStream;
-    const { permission } = await permissionService.getOrgPermission(actor, actorId, orgId, actorAuthMethod, actorOrgId);
-    ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Read, OrgPermissionSubjects.Settings);
-
-    const headers =
-      logStream?.encryptedHeadersCiphertext && logStream?.encryptedHeadersIV && logStream?.encryptedHeadersTag
-        ? (JSON.parse(
-            crypto
-              .encryption()
-              .symmetric()
-              .decryptWithRootEncryptionKey({
-                tag: logStream.encryptedHeadersTag,
-                iv: logStream.encryptedHeadersIV,
-                ciphertext: logStream.encryptedHeadersCiphertext,
-                keyEncoding: logStream.encryptedHeadersKeyEncoding as SecretKeyEncoding
-              })
-          ) as LogStreamHeaders[])
-        : undefined;
-
-    return { ...logStream, headers };
-  };
-
-  const list: TAuditLogStreamServiceFactory["list"] = async ({ actor, actorId, actorOrgId, actorAuthMethod }) => {
-    const { permission } = await permissionService.getOrgPermission(
-      actor,
-      actorId,
-      actorOrgId,
-      actorAuthMethod,
-      actorOrgId
-    );
-
-    ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Read, OrgPermissionSubjects.Settings);
-
-    const logStreams = await auditLogStreamDAL.find({ orgId: actorOrgId });
-    return logStreams;
+  const getById = async (logStreamId: string, provider: LogProvider, actor: OrgServiceActor) => {
+    const logStream = await auditLogStreamDAL.findById(logStreamId);
+
+    if (!logStream) throw new NotFoundError({ message: `Audit log stream with ID '${logStreamId}' not found` });
+
+    const { permission } = await permissionService.getOrgPermission(
+      actor.type,
+      actor.id,
+      logStream.orgId,
+      actor.authMethod,
+      actor.orgId
+    );
+
+    ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Read, OrgPermissionSubjects.Settings);
+
+    if (logStream.provider !== provider) {
+      throw new BadRequestError({
+        message: `Audit Log Stream with ID '${logStreamId}' is not for provider '${provider}'`
+      });
+    }
+
+    return decryptLogStream(logStream, kmsService);
+  };
+
+  const list = async (actor: OrgServiceActor) => {
+    const { permission } = await permissionService.getOrgPermission(
+      actor.type,
+      actor.id,
+      actor.orgId,
+      actor.authMethod,
+      actor.orgId
+    );
+
+    ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Read, OrgPermissionSubjects.Settings);
+
+    const logStreams = await auditLogStreamDAL.find({ orgId: actor.orgId });
+
+    return Promise.all(logStreams.map((stream) => decryptLogStream(stream, kmsService)));
+  };
+
+  const streamLog = async (orgId: string, auditLog: TAuditLogs) => {
+    const logStreams = await auditLogStreamDAL.find({ orgId });
+    await Promise.allSettled(
+      logStreams.map(async ({ provider, encryptedCredentials }) => {
+        const credentials = await decryptLogStreamCredentials({
+          encryptedCredentials,
+          orgId,
+          kmsService
+        });
+
+        const factory = LOG_STREAM_FACTORY_MAP[provider as LogProvider]();
+
+        try {
+          await factory.streamLog({
+            credentials,
+            auditLog
+          });
+        } catch (error) {
+          logger.error(
+            error,
+            `Failed to stream audit log [auditLogId=${auditLog.id}] [provider=${provider}] [orgId=${orgId}]${error instanceof AxiosError ? `: ${error.message}` : ""}`
+          );
+          throw error;
+        }
+      })
+    );
   };

   return {
@@ -244,6 +254,8 @@ export const auditLogStreamServiceFactory = ({
     updateById,
     deleteById,
     getById,
-    list
+    list,
+    listProviderOptions,
+    streamLog
   };
 };
backend/src/ee/services/audit-log-stream/audit-log-stream-types.ts

@@ -1,48 +1,38 @@
-import { TAuditLogStreams } from "@app/db/schemas";
-import { TOrgPermission } from "@app/lib/types";
+import { TAuditLogs } from "@app/db/schemas";

-export type LogStreamHeaders = {
-  key: string;
-  value: string;
-};
+import { LogProvider } from "./audit-log-stream-enums";
+import { TCustomProvider, TCustomProviderCredentials } from "./custom/custom-provider-types";
+import { TDatadogProvider, TDatadogProviderCredentials } from "./datadog/datadog-provider-types";
+import { TSplunkProvider, TSplunkProviderCredentials } from "./splunk/splunk-provider-types";

-export type TCreateAuditLogStreamDTO = Omit<TOrgPermission, "orgId"> & {
-  url: string;
-  headers?: LogStreamHeaders[];
-};
+export type TAuditLogStream = TDatadogProvider | TSplunkProvider | TCustomProvider;
+
+export type TAuditLogStreamCredentials =
+  | TDatadogProviderCredentials
+  | TSplunkProviderCredentials
+  | TCustomProviderCredentials;
+
+export type TCreateAuditLogStreamDTO = {
+  provider: LogProvider;
+  credentials: TAuditLogStreamCredentials;
+};

-export type TUpdateAuditLogStreamDTO = Omit<TOrgPermission, "orgId"> & {
-  id: string;
-  url?: string;
-  headers?: LogStreamHeaders[];
-};
+export type TUpdateAuditLogStreamDTO = {
+  logStreamId: string;
+  provider: LogProvider;
+  credentials: TAuditLogStreamCredentials;
+};

-export type TDeleteAuditLogStreamDTO = Omit<TOrgPermission, "orgId"> & {
-  id: string;
-};
+export type TLogStreamFactoryValidateCredentials<C extends TAuditLogStreamCredentials> = (input: {
+  credentials: C;
+}) => Promise<C>;

-export type TListAuditLogStreamDTO = Omit<TOrgPermission, "orgId">;
-
-export type TGetDetailsAuditLogStreamDTO = Omit<TOrgPermission, "orgId"> & {
-  id: string;
-};
-
-export type TAuditLogStreamServiceFactory = {
-  create: (arg: TCreateAuditLogStreamDTO) => Promise<TAuditLogStreams>;
-  updateById: (arg: TUpdateAuditLogStreamDTO) => Promise<TAuditLogStreams>;
-  deleteById: (arg: TDeleteAuditLogStreamDTO) => Promise<TAuditLogStreams>;
-  getById: (arg: TGetDetailsAuditLogStreamDTO) => Promise<{
-    headers: LogStreamHeaders[] | undefined;
-    orgId: string;
-    url: string;
-    id: string;
-    createdAt: Date;
-    updatedAt: Date;
-    encryptedHeadersCiphertext?: string | null | undefined;
-    encryptedHeadersIV?: string | null | undefined;
-    encryptedHeadersTag?: string | null | undefined;
-    encryptedHeadersAlgorithm?: string | null | undefined;
-    encryptedHeadersKeyEncoding?: string | null | undefined;
-  }>;
-  list: (arg: TListAuditLogStreamDTO) => Promise<TAuditLogStreams[]>;
-};
+export type TLogStreamFactoryStreamLog<C extends TAuditLogStreamCredentials> = (input: {
+  credentials: C;
+  auditLog: TAuditLogs;
+}) => Promise<void>;
+
+export type TLogStreamFactory<C extends TAuditLogStreamCredentials> = () => {
+  validateCredentials: TLogStreamFactoryValidateCredentials<C>;
+  streamLog: TLogStreamFactoryStreamLog<C>;
+};
@@ -0,0 +1,67 @@
import { RawAxiosRequestHeaders } from "axios";

import { request } from "@app/lib/config/request";
import { BadRequestError } from "@app/lib/errors";
import { blockLocalAndPrivateIpAddresses } from "@app/lib/validator";

import { AUDIT_LOG_STREAM_TIMEOUT } from "../../audit-log/audit-log-queue";
import { TLogStreamFactoryStreamLog, TLogStreamFactoryValidateCredentials } from "../audit-log-stream-types";
import { TCustomProviderCredentials } from "./custom-provider-types";

export const CustomProviderFactory = () => {
  const validateCredentials: TLogStreamFactoryValidateCredentials<TCustomProviderCredentials> = async ({
    credentials
  }) => {
    const { url, headers } = credentials;

    await blockLocalAndPrivateIpAddresses(url);

    const streamHeaders: RawAxiosRequestHeaders = { "Content-Type": "application/json" };
    if (headers.length) {
      headers.forEach(({ key, value }) => {
        streamHeaders[key] = value;
      });
    }

    await request
      .post(
        url,
        { ping: "ok" },
        {
          headers: streamHeaders,
          timeout: AUDIT_LOG_STREAM_TIMEOUT,
          signal: AbortSignal.timeout(AUDIT_LOG_STREAM_TIMEOUT)
        }
      )
      .catch((err) => {
        throw new BadRequestError({ message: `Failed to connect with upstream source: ${(err as Error)?.message}` });
      });

    return credentials;
  };

  const streamLog: TLogStreamFactoryStreamLog<TCustomProviderCredentials> = async ({ credentials, auditLog }) => {
    const { url, headers } = credentials;

    await blockLocalAndPrivateIpAddresses(url);

    const streamHeaders: RawAxiosRequestHeaders = { "Content-Type": "application/json" };

    if (headers.length) {
      headers.forEach(({ key, value }) => {
        streamHeaders[key] = value;
      });
    }

    await request.post(url, auditLog, {
      headers: streamHeaders,
      timeout: AUDIT_LOG_STREAM_TIMEOUT,
      signal: AbortSignal.timeout(AUDIT_LOG_STREAM_TIMEOUT)
    });
  };

  return {
    validateCredentials,
    streamLog
  };
};
@@ -0,0 +1,8 @@
import { LogProvider } from "../audit-log-stream-enums";

export const getCustomProviderListItem = () => {
  return {
    name: "Custom" as const,
    provider: LogProvider.Custom as const
  };
};
@@ -0,0 +1,50 @@
import RE2 from "re2";
import { z } from "zod";

import { LogProvider } from "../audit-log-stream-enums";
import { BaseProviderSchema } from "../audit-log-stream-schemas";

export const CustomProviderCredentialsSchema = z.object({
  url: z.string().url().trim().min(1).max(255),
  headers: z
    .object({
      key: z
        .string()
        .min(1)
        .refine((val) => new RE2(/^[^\n\r]+$/).test(val), "Header keys cannot contain newlines or carriage returns"),
      value: z
        .string()
        .min(1)
        .refine((val) => new RE2(/^[^\n\r]+$/).test(val), "Header values cannot contain newlines or carriage returns")
    })
    .array()
});

const BaseCustomProviderSchema = BaseProviderSchema.extend({ provider: z.literal(LogProvider.Custom) });

export const CustomProviderSchema = BaseCustomProviderSchema.extend({
  credentials: CustomProviderCredentialsSchema
});

export const SanitizedCustomProviderSchema = BaseCustomProviderSchema.extend({
  credentials: z.object({
    url: CustomProviderCredentialsSchema.shape.url,
    // Return header keys and a redacted value
    headers: CustomProviderCredentialsSchema.shape.headers.transform((headers) =>
      headers.map((header) => ({ ...header, value: "******" }))
    )
  })
});

export const CustomProviderListItemSchema = z.object({
  name: z.literal("Custom"),
  provider: z.literal(LogProvider.Custom)
});

export const CreateCustomProviderLogStreamSchema = z.object({
  credentials: CustomProviderCredentialsSchema
});

export const UpdateCustomProviderLogStreamSchema = z.object({
  credentials: CustomProviderCredentialsSchema
});
@@ -0,0 +1,7 @@
import { z } from "zod";

import { CustomProviderCredentialsSchema, CustomProviderSchema } from "./custom-provider-schemas";

export type TCustomProvider = z.infer<typeof CustomProviderSchema>;

export type TCustomProviderCredentials = z.infer<typeof CustomProviderCredentialsSchema>;
@@ -0,0 +1,67 @@
import { RawAxiosRequestHeaders } from "axios";

import { getConfig } from "@app/lib/config/env";
import { request } from "@app/lib/config/request";
import { BadRequestError } from "@app/lib/errors";
import { blockLocalAndPrivateIpAddresses } from "@app/lib/validator";

import { AUDIT_LOG_STREAM_TIMEOUT } from "../../audit-log/audit-log-queue";
import { TLogStreamFactoryStreamLog, TLogStreamFactoryValidateCredentials } from "../audit-log-stream-types";
import { TDatadogProviderCredentials } from "./datadog-provider-types";

function createPayload(event: Record<string, unknown>) {
  const appCfg = getConfig();

  const ddtags = [`env:${appCfg.NODE_ENV || "unknown"}`].join(",");

  return {
    ...event,
    hostname: new URL(appCfg.SITE_URL || "http://infisical").hostname,
    ddsource: "infisical",
    service: "infisical",
    ddtags
  };
}

export const DatadogProviderFactory = () => {
  const validateCredentials: TLogStreamFactoryValidateCredentials<TDatadogProviderCredentials> = async ({
    credentials
  }) => {
    const { url, token } = credentials;

    await blockLocalAndPrivateIpAddresses(url);

    const streamHeaders: RawAxiosRequestHeaders = { "Content-Type": "application/json", "DD-API-KEY": token };

    await request
      .post(url, createPayload({ ping: "ok" }), {
        headers: streamHeaders,
        timeout: AUDIT_LOG_STREAM_TIMEOUT,
        signal: AbortSignal.timeout(AUDIT_LOG_STREAM_TIMEOUT)
      })
      .catch((err) => {
        throw new BadRequestError({ message: `Failed to connect with Datadog: ${(err as Error)?.message}` });
      });

    return credentials;
  };

  const streamLog: TLogStreamFactoryStreamLog<TDatadogProviderCredentials> = async ({ credentials, auditLog }) => {
    const { url, token } = credentials;

    await blockLocalAndPrivateIpAddresses(url);

    const streamHeaders: RawAxiosRequestHeaders = { "Content-Type": "application/json", "DD-API-KEY": token };

    await request.post(url, createPayload(auditLog), {
      headers: streamHeaders,
      timeout: AUDIT_LOG_STREAM_TIMEOUT,
      signal: AbortSignal.timeout(AUDIT_LOG_STREAM_TIMEOUT)
    });
  };

  return {
    validateCredentials,
    streamLog
  };
};
@@ -0,0 +1,8 @@
import { LogProvider } from "../audit-log-stream-enums";

export const getDatadogProviderListItem = () => {
  return {
    name: "Datadog" as const,
    provider: LogProvider.Datadog as const
  };
};
@@ -0,0 +1,38 @@
import RE2 from "re2";
import { z } from "zod";

import { LogProvider } from "../audit-log-stream-enums";
import { BaseProviderSchema } from "../audit-log-stream-schemas";

export const DatadogProviderCredentialsSchema = z.object({
  url: z.string().url().trim().min(1).max(255),
  token: z
    .string()
    .trim()
    .refine((val) => new RE2(/^[a-fA-F0-9]{32}$/).test(val), "Invalid Datadog API key format")
});

const BaseDatadogProviderSchema = BaseProviderSchema.extend({ provider: z.literal(LogProvider.Datadog) });

export const DatadogProviderSchema = BaseDatadogProviderSchema.extend({
  credentials: DatadogProviderCredentialsSchema
});

export const SanitizedDatadogProviderSchema = BaseDatadogProviderSchema.extend({
  credentials: DatadogProviderCredentialsSchema.pick({
    url: true
  })
});

export const DatadogProviderListItemSchema = z.object({
  name: z.literal("Datadog"),
  provider: z.literal(LogProvider.Datadog)
});

export const CreateDatadogProviderLogStreamSchema = z.object({
  credentials: DatadogProviderCredentialsSchema
});

export const UpdateDatadogProviderLogStreamSchema = z.object({
  credentials: DatadogProviderCredentialsSchema
});
@@ -0,0 +1,7 @@
import { z } from "zod";

import { DatadogProviderCredentialsSchema, DatadogProviderSchema } from "./datadog-provider-schemas";

export type TDatadogProvider = z.infer<typeof DatadogProviderSchema>;

export type TDatadogProviderCredentials = z.infer<typeof DatadogProviderCredentialsSchema>;
@@ -0,0 +1,84 @@
import { RawAxiosRequestHeaders } from "axios";

import { getConfig } from "@app/lib/config/env";
import { request } from "@app/lib/config/request";
import { BadRequestError } from "@app/lib/errors";
import { blockLocalAndPrivateIpAddresses } from "@app/lib/validator";

import { AUDIT_LOG_STREAM_TIMEOUT } from "../../audit-log/audit-log-queue";
import { TLogStreamFactoryStreamLog, TLogStreamFactoryValidateCredentials } from "../audit-log-stream-types";
import { TSplunkProviderCredentials } from "./splunk-provider-types";

function createPayload(event: Record<string, unknown>) {
  const appCfg = getConfig();

  return {
    time: Math.floor(Date.now() / 1000),
    ...(appCfg.SITE_URL && { host: new URL(appCfg.SITE_URL).host }),
    source: "infisical",
    sourcetype: "_json",
    event
  };
}

async function createSplunkUrl(hostname: string) {
  let parsedHostname: string;
  try {
    parsedHostname = new URL(`https://${hostname}`).hostname;
  } catch (error) {
    throw new BadRequestError({ message: `Invalid Splunk hostname provided: ${(error as Error).message}` });
  }

  await blockLocalAndPrivateIpAddresses(`https://${parsedHostname}`);

  return `https://${parsedHostname}:8088/services/collector/event`;
}

export const SplunkProviderFactory = () => {
  const validateCredentials: TLogStreamFactoryValidateCredentials<TSplunkProviderCredentials> = async ({
    credentials
  }) => {
    const { hostname, token } = credentials;

    const url = await createSplunkUrl(hostname);

    const streamHeaders: RawAxiosRequestHeaders = {
      "Content-Type": "application/json",
      Authorization: `Splunk ${token}`
    };

    await request
      .post(url, createPayload({ ping: "ok" }), {
        headers: streamHeaders,
        timeout: AUDIT_LOG_STREAM_TIMEOUT,
        signal: AbortSignal.timeout(AUDIT_LOG_STREAM_TIMEOUT)
      })
      .catch((err) => {
        throw new BadRequestError({ message: `Failed to connect with Splunk: ${(err as Error)?.message}` });
      });

    return credentials;
  };

  const streamLog: TLogStreamFactoryStreamLog<TSplunkProviderCredentials> = async ({ credentials, auditLog }) => {
    const { hostname, token } = credentials;

    const url = await createSplunkUrl(hostname);

    const streamHeaders: RawAxiosRequestHeaders = {
      "Content-Type": "application/json",
      Authorization: `Splunk ${token}`
    };

    await request.post(url, createPayload(auditLog), {
      headers: streamHeaders,
      timeout: AUDIT_LOG_STREAM_TIMEOUT,
      signal: AbortSignal.timeout(AUDIT_LOG_STREAM_TIMEOUT)
    });
  };

  return {
    validateCredentials,
    streamLog
  };
};
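
For illustration only (the hostname below is made up, not taken from the diff), the createSplunkUrl helper above normalizes a bare hostname into the standard Splunk HTTP Event Collector endpoint:

// Illustration — "splunk.example.com" is hypothetical.
// await createSplunkUrl("splunk.example.com")
// => "https://splunk.example.com:8088/services/collector/event"
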
@@ -0,0 +1,8 @@
import { LogProvider } from "../audit-log-stream-enums";

export const getSplunkProviderListItem = () => {
  return {
    name: "Splunk" as const,
    provider: LogProvider.Splunk as const
  };
};
@@ -0,0 +1,59 @@
import { z } from "zod";

import { LogProvider } from "../audit-log-stream-enums";
import { BaseProviderSchema } from "../audit-log-stream-schemas";

export const SplunkProviderCredentialsSchema = z.object({
  hostname: z
    .string()
    .trim()
    .min(1)
    .max(255)
    .superRefine((val, ctx) => {
      if (val.includes("://")) {
        ctx.addIssue({
          code: "custom",
          message: "Hostname should not include protocol"
        });
        return;
      }

      try {
        const url = new URL(`https://${val}`);
        if (url.hostname !== val) {
          ctx.addIssue({
            code: "custom",
            message: "Must be a valid hostname without port or path"
          });
        }
      } catch {
        ctx.addIssue({ code: "custom", message: "Invalid hostname" });
      }
    }),
  token: z.string().uuid().trim().min(1)
});

const BaseSplunkProviderSchema = BaseProviderSchema.extend({ provider: z.literal(LogProvider.Splunk) });

export const SplunkProviderSchema = BaseSplunkProviderSchema.extend({
  credentials: SplunkProviderCredentialsSchema
});

export const SanitizedSplunkProviderSchema = BaseSplunkProviderSchema.extend({
  credentials: SplunkProviderCredentialsSchema.pick({
    hostname: true
  })
});

export const SplunkProviderListItemSchema = z.object({
  name: z.literal("Splunk"),
  provider: z.literal(LogProvider.Splunk)
});

export const CreateSplunkProviderLogStreamSchema = z.object({
  credentials: SplunkProviderCredentialsSchema
});

export const UpdateSplunkProviderLogStreamSchema = z.object({
  credentials: SplunkProviderCredentialsSchema
});
@@ -0,0 +1,7 @@
import { z } from "zod";

import { SplunkProviderCredentialsSchema, SplunkProviderSchema } from "./splunk-provider-schemas";

export type TSplunkProvider = z.infer<typeof SplunkProviderSchema>;

export type TSplunkProviderCredentials = z.infer<typeof SplunkProviderCredentialsSchema>;
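
For context, streamLog above dispatches through LOG_STREAM_FACTORY_MAP, which is referenced but not shown in this diff. A minimal sketch of what that map presumably looks like, assuming it simply wires each LogProvider value to the factory of the same name added above (typing simplified for illustration):

// Sketch only — the real map is defined outside this diff.
const LOG_STREAM_FACTORY_MAP = {
  [LogProvider.Custom]: CustomProviderFactory,
  [LogProvider.Datadog]: DatadogProviderFactory,
  [LogProvider.Splunk]: SplunkProviderFactory
} as const;

// streamLog then resolves a provider at runtime:
// LOG_STREAM_FACTORY_MAP[provider]().streamLog({ credentials, auditLog });
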
@@ -1,22 +1,14 @@
import { AxiosError, RawAxiosRequestHeaders } from "axios";

import { SecretKeyEncoding } from "@app/db/schemas";
import { request } from "@app/lib/config/request";
import { crypto } from "@app/lib/crypto/cryptography";
import { logger } from "@app/lib/logger";
import { TAuditLogStreamServiceFactory } from "@app/ee/services/audit-log-stream/audit-log-stream-service";
import { QueueJobs, QueueName, TQueueServiceFactory } from "@app/queue";
import { TProjectDALFactory } from "@app/services/project/project-dal";

import { TAuditLogStreamDALFactory } from "../audit-log-stream/audit-log-stream-dal";
import { providerSpecificPayload } from "../audit-log-stream/audit-log-stream-fns";
import { LogStreamHeaders } from "../audit-log-stream/audit-log-stream-types";
import { TLicenseServiceFactory } from "../license/license-service";
import { TAuditLogDALFactory } from "./audit-log-dal";
import { TCreateAuditLogDTO } from "./audit-log-types";

type TAuditLogQueueServiceFactoryDep = {
  auditLogDAL: TAuditLogDALFactory;
  auditLogStreamDAL: Pick<TAuditLogStreamDALFactory, "find">;
  auditLogStreamService: Pick<TAuditLogStreamServiceFactory, "streamLog">;
  queueService: TQueueServiceFactory;
  projectDAL: Pick<TProjectDALFactory, "findById">;
  licenseService: Pick<TLicenseServiceFactory, "getPlan">;
@@ -35,7 +27,7 @@ export const auditLogQueueServiceFactory = async ({
  queueService,
  projectDAL,
  licenseService,
  auditLogStreamDAL
  auditLogStreamService
}: TAuditLogQueueServiceFactoryDep): Promise<TAuditLogQueueServiceFactory> => {
  const pushToLog = async (data: TCreateAuditLogDTO) => {
    await queueService.queue<QueueName.AuditLog>(QueueName.AuditLog, QueueJobs.AuditLog, data, {
@@ -86,60 +78,7 @@ export const auditLogQueueServiceFactory = async ({
        userAgentType
      });

      const logStreams = orgId ? await auditLogStreamDAL.find({ orgId }) : [];
      await Promise.allSettled(
        logStreams.map(
          async ({
            url,
            encryptedHeadersTag,
            encryptedHeadersIV,
            encryptedHeadersKeyEncoding,
            encryptedHeadersCiphertext
          }) => {
            const streamHeaders =
              encryptedHeadersIV && encryptedHeadersCiphertext && encryptedHeadersTag
                ? (JSON.parse(
                    crypto
                      .encryption()
                      .symmetric()
                      .decryptWithRootEncryptionKey({
                        keyEncoding: encryptedHeadersKeyEncoding as SecretKeyEncoding,
                        iv: encryptedHeadersIV,
                        tag: encryptedHeadersTag,
                        ciphertext: encryptedHeadersCiphertext
                      })
                  ) as LogStreamHeaders[])
                : [];

            const headers: RawAxiosRequestHeaders = { "Content-Type": "application/json" };

            if (streamHeaders.length)
              streamHeaders.forEach(({ key, value }) => {
                headers[key] = value;
              });

            try {
              const response = await request.post(
                url,
                { ...providerSpecificPayload(url), ...auditLog },
                {
                  headers,
                  // request timeout
                  timeout: AUDIT_LOG_STREAM_TIMEOUT,
                  // connection timeout
                  signal: AbortSignal.timeout(AUDIT_LOG_STREAM_TIMEOUT)
                }
              );
              return response;
            } catch (error) {
              logger.error(
                `Failed to stream audit log [url=${url}] for org [orgId=${orgId}] [error=${(error as AxiosError).message}]`
              );
              return error;
            }
          }
        )
      );
      await auditLogStreamService.streamLog(orgId, auditLog);
    }
  });

@@ -30,7 +30,7 @@ const generateUsername = (usernameTemplate?: string | null, identity?: { name: s
export const CassandraProvider = (): TDynamicProviderFns => {
  const validateProviderInputs = async (inputs: unknown) => {
    const providerInputs = await DynamicSecretCassandraSchema.parseAsync(inputs);
    const hostIps = await Promise.all(
    await Promise.all(
      providerInputs.host
        .split(",")
        .filter(Boolean)
@@ -48,10 +48,10 @@ export const CassandraProvider = (): TDynamicProviderFns => {
      allowedExpressions: (val) => ["username"].includes(val)
    });

    return { ...providerInputs, hostIps };
    return { ...providerInputs };
  };

  const $getClient = async (providerInputs: z.infer<typeof DynamicSecretCassandraSchema> & { hostIps: string[] }) => {
  const $getClient = async (providerInputs: z.infer<typeof DynamicSecretCassandraSchema>) => {
    const sslOptions = providerInputs.ca ? { rejectUnauthorized: false, ca: providerInputs.ca } : undefined;
    const client = new cassandra.Client({
      sslOptions,
@@ -64,7 +64,7 @@ export const CassandraProvider = (): TDynamicProviderFns => {
      },
      keyspace: providerInputs.keyspace,
      localDataCenter: providerInputs?.localDataCenter,
      contactPoints: providerInputs.hostIps
      contactPoints: providerInputs.host.split(",")
    });
    return client;
  };
@@ -28,14 +28,14 @@ const generateUsername = (usernameTemplate?: string | null, identity?: { name: s
export const ElasticSearchProvider = (): TDynamicProviderFns => {
  const validateProviderInputs = async (inputs: unknown) => {
    const providerInputs = await DynamicSecretElasticSearchSchema.parseAsync(inputs);
    const [hostIp] = await verifyHostInputValidity(providerInputs.host);
    return { ...providerInputs, hostIp };
    await verifyHostInputValidity(providerInputs.host);
    return { ...providerInputs };
  };

  const $getClient = async (providerInputs: z.infer<typeof DynamicSecretElasticSearchSchema> & { hostIp: string }) => {
  const $getClient = async (providerInputs: z.infer<typeof DynamicSecretElasticSearchSchema>) => {
    const connection = new ElasticSearchClient({
      node: {
        url: new URL(`${providerInputs.hostIp}:${providerInputs.port}`),
        url: new URL(`${providerInputs.host}:${providerInputs.port}`),
        ...(providerInputs.ca && {
          ssl: {
            rejectUnauthorized: false,
@@ -28,15 +28,15 @@ const generateUsername = (usernameTemplate?: string | null, identity?: { name: s
export const MongoDBProvider = (): TDynamicProviderFns => {
  const validateProviderInputs = async (inputs: unknown) => {
    const providerInputs = await DynamicSecretMongoDBSchema.parseAsync(inputs);
    const [hostIp] = await verifyHostInputValidity(providerInputs.host);
    return { ...providerInputs, hostIp };
    await verifyHostInputValidity(providerInputs.host);
    return { ...providerInputs };
  };

  const $getClient = async (providerInputs: z.infer<typeof DynamicSecretMongoDBSchema> & { hostIp: string }) => {
  const $getClient = async (providerInputs: z.infer<typeof DynamicSecretMongoDBSchema>) => {
    const isSrv = !providerInputs.port;
    const uri = isSrv
      ? `mongodb+srv://${providerInputs.hostIp}`
      : `mongodb://${providerInputs.hostIp}:${providerInputs.port}`;
      ? `mongodb+srv://${providerInputs.host}`
      : `mongodb://${providerInputs.host}:${providerInputs.port}`;

    const client = new MongoClient(uri, {
      auth: {
@@ -87,13 +87,13 @@ async function deleteRabbitMqUser({ axiosInstance, usernameToDelete }: TDeleteRa
export const RabbitMqProvider = (): TDynamicProviderFns => {
  const validateProviderInputs = async (inputs: unknown) => {
    const providerInputs = await DynamicSecretRabbitMqSchema.parseAsync(inputs);
    const [hostIp] = await verifyHostInputValidity(providerInputs.host);
    return { ...providerInputs, hostIp };
    await verifyHostInputValidity(providerInputs.host);
    return { ...providerInputs };
  };

  const $getClient = async (providerInputs: z.infer<typeof DynamicSecretRabbitMqSchema> & { hostIp: string }) => {
  const $getClient = async (providerInputs: z.infer<typeof DynamicSecretRabbitMqSchema>) => {
    const axiosInstance = axios.create({
      baseURL: `${providerInputs.hostIp}:${providerInputs.port}/api`,
      baseURL: `${providerInputs.host}:${providerInputs.port}/api`,
      auth: {
        username: providerInputs.username,
        password: providerInputs.password
@@ -36,7 +36,7 @@ export const SapAseProvider = (): TDynamicProviderFns => {
  const validateProviderInputs = async (inputs: unknown) => {
    const providerInputs = await DynamicSecretSapAseSchema.parseAsync(inputs);

    const [hostIp] = await verifyHostInputValidity(providerInputs.host);
    await verifyHostInputValidity(providerInputs.host);
    validateHandlebarTemplate("SAP ASE creation", providerInputs.creationStatement, {
      allowedExpressions: (val) => ["username", "password"].includes(val)
    });
@@ -45,16 +45,13 @@ export const SapAseProvider = (): TDynamicProviderFns => {
        allowedExpressions: (val) => ["username"].includes(val)
      });
    }
    return { ...providerInputs, hostIp };
    return { ...providerInputs };
  };

  const $getClient = async (
    providerInputs: z.infer<typeof DynamicSecretSapAseSchema> & { hostIp: string },
    useMaster?: boolean
  ) => {
  const $getClient = async (providerInputs: z.infer<typeof DynamicSecretSapAseSchema>, useMaster?: boolean) => {
    const connectionString =
      `DRIVER={FreeTDS};` +
      `SERVER=${providerInputs.hostIp};` +
      `SERVER=${providerInputs.host};` +
      `PORT=${providerInputs.port};` +
      `DATABASE=${useMaster ? "master" : providerInputs.database};` +
      `UID=${providerInputs.username};` +
@@ -37,7 +37,7 @@ export const SapHanaProvider = (): TDynamicProviderFns => {
  const validateProviderInputs = async (inputs: unknown) => {
    const providerInputs = await DynamicSecretSapHanaSchema.parseAsync(inputs);

    const [hostIp] = await verifyHostInputValidity(providerInputs.host);
    await verifyHostInputValidity(providerInputs.host);
    validateHandlebarTemplate("SAP Hana creation", providerInputs.creationStatement, {
      allowedExpressions: (val) => ["username", "password", "expiration"].includes(val)
    });
@@ -49,12 +49,12 @@ export const SapHanaProvider = (): TDynamicProviderFns => {
    validateHandlebarTemplate("SAP Hana revoke", providerInputs.revocationStatement, {
      allowedExpressions: (val) => ["username"].includes(val)
    });
    return { ...providerInputs, hostIp };
    return { ...providerInputs };
  };

  const $getClient = async (providerInputs: z.infer<typeof DynamicSecretSapHanaSchema> & { hostIp: string }) => {
  const $getClient = async (providerInputs: z.infer<typeof DynamicSecretSapHanaSchema>) => {
    const client = hdb.createClient({
      host: providerInputs.hostIp,
      host: providerInputs.host,
      port: providerInputs.port,
      user: providerInputs.username,
      password: providerInputs.password,
@@ -150,8 +150,10 @@ export const SqlDatabaseProvider = ({ gatewayService }: TSqlDatabaseProviderDTO)
    return { ...providerInputs, hostIp };
  };

  const $getClient = async (providerInputs: z.infer<typeof DynamicSecretSqlDBSchema>) => {
    const ssl = providerInputs.ca ? { rejectUnauthorized: false, ca: providerInputs.ca } : undefined;
  const $getClient = async (providerInputs: z.infer<typeof DynamicSecretSqlDBSchema> & { hostIp: string }) => {
    const ssl = providerInputs.ca
      ? { rejectUnauthorized: false, ca: providerInputs.ca, servername: providerInputs.host }
      : undefined;
    const isMsSQLClient = providerInputs.client === SqlProviders.MsSQL;

    const db = knex({
@@ -159,7 +161,7 @@ export const SqlDatabaseProvider = ({ gatewayService }: TSqlDatabaseProviderDTO)
      connection: {
        database: providerInputs.database,
        port: providerInputs.port,
        host: providerInputs.host,
        host: providerInputs.client === SqlProviders.Postgres ? providerInputs.hostIp : providerInputs.host,
        user: providerInputs.username,
        password: providerInputs.password,
        ssl,
@@ -209,8 +211,8 @@ export const SqlDatabaseProvider = ({ gatewayService }: TSqlDatabaseProviderDTO)
  const validateConnection = async (inputs: unknown) => {
    const providerInputs = await validateProviderInputs(inputs);
    let isConnected = false;
    const gatewayCallback = async (host = providerInputs.hostIp, port = providerInputs.port) => {
      const db = await $getClient({ ...providerInputs, port, host });
    const gatewayCallback = async (host = providerInputs.host, port = providerInputs.port) => {
      const db = await $getClient({ ...providerInputs, port, host, hostIp: providerInputs.hostIp });
      // oracle needs from keyword
      const testStatement = providerInputs.client === SqlProviders.Oracle ? "SELECT 1 FROM DUAL" : "SELECT 1";

@@ -1,11 +1,11 @@
import Redis from "ioredis";
import { Cluster, Redis } from "ioredis";
import { z } from "zod";

import { logger } from "@app/lib/logger";

import { BusEventSchema, TopicName } from "./types";

export const eventBusFactory = (redis: Redis) => {
export const eventBusFactory = (redis: Redis | Cluster) => {
  const publisher = redis.duplicate();
  // Duplicate the publisher to create a subscriber.
  // This is necessary because Redis does not allow a single connection to both publish and subscribe.
@@ -1,6 +1,6 @@
/* eslint-disable no-continue */
import { subject } from "@casl/ability";
import Redis from "ioredis";
import { Cluster, Redis } from "ioredis";

import { KeyStorePrefixes } from "@app/keystore/keystore";
import { logger } from "@app/lib/logger";
@@ -12,7 +12,7 @@ import { BusEvent, RegisteredEvent } from "./types";
const AUTH_REFRESH_INTERVAL = 60 * 1000;
const HEART_BEAT_INTERVAL = 15 * 1000;

export const sseServiceFactory = (bus: TEventBusService, redis: Redis) => {
export const sseServiceFactory = (bus: TEventBusService, redis: Redis | Cluster) => {
  const clients = new Set<EventStreamClient>();

  const heartbeatInterval = setInterval(() => {
@@ -3,7 +3,7 @@ import { Readable } from "node:stream";

import { MongoAbility, PureAbility } from "@casl/ability";
import { MongoQuery } from "@ucast/mongo2js";
import Redis from "ioredis";
import { Cluster, Redis } from "ioredis";
import { nanoid } from "nanoid";

import { ProjectType } from "@app/db/schemas";
@@ -65,7 +65,7 @@ export type EventStreamClient = {
  matcher: PureAbility;
};

export function createEventStreamClient(redis: Redis, options: IEventStreamClientOpts): EventStreamClient {
export function createEventStreamClient(redis: Redis | Cluster, options: IEventStreamClientOpts): EventStreamClient {
  const rules = options.registered.map((r) => {
    const secretPath = r.conditions?.secretPath;
    const hasConditions = r.conditions?.environmentSlug || r.conditions?.secretPath;
@@ -323,6 +323,8 @@ export const licenseServiceFactory = ({
      });
    }

    await updateSubscriptionOrgMemberCount(orgId);

    const {
      data: { url }
    } = await licenseServerCloudApi.request.post(
@@ -722,6 +724,16 @@ export const licenseServiceFactory = ({
    await keyStore.deleteItem(FEATURE_CACHE_KEY(orgId));
  };

  const getCustomerId = () => {
    if (!selfHostedLicense) return "unknown";
    return selfHostedLicense?.customerId;
  };

  const getLicenseId = () => {
    if (!selfHostedLicense) return "unknown";
    return selfHostedLicense?.licenseId;
  };

  return {
    generateOrgCustomerId,
    removeOrgCustomer,
@@ -736,6 +748,8 @@ export const licenseServiceFactory = ({
      return onPremFeatures;
    },
    getPlan,
    getCustomerId,
    getLicenseId,
    invalidateGetPlan,
    updateSubscriptionOrgMemberCount,
    refreshPlan,
@@ -59,7 +59,7 @@ type TSecretReplicationServiceFactoryDep = {
    TSecretVersionV2DALFactory,
    "find" | "insertMany" | "update" | "findLatestVersionMany"
  >;
  secretImportDAL: Pick<TSecretImportDALFactory, "find" | "updateById" | "findByFolderIds">;
  secretImportDAL: Pick<TSecretImportDALFactory, "find" | "updateById" | "findByFolderIds" | "findByIds">;
  folderDAL: Pick<
    TSecretFolderDALFactory,
    "findSecretPathByFolderIds" | "findBySecretPath" | "create" | "findOne" | "findByManySecretPath"
@@ -38,6 +38,7 @@ export const KeyStorePrefixes = {
  SyncSecretIntegrationLastRunTimestamp: (projectId: string, environmentSlug: string, secretPath: string) =>
    `sync-integration-last-run-${projectId}-${environmentSlug}-${secretPath}` as const,
  SecretSyncLock: (syncId: string) => `secret-sync-mutex-${syncId}` as const,
  AppConnectionConcurrentJobs: (connectionId: string) => `app-connection-concurrency-${connectionId}` as const,
  SecretRotationLock: (rotationId: string) => `secret-rotation-v2-mutex-${rotationId}` as const,
  SecretScanningLock: (dataSourceId: string, resourceExternalId: string) =>
    `secret-scanning-v2-mutex-${dataSourceId}-${resourceExternalId}` as const,
@@ -37,6 +37,8 @@ const envSchema = z
    .default("false")
    .transform((el) => el === "true"),
  REDIS_URL: zpStr(z.string().optional()),
  REDIS_USERNAME: zpStr(z.string().optional()),
  REDIS_PASSWORD: zpStr(z.string().optional()),
  REDIS_SENTINEL_HOSTS: zpStr(
    z
      .string()
@@ -49,6 +51,12 @@ const envSchema = z
  REDIS_SENTINEL_ENABLE_TLS: zodStrBool.optional().describe("Whether to use TLS/SSL for Redis Sentinel connection"),
  REDIS_SENTINEL_USERNAME: zpStr(z.string().optional().describe("Authentication username for Redis Sentinel")),
  REDIS_SENTINEL_PASSWORD: zpStr(z.string().optional().describe("Authentication password for Redis Sentinel")),
  REDIS_CLUSTER_HOSTS: zpStr(
    z
      .string()
      .optional()
      .describe("Comma-separated list of Redis Cluster host:port pairs. Eg: 192.168.65.254:6379,192.168.65.254:6380")
  ),
  HOST: zpStr(z.string().default("localhost")),
  DB_CONNECTION_URI: zpStr(z.string().describe("Postgres database connection string")).default(
    `postgresql://${process.env.DB_USER}:${process.env.DB_PASSWORD}@${process.env.DB_HOST}:${process.env.DB_PORT}/${process.env.DB_NAME}`
@@ -218,6 +226,8 @@ const envSchema = z
  ),
  PARAMS_FOLDER_SECRET_DETECTION_ENTROPY: z.coerce.number().optional().default(3.7),

  INFISICAL_PRIMARY_INSTANCE_URL: zpStr(z.string().optional()),

  // HSM
  HSM_LIB_PATH: zpStr(z.string().optional()),
  HSM_PIN: zpStr(z.string().optional()),
@@ -335,8 +345,8 @@ const envSchema = z
    "Either ENCRYPTION_KEY or ROOT_ENCRYPTION_KEY must be defined."
  )
  .refine(
    (data) => Boolean(data.REDIS_URL) || Boolean(data.REDIS_SENTINEL_HOSTS),
    "Either REDIS_URL or REDIS_SENTINEL_HOSTS must be defined."
    (data) => Boolean(data.REDIS_URL) || Boolean(data.REDIS_SENTINEL_HOSTS) || Boolean(data.REDIS_CLUSTER_HOSTS),
    "Either REDIS_URL, REDIS_SENTINEL_HOSTS or REDIS_CLUSTER_HOSTS must be defined."
  )
  .transform((data) => ({
    ...data,
@@ -346,7 +356,7 @@ const envSchema = z
      : undefined,
    isCloud: Boolean(data.LICENSE_SERVER_KEY),
    isSmtpConfigured: Boolean(data.SMTP_HOST),
    isRedisConfigured: Boolean(data.REDIS_URL || data.REDIS_SENTINEL_HOSTS),
    isRedisConfigured: Boolean(data.REDIS_URL || data.REDIS_SENTINEL_HOSTS || data.REDIS_CLUSTER_HOSTS),
    isDevelopmentMode: data.NODE_ENV === "development",
    isTestMode: data.NODE_ENV === "test",
    isRotationDevelopmentMode:
@@ -361,6 +371,12 @@ const envSchema = z
      const [host, port] = el.trim().split(":");
      return { host: host.trim(), port: Number(port.trim()) };
    }),
    REDIS_CLUSTER_HOSTS: data.REDIS_CLUSTER_HOSTS?.trim()
      ?.split(",")
      .map((el) => {
        const [host, port] = el.trim().split(":");
        return { host: host.trim(), port: Number(port.trim()) };
      }),
    isSecretScanningConfigured:
      Boolean(data.SECRET_SCANNING_GIT_APP_ID) &&
      Boolean(data.SECRET_SCANNING_PRIVATE_KEY) &&
@@ -2,6 +2,11 @@ import { Redis } from "ioredis";

export type TRedisConfigKeys = Partial<{
  REDIS_URL: string;
  REDIS_USERNAME: string;
  REDIS_PASSWORD: string;

  REDIS_CLUSTER_HOSTS: { host: string; port: number }[];

  REDIS_SENTINEL_HOSTS: { host: string; port: number }[];
  REDIS_SENTINEL_MASTER_NAME: string;
  REDIS_SENTINEL_ENABLE_TLS: boolean;
@@ -12,6 +17,15 @@ export type TRedisConfigKeys = Partial<{
export const buildRedisFromConfig = (cfg: TRedisConfigKeys) => {
  if (cfg.REDIS_URL) return new Redis(cfg.REDIS_URL, { maxRetriesPerRequest: null });

  if (cfg.REDIS_CLUSTER_HOSTS) {
    return new Redis.Cluster(cfg.REDIS_CLUSTER_HOSTS, {
      redisOptions: {
        username: cfg.REDIS_USERNAME,
        password: cfg.REDIS_PASSWORD
      }
    });
  }

  return new Redis({
    // the refine at top will catch this case
    sentinels: cfg.REDIS_SENTINEL_HOSTS!,
@@ -19,6 +33,8 @@ export const buildRedisFromConfig = (cfg: TRedisConfigKeys) => {
    maxRetriesPerRequest: null,
    sentinelUsername: cfg.REDIS_SENTINEL_USERNAME,
    sentinelPassword: cfg.REDIS_SENTINEL_PASSWORD,
    enableTLSForSentinelMode: cfg.REDIS_SENTINEL_ENABLE_TLS
    enableTLSForSentinelMode: cfg.REDIS_SENTINEL_ENABLE_TLS,
    username: cfg.REDIS_USERNAME,
    password: cfg.REDIS_PASSWORD
  });
};
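
A usage sketch of the builder above (addresses and password are made up). REDIS_CLUSTER_HOSTS arrives pre-parsed as { host, port } pairs from the env transform shown earlier:

// Illustration only — values are hypothetical.
const redis = buildRedisFromConfig({
  REDIS_CLUSTER_HOSTS: [
    { host: "10.0.0.1", port: 6379 },
    { host: "10.0.0.2", port: 6379 }
  ],
  REDIS_USERNAME: "default",
  REDIS_PASSWORD: "example-password"
});
// Returns an ioredis Redis.Cluster here; with REDIS_URL set instead, a single-node Redis client is returned.
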
@@ -250,8 +250,11 @@ const cryptographyFactory = () => {
    };
  };

  const encryptWithRootEncryptionKey = (data: string) => {
    const appCfg = getConfig();
  const encryptWithRootEncryptionKey = (
    data: string,
    appCfgOverride?: Pick<TEnvConfig, "ROOT_ENCRYPTION_KEY" | "ENCRYPTION_KEY">
  ) => {
    const appCfg = appCfgOverride || getConfig();
    const rootEncryptionKey = appCfg.ROOT_ENCRYPTION_KEY;
    const encryptionKey = appCfg.ENCRYPTION_KEY;

@@ -415,6 +415,7 @@ export const queueServiceFactory = (
  redisCfg: TRedisConfigKeys,
  { dbConnectionUrl, dbRootCert }: { dbConnectionUrl: string; dbRootCert?: string }
): TQueueServiceFactory => {
  const isClusterMode = Boolean(redisCfg?.REDIS_CLUSTER_HOSTS);
  const connection = buildRedisFromConfig(redisCfg);
  const queueContainer = {} as Record<
    QueueName,
@@ -457,6 +458,8 @@ export const queueServiceFactory = (
    }

    queueContainer[name] = new Queue(name as string, {
      // ref: docs.bullmq.io/bull/patterns/redis-cluster
      prefix: isClusterMode ? `{${name}}` : undefined,
      ...queueSettings,
      ...(crypto.isFipsModeEnabled()
        ? {
@@ -472,6 +475,7 @@ export const queueServiceFactory = (
    const appCfg = getConfig();
    if (appCfg.QUEUE_WORKERS_ENABLED && isQueueEnabled(name)) {
      workerContainer[name] = new Worker(name, jobFn, {
        prefix: isClusterMode ? `{${name}}` : undefined,
        ...queueSettings,
        ...(crypto.isFipsModeEnabled()
          ? {
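
The braced prefix above is a Redis Cluster hash tag: Redis hashes only the substring inside {} when assigning a key to a slot, so every key belonging to one queue lands on the same shard, which BullMQ requires (see the linked docs). Illustrative key shapes — assumed for explanation, not taken from the diff:

// With prefix "{secret-rotation}", BullMQ keys would look roughly like:
//   {secret-rotation}:secret-rotation:wait
//   {secret-rotation}:secret-rotation:active
// Both hash on "secret-rotation" and therefore map to the same cluster slot.
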
@@ -12,7 +12,7 @@ import type { FastifyRateLimitOptions } from "@fastify/rate-limit";
import ratelimiter from "@fastify/rate-limit";
import { fastifyRequestContext } from "@fastify/request-context";
import fastify from "fastify";
import { Redis } from "ioredis";
import { Cluster, Redis } from "ioredis";
import { Knex } from "knex";

import { HsmModule } from "@app/ee/services/hsm/hsm-types";
@@ -43,7 +43,7 @@ type TMain = {
  queue: TQueueServiceFactory;
  keyStore: TKeyStoreFactory;
  hsmModule: HsmModule;
  redis: Redis;
  redis: Redis | Cluster;
  envConfig: TEnvConfig;
  superAdminDAL: TSuperAdminDALFactory;
};
@@ -76,6 +76,7 @@ export const main = async ({
  server.setValidatorCompiler(validatorCompiler);
  server.setSerializerCompiler(serializerCompiler);

  // @ts-expect-error akhilmhdh: even on setting it fastify as Redis | Cluster it's throwing error
  server.decorate("redis", redis);
  server.addContentTypeParser("application/scim+json", { parseAs: "string" }, (_, body, done) => {
    try {
@@ -107,110 +107,117 @@ export const extractAuth = async (req: FastifyRequest, jwtSecret: string) => {
};

// ! Important: You can only 100% count on the `req.permission.orgId` field being present when the auth method is Identity Access Token (Machine Identity).
export const injectIdentity = fp(async (server: FastifyZodProvider) => {
  server.decorateRequest("auth", null);
  server.addHook("onRequest", async (req) => {
    const appCfg = getConfig();
export const injectIdentity = fp(
  async (server: FastifyZodProvider, opt: { shouldForwardWritesToPrimaryInstance?: boolean }) => {
    server.decorateRequest("auth", null);
    server.decorateRequest("shouldForwardWritesToPrimaryInstance", Boolean(opt.shouldForwardWritesToPrimaryInstance));
    server.addHook("onRequest", async (req) => {
      const appCfg = getConfig();

    if (req.url.includes(".well-known/est") || req.url.includes("/api/v3/auth/")) {
      return;
    }

    // Authentication is handled on a route-level here.
    if (req.url.includes("/api/v1/workflow-integrations/microsoft-teams/message-endpoint")) {
      return;
    }

    const { authMode, token, actor } = await extractAuth(req, appCfg.AUTH_SECRET);

    if (!authMode) return;

    switch (authMode) {
      case AuthMode.JWT: {
        const { user, tokenVersionId, orgId } = await server.services.authToken.fnValidateJwtIdentity(token);
        requestContext.set("orgId", orgId);
        req.auth = {
          authMode: AuthMode.JWT,
          user,
          userId: user.id,
          tokenVersionId,
          actor,
          orgId: orgId as string,
          authMethod: token.authMethod,
          isMfaVerified: token.isMfaVerified,
          token
        };
        break;
      if (opt.shouldForwardWritesToPrimaryInstance && req.method !== "GET") {
        return;
      }
      case AuthMode.IDENTITY_ACCESS_TOKEN: {
        const identity = await server.services.identityAccessToken.fnValidateIdentityAccessToken(token, req.realIp);
        const serverCfg = await getServerCfg();
        requestContext.set("orgId", identity.orgId);
        req.auth = {
          authMode: AuthMode.IDENTITY_ACCESS_TOKEN,
          actor,
          orgId: identity.orgId,
          identityId: identity.identityId,
          identityName: identity.name,
          authMethod: null,
          isInstanceAdmin: serverCfg?.adminIdentityIds?.includes(identity.identityId),
          token
        };
        if (token?.identityAuth?.oidc) {
          requestContext.set("identityAuthInfo", {
            identityId: identity.identityId,
            oidc: token?.identityAuth?.oidc
          });

      if (req.url.includes(".well-known/est") || req.url.includes("/api/v3/auth/")) {
        return;
      }

      // Authentication is handled on a route-level here.
      if (req.url.includes("/api/v1/workflow-integrations/microsoft-teams/message-endpoint")) {
        return;
      }

      const { authMode, token, actor } = await extractAuth(req, appCfg.AUTH_SECRET);

      if (!authMode) return;

      switch (authMode) {
        case AuthMode.JWT: {
          const { user, tokenVersionId, orgId } = await server.services.authToken.fnValidateJwtIdentity(token);
          requestContext.set("orgId", orgId);
          req.auth = {
            authMode: AuthMode.JWT,
            user,
            userId: user.id,
            tokenVersionId,
            actor,
            orgId: orgId as string,
            authMethod: token.authMethod,
            isMfaVerified: token.isMfaVerified,
            token
          };
          break;
        }
        if (token?.identityAuth?.kubernetes) {
          requestContext.set("identityAuthInfo", {
        case AuthMode.IDENTITY_ACCESS_TOKEN: {
          const identity = await server.services.identityAccessToken.fnValidateIdentityAccessToken(token, req.realIp);
          const serverCfg = await getServerCfg();
          requestContext.set("orgId", identity.orgId);
          req.auth = {
            authMode: AuthMode.IDENTITY_ACCESS_TOKEN,
            actor,
            orgId: identity.orgId,
            identityId: identity.identityId,
            kubernetes: token?.identityAuth?.kubernetes
          });
            identityName: identity.name,
            authMethod: null,
            isInstanceAdmin: serverCfg?.adminIdentityIds?.includes(identity.identityId),
            token
          };
          if (token?.identityAuth?.oidc) {
            requestContext.set("identityAuthInfo", {
              identityId: identity.identityId,
              oidc: token?.identityAuth?.oidc
            });
          }
          if (token?.identityAuth?.kubernetes) {
            requestContext.set("identityAuthInfo", {
              identityId: identity.identityId,
              kubernetes: token?.identityAuth?.kubernetes
            });
          }
          if (token?.identityAuth?.aws) {
            requestContext.set("identityAuthInfo", {
              identityId: identity.identityId,
              aws: token?.identityAuth?.aws
            });
          }
          break;
        }
        if (token?.identityAuth?.aws) {
          requestContext.set("identityAuthInfo", {
            identityId: identity.identityId,
            aws: token?.identityAuth?.aws
          });
        case AuthMode.SERVICE_TOKEN: {
          const serviceToken = await server.services.serviceToken.fnValidateServiceToken(token);
          requestContext.set("orgId", serviceToken.orgId);
          req.auth = {
            orgId: serviceToken.orgId,
            authMode: AuthMode.SERVICE_TOKEN as const,
            serviceToken,
            serviceTokenId: serviceToken.id,
            actor,
            authMethod: null,
            token
          };
          break;
        }
        break;
        case AuthMode.API_KEY: {
          const user = await server.services.apiKey.fnValidateApiKey(token as string);
          req.auth = {
            authMode: AuthMode.API_KEY as const,
            userId: user.id,
            actor,
            user,
            orgId: "API_KEY", // We set the orgId to an arbitrary value, since we can't link an API key to a specific org. We have to deprecate API keys soon!
            authMethod: null,
            token: token as string
          };
          break;
        }
        case AuthMode.SCIM_TOKEN: {
          const { orgId, scimTokenId } = await server.services.scim.fnValidateScimToken(token);
          requestContext.set("orgId", orgId);
          req.auth = { authMode: AuthMode.SCIM_TOKEN, actor, scimTokenId, orgId, authMethod: null };
          break;
        }
        default:
          throw new BadRequestError({ message: "Invalid token strategy provided" });
      }
      case AuthMode.SERVICE_TOKEN: {
        const serviceToken = await server.services.serviceToken.fnValidateServiceToken(token);
        requestContext.set("orgId", serviceToken.orgId);
        req.auth = {
          orgId: serviceToken.orgId,
          authMode: AuthMode.SERVICE_TOKEN as const,
          serviceToken,
          serviceTokenId: serviceToken.id,
          actor,
          authMethod: null,
          token
        };
        break;
      }
      case AuthMode.API_KEY: {
        const user = await server.services.apiKey.fnValidateApiKey(token as string);
        req.auth = {
          authMode: AuthMode.API_KEY as const,
          userId: user.id,
          actor,
          user,
          orgId: "API_KEY", // We set the orgId to an arbitrary value, since we can't link an API key to a specific org. We have to deprecate API keys soon!
          authMethod: null,
          token: token as string
        };
        break;
      }
      case AuthMode.SCIM_TOKEN: {
        const { orgId, scimTokenId } = await server.services.scim.fnValidateScimToken(token);
        requestContext.set("orgId", orgId);
        req.auth = { authMode: AuthMode.SCIM_TOKEN, actor, scimTokenId, orgId, authMethod: null };
        break;
      }
      default:
        throw new BadRequestError({ message: "Invalid token strategy provided" });
    }
  });
});
    });
  }
);
@@ -10,6 +10,10 @@ interface TAuthOptions {
export const verifyAuth =
  <T extends FastifyRequest>(authStrategies: AuthMode[], options: TAuthOptions = { requireOrg: true }) =>
  (req: T, _res: FastifyReply, done: HookHandlerDoneFunction) => {
    if (req.shouldForwardWritesToPrimaryInstance && req.method !== "GET") {
      return done();
    }

    if (!Array.isArray(authStrategies)) throw new Error("Auth strategy must be array");
    if (!req.auth) throw new UnauthorizedError({ message: "Token missing" });

backend/src/server/plugins/primary-forwarding-mode.ts (new file, 14 additions)
@@ -0,0 +1,14 @@
import replyFrom from "@fastify/reply-from";
import fp from "fastify-plugin";

export const forwardWritesToPrimary = fp(async (server, opt: { primaryUrl: string }) => {
  await server.register(replyFrom, {
    base: opt.primaryUrl
  });

  server.addHook("preValidation", async (request, reply) => {
    if (request.url.startsWith("/api") && ["POST", "PUT", "DELETE", "PATCH"].includes(request.method)) {
      return reply.from(request.url);
    }
  });
});
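
The real wiring for this plugin appears in registerRoutes further down; a condensed sketch of enabling it on a read replica (the URL below is a placeholder, not a value from the diff):

// Illustration only — the primary URL is hypothetical; see registerRoutes below for the actual call.
await server.register(forwardWritesToPrimary, { primaryUrl: "https://infisical-primary.internal" });
// GET requests keep being served locally; /api POST/PUT/PATCH/DELETE are proxied verbatim to the primary.
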
@@ -291,6 +291,8 @@ import { TSmtpService } from "@app/services/smtp/smtp-service";
import { invalidateCacheQueueFactory } from "@app/services/super-admin/invalidate-cache-queue";
import { TSuperAdminDALFactory } from "@app/services/super-admin/super-admin-dal";
import { getServerCfg, superAdminServiceFactory } from "@app/services/super-admin/super-admin-service";
import { offlineUsageReportDALFactory } from "@app/services/offline-usage-report/offline-usage-report-dal";
import { offlineUsageReportServiceFactory } from "@app/services/offline-usage-report/offline-usage-report-service";
import { telemetryDALFactory } from "@app/services/telemetry/telemetry-dal";
import { telemetryQueueServiceFactory } from "@app/services/telemetry/telemetry-queue";
import { telemetryServiceFactory } from "@app/services/telemetry/telemetry-service";
@@ -310,6 +312,7 @@ import { injectAssumePrivilege } from "../plugins/auth/inject-assume-privilege";
import { injectIdentity } from "../plugins/auth/inject-identity";
import { injectPermission } from "../plugins/auth/inject-permission";
import { injectRateLimits } from "../plugins/inject-rate-limits";
import { forwardWritesToPrimary } from "../plugins/primary-forwarding-mode";
import { registerV1Routes } from "./v1";
import { initializeOauthConfigSync } from "./v1/sso-router";
import { registerV2Routes } from "./v2";
@@ -385,6 +388,7 @@ export const registerRoutes = async (
  const reminderRecipientDAL = reminderRecipientDALFactory(db);

  const integrationDAL = integrationDALFactory(db);
  const offlineUsageReportDAL = offlineUsageReportDALFactory(db);
  const integrationAuthDAL = integrationAuthDALFactory(db);
  const webhookDAL = webhookDALFactory(db);
  const serviceTokenDAL = serviceTokenDALFactory(db);
@@ -555,20 +559,22 @@ export const registerRoutes = async (
    permissionService
  });

  const auditLogStreamService = auditLogStreamServiceFactory({
    licenseService,
    permissionService,
    auditLogStreamDAL,
    kmsService
  });

  const auditLogQueue = await auditLogQueueServiceFactory({
    auditLogDAL,
    queueService,
    projectDAL,
    licenseService,
    auditLogStreamDAL
    auditLogStreamService
  });

  const auditLogService = auditLogServiceFactory({ auditLogDAL, permissionService, auditLogQueue });
  const auditLogStreamService = auditLogStreamServiceFactory({
    licenseService,
    permissionService,
    auditLogStreamDAL
  });
  const secretApprovalPolicyService = secretApprovalPolicyServiceFactory({
    projectEnvDAL,
    secretApprovalPolicyApproverDAL: sapApproverDAL,
@@ -842,7 +848,14 @@ export const registerRoutes = async (
    licenseService,
    kmsService,
    microsoftTeamsService,
    invalidateCacheQueue
    invalidateCacheQueue,
    smtpService,
    tokenService
  });

  const offlineUsageReportService = offlineUsageReportServiceFactory({
    offlineUsageReportDAL,
    licenseService
  });

  const orgAdminService = orgAdminServiceFactory({
@@ -2003,6 +2016,7 @@ export const registerRoutes = async (
    apiKey: apiKeyService,
    authToken: tokenService,
    superAdmin: superAdminService,
    offlineUsageReport: offlineUsageReportService,
    project: projectService,
    projectMembership: projectMembershipService,
    projectKey: projectKeyService,
@@ -2135,8 +2149,14 @@ export const registerRoutes = async (
    user: userDAL,
    kmipClient: kmipClientDAL
  });
  const shouldForwardWritesToPrimaryInstance = Boolean(envConfig.INFISICAL_PRIMARY_INSTANCE_URL);
  if (shouldForwardWritesToPrimaryInstance) {
    logger.info(`Infisical primary instance is configured: ${envConfig.INFISICAL_PRIMARY_INSTANCE_URL}`);

    await server.register(injectIdentity, { userDAL, serviceTokenDAL });
    await server.register(forwardWritesToPrimary, { primaryUrl: envConfig.INFISICAL_PRIMARY_INSTANCE_URL as string });
  }

  await server.register(injectIdentity, { shouldForwardWritesToPrimaryInstance });
  await server.register(injectAssumePrivilege);
  await server.register(injectPermission);
  await server.register(injectRateLimits);
@@ -246,13 +246,6 @@ export const SanitizedDynamicSecretSchema = DynamicSecretsSchema.omit({
  metadata: ResourceMetadataSchema.optional()
});

export const SanitizedAuditLogStreamSchema = z.object({
  id: z.string(),
  url: z.string(),
  createdAt: z.date(),
  updatedAt: z.date()
});

export const SanitizedProjectSchema = ProjectsSchema.pick({
  id: true,
  name: true,
@@ -13,6 +13,7 @@ import { crypto } from "@app/lib/crypto/cryptography";
|
||||
import { BadRequestError } from "@app/lib/errors";
|
||||
import { invalidateCacheLimit, readLimit, writeLimit } from "@app/server/config/rateLimiter";
|
||||
import { addAuthOriginDomainCookie } from "@app/server/lib/cookie";
|
||||
import { GenericResourceNameSchema } from "@app/server/lib/schemas";
|
||||
import { getTelemetryDistinctId } from "@app/server/lib/telemetry";
|
||||
import { verifySuperAdmin } from "@app/server/plugins/auth/superAdmin";
|
||||
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
|
||||
@@ -53,7 +54,8 @@ export const registerAdminRouter = async (server: FastifyZodProvider) => {
|
||||
defaultAuthOrgAuthMethod: z.string().nullish(),
|
||||
isSecretScanningDisabled: z.boolean(),
|
||||
kubernetesAutoFetchServiceAccountToken: z.boolean(),
|
||||
paramsFolderSecretDetectionEnabled: z.boolean()
|
||||
paramsFolderSecretDetectionEnabled: z.boolean(),
|
||||
isOfflineUsageReportsEnabled: z.boolean()
|
||||
})
|
||||
})
|
||||
}
|
||||
@@ -69,7 +71,8 @@ export const registerAdminRouter = async (server: FastifyZodProvider) => {
        isMigrationModeOn: serverEnvs.MAINTENANCE_MODE,
        isSecretScanningDisabled: serverEnvs.DISABLE_SECRET_SCANNING,
        kubernetesAutoFetchServiceAccountToken: serverEnvs.KUBERNETES_AUTO_FETCH_SERVICE_ACCOUNT_TOKEN,
        paramsFolderSecretDetectionEnabled: serverEnvs.PARAMS_FOLDER_SECRET_DETECTION_ENABLED
        paramsFolderSecretDetectionEnabled: serverEnvs.PARAMS_FOLDER_SECRET_DETECTION_ENABLED,
        isOfflineUsageReportsEnabled: !!serverEnvs.LICENSE_KEY_OFFLINE
      }
    };
  }
@@ -215,7 +218,8 @@ export const registerAdminRouter = async (server: FastifyZodProvider) => {
            }),
            membershipId: z.string(),
            role: z.string(),
            roleId: z.string().nullish()
            roleId: z.string().nullish(),
            status: z.string().nullish()
          })
          .array(),
        projects: z
@@ -838,4 +842,121 @@ export const registerAdminRouter = async (server: FastifyZodProvider) => {
      };
    }
  });

  server.route({
    method: "POST",
    url: "/organization-management/organizations",
    config: {
      rateLimit: writeLimit
    },
    schema: {
      body: z.object({
        name: GenericResourceNameSchema,
        inviteAdminEmails: z.string().email().array().min(1)
      }),
      response: {
        200: z.object({
          organization: OrganizationsSchema
        })
      }
    },
    onRequest: (req, res, done) => {
      verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN])(req, res, () => {
        verifySuperAdmin(req, res, done);
      });
    },
    handler: async (req) => {
      const organization = await server.services.superAdmin.createOrganization(req.body, req.permission);
      return { organization };
    }
  });

  server.route({
    method: "POST",
    url: "/organization-management/organizations/:organizationId/memberships/:membershipId/resend-invite",
    config: {
      rateLimit: writeLimit
    },
    schema: {
      params: z.object({
        organizationId: z.string(),
        membershipId: z.string()
      }),
      response: {
        200: z.object({
          organizationMembership: OrgMembershipsSchema
        })
      }
    },
    onRequest: (req, res, done) => {
      verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN])(req, res, () => {
        verifySuperAdmin(req, res, done);
      });
    },
    handler: async (req) => {
      const organizationMembership = await server.services.superAdmin.resendOrgInvite(req.params, req.permission);
      return { organizationMembership };
    }
  });

  server.route({
    method: "POST",
    url: "/organization-management/organizations/:organizationId/access",
    config: {
      rateLimit: writeLimit
    },
    schema: {
      params: z.object({
        organizationId: z.string()
      }),
      response: {
        200: z.object({
          organizationMembership: OrgMembershipsSchema
        })
      }
    },
    onRequest: (req, res, done) => {
      verifyAuth([AuthMode.JWT])(req, res, () => {
        verifySuperAdmin(req, res, done);
      });
    },
    handler: async (req) => {
      const organizationMembership = await server.services.superAdmin.joinOrganization(
        req.params.organizationId,
        req.permission
      );
      return { organizationMembership };
    }
  });

  server.route({
    method: "POST",
    url: "/usage-report/generate",
    config: {
      rateLimit: writeLimit
    },
    schema: {
      response: {
        200: z.object({
          csvContent: z.string(),
          signature: z.string(),
          filename: z.string()
        })
      }
    },
    onRequest: (req, res, done) => {
      verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN])(req, res, () => {
        verifySuperAdmin(req, res, done);
      });
    },
    handler: async () => {
      const result = await server.services.offlineUsageReport.generateUsageReportCSV();

      return {
        csvContent: result.csvContent,
        signature: result.signature,
        filename: result.filename
      };
    }
  });
};
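// Illustrative call sketch (hedged: the exact path prefix depends on where this admin
// router is mounted, and a super-admin JWT is assumed):
//   curl -X POST <instance-url>/api/v1/admin/usage-report/generate \
//     -H "Authorization: Bearer <super-admin-token>"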

@@ -2,10 +2,13 @@ import fastifyMultipart from "@fastify/multipart";
import { z } from "zod";

import { BadRequestError } from "@app/lib/errors";
import { writeLimit } from "@app/server/config/rateLimiter";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
import { VaultMappingType } from "@app/services/external-migration/external-migration-types";
import {
  ExternalMigrationProviders,
  VaultMappingType
} from "@app/services/external-migration/external-migration-types";

const MB25_IN_BYTES = 26214400;

@@ -81,4 +84,33 @@ export const registerExternalMigrationRouter = async (server: FastifyZodProvider
      });
    }
  });

  server.route({
    method: "GET",
    url: "/custom-migration-enabled/:provider",
    config: {
      rateLimit: readLimit
    },
    schema: {
      params: z.object({
        provider: z.nativeEnum(ExternalMigrationProviders)
      }),
      response: {
        200: z.object({
          enabled: z.boolean()
        })
      }
    },
    onRequest: verifyAuth([AuthMode.JWT]),
    handler: async (req) => {
      const enabled = await server.services.migration.hasCustomVaultMigration({
        actorId: req.permission.id,
        actor: req.permission.type,
        actorOrgId: req.permission.orgId,
        actorAuthMethod: req.permission.authMethod,
        provider: req.params.provider
      });
      return { enabled };
    }
  });
};
@@ -600,7 +600,7 @@ export const appConnectionServiceFactory = ({
    azureClientSecrets: azureClientSecretsConnectionService(connectAppConnectionById, appConnectionDAL, kmsService),
    azureDevOps: azureDevOpsConnectionService(connectAppConnectionById, appConnectionDAL, kmsService),
    auth0: auth0ConnectionService(connectAppConnectionById, appConnectionDAL, kmsService),
    hcvault: hcVaultConnectionService(connectAppConnectionById),
    hcvault: hcVaultConnectionService(connectAppConnectionById, gatewayService),
    windmill: windmillConnectionService(connectAppConnectionById),
    teamcity: teamcityConnectionService(connectAppConnectionById),
    oci: ociConnectionService(connectAppConnectionById, licenseService),

@@ -91,7 +91,7 @@ export const validateAuth0ConnectionCredentials = async ({ credentials }: TAuth0
    };
  } catch (e: unknown) {
    throw new BadRequestError({
      message: (e as Error).message ?? `Unable to validate connection: verify credentials`
      message: (e as Error).message ?? "Unable to validate connection: verify credentials"
    });
  }
};

@@ -70,7 +70,7 @@ export const validateAzureAppConfigurationConnectionCredentials = async (
      tokenError = e;
    } else {
      throw new BadRequestError({
        message: `Unable to validate connection: verify credentials`
        message: "Unable to validate connection: verify credentials"
      });
    }
  }

@@ -186,7 +186,7 @@ export const validateAzureClientSecretsConnectionCredentials = async (config: TA
      tokenError = e;
    } else {
      throw new BadRequestError({
        message: `Unable to validate connection: verify credentials`
        message: "Unable to validate connection: verify credentials"
      });
    }
  }

@@ -204,7 +204,7 @@ export const validateAzureDevOpsConnectionCredentials = async (config: TAzureDev
      tokenError = e;
    } else {
      throw new BadRequestError({
        message: `Unable to validate connection: verify credentials`
        message: "Unable to validate connection: verify credentials"
      });
    }
  }

@@ -186,7 +186,7 @@ export const validateAzureKeyVaultConnectionCredentials = async (config: TAzureK
      tokenError = e;
    } else {
      throw new BadRequestError({
        message: `Unable to validate connection: verify credentials`
        message: "Unable to validate connection: verify credentials"
      });
    }
  }

@@ -82,7 +82,7 @@ export const validateCamundaConnectionCredentials = async (appConnection: TCamun
    };
  } catch (e: unknown) {
    throw new BadRequestError({
      message: `Unable to validate connection: verify credentials`
      message: "Unable to validate connection: verify credentials"
    });
  }
};

@@ -89,7 +89,7 @@ export const validateDatabricksConnectionCredentials = async (appConnection: TDa
    };
  } catch (e: unknown) {
    throw new BadRequestError({
      message: `Unable to validate connection: verify credentials`
      message: "Unable to validate connection: verify credentials"
    });
  }
};

@@ -114,7 +114,7 @@ export const validateGitHubRadarConnectionCredentials = async (config: TGitHubRa
  }

  throw new BadRequestError({
    message: `Unable to validate connection: verify credentials`
    message: "Unable to validate connection: verify credentials"
  });
}

@@ -447,7 +447,7 @@ export const validateGitHubConnectionCredentials = async (
  }

  throw new BadRequestError({
    message: `Unable to validate connection: verify credentials`
    message: "Unable to validate connection: verify credentials"
  });
}

@@ -1,18 +1,18 @@
import { AxiosError } from "axios";
import { AxiosError, AxiosRequestConfig, AxiosResponse } from "axios";
import https from "https";

import { verifyHostInputValidity } from "@app/ee/services/dynamic-secret/dynamic-secret-fns";
import { TGatewayServiceFactory } from "@app/ee/services/gateway/gateway-service";
import { request } from "@app/lib/config/request";
import { BadRequestError } from "@app/lib/errors";
import { removeTrailingSlash } from "@app/lib/fn";
import { GatewayProxyProtocol, withGatewayProxy } from "@app/lib/gateway";
import { logger } from "@app/lib/logger";
import { blockLocalAndPrivateIpAddresses } from "@app/lib/validator";
import { AppConnection } from "@app/services/app-connection/app-connection-enums";

import { HCVaultConnectionMethod } from "./hc-vault-connection-enums";
import {
  THCVaultConnection,
  THCVaultConnectionConfig,
  THCVaultMountResponse,
  TValidateHCVaultConnectionCredentials
} from "./hc-vault-connection-types";
import { THCVaultConnection, THCVaultConnectionConfig, THCVaultMountResponse } from "./hc-vault-connection-types";

export const getHCVaultInstanceUrl = async (config: THCVaultConnectionConfig) => {
  const instanceUrl = removeTrailingSlash(config.credentials.instanceUrl);
@@ -37,7 +37,78 @@ type TokenRespData = {
  };
};

export const getHCVaultAccessToken = async (connection: TValidateHCVaultConnectionCredentials) => {
export const requestWithHCVaultGateway = async <T>(
  appConnection: { gatewayId?: string | null },
  gatewayService: Pick<TGatewayServiceFactory, "fnGetGatewayClientTlsByGatewayId">,
  requestConfig: AxiosRequestConfig
): Promise<AxiosResponse<T>> => {
  const { gatewayId } = appConnection;

  // If gateway isn't set up, don't proxy request
  if (!gatewayId) {
    return request.request(requestConfig);
  }

  const url = new URL(requestConfig.url as string);

  await blockLocalAndPrivateIpAddresses(url.toString());

  const [targetHost] = await verifyHostInputValidity(url.hostname, true);
  const relayDetails = await gatewayService.fnGetGatewayClientTlsByGatewayId(gatewayId);
  const [relayHost, relayPort] = relayDetails.relayAddress.split(":");

  return withGatewayProxy(
    async (proxyPort) => {
      const httpsAgent = new https.Agent({
        servername: targetHost
      });

      url.protocol = "https:";
      url.host = `localhost:${proxyPort}`;

      const finalRequestConfig: AxiosRequestConfig = {
        ...requestConfig,
        url: url.toString(),
        httpsAgent,
        headers: {
          ...requestConfig.headers,
          Host: targetHost
        }
      };

      try {
        return await request.request(finalRequestConfig);
      } catch (error) {
        if (error instanceof AxiosError) {
          logger.error(
            { message: error.message, data: (error.response as undefined | { data: unknown })?.data },
            "Error during HashiCorp Vault gateway request:"
          );
        }
        throw error;
      }
    },
    {
      protocol: GatewayProxyProtocol.Tcp,
      targetHost,
      targetPort: url.port ? Number(url.port) : 8200, // 8200 is the default port for Vault self-hosted/dedicated
      relayHost,
      relayPort: Number(relayPort),
      identityId: relayDetails.identityId,
      orgId: relayDetails.orgId,
      tlsOptions: {
        ca: relayDetails.certChain,
        cert: relayDetails.certificate,
        key: relayDetails.privateKey.toString()
      }
    }
  );
};
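// Illustrative usage sketch (assumption: connection.gatewayId and gatewayService come
// from the surrounding service; /v1/sys/health is a standard Vault endpoint):
//   const resp = await requestWithHCVaultGateway<{ initialized: boolean }>(
//     { gatewayId: connection.gatewayId },
//     gatewayService,
//     { url: `${instanceUrl}/v1/sys/health`, method: "GET" }
//   );
// With gatewayId unset, the call degrades to a plain direct request.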

export const getHCVaultAccessToken = async (
  connection: THCVaultConnection,
  gatewayService: Pick<TGatewayServiceFactory, "fnGetGatewayClientTlsByGatewayId">
) => {
  // Return access token directly if not using AppRole method
  if (connection.method !== HCVaultConnectionMethod.AppRole) {
    return connection.credentials.accessToken;
@@ -46,16 +117,16 @@ export const getHCVaultAccessToken = async (connection: TValidateHCVaultConnecti
  // Generate temporary token for AppRole method
  try {
    const { instanceUrl, roleId, secretId } = connection.credentials;
    const tokenResp = await request.post<TokenRespData>(
      `${removeTrailingSlash(instanceUrl)}/v1/auth/approle/login`,
      { role_id: roleId, secret_id: secretId },
      {
        headers: {
          "Content-Type": "application/json",
          ...(connection.credentials.namespace ? { "X-Vault-Namespace": connection.credentials.namespace } : {})
        }
      }
    );

    const tokenResp = await requestWithHCVaultGateway<TokenRespData>(connection, gatewayService, {
      url: `${removeTrailingSlash(instanceUrl)}/v1/auth/approle/login`,
      method: "POST",
      headers: {
        "Content-Type": "application/json",
        ...(connection.credentials.namespace ? { "X-Vault-Namespace": connection.credentials.namespace } : {})
      },
      data: { role_id: roleId, secret_id: secretId }
    });

    if (tokenResp.status !== 200) {
      throw new BadRequestError({
@@ -71,38 +142,55 @@ export const getHCVaultAccessToken = async (connection: TValidateHCVaultConnecti
  }
};

export const validateHCVaultConnectionCredentials = async (config: THCVaultConnectionConfig) => {
  const instanceUrl = await getHCVaultInstanceUrl(config);
export const validateHCVaultConnectionCredentials = async (
  connection: THCVaultConnection,
  gatewayService: Pick<TGatewayServiceFactory, "fnGetGatewayClientTlsByGatewayId">
) => {
  const instanceUrl = await getHCVaultInstanceUrl(connection);

  try {
    const accessToken = await getHCVaultAccessToken(config);
    const accessToken = await getHCVaultAccessToken(connection, gatewayService);

    // Verify token
    await request.get(`${instanceUrl}/v1/auth/token/lookup-self`, {
    await requestWithHCVaultGateway(connection, gatewayService, {
      url: `${instanceUrl}/v1/auth/token/lookup-self`,
      method: "GET",
      headers: { "X-Vault-Token": accessToken }
    });

    return config.credentials;
    return connection.credentials;
  } catch (error: unknown) {
    logger.error(error, "Unable to verify HC Vault connection");

    if (error instanceof AxiosError) {
      throw new BadRequestError({
        message: `Failed to validate credentials: ${error.message || "Unknown error"}`
      });
    }

    if (error instanceof BadRequestError) {
      throw error;
    }

    throw new BadRequestError({
      message: "Unable to validate connection: verify credentials"
    });
  }
};

export const listHCVaultMounts = async (appConnection: THCVaultConnection) => {
  const instanceUrl = await getHCVaultInstanceUrl(appConnection);
  const accessToken = await getHCVaultAccessToken(appConnection);
export const listHCVaultMounts = async (
  connection: THCVaultConnection,
  gatewayService: Pick<TGatewayServiceFactory, "fnGetGatewayClientTlsByGatewayId">
) => {
  const instanceUrl = await getHCVaultInstanceUrl(connection);
  const accessToken = await getHCVaultAccessToken(connection, gatewayService);

  const { data } = await request.get<THCVaultMountResponse>(`${instanceUrl}/v1/sys/mounts`, {
  const { data } = await requestWithHCVaultGateway<THCVaultMountResponse>(connection, gatewayService, {
    url: `${instanceUrl}/v1/sys/mounts`,
    method: "GET",
    headers: {
      "X-Vault-Token": accessToken,
      ...(appConnection.credentials.namespace ? { "X-Vault-Namespace": appConnection.credentials.namespace } : {})
      ...(connection.credentials.namespace ? { "X-Vault-Namespace": connection.credentials.namespace } : {})
    }
  });

@@ -55,11 +55,18 @@ export const HCVaultConnectionSchema = z.intersection(
export const SanitizedHCVaultConnectionSchema = z.discriminatedUnion("method", [
  BaseHCVaultConnectionSchema.extend({
    method: z.literal(HCVaultConnectionMethod.AccessToken),
    credentials: HCVaultConnectionAccessTokenCredentialsSchema.pick({})
    credentials: HCVaultConnectionAccessTokenCredentialsSchema.pick({
      namespace: true,
      instanceUrl: true
    })
  }),
  BaseHCVaultConnectionSchema.extend({
    method: z.literal(HCVaultConnectionMethod.AppRole),
    credentials: HCVaultConnectionAppRoleCredentialsSchema.pick({})
    credentials: HCVaultConnectionAppRoleCredentialsSchema.pick({
      namespace: true,
      instanceUrl: true,
      roleId: true
    })
  })
]);
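// Note: the sanitized schemas now surface only non-sensitive connection fields
// (namespace, instanceUrl, and roleId for AppRole); access tokens and secret IDs
// remain excluded from API responses.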

@@ -81,7 +88,7 @@ export const ValidateHCVaultConnectionCredentialsSchema = z.discriminatedUnion("
]);

export const CreateHCVaultConnectionSchema = ValidateHCVaultConnectionCredentialsSchema.and(
  GenericCreateAppConnectionFieldsSchema(AppConnection.HCVault)
  GenericCreateAppConnectionFieldsSchema(AppConnection.HCVault, { supportsGateways: true })
);

export const UpdateHCVaultConnectionSchema = z
@@ -91,7 +98,7 @@ export const UpdateHCVaultConnectionSchema = z
    .optional()
    .describe(AppConnections.UPDATE(AppConnection.HCVault).credentials)
  })
  .and(GenericUpdateAppConnectionFieldsSchema(AppConnection.HCVault));
  .and(GenericUpdateAppConnectionFieldsSchema(AppConnection.HCVault, { supportsGateways: true }));

export const HCVaultConnectionListItemSchema = z.object({
  name: z.literal("HCVault"),

@@ -1,3 +1,4 @@
import { TGatewayServiceFactory } from "@app/ee/services/gateway/gateway-service";
import { logger } from "@app/lib/logger";
import { OrgServiceActor } from "@app/lib/types";

@@ -11,12 +12,15 @@ type TGetAppConnectionFunc = (
  actor: OrgServiceActor
) => Promise<THCVaultConnection>;

export const hcVaultConnectionService = (getAppConnection: TGetAppConnectionFunc) => {
export const hcVaultConnectionService = (
  getAppConnection: TGetAppConnectionFunc,
  gatewayService: Pick<TGatewayServiceFactory, "fnGetGatewayClientTlsByGatewayId">
) => {
  const listMounts = async (connectionId: string, actor: OrgServiceActor) => {
    const appConnection = await getAppConnection(AppConnection.HCVault, connectionId, actor);

    try {
      const mounts = await listHCVaultMounts(appConnection);
      const mounts = await listHCVaultMounts(appConnection, gatewayService);
      return mounts;
    } catch (error) {
      logger.error(error, "Failed to establish connection with Hashicorp Vault");

@@ -56,7 +56,7 @@ const getConnectionConfig = ({
      ? {
          rejectUnauthorized: sslRejectUnauthorized,
          ca: sslCertificate,
          servername: host
          serverName: host
        }
      : false
  };
@@ -90,7 +90,7 @@ export const getSqlConnectionClient = async (appConnection: Pick<TSqlConnection,
    connection: {
      database,
      port,
      host,
      host: app === AppConnection.Postgres ? host : baseHost,
      user: username,
      password,
      connectionTimeoutMillis: EXTERNAL_REQUEST_TIMEOUT,
@@ -135,7 +135,7 @@ export const executeWithPotentialGateway = async <T>(
    },
    {
      protocol: GatewayProxyProtocol.Tcp,
      targetHost,
      targetHost: app === AppConnection.Postgres ? targetHost : credentials.host,
      targetPort: credentials.port,
      relayHost,
      relayPort: Number(relayPort),

@@ -453,23 +453,24 @@ export const authLoginServiceFactory = ({

    const selectedOrg = await orgDAL.findById(organizationId);

    if (!selectedOrgMembership) {
      throw new ForbiddenRequestError({
        message: `User does not have access to the organization named ${selectedOrg?.name}`
      });
    }

    // Check if authEnforced is true and the current auth method is not an enforced method
    if (
      selectedOrg.authEnforced &&
      !isAuthMethodSaml(decodedToken.authMethod) &&
      decodedToken.authMethod !== AuthMethod.OIDC
      decodedToken.authMethod !== AuthMethod.OIDC &&
      !(selectedOrg.bypassOrgAuthEnabled && selectedOrgMembership.userRole === OrgMembershipRole.Admin)
    ) {
      throw new BadRequestError({
        message: "Login with the auth method required by your organization."
      });
    }

    if (!selectedOrgMembership) {
      throw new ForbiddenRequestError({
        message: `User does not have access to the organization named ${selectedOrg?.name}`
      });
    }

    if (selectedOrg.googleSsoAuthEnforced && decodedToken.authMethod !== AuthMethod.GOOGLE) {
      const canBypass = selectedOrg.bypassOrgAuthEnabled && selectedOrgMembership.userRole === OrgMembershipRole.Admin;

@@ -1190,7 +1190,9 @@ export const internalCertificateAuthorityServiceFactory = ({
      });
    }

    collectionId = certificateTemplate.pkiCollectionId as string;
    if (!collectionId) {
      collectionId = certificateTemplate.pkiCollectionId as string;
    }
    ca = await certificateAuthorityDAL.findByIdWithAssociatedCa(certificateTemplate.caId);
  }

@@ -408,19 +408,123 @@ export const transformToInfisicalFormatNamespaceToProjects = (
  };
};

export const transformToInfisicalFormatKeyVaultToProjectsCustomC1 = (vaultData: VaultData[]): InfisicalImportData => {
  const projects: Array<{ name: string; id: string }> = [];
  const environments: Array<{ name: string; id: string; projectId: string; envParentId?: string }> = [];
  const folders: Array<{ id: string; name: string; environmentId: string; parentFolderId?: string }> = [];
  const secrets: Array<{ id: string; name: string; environmentId: string; value: string; folderId?: string }> = [];

  // track created entities to avoid duplicates
  const projectMap = new Map<string, string>(); // team name -> projectId
  const environmentMap = new Map<string, string>(); // team-name:envName -> environmentId
  const folderMap = new Map<string, string>(); // team-name:envName:folderPath -> folderId

  for (const data of vaultData) {
    const { path, secretData } = data;

    const pathParts = path.split("/").filter(Boolean);
    if (pathParts.length < 2) {
      // eslint-disable-next-line no-continue
      continue;
    }

    // first level: environment (dev, prod, staging, etc.)
    const environmentName = pathParts[0];
    // second level: team name (team1, team2, etc.)
    const teamName = pathParts[1];
    // remaining parts: folder structure
    const folderParts = pathParts.slice(2);

    // create project (team) if it doesn't exist
    if (!projectMap.has(teamName)) {
      const projectId = uuidv4();
      projectMap.set(teamName, projectId);
      projects.push({
        name: teamName,
        id: projectId
      });
    }
    const projectId = projectMap.get(teamName)!;

    // create environment (dev, prod, etc.) for team
    const envKey = `${teamName}:${environmentName}`;
    if (!environmentMap.has(envKey)) {
      const environmentId = uuidv4();
      environmentMap.set(envKey, environmentId);
      environments.push({
        name: environmentName,
        id: environmentId,
        projectId
      });
    }
    const environmentId = environmentMap.get(envKey)!;

    // create folder structure for path segments
    let currentFolderId: string | undefined;
    let currentPath = "";

    for (const folderName of folderParts) {
      currentPath = currentPath ? `${currentPath}/${folderName}` : folderName;
      const folderKey = `${teamName}:${environmentName}:${currentPath}`;

      if (!folderMap.has(folderKey)) {
        const folderId = uuidv4();
        folderMap.set(folderKey, folderId);
        folders.push({
          id: folderId,
          name: folderName,
          environmentId,
          parentFolderId: currentFolderId || environmentId
        });
        currentFolderId = folderId;
      } else {
        currentFolderId = folderMap.get(folderKey)!;
      }
    }

    for (const [key, value] of Object.entries(secretData)) {
      secrets.push({
        id: uuidv4(),
        name: key,
        environmentId,
        value: String(value),
        folderId: currentFolderId
      });
    }
  }

  return {
    projects,
    environments,
    folders,
    secrets
  };
};
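// Mapping example (illustrative values): a Vault path "dev/team1/payments/stripe" with
// secret data { API_KEY: "..." } becomes project "team1", environment "dev", nested
// folders "payments" -> "stripe", and one secret API_KEY in the innermost folder.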

// refer to internal doc for more details on which IDs belong to which orgs.
// when it's a custom migration, it doesn't matter which mapping type is used (as of now).
export const vaultMigrationTransformMappings: Record<
  string,
  (vaultData: VaultData[], mappingType: VaultMappingType) => InfisicalImportData
> = {
  "68c57ab3-cea5-41fc-ae38-e156b10c14d2": transformToInfisicalFormatKeyVaultToProjectsCustomC1
} as const;

export const importVaultDataFn = async (
  {
    vaultAccessToken,
    vaultNamespace,
    vaultUrl,
    mappingType,
    gatewayId
    gatewayId,
    orgId
  }: {
    vaultAccessToken: string;
    vaultNamespace?: string;
    vaultUrl: string;
    mappingType: VaultMappingType;
    gatewayId?: string;
    orgId: string;
  },
  { gatewayService }: { gatewayService: Pick<TGatewayServiceFactory, "fnGetGatewayClientTlsByGatewayId"> }
) => {
@@ -432,6 +536,25 @@ export const importVaultDataFn = async (
    });
  }

  let transformFn: (vaultData: VaultData[], mappingType: VaultMappingType) => InfisicalImportData;

  if (mappingType === VaultMappingType.Custom) {
    transformFn = vaultMigrationTransformMappings[orgId];

    if (!transformFn) {
      throw new BadRequestError({
        message: "Please contact our sales team to enable custom vault migrations."
      });
    }
  } else {
    transformFn = transformToInfisicalFormatNamespaceToProjects;
  }

  logger.info(
    { orgId, mappingType },
    `[importVaultDataFn]: Running ${orgId in vaultMigrationTransformMappings ? "custom" : "default"} transform`
  );

  const vaultApi = vaultFactory(gatewayService);

  const vaultData = await vaultApi.collectVaultData({
@@ -441,7 +564,5 @@ export const importVaultDataFn = async (
    gatewayId
  });

  const infisicalData = transformToInfisicalFormatNamespaceToProjects(vaultData, mappingType);

  return infisicalData;
  return transformFn(vaultData, mappingType);
};

@@ -5,9 +5,20 @@ import { crypto } from "@app/lib/crypto/cryptography";
|
||||
import { BadRequestError, ForbiddenRequestError } from "@app/lib/errors";
|
||||
|
||||
import { TUserDALFactory } from "../user/user-dal";
|
||||
import { decryptEnvKeyDataFn, importVaultDataFn, parseEnvKeyDataFn } from "./external-migration-fns";
|
||||
import {
|
||||
decryptEnvKeyDataFn,
|
||||
importVaultDataFn,
|
||||
parseEnvKeyDataFn,
|
||||
vaultMigrationTransformMappings
|
||||
} from "./external-migration-fns";
|
||||
import { TExternalMigrationQueueFactory } from "./external-migration-queue";
|
||||
import { ExternalPlatforms, TImportEnvKeyDataDTO, TImportVaultDataDTO } from "./external-migration-types";
|
||||
import {
|
||||
ExternalMigrationProviders,
|
||||
ExternalPlatforms,
|
||||
THasCustomVaultMigrationDTO,
|
||||
TImportEnvKeyDataDTO,
|
||||
TImportVaultDataDTO
|
||||
} from "./external-migration-types";
|
||||
|
||||
type TExternalMigrationServiceFactoryDep = {
|
||||
permissionService: TPermissionServiceFactory;
|
||||
@@ -101,7 +112,8 @@ export const externalMigrationServiceFactory = ({
|
||||
vaultNamespace,
|
||||
vaultUrl,
|
||||
mappingType,
|
||||
gatewayId
|
||||
gatewayId,
|
||||
orgId: actorOrgId
|
||||
},
|
||||
{
|
||||
gatewayService
|
||||
@@ -127,8 +139,37 @@ export const externalMigrationServiceFactory = ({
|
||||
});
|
||||
};
|
||||
|
||||
const hasCustomVaultMigration = async ({
|
||||
actor,
|
||||
actorId,
|
||||
actorOrgId,
|
||||
actorAuthMethod,
|
||||
provider
|
||||
}: THasCustomVaultMigrationDTO) => {
|
||||
const { membership } = await permissionService.getOrgPermission(
|
||||
actor,
|
||||
actorId,
|
||||
actorOrgId,
|
||||
actorAuthMethod,
|
||||
actorOrgId
|
||||
);
|
||||
|
||||
if (membership.role !== OrgMembershipRole.Admin) {
|
||||
throw new ForbiddenRequestError({ message: "Only admins can check custom migration status" });
|
||||
}
|
||||
|
||||
if (provider !== ExternalMigrationProviders.Vault) {
|
||||
throw new BadRequestError({
|
||||
message: "Invalid provider. Vault is the only supported provider for custom migrations."
|
||||
});
|
||||
}
|
||||
|
||||
return actorOrgId in vaultMigrationTransformMappings;
|
||||
};
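  // Access sketch: only org admins may call this, only for the Vault provider, and the
  // result is simply whether the caller's org id has a registered custom transform.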

  return {
    importEnvKeyData,
    importVaultData
    importVaultData,
    hasCustomVaultMigration
  };
};

@@ -4,7 +4,8 @@ import { ActorAuthMethod, ActorType } from "../auth/auth-type";
|
||||
|
||||
export enum VaultMappingType {
|
||||
Namespace = "namespace",
|
||||
KeyVault = "key-vault"
|
||||
KeyVault = "key-vault",
|
||||
Custom = "custom"
|
||||
}
|
||||
|
||||
export type InfisicalImportData = {
|
||||
@@ -26,6 +27,10 @@ export type TImportEnvKeyDataDTO = {
|
||||
encryptedJson: { nonce: string; data: string };
|
||||
} & Omit<TOrgPermission, "orgId">;
|
||||
|
||||
export type THasCustomVaultMigrationDTO = {
|
||||
provider: ExternalMigrationProviders;
|
||||
} & Omit<TOrgPermission, "orgId">;
|
||||
|
||||
export type TImportVaultDataDTO = {
|
||||
vaultAccessToken: string;
|
||||
vaultNamespace?: string;
|
||||
@@ -111,3 +116,8 @@ export enum ExternalPlatforms {
|
||||
EnvKey = "EnvKey",
|
||||
Vault = "Vault"
|
||||
}
|
||||
|
||||
export enum ExternalMigrationProviders {
|
||||
Vault = "vault",
|
||||
EnvKey = "env-key"
|
||||
}
|
||||
|
@@ -39,7 +39,7 @@ const getIntegrationSecretsV2 = async (
  },
  secretV2BridgeDAL: Pick<TSecretV2BridgeDALFactory, "find" | "findByFolderId">,
  folderDAL: Pick<TSecretFolderDALFactory, "findByManySecretPath">,
  secretImportDAL: Pick<TSecretImportDALFactory, "find" | "findByFolderIds">
  secretImportDAL: Pick<TSecretImportDALFactory, "find" | "findByFolderIds" | "findByIds">
) => {
  const content: Record<string, boolean> = {};
  if (dto.depth > MAX_SYNC_SECRET_DEPTH) {
@@ -300,7 +300,7 @@ export const deleteIntegrationSecrets = async ({
  projectBotService: Pick<TProjectBotServiceFactory, "getBotKey">;
  secretV2BridgeDAL: Pick<TSecretV2BridgeDALFactory, "find" | "findByFolderId">;
  folderDAL: Pick<TSecretFolderDALFactory, "findByManySecretPath" | "findBySecretPath">;
  secretImportDAL: Pick<TSecretImportDALFactory, "find" | "findByFolderIds">;
  secretImportDAL: Pick<TSecretImportDALFactory, "find" | "findByFolderIds" | "findByIds">;
  secretDAL: Pick<TSecretDALFactory, "findByFolderId">;
  kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">;
}) => {

@@ -40,7 +40,7 @@ type TIntegrationServiceFactoryDep = {
  projectBotService: TProjectBotServiceFactory;
  secretQueueService: Pick<TSecretQueueFactory, "syncIntegrations">;
  secretV2BridgeDAL: Pick<TSecretV2BridgeDALFactory, "find" | "findByFolderId">;
  secretImportDAL: Pick<TSecretImportDALFactory, "find" | "findByFolderIds">;
  secretImportDAL: Pick<TSecretImportDALFactory, "find" | "findByFolderIds" | "findByIds">;
  kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">;
  secretDAL: Pick<TSecretDALFactory, "findByFolderId">;
};

@@ -0,0 +1,208 @@
import { TDbClient } from "@app/db";
import { ProjectType, TableName } from "@app/db/schemas";

export type TOfflineUsageReportDALFactory = ReturnType<typeof offlineUsageReportDALFactory>;

export const offlineUsageReportDALFactory = (db: TDbClient) => {
  const getUserMetrics = async () => {
    // Get total users and admin users
    const userMetrics = (await db
      .from(TableName.Users)
      .select(
        db.raw(
          `
          COUNT(*) as total_users,
          COUNT(CASE WHEN "superAdmin" = true THEN 1 END) as admin_users
          `
        )
      )
      .where({ isGhost: false })
      .first()) as { total_users: string; admin_users: string } | undefined;

    // Get users by auth method
    const authMethodStats = (await db
      .from(TableName.Users)
      .select(
        db.raw(`
          unnest("authMethods") as auth_method,
          COUNT(*) as count
        `)
      )
      .where({ isGhost: false })
      .whereNotNull("authMethods")
      .groupBy(db.raw('unnest("authMethods")'))) as Array<{ auth_method: string; count: string }>;

    const usersByAuthMethod = authMethodStats.reduce(
      (acc: Record<string, number>, row: { auth_method: string; count: string }) => {
        acc[row.auth_method] = parseInt(row.count, 10);
        return acc;
      },
      {} as Record<string, number>
    );

    return {
      totalUsers: parseInt(userMetrics?.total_users || "0", 10),
      adminUsers: parseInt(userMetrics?.admin_users || "0", 10),
      usersByAuthMethod
    };
  };

  const getMachineIdentityMetrics = async () => {
    // Get total machine identities
    const identityMetrics = (await db
      .from(TableName.Identity)
      .select(
        db.raw(
          `
          COUNT(*) as total_identities
          `
        )
      )
      .first()) as { total_identities: string } | undefined;

    // Get identities by auth method
    const authMethodStats = (await db
      .from(TableName.Identity)
      .select("authMethod")
      .count("* as count")
      .whereNotNull("authMethod")
      .groupBy("authMethod")) as Array<{ authMethod: string; count: string }>;

    const machineIdentitiesByAuthMethod = authMethodStats.reduce(
      (acc: Record<string, number>, row: { authMethod: string; count: string }) => {
        acc[row.authMethod] = parseInt(row.count, 10);
        return acc;
      },
      {} as Record<string, number>
    );

    return {
      totalMachineIdentities: parseInt(identityMetrics?.total_identities || "0", 10),
      machineIdentitiesByAuthMethod
    };
  };

  const getProjectMetrics = async () => {
    // Get total projects and projects by type
    const projectMetrics = (await db
      .from(TableName.Project)
      .select("type")
      .count("* as count")
      .groupBy("type")) as Array<{ type: string; count: string }>;

    const totalProjects = projectMetrics.reduce(
      (sum, row: { type: string; count: string }) => sum + parseInt(row.count, 10),
      0
    );
    const projectsByType = projectMetrics.reduce(
      (acc: Record<string, number>, row: { type: string; count: string }) => {
        acc[row.type] = parseInt(row.count, 10);
        return acc;
      },
      {} as Record<string, number>
    );

    // Calculate average secrets per project
    const secretsPerProject = (await db
      .from(`${TableName.SecretV2} as s`)
      .select("p.id as projectId")
      .count("s.id as count")
      .leftJoin(`${TableName.SecretFolder} as sf`, "s.folderId", "sf.id")
      .leftJoin(`${TableName.Environment} as e`, "sf.envId", "e.id")
      .leftJoin(`${TableName.Project} as p`, "e.projectId", "p.id")
      .where("p.type", ProjectType.SecretManager)
      .groupBy("p.id")
      .whereNotNull("p.id")) as Array<{ projectId: string; count: string }>;

    const averageSecretsPerProject =
      secretsPerProject.length > 0
        ? secretsPerProject.reduce(
            (sum, row: { projectId: string; count: string }) => sum + parseInt(row.count, 10),
            0
          ) / secretsPerProject.length
        : 0;

    return {
      totalProjects,
      projectsByType,
      averageSecretsPerProject: Math.round(averageSecretsPerProject * 100) / 100
    };
  };

  const getSecretMetrics = async () => {
    // Get total secrets
    const totalSecretsResult = (await db.from(TableName.SecretV2).count("* as count").first()) as
      | { count: string }
      | undefined;

    const totalSecrets = parseInt(totalSecretsResult?.count || "0", 10);

    // Get secrets by project
    const secretsByProject = (await db
      .from(`${TableName.SecretV2} as s`)
      .select("p.id as projectId", "p.name as projectName")
      .count("s.id as secretCount")
      .leftJoin(`${TableName.SecretFolder} as sf`, "s.folderId", "sf.id")
      .leftJoin(`${TableName.Environment} as e`, "sf.envId", "e.id")
      .leftJoin(`${TableName.Project} as p`, "e.projectId", "p.id")
      .where("p.type", ProjectType.SecretManager)
      .groupBy("p.id", "p.name")
      .whereNotNull("p.id")) as Array<{ projectId: string; projectName: string; secretCount: string }>;

    return {
      totalSecrets,
      secretsByProject: secretsByProject.map(
        (row: { projectId: string; projectName: string; secretCount: string }) => ({
          projectId: row.projectId,
          projectName: row.projectName,
          secretCount: parseInt(row.secretCount, 10)
        })
      )
    };
  };

  const getSecretSyncMetrics = async () => {
    const totalSecretSyncsResult = (await db.from(TableName.SecretSync).count("* as count").first()) as
      | { count: string }
      | undefined;

    return {
      totalSecretSyncs: parseInt(totalSecretSyncsResult?.count || "0", 10)
    };
  };

  const getDynamicSecretMetrics = async () => {
    const totalDynamicSecretsResult = (await db.from(TableName.DynamicSecret).count("* as count").first()) as
      | { count: string }
      | undefined;

    return {
      totalDynamicSecrets: parseInt(totalDynamicSecretsResult?.count || "0", 10)
    };
  };

  const getSecretRotationMetrics = async () => {
    // Check both v1 and v2 secret rotation tables
    const [v1RotationsResult, v2RotationsResult] = await Promise.all([
      db.from(TableName.SecretRotation).count("* as count").first() as Promise<{ count: string } | undefined>,
      db.from(TableName.SecretRotationV2).count("* as count").first() as Promise<{ count: string } | undefined>
    ]);

    const totalV1Rotations = parseInt(v1RotationsResult?.count || "0", 10);
    const totalV2Rotations = parseInt(v2RotationsResult?.count || "0", 10);

    return {
      totalSecretRotations: totalV1Rotations + totalV2Rotations
    };
  };

  return {
    getUserMetrics,
    getMachineIdentityMetrics,
    getProjectMetrics,
    getSecretMetrics,
    getSecretSyncMetrics,
    getDynamicSecretMetrics,
    getSecretRotationMetrics
  };
};
@@ -0,0 +1,133 @@
import crypto from "crypto";

import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
import { getConfig } from "@app/lib/config/env";
import { BadRequestError } from "@app/lib/errors";

import { TOfflineUsageReportDALFactory } from "./offline-usage-report-dal";

type TOfflineUsageReportServiceFactoryDep = {
  offlineUsageReportDAL: TOfflineUsageReportDALFactory;
  licenseService: Pick<TLicenseServiceFactory, "getCustomerId" | "getLicenseId">;
};

export type TOfflineUsageReportServiceFactory = ReturnType<typeof offlineUsageReportServiceFactory>;

export const offlineUsageReportServiceFactory = ({
  offlineUsageReportDAL,
  licenseService
}: TOfflineUsageReportServiceFactoryDep) => {
  const signReportContent = (content: string, licenseId: string): string => {
    const contentHash = crypto.createHash("sha256").update(content).digest("hex");
    const hmac = crypto.createHmac("sha256", licenseId);
    hmac.update(contentHash);
    return hmac.digest("hex");
  };

  const verifyReportContent = (content: string, signature: string, licenseId: string): boolean => {
    const expectedSignature = signReportContent(content, licenseId);
    return signature === expectedSignature;
  };
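  // Signature scheme sketch: the report text is hashed with SHA-256, and the hex digest
  // is then HMAC-SHA256'd using the license ID as key, so verification needs only the
  // CSV text, the signature, and the license ID (illustrative values below):
  //   const sig = signReportContent("Total Users\n42", "license-123");
  //   verifyReportContent("Total Users\n42", sig, "license-123"); // => true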

  const generateUsageReportCSV = async () => {
    const cfg = getConfig();
    if (!cfg.LICENSE_KEY_OFFLINE) {
      throw new BadRequestError({
        message: "Offline usage reports are not enabled. LICENSE_KEY_OFFLINE must be configured."
      });
    }

    const customerId = licenseService.getCustomerId() as string;
    const licenseId = licenseService.getLicenseId();

    const [
      userMetrics,
      machineIdentityMetrics,
      projectMetrics,
      secretMetrics,
      secretSyncMetrics,
      dynamicSecretMetrics,
      secretRotationMetrics
    ] = await Promise.all([
      offlineUsageReportDAL.getUserMetrics(),
      offlineUsageReportDAL.getMachineIdentityMetrics(),
      offlineUsageReportDAL.getProjectMetrics(),
      offlineUsageReportDAL.getSecretMetrics(),
      offlineUsageReportDAL.getSecretSyncMetrics(),
      offlineUsageReportDAL.getDynamicSecretMetrics(),
      offlineUsageReportDAL.getSecretRotationMetrics()
    ]);

    const headers = [
      "Total Users",
      "Admin Users",
      "Total Identities",
      "Total Projects",
      "Total Secrets",
      "Total Secret Syncs",
      "Total Dynamic Secrets",
      "Total Secret Rotations",
      "Avg Secrets Per Project"
    ];

    const allUserAuthMethods = Object.keys(userMetrics.usersByAuthMethod);
    allUserAuthMethods.forEach((method) => {
      headers.push(`Users Auth ${method}`);
    });

    const allIdentityAuthMethods = Object.keys(machineIdentityMetrics.machineIdentitiesByAuthMethod);
    allIdentityAuthMethods.forEach((method) => {
      headers.push(`Identities Auth ${method}`);
    });

    const allProjectTypes = Object.keys(projectMetrics.projectsByType);
    allProjectTypes.forEach((type) => {
      headers.push(`Projects ${type}`);
    });

    headers.push("Signature");

    const dataRow: (string | number)[] = [
      userMetrics.totalUsers,
      userMetrics.adminUsers,
      machineIdentityMetrics.totalMachineIdentities,
      projectMetrics.totalProjects,
      secretMetrics.totalSecrets,
      secretSyncMetrics.totalSecretSyncs,
      dynamicSecretMetrics.totalDynamicSecrets,
      secretRotationMetrics.totalSecretRotations,
      projectMetrics.averageSecretsPerProject
    ];

    allUserAuthMethods.forEach((method) => {
      dataRow.push(userMetrics.usersByAuthMethod[method] || 0);
    });
    allIdentityAuthMethods.forEach((method) => {
      dataRow.push(machineIdentityMetrics.machineIdentitiesByAuthMethod[method] || 0);
    });

    allProjectTypes.forEach((type) => {
      dataRow.push(projectMetrics.projectsByType[type] || 0);
    });

    const headersWithoutSignature = headers.slice(0, -1);
    const contentWithoutSignature = [headersWithoutSignature.join(","), dataRow.join(",")].join("\n");
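    // Note: the signature is computed over the header row minus the trailing
    // "Signature" column plus the data row as it stands here, i.e. before the
    // signature value itself is appended to the row.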

    const signature = signReportContent(contentWithoutSignature, licenseId);
    dataRow.push(signature);

    const csvContent = [headers.join(","), dataRow.join(",")].join("\n");

    return {
      csvContent,
      signature,
      filename: `infisical-usage-report-${customerId}-${new Date().toISOString().split("T")[0]}.csv`
    };
  };

  return {
    generateUsageReportCSV,
    verifyReportSignature: (csvContent: string, signature: string, licenseId: string) =>
      verifyReportContent(csvContent, signature, licenseId)
  };
};
@@ -0,0 +1,42 @@
export interface TUsageMetrics {
  // User metrics
  totalUsers: number;
  usersByAuthMethod: Record<string, number>;
  adminUsers: number;

  // Machine identity metrics
  totalMachineIdentities: number;
  machineIdentitiesByAuthMethod: Record<string, number>;

  // Project metrics
  totalProjects: number;
  projectsByType: Record<string, number>;
  averageSecretsPerProject: number;

  // Secret metrics
  totalSecrets: number;
  totalSecretSyncs: number;
  totalDynamicSecrets: number;
  totalSecretRotations: number;
}

export interface TUsageReportMetadata {
  generatedAt: string;
  instanceId: string;
  reportVersion: string;
}

export interface TUsageReport {
  metadata: TUsageReportMetadata;
  metrics: TUsageMetrics;
  signature?: string;
}

export interface TGenerateUsageReportDTO {
  includeSignature?: boolean;
}

export interface TVerifyUsageReportDTO {
  reportData: string;
  signature: string;
}
@@ -83,6 +83,7 @@ export const orgDALFactory = (db: TDbClient) => {
        .select(db.ref("id").withSchema(TableName.OrgMembership).as("orgMembershipId"))
        .select(db.ref("role").withSchema(TableName.OrgMembership).as("orgMembershipRole"))
        .select(db.ref("roleId").withSchema(TableName.OrgMembership).as("orgMembershipRoleId"))
        .select(db.ref("status").withSchema(TableName.OrgMembership).as("orgMembershipStatus"))
        .select(db.ref("name").withSchema(TableName.OrgRoles).as("orgMembershipRoleName"));

      const formattedDocs = sqlNestRelationships({
@@ -112,7 +113,8 @@ export const orgDALFactory = (db: TDbClient) => {
          orgMembershipId,
          orgMembershipRole,
          orgMembershipRoleName,
          orgMembershipRoleId
          orgMembershipRoleId,
          orgMembershipStatus
        }) => ({
          user: {
            id: userId,
@@ -121,6 +123,7 @@ export const orgDALFactory = (db: TDbClient) => {
            firstName,
            lastName
          },
          status: orgMembershipStatus,
          membershipId: orgMembershipId,
          role: orgMembershipRoleName || orgMembershipRole, // custom role name or pre-defined role name
          roleId: orgMembershipRoleId
@@ -488,6 +491,15 @@ export const orgDALFactory = (db: TDbClient) => {
    }
  };

  const bulkCreateMemberships = async (data: TOrgMembershipsInsert[], tx?: Knex) => {
    try {
      const memberships = await (tx || db)(TableName.OrgMembership).insert(data).returning("*");
      return memberships;
    } catch (error) {
      throw new DatabaseError({ error, name: "Create org memberships" });
    }
  };

  const updateMembershipById = async (id: string, data: TOrgMembershipsUpdate, tx?: Knex) => {
    try {
      const [membership] = await (tx || db)(TableName.OrgMembership).where({ id }).update(data).returning("*");
@@ -668,6 +680,7 @@ export const orgDALFactory = (db: TDbClient) => {
    findMembership,
    findMembershipWithScimFilter,
    createMembership,
    bulkCreateMemberships,
    updateMembershipById,
    deleteMembershipById,
    deleteMembershipsById,

@@ -528,15 +528,18 @@ export const orgServiceFactory = ({
  /*
   * Create organization
   * */
  const createOrganization = async ({
    userId,
    userEmail,
    orgName
  }: {
    userId: string;
    orgName: string;
    userEmail?: string | null;
  }) => {
  const createOrganization = async (
    {
      userId,
      userEmail,
      orgName
    }: {
      userId?: string;
      orgName: string;
      userEmail?: string | null;
    },
    trx?: Knex
  ) => {
    const { privateKey, publicKey } = await crypto.encryption().asymmetric().generateKeyPair();
    const key = crypto.randomBytes(32).toString("base64");
    const {
@@ -555,22 +558,25 @@ export const orgServiceFactory = ({
    } = crypto.encryption().symmetric().encryptWithRootEncryptionKey(key);

    const customerId = await licenseService.generateOrgCustomerId(orgName, userEmail);
    const organization = await orgDAL.transaction(async (tx) => {

    const createOrg = async (tx: Knex) => {
      // akhilmhdh: for now this is auto created. in future we can input from user and for previous users just modify
      const org = await orgDAL.create(
        { name: orgName, customerId, slug: slugify(`${orgName}-${alphaNumericNanoId(4)}`) },
        tx
      );
      await orgDAL.createMembership(
        {
          userId,
          orgId: org.id,
          role: OrgMembershipRole.Admin,
          status: OrgMembershipStatus.Accepted,
          isActive: true
        },
        tx
      );
      if (userId) {
        await orgDAL.createMembership(
          {
            userId,
            orgId: org.id,
            role: OrgMembershipRole.Admin,
            status: OrgMembershipStatus.Accepted,
            isActive: true
          },
          tx
        );
      }
      await orgBotDAL.create(
        {
          name: org.name,
@@ -590,7 +596,9 @@ export const orgServiceFactory = ({
        tx
      );
      return org;
    });
    };

    const organization = await (trx ? createOrg(trx) : orgDAL.transaction(createOrg));
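    // createOrganization can now run inside a caller-supplied transaction (trx) and
    // tolerates a missing userId, e.g. when an organization is bootstrapped before
    // any member account exists.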

    await licenseService.updateSubscriptionOrgMemberCount(organization.id);
    return organization;

@@ -127,6 +127,27 @@ export const secretImportDALFactory = (db: TDbClient) => {
    }
  };

  const findByIds = async (ids: string[], tx?: Knex) => {
    try {
      const docs = await (tx || db.replicaNode())(TableName.SecretImport)
        .whereIn(`${TableName.SecretImport}.id`, ids)
        .join(TableName.Environment, `${TableName.SecretImport}.importEnv`, `${TableName.Environment}.id`)
        .select(
          db.ref("*").withSchema(TableName.SecretImport) as unknown as keyof TSecretImports,
          db.ref("slug").withSchema(TableName.Environment),
          db.ref("name").withSchema(TableName.Environment),
          db.ref("id").withSchema(TableName.Environment).as("envId")
        );

      return docs.map(({ envId, slug, name, ...el }) => ({
        ...el,
        importEnv: { id: envId, slug, name }
      }));
    } catch (error) {
      throw new DatabaseError({ error, name: "Find secret imports by ids" });
    }
  };

  const getProjectImportCount = async (
    { search, ...filter }: Partial<TSecretImports & { projectId: string; search?: string }>,
    tx?: Knex
@@ -325,6 +346,7 @@ export const secretImportDALFactory = (db: TDbClient) => {
    ...secretImportOrm,
    find,
    findById,
    findByIds,
    findByFolderIds,
    findLastImportPosition,
    updateAllPosition,

@@ -1,3 +1,5 @@
import RE2 from "re2";

import { SecretType, TSecretImports, TSecrets, TSecretsV2 } from "@app/db/schemas";
import { groupBy, unique } from "@app/lib/fn";

@@ -54,6 +56,74 @@ type TSecretImportSecretsV2 = {

const LEVEL_BREAK = 10;
const getImportUniqKey = (envSlug: string, path: string) => `${envSlug}=${path}`;
const RESERVED_IMPORT_REGEX = new RE2("/__reserve_replication_([a-f0-9-]{36})");
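// Example (illustrative id): "/__reserve_replication_1b9d6bcd-bbfd-4b2d-9b5d-ab8dfbbd4bed"
// matches, with the referenced secret-import id captured in group 1.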

/**
 * Processes reserved imports by resolving them to their replication source.
 */
const processReservedImports = async <
  T extends {
    isReserved?: boolean | null;
    importPath: string;
    importEnv: { id: string; slug: string; name: string };
    folderId: string;
  }
>(
  imports: T[],
  secretImportDAL: Pick<TSecretImportDALFactory, "findByIds">
): Promise<T[]> => {
  const reservedImportIds: string[] = [];

  imports.forEach((secretImport) => {
    if (secretImport.isReserved) {
      const reservedMatch = RESERVED_IMPORT_REGEX.exec(secretImport.importPath);
      if (reservedMatch) {
        const referencedImportId = reservedMatch[1];
        reservedImportIds.push(referencedImportId);
      }
    }
  });

  if (reservedImportIds.length === 0) {
    return imports;
  }

  try {
    const importDetailsMap = new Map<
      string,
      { importPath: string; importEnv: { id: string; slug: string; name: string } }
    >();

    const referencedImports = await secretImportDAL.findByIds(reservedImportIds);
    referencedImports.forEach((referencedImport) => {
      importDetailsMap.set(referencedImport.id, {
        importPath: referencedImport.importPath,
        importEnv: referencedImport.importEnv
      });
    });

    return imports.map((secretImport) => {
      if (secretImport.isReserved) {
        const reservedMatch = RESERVED_IMPORT_REGEX.exec(secretImport.importPath);
        if (reservedMatch) {
          const referencedImportId = reservedMatch[1];
          const referencedDetails = importDetailsMap.get(referencedImportId);

          if (referencedDetails) {
            return {
              ...secretImport,
              importPath: referencedDetails.importPath,
              importEnv: referencedDetails.importEnv
            };
          }
        }
      }
      return secretImport;
    });
  } catch (error) {
    return imports;
  }
};
|
||||
export const fnSecretsFromImports = async ({
|
||||
allowedImports: possibleCyclicImports,
|
||||
folderDAL,
|
||||
@@ -167,7 +237,7 @@ export const fnSecretsV2FromImports = async ({
|
||||
folderDAL: Pick<TSecretFolderDALFactory, "findByManySecretPath">;
|
||||
viewSecretValue: boolean;
|
||||
secretDAL: Pick<TSecretV2BridgeDALFactory, "find">;
|
||||
secretImportDAL: Pick<TSecretImportDALFactory, "findByFolderIds">;
|
||||
secretImportDAL: Pick<TSecretImportDALFactory, "findByFolderIds" | "findByIds">;
|
||||
decryptor: (value?: Buffer | null) => string;
|
||||
expandSecretReferences?: (inputSecret: {
|
||||
value?: string;
|
||||
@@ -188,6 +258,10 @@ export const fnSecretsV2FromImports = async ({
|
||||
})[];
|
||||
}[] = [{ secretImports: rootSecretImports, depth: 0, parentImportedSecrets: [] }];
|
||||
|
||||
const processedSecretImports = await processReservedImports(rootSecretImports, secretImportDAL);
|
||||
|
||||
stack[0] = { secretImports: processedSecretImports, depth: 0, parentImportedSecrets: [] };
|
||||
|
||||
const processedImports: TSecretImportSecretsV2[] = [];
|
||||
|
||||
while (stack.length) {
|
||||
|
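A note on the reserved-import resolution added above: a reserved import's path embeds the UUID of the replication source import, and `processReservedImports` swaps the synthetic path and environment for the source's real ones before traversal. A minimal, dependency-free sketch of that mapping, using a native `RegExp` in place of `RE2` and hypothetical sample data:

```ts
// Sketch only: native RegExp stands in for RE2; the data below is hypothetical.
const RESERVED_IMPORT_REGEX = /\/__reserve_replication_([a-f0-9-]{36})/;

type ImportRow = {
  isReserved?: boolean | null;
  importPath: string;
  importEnv: { id: string; slug: string; name: string };
};

// Pretend DAL lookup: import id -> the replication source import.
const sourceById = new Map([
  [
    "3f1c2d4e-5a6b-4c7d-8e9f-0a1b2c3d4e5f",
    { importPath: "/deep/nested", importEnv: { id: "env-1", slug: "prod", name: "Production" } }
  ]
]);

const resolveReserved = (rows: ImportRow[]): ImportRow[] =>
  rows.map((row) => {
    if (!row.isReserved) return row;
    const match = RESERVED_IMPORT_REGEX.exec(row.importPath);
    const source = match && sourceById.get(match[1]);
    // Fall back to the original row if the source import no longer exists.
    return source ? { ...row, ...source } : row;
  });

console.log(
  resolveReserved([
    {
      isReserved: true,
      importPath: "/__reserve_replication_3f1c2d4e-5a6b-4c7d-8e9f-0a1b2c3d4e5f",
      importEnv: { id: "env-2", slug: "dev", name: "Development" }
    }
  ])[0].importPath
); // -> "/deep/nested"
```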
@@ -12,7 +12,7 @@ type TAWSParameterStoreRecord = Record<string, AWS.SSM.Parameter>;
 type TAWSParameterStoreMetadataRecord = Record<string, AWS.SSM.ParameterMetadata>;
 type TAWSParameterStoreTagsRecord = Record<string, Record<string, string>>;

-const MAX_RETRIES = 5;
+const MAX_RETRIES = 10;
 const BATCH_SIZE = 10;

 const getSSM = async (secretSync: TAwsParameterStoreSyncWithCredentials) => {
@@ -38,7 +38,7 @@ type TAwsSecretsRecord = Record<string, SecretListEntry>;
 type TAwsSecretValuesRecord = Record<string, SecretValueEntry>;
 type TAwsSecretDescriptionsRecord = Record<string, DescribeSecretResponse>;

-const MAX_RETRIES = 5;
+const MAX_RETRIES = 10;
 const BATCH_SIZE = 20;

 const getSecretsManagerClient = async (secretSync: TAwsSecretsManagerSyncWithCredentials) => {
@@ -1,9 +1,13 @@
 import { isAxiosError } from "axios";

 import { request } from "@app/lib/config/request";
+import { TGatewayServiceFactory } from "@app/ee/services/gateway/gateway-service";
 import { removeTrailingSlash } from "@app/lib/fn";
 import { blockLocalAndPrivateIpAddresses } from "@app/lib/validator";
-import { getHCVaultAccessToken, getHCVaultInstanceUrl } from "@app/services/app-connection/hc-vault";
+import {
+  getHCVaultAccessToken,
+  getHCVaultInstanceUrl,
+  requestWithHCVaultGateway,
+  THCVaultConnection
+} from "@app/services/app-connection/hc-vault";
 import {
   THCVaultListVariables,
   THCVaultListVariablesResponse,
@@ -14,19 +18,20 @@ import { SecretSyncError } from "@app/services/secret-sync/secret-sync-errors";
 import { matchesSchema } from "@app/services/secret-sync/secret-sync-fns";
 import { TSecretMap } from "@app/services/secret-sync/secret-sync-types";

-const listHCVaultVariables = async ({ instanceUrl, namespace, mount, accessToken, path }: THCVaultListVariables) => {
-  await blockLocalAndPrivateIpAddresses(instanceUrl);
-
+const listHCVaultVariables = async (
+  { instanceUrl, namespace, mount, accessToken, path }: THCVaultListVariables,
+  connection: THCVaultConnection,
+  gatewayService: Pick<TGatewayServiceFactory, "fnGetGatewayClientTlsByGatewayId">
+) => {
   try {
-    const { data } = await request.get<THCVaultListVariablesResponse>(
-      `${instanceUrl}/v1/${removeTrailingSlash(mount)}/data/${path}`,
-      {
-        headers: {
-          "X-Vault-Token": accessToken,
-          ...(namespace ? { "X-Vault-Namespace": namespace } : {})
-        }
-      }
-    );
+    const { data } = await requestWithHCVaultGateway<THCVaultListVariablesResponse>(connection, gatewayService, {
+      url: `${instanceUrl}/v1/${removeTrailingSlash(mount)}/data/${path}`,
+      method: "GET",
+      headers: {
+        "X-Vault-Token": accessToken,
+        ...(namespace ? { "X-Vault-Namespace": namespace } : {})
+      }
+    });

     return data.data.data;
   } catch (error: unknown) {
@@ -39,33 +44,29 @@ const listHCVaultVariables = async ({ instanceUrl, namespace, mount, accessToken
 };

 // Hashicorp Vault updates all variables in one batch. This is to respect their versioning
-const updateHCVaultVariables = async ({
-  path,
-  instanceUrl,
-  namespace,
-  accessToken,
-  mount,
-  data
-}: TPostHCVaultVariable) => {
-  await blockLocalAndPrivateIpAddresses(instanceUrl);
-
-  return request.post(
-    `${instanceUrl}/v1/${removeTrailingSlash(mount)}/data/${path}`,
-    {
-      data
-    },
-    {
-      headers: {
-        "X-Vault-Token": accessToken,
-        ...(namespace ? { "X-Vault-Namespace": namespace } : {}),
-        "Content-Type": "application/json"
-      }
-    }
-  );
+const updateHCVaultVariables = async (
+  { path, instanceUrl, namespace, accessToken, mount, data }: TPostHCVaultVariable,
+  connection: THCVaultConnection,
+  gatewayService: Pick<TGatewayServiceFactory, "fnGetGatewayClientTlsByGatewayId">
+) => {
+  return requestWithHCVaultGateway(connection, gatewayService, {
+    url: `${instanceUrl}/v1/${removeTrailingSlash(mount)}/data/${path}`,
+    method: "POST",
+    headers: {
+      "X-Vault-Token": accessToken,
+      ...(namespace ? { "X-Vault-Namespace": namespace } : {}),
+      "Content-Type": "application/json"
+    },
+    data: { data }
+  });
 };

 export const HCVaultSyncFns = {
-  syncSecrets: async (secretSync: THCVaultSyncWithCredentials, secretMap: TSecretMap) => {
+  syncSecrets: async (
+    secretSync: THCVaultSyncWithCredentials,
+    secretMap: TSecretMap,
+    gatewayService: Pick<TGatewayServiceFactory, "fnGetGatewayClientTlsByGatewayId">
+  ) => {
     const {
       connection,
       environment,
@@ -74,16 +75,20 @@ export const HCVaultSyncFns = {
     } = secretSync;

     const { namespace } = connection.credentials;
-    const accessToken = await getHCVaultAccessToken(connection);
+    const accessToken = await getHCVaultAccessToken(connection, gatewayService);
     const instanceUrl = await getHCVaultInstanceUrl(connection);

-    const variables = await listHCVaultVariables({
-      instanceUrl,
-      accessToken,
-      namespace,
-      mount,
-      path
-    });
+    const variables = await listHCVaultVariables(
+      {
+        instanceUrl,
+        accessToken,
+        namespace,
+        mount,
+        path
+      },
+      connection,
+      gatewayService
+    );
     let tainted = false;

     for (const entry of Object.entries(secretMap)) {
@@ -110,24 +115,36 @@ export const HCVaultSyncFns = {
     if (!tainted) return;

     try {
-      await updateHCVaultVariables({ accessToken, instanceUrl, namespace, mount, path, data: variables });
+      await updateHCVaultVariables(
+        { accessToken, instanceUrl, namespace, mount, path, data: variables },
+        connection,
+        gatewayService
+      );
     } catch (error) {
       throw new SecretSyncError({
         error
       });
     }
   },
-  removeSecrets: async (secretSync: THCVaultSyncWithCredentials, secretMap: TSecretMap) => {
+  removeSecrets: async (
+    secretSync: THCVaultSyncWithCredentials,
+    secretMap: TSecretMap,
+    gatewayService: Pick<TGatewayServiceFactory, "fnGetGatewayClientTlsByGatewayId">
+  ) => {
     const {
       connection,
       destinationConfig: { mount, path }
     } = secretSync;

     const { namespace } = connection.credentials;
-    const accessToken = await getHCVaultAccessToken(connection);
+    const accessToken = await getHCVaultAccessToken(connection, gatewayService);
     const instanceUrl = await getHCVaultInstanceUrl(connection);

-    const variables = await listHCVaultVariables({ instanceUrl, namespace, accessToken, mount, path });
+    const variables = await listHCVaultVariables(
+      { instanceUrl, namespace, accessToken, mount, path },
+      connection,
+      gatewayService
+    );

     for await (const [key] of Object.entries(variables)) {
       if (key in secretMap) {
@@ -136,30 +153,41 @@ export const HCVaultSyncFns = {
     }

     try {
-      await updateHCVaultVariables({ accessToken, instanceUrl, namespace, mount, path, data: variables });
+      await updateHCVaultVariables(
+        { accessToken, instanceUrl, namespace, mount, path, data: variables },
+        connection,
+        gatewayService
+      );
     } catch (error) {
       throw new SecretSyncError({
         error
       });
     }
   },
-  getSecrets: async (secretSync: THCVaultSyncWithCredentials) => {
+  getSecrets: async (
+    secretSync: THCVaultSyncWithCredentials,
+    gatewayService: Pick<TGatewayServiceFactory, "fnGetGatewayClientTlsByGatewayId">
+  ) => {
     const {
       connection,
       destinationConfig: { mount, path }
     } = secretSync;

     const { namespace } = connection.credentials;
-    const accessToken = await getHCVaultAccessToken(connection);
+    const accessToken = await getHCVaultAccessToken(connection, gatewayService);
     const instanceUrl = await getHCVaultInstanceUrl(connection);

-    const variables = await listHCVaultVariables({
-      instanceUrl,
-      namespace,
-      accessToken,
-      mount,
-      path
-    });
+    const variables = await listHCVaultVariables(
+      {
+        instanceUrl,
+        namespace,
+        accessToken,
+        mount,
+        path
+      },
+      connection,
+      gatewayService
+    );

     return Object.fromEntries(Object.entries(variables).map(([key, value]) => [key, { value }]));
   }
@@ -244,7 +244,7 @@ export const SecretSyncFns = {
       case SecretSync.Windmill:
         return WindmillSyncFns.syncSecrets(secretSync, schemaSecretMap);
       case SecretSync.HCVault:
-        return HCVaultSyncFns.syncSecrets(secretSync, schemaSecretMap);
+        return HCVaultSyncFns.syncSecrets(secretSync, schemaSecretMap, gatewayService);
       case SecretSync.TeamCity:
         return TeamCitySyncFns.syncSecrets(secretSync, schemaSecretMap);
       case SecretSync.OCIVault:
@@ -283,7 +283,7 @@ export const SecretSyncFns = {
   },
   getSecrets: async (
     secretSync: TSecretSyncWithCredentials,
-    { kmsService, appConnectionDAL }: TSyncSecretDeps
+    { kmsService, appConnectionDAL, gatewayService }: TSyncSecretDeps
   ): Promise<TSecretMap> => {
     let secretMap: TSecretMap;
     switch (secretSync.destination) {
@@ -341,7 +341,7 @@ export const SecretSyncFns = {
         secretMap = await WindmillSyncFns.getSecrets(secretSync);
         break;
       case SecretSync.HCVault:
-        secretMap = await HCVaultSyncFns.getSecrets(secretSync);
+        secretMap = await HCVaultSyncFns.getSecrets(secretSync, gatewayService);
         break;
       case SecretSync.TeamCity:
         secretMap = await TeamCitySyncFns.getSecrets(secretSync);
@@ -451,7 +451,7 @@ export const SecretSyncFns = {
       case SecretSync.Windmill:
         return WindmillSyncFns.removeSecrets(secretSync, schemaSecretMap);
       case SecretSync.HCVault:
-        return HCVaultSyncFns.removeSecrets(secretSync, schemaSecretMap);
+        return HCVaultSyncFns.removeSecrets(secretSync, schemaSecretMap, gatewayService);
       case SecretSync.TeamCity:
         return TeamCitySyncFns.removeSecrets(secretSync, schemaSecretMap);
       case SecretSync.OCIVault:
@@ -80,8 +80,8 @@ type TSecretSyncQueueFactoryDep = {
     | "deleteMany"
     | "invalidateSecretCacheByProjectId"
   >;
-  secretImportDAL: Pick<TSecretImportDALFactory, "find" | "findByFolderIds">;
-  secretSyncDAL: Pick<TSecretSyncDALFactory, "findById" | "find" | "updateById" | "deleteById">;
+  secretImportDAL: Pick<TSecretImportDALFactory, "find" | "findByFolderIds" | "findByIds">;
+  secretSyncDAL: Pick<TSecretSyncDALFactory, "findById" | "find" | "updateById" | "deleteById" | "update">;
   auditLogService: Pick<TAuditLogServiceFactory, "createAuditLog">;
   projectMembershipDAL: Pick<TProjectMembershipDALFactory, "findAllProjectMembers">;
   projectDAL: TProjectDALFactory;
@@ -104,17 +104,15 @@ type SecretSyncActionJob = Job<
   TQueueSecretSyncSyncSecretsByIdDTO | TQueueSecretSyncImportSecretsByIdDTO | TQueueSecretSyncRemoveSecretsByIdDTO
 >;

+const JITTER_MS = 10 * 1000;
+const REQUEUE_MS = 30 * 1000;
+const REQUEUE_LIMIT = 30;
+const CONNECTION_CONCURRENCY_LIMIT = 3;
+
 const getRequeueDelay = (failureCount?: number) => {
-  if (!failureCount) return 0;
-
-  const baseDelay = 1000;
-  const maxDelay = 30000;
-
-  const delay = Math.min(baseDelay * 2 ** failureCount, maxDelay);
-
-  const jitter = delay * (0.5 + Math.random() * 0.5);
-
-  return jitter;
+  const jitter = Math.random() * JITTER_MS;
+  if (!failureCount) return jitter;
+  return REQUEUE_MS + jitter;
 };

 export const secretSyncQueueFactory = ({
@@ -193,6 +191,46 @@ export const secretSyncQueueFactory = ({
     folderCommitService
   });

+  const $isConnectionConcurrencyLimitReached = async (connectionId: string) => {
+    const concurrencyCount = await keyStore.getItem(KeyStorePrefixes.AppConnectionConcurrentJobs(connectionId));
+
+    if (!concurrencyCount) return false;
+
+    const count = Number.parseInt(concurrencyCount, 10);
+
+    if (Number.isNaN(count)) return false;
+
+    return count >= CONNECTION_CONCURRENCY_LIMIT;
+  };
+
+  const $incrementConnectionConcurrencyCount = async (connectionId: string) => {
+    const concurrencyCount = await keyStore.getItem(KeyStorePrefixes.AppConnectionConcurrentJobs(connectionId));
+
+    const currentCount = Number.parseInt(concurrencyCount || "0", 10);
+
+    const incrementedCount = Number.isNaN(currentCount) ? 1 : currentCount + 1;
+
+    await keyStore.setItemWithExpiry(
+      KeyStorePrefixes.AppConnectionConcurrentJobs(connectionId),
+      (REQUEUE_MS * REQUEUE_LIMIT) / 1000, // in seconds
+      incrementedCount
+    );
+  };
+
+  const $decrementConnectionConcurrencyCount = async (connectionId: string) => {
+    const concurrencyCount = await keyStore.getItem(KeyStorePrefixes.AppConnectionConcurrentJobs(connectionId));
+
+    const currentCount = Number.parseInt(concurrencyCount || "0", 10);
+
+    const decrementedCount = Math.max(0, Number.isNaN(currentCount) ? 0 : currentCount - 1);
+
+    await keyStore.setItemWithExpiry(
+      KeyStorePrefixes.AppConnectionConcurrentJobs(connectionId),
+      (REQUEUE_MS * REQUEUE_LIMIT) / 1000, // in seconds
+      decrementedCount
+    );
+  };
+
   const $getInfisicalSecrets = async (
     secretSync: TSecretSyncRaw | TSecretSyncWithCredentials,
     includeImports = true
@@ -416,15 +454,11 @@ export const secretSyncQueueFactory = ({
     return importedSecretMap;
   };

-  const $handleSyncSecretsJob = async (job: TSecretSyncSyncSecretsDTO) => {
+  const $handleSyncSecretsJob = async (job: TSecretSyncSyncSecretsDTO, secretSync: TSecretSyncRaw) => {
     const {
       data: { syncId, auditLogInfo }
     } = job;

-    const secretSync = await secretSyncDAL.findById(syncId);
-
-    if (!secretSync) throw new Error(`Cannot find secret sync with ID ${syncId}`);
-
     await enterpriseSyncCheck(
       licenseService,
       secretSync.destination as SecretSync,
@@ -566,15 +600,11 @@ export const secretSyncQueueFactory = ({
     logger.info("SecretSync Sync Job with ID %s Completed", job.id);
   };

-  const $handleImportSecretsJob = async (job: TSecretSyncImportSecretsDTO) => {
+  const $handleImportSecretsJob = async (job: TSecretSyncImportSecretsDTO, secretSync: TSecretSyncRaw) => {
     const {
       data: { syncId, auditLogInfo, importBehavior }
     } = job;

-    const secretSync = await secretSyncDAL.findById(syncId);
-
-    if (!secretSync) throw new Error(`Cannot find secret sync with ID ${syncId}`);
-
     await secretSyncDAL.updateById(syncId, {
       importStatus: SecretSyncStatus.Running
     });
@@ -683,15 +713,11 @@ export const secretSyncQueueFactory = ({
     logger.info("SecretSync Import Job with ID %s Completed", job.id);
   };

-  const $handleRemoveSecretsJob = async (job: TSecretSyncRemoveSecretsDTO) => {
+  const $handleRemoveSecretsJob = async (job: TSecretSyncRemoveSecretsDTO, secretSync: TSecretSyncRaw) => {
     const {
       data: { syncId, auditLogInfo, deleteSyncOnComplete }
     } = job;

-    const secretSync = await secretSyncDAL.findById(syncId);
-
-    if (!secretSync) throw new Error(`Cannot find secret sync with ID ${syncId}`);
-
     await enterpriseSyncCheck(
       licenseService,
       secretSync.destination as SecretSync,
@@ -894,6 +920,17 @@ export const secretSyncQueueFactory = ({

     const secretSyncs = await secretSyncDAL.find({ folderId: folder.id, isAutoSyncEnabled: true });

+    await secretSyncDAL.update(
+      {
+        $in: {
+          id: secretSyncs.map((sync) => sync.id)
+        }
+      },
+      {
+        syncStatus: SecretSyncStatus.Pending
+      }
+    );
+
     await Promise.all(secretSyncs.map((secretSync) => queueSecretSyncSyncSecretsById({ syncId: secretSync.id })));
   };

@@ -904,7 +941,7 @@ export const secretSyncQueueFactory = ({
       case QueueJobs.SecretSyncSyncSecrets: {
         const { failedToAcquireLockCount = 0, ...rest } = job.data as TQueueSecretSyncSyncSecretsByIdDTO;

-        if (failedToAcquireLockCount < 10) {
+        if (failedToAcquireLockCount < REQUEUE_LIMIT) {
           await queueSecretSyncSyncSecretsById({ ...rest, failedToAcquireLockCount: failedToAcquireLockCount + 1 });
           return;
         }
@@ -974,6 +1011,26 @@ export const secretSyncQueueFactory = ({
         | TQueueSecretSyncImportSecretsByIdDTO
         | TQueueSecretSyncRemoveSecretsByIdDTO;

+      const secretSync = await secretSyncDAL.findById(syncId);
+
+      if (!secretSync) throw new Error(`Cannot find secret sync with ID ${syncId}`);
+
+      const { connectionId } = secretSync;
+
+      if (job.name === QueueJobs.SecretSyncSyncSecrets) {
+        const isConcurrentLimitReached = await $isConnectionConcurrencyLimitReached(connectionId);
+
+        if (isConcurrentLimitReached) {
+          logger.info(
+            `SecretSync Concurrency limit reached [syncId=${syncId}] [job=${job.name}] [connectionId=${connectionId}]`
+          );
+
+          await $handleAcquireLockFailure(job as SecretSyncActionJob);
+
+          return;
+        }
+      }
+
       let lock: Awaited<ReturnType<typeof keyStore.acquireLock>>;

       try {
@@ -993,20 +1050,26 @@ export const secretSyncQueueFactory = ({

       try {
         switch (job.name) {
-          case QueueJobs.SecretSyncSyncSecrets:
-            await $handleSyncSecretsJob(job as TSecretSyncSyncSecretsDTO);
+          case QueueJobs.SecretSyncSyncSecrets: {
+            await $incrementConnectionConcurrencyCount(connectionId);
+            await $handleSyncSecretsJob(job as TSecretSyncSyncSecretsDTO, secretSync);
             break;
+          }
           case QueueJobs.SecretSyncImportSecrets:
-            await $handleImportSecretsJob(job as TSecretSyncImportSecretsDTO);
+            await $handleImportSecretsJob(job as TSecretSyncImportSecretsDTO, secretSync);
             break;
           case QueueJobs.SecretSyncRemoveSecrets:
-            await $handleRemoveSecretsJob(job as TSecretSyncRemoveSecretsDTO);
+            await $handleRemoveSecretsJob(job as TSecretSyncRemoveSecretsDTO, secretSync);
             break;
           default:
             // eslint-disable-next-line @typescript-eslint/restrict-template-expressions
             throw new Error(`Unhandled Secret Sync Job ${job.name}`);
         }
       } finally {
+        if (job.name === QueueJobs.SecretSyncSyncSecrets) {
+          await $decrementConnectionConcurrencyCount(connectionId);
+        }
+
         await lock.release();
       }
     });
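A quick illustration of the reworked requeue timing above: the first attempt is delayed only by a random jitter of up to 10 seconds, and every retry after a lock or concurrency failure waits a flat 30 seconds plus jitter, for at most `REQUEUE_LIMIT` (30) attempts. A minimal sketch reproducing that behavior, with the constants copied from the diff and an illustrative driver loop:

```ts
// Constants as introduced in the diff above.
const JITTER_MS = 10 * 1000;
const REQUEUE_MS = 30 * 1000;
const REQUEUE_LIMIT = 30;

// Same logic as the reworked getRequeueDelay.
const getRequeueDelay = (failureCount?: number): number => {
  const jitter = Math.random() * JITTER_MS;
  if (!failureCount) return jitter; // first attempt: jitter only
  return REQUEUE_MS + jitter; // retries: flat 30s plus jitter
};

// Illustrative only: the flat portion of the worst-case wait is
// REQUEUE_LIMIT * REQUEUE_MS = 30 * 30s = 15 minutes, which is the same
// window used as the keystore counter TTL: (REQUEUE_MS * REQUEUE_LIMIT) / 1000 = 900s.
for (let attempt = 0; attempt <= 3; attempt += 1) {
  console.log(`attempt ${attempt}: delay ~${Math.round(getRequeueDelay(attempt) / 1000)}s`);
}
```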
@@ -108,7 +108,7 @@ type TSecretV2BridgeServiceFactoryDep = {
     | "findBySecretPathMultiEnv"
     | "findSecretPathByFolderIds"
   >;
-  secretImportDAL: Pick<TSecretImportDALFactory, "find" | "findByFolderIds">;
+  secretImportDAL: Pick<TSecretImportDALFactory, "find" | "findByFolderIds" | "findByIds">;
   secretQueueService: Pick<TSecretQueueFactory, "syncSecrets" | "handleSecretReminder" | "removeSecretReminder">;
   secretApprovalPolicyService: Pick<TSecretApprovalPolicyServiceFactory, "getSecretApprovalPolicy">;
   secretApprovalRequestDAL: Pick<TSecretApprovalRequestDALFactory, "create" | "transaction">;
@@ -86,7 +86,7 @@ type TSecretQueueFactoryDep = {
   integrationAuthService: Pick<TIntegrationAuthServiceFactory, "getIntegrationAccessToken">;
   folderDAL: TSecretFolderDALFactory;
   secretDAL: TSecretDALFactory;
-  secretImportDAL: Pick<TSecretImportDALFactory, "find" | "findByFolderIds">;
+  secretImportDAL: Pick<TSecretImportDALFactory, "find" | "findByFolderIds" | "findByIds">;
   webhookDAL: Pick<TWebhookDALFactory, "findAllWebhooks" | "transaction" | "update" | "bulkUpdate">;
   projectEnvDAL: Pick<TProjectEnvDALFactory, "findOne" | "find">;
   projectDAL: TProjectDALFactory;
@@ -0,0 +1,69 @@
+import { Heading, Section, Text } from "@react-email/components";
+import React from "react";
+
+import { BaseButton } from "./BaseButton";
+import { BaseEmailWrapper, BaseEmailWrapperProps } from "./BaseEmailWrapper";
+import { BaseLink } from "./BaseLink";
+
+interface OrganizationAssignmentTemplateProps extends Omit<BaseEmailWrapperProps, "preview" | "title"> {
+  inviterFirstName?: string;
+  inviterUsername?: string;
+  organizationName: string;
+  callback_url: string;
+}
+
+export const OrganizationAssignmentTemplate = ({
+  organizationName,
+  inviterFirstName,
+  inviterUsername,
+  callback_url,
+  siteUrl
+}: OrganizationAssignmentTemplateProps) => {
+  return (
+    <BaseEmailWrapper
+      title="New Organization"
+      preview="You've been added to a new organization on Infisical."
+      siteUrl={siteUrl}
+    >
+      <Heading className="text-black text-[18px] leading-[28px] text-center font-normal p-0 mx-0">
+        You've been added to the organization
+        <br />
+        <strong>{organizationName}</strong> on <strong>Infisical</strong>
+      </Heading>
+      <Section className="px-[24px] mb-[28px] mt-[36px] pt-[12px] pb-[8px] border text-center border-solid border-gray-200 rounded-md bg-gray-50">
+        <Text className="text-black text-[14px] leading-[24px]">
+          {inviterFirstName && inviterUsername ? (
+            <>
+              <strong>{inviterFirstName}</strong> (
+              <BaseLink href={`mailto:${inviterUsername}`}>{inviterUsername}</BaseLink>) has added you as an
+              organization admin to <strong>{organizationName}</strong>.
+            </>
+          ) : (
+            <>
+              An instance admin has added you as an organization admin to <strong>{organizationName}</strong>.
+            </>
+          )}
+        </Text>
+      </Section>
+      <Section className="text-center">
+        <BaseButton href={callback_url}>View Dashboard</BaseButton>
+      </Section>
+      <Section className="mt-[24px] bg-gray-50 pt-[2px] pb-[16px] border border-solid border-gray-200 px-[24px] rounded-md text-gray-800">
+        <Text className="mb-[0px]">
+          <strong>About Infisical:</strong> Infisical is an all-in-one platform to securely manage application secrets,
+          certificates, SSH keys, and configurations across your team and infrastructure.
+        </Text>
+      </Section>
+    </BaseEmailWrapper>
+  );
+};
+
+export default OrganizationAssignmentTemplate;
+
+OrganizationAssignmentTemplate.PreviewProps = {
+  organizationName: "Example Organization",
+  inviterFirstName: "Jane",
+  inviterUsername: "jane@infisical.com",
+  siteUrl: "https://infisical.com",
+  callback_url: "https://app.infisical.com"
+} as OrganizationAssignmentTemplateProps;
@@ -9,6 +9,7 @@ export * from "./IntegrationSyncFailedTemplate";
 export * from "./NewDeviceLoginTemplate";
 export * from "./OrgAdminBreakglassAccessTemplate";
 export * from "./OrgAdminProjectGrantAccessTemplate";
+export * from "./OrganizationAssignmentTemplate";
 export * from "./OrganizationInvitationTemplate";
 export * from "./PasswordResetTemplate";
 export * from "./PasswordSetupTemplate";
@@ -18,6 +18,7 @@ import {
   NewDeviceLoginTemplate,
   OrgAdminBreakglassAccessTemplate,
   OrgAdminProjectGrantAccessTemplate,
+  OrganizationAssignmentTemplate,
   OrganizationInvitationTemplate,
   PasswordResetTemplate,
   PasswordSetupTemplate,
@@ -61,6 +62,7 @@ export enum SmtpTemplates {
   // HistoricalSecretList = "historicalSecretLeakIncident", not used anymore?
   NewDeviceJoin = "newDevice",
   OrgInvite = "organizationInvitation",
+  OrgAssignment = "organizationAssignment",
   ResetPassword = "passwordReset",
   SetupPassword = "passwordSetup",
   SecretLeakIncident = "secretLeakIncident",
@@ -94,6 +96,7 @@ export enum SmtpHost {
 // eslint-disable-next-line @typescript-eslint/no-explicit-any
 const EmailTemplateMap: Record<SmtpTemplates, React.FC<any>> = {
   [SmtpTemplates.OrgInvite]: OrganizationInvitationTemplate,
+  [SmtpTemplates.OrgAssignment]: OrganizationAssignmentTemplate,
   [SmtpTemplates.NewDeviceJoin]: NewDeviceLoginTemplate,
   [SmtpTemplates.SignupEmailVerification]: SignupEmailVerificationTemplate,
   [SmtpTemplates.EmailMfa]: EmailMfaTemplate,
@@ -1,6 +1,13 @@
 import { CronJob } from "cron";

-import { IdentityAuthMethod, OrgMembershipRole, TSuperAdmin, TSuperAdminUpdate } from "@app/db/schemas";
+import {
+  IdentityAuthMethod,
+  OrgMembershipRole,
+  OrgMembershipStatus,
+  TSuperAdmin,
+  TSuperAdminUpdate,
+  TUsers
+} from "@app/db/schemas";
 import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
 import { PgSqlLock, TKeyStoreFactory } from "@app/keystore/keystore";
 import {
@@ -13,7 +20,12 @@ import {
 import { crypto } from "@app/lib/crypto/cryptography";
 import { BadRequestError, NotFoundError } from "@app/lib/errors";
 import { logger } from "@app/lib/logger";
+import { OrgServiceActor } from "@app/lib/types";
+import { isDisposableEmail } from "@app/lib/validator";
+import { TAuthTokenServiceFactory } from "@app/services/auth-token/auth-token-service";
+import { TokenType } from "@app/services/auth-token/auth-token-types";
 import { TIdentityDALFactory } from "@app/services/identity/identity-dal";
+import { SmtpTemplates, TSmtpService } from "@app/services/smtp/smtp-service";

 import { TAuthLoginFactory } from "../auth/auth-login-service";
 import { ActorType, AuthMethod, AuthTokenType } from "../auth/auth-type";
@@ -43,7 +55,9 @@ import {
   TAdminGetUsersDTO,
   TAdminIntegrationConfig,
   TAdminSignUpDTO,
-  TGetOrganizationsDTO
+  TCreateOrganizationDTO,
+  TGetOrganizationsDTO,
+  TResendOrgInviteDTO
 } from "./super-admin-types";

 type TSuperAdminServiceFactoryDep = {
@@ -59,11 +73,13 @@ type TSuperAdminServiceFactoryDep = {
   authService: Pick<TAuthLoginFactory, "generateUserTokens">;
   kmsService: Pick<TKmsServiceFactory, "encryptWithRootKey" | "decryptWithRootKey" | "updateEncryptionStrategy">;
   kmsRootConfigDAL: TKmsRootConfigDALFactory;
-  orgService: Pick<TOrgServiceFactory, "createOrganization">;
+  orgService: Pick<TOrgServiceFactory, "createOrganization" | "inviteUserToOrganization">;
   keyStore: Pick<TKeyStoreFactory, "getItem" | "setItemWithExpiry" | "deleteItem" | "deleteItems">;
-  licenseService: Pick<TLicenseServiceFactory, "onPremFeatures">;
+  licenseService: Pick<TLicenseServiceFactory, "onPremFeatures" | "updateSubscriptionOrgMemberCount">;
   microsoftTeamsService: Pick<TMicrosoftTeamsServiceFactory, "initializeTeamsBot">;
   invalidateCacheQueue: TInvalidateCacheQueueFactory;
+  smtpService: Pick<TSmtpService, "sendMail">;
+  tokenService: TAuthTokenServiceFactory;
 };

 export type TSuperAdminServiceFactory = ReturnType<typeof superAdminServiceFactory>;
@@ -123,7 +139,9 @@ export const superAdminServiceFactory = ({
   identityTokenAuthDAL,
   identityOrgMembershipDAL,
   microsoftTeamsService,
-  invalidateCacheQueue
+  invalidateCacheQueue,
+  smtpService,
+  tokenService
 }: TSuperAdminServiceFactoryDep) => {
   const initServerCfg = async () => {
     // TODO(akhilmhdh): bad pattern time less change this later to me itself
@@ -732,6 +750,159 @@ export const superAdminServiceFactory = ({
     return organizations;
   };

+  const createOrganization = async (
+    { name, inviteAdminEmails: emails }: TCreateOrganizationDTO,
+    actor: OrgServiceActor
+  ) => {
+    const appCfg = getConfig();
+
+    const inviteAdminEmails = [...new Set(emails)];
+
+    if (!appCfg.isDevelopmentMode && appCfg.isCloud)
+      throw new BadRequestError({ message: "This endpoint is not supported for cloud instances" });
+
+    const serverAdmin = await userDAL.findById(actor.id);
+    const plan = licenseService.onPremFeatures;
+
+    const isEmailInvalid = await isDisposableEmail(inviteAdminEmails);
+    if (isEmailInvalid) {
+      throw new BadRequestError({
+        message: "Disposable emails are not allowed",
+        name: "InviteUser"
+      });
+    }
+
+    const { organization, users: usersToEmail } = await orgDAL.transaction(async (tx) => {
+      const org = await orgService.createOrganization(
+        {
+          orgName: name,
+          userEmail: serverAdmin?.email ?? serverAdmin?.username // identities can be server admins so we can't require this
+        },
+        tx
+      );
+
+      const users: Pick<TUsers, "id" | "firstName" | "lastName" | "email" | "username" | "isAccepted">[] = [];
+
+      for await (const inviteeEmail of inviteAdminEmails) {
+        const usersByUsername = await userDAL.findUserByUsername(inviteeEmail, tx);
+        let inviteeUser =
+          usersByUsername?.length > 1
+            ? usersByUsername.find((el) => el.username === inviteeEmail)
+            : usersByUsername?.[0];
+
+        // if the user doesn't exist we create the user with the email
+        if (!inviteeUser) {
+          // TODO(carlos): will be removed once the function receives usernames instead of emails
+          const usersByEmail = await userDAL.findUserByEmail(inviteeEmail, tx);
+          if (usersByEmail?.length === 1) {
+            [inviteeUser] = usersByEmail;
+          } else {
+            inviteeUser = await userDAL.create(
+              {
+                isAccepted: false,
+                email: inviteeEmail,
+                username: inviteeEmail,
+                authMethods: [AuthMethod.EMAIL],
+                isGhost: false
+              },
+              tx
+            );
+          }
+        }
+
+        const inviteeUserId = inviteeUser?.id;
+        const existingEncryptionKey = await userDAL.findUserEncKeyByUserId(inviteeUserId, tx);
+
+        // when the user is missing encryption keys
+        // this could happen either if the user doesn't exist or the user didn't finish step 3 of generating the SRP encryption keys
+        // So what we do is generate a random secure password and then encrypt it with a random pub-private key
+        // Then when the user signs in (as login is not possible while isAccepted is false) we re-encrypt the private key with the user password
+        if (!inviteeUser || (inviteeUser && !inviteeUser?.isAccepted && !existingEncryptionKey)) {
+          await userDAL.createUserEncryption(
+            {
+              userId: inviteeUserId,
+              encryptionVersion: 2
+            },
+            tx
+          );
+        }
+
+        if (plan?.slug !== "enterprise" && plan?.identityLimit && plan.identitiesUsed >= plan.identityLimit) {
+          // limit imposed on number of identities allowed / number of identities used exceeds the number of identities allowed
+          throw new BadRequestError({
+            name: "InviteUser",
+            message: "Failed to invite member due to member limit reached. Upgrade plan to invite more members."
+          });
+        }
+
+        await orgDAL.createMembership(
+          {
+            userId: inviteeUser.id,
+            inviteEmail: inviteeEmail,
+            orgId: org.id,
+            role: OrgMembershipRole.Admin,
+            status: inviteeUser.isAccepted ? OrgMembershipStatus.Accepted : OrgMembershipStatus.Invited,
+            isActive: true
+          },
+          tx
+        );
+
+        users.push(inviteeUser);
+      }
+
+      return { organization: org, users };
+    });
+
+    await licenseService.updateSubscriptionOrgMemberCount(organization.id);
+
+    await Promise.allSettled(
+      usersToEmail.map(async (user) => {
+        if (!user.email) return;
+
+        if (user.isAccepted) {
+          return smtpService.sendMail({
+            template: SmtpTemplates.OrgAssignment,
+            subjectLine: "You've been added to an Infisical organization",
+            recipients: [user.email],
+            substitutions: {
+              inviterFirstName: serverAdmin?.firstName,
+              inviterUsername: serverAdmin?.email,
+              organizationName: organization.name,
+              email: user.email,
+              organizationId: organization.id,
+              callback_url: `${appCfg.SITE_URL}/login?org_id=${organization.id}`
+            }
+          });
+        }
+
+        // new user, send regular invite
+        const token = await tokenService.createTokenForUser({
+          type: TokenType.TOKEN_EMAIL_ORG_INVITATION,
+          userId: user.id,
+          orgId: organization.id
+        });
+
+        return smtpService.sendMail({
+          template: SmtpTemplates.OrgInvite,
+          subjectLine: "Infisical organization invitation",
+          recipients: [user.email],
+          substitutions: {
+            inviterFirstName: serverAdmin?.firstName,
+            inviterUsername: serverAdmin?.email,
+            organizationName: organization.name,
+            email: user.email,
+            organizationId: organization.id,
+            token,
+            callback_url: `${appCfg.SITE_URL}/signupinvite`
+          }
+        });
+      })
+    );
+
+    return organization;
+  };
+
   const deleteOrganization = async (organizationId: string) => {
     const organization = await orgDAL.deleteById(organizationId);
     return organization;
@@ -763,6 +934,86 @@ export const superAdminServiceFactory = ({
     return organizationMembership;
   };

+  const joinOrganization = async (orgId: string, actor: OrgServiceActor) => {
+    const serverAdmin = await userDAL.findById(actor.id);
+
+    if (!serverAdmin) {
+      throw new NotFoundError({ message: "Could not find server admin user" });
+    }
+
+    const org = await orgDAL.findById(orgId);
+
+    if (!org) {
+      throw new NotFoundError({ message: `Could not find organization with ID "${orgId}"` });
+    }
+
+    const existingOrgMembership = await orgMembershipDAL.findOne({ userId: serverAdmin.id, orgId });
+
+    if (existingOrgMembership) {
+      throw new BadRequestError({ message: `You are already a part of the organization with ID ${orgId}` });
+    }
+
+    const orgMembership = await orgDAL.createMembership({
+      userId: serverAdmin.id,
+      orgId: org.id,
+      role: OrgMembershipRole.Admin,
+      status: OrgMembershipStatus.Accepted,
+      isActive: true
+    });
+
+    return orgMembership;
+  };
+
+  const resendOrgInvite = async ({ organizationId, membershipId }: TResendOrgInviteDTO, actor: OrgServiceActor) => {
+    const orgMembership = await orgMembershipDAL.findOne({ id: membershipId, orgId: organizationId });
+
+    if (!orgMembership) {
+      throw new NotFoundError({ name: "Organization Membership", message: "Organization membership not found" });
+    }
+
+    if (orgMembership.status === OrgMembershipStatus.Accepted) {
+      throw new BadRequestError({
+        message: "This user has already accepted their invitation."
+      });
+    }
+
+    if (!orgMembership.userId) {
+      throw new NotFoundError({ message: "Cannot find user associated with Org Membership." });
+    }
+
+    if (!orgMembership.inviteEmail) {
+      throw new BadRequestError({ message: "No invite email associated with user." });
+    }
+
+    const org = await orgDAL.findOrgById(orgMembership.orgId);
+
+    const appCfg = getConfig();
+    const serverAdmin = await userDAL.findById(actor.id);
+
+    const token = await tokenService.createTokenForUser({
+      type: TokenType.TOKEN_EMAIL_ORG_INVITATION,
+      userId: orgMembership.userId,
+      orgId: orgMembership.orgId
+    });
+
+    await smtpService.sendMail({
+      template: SmtpTemplates.OrgInvite,
+      subjectLine: "Infisical organization invitation",
+      recipients: [orgMembership.inviteEmail],
+      substitutions: {
+        inviterFirstName: serverAdmin?.firstName,
+        inviterUsername: serverAdmin?.email,
+        organizationName: org?.name,
+        email: orgMembership.inviteEmail,
+        organizationId: orgMembership.orgId,
+        token,
+        callback_url: `${appCfg.SITE_URL}/signupinvite`
+      }
+    });
+
+    return orgMembership;
+  };
+
   const getIdentities = async ({ offset, limit, searchTerm }: TAdminGetIdentitiesDTO) => {
     const identities = await identityDAL.getIdentitiesByFilter({
       limit,
@@ -901,6 +1152,9 @@ export const superAdminServiceFactory = ({
     initializeEnvConfigSync,
     getEnvOverrides,
     getEnvOverridesOrganized,
-    deleteUsers
+    deleteUsers,
+    createOrganization,
+    joinOrganization,
+    resendOrgInvite
   };
 };
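The invite flow added above branches on whether the target user already has an accepted Infisical account: existing members receive the new assignment email with a plain dashboard link, while unaccepted users receive the standard invitation email carrying a one-time signup token. A condensed, hypothetical sketch of that decision, with simplified types and stand-in helpers rather than the service's real signatures:

```ts
// Sketch of the email branch in createOrganization; types simplified, helpers hypothetical.
type InvitedUser = { id: string; email?: string | null; isAccepted: boolean };

const sendOrgEmail = async (
  user: InvitedUser,
  orgId: string,
  siteUrl: string,
  createToken: (userId: string, orgId: string) => Promise<string>,
  sendMail: (template: string, callbackUrl: string, token?: string) => Promise<void>
) => {
  if (!user.email) return; // nothing to send without an address

  if (user.isAccepted) {
    // Existing account: assignment notice linking straight to the org dashboard.
    return sendMail("organizationAssignment", `${siteUrl}/login?org_id=${orgId}`);
  }

  // New or unaccepted account: regular invite with a one-time signup token.
  const token = await createToken(user.id, orgId);
  return sendMail("organizationInvitation", `${siteUrl}/signupinvite`, token);
};
```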
@@ -34,6 +34,16 @@ export type TGetOrganizationsDTO = {
   searchTerm: string;
 };

+export type TCreateOrganizationDTO = {
+  name: string;
+  inviteAdminEmails: string[];
+};
+
+export type TResendOrgInviteDTO = {
+  organizationId: string;
+  membershipId: string;
+};
+
 export enum LoginMethod {
   EMAIL = "email",
   GOOGLE = "google",
docs/.eslintrc.js (new file, 40 lines)
@@ -0,0 +1,40 @@
+module.exports = {
+  env: {
+    browser: true,
+    es2021: true,
+    node: true,
+  },
+  extends: [
+    'eslint:recommended',
+    'plugin:react/recommended',
+    'plugin:react/jsx-runtime',
+  ],
+  parser: '@babel/eslint-parser',
+  parserOptions: {
+    ecmaVersion: 2021,
+    sourceType: 'module',
+    ecmaFeatures: {
+      jsx: true,
+    },
+    requireConfigFile: false,
+    babelOptions: {
+      presets: ['@babel/preset-react'],
+    },
+  },
+  plugins: ['react'],
+  rules: {
+    'react/jsx-uses-react': 'error',
+    'react/jsx-uses-vars': 'error',
+  },
+  settings: {
+    react: {
+      version: 'detect',
+    },
+  },
+  ignorePatterns: [
+    'node_modules/',
+    'dist/',
+    'build/',
+    '*.config.js',
+  ],
+};
@@ -98,6 +98,7 @@
     {
       "group": "App Connections",
      "pages": [
+        "integrations/app-connections",
        "integrations/app-connections/overview",
        {
          "group": "Connections",
@@ -184,6 +185,7 @@
    {
      "group": "User Authentication",
      "pages": [
+        "integrations/user-authentication",
        "documentation/platform/auth-methods/email-password",
        {
          "group": "SSO",
@@ -243,6 +245,7 @@
    {
      "group": "Machine Identities",
      "pages": [
+        "integrations/machine-authentication",
        "documentation/platform/identities/alicloud-auth",
        "documentation/platform/identities/aws-auth",
        "documentation/platform/identities/azure-auth",
@@ -417,6 +420,7 @@
    {
      "group": "Secret Rotation",
      "pages": [
+        "integrations/secret-rotations",
        "documentation/platform/secret-rotation/overview",
        "documentation/platform/secret-rotation/auth0-client-secret",
        "documentation/platform/secret-rotation/aws-iam-user-secret",
@@ -432,6 +436,7 @@
    {
      "group": "Dynamic Secrets",
      "pages": [
+        "integrations/dynamic-secrets",
        "documentation/platform/dynamic-secrets/overview",
        "documentation/platform/dynamic-secrets/aws-elasticache",
        "documentation/platform/dynamic-secrets/aws-iam",
@@ -502,6 +507,7 @@
    {
      "group": "Secret Syncs",
      "pages": [
+        "integrations/secret-syncs",
        "integrations/secret-syncs/overview",
        {
          "group": "Syncs",
@@ -607,6 +613,7 @@
    {
      "group": "Framework Integrations",
      "pages": [
+        "integrations/framework-integrations",
        "integrations/frameworks/spring-boot-maven",
        "integrations/frameworks/react",
        "integrations/frameworks/vue",
@@ -6,84 +6,133 @@ description: "Learn how to stream Infisical Audit Logs to external logging provi
 <Info>
   Audit log streams is a paid feature.

-  If you're using Infisical Cloud, then it is available under the **Enterprise Tier**. If you're self-hosting Infisical,
-  then you should contact team@infisical.com to purchase an enterprise license to use it.
+  If you're using Infisical Cloud, then it is available under the **Enterprise Tier**. If you're self-hosting Infisical, then you should contact team@infisical.com to purchase an enterprise license to use it.
 </Info>

-Infisical Audit Log Streaming enables you to transmit your organization's Audit Logs to external logging providers for monitoring and analysis.
-
-The logs are formatted in JSON, requiring your logging provider to support JSON-based log parsing.
+Infisical Audit Log Streaming enables you to transmit your organization's audit logs to external logging providers for monitoring and analysis.

 ## Overview

 <Steps>
-  <Step title="Navigate to Organization Settings in your sidebar." />
-  <Step title="Select Audit Log Streams Tab.">
-    
-  </Step>
-  <Step title="Click on Create">
-    
-
-    Provide the following values
-    <ParamField path="Endpoint URL" type="string" required>
-      The HTTPS endpoint URL of the logging provider that collects the JSON stream.
-    </ParamField>
-    <ParamField path="Headers" type="string" >
-      The HTTP headers for the logging provider for identification and authentication.
-    </ParamField>
+  <Step title="Create Stream">
+    1. Navigate to **Organization Settings**
+    2. Select the **Audit Log Streams** tab
+    3. Click **Add Log Stream**
+
+    
+  </Step>
+  <Step title="Select Provider">
+    If your log provider is included in this list, select it. Otherwise click on **Custom** to input your own Endpoint URL and headers.
+
+    
+  </Step>
+  <Step title="Input Credentials">
+    Depending on your chosen provider, you'll be asked to input different credentials.
+
+    For **Custom**, you need to input an endpoint URL and headers.
+
+    
+
+    Once you're finished, click **Create Log Stream**.
+  </Step>
+  <Step title="Log Stream Created">
+    Your audit logs are now ready to be streamed.
+
+    
   </Step>
 </Steps>

-
-Your Audit Logs are now ready to be streamed.
-
 ## Example Providers

-### Better Stack
-
-<Steps>
-  <Step title="Select Connect Source">
-    
-  </Step>
-  <Step title="Provide a name and select platform"/>
-  <Step title="Provide Audit Log Stream inputs">
-    
-
-    1. Copy the **endpoint** from Better Stack to the **Endpoint URL** field.
-    3. Create a new header with key **Authorization** and set the value as **Bearer \<source token from betterstack\>**.
-  </Step>
-</Steps>
-
-### Datadog
-
-<Steps>
-  <Step title="Navigate to API Keys section">
-    
-  </Step>
-  <Step title="Select New Key and provide a key name">
-    
-    
-  </Step>
-  <Step title="Find your Datadog region specific logging endpoint.">
-    
-    
-
-    1. Navigate to the [Datadog Send Logs API documentation](https://docs.datadoghq.com/api/latest/logs/?code-lang=curl&site=us5#send-logs).
-    2. Pick your Datadog account region.
-    3. Obtain your Datadog logging endpoint URL.
-  </Step>
-  <Step title="Provide audit log stream inputs">
-    
-    
-
-    1. Copy the **logging endpoint** from Datadog to the **Endpoint URL** field.
-    2. Copy the **API Key** from previous step
-    3. Create a new header with key **DD-API-KEY** and set the value as **API Key**.
-  </Step>
-</Steps>
-
-## Audit Log Stream Data
-
-Each log entry sent to the external logging provider will follow the same structure.
+<AccordionGroup>
+  <Accordion title="Better Stack">
+    You can stream to Better Stack using a **Custom** log stream.
+
+    <Steps>
+      <Step title="Connect Source">
+        On Better Stack, select **Connect Source** and click **Create source** after providing a name.
+
+        
+
+        Once your source is created, take note of the **endpoint** and **Source token** for the next step.
+      </Step>
+      <Step title="Create Audit Log Stream on Infisical">
+        On Infisical, create a new audit log stream and select the **Custom** option.
+
+        1. Fill in the endpoint URL with your Better Stack source endpoint
+        2. Create a new header with key `Authorization` and set the value as `Bearer <betterstack-src-token>`
+
+        
+
+        Once you're finished, click **Create Log Stream**.
+      </Step>
+    </Steps>
+  </Accordion>
+  <Accordion title="Datadog">
+    You can stream to Datadog using the **Datadog** provider log stream.
+
+    <Steps>
+      <Step title="Navigate to API Keys section">
+        
+      </Step>
+      <Step title="Select New Key and provide a key name">
+        
+        
+      </Step>
+      <Step title="Create Audit Log Stream on Infisical">
+        On Infisical, create a new audit log stream and select the **Datadog** provider option.
+
+        Input your **Datadog Region** and the **Token** obtained from step 2.
+
+        
+
+        Once you're finished, click **Create Log Stream**.
+      </Step>
+    </Steps>
+  </Accordion>
+  <Accordion title="Splunk">
+    You can stream to Splunk using the **Splunk** provider log stream.
+
+    <Steps>
+      <Step title="Obtain Splunk Token">
+        Navigate to **Settings** > **Data Inputs**.
+
+        
+
+        Click on **HTTP Event Collector**.
+
+        
+
+        Click on **New Token** in the top left.
+
+        
+
+        Provide a name and click **Next**.
+
+        
+
+        On the next page, click **Review** and then **Submit** at the top. On the final page you'll see your token.
+
+        Copy the **Token Value** and your Splunk hostname from the URL to be used for later.
+
+        
+      </Step>
+      <Step title="Create Audit Log Stream on Infisical">
+        On Infisical, create a new audit log stream and select the **Splunk** provider option.
+
+        Input your **Splunk Hostname** and the **Token** obtained from step 1.
+
+        
+
+        Once you're finished, click **Create Log Stream**.
+      </Step>
+    </Steps>
+  </Accordion>
+</AccordionGroup>

 ### Example Log Entry

@@ -117,106 +166,109 @@ Each log entry sent to the external logging provider will follow the same struct
 ```

 ### Audit Logs Structure

+<Warning>
+  Streamed audit log structure **varies based on provider**, but they all share the audit log fields shown below.
+</Warning>
+
 <ParamField path="id" type="string" required>
   The unique identifier for the log entry.
 </ParamField>

 <ParamField path="actor" type="platform | user | service | identity | scimClient | unknownUser" required>
   The entity responsible for performing or causing the event; this can be a user or service.
 </ParamField>

 <ParamField path="actorMetadata" type="object" required>
   The metadata associated with the actor. This varies based on the actor type.

+  <AccordionGroup>
     <Accordion title="User Metadata">
       This metadata is present when the `actor` field is set to `user`.

       <ParamField path="userId" type="string" required>
         The unique identifier for the actor.
       </ParamField>
       <ParamField path="email" type="string" required>
         The email address of the actor.
       </ParamField>
       <ParamField path="username" type="string" required>
         The username of the actor.
       </ParamField>
     </Accordion>
     <Accordion title="Identity Metadata">
       This metadata is present when the `actor` field is set to `identity`.

       <ParamField path="identityId" type="string" required>
         The unique identifier for the identity.
       </ParamField>
       <ParamField path="name" type="string" required>
         The name of the identity.
       </ParamField>
     </Accordion>
     <Accordion title="Service Token Metadata">
       This metadata is present when the `actor` field is set to `service`.

       <ParamField path="serviceId" type="string" required>
         The unique identifier for the service.
       </ParamField>
       <ParamField path="name" type="string" required>
         The name of the service.
       </ParamField>
     </Accordion>
+  </AccordionGroup>

   <Note>
     If the `actor` field is set to `platform`, `scimClient`, or `unknownUser`, the `actorMetadata` field will be an empty object.
   </Note>
 </ParamField>

 <ParamField path="ipAddress" type="string" required>
   The IP address of the actor.
 </ParamField>

 <ParamField path="eventType" type="string" required>
   The type of event that occurred. Below you can see a list of possible event types. More event types will be added in the future as we expand our audit logs further.

   `get-secrets`, `delete-secrets`, `get-secret`, `create-secret`, `update-secret`, `delete-secret`, `get-workspace-key`, `authorize-integration`, `update-integration-auth`, `unauthorize-integration`, `create-integration`, `delete-integration`, `add-trusted-ip`, `update-trusted-ip`, `delete-trusted-ip`, `create-service-token`, `delete-service-token`, `create-identity`, `update-identity`, `delete-identity`, `login-identity-universal-auth`, `add-identity-universal-auth`, `update-identity-universal-auth`, `get-identity-universal-auth`, `create-identity-universal-auth-client-secret`, `revoke-identity-universal-auth-client-secret`, `get-identity-universal-auth-client-secret`, `create-environment`, `update-environment`, `delete-environment`, `add-workspace-member`, `remove-workspace-member`, `create-folder`, `update-folder`, `delete-folder`, `create-webhook`, `update-webhook-status`, `delete-webhook`, `webhook-triggered`, `get-secret-imports`, `create-secret-import`, `update-secret-import`, `delete-secret-import`, `update-user-workspace-role`, `update-user-workspace-denied-permissions`, `create-certificate-authority`, `get-certificate-authority`, `update-certificate-authority`, `delete-certificate-authority`, `get-certificate-authority-csr`, `get-certificate-authority-cert`, `sign-intermediate`, `import-certificate-authority-cert`, `get-certificate-authority-crl`, `issue-cert`, `get-cert`, `delete-cert`, `revoke-cert`, `get-cert-body`, `create-pki-alert`, `get-pki-alert`, `update-pki-alert`, `delete-pki-alert`, `create-pki-collection`, `get-pki-collection`, `update-pki-collection`, `delete-pki-collection`, `get-pki-collection-items`, `add-pki-collection-item`, `delete-pki-collection-item`, `org-admin-accessed-project`, `create-certificate-template`, `update-certificate-template`, `delete-certificate-template`, `get-certificate-template`, `create-certificate-template-est-config`, `update-certificate-template-est-config`, `get-certificate-template-est-config`, `update-project-slack-config`, `get-project-slack-config`, `integration-synced`, `create-shared-secret`, `delete-shared-secret`, `read-shared-secret`.
 </ParamField>

 <ParamField path="eventMetadata" type="object" required>
   The metadata associated with the event. This varies based on the event type.
 </ParamField>

 <ParamField path="userAgent" type="string">
   The user agent of the actor, if applicable.
 </ParamField>

 <ParamField path="userAgentType" type="web | cli | k8-operator | terraform | other | InfisicalPythonSDK | InfisicalNodeSDK">
   The type of user agent.
 </ParamField>

 <ParamField path="expiresAt" type="string" required>
   The expiration date of the log entry. When this date is reached, the log entry will be deleted from Infisical.
 </ParamField>

 <ParamField path="createdAt" type="string" required>
   The creation date of the log entry.
 </ParamField>

 <ParamField path="updatedAt" type="string" required>
   The last update date of the log entry. This is unlikely to be out of sync with the `createdAt` field, as we do not update log entries after they've been created.
 </ParamField>

 <ParamField path="orgId" type="string" required>
   The unique identifier for the organization where the event occurred.
 </ParamField>

 <ParamField path="projectId" type="string">
   The unique identifier for the project where the event occurred.

   The `projectId` field will only be present if the event occurred at the project level, not the organization level.
 </ParamField>

 <ParamField path="projectName" type="string">
   The name of the project where the event occurred.

   The `projectName` field will only be present if the event occurred at the project level, not the organization level.
 </ParamField>
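To sanity-check a **Custom** stream endpoint before pointing Infisical at it, you can post a JSON payload shaped like the example entry above using the same headers you plan to configure. A hedged sketch; the endpoint URL and token below are placeholders, not real values:

```ts
// Hypothetical smoke test for a Custom log stream receiver; the URL and token are placeholders.
const endpointUrl = "https://logs.example.com/ingest"; // your provider's HTTPS collection endpoint
const headers = {
  "Content-Type": "application/json",
  Authorization: "Bearer <source-token>" // e.g. a Better Stack source token
};

// Minimal payload shaped like the audit log fields documented above.
const sampleEntry = {
  id: "00000000-0000-0000-0000-000000000000",
  actor: "user",
  actorMetadata: { userId: "user-id", email: "user@example.com", username: "user@example.com" },
  ipAddress: "203.0.113.10",
  eventType: "get-secrets",
  eventMetadata: {},
  orgId: "org-id",
  createdAt: new Date().toISOString()
};

void (async () => {
  const res = await fetch(endpointUrl, {
    method: "POST",
    headers,
    body: JSON.stringify(sampleEntry)
  });
  console.log(res.status); // a 2xx response means the provider accepted the entry
})();
```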
@@ -8,6 +8,7 @@ Every time a secret change is performed, a new version of the same secret is cre
 Such versions can be accessed visually by opening up the [secret sidebar](/documentation/platform/project#drawer) (as seen below) or [retrieved via API](/api-reference/endpoints/secrets/read)
 by specifying the `version` query parameter.

-
+
+
 The secret versioning functionality is heavily connected to [Point-in-time Recovery](/documentation/platform/pit-recovery) of secrets in Infisical.
Binary file not shown (image updated: 643 KiB before, 542 KiB after).
Some files were not shown because too many files have changed in this diff.