Mirror of https://github.com/Infisical/infisical.git (synced 2025-04-17 19:37:38 +00:00)
feat: added project level migrations for kms conversion
@@ -24,13 +24,13 @@ export default {
  transformMode: "ssr",
  async setup() {
    const logger = await initLogger();
-   const cfg = initEnvConfig(logger);
+   const envConfig = initEnvConfig(logger);
    const db = initDbConnection({
-     dbConnectionUri: cfg.DB_CONNECTION_URI,
-     dbRootCert: cfg.DB_ROOT_CERT
+     dbConnectionUri: envConfig.DB_CONNECTION_URI,
+     dbRootCert: envConfig.DB_ROOT_CERT
    });

-   const redis = new Redis(cfg.REDIS_URL);
+   const redis = new Redis(envConfig.REDIS_URL);
    await redis.flushdb("SYNC");

    try {
@@ -42,6 +42,7 @@ export default {
      },
      true
    );

    await db.migrate.latest({
      directory: path.join(__dirname, "../src/db/migrations"),
      extension: "ts",
@@ -52,14 +53,24 @@ export default {
      directory: path.join(__dirname, "../src/db/seeds"),
      extension: "ts"
    });
-   const smtp = mockSmtpServer();
-   const queue = queueServiceFactory(cfg.REDIS_URL, { dbConnectionUrl: cfg.DB_CONNECTION_URI });
-   const keyStore = keyStoreFactory(cfg.REDIS_URL);
-
-   const hsmModule = initializeHsmModule();
+   const smtp = mockSmtpServer();
+   const queue = queueServiceFactory(envConfig.REDIS_URL, { dbConnectionUrl: envConfig.DB_CONNECTION_URI });
+   const keyStore = keyStoreFactory(envConfig.REDIS_URL);
+
+   const hsmModule = initializeHsmModule(envConfig);
    hsmModule.initialize();

-   const server = await main({ db, smtp, logger, queue, keyStore, hsmModule: hsmModule.getModule(), redis });
+   const server = await main({
+     db,
+     smtp,
+     logger,
+     queue,
+     keyStore,
+     hsmModule: hsmModule.getModule(),
+     redis,
+     envConfig
+   });

    // @ts-expect-error type
    globalThis.testServer = server;
@@ -73,8 +84,8 @@ export default {
        organizationId: seedData1.organization.id,
        accessVersion: 1
      },
-     cfg.AUTH_SECRET,
-     { expiresIn: cfg.JWT_AUTH_LIFETIME }
+     envConfig.AUTH_SECRET,
+     { expiresIn: envConfig.JWT_AUTH_LIFETIME }
    );
  } catch (error) {
    // eslint-disable-next-line
@@ -108,4 +119,4 @@ export default {
      }
    };
  }
-};
+};
6  backend/src/@types/fastify.d.ts  vendored
@@ -93,6 +93,12 @@ import { TUserEngagementServiceFactory } from "@app/services/user-engagement/use
import { TWebhookServiceFactory } from "@app/services/webhook/webhook-service";
import { TWorkflowIntegrationServiceFactory } from "@app/services/workflow-integration/workflow-integration-service";

+declare module "@fastify/request-context" {
+  interface RequestContextData {
+    requestId: string;
+  }
+}
+
declare module "fastify" {
  interface Session {
    callbackPort: string;
@@ -4,6 +4,7 @@ import "ts-node/register";
import dotenv from "dotenv";
import type { Knex } from "knex";
import path from "path";
+import { getMigrationEnvConfig } from "./migrations/utils/env-config";

// Update with your config settings. .
dotenv.config({
@@ -13,20 +14,22 @@ dotenv.config({
  path: path.join(__dirname, "../../../.env")
});

+const envConfig = getMigrationEnvConfig();
+
export default {
  development: {
    client: "postgres",
    connection: {
-     connectionString: process.env.DB_CONNECTION_URI,
-     host: process.env.DB_HOST,
-     port: process.env.DB_PORT,
-     user: process.env.DB_USER,
-     database: process.env.DB_NAME,
-     password: process.env.DB_PASSWORD,
-     ssl: process.env.DB_ROOT_CERT
+     connectionString: envConfig.DB_CONNECTION_URI,
+     host: envConfig.DB_HOST,
+     port: envConfig.DB_PORT,
+     user: envConfig.DB_USER,
+     database: envConfig.DB_NAME,
+     password: envConfig.DB_PASSWORD,
+     ssl: envConfig.DB_ROOT_CERT
        ? {
            rejectUnauthorized: true,
-           ca: Buffer.from(process.env.DB_ROOT_CERT, "base64").toString("ascii")
+           ca: Buffer.from(envConfig.DB_ROOT_CERT, "base64").toString("ascii")
          }
        : false
    },
@@ -44,16 +47,16 @@ export default {
  production: {
    client: "postgres",
    connection: {
-     connectionString: process.env.DB_CONNECTION_URI,
-     host: process.env.DB_HOST,
-     port: process.env.DB_PORT,
-     user: process.env.DB_USER,
-     database: process.env.DB_NAME,
-     password: process.env.DB_PASSWORD,
-     ssl: process.env.DB_ROOT_CERT
+     connectionString: envConfig.DB_CONNECTION_URI,
+     host: envConfig.DB_HOST,
+     port: envConfig.DB_PORT,
+     user: envConfig.DB_USER,
+     database: envConfig.DB_NAME,
+     password: envConfig.DB_PASSWORD,
+     ssl: envConfig.DB_ROOT_CERT
        ? {
            rejectUnauthorized: true,
-           ca: Buffer.from(process.env.DB_ROOT_CERT, "base64").toString("ascii")
+           ca: Buffer.from(envConfig.DB_ROOT_CERT, "base64").toString("ascii")
          }
        : false
    },
110  backend/src/db/migrations/20241127091918_webhook-to-kms.ts  Normal file
@@ -0,0 +1,110 @@
import { Knex } from "knex";

import { inMemoryKeyStore } from "@app/keystore/memory";
import { infisicalSymmetricDecrypt } from "@app/lib/crypto/encryption";
import { initLogger } from "@app/lib/logger";
import { KmsDataKey } from "@app/services/kms/kms-types";

import { SecretKeyEncoding, TableName } from "../schemas";
import { getMigrationEnvConfig } from "./utils/env-config";
import { newRingBuffer } from "./utils/ring-buffer";
import { getMigrationEncryptionServices } from "./utils/services";

const BATCH_SIZE = 500;
export async function up(knex: Knex): Promise<void> {
  const hasEncryptedKey = await knex.schema.hasColumn(TableName.Webhook, "encryptedPassKey");
  const hasEncryptedUrl = await knex.schema.hasColumn(TableName.Webhook, "encryptedUrl");

  const hasWebhookTable = await knex.schema.hasTable(TableName.Webhook);
  if (hasWebhookTable) {
    await knex.schema.alterTable(TableName.Webhook, (t) => {
      if (!hasEncryptedKey) t.binary("encryptedPassKey");
      if (!hasEncryptedUrl) t.binary("encryptedUrl");
    });
  }

  await initLogger();
  const envConfig = getMigrationEnvConfig();
  const keyStore = inMemoryKeyStore();
  const { kmsService } = await getMigrationEncryptionServices({ envConfig, keyStore, db: knex });
  const projectEncryptionRingBuffer =
    newRingBuffer<Awaited<ReturnType<(typeof kmsService)["createCipherPairWithDataKey"]>>>(25);

  const webhooks = await knex(TableName.Webhook)
    .where({})
    .leftJoin(TableName.Environment, `${TableName.Environment}.id`, `${TableName.Webhook}.envId`)
    .select("url", "encryptedSecretKey", "iv", "tag", "keyEncoding", "urlCipherText", "urlIV", "urlTag", "id", "envId")
    .select(knex.ref("projectId").withSchema(TableName.Environment));

  const updatedWebhooks = await Promise.all(
    webhooks.map(async (el) => {
      let projectKmsService = projectEncryptionRingBuffer.getItem(el.projectId);
      if (!projectKmsService) {
        projectKmsService = await kmsService.createCipherPairWithDataKey({
          type: KmsDataKey.SecretManager,
          projectId: el.projectId
        });
        projectEncryptionRingBuffer.push(el.projectId, projectKmsService);
      }

      let encryptedSecretKey = null;
      if (el.encryptedSecretKey && el.iv && el.tag && el.keyEncoding) {
        const decyptedSecretKey = infisicalSymmetricDecrypt({
          keyEncoding: el.keyEncoding as SecretKeyEncoding,
          iv: el.iv,
          tag: el.tag,
          ciphertext: el.encryptedSecretKey
        });
        encryptedSecretKey = projectKmsService.encryptor({ plainText: Buffer.from(decyptedSecretKey, "utf8") });
      }

      const decryptedUrl =
        el.urlIV && el.urlTag && el.urlCipherText && el.keyEncoding
          ? infisicalSymmetricDecrypt({
              keyEncoding: el.keyEncoding as SecretKeyEncoding,
              iv: el.urlIV,
              tag: el.urlTag,
              ciphertext: el.urlCipherText
            })
          : null;

      const encryptedUrl = projectKmsService.encryptor({ plainText: Buffer.from(decryptedUrl || el.url) });
      return { id: el.id, encryptedUrl, encryptedSecretKey, envId: el.envId };
    })
  );

  for (let i = 0; i < updatedWebhooks.length; i += BATCH_SIZE) {
    // eslint-disable-next-line no-await-in-loop
    await knex(TableName.Webhook)
      .insert(
        updatedWebhooks.slice(i, i + BATCH_SIZE).map((el) => ({
          id: el.id,
          envId: el.envId,
          url: "",
          encryptedUrl: el.encryptedUrl,
          encryptedPassKey: el.encryptedSecretKey
        }))
      )
      .onConflict("id")
      .merge();
  }

  if (hasWebhookTable) {
    await knex.schema.alterTable(TableName.Webhook, (t) => {
      if (!hasEncryptedUrl) t.binary("encryptedUrl").notNullable().alter();
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  const hasEncryptedKey = await knex.schema.hasColumn(TableName.Webhook, "encryptedPassKey");
  const hasEncryptedUrl = await knex.schema.hasColumn(TableName.Webhook, "encryptedUrl");

  const hasWebhookTable = await knex.schema.hasTable(TableName.Webhook);
  if (hasWebhookTable) {
    await knex.schema.alterTable(TableName.Webhook, (t) => {
      if (hasEncryptedKey) t.dropColumn("encryptedPassKey");
      if (hasEncryptedUrl) t.dropColumn("encryptedUrl");
    });
  }
}
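The two migrations that follow reuse the same schema pattern as this one: add the new binary column as nullable, backfill it in batches, then tighten it to NOT NULL. A minimal sketch of that pattern, with a placeholder table and column name rather than the real ones above:

// Sketch only: "some_table" and "encryptedBlob" are placeholders, not names from this commit.
import { Knex } from "knex";

export async function up(knex: Knex): Promise<void> {
  const hasColumn = await knex.schema.hasColumn("some_table", "encryptedBlob");
  if (!hasColumn) {
    await knex.schema.alterTable("some_table", (t) => {
      t.binary("encryptedBlob"); // nullable first so existing rows stay valid
    });
  }

  // ...backfill encryptedBlob for existing rows in batches (see the migration above)...

  await knex.schema.alterTable("some_table", (t) => {
    t.binary("encryptedBlob").notNullable().alter(); // tighten only after every row is backfilled
  });
}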
@@ -0,0 +1,89 @@
import { Knex } from "knex";

import { inMemoryKeyStore } from "@app/keystore/memory";
import { infisicalSymmetricDecrypt } from "@app/lib/crypto/encryption";
import { selectAllTableCols } from "@app/lib/knex";
import { KmsDataKey } from "@app/services/kms/kms-types";

import { SecretKeyEncoding, TableName } from "../schemas";
import { getMigrationEnvConfig } from "./utils/env-config";
import { newRingBuffer } from "./utils/ring-buffer";
import { getMigrationEncryptionServices } from "./utils/services";

const BATCH_SIZE = 500;
export async function up(knex: Knex): Promise<void> {
  const hasEncryptedRotationData = await knex.schema.hasColumn(TableName.SecretRotation, "encryptedRotationData");

  const hasRotationTable = await knex.schema.hasTable(TableName.SecretRotation);
  if (hasRotationTable) {
    await knex.schema.alterTable(TableName.SecretRotation, (t) => {
      if (!hasEncryptedRotationData) t.binary("encryptedRotationData");
    });
  }

  const envConfig = getMigrationEnvConfig();
  const keyStore = inMemoryKeyStore();
  const { kmsService } = await getMigrationEncryptionServices({ envConfig, keyStore, db: knex });
  const projectEncryptionRingBuffer =
    newRingBuffer<Awaited<ReturnType<(typeof kmsService)["createCipherPairWithDataKey"]>>>(25);

  const secretRotations = await knex(TableName.SecretRotation)
    .leftJoin(TableName.Environment, `${TableName.Environment}.id`, `${TableName.SecretRotation}.envId`)
    .select(selectAllTableCols(TableName.SecretRotation))
    .select(knex.ref("projectId").withSchema(TableName.Environment));

  const updatedRotationData = await Promise.all(
    secretRotations.map(async (el) => {
      let projectKmsService = projectEncryptionRingBuffer.getItem(el.projectId);
      if (!projectKmsService) {
        projectKmsService = await kmsService.createCipherPairWithDataKey({
          type: KmsDataKey.SecretManager,
          projectId: el.projectId
        });
        projectEncryptionRingBuffer.push(el.projectId, projectKmsService);
      }

      const decryptedRotationData =
        el.encryptedDataTag && el.encryptedDataIV && el.encryptedData && el.keyEncoding
          ? infisicalSymmetricDecrypt({
              keyEncoding: el.keyEncoding as SecretKeyEncoding,
              iv: el.encryptedDataIV,
              tag: el.encryptedDataTag,
              ciphertext: el.encryptedData
            })
          : null;

      const encryptedRotationData = decryptedRotationData
        ? projectKmsService.encryptor({
            plainText: Buffer.from(decryptedRotationData)
          })
        : null;
      return { ...el, encryptedRotationData };
    })
  );

  for (let i = 0; i < updatedRotationData.length; i += BATCH_SIZE) {
    // eslint-disable-next-line no-await-in-loop
    await knex(TableName.SecretRotation)
      .insert(updatedRotationData.slice(i, i + BATCH_SIZE))
      .onConflict("id")
      .merge();
  }

  if (hasRotationTable) {
    await knex.schema.alterTable(TableName.SecretRotation, (t) => {
      if (!hasEncryptedRotationData) t.binary("encryptedRotationData").notNullable().alter();
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  const hasEncryptedRotationData = await knex.schema.hasColumn(TableName.SecretRotation, "encryptedRotationData");

  const hasRotationTable = await knex.schema.hasTable(TableName.SecretRotation);
  if (hasRotationTable) {
    await knex.schema.alterTable(TableName.SecretRotation, (t) => {
      if (hasEncryptedRotationData) t.dropColumn("encryptedRotationData");
    });
  }
}
@@ -0,0 +1,90 @@
import { Knex } from "knex";

import { inMemoryKeyStore } from "@app/keystore/memory";
import { infisicalSymmetricDecrypt } from "@app/lib/crypto/encryption";
import { selectAllTableCols } from "@app/lib/knex";
import { KmsDataKey } from "@app/services/kms/kms-types";

import { SecretKeyEncoding, TableName } from "../schemas";
import { getMigrationEnvConfig } from "./utils/env-config";
import { newRingBuffer } from "./utils/ring-buffer";
import { getMigrationEncryptionServices } from "./utils/services";

const BATCH_SIZE = 500;
export async function up(knex: Knex): Promise<void> {
  const hasEncryptedInputColumn = await knex.schema.hasColumn(TableName.DynamicSecret, "encryptedInput");

  const hasDynamicSecretTable = await knex.schema.hasTable(TableName.DynamicSecret);
  if (hasDynamicSecretTable) {
    await knex.schema.alterTable(TableName.DynamicSecret, (t) => {
      if (!hasEncryptedInputColumn) t.binary("encryptedInput");
    });
  }

  const envConfig = getMigrationEnvConfig();
  const keyStore = inMemoryKeyStore();
  const { kmsService } = await getMigrationEncryptionServices({ envConfig, keyStore, db: knex });
  const projectEncryptionRingBuffer =
    newRingBuffer<Awaited<ReturnType<(typeof kmsService)["createCipherPairWithDataKey"]>>>(25);

  const dynamicSecretRootCredentials = await knex(TableName.DynamicSecret)
    .leftJoin(TableName.SecretFolder, `${TableName.SecretFolder}.id`, `${TableName.DynamicSecret}.folderId`)
    .leftJoin(TableName.Environment, `${TableName.SecretFolder}.envId`, `${TableName.Environment}.id`)
    .select(selectAllTableCols(TableName.DynamicSecret))
    .select(knex.ref("projectId").withSchema(TableName.Environment));

  const updatedDynamicSecrets = await Promise.all(
    dynamicSecretRootCredentials.map(async (el) => {
      let projectKmsService = projectEncryptionRingBuffer.getItem(el.projectId);
      if (!projectKmsService) {
        projectKmsService = await kmsService.createCipherPairWithDataKey({
          type: KmsDataKey.SecretManager,
          projectId: el.projectId
        });
        projectEncryptionRingBuffer.push(el.projectId, projectKmsService);
      }

      const decryptedInputData =
        el.inputIV && el.inputTag && el.inputCiphertext && el.keyEncoding
          ? infisicalSymmetricDecrypt({
              keyEncoding: el.keyEncoding as SecretKeyEncoding,
              iv: el.inputIV,
              tag: el.inputTag,
              ciphertext: el.inputCiphertext
            })
          : null;

      const encryptedInput = decryptedInputData
        ? projectKmsService.encryptor({
            plainText: Buffer.from(decryptedInputData)
          })
        : null;
      return { ...el, encryptedInput };
    })
  );

  for (let i = 0; i < updatedDynamicSecrets.length; i += BATCH_SIZE) {
    // eslint-disable-next-line no-await-in-loop
    await knex(TableName.DynamicSecret)
      .insert(updatedDynamicSecrets.slice(i, i + BATCH_SIZE))
      .onConflict("id")
      .merge();
  }

  if (hasDynamicSecretTable) {
    await knex.schema.alterTable(TableName.DynamicSecret, (t) => {
      if (!hasEncryptedInputColumn) t.binary("encryptedInput").notNullable().alter();
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  const hasEncryptedInputColumn = await knex.schema.hasColumn(TableName.DynamicSecret, "encryptedInput");

  const hasDynamicSecretTable = await knex.schema.hasTable(TableName.DynamicSecret);
  if (hasDynamicSecretTable) {
    await knex.schema.alterTable(TableName.DynamicSecret, (t) => {
      if (hasEncryptedInputColumn) t.dropColumn("encryptedInput");
    });
  }
}
49  backend/src/db/migrations/utils/env-config.ts  Normal file
@@ -0,0 +1,49 @@
import { z } from "zod";

import { zpStr } from "@app/lib/zod";

const envSchema = z
  .object({
    DB_CONNECTION_URI: zpStr(z.string().describe("Postgres database connection string")).default(
      `postgresql://${process.env.DB_USER}:${process.env.DB_PASSWORD}@${process.env.DB_HOST}:${process.env.DB_PORT}/${process.env.DB_NAME}`
    ),
    DB_ROOT_CERT: zpStr(z.string().describe("Postgres database base64-encoded CA cert").optional()),
    DB_HOST: zpStr(z.string().describe("Postgres database host").optional()),
    DB_PORT: zpStr(z.string().describe("Postgres database port").optional()).default("5432"),
    DB_USER: zpStr(z.string().describe("Postgres database username").optional()),
    DB_PASSWORD: zpStr(z.string().describe("Postgres database password").optional()),
    DB_NAME: zpStr(z.string().describe("Postgres database name").optional()),
    // TODO(akhilmhdh): will be changed to one
    ENCRYPTION_KEY: zpStr(z.string().optional()),
    ROOT_ENCRYPTION_KEY: zpStr(z.string().optional()),
    // HSM
    HSM_LIB_PATH: zpStr(z.string().optional()),
    HSM_PIN: zpStr(z.string().optional()),
    HSM_KEY_LABEL: zpStr(z.string().optional()),
    HSM_SLOT: z.coerce.number().optional().default(0)
  })
  // To ensure that basic encryption is always possible.
  .refine(
    (data) => Boolean(data.ENCRYPTION_KEY) || Boolean(data.ROOT_ENCRYPTION_KEY),
    "Either ENCRYPTION_KEY or ROOT_ENCRYPTION_KEY must be defined."
  )
  .transform((data) => ({
    ...data,
    isHsmConfigured:
      Boolean(data.HSM_LIB_PATH) && Boolean(data.HSM_PIN) && Boolean(data.HSM_KEY_LABEL) && data.HSM_SLOT !== undefined
  }));

export type TMigrationEnvConfig = z.infer<typeof envSchema>;

export const getMigrationEnvConfig = () => {
  const parsedEnv = envSchema.safeParse(process.env);
  if (!parsedEnv.success) {
    // eslint-disable-next-line no-console
    console.error("Invalid environment variables. Check the error below");
    // eslint-disable-next-line no-console
    console.error(parsedEnv.error.issues);
    process.exit(-1);
  }

  return Object.freeze(parsedEnv.data);
};
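A short usage sketch for this helper (the logged values are illustrative): it parses process.env once against the schema above and hands back a frozen, validated config object instead of raw environment variables.

import { getMigrationEnvConfig } from "./env-config";

const envConfig = getMigrationEnvConfig();
// eslint-disable-next-line no-console
console.log(envConfig.DB_PORT); // "5432" unless DB_PORT is set
// eslint-disable-next-line no-console
console.log(envConfig.isHsmConfigured); // derived from HSM_LIB_PATH, HSM_PIN, HSM_KEY_LABEL and HSM_SLOT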
19  backend/src/db/migrations/utils/ring-buffer.ts  Normal file
@@ -0,0 +1,19 @@
export const newRingBuffer = <T>(bufferSize = 10) => {
  const bufferItems: { id: string; item: T }[] = [];
  let bufferIndex = 0;

  const push = (id: string, item: T) => {
    if (bufferItems.length < bufferSize) {
      bufferItems.push({ id, item });
    } else {
      bufferItems[bufferIndex] = { id, item };
    }
    bufferIndex = (bufferIndex + 1) % bufferSize;
  };

  const getItem = (id: string) => {
    return bufferItems.find((i) => i.id === id)?.item;
  };

  return { push, getItem };
};
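Usage sketch for the ring buffer (the cached value type here is illustrative); the migrations above use it as a small fixed-size cache of per-project cipher pairs:

import { newRingBuffer } from "./ring-buffer";

const cache = newRingBuffer<{ projectDataKey: string }>(25);
cache.push("project-a", { projectDataKey: "..." });

cache.getItem("project-a"); // { projectDataKey: "..." }
cache.getItem("project-b"); // undefined - caller creates the item and pushes it

Once more than bufferSize distinct ids have been pushed, the oldest slot is overwritten, so the cache bounds memory at the cost of occasionally re-creating evicted entries.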
50  backend/src/db/migrations/utils/services.ts  Normal file
@@ -0,0 +1,50 @@
import { Knex } from "knex";

import { initializeHsmModule } from "@app/ee/services/hsm/hsm-fns";
import { hsmServiceFactory } from "@app/ee/services/hsm/hsm-service";
import { TKeyStoreFactory } from "@app/keystore/keystore";
import { internalKmsDALFactory } from "@app/services/kms/internal-kms-dal";
import { kmskeyDALFactory } from "@app/services/kms/kms-key-dal";
import { kmsRootConfigDALFactory } from "@app/services/kms/kms-root-config-dal";
import { kmsServiceFactory } from "@app/services/kms/kms-service";
import { orgDALFactory } from "@app/services/org/org-dal";
import { projectDALFactory } from "@app/services/project/project-dal";

import { TMigrationEnvConfig } from "./env-config";

type TDependencies = {
  envConfig: TMigrationEnvConfig;
  db: Knex;
  keyStore: TKeyStoreFactory;
};

export const getMigrationEncryptionServices = async ({ envConfig, db, keyStore }: TDependencies) => {
  const hsmModule = initializeHsmModule(envConfig);
  hsmModule.initialize();

  const hsmService = hsmServiceFactory({
    hsmModule: hsmModule.getModule(),
    envConfig
  });

  const orgDAL = orgDALFactory(db);
  const kmsRootConfigDAL = kmsRootConfigDALFactory(db);
  const kmsDAL = kmskeyDALFactory(db);
  const internalKmsDAL = internalKmsDALFactory(db);
  const projectDAL = projectDALFactory(db);

  const kmsService = kmsServiceFactory({
    kmsRootConfigDAL,
    keyStore,
    kmsDAL,
    internalKmsDAL,
    orgDAL,
    projectDAL,
    hsmService,
    envConfig
  });

  await hsmService.startService();

  return { kmsService };
};
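A sketch of how a data migration in this commit composes the three utilities (it mirrors the migrations above; the helper function name and its arguments are illustrative, not part of the commit, and the encryptor's return shape is whatever kmsService produces):

import { Knex } from "knex";

import { inMemoryKeyStore } from "@app/keystore/memory";
import { initLogger } from "@app/lib/logger";
import { KmsDataKey } from "@app/services/kms/kms-types";

import { getMigrationEnvConfig } from "./env-config";
import { getMigrationEncryptionServices } from "./services";

// Illustrative helper: encrypt a buffer under a project's secret-manager data key inside a migration.
export async function encryptForProject(knex: Knex, projectId: string, plainText: Buffer) {
  await initLogger(); // the services expect the logger to be initialized
  const envConfig = getMigrationEnvConfig(); // validated DB / encryption / HSM settings
  const keyStore = inMemoryKeyStore(); // migrations use the in-memory keystore, so no Redis connection is needed
  const { kmsService } = await getMigrationEncryptionServices({ envConfig, keyStore, db: knex });

  const projectCipherPair = await kmsService.createCipherPairWithDataKey({
    type: KmsDataKey.SecretManager,
    projectId
  });
  return projectCipherPair.encryptor({ plainText });
}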
@@ -1,25 +1,23 @@
import * as pkcs11js from "pkcs11js";

-import { getConfig } from "@app/lib/config/env";
+import { TEnvConfig } from "@app/lib/config/env";
import { logger } from "@app/lib/logger";

import { HsmModule } from "./hsm-types";

-export const initializeHsmModule = () => {
-  const appCfg = getConfig();
-
+export const initializeHsmModule = (envConfig: Pick<TEnvConfig, "isHsmConfigured" | "HSM_LIB_PATH">) => {
  // Create a new instance of PKCS11 module
  const pkcs11 = new pkcs11js.PKCS11();
  let isInitialized = false;

  const initialize = () => {
-   if (!appCfg.isHsmConfigured) {
+   if (!envConfig.isHsmConfigured) {
      return;
    }

    try {
      // Load the PKCS#11 module
-     pkcs11.load(appCfg.HSM_LIB_PATH!);
+     pkcs11.load(envConfig.HSM_LIB_PATH!);

      // Initialize the module
      pkcs11.C_Initialize();
@@ -1,12 +1,13 @@
import pkcs11js from "pkcs11js";

-import { getConfig } from "@app/lib/config/env";
+import { TEnvConfig } from "@app/lib/config/env";
import { logger } from "@app/lib/logger";

import { HsmKeyType, HsmModule } from "./hsm-types";

type THsmServiceFactoryDep = {
  hsmModule: HsmModule;
+ envConfig: Pick<TEnvConfig, "HSM_PIN" | "HSM_SLOT" | "HSM_LIB_PATH" | "HSM_KEY_LABEL" | "isHsmConfigured">;
};

export type THsmServiceFactory = ReturnType<typeof hsmServiceFactory>;
@@ -15,9 +16,7 @@ type SyncOrAsync<T> = T | Promise<T>;
type SessionCallback<T> = (session: pkcs11js.Handle) => SyncOrAsync<T>;

-// eslint-disable-next-line no-empty-pattern
-export const hsmServiceFactory = ({ hsmModule: { isInitialized, pkcs11 } }: THsmServiceFactoryDep) => {
-  const appCfg = getConfig();
-
+export const hsmServiceFactory = ({ hsmModule: { isInitialized, pkcs11 }, envConfig }: THsmServiceFactoryDep) => {
  // Constants for buffer structures
  const IV_LENGTH = 16; // Luna HSM typically expects 16-byte IV for cbc
  const BLOCK_SIZE = 16;
@@ -63,11 +62,11 @@ export const hsmServiceFactory = ({ hsmModule: { isInitialized, pkcs11 } }: THsm
      throw new Error("No slots available");
    }

-   if (appCfg.HSM_SLOT >= slots.length) {
-     throw new Error(`HSM slot ${appCfg.HSM_SLOT} not found or not initialized`);
+   if (envConfig.HSM_SLOT >= slots.length) {
+     throw new Error(`HSM slot ${envConfig.HSM_SLOT} not found or not initialized`);
    }

-   const slotId = slots[appCfg.HSM_SLOT];
+   const slotId = slots[envConfig.HSM_SLOT];

    const startTime = Date.now();
    while (Date.now() - startTime < MAX_TIMEOUT) {
@@ -78,7 +77,7 @@ export const hsmServiceFactory = ({ hsmModule: { isInitialized, pkcs11 } }: THsm

      // Login
      try {
-       pkcs11.C_Login(sessionHandle, pkcs11js.CKU_USER, appCfg.HSM_PIN);
+       pkcs11.C_Login(sessionHandle, pkcs11js.CKU_USER, envConfig.HSM_PIN);
        logger.info("HSM: Successfully authenticated");
        break;
      } catch (error) {
@@ -86,7 +85,7 @@ export const hsmServiceFactory = ({ hsmModule: { isInitialized, pkcs11 } }: THsm
        if (error instanceof pkcs11js.Pkcs11Error) {
          if (error.code === pkcs11js.CKR_PIN_INCORRECT) {
            // We throw instantly here to prevent further attempts, because if too many attempts are made, the HSM will potentially wipe all key material
-           logger.error(error, `HSM: Incorrect PIN detected for HSM slot ${appCfg.HSM_SLOT}`);
+           logger.error(error, `HSM: Incorrect PIN detected for HSM slot ${envConfig.HSM_SLOT}`);
            throw new Error("HSM: Incorrect HSM Pin detected. Please check the HSM configuration.");
          }
          if (error.code === pkcs11js.CKR_USER_ALREADY_LOGGED_IN) {
@@ -133,7 +132,7 @@ export const hsmServiceFactory = ({ hsmModule: { isInitialized, pkcs11 } }: THsm
  };

  const $findKey = (sessionHandle: pkcs11js.Handle, type: HsmKeyType) => {
-   const label = type === HsmKeyType.HMAC ? `${appCfg.HSM_KEY_LABEL}_HMAC` : appCfg.HSM_KEY_LABEL;
+   const label = type === HsmKeyType.HMAC ? `${envConfig.HSM_KEY_LABEL}_HMAC` : envConfig.HSM_KEY_LABEL;
    const keyType = type === HsmKeyType.HMAC ? pkcs11js.CKK_GENERIC_SECRET : pkcs11js.CKK_AES;

    const template = [
@@ -360,7 +359,7 @@ export const hsmServiceFactory = ({ hsmModule: { isInitialized, pkcs11 } }: THsm
  };

  const isActive = async () => {
-   if (!isInitialized || !appCfg.isHsmConfigured) {
+   if (!isInitialized || !envConfig.isHsmConfigured) {
      return false;
    }

@@ -372,11 +371,11 @@ export const hsmServiceFactory = ({ hsmModule: { isInitialized, pkcs11 } }: THsm
      logger.error(err, "HSM: Error testing PKCS#11 module");
    }

-   return appCfg.isHsmConfigured && isInitialized && pkcs11TestPassed;
+   return envConfig.isHsmConfigured && isInitialized && pkcs11TestPassed;
  };

  const startService = async () => {
-   if (!appCfg.isHsmConfigured || !pkcs11 || !isInitialized) return;
+   if (!envConfig.isHsmConfigured || !pkcs11 || !isInitialized) return;

    try {
      await $withSession(async (sessionHandle) => {
@@ -395,7 +394,7 @@ export const hsmServiceFactory = ({ hsmModule: { isInitialized, pkcs11 } }: THsm
          { type: pkcs11js.CKA_CLASS, value: pkcs11js.CKO_SECRET_KEY },
          { type: pkcs11js.CKA_KEY_TYPE, value: pkcs11js.CKK_AES },
          { type: pkcs11js.CKA_VALUE_LEN, value: AES_KEY_SIZE / 8 },
-         { type: pkcs11js.CKA_LABEL, value: appCfg.HSM_KEY_LABEL! },
+         { type: pkcs11js.CKA_LABEL, value: envConfig.HSM_KEY_LABEL! },
          { type: pkcs11js.CKA_ENCRYPT, value: true }, // Allow encryption
          { type: pkcs11js.CKA_DECRYPT, value: true }, // Allow decryption
          ...genericAttributes
@@ -410,7 +409,7 @@ export const hsmServiceFactory = ({ hsmModule: { isInitialized, pkcs11 } }: THsm
          keyTemplate
        );

-       logger.info(`HSM: Master key created successfully with label: ${appCfg.HSM_KEY_LABEL}`);
+       logger.info(`HSM: Master key created successfully with label: ${envConfig.HSM_KEY_LABEL}`);
      }

      // Check if HMAC key exists, create if not
@@ -419,7 +418,7 @@ export const hsmServiceFactory = ({ hsmModule: { isInitialized, pkcs11 } }: THsm
          { type: pkcs11js.CKA_CLASS, value: pkcs11js.CKO_SECRET_KEY },
          { type: pkcs11js.CKA_KEY_TYPE, value: pkcs11js.CKK_GENERIC_SECRET },
          { type: pkcs11js.CKA_VALUE_LEN, value: HMAC_KEY_SIZE / 8 }, // 256-bit key
-         { type: pkcs11js.CKA_LABEL, value: `${appCfg.HSM_KEY_LABEL!}_HMAC` },
+         { type: pkcs11js.CKA_LABEL, value: `${envConfig.HSM_KEY_LABEL!}_HMAC` },
          { type: pkcs11js.CKA_SIGN, value: true }, // Allow signing
          { type: pkcs11js.CKA_VERIFY, value: true }, // Allow verification
          ...genericAttributes
@@ -434,7 +433,7 @@ export const hsmServiceFactory = ({ hsmModule: { isInitialized, pkcs11 } }: THsm
          hmacKeyTemplate
        );

-       logger.info(`HSM: HMAC key created successfully with label: ${appCfg.HSM_KEY_LABEL}_HMAC`);
+       logger.info(`HSM: HMAC key created successfully with label: ${envConfig.HSM_KEY_LABEL}_HMAC`);
      }

      // Get slot info to check supported mechanisms
38  backend/src/keystore/memory.ts  Normal file
@@ -0,0 +1,38 @@
import { Lock } from "@app/lib/red-lock";

import { TKeyStoreFactory } from "./keystore";

export const inMemoryKeyStore = (): TKeyStoreFactory => {
  const store: Record<string, string | number | Buffer> = {};

  return {
    setItem: async (key, value) => {
      store[key] = value;
      return "OK";
    },
    setItemWithExpiry: async (key, value) => {
      store[key] = value;
      return "OK";
    },
    deleteItem: async (key) => {
      delete store[key];
      return 1;
    },
    getItem: async (key) => {
      const value = store[key];
      if (typeof value === "string") {
        return value;
      }
      return null;
    },
    incrementBy: async () => {
      return 1;
    },
    acquireLock: () => {
      return Promise.resolve({
        release: () => {}
      }) as Promise<Lock>;
    },
    waitTillReady: async () => {}
  };
};
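A small usage sketch (the key name is illustrative): the in-memory store satisfies TKeyStoreFactory, so migration code can pass it wherever the Redis-backed keystore would normally go; values only live for the lifetime of the process.

import { inMemoryKeyStore } from "@app/keystore/memory";

const keyStore = inMemoryKeyStore();
await keyStore.setItem("example-key", "1"); // resolves to "OK"
await keyStore.getItem("example-key"); // "1"
await keyStore.deleteItem("example-key"); // 1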
@@ -258,7 +258,8 @@ const envSchema = z
    SECRET_SCANNING_ORG_WHITELIST: data.SECRET_SCANNING_ORG_WHITELIST?.split(",")
  }));

-let envCfg: Readonly<z.infer<typeof envSchema>>;
+export type TEnvConfig = Readonly<z.infer<typeof envSchema>>;
+let envCfg: TEnvConfig;

export const getConfig = () => envCfg;
// cannot import singleton logger directly as it needs config to load various transport
@@ -20,21 +20,21 @@ dotenv.config();

const run = async () => {
  const logger = await initLogger();
- const appCfg = initEnvConfig(logger);
+ const envConfig = initEnvConfig(logger);

  const db = initDbConnection({
-   dbConnectionUri: appCfg.DB_CONNECTION_URI,
-   dbRootCert: appCfg.DB_ROOT_CERT,
-   readReplicas: appCfg.DB_READ_REPLICAS?.map((el) => ({
+   dbConnectionUri: envConfig.DB_CONNECTION_URI,
+   dbRootCert: envConfig.DB_ROOT_CERT,
+   readReplicas: envConfig.DB_READ_REPLICAS?.map((el) => ({
      dbRootCert: el.DB_ROOT_CERT,
      dbConnectionUri: el.DB_CONNECTION_URI
    }))
  });

- const auditLogDb = appCfg.AUDIT_LOGS_DB_CONNECTION_URI
+ const auditLogDb = envConfig.AUDIT_LOGS_DB_CONNECTION_URI
    ? initAuditLogDbConnection({
-       dbConnectionUri: appCfg.AUDIT_LOGS_DB_CONNECTION_URI,
-       dbRootCert: appCfg.AUDIT_LOGS_DB_ROOT_CERT
+       dbConnectionUri: envConfig.AUDIT_LOGS_DB_CONNECTION_URI,
+       dbRootCert: envConfig.AUDIT_LOGS_DB_ROOT_CERT
      })
    : undefined;

@@ -57,20 +57,30 @@ const run = async () => {

  const smtp = smtpServiceFactory(formatSmtpConfig());

- const queue = queueServiceFactory(appCfg.REDIS_URL, {
-   dbConnectionUrl: appCfg.DB_CONNECTION_URI,
-   dbRootCert: appCfg.DB_ROOT_CERT
+ const queue = queueServiceFactory(envConfig.REDIS_URL, {
+   dbConnectionUrl: envConfig.DB_CONNECTION_URI,
+   dbRootCert: envConfig.DB_ROOT_CERT
  });

  await queue.initialize();

- const keyStore = keyStoreFactory(appCfg.REDIS_URL);
- const redis = new Redis(appCfg.REDIS_URL);
+ const keyStore = keyStoreFactory(envConfig.REDIS_URL);
+ const redis = new Redis(envConfig.REDIS_URL);

- const hsmModule = initializeHsmModule();
+ const hsmModule = initializeHsmModule(envConfig);
  hsmModule.initialize();

- const server = await main({ db, auditLogDb, hsmModule: hsmModule.getModule(), smtp, logger, queue, keyStore, redis });
+ const server = await main({
+   db,
+   auditLogDb,
+   hsmModule: hsmModule.getModule(),
+   smtp,
+   logger,
+   queue,
+   keyStore,
+   redis,
+   envConfig
+ });
  const bootstrap = await bootstrapCheck({ db });

  // eslint-disable-next-line
@@ -90,8 +100,8 @@ const run = async () => {
  });

  await server.listen({
-   port: appCfg.PORT,
-   host: appCfg.HOST,
+   port: envConfig.PORT,
+   host: envConfig.HOST,
    listenTextResolver: (address) => {
      void bootstrap();
      return address;
@@ -17,7 +17,7 @@ import { Knex } from "knex";

import { HsmModule } from "@app/ee/services/hsm/hsm-types";
import { TKeyStoreFactory } from "@app/keystore/keystore";
-import { getConfig, IS_PACKAGED } from "@app/lib/config/env";
+import { getConfig, IS_PACKAGED, TEnvConfig } from "@app/lib/config/env";
import { CustomLogger } from "@app/lib/logger/logger";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { TQueueServiceFactory } from "@app/queue";
@@ -43,10 +43,11 @@ type TMain = {
  keyStore: TKeyStoreFactory;
  hsmModule: HsmModule;
  redis: Redis;
+ envConfig: TEnvConfig;
};

// Run the server!
-export const main = async ({ db, hsmModule, auditLogDb, smtp, logger, queue, keyStore, redis }: TMain) => {
+export const main = async ({ db, hsmModule, auditLogDb, smtp, logger, queue, keyStore, redis, envConfig }: TMain) => {
  const appCfg = getConfig();

  const server = fastify({
@@ -127,7 +128,7 @@ export const main = async ({ db, hsmModule, auditLogDb, smtp, logger, queue, key
    })
  });

- await server.register(registerRoutes, { smtp, queue, db, auditLogDb, keyStore, hsmModule });
+ await server.register(registerRoutes, { smtp, queue, db, auditLogDb, keyStore, hsmModule, envConfig });

  await server.register(registerServeUI, {
    standaloneMode: appCfg.STANDALONE_MODE || IS_PACKAGED,
@@ -142,4 +143,4 @@ export const main = async ({ db, hsmModule, auditLogDb, smtp, logger, queue, key
    await queue.shutdown();
    process.exit(1);
  }
-};
+};
@@ -85,7 +85,7 @@ import { sshCertificateTemplateServiceFactory } from "@app/ee/services/ssh-certi
import { trustedIpDALFactory } from "@app/ee/services/trusted-ip/trusted-ip-dal";
import { trustedIpServiceFactory } from "@app/ee/services/trusted-ip/trusted-ip-service";
import { TKeyStoreFactory } from "@app/keystore/keystore";
-import { getConfig } from "@app/lib/config/env";
+import { getConfig, TEnvConfig } from "@app/lib/config/env";
import { TQueueServiceFactory } from "@app/queue";
import { readLimit } from "@app/server/config/rateLimiter";
import { accessTokenQueueServiceFactory } from "@app/services/access-token-queue/access-token-queue";
@@ -244,7 +244,8 @@ export const registerRoutes = async (
    hsmModule,
    smtp: smtpService,
    queue: queueService,
-   keyStore
+   keyStore,
+   envConfig
  }: {
    auditLogDb?: Knex;
    db: Knex;
@@ -252,6 +253,7 @@ export const registerRoutes = async (
    smtp: TSmtpService;
    queue: TQueueServiceFactory;
    keyStore: TKeyStoreFactory;
+   envConfig: TEnvConfig;
  }
) => {
  const appCfg = getConfig();
@@ -391,7 +393,8 @@ export const registerRoutes = async (
  const licenseService = licenseServiceFactory({ permissionService, orgDAL, licenseDAL, keyStore });

  const hsmService = hsmServiceFactory({
-   hsmModule
+   hsmModule,
+   envConfig
  });

  const kmsService = kmsServiceFactory({
@@ -401,7 +404,8 @@ export const registerRoutes = async (
    internalKmsDAL,
    orgDAL,
    projectDAL,
-   hsmService
+   hsmService,
+   envConfig
  });

  const externalKmsService = externalKmsServiceFactory({
@@ -13,7 +13,7 @@ import {
} from "@app/ee/services/external-kms/providers/model";
import { THsmServiceFactory } from "@app/ee/services/hsm/hsm-service";
import { KeyStorePrefixes, TKeyStoreFactory } from "@app/keystore/keystore";
-import { getConfig } from "@app/lib/config/env";
+import { TEnvConfig } from "@app/lib/config/env";
import { randomSecureBytes } from "@app/lib/crypto";
import { symmetricCipherService, SymmetricEncryption } from "@app/lib/crypto/cipher";
import { generateHash } from "@app/lib/crypto/encryption";
@@ -48,6 +48,7 @@ type TKmsServiceFactoryDep = {
  keyStore: Pick<TKeyStoreFactory, "acquireLock" | "waitTillReady" | "setItemWithExpiry">;
  internalKmsDAL: Pick<TInternalKmsDALFactory, "create">;
  hsmService: THsmServiceFactory;
+ envConfig: Pick<TEnvConfig, "ENCRYPTION_KEY" | "ROOT_ENCRYPTION_KEY">;
};

export type TKmsServiceFactory = ReturnType<typeof kmsServiceFactory>;
@@ -61,6 +62,7 @@ const KMS_VERSION_BLOB_LENGTH = 3;
const KmsSanitizedSchema = KmsKeysSchema.extend({ isExternal: z.boolean() });

export const kmsServiceFactory = ({
+ envConfig,
  kmsDAL,
  kmsRootConfigDAL,
  keyStore,
@@ -635,10 +637,8 @@ export const kmsServiceFactory = ({
  };

  const $getBasicEncryptionKey = () => {
-   const appCfg = getConfig();
-
-   const encryptionKey = appCfg.ENCRYPTION_KEY || appCfg.ROOT_ENCRYPTION_KEY;
-   const isBase64 = !appCfg.ENCRYPTION_KEY;
+   const encryptionKey = envConfig.ENCRYPTION_KEY || envConfig.ROOT_ENCRYPTION_KEY;
+   const isBase64 = !envConfig.ENCRYPTION_KEY;
    if (!encryptionKey)
      throw new Error(
        "Root encryption key not found for KMS service. Did you set the ENCRYPTION_KEY or ROOT_ENCRYPTION_KEY environment variables?"
@@ -4,8 +4,17 @@ import { ProjectType, TProjectKeys } from "@app/db/schemas";
import { TProjectPermission } from "@app/lib/types";

import { ActorAuthMethod, ActorType } from "../auth/auth-type";
-import { CaStatus } from "../certificate-authority/certificate-authority-types";
-import { KmsType } from "../kms/kms-types";

+enum KmsType {
+  External = "external",
+  Internal = "internal"
+}
+
+enum CaStatus {
+  ACTIVE = "active",
+  DISABLED = "disabled",
+  PENDING_CERTIFICATE = "pending-certificate"
+}
+
export enum ProjectFilterType {
  ID = "id",