Compare commits

oidc-group ... misc/add-p — 108 commits (SHA1):

d185dbb7ff  4292cb2a04  051f53c66e  99daa43fc6  27badad3d7  b5e3af6e7d  280fbdfbb9  18fc10aaec
b20e04bdeb  10d14edc20  4abdd4216b  332ed68c13  52feabd786  d7a99db66a  fc0bdc25af  5ffe45eaf5
8f795100ea  8d8a3efd77  677180548b  293bea474e  bc4fc9a1ca  483850441d  4355fd09cc  1f85d9c486
75d33820b3  074446df1f  7ffa0ef8f5  5250e7c3d5  2deaa4eff3  0b6bc4c1f0  abbe7bbd0c  565340dc50
36c428f152  f97826ea82  0f5cbf055c  1345ff02e3  b960ee61d7  0b98a214a7  599c2226e4  8e24a4d3f8
27486e7600  979e9efbcb  e06b5ecd1b  1097ec64b2  93fe9929b7  aca654a993  b5cf237a4a  6efb630200
151ede6cbf  931ee1e8da  0401793d38  0613c12508  60d3ffac5d  5e192539a1  021a8ddace  f92aba14cd
fdeefcdfcf  645f70f770  923feb81f3  16c51af340  9fd37ca456  92bebf7d84  df053bbae9  42319f01a7
0ea9f9b60d  33ce783fda  63c48dc095  16eefe5bac  b984111a73  677ff62b5c  8cc2e08f24  d90178f49a
ad50cff184  8e43d2a994  7074fdbac3  ef70de1e0b  7e9ee7b5e3  517c613d05  ae8cf06ec6  818778ddc5
2e12d9a13c  e678c9d1cf  da0b07ce2a  3306a9ca69  e9af34a6ba  3de8ed169f  d1eb350bdd  d268f52a1c
c519cee5d1  c7dc595e1a  be26dc9872  aaeb6e73fe  52f8c6adba  3d2b2cbbab  1a82809bd5  c4f994750d
fa7020949c  eca2b3ccde  67fc16ecd3  f85add7cca  cd028ae133  63c71fabcd  e90166f1f0  8adf4787b9
a12522db55  49ab487dc2  daf0731580  fb2b64cb19
@@ -0,0 +1,47 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  const hasEmail = await knex.schema.hasColumn(TableName.Users, "email");
  const hasUsername = await knex.schema.hasColumn(TableName.Users, "username");
  if (hasEmail) {
    await knex(TableName.Users)
      .where({ isGhost: false })
      .update({
        // @ts-expect-error email assume string this is expected
        email: knex.raw("lower(email)")
      });
  }
  if (hasUsername) {
    await knex.schema.raw(`
      CREATE INDEX IF NOT EXISTS ${TableName.Users}_lower_username_idx
      ON ${TableName.Users} (LOWER(username))
    `);

    const duplicatesSubquery = knex(TableName.Users)
      .select(knex.raw("lower(username) as lowercase_username"))
      .groupBy("lowercase_username")
      .having(knex.raw("count(*)"), ">", 1);

    // Update usernames to lowercase where they won't create duplicates
    await knex(TableName.Users)
      .where({ isGhost: false })
      .whereRaw("username <> lower(username)") // Only update if not already lowercase
      // @ts-expect-error username assume string this is expected
      .whereNotIn(knex.raw("lower(username)"), duplicatesSubquery)
      .update({
        // @ts-expect-error username assume string this is expected
        username: knex.raw("lower(username)")
      });
  }
}

export async function down(knex: Knex): Promise<void> {
  const hasUsername = await knex.schema.hasColumn(TableName.Users, "username");
  if (hasUsername) {
    await knex.schema.raw(`
      DROP INDEX IF EXISTS ${TableName.Users}_lower_username_idx
    `);
  }
}
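The migration above lowercases stored emails and usernames and adds a functional index on LOWER(username). A minimal sketch of how a case-insensitive lookup could take advantage of that index is shown below; the helper name and the `db` handle are illustrative assumptions, not code from this change set.

import { Knex } from "knex";

import { TableName } from "../schemas";

// Hypothetical helper: matches users regardless of the casing the caller supplies.
// Because the migration created an index on LOWER(username), this predicate can
// use that index instead of scanning the whole users table.
export const findUsersByUsernameInsensitive = async (db: Knex, username: string) => {
  return db(TableName.Users)
    .whereRaw(`lower("username") = ?`, [username.toLowerCase()])
    .select("*");
};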
@@ -0,0 +1,25 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  const hasGatewayIdColumn = await knex.schema.hasColumn(TableName.IdentityKubernetesAuth, "gatewayId");

  if (!hasGatewayIdColumn) {
    await knex.schema.alterTable(TableName.IdentityKubernetesAuth, (table) => {
      table.uuid("gatewayId").nullable();
      table.foreign("gatewayId").references("id").inTable(TableName.Gateway).onDelete("SET NULL");
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  const hasGatewayIdColumn = await knex.schema.hasColumn(TableName.IdentityKubernetesAuth, "gatewayId");

  if (hasGatewayIdColumn) {
    await knex.schema.alterTable(TableName.IdentityKubernetesAuth, (table) => {
      table.dropForeign("gatewayId");
      table.dropColumn("gatewayId");
    });
  }
}
@@ -0,0 +1,110 @@
import { Knex } from "knex";

import { inMemoryKeyStore } from "@app/keystore/memory";
import { selectAllTableCols } from "@app/lib/knex";
import { initLogger } from "@app/lib/logger";
import { KmsDataKey } from "@app/services/kms/kms-types";

import { TableName } from "../schemas";
import { getMigrationEnvConfig } from "./utils/env-config";
import { getMigrationEncryptionServices } from "./utils/services";

// Note(daniel): We aren't dropping tables or columns in this migrations so we can easily rollback if needed.
// In the future we need to drop the projectGatewayId on the dynamic secrets table, and drop the project_gateways table entirely.

const BATCH_SIZE = 500;

export async function up(knex: Knex): Promise<void> {
  // eslint-disable-next-line no-param-reassign
  knex.replicaNode = () => {
    return knex;
  };

  if (!(await knex.schema.hasColumn(TableName.DynamicSecret, "gatewayId"))) {
    await knex.schema.alterTable(TableName.DynamicSecret, (table) => {
      table.uuid("gatewayId").nullable();
      table.foreign("gatewayId").references("id").inTable(TableName.Gateway).onDelete("SET NULL");

      table.index("gatewayId");
    });

    const existingDynamicSecretsWithProjectGatewayId = await knex(TableName.DynamicSecret)
      .select(selectAllTableCols(TableName.DynamicSecret))
      .whereNotNull(`${TableName.DynamicSecret}.projectGatewayId`)
      .join(TableName.ProjectGateway, `${TableName.ProjectGateway}.id`, `${TableName.DynamicSecret}.projectGatewayId`)
      .whereNotNull(`${TableName.ProjectGateway}.gatewayId`)
      .select(
        knex.ref("projectId").withSchema(TableName.ProjectGateway).as("projectId"),
        knex.ref("gatewayId").withSchema(TableName.ProjectGateway).as("projectGatewayGatewayId")
      );

    initLogger();
    const envConfig = getMigrationEnvConfig();
    const keyStore = inMemoryKeyStore();
    const { kmsService } = await getMigrationEncryptionServices({ envConfig, keyStore, db: knex });

    const updatedDynamicSecrets = await Promise.all(
      existingDynamicSecretsWithProjectGatewayId.map(async (existingDynamicSecret) => {
        if (!existingDynamicSecret.projectGatewayGatewayId) {
          const result = {
            ...existingDynamicSecret,
            gatewayId: null
          };

          const { projectId, projectGatewayGatewayId, ...rest } = result;
          return rest;
        }

        const { decryptor: secretManagerDecryptor } = await kmsService.createCipherPairWithDataKey({
          type: KmsDataKey.SecretManager,
          projectId: existingDynamicSecret.projectId
        });
        const { encryptor: secretManagerEncryptor } = await kmsService.createCipherPairWithDataKey({
          type: KmsDataKey.SecretManager,
          projectId: existingDynamicSecret.projectId
        });

        let decryptedStoredInput = JSON.parse(
          secretManagerDecryptor({ cipherTextBlob: Buffer.from(existingDynamicSecret.encryptedInput) }).toString()
        ) as object;

        // We're not removing the existing projectGatewayId from the input so we can easily rollback without having to re-encrypt the input
        decryptedStoredInput = {
          ...decryptedStoredInput,
          gatewayId: existingDynamicSecret.projectGatewayGatewayId
        };

        const encryptedInput = secretManagerEncryptor({
          plainText: Buffer.from(JSON.stringify(decryptedStoredInput))
        }).cipherTextBlob;

        const result = {
          ...existingDynamicSecret,
          encryptedInput,
          gatewayId: existingDynamicSecret.projectGatewayGatewayId
        };

        const { projectId, projectGatewayGatewayId, ...rest } = result;
        return rest;
      })
    );

    for (let i = 0; i < updatedDynamicSecrets.length; i += BATCH_SIZE) {
      // eslint-disable-next-line no-await-in-loop
      await knex(TableName.DynamicSecret)
        .insert(updatedDynamicSecrets.slice(i, i + BATCH_SIZE))
        .onConflict("id")
        .merge();
    }
  }
}

export async function down(knex: Knex): Promise<void> {
  // no re-encryption needed as we keep the old projectGatewayId in the input
  if (await knex.schema.hasColumn(TableName.DynamicSecret, "gatewayId")) {
    await knex.schema.alterTable(TableName.DynamicSecret, (table) => {
      table.dropForeign("gatewayId");
      table.dropColumn("gatewayId");
    });
  }
}
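The note at the top of this migration deliberately keeps the legacy projectGatewayId column and the project_gateways table so the change can be rolled back. A hypothetical follow-up migration of the kind the note describes (not part of this change set) might look like:

import { Knex } from "knex";

import { TableName } from "../schemas";

// Hypothetical cleanup migration: drop the legacy column and table once the
// gatewayId backfill above has been verified. Names are taken from the note.
export async function up(knex: Knex): Promise<void> {
  if (await knex.schema.hasColumn(TableName.DynamicSecret, "projectGatewayId")) {
    await knex.schema.alterTable(TableName.DynamicSecret, (table) => {
      table.dropColumn("projectGatewayId");
    });
  }
  await knex.schema.dropTableIfExists(TableName.ProjectGateway);
}

export async function down(_knex: Knex): Promise<void> {
  // Irreversible without a backup; the original migration intentionally keeps
  // the old data around so this cleanup can be delayed.
}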
@@ -0,0 +1,53 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  const columns = await knex.table(TableName.Organization).columnInfo();

  await knex.schema.alterTable(TableName.Organization, (t) => {
    if (!columns.secretsProductEnabled) {
      t.boolean("secretsProductEnabled").defaultTo(true);
    }
    if (!columns.pkiProductEnabled) {
      t.boolean("pkiProductEnabled").defaultTo(true);
    }
    if (!columns.kmsProductEnabled) {
      t.boolean("kmsProductEnabled").defaultTo(true);
    }
    if (!columns.sshProductEnabled) {
      t.boolean("sshProductEnabled").defaultTo(true);
    }
    if (!columns.scannerProductEnabled) {
      t.boolean("scannerProductEnabled").defaultTo(true);
    }
    if (!columns.shareSecretsProductEnabled) {
      t.boolean("shareSecretsProductEnabled").defaultTo(true);
    }
  });
}

export async function down(knex: Knex): Promise<void> {
  const columns = await knex.table(TableName.Organization).columnInfo();

  await knex.schema.alterTable(TableName.Organization, (t) => {
    if (columns.secretsProductEnabled) {
      t.dropColumn("secretsProductEnabled");
    }
    if (columns.pkiProductEnabled) {
      t.dropColumn("pkiProductEnabled");
    }
    if (columns.kmsProductEnabled) {
      t.dropColumn("kmsProductEnabled");
    }
    if (columns.sshProductEnabled) {
      t.dropColumn("sshProductEnabled");
    }
    if (columns.scannerProductEnabled) {
      t.dropColumn("scannerProductEnabled");
    }
    if (columns.shareSecretsProductEnabled) {
      t.dropColumn("shareSecretsProductEnabled");
    }
  });
}
@@ -0,0 +1,21 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  const hasSecretSharingColumn = await knex.schema.hasColumn(TableName.Project, "secretSharing");
  if (!hasSecretSharingColumn) {
    await knex.schema.table(TableName.Project, (table) => {
      table.boolean("secretSharing").notNullable().defaultTo(true);
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  const hasSecretSharingColumn = await knex.schema.hasColumn(TableName.Project, "secretSharing");
  if (hasSecretSharingColumn) {
    await knex.schema.table(TableName.Project, (table) => {
      table.dropColumn("secretSharing");
    });
  }
}
@@ -0,0 +1,35 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  const hasLifetimeColumn = await knex.schema.hasColumn(TableName.Organization, "maxSharedSecretLifetime");
  const hasViewLimitColumn = await knex.schema.hasColumn(TableName.Organization, "maxSharedSecretViewLimit");

  if (!hasLifetimeColumn || !hasViewLimitColumn) {
    await knex.schema.alterTable(TableName.Organization, (t) => {
      if (!hasLifetimeColumn) {
        t.integer("maxSharedSecretLifetime").nullable().defaultTo(2592000); // 30 days in seconds
      }
      if (!hasViewLimitColumn) {
        t.integer("maxSharedSecretViewLimit").nullable();
      }
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  const hasLifetimeColumn = await knex.schema.hasColumn(TableName.Organization, "maxSharedSecretLifetime");
  const hasViewLimitColumn = await knex.schema.hasColumn(TableName.Organization, "maxSharedSecretViewLimit");

  if (hasLifetimeColumn || hasViewLimitColumn) {
    await knex.schema.alterTable(TableName.Organization, (t) => {
      if (hasLifetimeColumn) {
        t.dropColumn("maxSharedSecretLifetime");
      }
      if (hasViewLimitColumn) {
        t.dropColumn("maxSharedSecretViewLimit");
      }
    });
  }
}
@@ -0,0 +1,43 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  if (await knex.schema.hasTable(TableName.SecretSharing)) {
    const hasEncryptedSalt = await knex.schema.hasColumn(TableName.SecretSharing, "encryptedSalt");
    const hasAuthorizedEmails = await knex.schema.hasColumn(TableName.SecretSharing, "authorizedEmails");

    if (!hasEncryptedSalt || !hasAuthorizedEmails) {
      await knex.schema.alterTable(TableName.SecretSharing, (t) => {
        // These two columns are only needed when secrets are shared with a specific list of emails

        if (!hasEncryptedSalt) {
          t.binary("encryptedSalt").nullable();
        }

        if (!hasAuthorizedEmails) {
          t.json("authorizedEmails").nullable();
        }
      });
    }
  }
}

export async function down(knex: Knex): Promise<void> {
  if (await knex.schema.hasTable(TableName.SecretSharing)) {
    const hasEncryptedSalt = await knex.schema.hasColumn(TableName.SecretSharing, "encryptedSalt");
    const hasAuthorizedEmails = await knex.schema.hasColumn(TableName.SecretSharing, "authorizedEmails");

    if (hasEncryptedSalt || hasAuthorizedEmails) {
      await knex.schema.alterTable(TableName.SecretSharing, (t) => {
        if (hasEncryptedSalt) {
          t.dropColumn("encryptedSalt");
        }

        if (hasAuthorizedEmails) {
          t.dropColumn("authorizedEmails");
        }
      });
    }
  }
}
@@ -27,7 +27,8 @@ export const DynamicSecretsSchema = z.object({
  createdAt: z.date(),
  updatedAt: z.date(),
  encryptedInput: zodBuffer,
-  projectGatewayId: z.string().uuid().nullable().optional()
+  projectGatewayId: z.string().uuid().nullable().optional(),
+  gatewayId: z.string().uuid().nullable().optional()
});

export type TDynamicSecrets = z.infer<typeof DynamicSecretsSchema>;

@@ -29,7 +29,8 @@ export const IdentityKubernetesAuthsSchema = z.object({
  allowedNames: z.string(),
  allowedAudience: z.string(),
  encryptedKubernetesTokenReviewerJwt: zodBuffer.nullable().optional(),
-  encryptedKubernetesCaCertificate: zodBuffer.nullable().optional()
+  encryptedKubernetesCaCertificate: zodBuffer.nullable().optional(),
+  gatewayId: z.string().uuid().nullable().optional()
});

export type TIdentityKubernetesAuths = z.infer<typeof IdentityKubernetesAuthsSchema>;

@@ -28,7 +28,15 @@ export const OrganizationsSchema = z.object({
  privilegeUpgradeInitiatedByUsername: z.string().nullable().optional(),
  privilegeUpgradeInitiatedAt: z.date().nullable().optional(),
  bypassOrgAuthEnabled: z.boolean().default(false),
-  userTokenExpiration: z.string().nullable().optional()
+  userTokenExpiration: z.string().nullable().optional(),
+  secretsProductEnabled: z.boolean().default(true).nullable().optional(),
+  pkiProductEnabled: z.boolean().default(true).nullable().optional(),
+  kmsProductEnabled: z.boolean().default(true).nullable().optional(),
+  sshProductEnabled: z.boolean().default(true).nullable().optional(),
+  scannerProductEnabled: z.boolean().default(true).nullable().optional(),
+  shareSecretsProductEnabled: z.boolean().default(true).nullable().optional(),
+  maxSharedSecretLifetime: z.number().default(2592000).nullable().optional(),
+  maxSharedSecretViewLimit: z.number().nullable().optional()
});

export type TOrganizations = z.infer<typeof OrganizationsSchema>;

@@ -27,7 +27,8 @@ export const ProjectsSchema = z.object({
  description: z.string().nullable().optional(),
  type: z.string(),
  enforceCapitalization: z.boolean().default(false),
-  hasDeleteProtection: z.boolean().default(false).nullable().optional()
+  hasDeleteProtection: z.boolean().default(false).nullable().optional(),
+  secretSharing: z.boolean().default(true)
});

export type TProjects = z.infer<typeof ProjectsSchema>;

@@ -27,7 +27,9 @@ export const SecretSharingSchema = z.object({
  password: z.string().nullable().optional(),
  encryptedSecret: zodBuffer.nullable().optional(),
  identifier: z.string().nullable().optional(),
-  type: z.string().default("share")
+  type: z.string().default("share"),
+  encryptedSalt: zodBuffer.nullable().optional(),
+  authorizedEmails: z.unknown().nullable().optional()
});

export type TSecretSharing = z.infer<typeof SecretSharingSchema>;

@@ -121,14 +121,7 @@ export const registerGatewayRouter = async (server: FastifyZodProvider) => {
          identity: z.object({
            name: z.string(),
            id: z.string()
-          }),
-          projects: z
-            .object({
-              name: z.string(),
-              id: z.string(),
-              slug: z.string()
-            })
-            .array()
+          })
        }).array()
      })
    }
@@ -158,17 +151,15 @@ export const registerGatewayRouter = async (server: FastifyZodProvider) => {
          identity: z.object({
            name: z.string(),
            id: z.string()
-          }),
-          projectGatewayId: z.string()
+          })
        }).array()
      })
    }
  },
  onRequest: verifyAuth([AuthMode.IDENTITY_ACCESS_TOKEN, AuthMode.JWT]),
  handler: async (req) => {
-    const gateways = await server.services.gateway.getProjectGateways({
-      projectId: req.params.projectId,
-      projectPermission: req.permission
+    const gateways = await server.services.gateway.listGateways({
+      orgPermission: req.permission
    });
    return { gateways };
  }
@@ -216,8 +207,7 @@ export const registerGatewayRouter = async (server: FastifyZodProvider) => {
      id: z.string()
    }),
    body: z.object({
-      name: slugSchema({ field: "name" }).optional(),
-      projectIds: z.string().array().optional()
+      name: slugSchema({ field: "name" }).optional()
    }),
    response: {
      200: z.object({
@@ -230,8 +220,7 @@ export const registerGatewayRouter = async (server: FastifyZodProvider) => {
    const gateway = await server.services.gateway.updateGatewayById({
      orgPermission: req.permission,
      id: req.params.id,
-      name: req.body.name,
-      projectIds: req.body.projectIds
+      name: req.body.name
    });
    return { gateway };
  }
@@ -145,7 +145,7 @@ export const registerSamlRouter = async (server: FastifyZodProvider) => {

  const { isUserCompleted, providerAuthToken } = await server.services.saml.samlLogin({
    externalId: profile.nameID,
-    email,
+    email: email.toLowerCase(),
    firstName,
    lastName: lastName as string,
    relayState: (req.body as { RelayState?: string }).RelayState,
@@ -17,7 +17,8 @@ import { TSecretFolderDALFactory } from "@app/services/secret-folder/secret-fold

import { TDynamicSecretLeaseDALFactory } from "../dynamic-secret-lease/dynamic-secret-lease-dal";
import { TDynamicSecretLeaseQueueServiceFactory } from "../dynamic-secret-lease/dynamic-secret-lease-queue";
-import { TProjectGatewayDALFactory } from "../gateway/project-gateway-dal";
+import { TGatewayDALFactory } from "../gateway/gateway-dal";
+import { OrgPermissionGatewayActions, OrgPermissionSubjects } from "../permission/org-permission";
import { TDynamicSecretDALFactory } from "./dynamic-secret-dal";
import {
  DynamicSecretStatus,
@@ -44,9 +45,9 @@ type TDynamicSecretServiceFactoryDep = {
  licenseService: Pick<TLicenseServiceFactory, "getPlan">;
  folderDAL: Pick<TSecretFolderDALFactory, "findBySecretPath" | "findBySecretPathMultiEnv">;
  projectDAL: Pick<TProjectDALFactory, "findProjectBySlug">;
-  permissionService: Pick<TPermissionServiceFactory, "getProjectPermission">;
+  permissionService: Pick<TPermissionServiceFactory, "getProjectPermission" | "getOrgPermission">;
  kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">;
-  projectGatewayDAL: Pick<TProjectGatewayDALFactory, "findOne">;
+  gatewayDAL: Pick<TGatewayDALFactory, "findOne" | "find">;
  resourceMetadataDAL: Pick<TResourceMetadataDALFactory, "insertMany" | "delete">;
};

@@ -62,7 +63,7 @@ export const dynamicSecretServiceFactory = ({
  dynamicSecretQueueService,
  projectDAL,
  kmsService,
-  projectGatewayDAL,
+  gatewayDAL,
  resourceMetadataDAL
}: TDynamicSecretServiceFactoryDep) => {
  const create = async ({
@@ -117,15 +118,31 @@
    const inputs = await selectedProvider.validateProviderInputs(provider.inputs);

    let selectedGatewayId: string | null = null;
-    if (inputs && typeof inputs === "object" && "projectGatewayId" in inputs && inputs.projectGatewayId) {
-      const projectGatewayId = inputs.projectGatewayId as string;
+    if (inputs && typeof inputs === "object" && "gatewayId" in inputs && inputs.gatewayId) {
+      const gatewayId = inputs.gatewayId as string;

-      const projectGateway = await projectGatewayDAL.findOne({ id: projectGatewayId, projectId });
-      if (!projectGateway)
+      const [gateway] = await gatewayDAL.find({ id: gatewayId, orgId: actorOrgId });
+
+      if (!gateway) {
        throw new NotFoundError({
-          message: `Project gateway with ${projectGatewayId} not found`
+          message: `Gateway with ID ${gatewayId} not found`
        });
-      selectedGatewayId = projectGateway.id;
+      }
+
+      const { permission: orgPermission } = await permissionService.getOrgPermission(
+        actor,
+        actorId,
+        gateway.orgId,
+        actorAuthMethod,
+        actorOrgId
+      );
+
+      ForbiddenError.from(orgPermission).throwUnlessCan(
+        OrgPermissionGatewayActions.AttachGateways,
+        OrgPermissionSubjects.Gateway
+      );
+
+      selectedGatewayId = gateway.id;
    }

    const isConnected = await selectedProvider.validateConnection(provider.inputs);
@@ -146,7 +163,7 @@
        defaultTTL,
        folderId: folder.id,
        name,
-        projectGatewayId: selectedGatewayId
+        gatewayId: selectedGatewayId
      },
      tx
    );
@@ -255,20 +272,30 @@
    const updatedInput = await selectedProvider.validateProviderInputs(newInput);

    let selectedGatewayId: string | null = null;
-    if (
-      updatedInput &&
-      typeof updatedInput === "object" &&
-      "projectGatewayId" in updatedInput &&
-      updatedInput?.projectGatewayId
-    ) {
-      const projectGatewayId = updatedInput.projectGatewayId as string;
+    if (updatedInput && typeof updatedInput === "object" && "gatewayId" in updatedInput && updatedInput?.gatewayId) {
+      const gatewayId = updatedInput.gatewayId as string;

-      const projectGateway = await projectGatewayDAL.findOne({ id: projectGatewayId, projectId });
-      if (!projectGateway)
+      const [gateway] = await gatewayDAL.find({ id: gatewayId, orgId: actorOrgId });
+      if (!gateway) {
        throw new NotFoundError({
-          message: `Project gateway with ${projectGatewayId} not found`
+          message: `Gateway with ID ${gatewayId} not found`
        });
-      selectedGatewayId = projectGateway.id;
+      }
+
+      const { permission: orgPermission } = await permissionService.getOrgPermission(
+        actor,
+        actorId,
+        gateway.orgId,
+        actorAuthMethod,
+        actorOrgId
+      );
+
+      ForbiddenError.from(orgPermission).throwUnlessCan(
+        OrgPermissionGatewayActions.AttachGateways,
+        OrgPermissionSubjects.Gateway
+      );
+
+      selectedGatewayId = gateway.id;
    }

    const isConnected = await selectedProvider.validateConnection(newInput);
@@ -284,7 +311,7 @@
        defaultTTL,
        name: newName ?? name,
        status: null,
-        projectGatewayId: selectedGatewayId
+        gatewayId: selectedGatewayId
      },
      tx
    );
@@ -18,7 +18,7 @@ import { SqlDatabaseProvider } from "./sql-database";
import { TotpProvider } from "./totp";

type TBuildDynamicSecretProviderDTO = {
-  gatewayService: Pick<TGatewayServiceFactory, "fnGetGatewayClientTls">;
+  gatewayService: Pick<TGatewayServiceFactory, "fnGetGatewayClientTlsByGatewayId">;
};

export const buildDynamicSecretProviders = ({
@@ -137,7 +137,7 @@ export const DynamicSecretSqlDBSchema = z.object({
  revocationStatement: z.string().trim(),
  renewStatement: z.string().trim().optional(),
  ca: z.string().optional(),
-  projectGatewayId: z.string().nullable().optional()
+  gatewayId: z.string().nullable().optional()
});

export const DynamicSecretCassandraSchema = z.object({
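With the schema change above, SQL dynamic secret inputs reference a gateway through the organization-level gatewayId instead of a projectGatewayId. A minimal illustration of the new shape is sketched below; every field value is a placeholder and unrelated SQL fields are elided, so this is not a payload taken from this change set.

// Illustrative only — placeholder values, assumed field set.
const exampleSqlInputs: Record<string, unknown> = {
  host: "db.internal.example.com",
  creationStatement: "CREATE USER \"{{username}}\" WITH PASSWORD '{{password}}';",
  revocationStatement: "DROP USER \"{{username}}\";",
  gatewayId: "8f0d3b9e-1c2a-4f7e-9b1d-3a5c6d7e8f90" // was: projectGatewayId
};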
@@ -112,14 +112,14 @@ const generateUsername = (provider: SqlProviders) => {
};

type TSqlDatabaseProviderDTO = {
-  gatewayService: Pick<TGatewayServiceFactory, "fnGetGatewayClientTls">;
+  gatewayService: Pick<TGatewayServiceFactory, "fnGetGatewayClientTlsByGatewayId">;
};

export const SqlDatabaseProvider = ({ gatewayService }: TSqlDatabaseProviderDTO): TDynamicProviderFns => {
  const validateProviderInputs = async (inputs: unknown) => {
    const providerInputs = await DynamicSecretSqlDBSchema.parseAsync(inputs);

-    const [hostIp] = await verifyHostInputValidity(providerInputs.host, Boolean(providerInputs.projectGatewayId));
+    const [hostIp] = await verifyHostInputValidity(providerInputs.host, Boolean(providerInputs.gatewayId));
    validateHandlebarTemplate("SQL creation", providerInputs.creationStatement, {
      allowedExpressions: (val) => ["username", "password", "expiration", "database"].includes(val)
    });
@@ -168,7 +168,7 @@ export const SqlDatabaseProvider = ({ gatewayService }: TSqlDatabaseProviderDTO)
    providerInputs: z.infer<typeof DynamicSecretSqlDBSchema>,
    gatewayCallback: (host: string, port: number) => Promise<void>
  ) => {
-    const relayDetails = await gatewayService.fnGetGatewayClientTls(providerInputs.projectGatewayId as string);
+    const relayDetails = await gatewayService.fnGetGatewayClientTlsByGatewayId(providerInputs.gatewayId as string);
    const [relayHost, relayPort] = relayDetails.relayAddress.split(":");
    await withGatewayProxy(
      async (port) => {
@@ -202,7 +202,7 @@ export const SqlDatabaseProvider = ({ gatewayService }: TSqlDatabaseProviderDTO)
      await db.destroy();
    };

-    if (providerInputs.projectGatewayId) {
+    if (providerInputs.gatewayId) {
      await gatewayProxyWrapper(providerInputs, gatewayCallback);
    } else {
      await gatewayCallback();
@@ -238,7 +238,7 @@ export const SqlDatabaseProvider = ({ gatewayService }: TSqlDatabaseProviderDTO)
        await db.destroy();
      }
    };
-    if (providerInputs.projectGatewayId) {
+    if (providerInputs.gatewayId) {
      await gatewayProxyWrapper(providerInputs, gatewayCallback);
    } else {
      await gatewayCallback();
@@ -265,7 +265,7 @@ export const SqlDatabaseProvider = ({ gatewayService }: TSqlDatabaseProviderDTO)
        await db.destroy();
      }
    };
-    if (providerInputs.projectGatewayId) {
+    if (providerInputs.gatewayId) {
      await gatewayProxyWrapper(providerInputs, gatewayCallback);
    } else {
      await gatewayCallback();
@@ -301,7 +301,7 @@ export const SqlDatabaseProvider = ({ gatewayService }: TSqlDatabaseProviderDTO)
        await db.destroy();
      }
    };
-    if (providerInputs.projectGatewayId) {
+    if (providerInputs.gatewayId) {
      await gatewayProxyWrapper(providerInputs, gatewayCallback);
    } else {
      await gatewayCallback();
@@ -1,37 +1,34 @@
import { Knex } from "knex";

import { TDbClient } from "@app/db";
import { GatewaysSchema, TableName, TGateways } from "@app/db/schemas";
import { DatabaseError } from "@app/lib/errors";
-import {
-  buildFindFilter,
-  ormify,
-  selectAllTableCols,
-  sqlNestRelationships,
-  TFindFilter,
-  TFindOpt
-} from "@app/lib/knex";
+import { buildFindFilter, ormify, selectAllTableCols, TFindFilter, TFindOpt } from "@app/lib/knex";

export type TGatewayDALFactory = ReturnType<typeof gatewayDALFactory>;

export const gatewayDALFactory = (db: TDbClient) => {
  const orm = ormify(db, TableName.Gateway);

-  const find = async (filter: TFindFilter<TGateways>, { offset, limit, sort, tx }: TFindOpt<TGateways> = {}) => {
+  const find = async (
+    filter: TFindFilter<TGateways> & { orgId?: string },
+    { offset, limit, sort, tx }: TFindOpt<TGateways> = {}
+  ) => {
    try {
      const query = (tx || db)(TableName.Gateway)
        // eslint-disable-next-line @typescript-eslint/no-misused-promises
-        .where(buildFindFilter(filter))
+        .where(buildFindFilter(filter, TableName.Gateway, ["orgId"]))
        .join(TableName.Identity, `${TableName.Identity}.id`, `${TableName.Gateway}.identityId`)
-        .leftJoin(TableName.ProjectGateway, `${TableName.ProjectGateway}.gatewayId`, `${TableName.Gateway}.id`)
-        .leftJoin(TableName.Project, `${TableName.Project}.id`, `${TableName.ProjectGateway}.projectId`)
+        .join(
+          TableName.IdentityOrgMembership,
+          `${TableName.IdentityOrgMembership}.identityId`,
+          `${TableName.Gateway}.identityId`
+        )
        .select(selectAllTableCols(TableName.Gateway))
-        .select(
-          db.ref("name").withSchema(TableName.Identity).as("identityName"),
-          db.ref("name").withSchema(TableName.Project).as("projectName"),
-          db.ref("slug").withSchema(TableName.Project).as("projectSlug"),
-          db.ref("id").withSchema(TableName.Project).as("projectId")
-        );
+        .select(db.ref("orgId").withSchema(TableName.IdentityOrgMembership).as("identityOrgId"))
+        .select(db.ref("name").withSchema(TableName.Identity).as("identityName"));

+      if (filter.orgId) {
+        void query.where(`${TableName.IdentityOrgMembership}.orgId`, filter.orgId);
+      }
      if (limit) void query.limit(limit);
      if (offset) void query.offset(offset);
      if (sort) {
@@ -39,48 +36,16 @@ export const gatewayDALFactory = (db: TDbClient) => {
      }

      const docs = await query;
-      return sqlNestRelationships({
-        data: docs,
-        key: "id",
-        parentMapper: (data) => ({
-          ...GatewaysSchema.parse(data),
-          identity: { id: data.identityId, name: data.identityName }
-        }),
-        childrenMapper: [
-          {
-            key: "projectId",
-            label: "projects" as const,
-            mapper: ({ projectId, projectName, projectSlug }) => ({
-              id: projectId,
-              name: projectName,
-              slug: projectSlug
-            })
-          }
-        ]
-      });
+
+      return docs.map((el) => ({
+        ...GatewaysSchema.parse(el),
+        orgId: el.identityOrgId as string, // todo(daniel): figure out why typescript is not inferring this as a string
+        identity: { id: el.identityId, name: el.identityName }
+      }));
    } catch (error) {
      throw new DatabaseError({ error, name: `${TableName.Gateway}: Find` });
    }
  };

-  const findByProjectId = async (projectId: string, tx?: Knex) => {
-    try {
-      const query = (tx || db)(TableName.Gateway)
-        .join(TableName.Identity, `${TableName.Identity}.id`, `${TableName.Gateway}.identityId`)
-        .join(TableName.ProjectGateway, `${TableName.ProjectGateway}.gatewayId`, `${TableName.Gateway}.id`)
-        .select(selectAllTableCols(TableName.Gateway))
-        .select(
-          db.ref("name").withSchema(TableName.Identity).as("identityName"),
-          db.ref("id").withSchema(TableName.ProjectGateway).as("projectGatewayId")
-        )
-        .where({ [`${TableName.ProjectGateway}.projectId` as "projectId"]: projectId });
-
-      const docs = await query;
-      return docs.map((el) => ({ ...el, identity: { id: el.identityId, name: el.identityName } }));
-    } catch (error) {
-      throw new DatabaseError({ error, name: `${TableName.Gateway}: Find by project id` });
-    }
-  };
-
-  return { ...orm, find, findByProjectId };
+  return { ...orm, find };
};
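After this change, gateways are listed per organization rather than per project. A brief usage sketch follows; the `gatewayDAL` instance and the surrounding service wiring are assumed context, not code from this comparison.

// Sketch: org-scoped listing via the reworked find(), assuming a DAL built by gatewayDALFactory(db).
const listOrgGateways = async (orgId: string) => {
  // The orgId filter is applied through the IdentityOrgMembership join added above.
  const gateways = await gatewayDAL.find({ orgId });
  return gateways.map((gw) => ({ id: gw.id, name: gw.name, identity: gw.identity }));
};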
@@ -4,7 +4,6 @@ import { ForbiddenError } from "@casl/ability";
import * as x509 from "@peculiar/x509";
import { z } from "zod";

-import { ActionProjectType } from "@app/db/schemas";
import { KeyStorePrefixes, PgSqlLock, TKeyStoreFactory } from "@app/keystore/keystore";
import { getConfig } from "@app/lib/config/env";
import { BadRequestError, NotFoundError } from "@app/lib/errors";
@@ -27,17 +26,14 @@ import { TGatewayDALFactory } from "./gateway-dal";
import {
  TExchangeAllocatedRelayAddressDTO,
  TGetGatewayByIdDTO,
-  TGetProjectGatewayByIdDTO,
  THeartBeatDTO,
  TListGatewaysDTO,
  TUpdateGatewayByIdDTO
} from "./gateway-types";
import { TOrgGatewayConfigDALFactory } from "./org-gateway-config-dal";
-import { TProjectGatewayDALFactory } from "./project-gateway-dal";

type TGatewayServiceFactoryDep = {
  gatewayDAL: TGatewayDALFactory;
-  projectGatewayDAL: TProjectGatewayDALFactory;
  orgGatewayConfigDAL: Pick<TOrgGatewayConfigDALFactory, "findOne" | "create" | "transaction" | "findById">;
  licenseService: Pick<TLicenseServiceFactory, "onPremFeatures" | "getPlan">;
  kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey" | "decryptWithRootKey">;
@@ -57,8 +53,7 @@ export const gatewayServiceFactory = ({
  kmsService,
  permissionService,
  orgGatewayConfigDAL,
-  keyStore,
-  projectGatewayDAL
+  keyStore
}: TGatewayServiceFactoryDep) => {
  const $validateOrgAccessToGateway = async (orgId: string, actorId: string, actorAuthMethod: ActorAuthMethod) => {
    // if (!licenseService.onPremFeatures.gateway) {
@@ -526,7 +521,7 @@ export const gatewayServiceFactory = ({
    return gateway;
  };

-  const updateGatewayById = async ({ orgPermission, id, name, projectIds }: TUpdateGatewayByIdDTO) => {
+  const updateGatewayById = async ({ orgPermission, id, name }: TUpdateGatewayByIdDTO) => {
    const { permission } = await permissionService.getOrgPermission(
      orgPermission.type,
      orgPermission.id,
@@ -543,15 +538,6 @@ export const gatewayServiceFactory = ({

    const [gateway] = await gatewayDAL.update({ id, orgGatewayRootCaId: orgGatewayConfig.id }, { name });
    if (!gateway) throw new NotFoundError({ message: `Gateway with ID ${id} not found.` });
-    if (projectIds) {
-      await projectGatewayDAL.transaction(async (tx) => {
-        await projectGatewayDAL.delete({ gatewayId: gateway.id }, tx);
-        await projectGatewayDAL.insertMany(
-          projectIds.map((el) => ({ gatewayId: gateway.id, projectId: el })),
-          tx
-        );
-      });
-    }

    return gateway;
  };
@@ -576,27 +562,7 @@ export const gatewayServiceFactory = ({
    return gateway;
  };

-  const getProjectGateways = async ({ projectId, projectPermission }: TGetProjectGatewayByIdDTO) => {
-    await permissionService.getProjectPermission({
-      projectId,
-      actor: projectPermission.type,
-      actorId: projectPermission.id,
-      actorOrgId: projectPermission.orgId,
-      actorAuthMethod: projectPermission.authMethod,
-      actionProjectType: ActionProjectType.Any
-    });
-
-    const gateways = await gatewayDAL.findByProjectId(projectId);
-    return gateways;
-  };
-
  // this has no permission check and used for dynamic secrets directly
  // assumes permission check is already done
-  const fnGetGatewayClientTls = async (projectGatewayId: string) => {
-    const projectGateway = await projectGatewayDAL.findById(projectGatewayId);
-    if (!projectGateway) throw new NotFoundError({ message: `Project gateway with ID ${projectGatewayId} not found.` });
-
-    const { gatewayId } = projectGateway;
+  const fnGetGatewayClientTlsByGatewayId = async (gatewayId: string) => {
    const gateway = await gatewayDAL.findById(gatewayId);
    if (!gateway) throw new NotFoundError({ message: `Gateway with ID ${gatewayId} not found.` });

@@ -645,8 +611,7 @@ export const gatewayServiceFactory = ({
    getGatewayById,
    updateGatewayById,
    deleteGatewayById,
-    getProjectGateways,
-    fnGetGatewayClientTls,
+    fnGetGatewayClientTlsByGatewayId,
    heartbeat
  };
};
@@ -20,7 +20,6 @@ export type TGetGatewayByIdDTO = {
export type TUpdateGatewayByIdDTO = {
  id: string;
  name?: string;
-  projectIds?: string[];
  orgPermission: OrgServiceActor;
};

@@ -1,10 +0,0 @@
-import { TDbClient } from "@app/db";
-import { TableName } from "@app/db/schemas";
-import { ormify } from "@app/lib/knex";
-
-export type TProjectGatewayDALFactory = ReturnType<typeof projectGatewayDALFactory>;
-
-export const projectGatewayDALFactory = (db: TDbClient) => {
-  const orm = ormify(db, TableName.ProjectGateway);
-  return orm;
-};
@@ -111,9 +111,9 @@ export const groupDALFactory = (db: TDbClient) => {
    }

    if (search) {
-      void query.andWhereRaw(`CONCAT_WS(' ', "firstName", "lastName", "username") ilike ?`, [`%${search}%`]);
+      void query.andWhereRaw(`CONCAT_WS(' ', "firstName", "lastName", lower("username")) ilike ?`, [`%${search}%`]);
    } else if (username) {
-      void query.andWhere(`${TableName.Users}.username`, "ilike", `%${username}%`);
+      void query.andWhereRaw(`lower("${TableName.Users}"."username") ilike ?`, `%${username}%`);
    }

    switch (filter) {
@@ -30,7 +30,7 @@ import {
import { TUserGroupMembershipDALFactory } from "./user-group-membership-dal";

type TGroupServiceFactoryDep = {
-  userDAL: Pick<TUserDALFactory, "find" | "findUserEncKeyByUserIdsBatch" | "transaction" | "findOne">;
+  userDAL: Pick<TUserDALFactory, "find" | "findUserEncKeyByUserIdsBatch" | "transaction" | "findUserByUsername">;
  groupDAL: Pick<
    TGroupDALFactory,
    "create" | "findOne" | "update" | "delete" | "findAllGroupPossibleMembers" | "findById" | "transaction"
@@ -380,7 +380,10 @@ export const groupServiceFactory = ({
        details: { missingPermissions: permissionBoundary.missingPermissions }
      });

-    const user = await userDAL.findOne({ username });
+    const usersWithUsername = await userDAL.findUserByUsername(username);
+    // akhilmhdh: case sensitive email resolution
+    const user =
+      usersWithUsername?.length > 1 ? usersWithUsername.find((el) => el.username === username) : usersWithUsername?.[0];
    if (!user) throw new NotFoundError({ message: `Failed to find user with username ${username}` });

    const users = await addUsersToGroupByUserIds({
@@ -461,7 +464,10 @@ export const groupServiceFactory = ({
        details: { missingPermissions: permissionBoundary.missingPermissions }
      });

-    const user = await userDAL.findOne({ username });
+    const usersWithUsername = await userDAL.findUserByUsername(username);
+    // akhilmhdh: case sensitive email resolution
+    const user =
+      usersWithUsername?.length > 1 ? usersWithUsername.find((el) => el.username === username) : usersWithUsername?.[0];
    if (!user) throw new NotFoundError({ message: `Failed to find user with username ${username}` });

    const users = await removeUsersFromGroupByUserIds({
|
||||
isInitialized = true;
|
||||
|
||||
logger.info("PKCS#11 module initialized");
|
||||
} catch (err) {
|
||||
logger.error(err, "Failed to initialize PKCS#11 module");
|
||||
throw err;
|
||||
} catch (error) {
|
||||
if (error instanceof pkcs11js.Pkcs11Error && error.code === pkcs11js.CKR_CRYPTOKI_ALREADY_INITIALIZED) {
|
||||
logger.info("Skipping HSM initialization because it's already initialized.");
|
||||
} else {
|
||||
logger.error(error, "Failed to initialize PKCS#11 module");
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
|
@@ -380,7 +380,7 @@ export const ldapConfigServiceFactory = ({
      if (serverCfg.trustLdapEmails) {
        newUser = await userDAL.findOne(
          {
-            email,
+            email: email.toLowerCase(),
            isEmailVerified: true
          },
          tx
@@ -391,8 +391,8 @@ export const ldapConfigServiceFactory = ({
          const uniqueUsername = await normalizeUsername(username, userDAL);
          newUser = await userDAL.create(
            {
-              username: serverCfg.trustLdapEmails ? email : uniqueUsername,
-              email,
+              username: serverCfg.trustLdapEmails ? email.toLowerCase() : uniqueUsername,
+              email: email.toLowerCase(),
              isEmailVerified: serverCfg.trustLdapEmails,
              firstName,
              lastName,
@@ -429,7 +429,7 @@ export const ldapConfigServiceFactory = ({
        await orgMembershipDAL.create(
          {
            userId: newUser.id,
-            inviteEmail: email,
+            inviteEmail: email.toLowerCase(),
            orgId,
            role,
            roleId,
@@ -2,6 +2,7 @@ import axios, { AxiosError } from "axios";

import { getConfig } from "@app/lib/config/env";
import { request } from "@app/lib/config/request";
+import { logger } from "@app/lib/logger";

import { TFeatureSet } from "./license-types";

@@ -98,9 +99,10 @@ export const setupLicenseRequestWithStore = (baseURL: string, refreshUrl: string
    (response) => response,
    async (err) => {
      const originalRequest = (err as AxiosError).config;

+      const errStatusCode = Number((err as AxiosError)?.response?.status);
+      logger.error((err as AxiosError)?.response?.data, "License server call error");
      // eslint-disable-next-line
-      if ((err as AxiosError)?.response?.status === 401 && !(originalRequest as any)._retry) {
+      if ((errStatusCode === 401 || errStatusCode === 403) && !(originalRequest as any)._retry) {
        // eslint-disable-next-line
        (originalRequest as any)._retry = true; // injected

@@ -348,8 +348,8 @@ export const licenseServiceFactory = ({
    } = await licenseServerCloudApi.request.post(
      `/api/license-server/v1/customers/${organization.customerId}/billing-details/payment-methods`,
      {
-        success_url: `${appCfg.SITE_URL}/dashboard`,
-        cancel_url: `${appCfg.SITE_URL}/dashboard`
+        success_url: `${appCfg.SITE_URL}/organization/billing`,
+        cancel_url: `${appCfg.SITE_URL}/organization/billing`
      }
    );

@@ -362,7 +362,7 @@ export const licenseServiceFactory = ({
    } = await licenseServerCloudApi.request.post(
      `/api/license-server/v1/customers/${organization.customerId}/billing-details/billing-portal`,
      {
-        return_url: `${appCfg.SITE_URL}/dashboard`
+        return_url: `${appCfg.SITE_URL}/organization/billing`
      }
    );

@@ -379,7 +379,7 @@ export const licenseServiceFactory = ({
        message: `Organization with ID '${orgId}' not found`
      });
    }
-    if (instanceType !== InstanceType.OnPrem && instanceType !== InstanceType.EnterpriseOnPremOffline) {
+    if (instanceType === InstanceType.Cloud) {
      const { data } = await licenseServerCloudApi.request.get(
        `/api/license-server/v1/customers/${organization.customerId}/cloud-plan/billing`
      );
@@ -407,11 +407,38 @@ export const licenseServiceFactory = ({
        message: `Organization with ID '${orgId}' not found`
      });
    }
-    if (instanceType !== InstanceType.OnPrem && instanceType !== InstanceType.EnterpriseOnPremOffline) {
-      const { data } = await licenseServerCloudApi.request.get(
-        `/api/license-server/v1/customers/${organization.customerId}/cloud-plan/table`
-      );
-      return data;
+
+    const orgMembersUsed = await orgDAL.countAllOrgMembers(orgId);
+    const identityUsed = await identityOrgMembershipDAL.countAllOrgIdentities({ orgId });
+    const projects = await projectDAL.find({ orgId });
+    const projectCount = projects.length;
+
+    if (instanceType === InstanceType.Cloud) {
+      const { data } = await licenseServerCloudApi.request.get<{
+        head: { name: string }[];
+        rows: { name: string; allowed: boolean }[];
+      }>(`/api/license-server/v1/customers/${organization.customerId}/cloud-plan/table`);
+
+      const formattedData = {
+        head: data.head,
+        rows: data.rows.map((el) => {
+          let used = "-";
+
+          if (el.name === BillingPlanRows.MemberLimit.name) {
+            used = orgMembersUsed.toString();
+          } else if (el.name === BillingPlanRows.WorkspaceLimit.name) {
+            used = projectCount.toString();
+          } else if (el.name === BillingPlanRows.IdentityLimit.name) {
+            used = (identityUsed + orgMembersUsed).toString();
+          }
+
+          return {
+            ...el,
+            used
+          };
+        })
+      };
+      return formattedData;
    }

    const mappedRows = await Promise.all(
@@ -420,14 +447,11 @@ export const licenseServiceFactory = ({
        let used = "-";

        if (field === BillingPlanRows.MemberLimit.field) {
-          const orgMemberships = await orgDAL.countAllOrgMembers(orgId);
-          used = orgMemberships.toString();
+          used = orgMembersUsed.toString();
        } else if (field === BillingPlanRows.WorkspaceLimit.field) {
-          const projects = await projectDAL.find({ orgId });
-          used = projects.length.toString();
+          used = projectCount.toString();
        } else if (field === BillingPlanRows.IdentityLimit.field) {
-          const identities = await identityOrgMembershipDAL.countAllOrgIdentities({ orgId });
-          used = identities.toString();
+          used = identityUsed.toString();
        }

        return {
@@ -171,8 +171,8 @@ export const oidcConfigServiceFactory = ({
  };

  const oidcLogin = async ({
-    externalId,
    email,
+    externalId,
    firstName,
    lastName,
    orgId,
@@ -717,7 +717,7 @@ export const oidcConfigServiceFactory = ({
    const groups = typeof claims.groups === "string" ? [claims.groups] : (claims.groups as string[] | undefined);

    oidcLogin({
-      email: claims.email,
+      email: claims.email.toLowerCase(),
      externalId: claims.sub,
      firstName: claims.given_name ?? "",
      lastName: claims.family_name ?? "",
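The SAML, LDAP, OIDC, and SCIM hunks in this comparison all apply the same normalization: an email is lowercased before it is stored, matched, or used as an invite address. The change set inlines `.toLowerCase()` at each call site; the one-line helper below only illustrates the shared pattern and does not exist in this codebase.

// Illustrative only — the PR inlines .toLowerCase() at each call site instead.
const normalizeEmail = (email?: string | null) => email?.toLowerCase();

// e.g. userDAL.findOne({ email: normalizeEmail(claims.email), isEmailVerified: true })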
@@ -41,7 +41,8 @@ export enum OrgPermissionGatewayActions {
  CreateGateways = "create-gateways",
  ListGateways = "list-gateways",
  EditGateways = "edit-gateways",
-  DeleteGateways = "delete-gateways"
+  DeleteGateways = "delete-gateways",
+  AttachGateways = "attach-gateways"
}

export enum OrgPermissionIdentityActions {
@@ -337,6 +338,7 @@ const buildAdminPermission = () => {
  can(OrgPermissionGatewayActions.CreateGateways, OrgPermissionSubjects.Gateway);
  can(OrgPermissionGatewayActions.EditGateways, OrgPermissionSubjects.Gateway);
  can(OrgPermissionGatewayActions.DeleteGateways, OrgPermissionSubjects.Gateway);
+  can(OrgPermissionGatewayActions.AttachGateways, OrgPermissionSubjects.Gateway);

  can(OrgPermissionAdminConsoleAction.AccessAllProjects, OrgPermissionSubjects.AdminConsole);

@@ -378,6 +380,7 @@ const buildMemberPermission = () => {
  can(OrgPermissionAppConnectionActions.Connect, OrgPermissionSubjects.AppConnections);
  can(OrgPermissionGatewayActions.ListGateways, OrgPermissionSubjects.Gateway);
  can(OrgPermissionGatewayActions.CreateGateways, OrgPermissionSubjects.Gateway);
+  can(OrgPermissionGatewayActions.AttachGateways, OrgPermissionSubjects.Gateway);

  return rules;
};
@@ -342,7 +342,7 @@ export const scimServiceFactory = ({
        orgMembership = await orgMembershipDAL.create(
          {
            userId: userAlias.userId,
-            inviteEmail: email,
+            inviteEmail: email.toLowerCase(),
            orgId,
            role,
            roleId,
@@ -364,7 +364,7 @@ export const scimServiceFactory = ({
        if (trustScimEmails) {
          user = await userDAL.findOne(
            {
-              email,
+              email: email.toLowerCase(),
              isEmailVerified: true
            },
            tx
|
||||
);
|
||||
user = await userDAL.create(
|
||||
{
|
||||
username: trustScimEmails ? email : uniqueUsername,
|
||||
email,
|
||||
username: trustScimEmails ? email.toLowerCase() : uniqueUsername,
|
||||
email: email.toLowerCase(),
|
||||
isEmailVerified: trustScimEmails,
|
||||
firstName,
|
||||
lastName,
|
||||
@@ -396,7 +396,7 @@ export const scimServiceFactory = ({
|
||||
userId: user.id,
|
||||
aliasType,
|
||||
externalId,
|
||||
emails: email ? [email] : [],
|
||||
emails: email ? [email.toLowerCase()] : [],
|
||||
orgId
|
||||
},
|
||||
tx
|
||||
@@ -418,7 +418,7 @@ export const scimServiceFactory = ({
|
||||
orgMembership = await orgMembershipDAL.create(
|
||||
{
|
||||
userId: user.id,
|
||||
inviteEmail: email,
|
||||
inviteEmail: email.toLowerCase(),
|
||||
orgId,
|
||||
role,
|
||||
roleId,
|
||||
@@ -529,7 +529,7 @@ export const scimServiceFactory = ({
|
||||
membership.userId,
|
||||
{
|
||||
firstName: scimUser.name.givenName,
|
||||
email: scimUser.emails[0].value,
|
||||
email: scimUser.emails[0].value.toLowerCase(),
|
||||
lastName: scimUser.name.familyName,
|
||||
isEmailVerified: hasEmailChanged ? trustScimEmails : undefined
|
||||
},
|
||||
@@ -606,7 +606,7 @@ export const scimServiceFactory = ({
|
||||
membership.userId,
|
||||
{
|
||||
firstName,
|
||||
email,
|
||||
email: email?.toLowerCase(),
|
||||
lastName,
|
||||
isEmailVerified:
|
||||
org.orgAuthMethod === OrgAuthMethod.OIDC ? serverCfg.trustOidcEmails : serverCfg.trustSamlEmails
|
||||
|
@@ -1,4 +1,4 @@
-import ldap from "ldapjs";
+import ldap, { Client, SearchOptions } from "ldapjs";

import {
  TRotationFactory,
@@ -8,26 +8,73 @@
  TRotationFactoryRotateCredentials
} from "@app/ee/services/secret-rotation-v2/secret-rotation-v2-types";
import { logger } from "@app/lib/logger";
+import { DistinguishedNameRegex } from "@app/lib/regex";
import { encryptAppConnectionCredentials } from "@app/services/app-connection/app-connection-fns";
import { getLdapConnectionClient, LdapProvider, TLdapConnection } from "@app/services/app-connection/ldap";

import { generatePassword } from "../shared/utils";
import {
+  LdapPasswordRotationMethod,
  TLdapPasswordRotationGeneratedCredentials,
+  TLdapPasswordRotationInput,
  TLdapPasswordRotationWithConnection
} from "./ldap-password-rotation-types";

const getEncodedPassword = (password: string) => Buffer.from(`"${password}"`, "utf16le");

+const getDN = async (dn: string, client: Client): Promise<string> => {
+  if (DistinguishedNameRegex.test(dn)) return dn;
+
+  const opts: SearchOptions = {
+    filter: `(userPrincipalName=${dn})`,
+    scope: "sub",
+    attributes: ["dn"]
+  };
+
+  const base = dn
+    .split("@")[1]
+    .split(".")
+    .map((dc) => `dc=${dc}`)
+    .join(",");
+
+  return new Promise((resolve, reject) => {
+    // Perform the search
+    client.search(base, opts, (err, res) => {
+      if (err) {
+        logger.error(err, "LDAP Failed to get DN");
+        reject(new Error(`Provider Resolve DN Error: ${err.message}`));
+      }
+
+      let userDn: string | null;
+
+      res.on("searchEntry", (entry) => {
+        userDn = entry.objectName;
+      });
+
+      res.on("error", (error) => {
+        logger.error(error, "LDAP Failed to get DN");
+        reject(new Error(`Provider Resolve DN Error: ${error.message}`));
+      });
+
+      res.on("end", () => {
+        if (userDn) {
+          resolve(userDn);
+        } else {
+          reject(new Error(`Unable to resolve DN for ${dn}.`));
+        }
+      });
+    });
+  });
+};
+
export const ldapPasswordRotationFactory: TRotationFactory<
  TLdapPasswordRotationWithConnection,
-  TLdapPasswordRotationGeneratedCredentials
+  TLdapPasswordRotationGeneratedCredentials,
+  TLdapPasswordRotationInput["temporaryParameters"]
> = (secretRotation, appConnectionDAL, kmsService) => {
-  const {
-    connection,
-    parameters: { dn, passwordRequirements },
-    secretsMapping
-  } = secretRotation;
+  const { connection, parameters, secretsMapping, activeIndex } = secretRotation;
+
+  const { dn, passwordRequirements } = parameters;

  const $verifyCredentials = async (credentials: Pick<TLdapConnection["credentials"], "dn" | "password">) => {
    try {
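The new getDN helper lets the rotation target be given either as a full distinguished name or as a userPrincipalName; in the latter case the search base is derived from the UPN's domain part. A small usage sketch follows — the UPN and the resolved DN are made-up values, not data from this change set.

// Sketch: resolving a userPrincipalName to a DN before calling client.modify().
// "jdoe@corp.example.com" would be searched under base "dc=corp,dc=example,dc=com".
const exampleResolve = async (client: Client) => {
  const userDn = await getDN("jdoe@corp.example.com", client);
  return userDn; // e.g. "CN=John Doe,OU=Users,DC=corp,DC=example,DC=com"
};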
@@ -40,13 +87,21 @@ export const ldapPasswordRotationFactory: TRotationFactory<
    }
  };

-  const $rotatePassword = async () => {
+  const $rotatePassword = async (currentPassword?: string) => {
    const { credentials, orgId } = connection;

    if (!credentials.url.startsWith("ldaps")) throw new Error("Password Rotation requires an LDAPS connection");

-    const client = await getLdapConnectionClient(credentials);
-    const isPersonalRotation = credentials.dn === dn;
+    const client = await getLdapConnectionClient(
+      currentPassword
+        ? {
+            ...credentials,
+            password: currentPassword,
+            dn
+          }
+        : credentials
+    );
+    const isConnectionRotation = credentials.dn === dn;

    const password = generatePassword(passwordRequirements);

|
||||
const encodedPassword = getEncodedPassword(password);
|
||||
|
||||
// service account vs personal password rotation require different changes
|
||||
if (isPersonalRotation) {
|
||||
const currentEncodedPassword = getEncodedPassword(credentials.password);
|
||||
if (isConnectionRotation || currentPassword) {
|
||||
const currentEncodedPassword = getEncodedPassword(currentPassword || credentials.password);
|
||||
|
||||
changes = [
|
||||
new ldap.Change({
|
||||
@@ -93,8 +148,9 @@ export const ldapPasswordRotationFactory: TRotationFactory<
|
||||
}
|
||||
|
||||
try {
|
||||
const userDn = await getDN(dn, client);
|
||||
await new Promise((resolve, reject) => {
|
||||
client.modify(dn, changes, (err) => {
|
||||
client.modify(userDn, changes, (err) => {
|
||||
if (err) {
|
||||
logger.error(err, "LDAP Password Rotation Failed");
|
||||
reject(new Error(`Provider Modify Error: ${err.message}`));
|
||||
@@ -110,7 +166,7 @@ export const ldapPasswordRotationFactory: TRotationFactory<
|
||||
|
||||
await $verifyCredentials({ dn, password });
|
||||
|
||||
if (isPersonalRotation) {
|
||||
if (isConnectionRotation) {
|
||||
const updatedCredentials: TLdapConnection["credentials"] = {
|
||||
...credentials,
|
||||
password
|
||||
@@ -128,29 +184,41 @@ export const ldapPasswordRotationFactory: TRotationFactory<
|
||||
return { dn, password };
|
||||
};
|
||||
|
||||
const issueCredentials: TRotationFactoryIssueCredentials<TLdapPasswordRotationGeneratedCredentials> = async (
|
||||
callback
|
||||
) => {
|
||||
const credentials = await $rotatePassword();
|
||||
const issueCredentials: TRotationFactoryIssueCredentials<
|
||||
TLdapPasswordRotationGeneratedCredentials,
|
||||
TLdapPasswordRotationInput["temporaryParameters"]
|
||||
> = async (callback, temporaryParameters) => {
|
||||
const credentials = await $rotatePassword(
|
||||
parameters.rotationMethod === LdapPasswordRotationMethod.TargetPrincipal
|
||||
? temporaryParameters?.password
|
||||
: undefined
|
||||
);
|
||||
|
||||
return callback(credentials);
|
||||
};
|
||||
|
||||
const revokeCredentials: TRotationFactoryRevokeCredentials<TLdapPasswordRotationGeneratedCredentials> = async (
|
||||
_,
|
||||
credentialsToRevoke,
|
||||
callback
|
||||
) => {
|
||||
const currentPassword = credentialsToRevoke[activeIndex].password;
|
||||
|
||||
// we just rotate to a new password, essentially revoking old credentials
|
||||
await $rotatePassword();
|
||||
await $rotatePassword(
|
||||
parameters.rotationMethod === LdapPasswordRotationMethod.TargetPrincipal ? currentPassword : undefined
|
||||
);
|
||||
|
||||
return callback();
|
||||
};
|
||||
|
||||
const rotateCredentials: TRotationFactoryRotateCredentials<TLdapPasswordRotationGeneratedCredentials> = async (
|
||||
_,
|
||||
callback
|
||||
callback,
|
||||
activeCredentials
|
||||
) => {
|
||||
const credentials = await $rotatePassword();
|
||||
const credentials = await $rotatePassword(
|
||||
parameters.rotationMethod === LdapPasswordRotationMethod.TargetPrincipal ? activeCredentials.password : undefined
|
||||
);
|
||||
|
||||
return callback(credentials);
|
||||
};
|
||||
|
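
Editor's note: the branch added to $rotatePassword decides which principal binds to the directory — when a current password is supplied (target-principal rotations), the factory binds as the target account itself; otherwise it binds with the connection's service-account credentials. A hedged restatement of that choice as a standalone helper (names and sample values below are hypothetical, not the factory's actual API):

// Illustrative sketch (editor's note, not part of the diff).
type BindCredentials = { url: string; dn: string; password: string };

// Prefer the target principal's current password when one is supplied,
// otherwise fall back to the connection's own credentials.
const pickBindCredentials = (
  connectionCredentials: BindCredentials,
  targetDn: string,
  currentPassword?: string
): BindCredentials =>
  currentPassword ? { ...connectionCredentials, dn: targetDn, password: currentPassword } : connectionCredentials;

// Example: a target-principal rotation binds as the user being rotated.
const bind = pickBindCredentials(
  { url: "ldaps://ad.example.com:636", dn: "CN=svc-rotator,CN=Users,DC=example,DC=com", password: "svc-secret" },
  "CN=Jane,CN=Users,DC=example,DC=com",
  "janes-current-password"
);
console.log(bind.dn); // CN=Jane,CN=Users,DC=example,DC=com
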
@@ -1,6 +1,6 @@
import RE2 from "re2";
import { z } from "zod";

import { LdapPasswordRotationMethod } from "@app/ee/services/secret-rotation-v2/ldap-password/ldap-password-rotation-types";
import { SecretRotation } from "@app/ee/services/secret-rotation-v2/secret-rotation-v2-enums";
import {
  BaseCreateSecretRotationSchema,
@@ -9,7 +9,7 @@ import {
} from "@app/ee/services/secret-rotation-v2/secret-rotation-v2-schemas";
import { PasswordRequirementsSchema } from "@app/ee/services/secret-rotation-v2/shared/general";
import { SecretRotations } from "@app/lib/api-docs";
import { DistinguishedNameRegex } from "@app/lib/regex";
import { DistinguishedNameRegex, UserPrincipalNameRegex } from "@app/lib/regex";
import { SecretNameSchema } from "@app/server/lib/schemas";
import { AppConnection } from "@app/services/app-connection/app-connection-enums";

@@ -26,10 +26,16 @@ const LdapPasswordRotationParametersSchema = z.object({
  dn: z
    .string()
    .trim()
    .regex(new RE2(DistinguishedNameRegex), "Invalid DN format, ie; CN=user,OU=users,DC=example,DC=com")
    .min(1, "Distinguished Name (DN) Required")
    .min(1, "DN/UPN required")
    .refine((value) => DistinguishedNameRegex.test(value) || UserPrincipalNameRegex.test(value), {
      message: "Invalid DN/UPN format"
    })
    .describe(SecretRotations.PARAMETERS.LDAP_PASSWORD.dn),
  passwordRequirements: PasswordRequirementsSchema.optional()
  passwordRequirements: PasswordRequirementsSchema.optional(),
  rotationMethod: z
    .nativeEnum(LdapPasswordRotationMethod)
    .optional()
    .describe(SecretRotations.PARAMETERS.LDAP_PASSWORD.rotationMethod)
});

const LdapPasswordRotationSecretsMappingSchema = z.object({
@@ -50,10 +56,28 @@ export const LdapPasswordRotationSchema = BaseSecretRotationSchema(SecretRotatio
  secretsMapping: LdapPasswordRotationSecretsMappingSchema
});

export const CreateLdapPasswordRotationSchema = BaseCreateSecretRotationSchema(SecretRotation.LdapPassword).extend({
  parameters: LdapPasswordRotationParametersSchema,
  secretsMapping: LdapPasswordRotationSecretsMappingSchema
});
export const CreateLdapPasswordRotationSchema = BaseCreateSecretRotationSchema(SecretRotation.LdapPassword)
  .extend({
    parameters: LdapPasswordRotationParametersSchema,
    secretsMapping: LdapPasswordRotationSecretsMappingSchema,
    temporaryParameters: z
      .object({
        password: z.string().min(1, "Password required").describe(SecretRotations.PARAMETERS.LDAP_PASSWORD.password)
      })
      .optional()
  })
  .superRefine((val, ctx) => {
    if (
      val.parameters.rotationMethod === LdapPasswordRotationMethod.TargetPrincipal &&
      !val.temporaryParameters?.password
    ) {
      ctx.addIssue({
        code: z.ZodIssueCode.custom,
        message: "Password required",
        path: ["temporaryParameters", "password"]
      });
    }
  });

export const UpdateLdapPasswordRotationSchema = BaseUpdateSecretRotationSchema(SecretRotation.LdapPassword).extend({
  parameters: LdapPasswordRotationParametersSchema.optional(),
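
Editor's note: the superRefine above makes the temporary bind password required only when the rotation method is target-principal. A self-contained zod sketch of the same rule, using simplified stand-in schemas rather than the real Infisical ones:

// Illustrative sketch (editor's note, not part of the diff).
import { z } from "zod";

enum RotationMethod {
  ConnectionPrincipal = "connection-principal",
  TargetPrincipal = "target-principal"
}

const CreateRotationSketch = z
  .object({
    parameters: z.object({ rotationMethod: z.nativeEnum(RotationMethod).optional() }),
    temporaryParameters: z.object({ password: z.string().min(1) }).optional()
  })
  .superRefine((val, ctx) => {
    // Require a current password only when the target principal performs its own rotation.
    if (val.parameters.rotationMethod === RotationMethod.TargetPrincipal && !val.temporaryParameters?.password) {
      ctx.addIssue({
        code: z.ZodIssueCode.custom,
        message: "Password required",
        path: ["temporaryParameters", "password"]
      });
    }
  });

// Fails: target-principal rotations must supply the account's current password.
console.log(CreateRotationSketch.safeParse({ parameters: { rotationMethod: RotationMethod.TargetPrincipal } }).success); // false
// Passes: connection-principal rotations need no temporary password.
console.log(CreateRotationSketch.safeParse({ parameters: {} }).success); // true
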
@@ -9,6 +9,11 @@ import {
  LdapPasswordRotationSchema
} from "./ldap-password-rotation-schemas";

export enum LdapPasswordRotationMethod {
  ConnectionPrincipal = "connection-principal",
  TargetPrincipal = "target-principal"
}

export type TLdapPasswordRotation = z.infer<typeof LdapPasswordRotationSchema>;

export type TLdapPasswordRotationInput = z.infer<typeof CreateLdapPasswordRotationSchema>;
@@ -1,12 +1,13 @@
import { AxiosError } from "axios";

import { getConfig } from "@app/lib/config/env";
import { BadRequestError } from "@app/lib/errors";
import { KmsDataKey } from "@app/services/kms/kms-types";

import { AUTH0_CLIENT_SECRET_ROTATION_LIST_OPTION } from "./auth0-client-secret";
import { AWS_IAM_USER_SECRET_ROTATION_LIST_OPTION } from "./aws-iam-user-secret";
import { AZURE_CLIENT_SECRET_ROTATION_LIST_OPTION } from "./azure-client-secret";
import { LDAP_PASSWORD_ROTATION_LIST_OPTION } from "./ldap-password";
import { LDAP_PASSWORD_ROTATION_LIST_OPTION, TLdapPasswordRotation } from "./ldap-password";
import { MSSQL_CREDENTIALS_ROTATION_LIST_OPTION } from "./mssql-credentials";
import { POSTGRES_CREDENTIALS_ROTATION_LIST_OPTION } from "./postgres-credentials";
import { SecretRotation, SecretRotationStatus } from "./secret-rotation-v2-enums";
@@ -15,7 +16,8 @@ import {
  TSecretRotationV2,
  TSecretRotationV2GeneratedCredentials,
  TSecretRotationV2ListItem,
  TSecretRotationV2Raw
  TSecretRotationV2Raw,
  TUpdateSecretRotationV2DTO
} from "./secret-rotation-v2-types";

const SECRET_ROTATION_LIST_OPTIONS: Record<SecretRotation, TSecretRotationV2ListItem> = {
@@ -228,3 +230,30 @@ export const parseRotationErrorMessage = (err: unknown): string => {
    ? errorMessage
    : `${errorMessage.substring(0, MAX_MESSAGE_LENGTH - 3)}...`;
};

function haveUnequalProperties<T>(obj1: T, obj2: T, properties: (keyof T)[]): boolean {
  return properties.some((prop) => obj1[prop] !== obj2[prop]);
}

export const throwOnImmutableParameterUpdate = (
  updatePayload: TUpdateSecretRotationV2DTO,
  secretRotation: TSecretRotationV2Raw
) => {
  if (!updatePayload.parameters) return;

  switch (updatePayload.type) {
    case SecretRotation.LdapPassword:
      if (
        haveUnequalProperties(
          updatePayload.parameters as TLdapPasswordRotation["parameters"],
          secretRotation.parameters as TLdapPasswordRotation["parameters"],
          ["rotationMethod", "dn"]
        )
      ) {
        throw new BadRequestError({ message: "Cannot update rotation method or DN" });
      }
      break;
    default:
    // do nothing
  }
};
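
Editor's note: throwOnImmutableParameterUpdate compares a fixed set of locked fields between the stored rotation and the incoming update and rejects the request if any of them changed. The comparison reduces to the small helper below (generic names and illustrative values):

// Illustrative sketch (editor's note, not part of the diff).
const haveUnequalProps = <T>(a: T, b: T, properties: (keyof T)[]): boolean =>
  properties.some((prop) => a[prop] !== b[prop]);

type LdapParams = { dn: string; rotationMethod?: string };

const stored: LdapParams = { dn: "CN=Jane,CN=Users,DC=example,DC=com", rotationMethod: "target-principal" };
const update: LdapParams = { dn: "CN=Jane,CN=Users,DC=example,DC=com", rotationMethod: "connection-principal" };

// true — rotationMethod changed, so the service would raise a BadRequestError.
console.log(haveUnequalProps(stored, update, ["rotationMethod", "dn"]));
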
@@ -25,7 +25,8 @@ import {
|
||||
getNextUtcRotationInterval,
|
||||
getSecretRotationRotateSecretJobOptions,
|
||||
listSecretRotationOptions,
|
||||
parseRotationErrorMessage
|
||||
parseRotationErrorMessage,
|
||||
throwOnImmutableParameterUpdate
|
||||
} from "@app/ee/services/secret-rotation-v2/secret-rotation-v2-fns";
|
||||
import {
|
||||
SECRET_ROTATION_CONNECTION_MAP,
|
||||
@@ -46,6 +47,7 @@ import {
|
||||
TSecretRotationV2,
|
||||
TSecretRotationV2GeneratedCredentials,
|
||||
TSecretRotationV2Raw,
|
||||
TSecretRotationV2TemporaryParameters,
|
||||
TSecretRotationV2WithConnection,
|
||||
TUpdateSecretRotationV2DTO
|
||||
} from "@app/ee/services/secret-rotation-v2/secret-rotation-v2-types";
|
||||
@@ -112,7 +114,8 @@ const MAX_GENERATED_CREDENTIALS_LENGTH = 2;
|
||||
|
||||
type TRotationFactoryImplementation = TRotationFactory<
|
||||
TSecretRotationV2WithConnection,
|
||||
TSecretRotationV2GeneratedCredentials
|
||||
TSecretRotationV2GeneratedCredentials,
|
||||
TSecretRotationV2TemporaryParameters
|
||||
>;
|
||||
const SECRET_ROTATION_FACTORY_MAP: Record<SecretRotation, TRotationFactoryImplementation> = {
|
||||
[SecretRotation.PostgresCredentials]: sqlCredentialsRotationFactory as TRotationFactoryImplementation,
|
||||
@@ -400,6 +403,7 @@ export const secretRotationV2ServiceFactory = ({
|
||||
environment,
|
||||
rotateAtUtc = { hours: 0, minutes: 0 },
|
||||
secretsMapping,
|
||||
temporaryParameters,
|
||||
...payload
|
||||
}: TCreateSecretRotationV2DTO,
|
||||
actor: OrgServiceActor
|
||||
@@ -546,7 +550,7 @@ export const secretRotationV2ServiceFactory = ({
|
||||
|
||||
return createdRotation;
|
||||
});
|
||||
});
|
||||
}, temporaryParameters);
|
||||
|
||||
await secretV2BridgeDAL.invalidateSecretCacheByProjectId(projectId);
|
||||
await snapshotService.performSnapshot(folder.id);
|
||||
@@ -585,10 +589,7 @@ export const secretRotationV2ServiceFactory = ({
|
||||
}
|
||||
};
|
||||
|
||||
const updateSecretRotation = async (
|
||||
{ type, rotationId, ...payload }: TUpdateSecretRotationV2DTO,
|
||||
actor: OrgServiceActor
|
||||
) => {
|
||||
const updateSecretRotation = async (dto: TUpdateSecretRotationV2DTO, actor: OrgServiceActor) => {
|
||||
const plan = await licenseService.getPlan(actor.orgId);
|
||||
|
||||
if (!plan.secretRotation)
|
||||
@@ -596,6 +597,8 @@ export const secretRotationV2ServiceFactory = ({
|
||||
message: "Failed to update secret rotation due to plan restriction. Upgrade plan to update secret rotations."
|
||||
});
|
||||
|
||||
const { type, rotationId, ...payload } = dto;
|
||||
|
||||
const secretRotation = await secretRotationV2DAL.findById(rotationId);
|
||||
|
||||
if (!secretRotation)
|
||||
@@ -603,6 +606,8 @@ export const secretRotationV2ServiceFactory = ({
|
||||
message: `Could not find ${SECRET_ROTATION_NAME_MAP[type]} Rotation with ID ${rotationId}`
|
||||
});
|
||||
|
||||
throwOnImmutableParameterUpdate(dto, secretRotation);
|
||||
|
||||
const { folder, environment, projectId, folderId, connection } = secretRotation;
|
||||
const secretsMapping = secretRotation.secretsMapping as TSecretRotationV2["secretsMapping"];
|
||||
|
||||
@@ -877,6 +882,7 @@ export const secretRotationV2ServiceFactory = ({
|
||||
const inactiveIndex = (activeIndex + 1) % MAX_GENERATED_CREDENTIALS_LENGTH;
|
||||
|
||||
const inactiveCredentials = generatedCredentials[inactiveIndex];
|
||||
const activeCredentials = generatedCredentials[activeIndex];
|
||||
|
||||
const rotationFactory = SECRET_ROTATION_FACTORY_MAP[type as SecretRotation](
|
||||
{
|
||||
@@ -887,73 +893,77 @@ export const secretRotationV2ServiceFactory = ({
|
||||
kmsService
|
||||
);
|
||||
|
||||
const updatedRotation = await rotationFactory.rotateCredentials(inactiveCredentials, async (newCredentials) => {
|
||||
const updatedCredentials = [...generatedCredentials];
|
||||
updatedCredentials[inactiveIndex] = newCredentials;
|
||||
const updatedRotation = await rotationFactory.rotateCredentials(
|
||||
inactiveCredentials,
|
||||
async (newCredentials) => {
|
||||
const updatedCredentials = [...generatedCredentials];
|
||||
updatedCredentials[inactiveIndex] = newCredentials;
|
||||
|
||||
const encryptedUpdatedCredentials = await encryptSecretRotationCredentials({
|
||||
projectId,
|
||||
generatedCredentials: updatedCredentials as TSecretRotationV2GeneratedCredentials,
|
||||
kmsService
|
||||
});
|
||||
|
||||
return secretRotationV2DAL.transaction(async (tx) => {
|
||||
const secretsPayload = rotationFactory.getSecretsPayload(newCredentials);
|
||||
|
||||
const { encryptor } = await kmsService.createCipherPairWithDataKey({
|
||||
type: KmsDataKey.SecretManager,
|
||||
projectId
|
||||
const encryptedUpdatedCredentials = await encryptSecretRotationCredentials({
|
||||
projectId,
|
||||
generatedCredentials: updatedCredentials as TSecretRotationV2GeneratedCredentials,
|
||||
kmsService
|
||||
});
|
||||
|
||||
// update mapped secrets with new credential values
|
||||
await fnSecretBulkUpdate({
|
||||
folderId,
|
||||
orgId: connection.orgId,
|
||||
tx,
|
||||
inputSecrets: secretsPayload.map(({ key, value }) => ({
|
||||
filter: {
|
||||
key,
|
||||
folderId,
|
||||
type: SecretType.Shared
|
||||
},
|
||||
data: {
|
||||
encryptedValue: encryptor({
|
||||
plainText: Buffer.from(value)
|
||||
}).cipherTextBlob,
|
||||
references: []
|
||||
}
|
||||
})),
|
||||
secretDAL: secretV2BridgeDAL,
|
||||
secretVersionDAL: secretVersionV2BridgeDAL,
|
||||
secretVersionTagDAL: secretVersionTagV2BridgeDAL,
|
||||
secretTagDAL,
|
||||
resourceMetadataDAL
|
||||
});
|
||||
return secretRotationV2DAL.transaction(async (tx) => {
|
||||
const secretsPayload = rotationFactory.getSecretsPayload(newCredentials);
|
||||
|
||||
const currentTime = new Date();
|
||||
const { encryptor } = await kmsService.createCipherPairWithDataKey({
|
||||
type: KmsDataKey.SecretManager,
|
||||
projectId
|
||||
});
|
||||
|
||||
return secretRotationV2DAL.updateById(
|
||||
secretRotation.id,
|
||||
{
|
||||
encryptedGeneratedCredentials: encryptedUpdatedCredentials,
|
||||
activeIndex: inactiveIndex,
|
||||
isLastRotationManual: isManualRotation,
|
||||
lastRotatedAt: currentTime,
|
||||
lastRotationAttemptedAt: currentTime,
|
||||
nextRotationAt: calculateNextRotationAt({
|
||||
...(secretRotation as TSecretRotationV2),
|
||||
rotationStatus: SecretRotationStatus.Success,
|
||||
// update mapped secrets with new credential values
|
||||
await fnSecretBulkUpdate({
|
||||
folderId,
|
||||
orgId: connection.orgId,
|
||||
tx,
|
||||
inputSecrets: secretsPayload.map(({ key, value }) => ({
|
||||
filter: {
|
||||
key,
|
||||
folderId,
|
||||
type: SecretType.Shared
|
||||
},
|
||||
data: {
|
||||
encryptedValue: encryptor({
|
||||
plainText: Buffer.from(value)
|
||||
}).cipherTextBlob,
|
||||
references: []
|
||||
}
|
||||
})),
|
||||
secretDAL: secretV2BridgeDAL,
|
||||
secretVersionDAL: secretVersionV2BridgeDAL,
|
||||
secretVersionTagDAL: secretVersionTagV2BridgeDAL,
|
||||
secretTagDAL,
|
||||
resourceMetadataDAL
|
||||
});
|
||||
|
||||
const currentTime = new Date();
|
||||
|
||||
return secretRotationV2DAL.updateById(
|
||||
secretRotation.id,
|
||||
{
|
||||
encryptedGeneratedCredentials: encryptedUpdatedCredentials,
|
||||
activeIndex: inactiveIndex,
|
||||
isLastRotationManual: isManualRotation,
|
||||
lastRotatedAt: currentTime,
|
||||
isManualRotation
|
||||
}),
|
||||
rotationStatus: SecretRotationStatus.Success,
|
||||
lastRotationJobId: jobId,
|
||||
encryptedLastRotationMessage: null
|
||||
},
|
||||
tx
|
||||
);
|
||||
});
|
||||
});
|
||||
lastRotationAttemptedAt: currentTime,
|
||||
nextRotationAt: calculateNextRotationAt({
|
||||
...(secretRotation as TSecretRotationV2),
|
||||
rotationStatus: SecretRotationStatus.Success,
|
||||
lastRotatedAt: currentTime,
|
||||
isManualRotation
|
||||
}),
|
||||
rotationStatus: SecretRotationStatus.Success,
|
||||
lastRotationJobId: jobId,
|
||||
encryptedLastRotationMessage: null
|
||||
},
|
||||
tx
|
||||
);
|
||||
});
|
||||
},
|
||||
activeCredentials
|
||||
);
|
||||
|
||||
await auditLogService.createAuditLog({
|
||||
...(auditLogInfo ?? {
|
||||
|
@@ -87,6 +87,8 @@ export type TSecretRotationV2ListItem =
|
||||
| TLdapPasswordRotationListItem
|
||||
| TAwsIamUserSecretRotationListItem;
|
||||
|
||||
export type TSecretRotationV2TemporaryParameters = TLdapPasswordRotationInput["temporaryParameters"] | undefined;
|
||||
|
||||
export type TSecretRotationV2Raw = NonNullable<Awaited<ReturnType<TSecretRotationV2DALFactory["findById"]>>>;
|
||||
|
||||
export type TListSecretRotationsV2ByProjectId = {
|
||||
@@ -120,6 +122,7 @@ export type TCreateSecretRotationV2DTO = Pick<
|
||||
environment: string;
|
||||
isAutoRotationEnabled?: boolean;
|
||||
rotateAtUtc?: TRotateAtUtc;
|
||||
temporaryParameters?: TSecretRotationV2TemporaryParameters;
|
||||
};
|
||||
|
||||
export type TUpdateSecretRotationV2DTO = Partial<
|
||||
@@ -186,8 +189,12 @@ export type TSecretRotationSendNotificationJobPayload = {
|
||||
// transactional behavior. By passing in the rotation mutation, if this mutation fails we can roll back the
|
||||
// third party credential changes (when supported), preventing credentials getting out of sync
|
||||
|
||||
export type TRotationFactoryIssueCredentials<T extends TSecretRotationV2GeneratedCredentials> = (
|
||||
callback: (newCredentials: T[number]) => Promise<TSecretRotationV2Raw>
|
||||
export type TRotationFactoryIssueCredentials<
|
||||
T extends TSecretRotationV2GeneratedCredentials,
|
||||
P extends TSecretRotationV2TemporaryParameters = undefined
|
||||
> = (
|
||||
callback: (newCredentials: T[number]) => Promise<TSecretRotationV2Raw>,
|
||||
temporaryParameters?: P
|
||||
) => Promise<TSecretRotationV2Raw>;
|
||||
|
||||
export type TRotationFactoryRevokeCredentials<T extends TSecretRotationV2GeneratedCredentials> = (
|
||||
@@ -197,7 +204,8 @@ export type TRotationFactoryRevokeCredentials<T extends TSecretRotationV2Generat
|
||||
|
||||
export type TRotationFactoryRotateCredentials<T extends TSecretRotationV2GeneratedCredentials> = (
|
||||
credentialsToRevoke: T[number] | undefined,
|
||||
callback: (newCredentials: T[number]) => Promise<TSecretRotationV2Raw>
|
||||
callback: (newCredentials: T[number]) => Promise<TSecretRotationV2Raw>,
|
||||
activeCredentials: T[number]
|
||||
) => Promise<TSecretRotationV2Raw>;
|
||||
|
||||
export type TRotationFactoryGetSecretsPayload<T extends TSecretRotationV2GeneratedCredentials> = (
|
||||
@@ -206,13 +214,14 @@ export type TRotationFactoryGetSecretsPayload<T extends TSecretRotationV2Generat
|
||||
|
||||
export type TRotationFactory<
|
||||
T extends TSecretRotationV2WithConnection,
|
||||
C extends TSecretRotationV2GeneratedCredentials
|
||||
C extends TSecretRotationV2GeneratedCredentials,
|
||||
P extends TSecretRotationV2TemporaryParameters = undefined
|
||||
> = (
|
||||
secretRotation: T,
|
||||
appConnectionDAL: Pick<TAppConnectionDALFactory, "findById" | "update" | "updateById">,
|
||||
kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">
|
||||
) => {
|
||||
issueCredentials: TRotationFactoryIssueCredentials<C>;
|
||||
issueCredentials: TRotationFactoryIssueCredentials<C, P>;
|
||||
revokeCredentials: TRotationFactoryRevokeCredentials<C>;
|
||||
rotateCredentials: TRotationFactoryRotateCredentials<C>;
|
||||
getSecretsPayload: TRotationFactoryGetSecretsPayload<C>;
|
||||
|
@@ -393,6 +393,7 @@ export const KUBERNETES_AUTH = {
|
||||
allowedNames: "The comma-separated list of trusted service account names that can authenticate with Infisical.",
|
||||
allowedAudience:
|
||||
"The optional audience claim that the service account JWT token must have to authenticate with Infisical.",
|
||||
gatewayId: "The ID of the gateway to use when performing kubernetes API requests.",
|
||||
accessTokenTrustedIps: "The IPs or CIDR ranges that access tokens can be used from.",
|
||||
accessTokenTTL: "The lifetime for an access token in seconds.",
|
||||
accessTokenMaxTTL: "The maximum lifetime for an access token in seconds.",
|
||||
@@ -409,6 +410,7 @@ export const KUBERNETES_AUTH = {
|
||||
allowedNames: "The new comma-separated list of trusted service account names that can authenticate with Infisical.",
|
||||
allowedAudience:
|
||||
"The new optional audience claim that the service account JWT token must have to authenticate with Infisical.",
|
||||
gatewayId: "The ID of the gateway to use when performing kubernetes API requests.",
|
||||
accessTokenTrustedIps: "The new IPs or CIDR ranges that access tokens can be used from.",
|
||||
accessTokenTTL: "The new lifetime for an acccess token in seconds.",
|
||||
accessTokenMaxTTL: "The new maximum lifetime for an acccess token in seconds.",
|
||||
@@ -606,7 +608,8 @@ export const PROJECTS = {
|
||||
projectDescription: "An optional description label for the project.",
|
||||
autoCapitalization: "Disable or enable auto-capitalization for the project.",
|
||||
slug: "An optional slug for the project. (must be unique within the organization)",
|
||||
hasDeleteProtection: "Enable or disable delete protection for the project."
|
||||
hasDeleteProtection: "Enable or disable delete protection for the project.",
|
||||
secretSharing: "Enable or disable secret sharing for the project."
|
||||
},
|
||||
GET_KEY: {
|
||||
workspaceId: "The ID of the project to get the key from."
|
||||
@@ -2060,7 +2063,7 @@ export const AppConnections = {
|
||||
LDAP: {
|
||||
provider: "The type of LDAP provider. Determines provider-specific behaviors.",
|
||||
url: "The LDAP/LDAPS URL to connect to (e.g., 'ldap://domain-or-ip:389' or 'ldaps://domain-or-ip:636').",
|
||||
dn: "The Distinguished Name (DN) of the principal to bind with (e.g., 'CN=John,CN=Users,DC=example,DC=com').",
|
||||
dn: "The Distinguished Name (DN) or User Principal Name (UPN) of the principal to bind with (e.g., 'CN=John,CN=Users,DC=example,DC=com').",
|
||||
password: "The password to bind with for authentication.",
|
||||
sslRejectUnauthorized:
|
||||
"Whether or not to reject unauthorized SSL certificates (true/false) when using ldaps://. Set to false only in test environments.",
|
||||
@@ -2305,7 +2308,10 @@ export const SecretRotations = {
|
||||
clientId: "The client ID of the Azure Application to rotate the client secret for."
|
||||
},
|
||||
LDAP_PASSWORD: {
|
||||
dn: "The Distinguished Name (DN) of the principal to rotate the password for."
|
||||
dn: "The Distinguished Name (DN) or User Principal Name (UPN) of the principal to rotate the password for.",
|
||||
rotationMethod:
|
||||
'Whether the rotation should be performed by the LDAP "connection-principal" or the "target-principal" (defaults to \'connection-principal\').',
|
||||
password: 'The password of the provided principal if "parameters.rotationMethod" is set to "target-principal".'
|
||||
},
|
||||
GENERAL: {
|
||||
PASSWORD_REQUIREMENTS: {
|
||||
@@ -2339,7 +2345,7 @@ export const SecretRotations = {
|
||||
clientSecret: "The name of the secret that the rotated client secret will be mapped to."
|
||||
},
|
||||
LDAP_PASSWORD: {
|
||||
dn: "The name of the secret that the Distinguished Name (DN) of the principal will be mapped to.",
|
||||
dn: "The name of the secret that the Distinguished Name (DN) or User Principal Name (UPN) of the principal will be mapped to.",
|
||||
password: "The name of the secret that the rotated password will be mapped to."
|
||||
},
|
||||
AWS_IAM_USER_SECRET: {
|
||||
|
@@ -174,6 +174,8 @@ const setupProxyServer = async ({
|
||||
return new Promise((resolve, reject) => {
|
||||
const server = net.createServer();
|
||||
|
||||
let streamClosed = false;
|
||||
|
||||
// eslint-disable-next-line @typescript-eslint/no-misused-promises
|
||||
server.on("connection", async (clientConn) => {
|
||||
try {
|
||||
@@ -202,9 +204,15 @@ const setupProxyServer = async ({
|
||||
|
||||
// Handle client connection close
|
||||
clientConn.on("end", () => {
|
||||
writer.close().catch((err) => {
|
||||
logger.error(err);
|
||||
});
|
||||
if (!streamClosed) {
|
||||
try {
|
||||
writer.close().catch((err) => {
|
||||
logger.debug(err, "Error closing writer (already closed)");
|
||||
});
|
||||
} catch (error) {
|
||||
logger.debug(error, "Error in writer close");
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
clientConn.on("error", (clientConnErr) => {
|
||||
@@ -249,14 +257,29 @@ const setupProxyServer = async ({
|
||||
setupCopy();
|
||||
// Handle connection closure
|
||||
clientConn.on("close", () => {
|
||||
stream.destroy().catch((err) => {
|
||||
proxyErrorMsg.push((err as Error)?.message);
|
||||
});
|
||||
if (!streamClosed) {
|
||||
streamClosed = true;
|
||||
stream.destroy().catch((err) => {
|
||||
logger.debug(err, "Stream already destroyed during close event");
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
const cleanup = async () => {
|
||||
clientConn?.destroy();
|
||||
await stream.destroy();
|
||||
try {
|
||||
clientConn?.destroy();
|
||||
} catch (err) {
|
||||
logger.debug(err, "Error destroying client connection");
|
||||
}
|
||||
|
||||
if (!streamClosed) {
|
||||
streamClosed = true;
|
||||
try {
|
||||
await stream.destroy();
|
||||
} catch (err) {
|
||||
logger.debug(err, "Error destroying stream (might be already closed)");
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
clientConn.on("error", (clientConnErr) => {
|
||||
@@ -301,8 +324,17 @@ const setupProxyServer = async ({
|
||||
server,
|
||||
port: address.port,
|
||||
cleanup: async () => {
|
||||
server.close();
|
||||
await quicClient?.destroy();
|
||||
try {
|
||||
server.close();
|
||||
} catch (err) {
|
||||
logger.debug(err, "Error closing server");
|
||||
}
|
||||
|
||||
try {
|
||||
await quicClient?.destroy();
|
||||
} catch (err) {
|
||||
logger.debug(err, "Error destroying QUIC client");
|
||||
}
|
||||
},
|
||||
getProxyError: () => proxyErrorMsg.join(",")
|
||||
});
|
||||
@@ -320,10 +352,10 @@ interface ProxyOptions {
|
||||
orgId: string;
|
||||
}
|
||||
|
||||
export const withGatewayProxy = async (
|
||||
callback: (port: number) => Promise<void>,
|
||||
export const withGatewayProxy = async <T>(
|
||||
callback: (port: number) => Promise<T>,
|
||||
options: ProxyOptions
|
||||
): Promise<void> => {
|
||||
): Promise<T> => {
|
||||
const { relayHost, relayPort, targetHost, targetPort, tlsOptions, identityId, orgId } = options;
|
||||
|
||||
// Setup the proxy server
|
||||
@@ -339,7 +371,7 @@ export const withGatewayProxy = async (
|
||||
|
||||
try {
|
||||
// Execute the callback with the allocated port
|
||||
await callback(port);
|
||||
return await callback(port);
|
||||
} catch (err) {
|
||||
const proxyErrorMessage = getProxyError();
|
||||
if (proxyErrorMessage) {
|
||||
|
@@ -32,13 +32,13 @@ export const buildFindFilter =
  <R extends object = object>(
    { $in, $notNull, $search, $complex, ...filter }: TFindFilter<R>,
    tableName?: TableName,
    excludeKeys?: Array<keyof R>
    excludeKeys?: string[]
  ) =>
  (bd: Knex.QueryBuilder<R, R>) => {
    const processedFilter = tableName
      ? Object.fromEntries(
          Object.entries(filter)
            .filter(([key]) => !excludeKeys || !excludeKeys.includes(key as keyof R))
            .filter(([key]) => !excludeKeys || !excludeKeys.includes(key))
            .map(([key, value]) => [`${tableName}.${key}`, value])
        )
      : filter;
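
Editor's note: the excludeKeys change above only loosens the key type; the underlying transformation — drop excluded keys, then qualify the rest with the table name — is unchanged. A standalone sketch of that transformation on plain objects (not the actual Knex query builder):

// Illustrative sketch (editor's note, not part of the diff).
const processFilter = (filter: Record<string, unknown>, tableName: string, excludeKeys?: string[]) =>
  Object.fromEntries(
    Object.entries(filter)
      .filter(([key]) => !excludeKeys || !excludeKeys.includes(key))
      .map(([key, value]) => [`${tableName}.${key}`, value])
  );

// { "users.isGhost": false } — "email" is excluded, the remaining key is table-qualified.
console.log(processFilter({ email: "jane@example.com", isGhost: false }, "users", ["email"]));
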
@@ -1,6 +1,8 @@
import { Knex } from "knex";
import { Compare, Filter, parse } from "scim2-parse-filter";

import { TableName } from "@app/db/schemas";

const appendParentToGroupingOperator = (parentPath: string, filter: Filter) => {
  if (filter.op !== "[]" && filter.op !== "and" && filter.op !== "or" && filter.op !== "not") {
    return { ...filter, attrPath: `${parentPath}.${(filter as Compare).attrPath}` };
@@ -27,8 +29,12 @@ const processDynamicQuery = (
    const { scimFilterAst, query } = stack.pop()!;
    switch (scimFilterAst.op) {
      case "eq": {
        let sanitizedValue = scimFilterAst.compValue;
        const attrPath = getAttributeField(scimFilterAst.attrPath);
        if (attrPath) void query.where(attrPath, scimFilterAst.compValue);
        if (attrPath === `${TableName.Users}.email` && typeof sanitizedValue === "string") {
          sanitizedValue = sanitizedValue.toLowerCase();
        }
        if (attrPath) void query.where(attrPath, sanitizedValue);
        break;
      }
      case "pr": {
@@ -62,18 +68,30 @@ const processDynamicQuery = (
        break;
      }
      case "ew": {
        let sanitizedValue = scimFilterAst.compValue;
        const attrPath = getAttributeField(scimFilterAst.attrPath);
        if (attrPath) void query.whereILike(attrPath, `%${scimFilterAst.compValue}`);
        if (attrPath === `${TableName.Users}.email` && typeof sanitizedValue === "string") {
          sanitizedValue = sanitizedValue.toLowerCase();
        }
        if (attrPath) void query.whereILike(attrPath, `%${sanitizedValue}`);
        break;
      }
      case "co": {
        let sanitizedValue = scimFilterAst.compValue;
        const attrPath = getAttributeField(scimFilterAst.attrPath);
        if (attrPath) void query.whereILike(attrPath, `%${scimFilterAst.compValue}%`);
        if (attrPath === `${TableName.Users}.email` && typeof sanitizedValue === "string") {
          sanitizedValue = sanitizedValue.toLowerCase();
        }
        if (attrPath) void query.whereILike(attrPath, `%${sanitizedValue}%`);
        break;
      }
      case "ne": {
        let sanitizedValue = scimFilterAst.compValue;
        const attrPath = getAttributeField(scimFilterAst.attrPath);
        if (attrPath) void query.whereNot(attrPath, "=", scimFilterAst.compValue);
        if (attrPath === `${TableName.Users}.email` && typeof sanitizedValue === "string") {
          sanitizedValue = sanitizedValue.toLowerCase();
        }
        if (attrPath) void query.whereNot(attrPath, "=", sanitizedValue);
        break;
      }
      case "and": {
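
Editor's note: the repeated lowercasing above normalizes SCIM filter comparisons against the users email column. A small sketch of the same idea with the scim2-parse-filter parser used in this file (the attribute name below is illustrative, and the cast assumes the parsed filter is a simple comparison):

// Illustrative sketch (editor's note, not part of the diff).
import { parse, type Compare } from "scim2-parse-filter";

// Parse a SCIM filter and lowercase the comparison value when it targets an email-like attribute,
// mirroring the sanitization added in the diff above.
const ast = parse('userName eq "Jane.Doe@Example.com"') as Compare;

const sanitizedValue =
  ast.attrPath === "userName" && typeof ast.compValue === "string" ? ast.compValue.toLowerCase() : ast.compValue;

console.log(ast.op, ast.attrPath, sanitizedValue); // eq userName jane.doe@example.com
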
@@ -1,3 +1,11 @@
import RE2 from "re2";

export const DistinguishedNameRegex =
  // DN format, ie; CN=user,OU=users,DC=example,DC=com
  /^(?:(?:[a-zA-Z0-9]+=[^,+="<>#;\\\\]+)(?:(?:\\+[a-zA-Z0-9]+=[^,+="<>#;\\\\]+)*)(?:,(?:[a-zA-Z0-9]+=[^,+="<>#;\\\\]+)(?:(?:\\+[a-zA-Z0-9]+=[^,+="<>#;\\\\]+)*))*)?$/;
  new RE2(
    /^(?:(?:[a-zA-Z0-9]+=[^,+="<>#;\\\\]+)(?:(?:\\+[a-zA-Z0-9]+=[^,+="<>#;\\\\]+)*)(?:,(?:[a-zA-Z0-9]+=[^,+="<>#;\\\\]+)(?:(?:\\+[a-zA-Z0-9]+=[^,+="<>#;\\\\]+)*))*)?$/
  );

export const UserPrincipalNameRegex = new RE2(/^[a-zA-Z0-9._%+-]+@[a-zA-Z0-9._-]+\.[a-zA-Z]{2,}$/);

export const LdapUrlRegex = new RE2(/^ldaps?:\/\//);
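
Editor's note: both exports above are RE2 instances, so they expose the familiar .test method that the LDAP schemas in this PR call. A quick sketch of a combined DN-or-UPN check, with a deliberately simplified DN pattern standing in for the full one:

// Illustrative sketch (editor's note, not part of the diff).
import RE2 from "re2";

// Simplified stand-ins for the exported patterns above (the real DN pattern is stricter).
const DnSketch = new RE2(/^([a-zA-Z0-9]+=[^,]+)(,[a-zA-Z0-9]+=[^,]+)*$/);
const UpnSketch = new RE2(/^[a-zA-Z0-9._%+-]+@[a-zA-Z0-9._-]+\.[a-zA-Z]{2,}$/);

// A principal identifier is accepted if it parses as either form,
// mirroring the DN/UPN refinement used by the LDAP schemas in this PR.
const isDnOrUpn = (value: string) => DnSketch.test(value) || UpnSketch.test(value);

console.log(isDnOrUpn("CN=Jane,CN=Users,DC=example,DC=com")); // true
console.log(isDnOrUpn("jane@example.com")); // true
console.log(isDnOrUpn("not a principal")); // false
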
@@ -32,7 +32,6 @@ import { externalKmsServiceFactory } from "@app/ee/services/external-kms/externa
|
||||
import { gatewayDALFactory } from "@app/ee/services/gateway/gateway-dal";
|
||||
import { gatewayServiceFactory } from "@app/ee/services/gateway/gateway-service";
|
||||
import { orgGatewayConfigDALFactory } from "@app/ee/services/gateway/org-gateway-config-dal";
|
||||
import { projectGatewayDALFactory } from "@app/ee/services/gateway/project-gateway-dal";
|
||||
import { githubOrgSyncDALFactory } from "@app/ee/services/github-org-sync/github-org-sync-dal";
|
||||
import { githubOrgSyncServiceFactory } from "@app/ee/services/github-org-sync/github-org-sync-service";
|
||||
import { groupDALFactory } from "@app/ee/services/group/group-dal";
|
||||
@@ -439,7 +438,6 @@ export const registerRoutes = async (
|
||||
|
||||
const orgGatewayConfigDAL = orgGatewayConfigDALFactory(db);
|
||||
const gatewayDAL = gatewayDALFactory(db);
|
||||
const projectGatewayDAL = projectGatewayDALFactory(db);
|
||||
const secretReminderRecipientsDAL = secretReminderRecipientsDALFactory(db);
|
||||
const githubOrgSyncDAL = githubOrgSyncDALFactory(db);
|
||||
|
||||
@@ -627,7 +625,6 @@ export const registerRoutes = async (
|
||||
|
||||
const userService = userServiceFactory({
|
||||
userDAL,
|
||||
userAliasDAL,
|
||||
orgMembershipDAL,
|
||||
tokenService,
|
||||
permissionService,
|
||||
@@ -1422,12 +1419,24 @@ export const registerRoutes = async (
|
||||
identityUaDAL,
|
||||
licenseService
|
||||
});
|
||||
|
||||
const gatewayService = gatewayServiceFactory({
|
||||
permissionService,
|
||||
gatewayDAL,
|
||||
kmsService,
|
||||
licenseService,
|
||||
orgGatewayConfigDAL,
|
||||
keyStore
|
||||
});
|
||||
|
||||
const identityKubernetesAuthService = identityKubernetesAuthServiceFactory({
|
||||
identityKubernetesAuthDAL,
|
||||
identityOrgMembershipDAL,
|
||||
identityAccessTokenDAL,
|
||||
permissionService,
|
||||
licenseService,
|
||||
gatewayService,
|
||||
gatewayDAL,
|
||||
kmsService
|
||||
});
|
||||
const identityGcpAuthService = identityGcpAuthServiceFactory({
|
||||
@@ -1490,16 +1499,6 @@ export const registerRoutes = async (
|
||||
identityDAL
|
||||
});
|
||||
|
||||
const gatewayService = gatewayServiceFactory({
|
||||
permissionService,
|
||||
gatewayDAL,
|
||||
kmsService,
|
||||
licenseService,
|
||||
orgGatewayConfigDAL,
|
||||
keyStore,
|
||||
projectGatewayDAL
|
||||
});
|
||||
|
||||
const dynamicSecretProviders = buildDynamicSecretProviders({
|
||||
gatewayService
|
||||
});
|
||||
@@ -1521,7 +1520,7 @@ export const registerRoutes = async (
|
||||
permissionService,
|
||||
licenseService,
|
||||
kmsService,
|
||||
projectGatewayDAL,
|
||||
gatewayDAL,
|
||||
resourceMetadataDAL
|
||||
});
|
||||
|
||||
|
@@ -261,7 +261,8 @@ export const SanitizedProjectSchema = ProjectsSchema.pick({
  pitVersionLimit: true,
  kmsCertificateKeyId: true,
  auditLogsRetentionDays: true,
  hasDeleteProtection: true
  hasDeleteProtection: true,
  secretSharing: true
});

export const SanitizedTagSchema = SecretTagsSchema.pick({
@@ -131,8 +131,8 @@ export const registerCertRouter = async (server: FastifyZodProvider) => {
      response: {
        200: z.object({
          certificate: z.string().trim().describe(CERTIFICATES.GET_CERT.certificate),
          certificateChain: z.string().trim().nullish().describe(CERTIFICATES.GET_CERT.certificateChain),
          privateKey: z.string().trim().describe(CERTIFICATES.GET_CERT.privateKey),
          certificateChain: z.string().trim().nullable().describe(CERTIFICATES.GET_CERT.certificateChain),
          privateKey: z.string().trim().nullable().describe(CERTIFICATES.GET_CERT.privateKey),
          serialNumber: z.string().trim().describe(CERTIFICATES.GET_CERT.serialNumberRes)
        })
      }
@@ -518,7 +518,7 @@ export const registerCertRouter = async (server: FastifyZodProvider) => {
      response: {
        200: z.object({
          certificate: z.string().trim().describe(CERTIFICATES.GET_CERT.certificate),
          certificateChain: z.string().trim().nullish().describe(CERTIFICATES.GET_CERT.certificateChain),
          certificateChain: z.string().trim().nullable().describe(CERTIFICATES.GET_CERT.certificateChain),
          serialNumber: z.string().trim().describe(CERTIFICATES.GET_CERT.serialNumberRes)
        })
      }
@@ -3,6 +3,7 @@ import { z } from "zod";
|
||||
import { IdentityKubernetesAuthsSchema } from "@app/db/schemas";
|
||||
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
|
||||
import { ApiDocsTags, KUBERNETES_AUTH } from "@app/lib/api-docs";
|
||||
import { CharacterType, characterValidator } from "@app/lib/validator/validate-string";
|
||||
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
|
||||
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
|
||||
import { AuthMode } from "@app/services/auth/auth-type";
|
||||
@@ -21,7 +22,8 @@ const IdentityKubernetesAuthResponseSchema = IdentityKubernetesAuthsSchema.pick(
|
||||
kubernetesHost: true,
|
||||
allowedNamespaces: true,
|
||||
allowedNames: true,
|
||||
allowedAudience: true
|
||||
allowedAudience: true,
|
||||
gatewayId: true
|
||||
}).extend({
|
||||
caCert: z.string(),
|
||||
tokenReviewerJwt: z.string().optional().nullable()
|
||||
@@ -100,12 +102,32 @@ export const registerIdentityKubernetesRouter = async (server: FastifyZodProvide
|
||||
}),
|
||||
body: z
|
||||
.object({
|
||||
kubernetesHost: z.string().trim().min(1).describe(KUBERNETES_AUTH.ATTACH.kubernetesHost),
|
||||
kubernetesHost: z
|
||||
.string()
|
||||
.trim()
|
||||
.min(1)
|
||||
.describe(KUBERNETES_AUTH.ATTACH.kubernetesHost)
|
||||
.refine(
|
||||
(val) =>
|
||||
characterValidator([
|
||||
CharacterType.Alphabets,
|
||||
CharacterType.Numbers,
|
||||
CharacterType.Colon,
|
||||
CharacterType.Period,
|
||||
CharacterType.ForwardSlash,
|
||||
CharacterType.Hyphen
|
||||
])(val),
|
||||
{
|
||||
message:
|
||||
"Kubernetes host must only contain alphabets, numbers, colons, periods, hyphen, and forward slashes."
|
||||
}
|
||||
),
|
||||
caCert: z.string().trim().default("").describe(KUBERNETES_AUTH.ATTACH.caCert),
|
||||
tokenReviewerJwt: z.string().trim().optional().describe(KUBERNETES_AUTH.ATTACH.tokenReviewerJwt),
|
||||
allowedNamespaces: z.string().describe(KUBERNETES_AUTH.ATTACH.allowedNamespaces), // TODO: validation
|
||||
allowedNames: z.string().describe(KUBERNETES_AUTH.ATTACH.allowedNames),
|
||||
allowedAudience: z.string().describe(KUBERNETES_AUTH.ATTACH.allowedAudience),
|
||||
gatewayId: z.string().uuid().optional().nullable().describe(KUBERNETES_AUTH.ATTACH.gatewayId),
|
||||
accessTokenTrustedIps: z
|
||||
.object({
|
||||
ipAddress: z.string().trim()
|
||||
@@ -199,12 +221,36 @@ export const registerIdentityKubernetesRouter = async (server: FastifyZodProvide
|
||||
}),
|
||||
body: z
|
||||
.object({
|
||||
kubernetesHost: z.string().trim().min(1).optional().describe(KUBERNETES_AUTH.UPDATE.kubernetesHost),
|
||||
kubernetesHost: z
|
||||
.string()
|
||||
.trim()
|
||||
.min(1)
|
||||
.optional()
|
||||
.describe(KUBERNETES_AUTH.UPDATE.kubernetesHost)
|
||||
.refine(
|
||||
(val) => {
|
||||
if (!val) return true;
|
||||
|
||||
return characterValidator([
|
||||
CharacterType.Alphabets,
|
||||
CharacterType.Numbers,
|
||||
CharacterType.Colon,
|
||||
CharacterType.Period,
|
||||
CharacterType.ForwardSlash,
|
||||
CharacterType.Hyphen
|
||||
])(val);
|
||||
},
|
||||
{
|
||||
message:
|
||||
"Kubernetes host must only contain alphabets, numbers, colons, periods, hyphen, and forward slashes."
|
||||
}
|
||||
),
|
||||
caCert: z.string().trim().optional().describe(KUBERNETES_AUTH.UPDATE.caCert),
|
||||
tokenReviewerJwt: z.string().trim().nullable().optional().describe(KUBERNETES_AUTH.UPDATE.tokenReviewerJwt),
|
||||
allowedNamespaces: z.string().optional().describe(KUBERNETES_AUTH.UPDATE.allowedNamespaces), // TODO: validation
|
||||
allowedNames: z.string().optional().describe(KUBERNETES_AUTH.UPDATE.allowedNames),
|
||||
allowedAudience: z.string().optional().describe(KUBERNETES_AUTH.UPDATE.allowedAudience),
|
||||
gatewayId: z.string().uuid().optional().nullable().describe(KUBERNETES_AUTH.UPDATE.gatewayId),
|
||||
accessTokenTrustedIps: z
|
||||
.object({
|
||||
ipAddress: z.string().trim()
|
||||
|
@@ -16,7 +16,12 @@ export const registerInviteOrgRouter = async (server: FastifyZodProvider) => {
|
||||
method: "POST",
|
||||
schema: {
|
||||
body: z.object({
|
||||
inviteeEmails: z.array(z.string().trim().email()),
|
||||
inviteeEmails: z
|
||||
.string()
|
||||
.trim()
|
||||
.email()
|
||||
.array()
|
||||
.refine((val) => val.every((el) => el === el.toLowerCase()), "Email must be lowercase"),
|
||||
organizationId: z.string().trim(),
|
||||
projects: z
|
||||
.object({
|
||||
@@ -115,7 +120,11 @@ export const registerInviteOrgRouter = async (server: FastifyZodProvider) => {
|
||||
},
|
||||
schema: {
|
||||
body: z.object({
|
||||
email: z.string().trim().email(),
|
||||
email: z
|
||||
.string()
|
||||
.trim()
|
||||
.email()
|
||||
.refine((val) => val === val.toLowerCase(), "Email must be lowercase"),
|
||||
organizationId: z.string().trim(),
|
||||
code: z.string().trim()
|
||||
}),
|
||||
|
@@ -275,6 +275,23 @@ export const registerOrgRouter = async (server: FastifyZodProvider) => {
|
||||
},
|
||||
{ message: "Duration value must be at least 1" }
|
||||
)
|
||||
.optional(),
|
||||
secretsProductEnabled: z.boolean().optional(),
|
||||
pkiProductEnabled: z.boolean().optional(),
|
||||
kmsProductEnabled: z.boolean().optional(),
|
||||
sshProductEnabled: z.boolean().optional(),
|
||||
scannerProductEnabled: z.boolean().optional(),
|
||||
shareSecretsProductEnabled: z.boolean().optional(),
|
||||
maxSharedSecretLifetime: z
|
||||
.number()
|
||||
.min(300, "Max Shared Secret lifetime cannot be under 5 minutes")
|
||||
.max(2592000, "Max Shared Secret lifetime cannot exceed 30 days")
|
||||
.optional(),
|
||||
maxSharedSecretViewLimit: z
|
||||
.number()
|
||||
.min(1, "Max Shared Secret view count cannot be lower than 1")
|
||||
.max(1000, "Max Shared Secret view count cannot exceed 1000")
|
||||
.nullable()
|
||||
.optional()
|
||||
}),
|
||||
response: {
|
||||
|
@@ -346,7 +346,8 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
|
||||
"Project slug can only contain lowercase letters and numbers, with optional single hyphens (-) or underscores (_) between words. Cannot start or end with a hyphen or underscore."
|
||||
})
|
||||
.optional()
|
||||
.describe(PROJECTS.UPDATE.slug)
|
||||
.describe(PROJECTS.UPDATE.slug),
|
||||
secretSharing: z.boolean().optional().describe(PROJECTS.UPDATE.secretSharing)
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
@@ -366,7 +367,8 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
|
||||
description: req.body.description,
|
||||
autoCapitalization: req.body.autoCapitalization,
|
||||
hasDeleteProtection: req.body.hasDeleteProtection,
|
||||
slug: req.body.slug
|
||||
slug: req.body.slug,
|
||||
secretSharing: req.body.secretSharing
|
||||
},
|
||||
actorAuthMethod: req.permission.authMethod,
|
||||
actorId: req.permission.id,
|
||||
|
@@ -62,7 +62,9 @@ export const registerSecretSharingRouter = async (server: FastifyZodProvider) =>
|
||||
}),
|
||||
body: z.object({
|
||||
hashedHex: z.string().min(1).optional(),
|
||||
password: z.string().optional()
|
||||
password: z.string().optional(),
|
||||
email: z.string().optional(),
|
||||
hash: z.string().optional()
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
@@ -88,7 +90,9 @@ export const registerSecretSharingRouter = async (server: FastifyZodProvider) =>
|
||||
sharedSecretId: req.params.id,
|
||||
hashedHex: req.body.hashedHex,
|
||||
password: req.body.password,
|
||||
orgId: req.permission?.orgId
|
||||
orgId: req.permission?.orgId,
|
||||
email: req.body.email,
|
||||
hash: req.body.hash
|
||||
});
|
||||
|
||||
if (sharedSecret.secret?.orgId) {
|
||||
@@ -151,7 +155,8 @@ export const registerSecretSharingRouter = async (server: FastifyZodProvider) =>
|
||||
secretValue: z.string(),
|
||||
expiresAt: z.string(),
|
||||
expiresAfterViews: z.number().min(1).optional(),
|
||||
accessType: z.nativeEnum(SecretSharingAccessType).default(SecretSharingAccessType.Organization)
|
||||
accessType: z.nativeEnum(SecretSharingAccessType).default(SecretSharingAccessType.Organization),
|
||||
emails: z.string().email().array().max(100).optional()
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
|
@@ -46,6 +46,54 @@ export const registerUserRouter = async (server: FastifyZodProvider) => {
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "GET",
|
||||
url: "/duplicate-accounts",
|
||||
config: {
|
||||
rateLimit: readLimit
|
||||
},
|
||||
schema: {
|
||||
response: {
|
||||
200: z.object({
|
||||
users: UsersSchema.extend({
|
||||
isMyAccount: z.boolean(),
|
||||
organizations: z.object({ name: z.string(), slug: z.string() }).array()
|
||||
}).array()
|
||||
})
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT], { requireOrg: false }),
|
||||
handler: async (req) => {
|
||||
if (req.auth.authMode === AuthMode.JWT && req.auth.user.email) {
|
||||
const users = await server.services.user.getAllMyAccounts(req.auth.user.email, req.permission.id);
|
||||
return { users };
|
||||
}
|
||||
return { users: [] };
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "POST",
|
||||
url: "/remove-duplicate-accounts",
|
||||
config: {
|
||||
rateLimit: writeLimit
|
||||
},
|
||||
schema: {
|
||||
response: {
|
||||
200: z.object({
|
||||
message: z.string()
|
||||
})
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT], { requireOrg: false }),
|
||||
handler: async (req) => {
|
||||
if (req.auth.authMode === AuthMode.JWT && req.auth.user.email) {
|
||||
await server.services.user.removeMyDuplicateAccounts(req.auth.user.email, req.permission.id);
|
||||
}
|
||||
return { message: "Removed all duplicate accounts" };
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "GET",
|
||||
url: "/private-key",
|
||||
|
@@ -27,8 +27,19 @@ export const registerProjectMembershipRouter = async (server: FastifyZodProvider
|
||||
projectId: z.string().describe(PROJECT_USERS.INVITE_MEMBER.projectId)
|
||||
}),
|
||||
body: z.object({
|
||||
emails: z.string().email().array().default([]).describe(PROJECT_USERS.INVITE_MEMBER.emails),
|
||||
usernames: z.string().array().default([]).describe(PROJECT_USERS.INVITE_MEMBER.usernames),
|
||||
emails: z
|
||||
.string()
|
||||
.email()
|
||||
.array()
|
||||
.default([])
|
||||
.describe(PROJECT_USERS.INVITE_MEMBER.emails)
|
||||
.refine((val) => val.every((el) => el === el.toLowerCase()), "Email must be lowercase"),
|
||||
usernames: z
|
||||
.string()
|
||||
.array()
|
||||
.default([])
|
||||
.describe(PROJECT_USERS.INVITE_MEMBER.usernames)
|
||||
.refine((val) => val.every((el) => el === el.toLowerCase()), "Username must be lowercase"),
|
||||
roleSlugs: z.string().array().min(1).optional().describe(PROJECT_USERS.INVITE_MEMBER.roleSlugs)
|
||||
}),
|
||||
response: {
|
||||
@@ -92,8 +103,19 @@ export const registerProjectMembershipRouter = async (server: FastifyZodProvider
|
||||
projectId: z.string().describe(PROJECT_USERS.REMOVE_MEMBER.projectId)
|
||||
}),
|
||||
body: z.object({
|
||||
emails: z.string().email().array().default([]).describe(PROJECT_USERS.REMOVE_MEMBER.emails),
|
||||
usernames: z.string().array().default([]).describe(PROJECT_USERS.REMOVE_MEMBER.usernames)
|
||||
emails: z
|
||||
.string()
|
||||
.email()
|
||||
.array()
|
||||
.default([])
|
||||
.describe(PROJECT_USERS.REMOVE_MEMBER.emails)
|
||||
.refine((val) => val.every((el) => el === el.toLowerCase()), "Email must be lowercase"),
|
||||
usernames: z
|
||||
.string()
|
||||
.array()
|
||||
.default([])
|
||||
.describe(PROJECT_USERS.REMOVE_MEMBER.usernames)
|
||||
.refine((val) => val.every((el) => el === el.toLowerCase()), "Username must be lowercase")
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
|
@@ -1,8 +1,7 @@
|
||||
import RE2 from "re2";
|
||||
import { z } from "zod";
|
||||
|
||||
import { AppConnections } from "@app/lib/api-docs";
|
||||
import { DistinguishedNameRegex } from "@app/lib/regex";
|
||||
import { DistinguishedNameRegex, LdapUrlRegex, UserPrincipalNameRegex } from "@app/lib/regex";
|
||||
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
|
||||
import {
|
||||
BaseAppConnectionSchema,
|
||||
@@ -14,17 +13,14 @@ import { LdapConnectionMethod, LdapProvider } from "./ldap-connection-enums";
|
||||
|
||||
export const LdapConnectionSimpleBindCredentialsSchema = z.object({
|
||||
provider: z.nativeEnum(LdapProvider).describe(AppConnections.CREDENTIALS.LDAP.provider),
|
||||
url: z
|
||||
.string()
|
||||
.trim()
|
||||
.min(1, "URL required")
|
||||
.regex(new RE2(/^ldaps?:\/\//))
|
||||
.describe(AppConnections.CREDENTIALS.LDAP.url),
|
||||
url: z.string().trim().min(1, "URL required").regex(LdapUrlRegex).describe(AppConnections.CREDENTIALS.LDAP.url),
|
||||
dn: z
|
||||
.string()
|
||||
.trim()
|
||||
.regex(new RE2(DistinguishedNameRegex), "Invalid DN format, ie; CN=user,OU=users,DC=example,DC=com")
|
||||
.min(1, "Distinguished Name (DN) required")
|
||||
.min(1, "DN/UPN required")
|
||||
.refine((value) => DistinguishedNameRegex.test(value) || UserPrincipalNameRegex.test(value), {
|
||||
message: "Invalid DN/UPN format"
|
||||
})
|
||||
.describe(AppConnections.CREDENTIALS.LDAP.dn),
|
||||
password: z.string().trim().min(1, "Password required").describe(AppConnections.CREDENTIALS.LDAP.password),
|
||||
sslRejectUnauthorized: z.boolean().optional().describe(AppConnections.CREDENTIALS.LDAP.sslRejectUnauthorized),
|
||||
|
@@ -199,9 +199,12 @@ export const authLoginServiceFactory = ({
|
||||
providerAuthToken,
|
||||
clientPublicKey
|
||||
}: TLoginGenServerPublicKeyDTO) => {
|
||||
const userEnc = await userDAL.findUserEncKeyByUsername({
|
||||
// akhilmhdh: case sensitive email resolution
|
||||
const usersByUsername = await userDAL.findUserEncKeyByUsername({
|
||||
username: email
|
||||
});
|
||||
const userEnc =
|
||||
usersByUsername?.length > 1 ? usersByUsername.find((el) => el.username === email) : usersByUsername?.[0];
|
||||
|
||||
const serverCfg = await getServerCfg();
|
||||
|
||||
@@ -250,9 +253,12 @@ export const authLoginServiceFactory = ({
|
||||
}: TLoginClientProofDTO) => {
|
||||
const appCfg = getConfig();
|
||||
|
||||
const userEnc = await userDAL.findUserEncKeyByUsername({
|
||||
// akhilmhdh: case sensitive email resolution
|
||||
const usersByUsername = await userDAL.findUserEncKeyByUsername({
|
||||
username: email
|
||||
});
|
||||
const userEnc =
|
||||
usersByUsername?.length > 1 ? usersByUsername.find((el) => el.username === email) : usersByUsername?.[0];
|
||||
if (!userEnc) throw new Error("Failed to find user");
|
||||
const user = await userDAL.findById(userEnc.userId);
|
||||
const cfg = getConfig();
|
||||
@@ -649,10 +655,12 @@ export const authLoginServiceFactory = ({
|
||||
* OAuth2 login for google,github, and other oauth2 provider
|
||||
* */
|
||||
const oauth2Login = async ({ email, firstName, lastName, authMethod, callbackPort }: TOauthLoginDTO) => {
|
||||
let user = await userDAL.findUserByUsername(email);
|
||||
// akhilmhdh: case sensitive email resolution
|
||||
const usersByUsername = await userDAL.findUserByUsername(email);
|
||||
let user = usersByUsername?.length > 1 ? usersByUsername.find((el) => el.username === email) : usersByUsername?.[0];
|
||||
const serverCfg = await getServerCfg();
|
||||
|
||||
if (serverCfg.enabledLoginMethods) {
|
||||
if (serverCfg.enabledLoginMethods && user) {
|
||||
switch (authMethod) {
|
||||
case AuthMethod.GITHUB: {
|
||||
if (!serverCfg.enabledLoginMethods.includes(LoginMethod.GITHUB)) {
|
||||
@@ -715,8 +723,8 @@ export const authLoginServiceFactory = ({
|
||||
}
|
||||
|
||||
user = await userDAL.create({
|
||||
username: email,
|
||||
email,
|
||||
username: email.trim().toLowerCase(),
|
||||
email: email.trim().toLowerCase(),
|
||||
isEmailVerified: true,
|
||||
firstName,
|
||||
lastName,
|
||||
@@ -814,11 +822,14 @@ export const authLoginServiceFactory = ({
|
||||
? decodedProviderToken.orgId
|
||||
: undefined;
|
||||
|
||||
const userEnc = await userDAL.findUserEncKeyByUsername({
|
||||
// akhilmhdh: case sensitive email resolution
|
||||
const usersByUsername = await userDAL.findUserEncKeyByUsername({
|
||||
username: email
|
||||
});
|
||||
if (!userEnc) throw new BadRequestError({ message: "Invalid token" });
|
||||
if (!userEnc.serverEncryptedPrivateKey)
|
||||
const userEnc =
|
||||
usersByUsername?.length > 1 ? usersByUsername.find((el) => el.username === email) : usersByUsername?.[0];
|
||||
|
||||
if (!userEnc?.serverEncryptedPrivateKey)
|
||||
throw new BadRequestError({ message: "Key handoff incomplete. Please try logging in again." });
|
||||
|
||||
const token = await generateUserTokens({
|
||||
|
@@ -121,7 +121,10 @@ export const authPaswordServiceFactory = ({
|
||||
*/
|
||||
const sendPasswordResetEmail = async (email: string) => {
|
||||
const sendEmail = async () => {
|
||||
const user = await userDAL.findUserByUsername(email);
|
||||
const users = await userDAL.findUserByUsername(email);
|
||||
// akhilmhdh: case sensitive email resolution
|
||||
const user = users?.length > 1 ? users.find((el) => el.username === email) : users?.[0];
|
||||
if (!user) throw new BadRequestError({ message: "Failed to find user data" });
|
||||
|
||||
if (user && user.isAccepted) {
|
||||
const cfg = getConfig();
|
||||
@@ -152,7 +155,10 @@ export const authPaswordServiceFactory = ({
|
||||
* */
|
||||
const verifyPasswordResetEmail = async (email: string, code: string) => {
|
||||
const cfg = getConfig();
|
||||
const user = await userDAL.findUserByUsername(email);
|
||||
const users = await userDAL.findUserByUsername(email);
|
||||
// akhilmhdh: case sensitive email resolution
|
||||
const user = users?.length > 1 ? users.find((el) => el.username === email) : users?.[0];
|
||||
if (!user) throw new BadRequestError({ message: "Failed to find user data" });
|
||||
|
||||
const userEnc = await userDAL.findUserEncKeyByUserId(user.id);
|
||||
|
||||
@@ -189,16 +195,15 @@ export const authPaswordServiceFactory = ({
|
||||
throw new BadRequestError({ message: `User encryption key not found for user with ID '${userId}'` });
|
||||
}
|
||||
|
||||
if (!user.hashedPassword) {
|
||||
throw new BadRequestError({ message: "Unable to reset password, no password is set" });
|
||||
}
|
||||
|
||||
if (!user.authMethods?.includes(AuthMethod.EMAIL)) {
|
||||
throw new BadRequestError({ message: "Unable to reset password, no email authentication method is configured" });
|
||||
}
|
||||
|
||||
// we check the old password if the user is resetting their password while logged in
|
||||
if (type === ResetPasswordV2Type.LoggedInReset) {
|
||||
if (!user.hashedPassword) {
|
||||
throw new BadRequestError({ message: "Unable to change password, no password is set" });
|
||||
}
|
||||
if (!oldPassword) {
|
||||
throw new BadRequestError({ message: "Current password is required." });
|
||||
}
|
||||
|
@@ -73,18 +73,27 @@ export const authSignupServiceFactory = ({
|
||||
}: TAuthSignupDep) => {
|
||||
// first step of signup. create user and send email
|
||||
const beginEmailSignupProcess = async (email: string) => {
|
||||
const isEmailInvalid = await isDisposableEmail(email);
|
||||
const sanitizedEmail = email.trim().toLowerCase();
|
||||
const isEmailInvalid = await isDisposableEmail(sanitizedEmail);
|
||||
if (isEmailInvalid) {
|
||||
throw new Error("Provided a disposable email");
|
||||
}
|
||||
|
||||
let user = await userDAL.findUserByUsername(email);
|
||||
// akhilmhdh: case sensitive email resolution
|
||||
const usersByUsername = await userDAL.findUserByUsername(sanitizedEmail);
|
||||
let user =
|
||||
usersByUsername?.length > 1 ? usersByUsername.find((el) => el.username === sanitizedEmail) : usersByUsername?.[0];
|
||||
if (user && user.isAccepted) {
|
||||
// TODO(akhilmhdh-pg): copy as old one. this needs to be changed due to security issues
|
||||
throw new Error("Failed to send verification code for complete account");
|
||||
throw new BadRequestError({ message: "Failed to send verification code for complete account" });
|
||||
}
|
||||
if (!user) {
|
||||
user = await userDAL.create({ authMethods: [AuthMethod.EMAIL], username: email, email, isGhost: false });
|
||||
user = await userDAL.create({
|
||||
authMethods: [AuthMethod.EMAIL],
|
||||
username: sanitizedEmail,
|
||||
email: sanitizedEmail,
|
||||
isGhost: false
|
||||
});
|
||||
}
|
||||
if (!user) throw new Error("Failed to create user");
|
||||
|
||||
@@ -96,7 +105,7 @@ export const authSignupServiceFactory = ({
|
||||
await smtpService.sendMail({
|
||||
template: SmtpTemplates.SignupEmailVerification,
|
||||
subjectLine: "Infisical confirmation code",
|
||||
recipients: [user.email as string],
|
||||
recipients: [sanitizedEmail],
|
||||
substitutions: {
|
||||
code: token
|
||||
}
|
||||
@@ -104,11 +113,15 @@ export const authSignupServiceFactory = ({
|
||||
};
|
||||
|
||||
const verifyEmailSignup = async (email: string, code: string) => {
|
||||
const user = await userDAL.findUserByUsername(email);
|
||||
const sanitizedEmail = email.trim().toLowerCase();
|
||||
const usersByUsername = await userDAL.findUserByUsername(sanitizedEmail);
|
||||
const user =
|
||||
usersByUsername?.length > 1 ? usersByUsername.find((el) => el.username === sanitizedEmail) : usersByUsername?.[0];
|
||||
if (!user || (user && user.isAccepted)) {
|
||||
// TODO(akhilmhdh): copy as old one. this needs to be changed due to security issues
|
||||
throw new Error("Failed to send verification code for complete account");
|
||||
}
|
||||
|
||||
const appCfg = getConfig();
|
||||
await tokenService.validateTokenForUser({
|
||||
type: TokenType.TOKEN_EMAIL_CONFIRMATION,
|
||||
@@ -153,12 +166,15 @@ export const authSignupServiceFactory = ({
|
||||
authorization,
|
||||
useDefaultOrg
|
||||
}: TCompleteAccountSignupDTO) => {
|
||||
const sanitizedEmail = email.trim().toLowerCase();
|
||||
const appCfg = getConfig();
|
||||
const serverCfg = await getServerCfg();
|
||||
|
||||
const user = await userDAL.findOne({ username: email });
|
||||
const usersByUsername = await userDAL.findUserByUsername(sanitizedEmail);
|
||||
const user =
|
||||
usersByUsername?.length > 1 ? usersByUsername.find((el) => el.username === sanitizedEmail) : usersByUsername?.[0];
|
||||
if (!user || (user && user.isAccepted)) {
|
||||
throw new Error("Failed to complete account for complete user");
|
||||
throw new BadRequestError({ message: "Failed to complete account for complete user" });
|
||||
}
|
||||
|
||||
let organizationId: string | null = null;
|
||||
@@ -315,7 +331,7 @@ export const authSignupServiceFactory = ({
|
||||
}
|
||||
|
||||
const updatedMembersips = await orgDAL.updateMembership(
|
||||
{ inviteEmail: email, status: OrgMembershipStatus.Invited },
|
||||
{ inviteEmail: sanitizedEmail, status: OrgMembershipStatus.Invited },
|
||||
{ userId: user.id, status: OrgMembershipStatus.Accepted }
|
||||
);
|
||||
const uniqueOrgId = [...new Set(updatedMembersips.map(({ orgId }) => orgId))];
|
||||
@@ -382,9 +398,9 @@ export const authSignupServiceFactory = ({
|
||||
* User signup flow when they are invited to join the org
|
||||
* */
|
||||
const completeAccountInvite = async ({
|
||||
email,
|
||||
ip,
|
||||
salt,
|
||||
email,
|
||||
password,
|
||||
verifier,
|
||||
firstName,
|
||||
@@ -399,7 +415,10 @@ export const authSignupServiceFactory = ({
|
||||
encryptedPrivateKeyTag,
|
||||
authorization
|
||||
}: TCompleteAccountInviteDTO) => {
|
||||
const user = await userDAL.findUserByUsername(email);
|
||||
const sanitizedEmail = email.trim().toLowerCase();
|
||||
const usersByUsername = await userDAL.findUserByUsername(sanitizedEmail);
|
||||
const user =
|
||||
usersByUsername?.length > 1 ? usersByUsername.find((el) => el.username === sanitizedEmail) : usersByUsername?.[0];
|
||||
if (!user || (user && user.isAccepted)) {
|
||||
throw new Error("Failed to complete account for complete user");
|
||||
}
|
||||
@@ -407,7 +426,7 @@ export const authSignupServiceFactory = ({
|
||||
validateSignUpAuthorization(authorization, user.id);
|
||||
|
||||
const [orgMembership] = await orgDAL.findMembership({
|
||||
inviteEmail: email,
|
||||
inviteEmail: sanitizedEmail,
|
||||
status: OrgMembershipStatus.Invited
|
||||
});
|
||||
if (!orgMembership)
|
||||
@@ -454,7 +473,7 @@ export const authSignupServiceFactory = ({
|
||||
const serverGeneratedPrivateKey = await getUserPrivateKey(serverGeneratedPassword, {
|
||||
...systemGeneratedUserEncryptionKey
|
||||
});
|
||||
const encKeys = await generateUserSrpKeys(email, password, {
|
||||
const encKeys = await generateUserSrpKeys(sanitizedEmail, password, {
|
||||
publicKey: systemGeneratedUserEncryptionKey.publicKey,
|
||||
privateKey: serverGeneratedPrivateKey
|
||||
});
|
||||
@@ -505,7 +524,7 @@ export const authSignupServiceFactory = ({
|
||||
}
|
||||
|
||||
const updatedMembersips = await orgDAL.updateMembership(
|
||||
{ inviteEmail: email, status: OrgMembershipStatus.Invited },
|
||||
{ inviteEmail: sanitizedEmail, status: OrgMembershipStatus.Invited },
|
||||
{ userId: us.id, status: OrgMembershipStatus.Accepted },
|
||||
tx
|
||||
);
|
||||
|
@@ -105,7 +105,7 @@ export const buildCertificateChain = async ({
|
||||
kmsService,
|
||||
kmsId
|
||||
}: TBuildCertificateChainDTO) => {
|
||||
if (!encryptedCertificateChain && (!caCert || !caCertChain)) {
|
||||
if (!encryptedCertificateChain && !caCert) {
|
||||
return null;
|
||||
}
|
||||
|
||||
|
@@ -29,6 +29,7 @@ import {
|
||||
TGetCertPrivateKeyDTO,
|
||||
TRevokeCertDTO
|
||||
} from "./certificate-types";
|
||||
import { NotFoundError } from "@app/lib/errors";
|
||||
|
||||
type TCertificateServiceFactoryDep = {
|
||||
certificateDAL: Pick<TCertificateDALFactory, "findOne" | "deleteById" | "update" | "find">;
|
||||
@@ -337,18 +338,27 @@ export const certificateServiceFactory = ({
|
||||
encryptedCertificateChain: certBody.encryptedCertificateChain || undefined
|
||||
});
|
||||
|
||||
const { certPrivateKey } = await getCertificateCredentials({
|
||||
certId: cert.id,
|
||||
projectId: ca.projectId,
|
||||
certificateSecretDAL,
|
||||
projectDAL,
|
||||
kmsService
|
||||
});
|
||||
let privateKey: string | null = null;
|
||||
try {
|
||||
const { certPrivateKey } = await getCertificateCredentials({
|
||||
certId: cert.id,
|
||||
projectId: ca.projectId,
|
||||
certificateSecretDAL,
|
||||
projectDAL,
|
||||
kmsService
|
||||
});
|
||||
privateKey = certPrivateKey;
|
||||
} catch (e) {
|
||||
// Skip NotFound errors but throw all others
|
||||
if (!(e instanceof NotFoundError)) {
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
certificate,
|
||||
certificateChain,
|
||||
privateKey: certPrivateKey,
|
||||
privateKey,
|
||||
serialNumber,
|
||||
cert,
|
||||
ca
|
||||
|
@@ -4,8 +4,14 @@ import https from "https";
|
||||
import jwt from "jsonwebtoken";
|
||||
|
||||
import { IdentityAuthMethod, TIdentityKubernetesAuthsUpdate } from "@app/db/schemas";
|
||||
import { TGatewayDALFactory } from "@app/ee/services/gateway/gateway-dal";
|
||||
import { TGatewayServiceFactory } from "@app/ee/services/gateway/gateway-service";
|
||||
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
|
||||
import { OrgPermissionIdentityActions, OrgPermissionSubjects } from "@app/ee/services/permission/org-permission";
|
||||
import {
|
||||
OrgPermissionGatewayActions,
|
||||
OrgPermissionIdentityActions,
|
||||
OrgPermissionSubjects
|
||||
} from "@app/ee/services/permission/org-permission";
|
||||
import {
|
||||
constructPermissionErrorMessage,
|
||||
validatePrivilegeChangeOperation
|
||||
@@ -13,6 +19,7 @@ import {
|
||||
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
|
||||
import { getConfig } from "@app/lib/config/env";
|
||||
import { BadRequestError, NotFoundError, PermissionBoundaryError, UnauthorizedError } from "@app/lib/errors";
|
||||
import { withGatewayProxy } from "@app/lib/gateway";
|
||||
import { extractIPDetails, isValidIpOrCidr } from "@app/lib/ip";
|
||||
|
||||
import { ActorType, AuthTokenType } from "../auth/auth-type";
|
||||
@@ -43,6 +50,8 @@ type TIdentityKubernetesAuthServiceFactoryDep = {
|
||||
permissionService: Pick<TPermissionServiceFactory, "getOrgPermission">;
|
||||
licenseService: Pick<TLicenseServiceFactory, "getPlan">;
|
||||
kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">;
|
||||
gatewayService: TGatewayServiceFactory;
|
||||
gatewayDAL: Pick<TGatewayDALFactory, "find">;
|
||||
};
|
||||
|
||||
export type TIdentityKubernetesAuthServiceFactory = ReturnType<typeof identityKubernetesAuthServiceFactory>;
|
||||
@@ -53,8 +62,45 @@ export const identityKubernetesAuthServiceFactory = ({
|
||||
identityAccessTokenDAL,
|
||||
permissionService,
|
||||
licenseService,
|
||||
gatewayService,
|
||||
gatewayDAL,
|
||||
kmsService
|
||||
}: TIdentityKubernetesAuthServiceFactoryDep) => {
|
||||
const $gatewayProxyWrapper = async <T>(
|
||||
inputs: {
|
||||
gatewayId: string;
|
||||
targetHost: string;
|
||||
targetPort: number;
|
||||
},
|
||||
gatewayCallback: (host: string, port: number) => Promise<T>
|
||||
): Promise<T> => {
|
||||
const relayDetails = await gatewayService.fnGetGatewayClientTlsByGatewayId(inputs.gatewayId);
|
||||
const [relayHost, relayPort] = relayDetails.relayAddress.split(":");
|
||||
|
||||
const callbackResult = await withGatewayProxy(
|
||||
async (port) => {
|
||||
// Needs to be https protocol or the kubernetes API server will fail with "Client sent an HTTP request to an HTTPS server"
|
||||
const res = await gatewayCallback("https://localhost", port);
|
||||
return res;
|
||||
},
|
||||
{
|
||||
targetHost: inputs.targetHost,
|
||||
targetPort: inputs.targetPort,
|
||||
relayHost,
|
||||
relayPort: Number(relayPort),
|
||||
identityId: relayDetails.identityId,
|
||||
orgId: relayDetails.orgId,
|
||||
tlsOptions: {
|
||||
ca: relayDetails.certChain,
|
||||
cert: relayDetails.certificate,
|
||||
key: relayDetails.privateKey.toString()
|
||||
}
|
||||
}
|
||||
);
|
||||
|
||||
return callbackResult;
|
||||
};
|
||||
|
||||
const login = async ({ identityId, jwt: serviceAccountJwt }: TLoginKubernetesAuthDTO) => {
|
||||
const identityKubernetesAuth = await identityKubernetesAuthDAL.findOne({ identityId });
|
||||
if (!identityKubernetesAuth) {
|
||||
@@ -92,46 +138,65 @@ export const identityKubernetesAuthServiceFactory = ({
|
||||
tokenReviewerJwt = serviceAccountJwt;
|
||||
}
|
||||
|
||||
const { data } = await axios
|
||||
.post<TCreateTokenReviewResponse>(
|
||||
`${identityKubernetesAuth.kubernetesHost}/apis/authentication.k8s.io/v1/tokenreviews`,
|
||||
{
|
||||
apiVersion: "authentication.k8s.io/v1",
|
||||
kind: "TokenReview",
|
||||
spec: {
|
||||
token: serviceAccountJwt,
|
||||
...(identityKubernetesAuth.allowedAudience ? { audiences: [identityKubernetesAuth.allowedAudience] } : {})
|
||||
}
|
||||
},
|
||||
{
|
||||
headers: {
|
||||
"Content-Type": "application/json",
|
||||
Authorization: `Bearer ${tokenReviewerJwt}`
|
||||
},
|
||||
signal: AbortSignal.timeout(10000),
|
||||
timeout: 10000,
|
||||
// if ca cert, rejectUnauthorized: true
|
||||
httpsAgent: new https.Agent({
|
||||
ca: caCert,
|
||||
rejectUnauthorized: !!caCert
|
||||
})
|
||||
}
|
||||
)
|
||||
.catch((err) => {
|
||||
if (err instanceof AxiosError) {
|
||||
if (err.response) {
|
||||
const { message } = err?.response?.data as unknown as { message?: string };
|
||||
const tokenReviewCallback = async (host: string = identityKubernetesAuth.kubernetesHost, port?: number) => {
|
||||
const baseUrl = port ? `${host}:${port}` : host;
|
||||
|
||||
if (message) {
|
||||
throw new UnauthorizedError({
|
||||
message,
|
||||
name: "KubernetesTokenReviewRequestError"
|
||||
});
|
||||
const res = await axios
|
||||
.post<TCreateTokenReviewResponse>(
|
||||
`${baseUrl}/apis/authentication.k8s.io/v1/tokenreviews`,
|
||||
{
|
||||
apiVersion: "authentication.k8s.io/v1",
|
||||
kind: "TokenReview",
|
||||
spec: {
|
||||
token: serviceAccountJwt,
|
||||
...(identityKubernetesAuth.allowedAudience ? { audiences: [identityKubernetesAuth.allowedAudience] } : {})
|
||||
}
|
||||
},
|
||||
{
|
||||
headers: {
|
||||
"Content-Type": "application/json",
|
||||
Authorization: `Bearer ${tokenReviewerJwt}`
|
||||
},
|
||||
signal: AbortSignal.timeout(10000),
|
||||
timeout: 10000,
|
||||
// if ca cert, rejectUnauthorized: true
|
||||
httpsAgent: new https.Agent({
|
||||
ca: caCert,
|
||||
rejectUnauthorized: !!caCert
|
||||
})
|
||||
}
|
||||
)
|
||||
.catch((err) => {
|
||||
if (err instanceof AxiosError) {
|
||||
if (err.response) {
|
||||
const { message } = err?.response?.data as unknown as { message?: string };
|
||||
|
||||
if (message) {
|
||||
throw new UnauthorizedError({
|
||||
message,
|
||||
name: "KubernetesTokenReviewRequestError"
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
throw err;
|
||||
});
|
||||
throw err;
|
||||
});
|
||||
|
||||
return res.data;
|
||||
};
|
||||
|
||||
const [k8sHost, k8sPort] = identityKubernetesAuth.kubernetesHost.split(":");
|
||||
|
||||
const data = identityKubernetesAuth.gatewayId
|
||||
? await $gatewayProxyWrapper(
|
||||
{
|
||||
gatewayId: identityKubernetesAuth.gatewayId,
|
||||
targetHost: k8sHost,
|
||||
targetPort: k8sPort ? Number(k8sPort) : 443
|
||||
},
|
||||
tokenReviewCallback
|
||||
)
|
||||
: await tokenReviewCallback();
|
||||
|
||||
if ("error" in data.status)
|
||||
throw new UnauthorizedError({ message: data.status.error, name: "KubernetesTokenReviewError" });
|
||||
@@ -222,6 +287,7 @@ export const identityKubernetesAuthServiceFactory = ({
|
||||
|
||||
const attachKubernetesAuth = async ({
|
||||
identityId,
|
||||
gatewayId,
|
||||
kubernetesHost,
|
||||
caCert,
|
||||
tokenReviewerJwt,
|
||||
@@ -280,6 +346,27 @@ export const identityKubernetesAuthServiceFactory = ({
|
||||
return extractIPDetails(accessTokenTrustedIp.ipAddress);
|
||||
});
|
||||
|
||||
if (gatewayId) {
|
||||
const [gateway] = await gatewayDAL.find({ id: gatewayId, orgId: identityMembershipOrg.orgId });
|
||||
if (!gateway) {
|
||||
throw new NotFoundError({
|
||||
message: `Gateway with ID ${gatewayId} not found`
|
||||
});
|
||||
}
|
||||
|
||||
const { permission: orgPermission } = await permissionService.getOrgPermission(
|
||||
actor,
|
||||
actorId,
|
||||
identityMembershipOrg.orgId,
|
||||
actorAuthMethod,
|
||||
actorOrgId
|
||||
);
|
||||
ForbiddenError.from(orgPermission).throwUnlessCan(
|
||||
OrgPermissionGatewayActions.AttachGateways,
|
||||
OrgPermissionSubjects.Gateway
|
||||
);
|
||||
}
|
||||
|
||||
const { encryptor } = await kmsService.createCipherPairWithDataKey({
|
||||
type: KmsDataKey.Organization,
|
||||
orgId: identityMembershipOrg.orgId
|
||||
@@ -296,6 +383,7 @@ export const identityKubernetesAuthServiceFactory = ({
|
||||
accessTokenMaxTTL,
|
||||
accessTokenTTL,
|
||||
accessTokenNumUsesLimit,
|
||||
gatewayId,
|
||||
accessTokenTrustedIps: JSON.stringify(reformattedAccessTokenTrustedIps),
|
||||
encryptedKubernetesTokenReviewerJwt: tokenReviewerJwt
|
||||
? encryptor({ plainText: Buffer.from(tokenReviewerJwt) }).cipherTextBlob
|
||||
@@ -318,6 +406,7 @@ export const identityKubernetesAuthServiceFactory = ({
|
||||
allowedNamespaces,
|
||||
allowedNames,
|
||||
allowedAudience,
|
||||
gatewayId,
|
||||
accessTokenTTL,
|
||||
accessTokenMaxTTL,
|
||||
accessTokenNumUsesLimit,
|
||||
@@ -373,11 +462,33 @@ export const identityKubernetesAuthServiceFactory = ({
|
||||
return extractIPDetails(accessTokenTrustedIp.ipAddress);
|
||||
});
|
||||
|
||||
if (gatewayId) {
|
||||
const [gateway] = await gatewayDAL.find({ id: gatewayId, orgId: identityMembershipOrg.orgId });
|
||||
if (!gateway) {
|
||||
throw new NotFoundError({
|
||||
message: `Gateway with ID ${gatewayId} not found`
|
||||
});
|
||||
}
|
||||
|
||||
const { permission: orgPermission } = await permissionService.getOrgPermission(
|
||||
actor,
|
||||
actorId,
|
||||
identityMembershipOrg.orgId,
|
||||
actorAuthMethod,
|
||||
actorOrgId
|
||||
);
|
||||
ForbiddenError.from(orgPermission).throwUnlessCan(
|
||||
OrgPermissionGatewayActions.AttachGateways,
|
||||
OrgPermissionSubjects.Gateway
|
||||
);
|
||||
}
|
||||
|
||||
const updateQuery: TIdentityKubernetesAuthsUpdate = {
|
||||
kubernetesHost,
|
||||
allowedNamespaces,
|
||||
allowedNames,
|
||||
allowedAudience,
|
||||
gatewayId,
|
||||
accessTokenMaxTTL,
|
||||
accessTokenTTL,
|
||||
accessTokenNumUsesLimit,
|
||||
|
@@ -13,6 +13,7 @@ export type TAttachKubernetesAuthDTO = {
|
||||
allowedNamespaces: string;
|
||||
allowedNames: string;
|
||||
allowedAudience: string;
|
||||
gatewayId?: string | null;
|
||||
accessTokenTTL: number;
|
||||
accessTokenMaxTTL: number;
|
||||
accessTokenNumUsesLimit: number;
|
||||
@@ -28,6 +29,7 @@ export type TUpdateKubernetesAuthDTO = {
|
||||
allowedNamespaces?: string;
|
||||
allowedNames?: string;
|
||||
allowedAudience?: string;
|
||||
gatewayId?: string | null;
|
||||
accessTokenTTL?: number;
|
||||
accessTokenMaxTTL?: number;
|
||||
accessTokenNumUsesLimit?: number;
|
||||
|
@@ -206,7 +206,7 @@ export const orgDALFactory = (db: TDbClient) => {
|
||||
.where(`${TableName.OrgMembership}.orgId`, orgId)
|
||||
.count("*")
|
||||
.join(TableName.Users, `${TableName.OrgMembership}.userId`, `${TableName.Users}.id`)
|
||||
.where({ isGhost: false })
|
||||
.where({ isGhost: false, [`${TableName.OrgMembership}.isActive` as "isActive"]: true })
|
||||
.first();
|
||||
|
||||
return parseInt((count as unknown as CountResult).count || "0", 10);
|
||||
|
@@ -18,5 +18,13 @@ export const sanitizedOrganizationSchema = OrganizationsSchema.pick({
|
||||
privilegeUpgradeInitiatedByUsername: true,
|
||||
privilegeUpgradeInitiatedAt: true,
|
||||
bypassOrgAuthEnabled: true,
|
||||
userTokenExpiration: true
|
||||
userTokenExpiration: true,
|
||||
secretsProductEnabled: true,
|
||||
pkiProductEnabled: true,
|
||||
kmsProductEnabled: true,
|
||||
sshProductEnabled: true,
|
||||
scannerProductEnabled: true,
|
||||
shareSecretsProductEnabled: true,
|
||||
maxSharedSecretLifetime: true,
|
||||
maxSharedSecretViewLimit: true
|
||||
});
|
||||
|
@@ -355,7 +355,15 @@ export const orgServiceFactory = ({
|
||||
selectedMfaMethod,
|
||||
allowSecretSharingOutsideOrganization,
|
||||
bypassOrgAuthEnabled,
|
||||
userTokenExpiration
|
||||
userTokenExpiration,
|
||||
secretsProductEnabled,
|
||||
pkiProductEnabled,
|
||||
kmsProductEnabled,
|
||||
sshProductEnabled,
|
||||
scannerProductEnabled,
|
||||
shareSecretsProductEnabled,
|
||||
maxSharedSecretLifetime,
|
||||
maxSharedSecretViewLimit
|
||||
}
|
||||
}: TUpdateOrgDTO) => {
|
||||
const appCfg = getConfig();
|
||||
@@ -457,7 +465,15 @@ export const orgServiceFactory = ({
|
||||
selectedMfaMethod,
|
||||
allowSecretSharingOutsideOrganization,
|
||||
bypassOrgAuthEnabled,
|
||||
userTokenExpiration
|
||||
userTokenExpiration,
|
||||
secretsProductEnabled,
|
||||
pkiProductEnabled,
|
||||
kmsProductEnabled,
|
||||
sshProductEnabled,
|
||||
scannerProductEnabled,
|
||||
shareSecretsProductEnabled,
|
||||
maxSharedSecretLifetime,
|
||||
maxSharedSecretViewLimit
|
||||
});
|
||||
if (!org) throw new NotFoundError({ message: `Organization with ID '${orgId}' not found` });
|
||||
return org;
|
||||
@@ -811,7 +827,11 @@ export const orgServiceFactory = ({
|
||||
const users: Pick<TUsers, "id" | "firstName" | "lastName" | "email" | "username">[] = [];
|
||||
|
||||
for await (const inviteeEmail of inviteeEmails) {
|
||||
let inviteeUser = await userDAL.findUserByUsername(inviteeEmail, tx);
|
||||
const usersByUsername = await userDAL.findUserByUsername(inviteeEmail, tx);
|
||||
let inviteeUser =
|
||||
usersByUsername?.length > 1
|
||||
? usersByUsername.find((el) => el.username === inviteeEmail)
|
||||
: usersByUsername?.[0];
|
||||
|
||||
// if the user doesn't exist we create the user with the email
|
||||
if (!inviteeUser) {
|
||||
@@ -1223,10 +1243,13 @@ export const orgServiceFactory = ({
|
||||
* magic link and issue a temporary signup token for user to complete setting up their account
|
||||
*/
|
||||
const verifyUserToOrg = async ({ orgId, email, code }: TVerifyUserToOrgDTO) => {
|
||||
const user = await userDAL.findUserByUsername(email);
|
||||
const usersByUsername = await userDAL.findUserByUsername(email);
|
||||
const user =
|
||||
usersByUsername?.length > 1 ? usersByUsername.find((el) => el.username === email) : usersByUsername?.[0];
|
||||
if (!user) {
|
||||
throw new NotFoundError({ message: "User not found" });
|
||||
}
|
||||
|
||||
const [orgMembership] = await orgDAL.findMembership({
|
||||
[`${TableName.OrgMembership}.userId` as "userId"]: user.id,
|
||||
status: OrgMembershipStatus.Invited,
|
||||
|
@@ -75,6 +75,14 @@ export type TUpdateOrgDTO = {
|
||||
allowSecretSharingOutsideOrganization: boolean;
|
||||
bypassOrgAuthEnabled: boolean;
|
||||
userTokenExpiration: string;
|
||||
secretsProductEnabled: boolean;
|
||||
pkiProductEnabled: boolean;
|
||||
kmsProductEnabled: boolean;
|
||||
sshProductEnabled: boolean;
|
||||
scannerProductEnabled: boolean;
|
||||
shareSecretsProductEnabled: boolean;
|
||||
maxSharedSecretLifetime: number;
|
||||
maxSharedSecretViewLimit: number | null;
|
||||
}>;
|
||||
} & TOrgPermission;
|
||||
|
||||
|
@@ -658,7 +658,8 @@ export const projectServiceFactory = ({
|
||||
autoCapitalization: update.autoCapitalization,
|
||||
enforceCapitalization: update.autoCapitalization,
|
||||
hasDeleteProtection: update.hasDeleteProtection,
|
||||
slug: update.slug
|
||||
slug: update.slug,
|
||||
secretSharing: update.secretSharing
|
||||
});
|
||||
|
||||
return updatedProject;
|
||||
|
@@ -93,6 +93,7 @@ export type TUpdateProjectDTO = {
|
||||
autoCapitalization?: boolean;
|
||||
hasDeleteProtection?: boolean;
|
||||
slug?: string;
|
||||
secretSharing?: boolean;
|
||||
};
|
||||
} & Omit<TProjectPermission, "projectId">;
|
||||
|
||||
|
@@ -6,6 +6,7 @@ import { TSecretSharing } from "@app/db/schemas";
|
||||
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
|
||||
import { getConfig } from "@app/lib/config/env";
|
||||
import { BadRequestError, ForbiddenRequestError, NotFoundError, UnauthorizedError } from "@app/lib/errors";
|
||||
import { logger } from "@app/lib/logger";
|
||||
import { SecretSharingAccessType } from "@app/lib/types";
|
||||
import { isUuidV4 } from "@app/lib/validator";
|
||||
|
||||
@@ -60,7 +61,9 @@ export const secretSharingServiceFactory = ({
|
||||
}
|
||||
|
||||
const fiveMins = 5 * 60 * 1000;
|
||||
if (expiryTime - currentTime < fiveMins) {
|
||||
|
||||
// 1 second buffer
|
||||
if (expiryTime - currentTime + 1000 < fiveMins) {
|
||||
throw new BadRequestError({ message: "Expiration time cannot be less than 5 mins" });
|
||||
}
|
||||
};
|
||||
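A hedged worked example of why the one-second buffer was added: without it, an `expiresAt` computed client-side as exactly five minutes out gets rejected once a few hundred milliseconds of request latency elapse.

```typescript
// Sketch with made-up numbers; mirrors the guard above.
const fiveMins = 5 * 60 * 1000;
const currentTime = Date.now();
const expiryTime = currentTime + fiveMins - 300; // request arrived ~300ms "late"

const rejectedBefore = expiryTime - currentTime < fiveMins; // true, the unbuffered check fails
const rejectedNow = expiryTime - currentTime + 1000 < fiveMins; // false, the buffered check passes
```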
@@ -76,8 +79,11 @@ export const secretSharingServiceFactory = ({
|
||||
password,
|
||||
accessType,
|
||||
expiresAt,
|
||||
expiresAfterViews
|
||||
expiresAfterViews,
|
||||
emails
|
||||
}: TCreateSharedSecretDTO) => {
|
||||
const appCfg = getConfig();
|
||||
|
||||
const { permission } = await permissionService.getOrgPermission(actor, actorId, orgId, actorAuthMethod, actorOrgId);
|
||||
if (!permission) throw new ForbiddenRequestError({ name: "User is not a part of the specified organization" });
|
||||
$validateSharedSecretExpiry(expiresAt);
|
||||
@@ -93,7 +99,46 @@ export const secretSharingServiceFactory = ({
|
||||
throw new BadRequestError({ message: "Shared secret value too long" });
|
||||
}
|
||||
|
||||
// Check lifetime is within org allowance
|
||||
const expiresAtTimestamp = new Date(expiresAt).getTime();
|
||||
const lifetime = expiresAtTimestamp - new Date().getTime();
|
||||
|
||||
// org.maxSharedSecretLifetime is in seconds
|
||||
if (org.maxSharedSecretLifetime && lifetime / 1000 > org.maxSharedSecretLifetime) {
|
||||
throw new BadRequestError({ message: "Secret lifetime exceeds organization limit" });
|
||||
}
|
||||
|
||||
// Check max view count is within org allowance
|
||||
if (org.maxSharedSecretViewLimit && (!expiresAfterViews || expiresAfterViews > org.maxSharedSecretViewLimit)) {
|
||||
throw new BadRequestError({ message: "Secret max views parameter exceeds organization limit" });
|
||||
}
|
||||
|
||||
const encryptWithRoot = kmsService.encryptWithRootKey();
|
||||
|
||||
let salt: string | undefined;
|
||||
let encryptedSalt: Buffer | undefined;
|
||||
const orgEmails = [];
|
||||
|
||||
if (emails && emails.length > 0) {
|
||||
const allOrgMembers = await orgDAL.findAllOrgMembers(orgId);
|
||||
|
||||
// Check to see that all emails are a part of the organization (if enforced) while also collecting a list of emails which are in the org
|
||||
for (const email of emails) {
|
||||
if (allOrgMembers.some((v) => v.user.email === email)) {
|
||||
orgEmails.push(email);
|
||||
// If the email is not part of the org, but access type / org settings require it
|
||||
} else if (!org.allowSecretSharingOutsideOrganization || accessType === SecretSharingAccessType.Organization) {
|
||||
throw new BadRequestError({
|
||||
message: "Organization does not allow sharing secrets to members outside of this organization"
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// Generate salt for signing email hashes (if emails are provided)
|
||||
salt = crypto.randomBytes(32).toString("hex");
|
||||
encryptedSalt = encryptWithRoot(Buffer.from(salt));
|
||||
}
|
||||
|
||||
const encryptedSecret = encryptWithRoot(Buffer.from(secretValue));
|
||||
|
||||
const id = crypto.randomBytes(32).toString("hex");
|
||||
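Because the new limits mix units, a short hedged example of the lifetime check (the organization limit below is an assumed value):

```typescript
// maxSharedSecretLifetime is stored in seconds; the computed lifetime is in
// milliseconds, hence the division by 1000 before comparing.
const maxSharedSecretLifetime = 30 * 24 * 60 * 60; // assumed org limit: 30 days
const expiresAt = new Date(Date.now() + 45 * 24 * 60 * 60 * 1000); // 45 days out
const lifetimeMs = expiresAt.getTime() - Date.now();

const exceedsLimit = lifetimeMs / 1000 > maxSharedSecretLifetime; // true, request is rejected
```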
@@ -112,11 +157,45 @@ export const secretSharingServiceFactory = ({
|
||||
expiresAfterViews,
|
||||
userId: actorId,
|
||||
orgId,
|
||||
accessType
|
||||
accessType,
|
||||
authorizedEmails: emails && emails.length > 0 ? JSON.stringify(emails) : undefined,
|
||||
encryptedSalt
|
||||
});
|
||||
|
||||
const idToReturn = `${Buffer.from(newSharedSecret.identifier!, "hex").toString("base64url")}`;
|
||||
|
||||
// Loop through recipients and send out emails with unique access links
|
||||
if (emails && salt) {
|
||||
const user = await userDAL.findById(actorId);
|
||||
|
||||
if (!user) {
|
||||
throw new NotFoundError({ message: `User with ID '${actorId}' not found` });
|
||||
}
|
||||
|
||||
for await (const email of emails) {
|
||||
try {
|
||||
const hmac = crypto.createHmac("sha256", salt).update(email);
|
||||
const hash = hmac.digest("hex");
|
||||
|
||||
// Only show the username to emails which are part of the organization
|
||||
const respondentUsername = orgEmails.includes(email) ? user.username : undefined;
|
||||
|
||||
await smtpService.sendMail({
|
||||
recipients: [email],
|
||||
subjectLine: "A secret has been shared with you",
|
||||
substitutions: {
|
||||
name,
|
||||
respondentUsername,
|
||||
secretRequestUrl: `${appCfg.SITE_URL}/shared/secret/${idToReturn}?email=${encodeURIComponent(email)}&hash=${hash}`
|
||||
},
|
||||
template: SmtpTemplates.SecretRequestCompleted
|
||||
});
|
||||
} catch (e) {
|
||||
logger.error(e, "Failed to send shared secret URL to a recipient's email.");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return { id: idToReturn };
|
||||
};
|
||||
|
||||
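A compact sketch of the email-gating scheme introduced in this hunk, assuming the salt is the same one persisted as `encryptedSalt`: at share time each recipient's link carries an HMAC of their email; at access time the service decrypts the salt, recomputes the HMAC, and compares. The URL, identifier, and email below are example values.

```typescript
import crypto from "crypto";

const salt = crypto.randomBytes(32).toString("hex"); // persisted encrypted as encryptedSalt
const email = "recipient@example.com";

// Share time: embed a per-recipient hash in the access link.
const hash = crypto.createHmac("sha256", salt).update(email).digest("hex");
const link = `https://app.example.com/shared/secret/abc123?email=${encodeURIComponent(email)}&hash=${hash}`;

// Access time: recompute from the decrypted salt and compare; the email must
// also appear in the secret's authorizedEmails list.
const rebuiltHash = crypto.createHmac("sha256", salt).update(email).digest("hex");
const authorized = rebuiltHash === hash;
```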
@@ -390,8 +469,15 @@ export const secretSharingServiceFactory = ({
|
||||
});
|
||||
};
|
||||
|
||||
/** Get's password-less secret. validates all secret's requested (must be fresh). */
|
||||
const getSharedSecretById = async ({ sharedSecretId, hashedHex, orgId, password }: TGetActiveSharedSecretByIdDTO) => {
|
||||
/** Gets password-less secret. validates all secret's requested (must be fresh). */
|
||||
const getSharedSecretById = async ({
|
||||
sharedSecretId,
|
||||
hashedHex,
|
||||
orgId,
|
||||
password,
|
||||
email,
|
||||
hash
|
||||
}: TGetActiveSharedSecretByIdDTO) => {
|
||||
const sharedSecret = isUuidV4(sharedSecretId)
|
||||
? await secretSharingDAL.findOne({
|
||||
id: sharedSecretId,
|
||||
@@ -438,6 +524,32 @@ export const secretSharingServiceFactory = ({
|
||||
});
|
||||
}
|
||||
|
||||
const decryptWithRoot = kmsService.decryptWithRootKey();
|
||||
|
||||
if (sharedSecret.authorizedEmails && sharedSecret.encryptedSalt) {
|
||||
// Verify both params were passed
|
||||
if (!email || !hash) {
|
||||
throw new BadRequestError({
|
||||
message: "This secret is email protected. Parameters must include email and hash."
|
||||
});
|
||||
|
||||
// Verify that email is authorized to view shared secret
|
||||
} else if (!(sharedSecret.authorizedEmails as string[]).includes(email)) {
|
||||
throw new UnauthorizedError({ message: "Email not authorized to view secret" });
|
||||
|
||||
// Verify that hash matches
|
||||
} else {
|
||||
const salt = decryptWithRoot(sharedSecret.encryptedSalt).toString();
|
||||
const hmac = crypto.createHmac("sha256", salt).update(email);
|
||||
const rebuiltHash = hmac.digest("hex");
|
||||
|
||||
if (rebuiltHash !== hash) {
|
||||
throw new UnauthorizedError({ message: "Email not authorized to view secret" });
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Password checks
|
||||
const isPasswordProtected = Boolean(sharedSecret.password);
|
||||
const hasProvidedPassword = Boolean(password);
|
||||
if (isPasswordProtected) {
|
||||
@@ -452,7 +564,6 @@ export const secretSharingServiceFactory = ({
|
||||
// If encryptedSecret is set, we know that this secret has been encrypted using KMS, and we can therefore do server-side decryption.
|
||||
let decryptedSecretValue: Buffer | undefined;
|
||||
if (sharedSecret.encryptedSecret) {
|
||||
const decryptWithRoot = kmsService.decryptWithRootKey();
|
||||
decryptedSecretValue = decryptWithRoot(sharedSecret.encryptedSecret);
|
||||
}
|
||||
|
||||
|
@@ -22,6 +22,7 @@ export type TSharedSecretPermission = {
|
||||
accessType?: SecretSharingAccessType;
|
||||
name?: string;
|
||||
password?: string;
|
||||
emails?: string[];
|
||||
};
|
||||
|
||||
export type TCreatePublicSharedSecretDTO = {
|
||||
@@ -37,6 +38,10 @@ export type TGetActiveSharedSecretByIdDTO = {
|
||||
hashedHex?: string;
|
||||
orgId?: string;
|
||||
password?: string;
|
||||
|
||||
// For secrets shared with specific emails
|
||||
email?: string;
|
||||
hash?: string;
|
||||
};
|
||||
|
||||
export type TValidateActiveSharedSecretDTO = TGetActiveSharedSecretByIdDTO & {
|
||||
|
@@ -28,9 +28,9 @@ const BaseSyncOptionsSchema = <T extends AnyZodObject | undefined = undefined>({
|
||||
keySchema: z
|
||||
.string()
|
||||
.optional()
|
||||
.refine((val) => !val || new RE2(/^(?:[a-zA-Z0-9\-/]*)(?:\{\{secretKey\}\})(?:[a-zA-Z0-9\-/]*)$/).test(val), {
|
||||
.refine((val) => !val || new RE2(/^(?:[a-zA-Z0-9_\-/]*)(?:\{\{secretKey\}\})(?:[a-zA-Z0-9_\-/]*)$/).test(val), {
|
||||
message:
|
||||
"Key schema must include one {{secretKey}} and only contain letters, numbers, dashes, slashes, and the {{secretKey}} placeholder."
|
||||
"Key schema must include one {{secretKey}} and only contain letters, numbers, dashes, underscores, slashes, and the {{secretKey}} placeholder."
|
||||
})
|
||||
.describe(SecretSyncs.SYNC_OPTIONS(destination).keySchema),
|
||||
disableSecretDeletion: z.boolean().optional().describe(SecretSyncs.SYNC_OPTIONS(destination).disableSecretDeletion)
|
||||
|
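To make the widened pattern concrete, a hedged sketch of which key schemas now pass (the `re2` import is an assumption matching the binding the schema file already uses):

```typescript
import RE2 from "re2"; // assumed import for the RE2 binding used above

const pattern = new RE2(/^(?:[a-zA-Z0-9_\-/]*)(?:\{\{secretKey\}\})(?:[a-zA-Z0-9_\-/]*)$/);

console.log(pattern.test("APP_{{secretKey}}")); // true, underscores are now allowed
console.log(pattern.test("infra/{{secretKey}}-prod")); // true, slashes and dashes as before
console.log(pattern.test("APP.{{secretKey}}")); // false, dots are still rejected
```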
@@ -257,8 +257,8 @@ export const superAdminServiceFactory = ({
|
||||
const adminSignUp = async ({
|
||||
lastName,
|
||||
firstName,
|
||||
salt,
|
||||
email,
|
||||
salt,
|
||||
password,
|
||||
verifier,
|
||||
publicKey,
|
||||
@@ -272,7 +272,8 @@ export const superAdminServiceFactory = ({
|
||||
userAgent
|
||||
}: TAdminSignUpDTO) => {
|
||||
const appCfg = getConfig();
|
||||
const existingUser = await userDAL.findOne({ email });
|
||||
const sanitizedEmail = email.trim().toLowerCase();
|
||||
const existingUser = await userDAL.findOne({ username: sanitizedEmail });
|
||||
if (existingUser) throw new BadRequestError({ name: "Admin sign up", message: "User already exists" });
|
||||
|
||||
const privateKey = await getUserPrivateKey(password, {
|
||||
@@ -292,8 +293,8 @@ export const superAdminServiceFactory = ({
|
||||
{
|
||||
firstName,
|
||||
lastName,
|
||||
username: email,
|
||||
email,
|
||||
username: sanitizedEmail,
|
||||
email: sanitizedEmail,
|
||||
superAdmin: true,
|
||||
isGhost: false,
|
||||
isAccepted: true,
|
||||
@@ -348,12 +349,13 @@ export const superAdminServiceFactory = ({
|
||||
|
||||
const bootstrapInstance = async ({ email, password, organizationName }: TAdminBootstrapInstanceDTO) => {
|
||||
const appCfg = getConfig();
|
||||
const sanitizedEmail = email.trim().toLowerCase();
|
||||
const serverCfg = await serverCfgDAL.findById(ADMIN_CONFIG_DB_UUID);
|
||||
if (serverCfg?.initialized) {
|
||||
throw new BadRequestError({ message: "Instance has already been set up" });
|
||||
}
|
||||
|
||||
const existingUser = await userDAL.findOne({ email });
|
||||
const existingUser = await userDAL.findOne({ email: sanitizedEmail });
|
||||
if (existingUser) throw new BadRequestError({ name: "Instance initialization", message: "User already exists" });
|
||||
|
||||
const userInfo = await userDAL.transaction(async (tx) => {
|
||||
@@ -361,8 +363,8 @@ export const superAdminServiceFactory = ({
|
||||
{
|
||||
firstName: "Admin",
|
||||
lastName: "User",
|
||||
username: email,
|
||||
email,
|
||||
username: sanitizedEmail,
|
||||
email: sanitizedEmail,
|
||||
superAdmin: true,
|
||||
isGhost: false,
|
||||
isAccepted: true,
|
||||
@@ -372,7 +374,7 @@ export const superAdminServiceFactory = ({
|
||||
tx
|
||||
);
|
||||
const { tag, encoding, ciphertext, iv } = infisicalSymmetricEncypt(password);
|
||||
const encKeys = await generateUserSrpKeys(email, password);
|
||||
const encKeys = await generateUserSrpKeys(sanitizedEmail, password);
|
||||
|
||||
const userEnc = await userDAL.createUserEncryption(
|
||||
{
|
||||
|
@@ -8,16 +8,18 @@ import {
|
||||
TUserEncryptionKeys,
|
||||
TUserEncryptionKeysInsert,
|
||||
TUserEncryptionKeysUpdate,
|
||||
TUsers
|
||||
TUsers,
|
||||
UsersSchema
|
||||
} from "@app/db/schemas";
|
||||
import { DatabaseError } from "@app/lib/errors";
|
||||
import { ormify, selectAllTableCols } from "@app/lib/knex";
|
||||
import { ormify, selectAllTableCols, sqlNestRelationships } from "@app/lib/knex";
|
||||
|
||||
export type TUserDALFactory = ReturnType<typeof userDALFactory>;
|
||||
|
||||
export const userDALFactory = (db: TDbClient) => {
|
||||
const userOrm = ormify(db, TableName.Users);
|
||||
const findUserByUsername = async (username: string, tx?: Knex) => userOrm.findOne({ username }, tx);
|
||||
const findUserByUsername = async (username: string, tx?: Knex) =>
|
||||
(tx || db)(TableName.Users).whereRaw('lower("username") = :username', { username: username.toLowerCase() });
|
||||
|
||||
const getUsersByFilter = async ({
|
||||
limit,
|
||||
@@ -41,7 +43,7 @@ export const userDALFactory = (db: TDbClient) => {
|
||||
.whereILike("email", `%${searchTerm}%`)
|
||||
.orWhereILike("firstName", `%${searchTerm}%`)
|
||||
.orWhereILike("lastName", `%${searchTerm}%`)
|
||||
.orWhereLike("username", `%${searchTerm}%`);
|
||||
.orWhereRaw('lower("username") like ?', `%${searchTerm}%`);
|
||||
});
|
||||
}
|
||||
|
||||
@@ -65,12 +67,11 @@ export const userDALFactory = (db: TDbClient) => {
|
||||
try {
|
||||
return await db
|
||||
.replicaNode()(TableName.Users)
|
||||
.whereRaw('lower("username") = :username', { username: username.toLowerCase() })
|
||||
.where({
|
||||
username,
|
||||
isGhost: false
|
||||
})
|
||||
.join(TableName.UserEncryptionKey, `${TableName.Users}.id`, `${TableName.UserEncryptionKey}.userId`)
|
||||
.first();
|
||||
.join(TableName.UserEncryptionKey, `${TableName.Users}.id`, `${TableName.UserEncryptionKey}.userId`);
|
||||
} catch (error) {
|
||||
throw new DatabaseError({ error, name: "Find user enc by email" });
|
||||
}
|
||||
@@ -168,6 +169,38 @@ export const userDALFactory = (db: TDbClient) => {
|
||||
}
|
||||
};
|
||||
|
||||
const findAllMyAccounts = async (email: string) => {
|
||||
try {
|
||||
const doc = await db(TableName.Users)
|
||||
.where({ email })
|
||||
.leftJoin(TableName.OrgMembership, `${TableName.OrgMembership}.userId`, `${TableName.Users}.id`)
|
||||
.leftJoin(TableName.Organization, `${TableName.Organization}.id`, `${TableName.OrgMembership}.orgId`)
|
||||
.select(selectAllTableCols(TableName.Users))
|
||||
.select(
|
||||
db.ref("name").withSchema(TableName.Organization).as("orgName"),
|
||||
db.ref("slug").withSchema(TableName.Organization).as("orgSlug")
|
||||
);
|
||||
const formattedDoc = sqlNestRelationships({
|
||||
data: doc,
|
||||
key: "id",
|
||||
parentMapper: (el) => UsersSchema.parse(el),
|
||||
childrenMapper: [
|
||||
{
|
||||
key: "orgSlug",
|
||||
label: "organizations" as const,
|
||||
mapper: ({ orgSlug, orgName }) => ({
|
||||
slug: orgSlug,
|
||||
name: orgName
|
||||
})
|
||||
}
|
||||
]
|
||||
});
|
||||
return formattedDoc;
|
||||
} catch (error) {
|
||||
throw new DatabaseError({ error, name: "Upsert user enc key" });
|
||||
}
|
||||
};
|
||||
|
||||
// USER ACTION FUNCTIONS
|
||||
// ---------------------
|
||||
const findOneUserAction = (filter: TUserActionsUpdate, tx?: Knex) => {
|
||||
@@ -200,6 +233,7 @@ export const userDALFactory = (db: TDbClient) => {
|
||||
createUserEncryption,
|
||||
findOneUserAction,
|
||||
createUserAction,
|
||||
getUsersByFilter
|
||||
getUsersByFilter,
|
||||
findAllMyAccounts
|
||||
};
|
||||
};
|
||||
|
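For orientation, a sketch of the shape `findAllMyAccounts` produces once `sqlNestRelationships` folds the joined organization rows into each user; all field values below are illustrative.

```typescript
// Illustrative result only; real fields come from UsersSchema plus the nested
// "organizations" array keyed on orgSlug.
const exampleAccount = {
  id: "user_1",
  username: "jane@acme.com",
  email: "Jane@Acme.com",
  isEmailVerified: true,
  organizations: [
    { slug: "acme", name: "Acme Inc" },
    { slug: "acme-staging", name: "Acme Staging" }
  ]
};
```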
@@ -9,7 +9,6 @@ import { TAuthTokenServiceFactory } from "@app/services/auth-token/auth-token-se
|
||||
import { TokenType } from "@app/services/auth-token/auth-token-types";
|
||||
import { TOrgMembershipDALFactory } from "@app/services/org-membership/org-membership-dal";
|
||||
import { SmtpTemplates, TSmtpService } from "@app/services/smtp/smtp-service";
|
||||
import { TUserAliasDALFactory } from "@app/services/user-alias/user-alias-dal";
|
||||
|
||||
import { AuthMethod } from "../auth/auth-type";
|
||||
import { TGroupProjectDALFactory } from "../group-project/group-project-dal";
|
||||
@@ -21,7 +20,7 @@ type TUserServiceFactoryDep = {
|
||||
userDAL: Pick<
|
||||
TUserDALFactory,
|
||||
| "find"
|
||||
| "findOne"
|
||||
| "findUserByUsername"
|
||||
| "findById"
|
||||
| "transaction"
|
||||
| "updateById"
|
||||
@@ -31,8 +30,8 @@ type TUserServiceFactoryDep = {
|
||||
| "createUserAction"
|
||||
| "findUserEncKeyByUserId"
|
||||
| "delete"
|
||||
| "findAllMyAccounts"
|
||||
>;
|
||||
userAliasDAL: Pick<TUserAliasDALFactory, "find" | "insertMany">;
|
||||
groupProjectDAL: Pick<TGroupProjectDALFactory, "findByUserId">;
|
||||
orgMembershipDAL: Pick<TOrgMembershipDALFactory, "find" | "insertMany" | "findOne" | "updateById">;
|
||||
tokenService: Pick<TAuthTokenServiceFactory, "createTokenForUser" | "validateTokenForUser">;
|
||||
@@ -45,7 +44,6 @@ export type TUserServiceFactory = ReturnType<typeof userServiceFactory>;
|
||||
|
||||
export const userServiceFactory = ({
|
||||
userDAL,
|
||||
userAliasDAL,
|
||||
orgMembershipDAL,
|
||||
projectMembershipDAL,
|
||||
groupProjectDAL,
|
||||
@@ -54,8 +52,11 @@ export const userServiceFactory = ({
|
||||
permissionService
|
||||
}: TUserServiceFactoryDep) => {
|
||||
const sendEmailVerificationCode = async (username: string) => {
|
||||
const user = await userDAL.findOne({ username });
|
||||
// akhilmhdh: case sensitive email resolution
|
||||
const users = await userDAL.findUserByUsername(username);
|
||||
const user = users?.length > 1 ? users.find((el) => el.username === username) : users?.[0];
|
||||
if (!user) throw new NotFoundError({ name: `User with username '${username}' not found` });
|
||||
|
||||
if (!user.email)
|
||||
throw new BadRequestError({ name: "Failed to send email verification code due to no email on user" });
|
||||
if (user.isEmailVerified)
|
||||
@@ -77,7 +78,10 @@ export const userServiceFactory = ({
|
||||
};
|
||||
|
||||
const verifyEmailVerificationCode = async (username: string, code: string) => {
|
||||
const user = await userDAL.findOne({ username });
|
||||
// akhilmhdh: case sensitive email resolution
|
||||
const usersByusername = await userDAL.findUserByUsername(username);
|
||||
const user =
|
||||
usersByusername?.length > 1 ? usersByusername.find((el) => el.username === username) : usersByusername?.[0];
|
||||
if (!user) throw new NotFoundError({ name: `User with username '${username}' not found` });
|
||||
if (!user.email)
|
||||
throw new BadRequestError({ name: "Failed to verify email verification code due to no email on user" });
|
||||
@@ -90,84 +94,8 @@ export const userServiceFactory = ({
|
||||
code
|
||||
});
|
||||
|
||||
const { email } = user;
|
||||
|
||||
await userDAL.transaction(async (tx) => {
|
||||
await userDAL.updateById(
|
||||
user.id,
|
||||
{
|
||||
isEmailVerified: true
|
||||
},
|
||||
tx
|
||||
);
|
||||
|
||||
// check if there are verified users with the same email.
|
||||
const users = await userDAL.find(
|
||||
{
|
||||
email,
|
||||
isEmailVerified: true
|
||||
},
|
||||
{ tx }
|
||||
);
|
||||
|
||||
if (users.length > 1) {
|
||||
// merge users
|
||||
const mergeUser = users.find((u) => u.id !== user.id);
|
||||
if (!mergeUser) throw new NotFoundError({ name: "Failed to find merge user" });
|
||||
|
||||
const mergeUserOrgMembershipSet = new Set(
|
||||
(await orgMembershipDAL.find({ userId: mergeUser.id }, { tx })).map((m) => m.orgId)
|
||||
);
|
||||
const myOrgMemberships = (await orgMembershipDAL.find({ userId: user.id }, { tx })).filter(
|
||||
(m) => !mergeUserOrgMembershipSet.has(m.orgId)
|
||||
);
|
||||
|
||||
const userAliases = await userAliasDAL.find(
|
||||
{
|
||||
userId: user.id
|
||||
},
|
||||
{ tx }
|
||||
);
|
||||
await userDAL.deleteById(user.id, tx);
|
||||
|
||||
if (myOrgMemberships.length) {
|
||||
await orgMembershipDAL.insertMany(
|
||||
myOrgMemberships.map((orgMembership) => ({
|
||||
...orgMembership,
|
||||
userId: mergeUser.id
|
||||
})),
|
||||
tx
|
||||
);
|
||||
}
|
||||
|
||||
if (userAliases.length) {
|
||||
await userAliasDAL.insertMany(
|
||||
userAliases.map((userAlias) => ({
|
||||
...userAlias,
|
||||
userId: mergeUser.id
|
||||
})),
|
||||
tx
|
||||
);
|
||||
}
|
||||
} else {
|
||||
await userDAL.delete(
|
||||
{
|
||||
email,
|
||||
isAccepted: false,
|
||||
isEmailVerified: false
|
||||
},
|
||||
tx
|
||||
);
|
||||
|
||||
// update current user's username to [email]
|
||||
await userDAL.updateById(
|
||||
user.id,
|
||||
{
|
||||
username: email
|
||||
},
|
||||
tx
|
||||
);
|
||||
}
|
||||
await userDAL.updateById(user.id, {
|
||||
isEmailVerified: true
|
||||
});
|
||||
};
|
||||
|
||||
@@ -212,6 +140,23 @@ export const userServiceFactory = ({
|
||||
return updatedUser;
|
||||
};
|
||||
|
||||
const getAllMyAccounts = async (email: string, userId: string) => {
|
||||
const users = await userDAL.findAllMyAccounts(email);
|
||||
return users?.map((el) => ({ ...el, isMyAccount: el.id === userId }));
|
||||
};
|
||||
|
||||
const removeMyDuplicateAccounts = async (email: string, userId: string) => {
|
||||
const users = await userDAL.find({ email });
|
||||
const duplicatedAccounts = users?.filter((el) => el.id !== userId);
|
||||
const myAccount = users?.find((el) => el.id === userId);
|
||||
if (duplicatedAccounts.length && myAccount) {
|
||||
await userDAL.transaction(async (tx) => {
|
||||
await userDAL.delete({ $in: { id: duplicatedAccounts?.map((el) => el.id) } }, tx);
|
||||
await userDAL.updateById(userId, { username: (myAccount.email || myAccount.username).toLowerCase() }, tx);
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
const getMe = async (userId: string) => {
|
||||
const user = await userDAL.findUserEncKeyByUserId(userId);
|
||||
if (!user) throw new NotFoundError({ message: `User with ID '${userId}' not found`, name: "GetMe" });
|
||||
@@ -313,9 +258,11 @@ export const userServiceFactory = ({
|
||||
};
|
||||
|
||||
const listUserGroups = async ({ username, actorOrgId, actor, actorId, actorAuthMethod }: TListUserGroupsDTO) => {
|
||||
const user = await userDAL.findOne({
|
||||
username
|
||||
});
|
||||
// akhilmhdh: case sensitive email resolution
|
||||
const usersByusername = await userDAL.findUserByUsername(username);
|
||||
const user =
|
||||
usersByusername?.length > 1 ? usersByusername.find((el) => el.username === username) : usersByusername?.[0];
|
||||
if (!user) throw new NotFoundError({ name: `User with username '${username}' not found` });
|
||||
|
||||
// This makes it so the user can always read information about themselves, but no one else if they don't have the Members Read permission.
|
||||
if (user.id !== actorId) {
|
||||
@@ -346,7 +293,9 @@ export const userServiceFactory = ({
|
||||
getUserAction,
|
||||
unlockUser,
|
||||
getUserPrivateKey,
|
||||
getAllMyAccounts,
|
||||
getUserProjectFavorites,
|
||||
removeMyDuplicateAccounts,
|
||||
updateUserProjectFavorites
|
||||
};
|
||||
};
|
||||
|
@@ -884,6 +884,12 @@ func (tm *AgentManager) MonitorSecretChanges(secretTemplate Template, templateId
|
||||
|
||||
if err != nil {
|
||||
log.Error().Msgf("unable to process template because %v", err)
|
||||
|
||||
// case: if exit-after-auth is true, it should exit the agent once an error on secret fetching occurs with the appropriate exit code (1)
|
||||
// previous behavior would exit after 25 sec with status code 0, even if this step errors
|
||||
if tm.exitAfterAuth {
|
||||
os.Exit(1)
|
||||
}
|
||||
} else {
|
||||
if (existingEtag != currentEtag) || firstRun {
|
||||
|
||||
|
@@ -6,9 +6,14 @@ description: "The guide to spending money at Infisical."

Fairly frequently, you might run into situations when you need to spend company money.

<Note>
  Please spend money in a way that you think is in the best interest of the company.
</Note>

# Expensing Meals

As a perk of working at Infisical, we cover some of your meal expenses.

HQ team members: meals and unlimited snacks are provided on-site at no cost.

Remote team members: a food stipend is allocated based on location.

# Trivial expenses

@@ -18,6 +23,10 @@ This means expenses that are:
1. Non-recurring AND less than $75/month in total.
2. Recurring AND less than $20/month.

<Note>
  Please spend money in a way that you think is in the best interest of the company.
</Note>

## Saving receipts

Make sure you keep copies for all receipts. If you expense something on a company card and cannot provide a receipt, this may be deducted from your pay.

@@ -158,14 +158,4 @@ Once authenticated, the Gateway establishes a secure connection with Infisical t
To confirm your Gateway is working, check the deployment status by looking for the message **"Gateway started successfully"** in the Gateway logs. This indicates the Gateway is running properly. Next, verify its registration by opening your Infisical dashboard, navigating to **Organization Access Control**, and selecting the **Gateways** tab. Your newly deployed Gateway should appear in the list.

</Step>

<Step title="Link Gateway to Projects">
To enable Infisical features like dynamic secrets or secret rotation to access private resources through the Gateway, you need to link the Gateway to the relevant projects.

Start by accessing the **Gateway settings** then locate the Gateway in the list, click the options menu (**:**), and select **Edit Details**.

In the edit modal that appears, choose the projects you want the Gateway to access and click **Save** to confirm your selections.

Once added to a project, the Gateway becomes available for use by any feature that supports Gateways within that project.
</Step>
</Steps>

@@ -38,7 +38,7 @@ Enabling HSM encryption has a set of key benefits:
### Requirements
- An Infisical instance with a version number that is equal to or greater than `v0.91.0`.
- If you are using Docker, your instance must be using the `infisical/infisical-fips` image.
- An HSM device from a provider such as [Thales Luna HSM](https://cpl.thalesgroup.com/encryption/data-protection-on-demand/services/luna-cloud-hsm), [AWS CloudHSM](https://aws.amazon.com/cloudhsm/), or others.
- An HSM device from a provider such as [Thales Luna HSM](https://cpl.thalesgroup.com/encryption/data-protection-on-demand/services/luna-cloud-hsm), [AWS CloudHSM](https://aws.amazon.com/cloudhsm/), [Fortanix HSM](https://www.fortanix.com/platform/data-security-manager), or others.

### FIPS Compliance
@@ -53,14 +53,14 @@ For organizations that work with US government agencies, FIPS compliance is almo

<Steps>
<Step title="Setting up an HSM Device">
To set up HSM encryption, you need to configure an HSM provider and HSM key. The HSM provider is used to connect to the HSM device, and the HSM key is used to encrypt Infisical's KMS keys. We recommend using a Cloud HSM provider such as [Thales Luna HSM](https://cpl.thalesgroup.com/encryption/data-protection-on-demand/services/luna-cloud-hsm) or [AWS CloudHSM](https://aws.amazon.com/cloudhsm/).
To set up HSM encryption, you need to configure an HSM provider and HSM key. The HSM provider is used to connect to the HSM device, and the HSM key is used to encrypt Infisical's KMS keys. We recommend using a Cloud HSM provider such as [Thales Luna HSM](https://cpl.thalesgroup.com/encryption/data-protection-on-demand/services/luna-cloud-hsm), [AWS CloudHSM](https://aws.amazon.com/cloudhsm/), or [Fortanix HSM](https://www.fortanix.com/platform/data-security-manager).

You need to follow the instructions provided by the HSM provider to set up the HSM device. Once the HSM device is set up, the HSM device can be used within Infisical.

After setting up the HSM from your provider, you will have a set of files that you can use to access the HSM. These files need to be present on the machine where Infisical is running.
If you are using containers, you will need to mount the folder where these files are stored as a volume in the container.

The setup process for an HSM device varies depending on the provider. We have created a guide for Thales Luna Cloud HSM, which you can find below.
The setup process for an HSM device varies depending on the provider. We have created guides for Thales Luna Cloud HSM and Fortanix HSM, which you can find below.

</Step>
<Step title="Configure HSM on Infisical">
@@ -255,6 +255,78 @@ For organizations that work with US government agencies, FIPS compliance is almo
</Steps>
After following these steps, your Docker setup will be ready to use HSM encryption.
</Tab>
<Tab title="Fortanix HSM">
|
||||
<Steps>
|
||||
<Step title="Set up Fortanix HSM">
|
||||
To use Fortanix HSM with Infisical, you need to:
|
||||
|
||||
1. Create an App in Fortanix:
|
||||
- Set Interface value to be PKCS#11
|
||||
- Select API key as authentication method
|
||||
- Assign app to a group
|
||||
|
||||

|
||||
|
||||
2. Take note of the domain (e.g., apac.smartkey.io). You will need this to set up the configuration file for the Fortanix client.
|
||||
</Step>
|
||||
|
||||
<Step title="Install PKCS11 Library">
|
||||
The easiest approach would be to download the `.so` file for Linux directly from the [Fortanix PKCS#11 installation page](https://fortanix.zendesk.com/hc/en-us/sections/4408769080724-PKCS-11).
|
||||
|
||||
Create a configuration file named `pkcs11.conf` with the following content:
|
||||
|
||||
```
|
||||
api_endpoint = "https://apac.smartkey.io"
|
||||
prevent_duplicate_opaque_objects = true
|
||||
retry_timeout_millis = 60000
|
||||
```
|
||||
|
||||
Note: Replace `apac.smartkey.io` with your actual Fortanix domain if different. For more details about the configuration file format and additional options, refer to the [Fortanix PKCS#11 Configuration File Documentation](https://support.fortanix.com/docs/clients-pkcs11-library#511-configuration-file-format).
|
||||
</Step>
|
||||
|
||||
<Step title="Create a directory for Fortanix files">
|
||||
Create a directory to store the Fortanix library and configuration file:
|
||||
|
||||
```bash
|
||||
mkdir -p /etc/fortanix-hsm
|
||||
```
|
||||
|
||||
Copy the downloaded `.so` file and the `pkcs11.conf` file to this directory:
|
||||
|
||||
```bash
|
||||
cp /path/to/fortanix_pkcs11_4.37.2554.so /etc/fortanix-hsm/
|
||||
cp /path/to/pkcs11.conf /etc/fortanix-hsm/
|
||||
```
|
||||
</Step>
|
||||
|
||||
<Step title="Run Docker">
|
||||
Run Docker with Fortanix HSM by mounting the directory and setting the required environment variables:
|
||||
|
||||
```bash
|
||||
docker run -p 80:8080 \
|
||||
-v /etc/fortanix-hsm:/etc/fortanix-hsm \
|
||||
-e HSM_LIB_PATH="/etc/fortanix-hsm/fortanix_pkcs11_4.37.2554.so" \ # Path to the PKCS#11 library
|
||||
-e HSM_PIN="MDE3YWUxO..." \ # Your Fortanix app API key used for authentication
|
||||
-e HSM_SLOT=0 \ # Slot value (arbitrary for Fortanix HSM)
|
||||
-e HSM_KEY_LABEL="hsm-key-label" \ # Label to identify the encryption key in the HSM
|
||||
-e FORTANIX_PKCS11_CONFIG_PATH="/etc/fortanix-hsm/pkcs11.conf" \ # Path to Fortanix configuration file
|
||||
|
||||
# The rest are unrelated to HSM setup...
|
||||
-e ENCRYPTION_KEY="<>" \
|
||||
-e AUTH_SECRET="<>" \
|
||||
-e DB_CONNECTION_URI="<>" \
|
||||
-e REDIS_URL="<>" \
|
||||
-e SITE_URL="<>" \
|
||||
infisical/infisical-fips:<version> # Replace <version> with the version you want to use
|
||||
```
|
||||
|
||||
<Warning>
|
||||
Note: Fortanix HSM integration only works for AMD64 CPU architectures.
|
||||
</Warning>
|
||||
</Step>
|
||||
</Steps>
|
||||
After following these steps, your Docker setup will be ready to use Fortanix HSM encryption.
|
||||
</Tab>
|
||||
</Tabs>
|
||||
</Tab>
|
||||
<Tab title="Kubernetes">
|
||||
@@ -569,6 +641,173 @@ For organizations that work with US government agencies, FIPS compliance is almo
</Steps>
After following these steps, your Kubernetes setup will be ready to use HSM encryption.
</Tab>
<Tab title="Fortanix HSM">
<Steps>
<Step title="Set up Fortanix HSM">
First, you need to set up Fortanix HSM by:

1. Creating an App in Fortanix:
- Set Interface value to be PKCS#11
- Select API key as authentication method
- Assign app to a group

![Fortanix App Creation](/images/platform/kms/hsm/fortanix-app-creation.png)

2. Take note of the domain (e.g., apac.smartkey.io). You will need this when setting up the configuration file.
</Step>

<Step title="Create configuration files">
Create a directory to store the Fortanix configuration files:

```bash
mkdir -p /etc/fortanix-hsm
```

Download the Fortanix PKCS#11 library for Linux from the [Fortanix PKCS#11 installation page](https://fortanix.zendesk.com/hc/en-us/sections/4408769080724-PKCS-11).

Create a configuration file named `pkcs11.conf` with the following content:

```
api_endpoint = "https://apac.smartkey.io"
prevent_duplicate_opaque_objects = true
retry_timeout_millis = 60000
```

Note: Replace `apac.smartkey.io` with your actual Fortanix domain if different.
</Step>

<Step title="Creating a Persistent Volume Claim (PVC)">
Create a Persistent Volume Claim to store the Fortanix files:

```bash
kubectl apply -f - <<EOF
apiVersion: v1
kind: PersistentVolumeClaim
metadata:
  name: fortanix-hsm-pvc
spec:
  accessModes:
    - ReadWriteOnce
  resources:
    requests:
      storage: 100Mi
EOF
```

Create a temporary pod to upload the files:

```bash
kubectl apply -f - <<EOF
apiVersion: v1
kind: Pod
metadata:
  name: fortanix-setup-pod
spec:
  containers:
    - name: setup
      image: busybox
      command: ["/bin/sh", "-c", "sleep 3600"]
      volumeMounts:
        - name: fortanix-data
          mountPath: /data
  volumes:
    - name: fortanix-data
      persistentVolumeClaim:
        claimName: fortanix-hsm-pvc
EOF
```

Ensure the pod is running:

```bash
kubectl wait --for=condition=Ready pod/fortanix-setup-pod --timeout=60s
```

Copy the Fortanix files to the PVC:
|
||||
|
||||
```bash
|
||||
kubectl exec fortanix-setup-pod -- mkdir -p /data/
|
||||
kubectl cp /etc/fortanix-hsm/fortanix_pkcs11_4.37.2554.so fortanix-setup-pod:/data/
|
||||
kubectl cp /etc/fortanix-hsm/pkcs11.conf fortanix-setup-pod:/data/
|
||||
kubectl exec fortanix-setup-pod -- chmod -R 755 /data/
|
||||
```
|
||||
|
||||
Delete the temporary pod:
|
||||
|
||||
```bash
|
||||
kubectl delete pod fortanix-setup-pod
|
||||
```
|
||||
</Step>

<Step title="Update the Kubernetes Secret">
Update your Kubernetes secret with the Fortanix HSM environment variables:

```yaml
apiVersion: v1
kind: Secret
metadata:
  name: infisical-secrets
type: Opaque
stringData:
  # ... Other environment variables ...
  HSM_LIB_PATH: "/etc/fortanix-hsm/fortanix_pkcs11_4.37.2554.so" # Path to the PKCS#11 library in the container
  HSM_PIN: "<your-fortanix-api-key>" # Your Fortanix app API key used for authentication
  HSM_SLOT: "0" # Slot value (can be set to 0 for Fortanix HSM as it's arbitrary)
  HSM_KEY_LABEL: "hsm-key-label" # Label to identify the encryption key in the HSM
  FORTANIX_PKCS11_CONFIG_PATH: "/etc/fortanix-hsm/pkcs11.conf" # Path to the Fortanix configuration file
```

Apply the updated secret:

```bash
kubectl apply -f ./secret-file-name.yaml
```
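
If you want to double-check that the new keys are present in the secret, you can decode one of them (the key name matches the `stringData` entries above):

```bash
kubectl get secret infisical-secrets -o jsonpath='{.data.HSM_LIB_PATH}' | base64 -d
```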
</Step>

<Step title="Update Helm Values">
Update your Helm values to use the FIPS-compliant image and mount the Fortanix HSM files:

```yaml
# ... The rest of the values.yaml file ...

image:
  repository: infisical/infisical-fips # Must use "infisical/infisical-fips"
  tag: "v0.117.1-postgres"
  pullPolicy: IfNotPresent

extraVolumeMounts:
  - name: fortanix-data
    mountPath: /etc/fortanix-hsm # The path where the Fortanix files will be available

extraVolumes:
  - name: fortanix-data
    persistentVolumeClaim:
      claimName: fortanix-hsm-pvc

# ... The rest of the values.yaml file ...
```

<Warning>
  Note: Fortanix HSM integration only works on AMD64 CPU architectures.
</Warning>
</Step>

<Step title="Upgrade and Restart">
Upgrade the Helm chart with the new values:

```bash
helm upgrade --install infisical infisical-helm-charts/infisical-standalone --values /path/to/values.yaml
```

Restart the deployment:

```bash
kubectl rollout restart deployment/infisical-infisical
```
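
You can then watch the rollout and confirm the new pods come up cleanly; the deployment name below matches the one used in the restart command, so adjust it if your release is named differently:

```bash
kubectl rollout status deployment/infisical-infisical
kubectl logs deployment/infisical-infisical --tail=50
```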
</Step>
</Steps>

After following these steps, your Kubernetes setup will be ready to use Fortanix HSM encryption.
</Tab>
</Tabs>
</Tab>
</Tabs>

@@ -20,6 +20,7 @@ The **Settings** page lets you manage information about your organization includ
- **Slug**: The slug of your organization.
- **Default Organization Member Role**: The role assigned to users when joining your organization unless otherwise specified.
- **Incident Contacts**: Emails that should be alerted if anything abnormal is detected within the organization.
- **Enabled Products**: Products that are enabled for your organization. This setting strictly affects the sidebar UI; disabling a product does not disable its API or routes.

![]()

@@ -43,7 +44,7 @@ In the **Organization Roles** tab, you can edit current or create new custom rol

<Info>
  Note that Role-Based Access Control (RBAC) is partly a paid feature.

  Infisical provides immutable roles like `admin`, `member`, etc.
  at the organization and project level for free.

@@ -28,7 +28,7 @@ description: "Learn how to automatically rotate LDAP passwords."
3. Select the **LDAP Connection** to use and configure the rotation behavior. Then click **Next**.
![]()

- **LDAP Connection** - the connection that will perform the rotation of the configured DN's password.
- **LDAP Connection** - the connection that will perform the rotation of the configured principal's password.
  <Note>
    LDAP Password Rotations require an LDAP Connection that uses the ldaps:// protocol.
  </Note>
@@ -40,13 +40,20 @@ description: "Learn how to automatically rotate LDAP passwords."
</Note>

4. Specify the Distinguished Name (DN) of the principal whose password you want to rotate and configure the password requirements. Then click **Next**.
4. Configure the required Parameters for your rotation. Then click **Next**.
![]()

- **Rotation Method** - The method to use when rotating the target principal's password.
  - **Connection Principal** - Infisical will use the LDAP Connection's binding principal to rotate the target principal's password.
  - **Target Principal** - Infisical will bind with the target principal to rotate its own password.
- **DN/UPN** - The Distinguished Name (DN), or User Principal Name (UPN) if supported, of the principal whose password you want to rotate.
- **Password** - The target principal's password (if **Rotation Method** is set to **Target Principal**).
- **Password Requirements** - The constraints to apply when generating new passwords.

5. Specify the secret names that the client credentials should be mapped to. Then click **Next**.
![]()

- **DN** - the name of the secret that the principal's Distinguished Name (DN) will be mapped to.
- **DN/UPN** - the name of the secret that the principal's Distinguished Name (DN) or User Principal Name (UPN) will be mapped to.
- **Password** - the name of the secret that the rotated password will be mapped to.

6. Give your rotation a name and description (optional). Then click **Next**.
@@ -85,6 +92,7 @@ description: "Learn how to automatically rotate LDAP passwords."
    "minutes": 0
  },
  "parameters": {
    "rotationMethod": "connection-principal",
    "dn": "CN=John,CN=Users,DC=example,DC=com",
    "passwordRequirements": {
      "length": 48,
@@ -154,6 +162,7 @@ description: "Learn how to automatically rotate LDAP passwords."
  "lastRotationMessage": null,
  "type": "ldap-password",
  "parameters": {
    "rotationMethod": "connection-principal",
    "dn": "CN=John,CN=Users,DC=example,DC=com",
    "passwordRequirements": {
      "length": 48,

BIN docs/images/platform/kms/hsm/fortanix-hsm-setup.png (new file, 369 KiB)

@@ -10,7 +10,7 @@ Infisical supports the use of [Simple Binding](https://ldap.com/the-ldap-bind-op

You will need the following information to establish an LDAP connection:

- **LDAP URL** - The LDAP/LDAPS URL to connect to (e.g., ldap://domain-or-ip:389 or ldaps://domain-or-ip:636)
- **Binding DN** - The Distinguished Name (DN) of the principal to bind with (e.g., 'CN=John,CN=Users,DC=example,DC=com')
- **Binding DN/UPN** - The Distinguished Name (DN), or User Principal Name (UPN) if supported, of the principal to bind with (e.g., 'CN=John,CN=Users,DC=example,DC=com')
- **Binding Password** - The password to bind with for authentication
- **CA Certificate** - The SSL certificate (PEM format) to use for secure connection when using ldaps:// with a self-signed certificate

@@ -1,6 +1,6 @@
---
title: "Kubernetes CSI"
description: "How to use Infisical to inject secrets directly into Kubernetes pods."
description: "How to use the Infisical Kubernetes CSI provider to inject secrets directly into Kubernetes pods."
---

## Overview
@@ -15,9 +15,9 @@ flowchart LR
    CSP --> CSD(Secrets Store CSI Driver)
  end

  subgraph Application
  subgraph Pod
    CSD --> V(Volume)
    V <--> P(Pod)
    V <--> P(Application)
  end
```

docs/integrations/platforms/kubernetes-injector.mdx (new file, 317 lines)
@@ -0,0 +1,317 @@
---
title: "Kubernetes Agent Injector"
description: "How to use the Infisical Kubernetes Agent Injector to inject secrets directly into Kubernetes pods."
---

## Overview

The Infisical Kubernetes Agent Injector allows you to inject secrets directly into your Kubernetes pods. The injector will create an [Infisical Agent](/integrations/platforms/infisical-agent) container within your pod that syncs secrets from Infisical into a shared volume mount within your pod.

The Infisical Agent Injector will patch and modify your pod's deployment to contain an [Infisical Agent](/integrations/platforms/infisical-agent) container which renders your Infisical secrets into a shared volume mount within your pod.

The Infisical Agent Injector is built on [Kubernetes Mutating Admission Webhooks](https://kubernetes.io/docs/reference/access-authn-authz/admission-controllers), and watches for `CREATE` and `UPDATE` events on pods in your cluster.
The injector is namespace-agnostic and watches pods in any namespace, but it will only patch pods that have the `org.infisical.com/inject` annotation set to `true`.

```mermaid
flowchart LR
  subgraph Secrets Management
    SS(Infisical) --> INJ(Infisical Injector)
  end

  subgraph Pod
    INJ --> INIT(Agent Init Container)
    INIT --> V(Volume)
    V <--> P(Application)
  end
```

## Install the Infisical Agent Injector

To install the Infisical Agent Injector, install our Helm charts using [Helm](https://helm.sh/):

```bash
helm repo add infisical-helm-charts 'https://dl.cloudsmith.io/public/infisical/helm-charts/helm/charts/'
helm repo update
helm install --generate-name infisical-helm-charts/infisical-agent-injector
```

After installing the Helm chart, you can verify that the injector is running and working as intended by checking the logs of the injector pod:

```bash
$ kubectl logs deployment/infisical-agent-injector
2025/05/19 14:20:05 Starting infisical-agent-injector...
2025/05/19 14:20:05 Generating self-signed certificate...
2025/05/19 14:20:06 Creating directory: /tmp/tls
2025/05/19 14:20:06 Writing cert to: /tmp/tls/tls.crt
2025/05/19 14:20:06 Writing key to: /tmp/tls/tls.key
2025/05/19 14:20:06 Starting HTTPS server on port 8585...
2025/05/19 14:20:06 Attempting to update webhook config (attempt 1)...
2025/05/19 14:20:06 Successfully updated webhook configuration with CA bundle
```
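
Because the injector registers itself as a mutating admission webhook (note the webhook configuration log line above), you can also confirm that the webhook object exists in the cluster. The exact object name depends on your Helm release, so the `grep` below is only a convenience filter:

```bash
kubectl get mutatingwebhookconfigurations | grep -i infisical
```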

## Supported annotations

The Infisical Agent Injector supports the following annotations:

<Accordion title="org.infisical.com/inject">
  The inject annotation is used to enable the injector on a pod. Set the value to `true` and the pod will be patched with an Infisical Agent container on create or update.
</Accordion>
<Accordion title="org.infisical.com/inject-mode">
  The inject mode annotation specifies the mode used to inject the secrets into the pod. Currently only `init` mode is supported.

  - `init`: The init method creates an init container for the pod that renders the secrets into a shared volume mount within the pod. The agent init container runs before any other container in the pod, including other init containers.
</Accordion>
<Accordion title="org.infisical.com/agent-config-map">
  The agent config map annotation specifies the name of the config map that contains the configuration for the injector. The config map must be in the same namespace as the pod.
</Accordion>
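
Putting the three annotations together, a pod that opts in to injection carries metadata along these lines (a minimal sketch; `name-of-config-map` is the ConfigMap described in the next section, and a complete pod example appears further below):

```yaml
metadata:
  annotations:
    org.infisical.com/inject: "true"
    org.infisical.com/inject-mode: "init"
    org.infisical.com/agent-config-map: "name-of-config-map"
```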

## ConfigMap Configuration

### Supported Fields

When you are configuring a pod to use the injector, you must create a config map in the same namespace as the pod you want to inject secrets into.
The entire config needs to be in string format and assigned to the `config.yaml` key in the config map. You can find a full example of the config at the end of this section.

<Accordion title="infisical.address">
  The address of your Infisical instance. This field is optional and will default to `https://app.infisical.com` if not provided.
</Accordion>

<Accordion title="infisical.auth.type">
  The authentication type to use to connect to Infisical. Currently only the `kubernetes` authentication type is supported.
  You can refer to our [Kubernetes Auth](/documentation/platform/identities/kubernetes-auth) documentation for more information on how to create a machine identity for Kubernetes Auth.
  Please note that the pod's default service account will be used to authenticate with Infisical.
</Accordion>

<Accordion title="infisical.auth.config.identity-id">
  The ID of the machine identity to use to connect to Infisical. This field is required if `infisical.auth.type` is set to `kubernetes`.
</Accordion>

<Accordion title="templates[]">
  An array of templates that will be rendered and injected into the pod.
</Accordion>

<Accordion title="templates[].destination-path">
  The path to inject the secrets into within the pod.
  If not specified, this will default to `/shared/infisical-secrets`. If you have multiple templates and don't provide a destination path, the destination paths will default to `/shared/infisical-secrets-1`, `/shared/infisical-secrets-2`, etc.
</Accordion>

<Accordion title="templates[].template-content">
  The content of the template to render.
  This will be rendered as a [Go Template](https://pkg.go.dev/text/template). It follows the same templating format and supports the same functions as the [Infisical Agent](/integrations/platforms/infisical-agent#quick-start-infisical-agent).
</Accordion>

### Authentication

The Infisical Agent Injector only supports Machine Identity [Kubernetes Auth](/documentation/platform/identities/kubernetes-auth) authentication at the moment.

To configure Kubernetes Auth, set the `auth.type` field to `kubernetes` and set `auth.config.identity-id` to the ID of the machine identity you wish to use for authentication.

```yaml
auth:
  type: "kubernetes"
  config:
    identity-id: "<your-infisical-machine-identity-id>"
```

### Example ConfigMap

```yaml config-map.yaml
apiVersion: v1
kind: ConfigMap
metadata:
  name: demo-config-map
data:
  config.yaml: |
    infisical:
      address: "https://app.infisical.com"
      auth:
        type: "kubernetes"
        config:
          identity-id: "<your-infisical-machine-identity-id>"
    templates:
      - destination-path: "/path/to/save/secrets/file.txt"
        template-content: |
          {{- with secret "<your-project-id>" "dev" "/" }}
          {{- range . }}
          {{ .Key }}={{ .Value }}
          {{- end }}
          {{- end }}
```

```bash
kubectl apply -f config-map.yaml
```

To use the config map in your pod, you will need to add the `org.infisical.com/agent-config-map` annotation to your pod's deployment. The value of the annotation is the name of the config map you created above.

```yaml
apiVersion: v1
kind: Pod
metadata:
  name: demo
  labels:
    app: demo
  annotations:
    org.infisical.com/inject: "true" # Set to true for the injector to patch the pod on create/update events
    org.infisical.com/inject-mode: "init" # The mode to use to inject the secrets into the pod. Currently only `init` mode is supported.
    org.infisical.com/agent-config-map: "name-of-config-map" # The name of the config map that you created above, which contains all the settings for injecting the secrets into the pod
spec:
  # ...
```

## Quick Start

In this section we'll walk through a full example of how to inject secrets into a pod using the Infisical Agent Injector.
In this example we'll create a basic nginx pod and print an Infisical secret called `API_KEY` to the container logs.

### Create secrets in Infisical

First you'll need to create the secret in Infisical.

- `API_KEY`: The API key to use for the nginx pod.

Once you've created the secret, save your project ID, environment slug, and secret path, as these will be used in the next step.

### Configuration

To use the injector you must create a config map in the same namespace as the pod you want to inject secrets into. In this example we'll create a config map in the `test-namespace` namespace.

The agent injector will authenticate with Infisical using a [Kubernetes Auth](/documentation/platform/identities/kubernetes-auth) machine identity. Please follow the [instructions](/documentation/platform/identities/kubernetes-auth) to create a machine identity configured for Kubernetes Auth.
The agent injector will use the service account token of the pod to authenticate with Infisical.

The `template-content` will be rendered as a [Go Template](https://pkg.go.dev/text/template); it follows the same templating format and supports the same functions as the [Infisical Agent](/integrations/platforms/infisical-agent#quick-start-infisical-agent).
The `destination-path` refers to the path within the pod that the secrets will be injected into. In this case we're injecting the secrets into a file called `/infisical/secrets`.

Replace `<your-project-id>` and `<your-environment-slug>` with your project ID and the environment slug of where you created your secrets in Infisical. Replace `<your-infisical-machine-identity-id>` with the ID of your machine identity configured for Kubernetes Auth.

```yaml config-map.yaml
apiVersion: v1
kind: ConfigMap
metadata:
  name: nginx-infisical-config-map
  namespace: test-namespace
data:
  config.yaml: |
    infisical:
      address: "https://app.infisical.com"
      auth:
        type: "kubernetes"
        config:
          identity-id: "<your-infisical-machine-identity-id>"
    templates:
      - destination-path: "/infisical/secrets"
        template-content: |
          {{- with secret "<your-project-id>" "<your-environment-slug>" "/" }}
          {{- range . }}
          {{ .Key }}={{ .Value }}
          {{- end }}
          {{- end }}
```

Now apply the config map:

```bash
kubectl apply -f config-map.yaml
```

### Injecting secrets into your pod

To inject secrets into your pod, you will need to add the `org.infisical.com/inject: "true"` annotation to your pod's deployment.

The `org.infisical.com/agent-config-map` annotation will point to the config map we created in the previous step. It's important that the config map is in the same namespace as the pod.

We are creating an nginx pod that reads the injected secrets file and prints the `API_KEY` value on startup.

```yaml nginx.yaml
---
apiVersion: v1
kind: Pod
metadata:
  name: nginx-pod
  namespace: test-namespace
  labels:
    app: nginx
  annotations:
    org.infisical.com/inject: "true"
    org.infisical.com/inject-mode: "init"
    org.infisical.com/agent-config-map: "nginx-infisical-config-map"
spec:
  containers:
    - name: simple-app-demo
      image: nginx:alpine
      command: ["/bin/sh", "-c"]
      args:
        - |
          export $(cat /infisical/secrets | xargs)
          echo "API_KEY is set to: $API_KEY"
          nginx -g "daemon off;"
```

### Applying the deployment

To apply the deployment, you can use the following command:

```bash
kubectl apply -f nginx.yaml
```

It may take a few minutes for the pod to be ready and for the Infisical secrets to be injected. You can check the status of the pod by running:

```bash
kubectl get pods -n test-namespace
```

### Verifying the secrets are injected

To verify the secrets are injected, you can check the rendered secrets file inside the pod:

```bash
$ kubectl exec -it pod/nginx-pod -n test-namespace -- cat /infisical/secrets

Defaulted container "simple-app-demo" out of: simple-app-demo, infisical-agent-init (init)

API_KEY=sk_api_... # The secret you created in Infisical
```

Additionally, you can check that the `API_KEY` secret is being logged to the nginx container logs:

```bash
$ kubectl logs pod/nginx-pod -n test-namespace
Defaulted container "simple-app-demo" out of: simple-app-demo, infisical-agent-init (init)
API_KEY is set to: sk_api_... # The secret you created in Infisical
```

## Troubleshooting

<Accordion title="The pod is stuck in `Init` state">
If the pod is stuck in `Init` state, it means the agent init container is failing to start or is stuck in a restart loop.
This could be due to a number of reasons, such as the machine identity not having the correct permissions, or trying to fetch secrets from a non-existent project/environment.

You can check the logs of the Infisical agent init container by running:
```bash
# For deployments
kubectl logs deployment/your-deployment-name -c infisical-agent-init -n "<namespace>"

# For pods
kubectl logs pod/your-pod-name -c infisical-agent-init -n "<namespace>"
```

You can also check the logs of the main pod container by running:
```bash
kubectl logs pod/your-pod-name -n "<namespace>"
```

When checking the logs of the agent init container, you may see something like the following:
```bash
Starting infisical agent...
11:10AM INF starting Infisical agent...
11:10AM INF Infisical instance address set to https://daniel1.tunn.dev
11:10AM INF template engine started for template 1...
11:10AM INF attempting to authenticate...
11:10AM INF new access token saved to file at path '/home/infisical/config/identity-access-token'
11:10AM ERR unable to process template because template: literalTemplate:1:9: executing "literalTemplate" at <secret "3c0d3ff6-165c-4dc9-b52c-ff3ffaedfce311111" "dev" "/">: error calling secret: CallGetRawSecretsV3: Unsuccessful response [GET https://daniel1.tunn.dev/api/v3/secrets/raw?environment=dev&expandSecretReferences=true&include_imports=true&secretPath=%2F&workspaceId=3c0d3ff6-165c-4dc9-b52c-ff3ffaedfce311111] [status-code=404] [response={"reqId":"req-ljqNq567jchFrK","statusCode":404,"message":"Project with ID '3c0d3ff6-165c-4dc9-b52c-ff3ffaedfce311111' not found during bot lookup. Are you sure you are using the correct project ID?","error":"NotFound"}]
+ echo 'Agent failed with exit code 1'
+ exit 1
Agent failed with exit code 1
```

In the error above, the project ID provided in the config map was invalid.
</Accordion>

@@ -97,24 +97,22 @@ via the UI or API for the third-party service you intend to sync secrets to.

## Key Schemas

Key Schemas let you control how Infisical names your secret keys when syncing to external destinations. This makes it clear which secrets are managed by Infisical and prevents accidental changes to unrelated secrets.
Key Schemas transform your secret keys by applying a prefix, suffix, or format pattern during sync to external destinations. This makes it clear which secrets are managed by Infisical and prevents accidental changes to unrelated secrets.

A Key Schema adds a prefix, suffix, or format to your secrets before they reach the destination.

This example demonstrates key behavior if the schema was set to `INFISICAL_{{secretKey}}`:
**Example:**
- Infisical key: `SECRET_1`
- Schema: `INFISICAL_{{secretKey}}`
- Synced key: `INFISICAL_SECRET_1`

<div align="center">
```mermaid
graph LR
  A[SECRET_1] --> T["Syncs as"] --> B[INFISICAL_SECRET_1]

  style A fill:#E6F4FF,stroke:#0096D6,stroke-width:2px,color:black,rx:15px
  style B fill:#F4FFE6,stroke:#96D600,stroke-width:2px,color:black,rx:15px
  style T fill:#FFF2B2,stroke:#E6C34A,stroke-width:2px,color:black,rx:2px,font-size:12px

  A[Infisical: **SECRET_1**] -->|Apply Schema| B[Destination: **INFISICAL_SECRET_1**]
  style B fill:#F4FFE6,stroke:#96D600,stroke-width:2px,color:black,rx:15px
  style A fill:#E6F4FF,stroke:#0096D6,stroke-width:2px,color:black,rx:15px
```
</div>

<Note>
When importing secrets from the destination into infisical, the schema is stripped from imported secret keys.
When importing secrets from the destination into Infisical, the schema is stripped from imported secret keys.
</Note>
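
To make the transformation concrete, here is a small TypeScript sketch of how a schema such as `INFISICAL_{{secretKey}}` could be applied on sync and stripped on import. The `applySchema` and `stripSchema` helpers are illustrative only, not Infisical's internal implementation; the validation regex mirrors the one used in the secret sync form code later in this diff.

```typescript
// Illustrative helpers only -- not Infisical's actual implementation.
const SCHEMA_REGEX = /^(?:[a-zA-Z0-9_\-/]*)(?:\{\{secretKey\}\})(?:[a-zA-Z0-9_\-/]*)$/;

function applySchema(schema: string, secretKey: string): string {
  if (!SCHEMA_REGEX.test(schema)) throw new Error("Invalid key schema");
  // On sync, replace the {{secretKey}} placeholder with the actual key
  return schema.replace("{{secretKey}}", secretKey);
}

function stripSchema(schema: string, syncedKey: string): string {
  // On import, remove the literal prefix/suffix surrounding the placeholder
  const [prefix, suffix] = schema.split("{{secretKey}}");
  return syncedKey.slice(prefix.length, syncedKey.length - suffix.length);
}

// applySchema("INFISICAL_{{secretKey}}", "SECRET_1") === "INFISICAL_SECRET_1"
// stripSchema("INFISICAL_{{secretKey}}", "INFISICAL_SECRET_1") === "SECRET_1"
```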
@@ -10,8 +10,10 @@ We value reports that help identify vulnerabilities that affect the integrity of
### How to Report

- Send reports to **security@infisical.com** with clear steps to reproduce, impact, and (if possible) a proof-of-concept.
- We will acknowledge receipt within 3 business days.
- We will acknowledge receipt within 3 business days for reports that are clearly written, technically sound, and plausibly within scope.
- We'll provide an initial assessment or next steps within 5 business days.
- **Please note**: We do not respond to spam, auto-generated reports, inaccurate claims, or submissions that are clearly out of scope.

### What's in Scope?

@@ -218,3 +218,4 @@ Supports conditions and permission inversion

| `create-gateways` | Add new gateways to organization |
| `edit-gateways` | Modify existing gateway settings |
| `delete-gateways` | Remove gateways from organization |
| `attach-gateways` | Attach gateways to resources |

@@ -441,6 +441,7 @@
            "integrations/platforms/kubernetes/infisical-dynamic-secret-crd"
          ]
        },
        "integrations/platforms/kubernetes-injector",
        "integrations/platforms/kubernetes-csi",
        "integrations/platforms/docker-swarm-with-agent",
        "integrations/platforms/ecs-with-agent"

@@ -18,9 +18,7 @@ export const ViewLdapPasswordRotationGeneratedCredentials = ({
|
||||
<ViewRotationGeneratedCredentialsDisplay
|
||||
activeCredentials={
|
||||
<>
|
||||
<CredentialDisplay label="Distinguished Name (DN)">
|
||||
{activeCredentials?.dn}
|
||||
</CredentialDisplay>
|
||||
<CredentialDisplay label="DN/UPN">{activeCredentials?.dn}</CredentialDisplay>
|
||||
<CredentialDisplay isSensitive label="Password">
|
||||
{activeCredentials?.password}
|
||||
</CredentialDisplay>
|
||||
@@ -28,9 +26,7 @@ export const ViewLdapPasswordRotationGeneratedCredentials = ({
|
||||
}
|
||||
inactiveCredentials={
|
||||
<>
|
||||
<CredentialDisplay label="Distinguished Name (DN)">
|
||||
{inactiveCredentials?.dn}
|
||||
</CredentialDisplay>
|
||||
<CredentialDisplay label="DN/UPN">{inactiveCredentials?.dn}</CredentialDisplay>
|
||||
<CredentialDisplay isSensitive label="Password">
|
||||
{inactiveCredentials?.password}
|
||||
</CredentialDisplay>
|
||||
|
@@ -48,7 +48,8 @@ const FORM_TABS: { name: string; key: string; fields: (keyof TSecretRotationV2Fo
|
||||
"rotateAtUtc"
|
||||
]
|
||||
},
|
||||
{ name: "Parameters", key: "parameters", fields: ["parameters"] },
|
||||
// @ts-expect-error temporary parameters aren't present on all forms
|
||||
{ name: "Parameters", key: "parameters", fields: ["parameters", "temporaryParameters"] },
|
||||
{ name: "Mappings", key: "secretsMapping", fields: ["secretsMapping"] },
|
||||
{ name: "Details", key: "details", fields: ["name", "description"] },
|
||||
{ name: "Review", key: "review", fields: [] }
|
||||
@@ -75,7 +76,7 @@ export const SecretRotationV2Form = ({
|
||||
const { rotationOption } = useSecretRotationV2Option(type);
|
||||
|
||||
const formMethods = useForm<TSecretRotationV2Form>({
|
||||
resolver: zodResolver(SecretRotationV2FormSchema),
|
||||
resolver: zodResolver(SecretRotationV2FormSchema(Boolean(secretRotation))),
|
||||
defaultValues: secretRotation
|
||||
? {
|
||||
...secretRotation,
|
||||
|
@@ -2,40 +2,135 @@ import { Controller, useFormContext } from "react-hook-form";
|
||||
|
||||
import { TSecretRotationV2Form } from "@app/components/secret-rotations-v2/forms/schemas";
|
||||
import { DEFAULT_PASSWORD_REQUIREMENTS } from "@app/components/secret-rotations-v2/forms/schemas/shared";
|
||||
import { FormControl, Input } from "@app/components/v2";
|
||||
import { FormControl, Input, Select, SelectItem } from "@app/components/v2";
|
||||
import { SecretRotation } from "@app/hooks/api/secretRotationsV2";
|
||||
import { LdapPasswordRotationMethod } from "@app/hooks/api/secretRotationsV2/types/ldap-password-rotation";
|
||||
|
||||
export const LdapPasswordRotationParametersFields = () => {
|
||||
const { control } = useFormContext<
|
||||
const { control, watch, setValue } = useFormContext<
|
||||
TSecretRotationV2Form & {
|
||||
type: SecretRotation.LdapPassword;
|
||||
}
|
||||
>();
|
||||
|
||||
const [id, rotationMethod] = watch(["id", "parameters.rotationMethod"]);
|
||||
const isUpdate = Boolean(id);
|
||||
|
||||
return (
|
||||
<>
|
||||
<Controller
|
||||
name="parameters.dn"
|
||||
name="parameters.rotationMethod"
|
||||
control={control}
|
||||
defaultValue={LdapPasswordRotationMethod.ConnectionPrincipal}
|
||||
render={({ field: { value, onChange }, fieldState: { error } }) => (
|
||||
<FormControl
|
||||
isError={Boolean(error)}
|
||||
tooltipText={
|
||||
<>
|
||||
<span>Determines how the rotation will be performed:</span>
|
||||
<ul className="ml-4 mt-2 flex list-disc flex-col gap-2">
|
||||
<li>
|
||||
<span className="font-medium">Connection Principal</span> - The Connection
|
||||
principal will rotate the target principal's password.
|
||||
</li>
|
||||
<li>
|
||||
<span className="font-medium">Target Principal</span> - The target principal
|
||||
will rotate their own password.
|
||||
</li>
|
||||
</ul>
|
||||
</>
|
||||
}
|
||||
tooltipClassName="max-w-sm"
|
||||
errorText={error?.message}
|
||||
label="Distinguished Name (DN)"
|
||||
isError={Boolean(error?.message)}
|
||||
label="Rotation Method"
|
||||
helperText={
|
||||
// eslint-disable-next-line no-nested-ternary
|
||||
isUpdate
|
||||
? "Cannot be updated."
|
||||
: value === LdapPasswordRotationMethod.ConnectionPrincipal
|
||||
? "The connection principal will rotate the target principal's password"
|
||||
: "The target principal will rotate their own password"
|
||||
}
|
||||
>
|
||||
<Input
|
||||
<Select
|
||||
isDisabled={isUpdate}
|
||||
value={value}
|
||||
onChange={onChange}
|
||||
placeholder="CN=John,OU=Users,DC=example,DC=com"
|
||||
/>
|
||||
onValueChange={(val) => {
|
||||
setValue(
|
||||
"temporaryParameters",
|
||||
val === LdapPasswordRotationMethod.TargetPrincipal
|
||||
? {
|
||||
password: ""
|
||||
}
|
||||
: undefined
|
||||
);
|
||||
onChange(val);
|
||||
}}
|
||||
className="w-full border border-mineshaft-500 capitalize"
|
||||
position="popper"
|
||||
dropdownContainerClassName="max-w-none"
|
||||
>
|
||||
{Object.values(LdapPasswordRotationMethod).map((method) => {
|
||||
return (
|
||||
<SelectItem value={method} className="capitalize" key={method}>
|
||||
{method.replace("-", " ")}
|
||||
</SelectItem>
|
||||
);
|
||||
})}
|
||||
</Select>
|
||||
</FormControl>
|
||||
)}
|
||||
/>
|
||||
<div className="flex gap-3">
|
||||
<Controller
|
||||
name="parameters.dn"
|
||||
control={control}
|
||||
render={({ field: { value, onChange }, fieldState: { error } }) => (
|
||||
<FormControl
|
||||
className="flex-1"
|
||||
isError={Boolean(error)}
|
||||
errorText={error?.message}
|
||||
label="Target Principal's DN/UPN"
|
||||
tooltipText="The DN/UPN of the principal that you want to perform password rotation on."
|
||||
tooltipClassName="max-w-sm"
|
||||
helperText={isUpdate ? "Cannot be updated." : undefined}
|
||||
>
|
||||
<Input
|
||||
isDisabled={isUpdate}
|
||||
value={value}
|
||||
onChange={onChange}
|
||||
placeholder="CN=John,OU=Users,DC=example,DC=com"
|
||||
/>
|
||||
</FormControl>
|
||||
)}
|
||||
/>
|
||||
{rotationMethod === LdapPasswordRotationMethod.TargetPrincipal && !isUpdate && (
|
||||
<Controller
|
||||
name="temporaryParameters.password"
|
||||
control={control}
|
||||
render={({ field: { value, onChange }, fieldState: { error } }) => (
|
||||
<FormControl
|
||||
className="flex-1"
|
||||
isError={Boolean(error)}
|
||||
errorText={error?.message}
|
||||
label="Target Principal's Password"
|
||||
>
|
||||
<Input
|
||||
value={value}
|
||||
onChange={onChange}
|
||||
type="password"
|
||||
placeholder="****************"
|
||||
/>
|
||||
</FormControl>
|
||||
)}
|
||||
/>
|
||||
)}
|
||||
</div>
|
||||
<div className="flex flex-col gap-3">
|
||||
<div className="w-full border-b border-mineshaft-600">
|
||||
<span className="text-sm text-mineshaft-300">Password Requirements</span>
|
||||
</div>
|
||||
<div className="grid grid-cols-2 gap-3 rounded border border-mineshaft-600 bg-mineshaft-700 px-3 py-2">
|
||||
<div className="grid grid-cols-2 gap-x-3 gap-y-1 rounded border border-mineshaft-600 bg-mineshaft-700 px-3 pt-3">
|
||||
<Controller
|
||||
control={control}
|
||||
name="parameters.passwordRequirements.length"
|
||||
@@ -45,7 +140,7 @@ export const LdapPasswordRotationParametersFields = () => {
|
||||
label="Password Length"
|
||||
isError={Boolean(error)}
|
||||
errorText={error?.message}
|
||||
helperText="The length of the password to generate"
|
||||
tooltipText="The length of the password to generate"
|
||||
>
|
||||
<Input
|
||||
type="number"
|
||||
@@ -67,7 +162,7 @@ export const LdapPasswordRotationParametersFields = () => {
|
||||
label="Digit Count"
|
||||
isError={Boolean(error)}
|
||||
errorText={error?.message}
|
||||
helperText="Minimum number of digits"
|
||||
tooltipText="Minimum number of digits"
|
||||
>
|
||||
<Input
|
||||
type="number"
|
||||
@@ -88,7 +183,7 @@ export const LdapPasswordRotationParametersFields = () => {
|
||||
label="Lowercase Character Count"
|
||||
isError={Boolean(error)}
|
||||
errorText={error?.message}
|
||||
helperText="Minimum number of lowercase characters"
|
||||
tooltipText="Minimum number of lowercase characters"
|
||||
>
|
||||
<Input
|
||||
type="number"
|
||||
@@ -109,7 +204,7 @@ export const LdapPasswordRotationParametersFields = () => {
|
||||
label="Uppercase Character Count"
|
||||
isError={Boolean(error)}
|
||||
errorText={error?.message}
|
||||
helperText="Minimum number of uppercase characters"
|
||||
tooltipText="Minimum number of uppercase characters"
|
||||
>
|
||||
<Input
|
||||
type="number"
|
||||
@@ -130,7 +225,7 @@ export const LdapPasswordRotationParametersFields = () => {
|
||||
label="Symbol Count"
|
||||
isError={Boolean(error)}
|
||||
errorText={error?.message}
|
||||
helperText="Minimum number of symbols"
|
||||
tooltipText="Minimum number of symbols"
|
||||
>
|
||||
<Input
|
||||
type="number"
|
||||
@@ -151,7 +246,7 @@ export const LdapPasswordRotationParametersFields = () => {
|
||||
label="Allowed Symbols"
|
||||
isError={Boolean(error)}
|
||||
errorText={error?.message}
|
||||
helperText="Symbols to use in generated password"
|
||||
tooltipText="Symbols to use in generated password"
|
||||
>
|
||||
<Input
|
||||
placeholder="-_.~!*"
|
||||
|
@@ -15,13 +15,35 @@ export const LdapPasswordRotationReviewFields = () => {
|
||||
|
||||
const [parameters, { dn, password }] = watch(["parameters", "secretsMapping"]);
|
||||
|
||||
const { passwordRequirements } = parameters;
|
||||
|
||||
return (
|
||||
<>
|
||||
<SecretRotationReviewSection label="Parameters">
|
||||
<GenericFieldLabel label="Distinguished Name (DN)">{parameters.dn}</GenericFieldLabel>
|
||||
<GenericFieldLabel label="DN/UPN">{parameters.dn}</GenericFieldLabel>
|
||||
</SecretRotationReviewSection>
|
||||
{passwordRequirements && (
|
||||
<SecretRotationReviewSection label="Password Requirements">
|
||||
<GenericFieldLabel label="Length">{passwordRequirements.length}</GenericFieldLabel>
|
||||
<GenericFieldLabel label="Minimum Digits">
|
||||
{passwordRequirements.required.digits}
|
||||
</GenericFieldLabel>
|
||||
<GenericFieldLabel label="Minimum Lowercase Characters">
|
||||
{passwordRequirements.required.lowercase}
|
||||
</GenericFieldLabel>
|
||||
<GenericFieldLabel label="Minimum Uppercase Characters">
|
||||
{passwordRequirements.required.uppercase}
|
||||
</GenericFieldLabel>
|
||||
<GenericFieldLabel label="Minimum Symbols">
|
||||
{passwordRequirements.required.symbols}
|
||||
</GenericFieldLabel>
|
||||
<GenericFieldLabel label="Allowed Symbols">
|
||||
{passwordRequirements.allowedSymbols}
|
||||
</GenericFieldLabel>
|
||||
</SecretRotationReviewSection>
|
||||
)}
|
||||
<SecretRotationReviewSection label="Secrets Mapping">
|
||||
<GenericFieldLabel label="Distinguished Name (DN)">{dn}</GenericFieldLabel>
|
||||
<GenericFieldLabel label="DN/UPN">{dn}</GenericFieldLabel>
|
||||
<GenericFieldLabel label="Password">{password}</GenericFieldLabel>
|
||||
</SecretRotationReviewSection>
|
||||
</>
|
||||
|
@@ -1,7 +1,7 @@
|
||||
import { ReactNode } from "react";
|
||||
|
||||
type Props = {
|
||||
label: "Parameters" | "Secrets Mapping";
|
||||
label: "Parameters" | "Secrets Mapping" | "Password Requirements";
|
||||
children: ReactNode;
|
||||
};
|
||||
|
||||
|
@@ -17,7 +17,7 @@ export const LdapPasswordRotationSecretsMappingFields = () => {
|
||||
|
||||
const items = [
|
||||
{
|
||||
name: "DN",
|
||||
name: "DN/UPN",
|
||||
input: (
|
||||
<Controller
|
||||
render={({ field: { value, onChange }, fieldState: { error } }) => (
|
||||
|
@@ -6,16 +6,36 @@ import { AzureClientSecretRotationSchema } from "@app/components/secret-rotation
|
||||
import { LdapPasswordRotationSchema } from "@app/components/secret-rotations-v2/forms/schemas/ldap-password-rotation-schema";
|
||||
import { MsSqlCredentialsRotationSchema } from "@app/components/secret-rotations-v2/forms/schemas/mssql-credentials-rotation-schema";
|
||||
import { PostgresCredentialsRotationSchema } from "@app/components/secret-rotations-v2/forms/schemas/postgres-credentials-rotation-schema";
|
||||
import { SecretRotation } from "@app/hooks/api/secretRotationsV2";
|
||||
import { LdapPasswordRotationMethod } from "@app/hooks/api/secretRotationsV2/types/ldap-password-rotation";
|
||||
|
||||
const SecretRotationUnionSchema = z.discriminatedUnion("type", [
|
||||
Auth0ClientSecretRotationSchema,
|
||||
AzureClientSecretRotationSchema,
|
||||
PostgresCredentialsRotationSchema,
|
||||
MsSqlCredentialsRotationSchema,
|
||||
LdapPasswordRotationSchema,
|
||||
AwsIamUserSecretRotationSchema
|
||||
]);
|
||||
export const SecretRotationV2FormSchema = (isUpdate: boolean) =>
|
||||
z
|
||||
.intersection(
|
||||
z.discriminatedUnion("type", [
|
||||
Auth0ClientSecretRotationSchema,
|
||||
AzureClientSecretRotationSchema,
|
||||
PostgresCredentialsRotationSchema,
|
||||
MsSqlCredentialsRotationSchema,
|
||||
LdapPasswordRotationSchema,
|
||||
AwsIamUserSecretRotationSchema
|
||||
]),
|
||||
z.object({ id: z.string().optional() })
|
||||
)
|
||||
.superRefine((val, ctx) => {
|
||||
if (val.type !== SecretRotation.LdapPassword || isUpdate) return;
|
||||
|
||||
export const SecretRotationV2FormSchema = SecretRotationUnionSchema;
|
||||
// this has to go on union or breaks discrimination
|
||||
if (
|
||||
val.parameters.rotationMethod === LdapPasswordRotationMethod.TargetPrincipal &&
|
||||
!val.temporaryParameters?.password
|
||||
) {
|
||||
ctx.addIssue({
|
||||
code: z.ZodIssueCode.custom,
|
||||
message: "Password required",
|
||||
path: ["temporaryParameters", "password"]
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
export type TSecretRotationV2Form = z.infer<typeof SecretRotationV2FormSchema>;
|
||||
export type TSecretRotationV2Form = z.infer<ReturnType<typeof SecretRotationV2FormSchema>>;
|
||||
|
@@ -2,8 +2,9 @@ import { z } from "zod";
|
||||
|
||||
import { BaseSecretRotationSchema } from "@app/components/secret-rotations-v2/forms/schemas/base-secret-rotation-v2-schema";
|
||||
import { PasswordRequirementsSchema } from "@app/components/secret-rotations-v2/forms/schemas/shared";
|
||||
import { DistinguishedNameRegex } from "@app/helpers/string";
|
||||
import { DistinguishedNameRegex, UserPrincipalNameRegex } from "@app/helpers/string";
|
||||
import { SecretRotation } from "@app/hooks/api/secretRotationsV2";
|
||||
import { LdapPasswordRotationMethod } from "@app/hooks/api/secretRotationsV2/types/ldap-password-rotation";
|
||||
|
||||
export const LdapPasswordRotationSchema = z
|
||||
.object({
|
||||
@@ -12,13 +13,24 @@ export const LdapPasswordRotationSchema = z
|
||||
dn: z
|
||||
.string()
|
||||
.trim()
|
||||
.regex(DistinguishedNameRegex, "Invalid Distinguished Name format")
|
||||
.min(1, "Distinguished Name (DN) required"),
|
||||
passwordRequirements: PasswordRequirementsSchema.optional()
|
||||
.min(1, "DN/UPN required")
|
||||
.refine(
|
||||
(value) => DistinguishedNameRegex.test(value) || UserPrincipalNameRegex.test(value),
|
||||
{
|
||||
message: "Invalid DN/UPN format"
|
||||
}
|
||||
),
|
||||
passwordRequirements: PasswordRequirementsSchema.optional(),
|
||||
rotationMethod: z.nativeEnum(LdapPasswordRotationMethod).optional()
|
||||
}),
|
||||
secretsMapping: z.object({
|
||||
dn: z.string().trim().min(1, "Distinguished Name (DN) required"),
|
||||
dn: z.string().trim().min(1, "DN/UPN required"),
|
||||
password: z.string().trim().min(1, "Password required")
|
||||
})
|
||||
}),
|
||||
temporaryParameters: z
|
||||
.object({
|
||||
password: z.string().min(1, "Password required")
|
||||
})
|
||||
.optional()
|
||||
})
|
||||
.merge(BaseSecretRotationSchema);
|
||||
|
@@ -1,5 +1,7 @@
|
||||
import { z } from "zod";
|
||||
|
||||
export type TPasswordRequirements = z.infer<typeof PasswordRequirementsSchema>;
|
||||
|
||||
export const PasswordRequirementsSchema = z
|
||||
.object({
|
||||
length: z
|
||||
|
@@ -144,6 +144,7 @@ export const SecretSyncOptionsFields = ({ hideInitialSync }: Props) => {
|
||||
<a
|
||||
href="https://infisical.com/docs/integrations/secret-syncs/overview#key-schemas"
|
||||
target="_blank"
|
||||
rel="noopener noreferrer"
|
||||
>
|
||||
Key Schema
|
||||
</a>{" "}
|
||||
|
@@ -13,10 +13,11 @@ export const BaseSecretSyncSchema = <T extends AnyZodObject | undefined = undefi
|
||||
.string()
|
||||
.optional()
|
||||
.refine(
|
||||
(val) => !val || /^(?:[a-zA-Z0-9\-/]*)(?:\{\{secretKey\}\})(?:[a-zA-Z0-9\-/]*)$/.test(val),
|
||||
(val) =>
|
||||
!val || /^(?:[a-zA-Z0-9_\-/]*)(?:\{\{secretKey\}\})(?:[a-zA-Z0-9_\-/]*)$/.test(val),
|
||||
{
|
||||
message:
|
||||
"Key schema must include one {{secretKey}} and only contain letters, numbers, dashes, slashes, and the {{secretKey}} placeholder."
|
||||
"Key schema must include one {{secretKey}} and only contain letters, numbers, dashes, underscores, slashes, and the {{secretKey}} placeholder."
|
||||
}
|
||||
)
|
||||
});
|
||||
|