Compare commits


52 Commits

Author SHA1 Message Date
Daniel Hougaard
cf5391d6d4 Update overview.mdx 2025-06-07 03:06:01 +04:00
Daniel Hougaard
2ca476f21e Update gateway.mdx 2025-06-07 03:04:45 +04:00
Daniel Hougaard
bf81469341 Merge branch 'heads/main' into daniel/gateway-docs 2025-06-07 03:00:16 +04:00
Daniel Hougaard
8445127fad feat(gateway): multiple authentication methods 2025-06-07 02:58:07 +04:00
Scott Wilson
f8c822eda7 Merge pull request #3744 from Infisical/project-group-users-page
feature(group-projects): Add project group details page
2025-06-06 14:30:50 -07:00
Scott Wilson
ea5a5e0aa7 improvements: address feedback 2025-06-06 14:13:18 -07:00
Akhil Mohan
f20e4e189d Merge pull request #3722 from Infisical/feat/dynamicSecretIdentityName
Add identityName to Dynamic Secrets userName template
2025-06-07 02:23:41 +05:30
Scott Wilson
c7ec6236e1 Merge pull request #3738 from Infisical/gcp-sync-location
feature(gcp-sync): Add support for syncing to locations
2025-06-06 13:47:55 -07:00
carlosmonastyrski
c4dea2d51f Type fix 2025-06-06 17:34:29 -03:00
carlosmonastyrski
e89b0fdf3f Merge remote-tracking branch 'origin/main' into feat/dynamicSecretIdentityName 2025-06-06 17:27:48 -03:00
Scott Wilson
d57f76d230 improvements: address feedback 2025-06-06 13:22:45 -07:00
Maidul Islam
7ba79dec19 Merge pull request #3752 from akhilmhdh/feat/k8s-metadata-auth
feat: added k8s metadata in template policy
2025-06-06 15:30:33 -04:00
Akhil Mohan
6ea8bff224 Merge pull request #3750 from akhilmhdh/feat/dynamic-secret-aws
feat: assume role mode for aws dynamic secret iam
2025-06-07 00:59:22 +05:30
65f4e1bea1 feat: corrected typo 2025-06-07 00:56:03 +05:30
73ce3b8bb7 feat: review based update 2025-06-07 00:48:45 +05:30
Akhil Mohan
e63af81e60 Update docs/documentation/platform/access-controls/abac/managing-machine-identity-attributes.mdx
Co-authored-by: greptile-apps[bot] <165735046+greptile-apps[bot]@users.noreply.github.com>
2025-06-06 23:47:40 +05:30
6c2c2b319b feat: updated doc for k8s policy 2025-06-06 23:43:15 +05:30
82c2be64a1 feat: completed changes for backend to have k8s auth 2025-06-06 23:42:56 +05:30
x032205
051d0780a8 Merge pull request #3721 from Infisical/fix/user-stuck-on-invited
fix invite bug
2025-06-06 13:43:33 -04:00
carlosmonastyrski
5406871c30 feat(dynamic-secret): Minor improvements on usernameTemplate 2025-06-06 14:34:32 -03:00
8b89edc277 feat: resolved ts fail in license 2025-06-06 22:46:51 +05:30
x032205
b394e191a8 Fix accepting invite while logged out 2025-06-06 13:02:23 -04:00
Daniel Hougaard
92030884ec Merge pull request #3751 from Infisical/daniel/gateway-http-handle-multple-requests
fix(gateway): allow multiple requests when using http proxy
2025-06-06 20:54:22 +04:00
4583eb1732 feat: removed console log 2025-06-06 22:13:06 +05:30
Daniel Hougaard
4c8bf9bd92 Update values.yaml 2025-06-06 20:16:50 +04:00
Daniel Hougaard
a6554deb80 Update connection.go 2025-06-06 20:14:03 +04:00
carlosmonastyrski
ae00e74c17 Merge pull request #3715 from Infisical/feat/addAzureDevopsDocsOIDC
feat(oidc): add azure docs for OIDC authentication
2025-06-06 13:11:25 -03:00
adfd5a1b59 feat: doc for assume aws iam 2025-06-06 21:35:40 +05:30
d6c321d34d feat: ui for aws dynamic secret 2025-06-06 21:35:25 +05:30
09a7346f32 feat: backend changes for assume permission in aws dynamic secret 2025-06-06 21:33:19 +05:30
x032205
e4abac91b4 Merge branch 'main' into fix/user-stuck-on-invited 2025-06-06 11:50:03 -04:00
Maidul Islam
b4f37193ac Merge pull request #3748 from Infisical/akhilmhdh-patch-3
feat: updated dynamic secret,secret import to support glob in environment
2025-06-06 10:50:36 -04:00
Akhil Mohan
c8be5a637a feat: updated dynamic secret,secret import to support glob in environment 2025-06-06 20:08:21 +05:30
Akhil Mohan
45485f8bd3 Merge pull request #3739 from akhilmhdh/feat/limit-project-create
feat: added invalidate function to lock
2025-06-06 18:55:03 +05:30
Daniel Hougaard
766254c4e3 Merge pull request #3742 from Infisical/daniel/gateway-fix
fix(gateway): handle malformed URL's
2025-06-06 16:20:48 +04:00
Scott Wilson
4c22024d13 feature: project group details page 2025-06-05 19:17:46 -07:00
Daniel Hougaard
4bd1eb6f70 Update helm-charts/infisical-gateway/CHANGELOG.md
Co-authored-by: greptile-apps[bot] <165735046+greptile-apps[bot]@users.noreply.github.com>
2025-06-06 04:12:04 +04:00
Daniel Hougaard
022ecf75e1 fix(gateway): handle malformed URL's 2025-06-06 04:02:24 +04:00
carlosmonastyrski
ac5bfbb6c9 feat(dynamic-secret): Minor improvements on usernameTemplate 2025-06-05 17:18:56 -03:00
1f80ff040d feat: added invalidate function to lock 2025-06-06 01:45:01 +05:30
Scott Wilson
f8939835e1 feature(gcp-sync): add support for syncing to locations 2025-06-05 13:02:05 -07:00
x032205
d2b0ca94d8 Remove commented line 2025-06-05 11:59:10 -04:00
x032205
5255f0ac17 Fix select org 2025-06-05 11:30:05 -04:00
x032205
4f67834eaa Merge branch 'main' into fix/user-stuck-on-invited 2025-06-05 10:46:22 -04:00
x032205
952e60f08a Select organization checkpoint 2025-06-04 16:54:14 -04:00
carlosmonastyrski
5367d1ac2e feat(dynamic-secret): Added new options to username template 2025-06-04 16:43:17 -03:00
x032205
92b9abb52b Fix type issue 2025-06-03 21:48:59 -04:00
x032205
e2680d9aee Insert old code as comment 2025-06-03 21:48:42 -04:00
x032205
aa049dc43b Fix invite problem on backend 2025-06-03 21:06:48 -04:00
carlosmonastyrski
419e9ac755 Add identityName to Dynamic Secrets userName template 2025-06-03 21:21:36 -03:00
x032205
b7b36a475d fix invite bug 2025-06-03 20:12:29 -04:00
carlosmonastyrski
9159a9fa36 feat(oidc): add azure docs for OIDC authentication 2025-06-03 16:52:12 -03:00
126 changed files with 4182 additions and 2149 deletions

View File

@@ -119,6 +119,10 @@ declare module "@fastify/request-context" {
oidc?: {
claims: Record<string, string>;
};
kubernetes?: {
namespace: string;
name: string;
};
};
identityPermissionMetadata?: Record<string, unknown>; // filled by permission service
assumedPrivilegeDetails?: { requesterId: string; actorId: string; actorType: ActorType; projectId: string };
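This hunk augments `@fastify/request-context` so Kubernetes-authenticated machine identities carry their service account namespace and name alongside the existing OIDC claims, which is what exposes them to template policies. A self-contained sketch of reading that metadata back out — the `identityAuthInfo` key name is an assumption for illustration:

```ts
import { requestContext } from "@fastify/request-context";

// Assumption: the auth metadata lives under an "identityAuthInfo" context key,
// mirroring the declaration-merging block shown in the hunk above.
declare module "@fastify/request-context" {
  interface RequestContextData {
    identityAuthInfo?: {
      oidc?: { claims: Record<string, string> };
      kubernetes?: { namespace: string; name: string };
    };
  }
}

const authInfo = requestContext.get("identityAuthInfo");
if (authInfo?.kubernetes) {
  // The attested workload is now visible to permission/template evaluation.
  console.log(`k8s workload: ${authInfo.kubernetes.namespace}/${authInfo.kubernetes.name}`);
}
```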

View File

@@ -23,7 +23,10 @@ const validateUsernameTemplateCharacters = characterValidator([
CharacterType.CloseBrace,
CharacterType.CloseBracket,
CharacterType.OpenBracket,
CharacterType.Fullstop
CharacterType.Fullstop,
CharacterType.SingleQuote,
CharacterType.Spaces,
CharacterType.Pipe
]);
const userTemplateSchema = z
@@ -33,7 +36,7 @@ const userTemplateSchema = z
.refine((el) => validateUsernameTemplateCharacters(el))
.refine((el) =>
isValidHandleBarTemplate(el, {
allowedExpressions: (val) => ["randomUsername", "unixTimestamp"].includes(val)
allowedExpressions: (val) => ["randomUsername", "unixTimestamp", "identity.name"].includes(val)
})
);
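With `identity.name` added to the allowed expressions (alongside `randomUsername` and `unixTimestamp`), a username template can reference the requesting user or machine identity directly. A small handlebars illustration with placeholder values:

```ts
import handlebars from "handlebars";

// Hypothetical template and values, just to show the three allowed expressions.
const usernameTemplate = "{{identity.name}}-{{randomUsername}}-{{unixTimestamp}}";

const username = handlebars.compile(usernameTemplate)({
  randomUsername: "inf-x8f2k9",
  unixTimestamp: Math.floor(Date.now() / 1000),
  identity: { name: "ci-runner" }
});

console.log(username); // e.g. "ci-runner-inf-x8f2k9-1749200000"
```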

View File

@@ -99,7 +99,9 @@ export const dynamicSecretLeaseQueueServiceFactory = ({
secretManagerDecryptor({ cipherTextBlob: dynamicSecretCfg.encryptedInput }).toString()
) as object;
await selectedProvider.revoke(decryptedStoredInput, dynamicSecretLease.externalEntityId);
await selectedProvider.revoke(decryptedStoredInput, dynamicSecretLease.externalEntityId, {
projectId: folder.projectId
});
await dynamicSecretLeaseDAL.deleteById(dynamicSecretLease.id);
return;
}
@@ -133,7 +135,9 @@ export const dynamicSecretLeaseQueueServiceFactory = ({
await Promise.all(dynamicSecretLeases.map(({ id }) => unsetLeaseRevocation(id)));
await Promise.all(
dynamicSecretLeases.map(({ externalEntityId }) =>
selectedProvider.revoke(decryptedStoredInput, externalEntityId)
selectedProvider.revoke(decryptedStoredInput, externalEntityId, {
projectId: folder.projectId
})
)
);
}

View File

@@ -1,4 +1,5 @@
import { ForbiddenError, subject } from "@casl/ability";
import RE2 from "re2";
import { ActionProjectType } from "@app/db/schemas";
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
@@ -11,10 +12,13 @@ import { getConfig } from "@app/lib/config/env";
import { BadRequestError, NotFoundError } from "@app/lib/errors";
import { logger } from "@app/lib/logger";
import { ms } from "@app/lib/ms";
import { ActorType } from "@app/services/auth/auth-type";
import { TIdentityDALFactory } from "@app/services/identity/identity-dal";
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
import { KmsDataKey } from "@app/services/kms/kms-types";
import { TProjectDALFactory } from "@app/services/project/project-dal";
import { TSecretFolderDALFactory } from "@app/services/secret-folder/secret-folder-dal";
import { TUserDALFactory } from "@app/services/user/user-dal";
import { TDynamicSecretDALFactory } from "../dynamic-secret/dynamic-secret-dal";
import { DynamicSecretProviders, TDynamicProviderFns } from "../dynamic-secret/providers/models";
@@ -39,6 +43,8 @@ type TDynamicSecretLeaseServiceFactoryDep = {
permissionService: Pick<TPermissionServiceFactory, "getProjectPermission">;
projectDAL: Pick<TProjectDALFactory, "findProjectBySlug">;
kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">;
userDAL: Pick<TUserDALFactory, "findById">;
identityDAL: TIdentityDALFactory;
};
export type TDynamicSecretLeaseServiceFactory = ReturnType<typeof dynamicSecretLeaseServiceFactory>;
@@ -52,8 +58,16 @@ export const dynamicSecretLeaseServiceFactory = ({
dynamicSecretQueueService,
projectDAL,
licenseService,
kmsService
kmsService,
userDAL,
identityDAL
}: TDynamicSecretLeaseServiceFactoryDep) => {
const extractEmailUsername = (email: string) => {
const regex = new RE2(/^([^@]+)/);
const match = email.match(regex);
return match ? match[1] : email;
};
const create = async ({
environmentSlug,
path,
@@ -132,10 +146,24 @@ export const dynamicSecretLeaseServiceFactory = ({
let result;
try {
const identity: { name: string } = { name: "" };
if (actor === ActorType.USER) {
const user = await userDAL.findById(actorId);
if (user) {
identity.name = extractEmailUsername(user.username);
}
} else if (actor === ActorType.Machine) {
const machineIdentity = await identityDAL.findById(actorId);
if (machineIdentity) {
identity.name = machineIdentity.name;
}
}
result = await selectedProvider.create({
inputs: decryptedStoredInput,
expireAt: expireAt.getTime(),
usernameTemplate: dynamicSecretCfg.usernameTemplate
usernameTemplate: dynamicSecretCfg.usernameTemplate,
identity,
metadata: { projectId }
});
} catch (error: unknown) {
if (error && typeof error === "object" && error !== null && "sqlMessage" in error) {
@@ -237,7 +265,8 @@ export const dynamicSecretLeaseServiceFactory = ({
const { entityId } = await selectedProvider.renew(
decryptedStoredInput,
dynamicSecretLease.externalEntityId,
expireAt.getTime()
expireAt.getTime(),
{ projectId }
);
await dynamicSecretQueueService.unsetLeaseRevocation(dynamicSecretLease.id);
@@ -313,7 +342,7 @@ export const dynamicSecretLeaseServiceFactory = ({
) as object;
const revokeResponse = await selectedProvider
.revoke(decryptedStoredInput, dynamicSecretLease.externalEntityId)
.revoke(decryptedStoredInput, dynamicSecretLease.externalEntityId, { projectId })
.catch(async (err) => {
// only propagate this error if forced is false
if (!isForced) return { error: err as Error };
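The `extractEmailUsername` helper introduced in this hunk strips everything after the `@` so user-actor leases get a readable identity name, while machine identities use their identity record's name. A standalone sketch of the same RE2 pattern:

```ts
import RE2 from "re2";

// Same pattern as the helper above: capture everything before the first "@",
// falling back to the raw input when it is not an email address.
const extractEmailUsername = (email: string) => {
  const match = email.match(new RE2(/^([^@]+)/));
  return match ? match[1] : email;
};

console.log(extractEmailUsername("jane.doe@example.com")); // "jane.doe"
console.log(extractEmailUsername("machine-identity"));     // "machine-identity"
```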

View File

@@ -116,7 +116,7 @@ export const dynamicSecretServiceFactory = ({
throw new BadRequestError({ message: "Provided dynamic secret already exist under the folder" });
const selectedProvider = dynamicSecretProviders[provider.type];
const inputs = await selectedProvider.validateProviderInputs(provider.inputs);
const inputs = await selectedProvider.validateProviderInputs(provider.inputs, { projectId });
let selectedGatewayId: string | null = null;
if (inputs && typeof inputs === "object" && "gatewayId" in inputs && inputs.gatewayId) {
@@ -146,7 +146,7 @@ export const dynamicSecretServiceFactory = ({
selectedGatewayId = gateway.id;
}
const isConnected = await selectedProvider.validateConnection(provider.inputs);
const isConnected = await selectedProvider.validateConnection(provider.inputs, { projectId });
if (!isConnected) throw new BadRequestError({ message: "Provider connection failed" });
const { encryptor: secretManagerEncryptor } = await kmsService.createCipherPairWithDataKey({
@@ -272,7 +272,7 @@ export const dynamicSecretServiceFactory = ({
secretManagerDecryptor({ cipherTextBlob: dynamicSecretCfg.encryptedInput }).toString()
) as object;
const newInput = { ...decryptedStoredInput, ...(inputs || {}) };
const updatedInput = await selectedProvider.validateProviderInputs(newInput);
const updatedInput = await selectedProvider.validateProviderInputs(newInput, { projectId });
let selectedGatewayId: string | null = null;
if (updatedInput && typeof updatedInput === "object" && "gatewayId" in updatedInput && updatedInput?.gatewayId) {
@@ -301,7 +301,7 @@ export const dynamicSecretServiceFactory = ({
selectedGatewayId = gateway.id;
}
const isConnected = await selectedProvider.validateConnection(newInput);
const isConnected = await selectedProvider.validateConnection(newInput, { projectId });
if (!isConnected) throw new BadRequestError({ message: "Provider connection failed" });
const updatedDynamicCfg = await dynamicSecretDAL.transaction(async (tx) => {
@@ -472,7 +472,9 @@ export const dynamicSecretServiceFactory = ({
secretManagerDecryptor({ cipherTextBlob: dynamicSecretCfg.encryptedInput }).toString()
) as object;
const selectedProvider = dynamicSecretProviders[dynamicSecretCfg.type as DynamicSecretProviders];
const providerInputs = (await selectedProvider.validateProviderInputs(decryptedStoredInput)) as object;
const providerInputs = (await selectedProvider.validateProviderInputs(decryptedStoredInput, {
projectId
})) as object;
return { ...dynamicSecretCfg, inputs: providerInputs };
};

View File

@@ -16,6 +16,7 @@ import { BadRequestError } from "@app/lib/errors";
import { validateHandlebarTemplate } from "@app/lib/template/validate-handlebars";
import { DynamicSecretAwsElastiCacheSchema, TDynamicProviderFns } from "./models";
import { compileUsernameTemplate } from "./templateUtils";
const CreateElastiCacheUserSchema = z.object({
UserId: z.string().trim().min(1),
@@ -132,14 +133,14 @@ const generatePassword = () => {
return customAlphabet(charset, 64)();
};
const generateUsername = (usernameTemplate?: string | null) => {
const generateUsername = (usernameTemplate?: string | null, identity?: { name: string }) => {
const charset = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-";
const randomUsername = `inf-${customAlphabet(charset, 32)()}`;
if (!usernameTemplate) return randomUsername;
return handlebars.compile(usernameTemplate)({
return compileUsernameTemplate({
usernameTemplate,
randomUsername,
unixTimestamp: Math.floor(Date.now() / 100)
identity
});
};
@@ -174,14 +175,21 @@ export const AwsElastiCacheDatabaseProvider = (): TDynamicProviderFns => {
return true;
};
const create = async (data: { inputs: unknown; expireAt: number; usernameTemplate?: string | null }) => {
const { inputs, expireAt, usernameTemplate } = data;
const create = async (data: {
inputs: unknown;
expireAt: number;
usernameTemplate?: string | null;
identity?: {
name: string;
};
}) => {
const { inputs, expireAt, usernameTemplate, identity } = data;
const providerInputs = await validateProviderInputs(inputs);
if (!(await validateConnection(providerInputs))) {
throw new BadRequestError({ message: "Failed to establish connection" });
}
const leaseUsername = generateUsername(usernameTemplate);
const leaseUsername = generateUsername(usernameTemplate, identity);
const leasePassword = generatePassword();
const leaseExpiration = new Date(expireAt).toISOString();
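Each provider now delegates to a shared `compileUsernameTemplate` helper from `./templateUtils` instead of calling handlebars inline. The helper's body is not part of this diff; a plausible sketch, assuming it wraps handlebars and supplies `unixTimestamp` itself:

```ts
import handlebars from "handlebars";

// Sketch only: the real templateUtils implementation is not shown in this diff.
export const compileUsernameTemplate = ({
  usernameTemplate,
  randomUsername,
  identity
}: {
  usernameTemplate: string;
  randomUsername: string;
  identity?: { name: string };
}): string =>
  handlebars.compile(usernameTemplate)({
    randomUsername,
    unixTimestamp: Math.floor(Date.now() / 1000),
    identity
  });
```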

View File

@@ -16,21 +16,25 @@ import {
PutUserPolicyCommand,
RemoveUserFromGroupCommand
} from "@aws-sdk/client-iam";
import handlebars from "handlebars";
import { AssumeRoleCommand, STSClient } from "@aws-sdk/client-sts";
import { randomUUID } from "crypto";
import { z } from "zod";
import { getConfig } from "@app/lib/config/env";
import { BadRequestError } from "@app/lib/errors";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { DynamicSecretAwsIamSchema, TDynamicProviderFns } from "./models";
import { AwsIamAuthType, DynamicSecretAwsIamSchema, TDynamicProviderFns } from "./models";
import { compileUsernameTemplate } from "./templateUtils";
const generateUsername = (usernameTemplate?: string | null) => {
const generateUsername = (usernameTemplate?: string | null, identity?: { name: string }) => {
const randomUsername = alphaNumericNanoId(32);
if (!usernameTemplate) return randomUsername;
return handlebars.compile(usernameTemplate)({
return compileUsernameTemplate({
usernameTemplate,
randomUsername,
unixTimestamp: Math.floor(Date.now() / 100)
identity
});
};
@@ -40,7 +44,43 @@ export const AwsIamProvider = (): TDynamicProviderFns => {
return providerInputs;
};
const $getClient = async (providerInputs: z.infer<typeof DynamicSecretAwsIamSchema>) => {
const $getClient = async (providerInputs: z.infer<typeof DynamicSecretAwsIamSchema>, projectId: string) => {
const appCfg = getConfig();
if (providerInputs.method === AwsIamAuthType.AssumeRole) {
const stsClient = new STSClient({
region: providerInputs.region,
credentials:
appCfg.DYNAMIC_SECRET_AWS_ACCESS_KEY_ID && appCfg.DYNAMIC_SECRET_AWS_SECRET_ACCESS_KEY
? {
accessKeyId: appCfg.DYNAMIC_SECRET_AWS_ACCESS_KEY_ID,
secretAccessKey: appCfg.DYNAMIC_SECRET_AWS_SECRET_ACCESS_KEY
}
: undefined // if hosting on AWS
});
const command = new AssumeRoleCommand({
RoleArn: providerInputs.roleArn,
RoleSessionName: `infisical-dynamic-secret-${randomUUID()}`,
DurationSeconds: 900, // 15 mins
ExternalId: projectId
});
const assumeRes = await stsClient.send(command);
if (!assumeRes.Credentials?.AccessKeyId || !assumeRes.Credentials?.SecretAccessKey) {
throw new BadRequestError({ message: "Failed to assume role - verify credentials and role configuration" });
}
const client = new IAMClient({
region: providerInputs.region,
credentials: {
accessKeyId: assumeRes.Credentials?.AccessKeyId,
secretAccessKey: assumeRes.Credentials?.SecretAccessKey,
sessionToken: assumeRes.Credentials?.SessionToken
}
});
return client;
}
const client = new IAMClient({
region: providerInputs.region,
credentials: {
@@ -52,21 +92,41 @@ export const AwsIamProvider = (): TDynamicProviderFns => {
return client;
};
const validateConnection = async (inputs: unknown) => {
const validateConnection = async (inputs: unknown, { projectId }: { projectId: string }) => {
const providerInputs = await validateProviderInputs(inputs);
const client = await $getClient(providerInputs);
const isConnected = await client.send(new GetUserCommand({})).then(() => true);
const client = await $getClient(providerInputs, projectId);
const isConnected = await client
.send(new GetUserCommand({}))
.then(() => true)
.catch((err) => {
const message = (err as Error)?.message;
if (
providerInputs.method === AwsIamAuthType.AssumeRole &&
// assume role will throw an error asking to provide a username, but if it does, the credentials have access in AWS
message.includes("Must specify userName when calling with non-User credentials")
) {
return true;
}
throw err;
});
return isConnected;
};
const create = async (data: { inputs: unknown; expireAt: number; usernameTemplate?: string | null }) => {
const { inputs, usernameTemplate } = data;
const create = async (data: {
inputs: unknown;
expireAt: number;
usernameTemplate?: string | null;
identity?: {
name: string;
};
metadata: { projectId: string };
}) => {
const { inputs, usernameTemplate, metadata, identity } = data;
const providerInputs = await validateProviderInputs(inputs);
const client = await $getClient(providerInputs);
const client = await $getClient(providerInputs, metadata.projectId);
const username = generateUsername(usernameTemplate);
const username = generateUsername(usernameTemplate, identity);
const { policyArns, userGroups, policyDocument, awsPath, permissionBoundaryPolicyArn } = providerInputs;
const createUserRes = await client.send(
new CreateUserCommand({
@@ -76,6 +136,7 @@ export const AwsIamProvider = (): TDynamicProviderFns => {
UserName: username
})
);
if (!createUserRes.User) throw new BadRequestError({ message: "Failed to create AWS IAM User" });
if (userGroups) {
await Promise.all(
@@ -125,9 +186,9 @@ export const AwsIamProvider = (): TDynamicProviderFns => {
};
};
const revoke = async (inputs: unknown, entityId: string) => {
const revoke = async (inputs: unknown, entityId: string, metadata: { projectId: string }) => {
const providerInputs = await validateProviderInputs(inputs);
const client = await $getClient(providerInputs);
const client = await $getClient(providerInputs, metadata.projectId);
const username = entityId;
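For the new assume-role method to work, the role referenced by `roleArn` must trust whatever AWS identity Infisical's own credentials resolve to, and — matching the `ExternalId: projectId` passed to `AssumeRoleCommand` above — it can pin `sts:ExternalId` to the project. A hypothetical trust policy, written as a TypeScript constant for illustration (account ID and principal are placeholders):

```ts
// Hypothetical trust policy for the role in `roleArn`.
const assumeRoleTrustPolicy = {
  Version: "2012-10-17",
  Statement: [
    {
      Effect: "Allow",
      Principal: { AWS: "arn:aws:iam::111111111111:user/infisical-dynamic-secrets" },
      Action: "sts:AssumeRole",
      Condition: {
        // Mirrors the ExternalId the provider sends: the Infisical project ID.
        StringEquals: { "sts:ExternalId": "<infisical-project-id>" }
      }
    }
  ]
};

console.log(JSON.stringify(assumeRoleTrustPolicy, null, 2));
```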

View File

@@ -8,19 +8,20 @@ import { validateHandlebarTemplate } from "@app/lib/template/validate-handlebars
import { verifyHostInputValidity } from "../dynamic-secret-fns";
import { DynamicSecretCassandraSchema, TDynamicProviderFns } from "./models";
import { compileUsernameTemplate } from "./templateUtils";
const generatePassword = (size = 48) => {
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*";
return customAlphabet(charset, 48)(size);
};
const generateUsername = (usernameTemplate?: string | null) => {
const generateUsername = (usernameTemplate?: string | null, identity?: { name: string }) => {
const randomUsername = alphaNumericNanoId(32); // Username must start with an ascii letter, so we prepend the username with "inf-"
if (!usernameTemplate) return randomUsername;
return handlebars.compile(usernameTemplate)({
return compileUsernameTemplate({
usernameTemplate,
randomUsername,
unixTimestamp: Math.floor(Date.now() / 100)
identity
});
};
@@ -75,12 +76,17 @@ export const CassandraProvider = (): TDynamicProviderFns => {
return isConnected;
};
const create = async (data: { inputs: unknown; expireAt: number; usernameTemplate?: string | null }) => {
const { inputs, expireAt, usernameTemplate } = data;
const create = async (data: {
inputs: unknown;
expireAt: number;
usernameTemplate?: string | null;
identity?: { name: string };
}) => {
const { inputs, expireAt, usernameTemplate, identity } = data;
const providerInputs = await validateProviderInputs(inputs);
const client = await $getClient(providerInputs);
const username = generateUsername(usernameTemplate);
const username = generateUsername(usernameTemplate, identity);
const password = generatePassword();
const { keyspace } = providerInputs;
const expiration = new Date(expireAt).toISOString();

View File

@@ -1,5 +1,4 @@
import { Client as ElasticSearchClient } from "@elastic/elasticsearch";
import handlebars from "handlebars";
import { customAlphabet } from "nanoid";
import { z } from "zod";
@@ -7,19 +6,20 @@ import { alphaNumericNanoId } from "@app/lib/nanoid";
import { verifyHostInputValidity } from "../dynamic-secret-fns";
import { DynamicSecretElasticSearchSchema, ElasticSearchAuthTypes, TDynamicProviderFns } from "./models";
import { compileUsernameTemplate } from "./templateUtils";
const generatePassword = () => {
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*";
return customAlphabet(charset, 64)();
};
const generateUsername = (usernameTemplate?: string | null) => {
const generateUsername = (usernameTemplate?: string | null, identity?: { name: string }) => {
const randomUsername = alphaNumericNanoId(32); // Username must start with an ascii letter, so we prepend the username with "inf-"
if (!usernameTemplate) return randomUsername;
return handlebars.compile(usernameTemplate)({
return compileUsernameTemplate({
usernameTemplate,
randomUsername,
unixTimestamp: Math.floor(Date.now() / 100)
identity
});
};
@@ -71,12 +71,12 @@ export const ElasticSearchProvider = (): TDynamicProviderFns => {
return infoResponse;
};
const create = async (data: { inputs: unknown; usernameTemplate?: string | null }) => {
const { inputs, usernameTemplate } = data;
const create = async (data: { inputs: unknown; usernameTemplate?: string | null; identity?: { name: string } }) => {
const { inputs, usernameTemplate, identity } = data;
const providerInputs = await validateProviderInputs(inputs);
const connection = await $getClient(providerInputs);
const username = generateUsername(usernameTemplate);
const username = generateUsername(usernameTemplate, identity);
const password = generatePassword();
await connection.security.putUser({

View File

@@ -1,21 +1,13 @@
import axios from "axios";
import handlebars from "handlebars";
import https from "https";
import { InternalServerError } from "@app/lib/errors";
import { GatewayHttpProxyActions, GatewayProxyProtocol, withGatewayProxy } from "@app/lib/gateway";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { GatewayProxyProtocol, withGatewayProxy } from "@app/lib/gateway";
import { blockLocalAndPrivateIpAddresses } from "@app/lib/validator";
import { TKubernetesTokenRequest } from "@app/services/identity-kubernetes-auth/identity-kubernetes-auth-types";
import { TGatewayServiceFactory } from "../../gateway/gateway-service";
import {
DynamicSecretKubernetesSchema,
KubernetesAuthMethod,
KubernetesCredentialType,
KubernetesRoleType,
TDynamicProviderFns
} from "./models";
import { DynamicSecretKubernetesSchema, TDynamicProviderFns } from "./models";
const EXTERNAL_REQUEST_TIMEOUT = 10 * 1000;
@@ -23,16 +15,6 @@ type TKubernetesProviderDTO = {
gatewayService: Pick<TGatewayServiceFactory, "fnGetGatewayClientTlsByGatewayId">;
};
const generateUsername = (usernameTemplate?: string | null) => {
const randomUsername = `dynamic-secret-sa-${alphaNumericNanoId(10).toLowerCase()}`;
if (!usernameTemplate) return randomUsername;
return handlebars.compile(usernameTemplate)({
randomUsername,
unixTimestamp: Math.floor(Date.now() / 100)
});
};
export const KubernetesProvider = ({ gatewayService }: TKubernetesProviderDTO): TDynamicProviderFns => {
const validateProviderInputs = async (inputs: unknown) => {
const providerInputs = await DynamicSecretKubernetesSchema.parseAsync(inputs);
@@ -48,27 +30,20 @@ export const KubernetesProvider = ({ gatewayService }: TKubernetesProviderDTO):
gatewayId: string;
targetHost: string;
targetPort: number;
caCert?: string;
reviewTokenThroughGateway: boolean;
enableSsl: boolean;
},
gatewayCallback: (host: string, port: number, httpsAgent?: https.Agent) => Promise<T>
gatewayCallback: (host: string, port: number) => Promise<T>
): Promise<T> => {
const relayDetails = await gatewayService.fnGetGatewayClientTlsByGatewayId(inputs.gatewayId);
const [relayHost, relayPort] = relayDetails.relayAddress.split(":");
const callbackResult = await withGatewayProxy(
async (port, httpsAgent) => {
async (port) => {
// Needs to be https protocol or the kubernetes API server will fail with "Client sent an HTTP request to an HTTPS server"
const res = await gatewayCallback(
inputs.reviewTokenThroughGateway ? "http://localhost" : "https://localhost",
port,
httpsAgent
);
const res = await gatewayCallback("https://localhost", port);
return res;
},
{
protocol: inputs.reviewTokenThroughGateway ? GatewayProxyProtocol.Http : GatewayProxyProtocol.Tcp,
protocol: GatewayProxyProtocol.Tcp,
targetHost: inputs.targetHost,
targetPort: inputs.targetPort,
relayHost,
@@ -79,12 +54,7 @@ export const KubernetesProvider = ({ gatewayService }: TKubernetesProviderDTO):
ca: relayDetails.certChain,
cert: relayDetails.certificate,
key: relayDetails.privateKey.toString()
},
// we always pass this, because its needed for both tcp and http protocol
httpsAgent: new https.Agent({
ca: inputs.caCert,
rejectUnauthorized: inputs.enableSsl
})
}
}
);
@@ -94,151 +64,7 @@ export const KubernetesProvider = ({ gatewayService }: TKubernetesProviderDTO):
const validateConnection = async (inputs: unknown) => {
const providerInputs = await validateProviderInputs(inputs);
const serviceAccountDynamicCallback = async (host: string, port: number, httpsAgent?: https.Agent) => {
if (providerInputs.credentialType !== KubernetesCredentialType.Dynamic) {
throw new Error("invalid callback");
}
const baseUrl = port ? `${host}:${port}` : host;
const serviceAccountName = generateUsername();
const roleBindingName = `${serviceAccountName}-role-binding`;
// 1. Create a test service account
await axios.post(
`${baseUrl}/api/v1/namespaces/${providerInputs.namespace}/serviceaccounts`,
{
metadata: {
name: serviceAccountName,
namespace: providerInputs.namespace
}
},
{
headers: {
"Content-Type": "application/json",
...(providerInputs.authMethod === KubernetesAuthMethod.Gateway
? { "x-infisical-action": GatewayHttpProxyActions.InjectGatewayK8sServiceAccountToken }
: { Authorization: `Bearer ${providerInputs.clusterToken}` })
},
signal: AbortSignal.timeout(EXTERNAL_REQUEST_TIMEOUT),
timeout: EXTERNAL_REQUEST_TIMEOUT,
httpsAgent
}
);
// 2. Create a test role binding
const roleBindingUrl =
providerInputs.roleType === KubernetesRoleType.ClusterRole
? `${baseUrl}/apis/rbac.authorization.k8s.io/v1/clusterrolebindings`
: `${baseUrl}/apis/rbac.authorization.k8s.io/v1/namespaces/${providerInputs.namespace}/rolebindings`;
const roleBindingMetadata = {
name: roleBindingName,
...(providerInputs.roleType !== KubernetesRoleType.ClusterRole && { namespace: providerInputs.namespace })
};
await axios.post(
roleBindingUrl,
{
metadata: roleBindingMetadata,
roleRef: {
kind: providerInputs.roleType === KubernetesRoleType.ClusterRole ? "ClusterRole" : "Role",
name: providerInputs.role,
apiGroup: "rbac.authorization.k8s.io"
},
subjects: [
{
kind: "ServiceAccount",
name: serviceAccountName,
namespace: providerInputs.namespace
}
]
},
{
headers: {
"Content-Type": "application/json",
...(providerInputs.authMethod === KubernetesAuthMethod.Gateway
? { "x-infisical-action": GatewayHttpProxyActions.InjectGatewayK8sServiceAccountToken }
: { Authorization: `Bearer ${providerInputs.clusterToken}` })
},
signal: AbortSignal.timeout(EXTERNAL_REQUEST_TIMEOUT),
timeout: EXTERNAL_REQUEST_TIMEOUT,
httpsAgent
}
);
// 3. Request a token for the test service account
await axios.post(
`${baseUrl}/api/v1/namespaces/${providerInputs.namespace}/serviceaccounts/${serviceAccountName}/token`,
{
spec: {
expirationSeconds: 600, // 10 minutes
...(providerInputs.audiences?.length ? { audiences: providerInputs.audiences } : {})
}
},
{
headers: {
"Content-Type": "application/json",
...(providerInputs.authMethod === KubernetesAuthMethod.Gateway
? { "x-infisical-action": GatewayHttpProxyActions.InjectGatewayK8sServiceAccountToken }
: { Authorization: `Bearer ${providerInputs.clusterToken}` })
},
signal: AbortSignal.timeout(EXTERNAL_REQUEST_TIMEOUT),
timeout: EXTERNAL_REQUEST_TIMEOUT,
httpsAgent
}
);
// 4. Cleanup: delete role binding and service account
if (providerInputs.roleType === KubernetesRoleType.Role) {
await axios.delete(
`${baseUrl}/apis/rbac.authorization.k8s.io/v1/namespaces/${providerInputs.namespace}/rolebindings/${roleBindingName}`,
{
headers: {
"Content-Type": "application/json",
...(providerInputs.authMethod === KubernetesAuthMethod.Gateway
? { "x-infisical-action": GatewayHttpProxyActions.InjectGatewayK8sServiceAccountToken }
: { Authorization: `Bearer ${providerInputs.clusterToken}` })
},
signal: AbortSignal.timeout(EXTERNAL_REQUEST_TIMEOUT),
timeout: EXTERNAL_REQUEST_TIMEOUT,
httpsAgent
}
);
} else {
await axios.delete(`${baseUrl}/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/${roleBindingName}`, {
headers: {
"Content-Type": "application/json",
...(providerInputs.authMethod === KubernetesAuthMethod.Gateway
? { "x-infisical-action": GatewayHttpProxyActions.InjectGatewayK8sServiceAccountToken }
: { Authorization: `Bearer ${providerInputs.clusterToken}` })
},
signal: AbortSignal.timeout(EXTERNAL_REQUEST_TIMEOUT),
timeout: EXTERNAL_REQUEST_TIMEOUT,
httpsAgent
});
}
await axios.delete(
`${baseUrl}/api/v1/namespaces/${providerInputs.namespace}/serviceaccounts/${serviceAccountName}`,
{
headers: {
"Content-Type": "application/json",
...(providerInputs.authMethod === KubernetesAuthMethod.Gateway
? { "x-infisical-action": GatewayHttpProxyActions.InjectGatewayK8sServiceAccountToken }
: { Authorization: `Bearer ${providerInputs.clusterToken}` })
},
signal: AbortSignal.timeout(EXTERNAL_REQUEST_TIMEOUT),
timeout: EXTERNAL_REQUEST_TIMEOUT,
httpsAgent
}
);
};
const serviceAccountStaticCallback = async (host: string, port: number, httpsAgent?: https.Agent) => {
if (providerInputs.credentialType !== KubernetesCredentialType.Static) {
throw new Error("invalid callback");
}
const serviceAccountGetCallback = async (host: string, port: number) => {
const baseUrl = port ? `${host}:${port}` : host;
await axios.get(
@@ -246,57 +72,36 @@ export const KubernetesProvider = ({ gatewayService }: TKubernetesProviderDTO):
{
headers: {
"Content-Type": "application/json",
...(providerInputs.authMethod === KubernetesAuthMethod.Gateway
? { "x-infisical-action": GatewayHttpProxyActions.InjectGatewayK8sServiceAccountToken }
: { Authorization: `Bearer ${providerInputs.clusterToken}` })
Authorization: `Bearer ${providerInputs.clusterToken}`
},
signal: AbortSignal.timeout(EXTERNAL_REQUEST_TIMEOUT),
timeout: EXTERNAL_REQUEST_TIMEOUT,
httpsAgent
httpsAgent: new https.Agent({
ca: providerInputs.ca,
rejectUnauthorized: providerInputs.sslEnabled
})
}
);
};
const url = new URL(providerInputs.url);
const k8sGatewayHost = url.hostname;
const k8sPort = url.port ? Number(url.port) : 443;
const k8sHost = `${url.protocol}//${url.hostname}`;
try {
if (providerInputs.gatewayId) {
if (providerInputs.authMethod === KubernetesAuthMethod.Gateway) {
await $gatewayProxyWrapper(
{
gatewayId: providerInputs.gatewayId,
targetHost: k8sHost,
targetPort: k8sPort,
enableSsl: providerInputs.sslEnabled,
caCert: providerInputs.ca,
reviewTokenThroughGateway: true
},
providerInputs.credentialType === KubernetesCredentialType.Static
? serviceAccountStaticCallback
: serviceAccountDynamicCallback
);
} else {
await $gatewayProxyWrapper(
{
gatewayId: providerInputs.gatewayId,
targetHost: k8sGatewayHost,
targetPort: k8sPort,
enableSsl: providerInputs.sslEnabled,
caCert: providerInputs.ca,
reviewTokenThroughGateway: false
},
providerInputs.credentialType === KubernetesCredentialType.Static
? serviceAccountStaticCallback
: serviceAccountDynamicCallback
);
}
} else if (providerInputs.credentialType === KubernetesCredentialType.Static) {
await serviceAccountStaticCallback(k8sHost, k8sPort);
const k8sHost = url.hostname;
await $gatewayProxyWrapper(
{
gatewayId: providerInputs.gatewayId,
targetHost: k8sHost,
targetPort: k8sPort
},
serviceAccountGetCallback
);
} else {
await serviceAccountDynamicCallback(k8sHost, k8sPort);
const k8sHost = `${url.protocol}//${url.hostname}`;
await serviceAccountGetCallback(k8sHost, k8sPort);
}
return true;
@@ -312,119 +117,10 @@ export const KubernetesProvider = ({ gatewayService }: TKubernetesProviderDTO):
}
};
const create = async ({
inputs,
expireAt,
usernameTemplate
}: {
inputs: unknown;
expireAt: number;
usernameTemplate?: string | null;
}) => {
const create = async ({ inputs, expireAt }: { inputs: unknown; expireAt: number }) => {
const providerInputs = await validateProviderInputs(inputs);
const serviceAccountDynamicCallback = async (host: string, port: number, httpsAgent?: https.Agent) => {
if (providerInputs.credentialType !== KubernetesCredentialType.Dynamic) {
throw new Error("invalid callback");
}
const baseUrl = port ? `${host}:${port}` : host;
const serviceAccountName = generateUsername(usernameTemplate);
const roleBindingName = `${serviceAccountName}-role-binding`;
// 1. Create the service account
await axios.post(
`${baseUrl}/api/v1/namespaces/${providerInputs.namespace}/serviceaccounts`,
{
metadata: {
name: serviceAccountName,
namespace: providerInputs.namespace
}
},
{
headers: {
"Content-Type": "application/json",
...(providerInputs.authMethod === KubernetesAuthMethod.Gateway
? { "x-infisical-action": GatewayHttpProxyActions.InjectGatewayK8sServiceAccountToken }
: { Authorization: `Bearer ${providerInputs.clusterToken}` })
},
signal: AbortSignal.timeout(EXTERNAL_REQUEST_TIMEOUT),
timeout: EXTERNAL_REQUEST_TIMEOUT,
httpsAgent
}
);
// 2. Create the role binding
const roleBindingUrl =
providerInputs.roleType === KubernetesRoleType.ClusterRole
? `${baseUrl}/apis/rbac.authorization.k8s.io/v1/clusterrolebindings`
: `${baseUrl}/apis/rbac.authorization.k8s.io/v1/namespaces/${providerInputs.namespace}/rolebindings`;
const roleBindingMetadata = {
name: roleBindingName,
...(providerInputs.roleType !== KubernetesRoleType.ClusterRole && { namespace: providerInputs.namespace })
};
await axios.post(
roleBindingUrl,
{
metadata: roleBindingMetadata,
roleRef: {
kind: providerInputs.roleType === KubernetesRoleType.ClusterRole ? "ClusterRole" : "Role",
name: providerInputs.role,
apiGroup: "rbac.authorization.k8s.io"
},
subjects: [
{
kind: "ServiceAccount",
name: serviceAccountName,
namespace: providerInputs.namespace
}
]
},
{
headers: {
"Content-Type": "application/json",
...(providerInputs.authMethod === KubernetesAuthMethod.Gateway
? { "x-infisical-action": GatewayHttpProxyActions.InjectGatewayK8sServiceAccountToken }
: { Authorization: `Bearer ${providerInputs.clusterToken}` })
},
signal: AbortSignal.timeout(EXTERNAL_REQUEST_TIMEOUT),
timeout: EXTERNAL_REQUEST_TIMEOUT,
httpsAgent
}
);
// 3. Request a token for the service account
const res = await axios.post<TKubernetesTokenRequest>(
`${baseUrl}/api/v1/namespaces/${providerInputs.namespace}/serviceaccounts/${serviceAccountName}/token`,
{
spec: {
expirationSeconds: Math.floor((expireAt - Date.now()) / 1000),
...(providerInputs.audiences?.length ? { audiences: providerInputs.audiences } : {})
}
},
{
headers: {
"Content-Type": "application/json",
...(providerInputs.authMethod === KubernetesAuthMethod.Gateway
? { "x-infisical-action": GatewayHttpProxyActions.InjectGatewayK8sServiceAccountToken }
: { Authorization: `Bearer ${providerInputs.clusterToken}` })
},
signal: AbortSignal.timeout(EXTERNAL_REQUEST_TIMEOUT),
timeout: EXTERNAL_REQUEST_TIMEOUT,
httpsAgent
}
);
return { ...res.data, serviceAccountName };
};
const tokenRequestStaticCallback = async (host: string, port: number, httpsAgent?: https.Agent) => {
if (providerInputs.credentialType !== KubernetesCredentialType.Static) {
throw new Error("invalid callback");
}
const tokenRequestCallback = async (host: string, port: number) => {
const baseUrl = port ? `${host}:${port}` : host;
const res = await axios.post<TKubernetesTokenRequest>(
@@ -438,17 +134,18 @@ export const KubernetesProvider = ({ gatewayService }: TKubernetesProviderDTO):
{
headers: {
"Content-Type": "application/json",
...(providerInputs.authMethod === KubernetesAuthMethod.Gateway
? { "x-infisical-action": GatewayHttpProxyActions.InjectGatewayK8sServiceAccountToken }
: { Authorization: `Bearer ${providerInputs.clusterToken}` })
Authorization: `Bearer ${providerInputs.clusterToken}`
},
signal: AbortSignal.timeout(EXTERNAL_REQUEST_TIMEOUT),
timeout: EXTERNAL_REQUEST_TIMEOUT,
httpsAgent
httpsAgent: new https.Agent({
ca: providerInputs.ca,
rejectUnauthorized: providerInputs.sslEnabled
})
}
);
return { ...res.data, serviceAccountName: providerInputs.serviceAccountName };
return res.data;
};
const url = new URL(providerInputs.url);
@@ -457,46 +154,19 @@ export const KubernetesProvider = ({ gatewayService }: TKubernetesProviderDTO):
const k8sPort = url.port ? Number(url.port) : 443;
try {
let tokenData;
if (providerInputs.gatewayId) {
if (providerInputs.authMethod === KubernetesAuthMethod.Gateway) {
tokenData = await $gatewayProxyWrapper(
{
gatewayId: providerInputs.gatewayId,
targetHost: k8sHost,
targetPort: k8sPort,
enableSsl: providerInputs.sslEnabled,
caCert: providerInputs.ca,
reviewTokenThroughGateway: true
},
providerInputs.credentialType === KubernetesCredentialType.Static
? tokenRequestStaticCallback
: serviceAccountDynamicCallback
);
} else {
tokenData = await $gatewayProxyWrapper(
const tokenData = providerInputs.gatewayId
? await $gatewayProxyWrapper(
{
gatewayId: providerInputs.gatewayId,
targetHost: k8sGatewayHost,
targetPort: k8sPort,
enableSsl: providerInputs.sslEnabled,
caCert: providerInputs.ca,
reviewTokenThroughGateway: false
targetPort: k8sPort
},
providerInputs.credentialType === KubernetesCredentialType.Static
? tokenRequestStaticCallback
: serviceAccountDynamicCallback
);
}
} else {
tokenData =
providerInputs.credentialType === KubernetesCredentialType.Static
? await tokenRequestStaticCallback(k8sHost, k8sPort)
: await serviceAccountDynamicCallback(k8sHost, k8sPort);
}
tokenRequestCallback
)
: await tokenRequestCallback(k8sHost, k8sPort);
return {
entityId: tokenData.serviceAccountName,
entityId: providerInputs.serviceAccountName,
data: { TOKEN: tokenData.status.token }
};
} catch (error) {
@@ -511,97 +181,7 @@ export const KubernetesProvider = ({ gatewayService }: TKubernetesProviderDTO):
}
};
const revoke = async (inputs: unknown, entityId: string) => {
const providerInputs = await validateProviderInputs(inputs);
const serviceAccountDynamicCallback = async (host: string, port: number, httpsAgent?: https.Agent) => {
if (providerInputs.credentialType !== KubernetesCredentialType.Dynamic) {
throw new Error("invalid callback");
}
const baseUrl = port ? `${host}:${port}` : host;
const roleBindingName = `${entityId}-role-binding`;
if (providerInputs.roleType === KubernetesRoleType.Role) {
await axios.delete(
`${baseUrl}/apis/rbac.authorization.k8s.io/v1/namespaces/${providerInputs.namespace}/rolebindings/${roleBindingName}`,
{
headers: {
"Content-Type": "application/json",
...(providerInputs.authMethod === KubernetesAuthMethod.Gateway
? { "x-infisical-action": GatewayHttpProxyActions.InjectGatewayK8sServiceAccountToken }
: { Authorization: `Bearer ${providerInputs.clusterToken}` })
},
signal: AbortSignal.timeout(EXTERNAL_REQUEST_TIMEOUT),
timeout: EXTERNAL_REQUEST_TIMEOUT,
httpsAgent
}
);
} else {
await axios.delete(`${baseUrl}/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/${roleBindingName}`, {
headers: {
"Content-Type": "application/json",
...(providerInputs.authMethod === KubernetesAuthMethod.Gateway
? { "x-infisical-action": GatewayHttpProxyActions.InjectGatewayK8sServiceAccountToken }
: { Authorization: `Bearer ${providerInputs.clusterToken}` })
},
signal: AbortSignal.timeout(EXTERNAL_REQUEST_TIMEOUT),
timeout: EXTERNAL_REQUEST_TIMEOUT,
httpsAgent
});
}
// Delete the service account
await axios.delete(`${baseUrl}/api/v1/namespaces/${providerInputs.namespace}/serviceaccounts/${entityId}`, {
headers: {
"Content-Type": "application/json",
...(providerInputs.authMethod === KubernetesAuthMethod.Gateway
? { "x-infisical-action": GatewayHttpProxyActions.InjectGatewayK8sServiceAccountToken }
: { Authorization: `Bearer ${providerInputs.clusterToken}` })
},
signal: AbortSignal.timeout(EXTERNAL_REQUEST_TIMEOUT),
timeout: EXTERNAL_REQUEST_TIMEOUT,
httpsAgent
});
};
if (providerInputs.credentialType === KubernetesCredentialType.Dynamic) {
const url = new URL(providerInputs.url);
const k8sGatewayHost = url.hostname;
const k8sPort = url.port ? Number(url.port) : 443;
const k8sHost = `${url.protocol}//${url.hostname}`;
if (providerInputs.gatewayId) {
if (providerInputs.authMethod === KubernetesAuthMethod.Gateway) {
await $gatewayProxyWrapper(
{
gatewayId: providerInputs.gatewayId,
targetHost: k8sHost,
targetPort: k8sPort,
enableSsl: providerInputs.sslEnabled,
caCert: providerInputs.ca,
reviewTokenThroughGateway: true
},
serviceAccountDynamicCallback
);
} else {
await $gatewayProxyWrapper(
{
gatewayId: providerInputs.gatewayId,
targetHost: k8sGatewayHost,
targetPort: k8sPort,
enableSsl: providerInputs.sslEnabled,
caCert: providerInputs.ca,
reviewTokenThroughGateway: false
},
serviceAccountDynamicCallback
);
}
} else {
await serviceAccountDynamicCallback(k8sHost, k8sPort);
}
}
const revoke = async (_inputs: unknown, entityId: string) => {
return { entityId };
};

View File

@@ -9,6 +9,7 @@ import { BadRequestError } from "@app/lib/errors";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { LdapCredentialType, LdapSchema, TDynamicProviderFns } from "./models";
import { compileUsernameTemplate } from "./templateUtils";
const generatePassword = () => {
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*$#";
@@ -22,13 +23,13 @@ const encodePassword = (password?: string) => {
return base64Password;
};
const generateUsername = (usernameTemplate?: string | null) => {
const generateUsername = (usernameTemplate?: string | null, identity?: { name: string }) => {
const randomUsername = alphaNumericNanoId(32); // Username must start with an ascii letter, so we prepend the username with "inf-"
if (!usernameTemplate) return randomUsername;
return handlebars.compile(usernameTemplate)({
return compileUsernameTemplate({
usernameTemplate,
randomUsername,
unixTimestamp: Math.floor(Date.now() / 100)
identity
});
};
@@ -196,8 +197,8 @@ export const LdapProvider = (): TDynamicProviderFns => {
return dnArray;
};
const create = async (data: { inputs: unknown; usernameTemplate?: string | null }) => {
const { inputs, usernameTemplate } = data;
const create = async (data: { inputs: unknown; usernameTemplate?: string | null; identity?: { name: string } }) => {
const { inputs, usernameTemplate, identity } = data;
const providerInputs = await validateProviderInputs(inputs);
const client = await $getClient(providerInputs);
@@ -224,7 +225,7 @@ export const LdapProvider = (): TDynamicProviderFns => {
});
}
} else {
const username = generateUsername(usernameTemplate);
const username = generateUsername(usernameTemplate, identity);
const password = generatePassword();
const generatedLdif = generateLDIF({ username, password, ldifTemplate: providerInputs.creationLdif });

View File

@@ -20,6 +20,11 @@ export enum SqlProviders {
Vertica = "vertica"
}
export enum AwsIamAuthType {
AssumeRole = "assume-role",
AccessKey = "access-key"
}
export enum ElasticSearchAuthTypes {
User = "user",
ApiKey = "api-key"
@@ -31,18 +36,7 @@ export enum LdapCredentialType {
}
export enum KubernetesCredentialType {
Static = "static",
Dynamic = "dynamic"
}
export enum KubernetesRoleType {
ClusterRole = "cluster-role",
Role = "role"
}
export enum KubernetesAuthMethod {
Gateway = "gateway",
Api = "api"
Static = "static"
}
export enum TotpConfigType {
@@ -179,16 +173,38 @@ export const DynamicSecretSapAseSchema = z.object({
revocationStatement: z.string().trim()
});
export const DynamicSecretAwsIamSchema = z.object({
accessKey: z.string().trim().min(1),
secretAccessKey: z.string().trim().min(1),
region: z.string().trim().min(1),
awsPath: z.string().trim().optional(),
permissionBoundaryPolicyArn: z.string().trim().optional(),
policyDocument: z.string().trim().optional(),
userGroups: z.string().trim().optional(),
policyArns: z.string().trim().optional()
});
export const DynamicSecretAwsIamSchema = z.preprocess(
(val) => {
if (typeof val === "object" && val !== null && !Object.hasOwn(val, "method")) {
// eslint-disable-next-line no-param-reassign
(val as { method: string }).method = AwsIamAuthType.AccessKey;
}
return val;
},
z.discriminatedUnion("method", [
z.object({
method: z.literal(AwsIamAuthType.AccessKey),
accessKey: z.string().trim().min(1),
secretAccessKey: z.string().trim().min(1),
region: z.string().trim().min(1),
awsPath: z.string().trim().optional(),
permissionBoundaryPolicyArn: z.string().trim().optional(),
policyDocument: z.string().trim().optional(),
userGroups: z.string().trim().optional(),
policyArns: z.string().trim().optional()
}),
z.object({
method: z.literal(AwsIamAuthType.AssumeRole),
roleArn: z.string().trim().min(1, "Role ARN required"),
region: z.string().trim().min(1),
awsPath: z.string().trim().optional(),
permissionBoundaryPolicyArn: z.string().trim().optional(),
policyDocument: z.string().trim().optional(),
userGroups: z.string().trim().optional(),
policyArns: z.string().trim().optional()
})
])
);
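The `z.preprocess` wrapper above keeps older AWS IAM configurations, which predate the `method` field, parsing as the access-key variant, while new configurations can opt into assume-role. A reduced, self-contained sketch of the same pattern:

```ts
import { z } from "zod";

// Reduced sketch: default a missing `method` to "access-key" before the
// discriminated union runs, so legacy inputs keep parsing.
const AwsIamSketchSchema = z.preprocess(
  (val) => {
    if (typeof val === "object" && val !== null && !Object.hasOwn(val, "method")) {
      (val as { method: string }).method = "access-key";
    }
    return val;
  },
  z.discriminatedUnion("method", [
    z.object({
      method: z.literal("access-key"),
      accessKey: z.string().min(1),
      secretAccessKey: z.string().min(1),
      region: z.string().min(1)
    }),
    z.object({
      method: z.literal("assume-role"),
      roleArn: z.string().min(1, "Role ARN required"),
      region: z.string().min(1)
    })
  ])
);

// Legacy payload without `method` resolves to the access-key variant.
console.log(AwsIamSketchSchema.parse({ accessKey: "AKIA-example", secretAccessKey: "secret", region: "us-east-1" }).method); // "access-key"
console.log(AwsIamSketchSchema.parse({ method: "assume-role", roleArn: "arn:aws:iam::123456789012:role/demo", region: "us-east-1" }).method); // "assume-role"
```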
export const DynamicSecretMongoAtlasSchema = z.object({
adminPublicKey: z.string().trim().min(1).describe("Admin user public api key"),
@@ -293,50 +309,17 @@ export const LdapSchema = z.union([
})
]);
export const DynamicSecretKubernetesSchema = z
.discriminatedUnion("credentialType", [
z.object({
url: z.string().url().trim().min(1),
clusterToken: z.string().trim().optional(),
ca: z.string().optional(),
sslEnabled: z.boolean().default(false),
credentialType: z.literal(KubernetesCredentialType.Static),
serviceAccountName: z.string().trim().min(1),
namespace: z.string().trim().min(1),
gatewayId: z.string().optional(),
audiences: z.array(z.string().trim().min(1)),
authMethod: z.nativeEnum(KubernetesAuthMethod).default(KubernetesAuthMethod.Api)
}),
z.object({
url: z.string().url().trim().min(1),
clusterToken: z.string().trim().optional(),
ca: z.string().optional(),
sslEnabled: z.boolean().default(false),
credentialType: z.literal(KubernetesCredentialType.Dynamic),
namespace: z.string().trim().min(1),
gatewayId: z.string().optional(),
audiences: z.array(z.string().trim().min(1)),
roleType: z.nativeEnum(KubernetesRoleType),
role: z.string().trim().min(1),
authMethod: z.nativeEnum(KubernetesAuthMethod).default(KubernetesAuthMethod.Api)
})
])
.superRefine((data, ctx) => {
if (data.authMethod === KubernetesAuthMethod.Gateway && !data.gatewayId) {
ctx.addIssue({
path: ["gatewayId"],
code: z.ZodIssueCode.custom,
message: "When auth method is set to Gateway, a gateway must be selected"
});
}
if ((data.authMethod === KubernetesAuthMethod.Api || !data.authMethod) && !data.clusterToken) {
ctx.addIssue({
path: ["clusterToken"],
code: z.ZodIssueCode.custom,
message: "When auth method is set to Manual Token, a cluster token must be provided"
});
}
});
export const DynamicSecretKubernetesSchema = z.object({
url: z.string().url().trim().min(1),
gatewayId: z.string().nullable().optional(),
sslEnabled: z.boolean().default(true),
clusterToken: z.string().trim().min(1),
ca: z.string().optional(),
serviceAccountName: z.string().trim().min(1),
credentialType: z.literal(KubernetesCredentialType.Static),
namespace: z.string().trim().min(1),
audiences: z.array(z.string().trim().min(1))
});
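One side of this hunk reduces `DynamicSecretKubernetesSchema` to a static-credential-only object. An input that would satisfy that shape, with placeholder values only:

```ts
// Placeholder values; `credentialType` must be the "static" literal.
const kubernetesStaticInput = {
  url: "https://kubernetes.example.com:6443",
  gatewayId: null,
  sslEnabled: true,
  clusterToken: "<token-reviewer-service-account-jwt>",
  ca: "<cluster-ca-pem>",
  serviceAccountName: "app-sa",
  credentialType: "static",
  namespace: "default",
  audiences: ["https://kubernetes.default.svc"]
};
```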
export const DynamicSecretVerticaSchema = z.object({
host: z.string().trim().toLowerCase(),
@@ -444,9 +427,18 @@ export type TDynamicProviderFns = {
inputs: unknown;
expireAt: number;
usernameTemplate?: string | null;
identity?: {
name: string;
};
metadata: { projectId: string };
}) => Promise<{ entityId: string; data: unknown }>;
validateConnection: (inputs: unknown) => Promise<boolean>;
validateProviderInputs: (inputs: object) => Promise<unknown>;
revoke: (inputs: unknown, entityId: string) => Promise<{ entityId: string }>;
renew: (inputs: unknown, entityId: string, expireAt: number) => Promise<{ entityId: string }>;
validateConnection: (inputs: unknown, metadata: { projectId: string }) => Promise<boolean>;
validateProviderInputs: (inputs: object, metadata: { projectId: string }) => Promise<unknown>;
revoke: (inputs: unknown, entityId: string, metadata: { projectId: string }) => Promise<{ entityId: string }>;
renew: (
inputs: unknown,
entityId: string,
expireAt: number,
metadata: { projectId: string }
) => Promise<{ entityId: string }>;
};
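Every method on `TDynamicProviderFns` now receives project metadata, and `create` additionally receives the resolved identity, so call sites thread `{ projectId }` through the whole lease lifecycle. A hedged sketch of a caller against this interface (the `provider` value and `rawInputs` are assumed to come from the provider factory and stored config):

```ts
// Sketch of a call site against the updated interface, in an ESM module
// (top-level await). Not the actual service code.
declare const provider: TDynamicProviderFns;
declare const rawInputs: object;

const metadata = { projectId: "proj_123" }; // hypothetical project ID

const inputs = await provider.validateProviderInputs(rawInputs, metadata);
if (!(await provider.validateConnection(inputs, metadata))) {
  throw new Error("Provider connection failed");
}

const lease = await provider.create({
  inputs,
  expireAt: Date.now() + 60 * 60 * 1000, // one hour
  usernameTemplate: "{{identity.name}}-{{randomUsername}}",
  identity: { name: "ci-runner" },
  metadata
});

await provider.renew(inputs, lease.entityId, Date.now() + 2 * 60 * 60 * 1000, metadata);
await provider.revoke(inputs, lease.entityId, metadata);
```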

View File

@@ -1,5 +1,4 @@
import axios, { AxiosError } from "axios";
import handlebars from "handlebars";
import { customAlphabet } from "nanoid";
import { z } from "zod";
@@ -7,19 +6,20 @@ import { createDigestAuthRequestInterceptor } from "@app/lib/axios/digest-auth";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { DynamicSecretMongoAtlasSchema, TDynamicProviderFns } from "./models";
import { compileUsernameTemplate } from "./templateUtils";
const generatePassword = (size = 48) => {
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*";
return customAlphabet(charset, 48)(size);
};
const generateUsername = (usernameTemplate?: string | null) => {
const generateUsername = (usernameTemplate?: string | null, identity?: { name: string }) => {
const randomUsername = alphaNumericNanoId(32);
if (!usernameTemplate) return randomUsername;
return handlebars.compile(usernameTemplate)({
return compileUsernameTemplate({
usernameTemplate,
randomUsername,
unixTimestamp: Math.floor(Date.now() / 100)
identity
});
};
@@ -64,12 +64,17 @@ export const MongoAtlasProvider = (): TDynamicProviderFns => {
return isConnected;
};
const create = async (data: { inputs: unknown; expireAt: number; usernameTemplate?: string | null }) => {
const { inputs, expireAt, usernameTemplate } = data;
const create = async (data: {
inputs: unknown;
expireAt: number;
usernameTemplate?: string | null;
identity?: { name: string };
}) => {
const { inputs, expireAt, usernameTemplate, identity } = data;
const providerInputs = await validateProviderInputs(inputs);
const client = await $getClient(providerInputs);
const username = generateUsername(usernameTemplate);
const username = generateUsername(usernameTemplate, identity);
const password = generatePassword();
const expiration = new Date(expireAt).toISOString();
await client({

View File

@@ -1,4 +1,3 @@
import handlebars from "handlebars";
import { MongoClient } from "mongodb";
import { customAlphabet } from "nanoid";
import { z } from "zod";
@@ -7,19 +6,20 @@ import { alphaNumericNanoId } from "@app/lib/nanoid";
import { verifyHostInputValidity } from "../dynamic-secret-fns";
import { DynamicSecretMongoDBSchema, TDynamicProviderFns } from "./models";
import { compileUsernameTemplate } from "./templateUtils";
const generatePassword = (size = 48) => {
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*";
return customAlphabet(charset, 48)(size);
};
const generateUsername = (usernameTemplate?: string | null) => {
const generateUsername = (usernameTemplate?: string | null, identity?: { name: string }) => {
const randomUsername = alphaNumericNanoId(32);
if (!usernameTemplate) return randomUsername;
return handlebars.compile(usernameTemplate)({
return compileUsernameTemplate({
usernameTemplate,
randomUsername,
unixTimestamp: Math.floor(Date.now() / 100)
identity
});
};
@@ -60,12 +60,12 @@ export const MongoDBProvider = (): TDynamicProviderFns => {
return isConnected;
};
const create = async (data: { inputs: unknown; usernameTemplate?: string | null }) => {
const { inputs, usernameTemplate } = data;
const create = async (data: { inputs: unknown; usernameTemplate?: string | null; identity?: { name: string } }) => {
const { inputs, usernameTemplate, identity } = data;
const providerInputs = await validateProviderInputs(inputs);
const client = await $getClient(providerInputs);
const username = generateUsername(usernameTemplate);
const username = generateUsername(usernameTemplate, identity);
const password = generatePassword();
const db = client.db(providerInputs.database);

View File

@@ -1,5 +1,4 @@
import axios, { Axios } from "axios";
import handlebars from "handlebars";
import https from "https";
import { customAlphabet } from "nanoid";
import { z } from "zod";
@@ -9,19 +8,20 @@ import { alphaNumericNanoId } from "@app/lib/nanoid";
import { verifyHostInputValidity } from "../dynamic-secret-fns";
import { DynamicSecretRabbitMqSchema, TDynamicProviderFns } from "./models";
import { compileUsernameTemplate } from "./templateUtils";
const generatePassword = () => {
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*";
return customAlphabet(charset, 64)();
};
const generateUsername = (usernameTemplate?: string | null) => {
const generateUsername = (usernameTemplate?: string | null, identity?: { name: string }) => {
const randomUsername = alphaNumericNanoId(32); // Username must start with an ascii letter, so we prepend the username with "inf-"
if (!usernameTemplate) return randomUsername;
return handlebars.compile(usernameTemplate)({
return compileUsernameTemplate({
usernameTemplate,
randomUsername,
unixTimestamp: Math.floor(Date.now() / 100)
identity
});
};
@@ -117,12 +117,12 @@ export const RabbitMqProvider = (): TDynamicProviderFns => {
return infoResponse;
};
const create = async (data: { inputs: unknown; usernameTemplate?: string | null }) => {
const { inputs, usernameTemplate } = data;
const create = async (data: { inputs: unknown; usernameTemplate?: string | null; identity?: { name: string } }) => {
const { inputs, usernameTemplate, identity } = data;
const providerInputs = await validateProviderInputs(inputs);
const connection = await $getClient(providerInputs);
const username = generateUsername(usernameTemplate);
const username = generateUsername(usernameTemplate, identity);
const password = generatePassword();
await createRabbitMqUser({

View File

@@ -9,19 +9,20 @@ import { validateHandlebarTemplate } from "@app/lib/template/validate-handlebars
import { verifyHostInputValidity } from "../dynamic-secret-fns";
import { DynamicSecretRedisDBSchema, TDynamicProviderFns } from "./models";
import { compileUsernameTemplate } from "./templateUtils";
const generatePassword = () => {
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*";
return customAlphabet(charset, 64)();
};
const generateUsername = (usernameTemplate?: string | null) => {
const generateUsername = (usernameTemplate?: string | null, identity?: { name: string }) => {
const randomUsername = alphaNumericNanoId(32); // Username must start with an ascii letter, so we prepend the username with "inf-"
if (!usernameTemplate) return randomUsername;
return handlebars.compile(usernameTemplate)({
return compileUsernameTemplate({
usernameTemplate,
randomUsername,
unixTimestamp: Math.floor(Date.now() / 100)
identity
});
};
@@ -121,12 +122,17 @@ export const RedisDatabaseProvider = (): TDynamicProviderFns => {
return pingResponse;
};
const create = async (data: { inputs: unknown; expireAt: number; usernameTemplate?: string | null }) => {
const { inputs, expireAt, usernameTemplate } = data;
const create = async (data: {
inputs: unknown;
expireAt: number;
usernameTemplate?: string | null;
identity?: { name: string };
}) => {
const { inputs, expireAt, usernameTemplate, identity } = data;
const providerInputs = await validateProviderInputs(inputs);
const connection = await $getClient(providerInputs);
const username = generateUsername(usernameTemplate);
const username = generateUsername(usernameTemplate, identity);
const password = generatePassword();
const expiration = new Date(expireAt).toISOString();

View File

@@ -9,19 +9,20 @@ import { validateHandlebarTemplate } from "@app/lib/template/validate-handlebars
import { verifyHostInputValidity } from "../dynamic-secret-fns";
import { DynamicSecretSapAseSchema, TDynamicProviderFns } from "./models";
import { compileUsernameTemplate } from "./templateUtils";
const generatePassword = (size = 48) => {
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789";
return customAlphabet(charset, 48)(size);
};
const generateUsername = (usernameTemplate?: string | null) => {
const generateUsername = (usernameTemplate?: string | null, identity?: { name: string }) => {
const randomUsername = `inf_${alphaNumericNanoId(25)}`; // Username must start with an ascii letter, so we prepend the username with "inf-"
if (!usernameTemplate) return randomUsername;
return handlebars.compile(usernameTemplate)({
return compileUsernameTemplate({
usernameTemplate,
randomUsername,
unixTimestamp: Math.floor(Date.now() / 100)
identity
});
};
@@ -87,11 +88,11 @@ export const SapAseProvider = (): TDynamicProviderFns => {
return true;
};
const create = async (data: { inputs: unknown; usernameTemplate?: string | null }) => {
const { inputs, usernameTemplate } = data;
const create = async (data: { inputs: unknown; usernameTemplate?: string | null; identity?: { name: string } }) => {
const { inputs, usernameTemplate, identity } = data;
const providerInputs = await validateProviderInputs(inputs);
const username = generateUsername(usernameTemplate);
const username = generateUsername(usernameTemplate, identity);
const password = generatePassword();
const client = await $getClient(providerInputs);

View File

@@ -15,19 +15,20 @@ import { validateHandlebarTemplate } from "@app/lib/template/validate-handlebars
import { verifyHostInputValidity } from "../dynamic-secret-fns";
import { DynamicSecretSapHanaSchema, TDynamicProviderFns } from "./models";
import { compileUsernameTemplate } from "./templateUtils";
const generatePassword = (size = 48) => {
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789";
return customAlphabet(charset, 48)(size);
};
const generateUsername = (usernameTemplate?: string | null) => {
const generateUsername = (usernameTemplate?: string | null, identity?: { name: string }) => {
const randomUsername = alphaNumericNanoId(32); // Username must start with an ascii letter, so we prepend the username with "inf-"
if (!usernameTemplate) return randomUsername;
return handlebars.compile(usernameTemplate)({
return compileUsernameTemplate({
usernameTemplate,
randomUsername,
unixTimestamp: Math.floor(Date.now() / 100)
identity
});
};
@@ -97,11 +98,16 @@ export const SapHanaProvider = (): TDynamicProviderFns => {
return testResult;
};
const create = async (data: { inputs: unknown; expireAt: number; usernameTemplate?: string | null }) => {
const { inputs, expireAt, usernameTemplate } = data;
const create = async (data: {
inputs: unknown;
expireAt: number;
usernameTemplate?: string | null;
identity?: { name: string };
}) => {
const { inputs, expireAt, usernameTemplate, identity } = data;
const providerInputs = await validateProviderInputs(inputs);
const username = generateUsername(usernameTemplate);
const username = generateUsername(usernameTemplate, identity);
const password = generatePassword();
const expiration = new Date(expireAt).toISOString();

View File

@@ -8,6 +8,7 @@ import { alphaNumericNanoId } from "@app/lib/nanoid";
import { validateHandlebarTemplate } from "@app/lib/template/validate-handlebars";
import { DynamicSecretSnowflakeSchema, TDynamicProviderFns } from "./models";
import { compileUsernameTemplate } from "./templateUtils";
// destroy client requires callback...
const noop = () => {};
@@ -17,13 +18,13 @@ const generatePassword = (size = 48) => {
return customAlphabet(charset, 48)(size);
};
const generateUsername = (usernameTemplate?: string | null) => {
const generateUsername = (usernameTemplate?: string | null, identity?: { name: string }) => {
const randomUsername = `infisical_${alphaNumericNanoId(32)}`; // Username must start with an ascii letter, so we prepend the username with "inf-"
if (!usernameTemplate) return randomUsername;
return handlebars.compile(usernameTemplate)({
return compileUsernameTemplate({
usernameTemplate,
randomUsername,
unixTimestamp: Math.floor(Date.now() / 100)
identity
});
};
@@ -88,13 +89,18 @@ export const SnowflakeProvider = (): TDynamicProviderFns => {
return isValidConnection;
};
const create = async (data: { inputs: unknown; expireAt: number; usernameTemplate?: string | null }) => {
const { inputs, expireAt, usernameTemplate } = data;
const create = async (data: {
inputs: unknown;
expireAt: number;
usernameTemplate?: string | null;
identity?: { name: string };
}) => {
const { inputs, expireAt, usernameTemplate, identity } = data;
const providerInputs = await validateProviderInputs(inputs);
const client = await $getClient(providerInputs);
const username = generateUsername(usernameTemplate);
const username = generateUsername(usernameTemplate, identity);
const password = generatePassword();
try {

View File

@@ -10,6 +10,7 @@ import { validateHandlebarTemplate } from "@app/lib/template/validate-handlebars
import { TGatewayServiceFactory } from "../../gateway/gateway-service";
import { verifyHostInputValidity } from "../dynamic-secret-fns";
import { DynamicSecretSqlDBSchema, PasswordRequirements, SqlProviders, TDynamicProviderFns } from "./models";
import { compileUsernameTemplate } from "./templateUtils";
const EXTERNAL_REQUEST_TIMEOUT = 10 * 1000;
@@ -104,9 +105,8 @@ const generatePassword = (provider: SqlProviders, requirements?: PasswordRequire
}
};
const generateUsername = (provider: SqlProviders, usernameTemplate?: string | null) => {
const generateUsername = (provider: SqlProviders, usernameTemplate?: string | null, identity?: { name: string }) => {
let randomUsername = "";
// For oracle, the client assumes everything is upper case when not using quotes around the password
if (provider === SqlProviders.Oracle) {
randomUsername = alphaNumericNanoId(32).toUpperCase();
@@ -114,10 +114,13 @@ const generateUsername = (provider: SqlProviders, usernameTemplate?: string | nu
randomUsername = alphaNumericNanoId(32);
}
if (!usernameTemplate) return randomUsername;
return handlebars.compile(usernameTemplate)({
return compileUsernameTemplate({
usernameTemplate,
randomUsername,
unixTimestamp: Math.floor(Date.now() / 100)
identity,
options: {
toUpperCase: provider === SqlProviders.Oracle
}
});
};
@@ -221,11 +224,16 @@ export const SqlDatabaseProvider = ({ gatewayService }: TSqlDatabaseProviderDTO)
return isConnected;
};
const create = async (data: { inputs: unknown; expireAt: number; usernameTemplate?: string | null }) => {
const { inputs, expireAt, usernameTemplate } = data;
const create = async (data: {
inputs: unknown;
expireAt: number;
usernameTemplate?: string | null;
identity?: { name: string };
}) => {
const { inputs, expireAt, usernameTemplate, identity } = data;
const providerInputs = await validateProviderInputs(inputs);
const username = generateUsername(providerInputs.client, usernameTemplate);
const username = generateUsername(providerInputs.client, usernameTemplate, identity);
const password = generatePassword(providerInputs.client, providerInputs.passwordRequirements);
const gatewayCallback = async (host = providerInputs.host, port = providerInputs.port) => {

View File

@@ -0,0 +1,80 @@
/* eslint-disable func-names */
import handlebars from "handlebars";
import RE2 from "re2";
import { logger } from "@app/lib/logger";
import { alphaNumericNanoId } from "@app/lib/nanoid";
export const compileUsernameTemplate = ({
usernameTemplate,
randomUsername,
identity,
unixTimestamp,
options
}: {
usernameTemplate: string;
randomUsername: string;
identity?: { name: string };
unixTimestamp?: number;
options?: {
toUpperCase?: boolean;
};
}): string => {
// Create isolated handlebars instance
const hbs = handlebars.create();
// Register random helper on local instance
hbs.registerHelper("random", function (length: number) {
if (typeof length !== "number" || length <= 0 || length > 100) {
return "";
}
return alphaNumericNanoId(length);
});
// Register replace helper on local instance
hbs.registerHelper("replace", function (text: string, searchValue: string, replaceValue: string) {
// Convert to string if it's not already
const textStr = String(text || "");
if (!textStr) {
return textStr;
}
try {
const re2Pattern = new RE2(searchValue, "g");
// Replace all occurrences
return re2Pattern.replace(textStr, replaceValue);
} catch (error) {
logger.error(error, "RE2 pattern failed, using original template");
return textStr;
}
});
// Register truncate helper on local instance
hbs.registerHelper("truncate", function (text: string, length: number) {
// Convert to string if it's not already
const textStr = String(text || "");
if (!textStr) {
return textStr;
}
if (typeof length !== "number" || length <= 0) return textStr;
return textStr.substring(0, length);
});
// Compile template with context using local instance
const context = {
randomUsername,
unixTimestamp: unixTimestamp || Math.floor(Date.now() / 100),
identity: {
name: identity?.name
}
};
const result = hbs.compile(usernameTemplate)(context);
if (options?.toUpperCase) {
return result.toUpperCase();
}
return result;
};
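
For orientation, below is a minimal usage sketch of the helper introduced above; the template string and identity name are invented for illustration, and only the `random`, `replace`, and `truncate` helpers registered inside `compileUsernameTemplate` are assumed.

```typescript
// Hypothetical call into the helper from the diff above; the template and
// identity name are illustrative only.
import { compileUsernameTemplate } from "./templateUtils";

const username = compileUsernameTemplate({
  usernameTemplate: "{{truncate identity.name 8}}-{{random 6}}",
  randomUsername: "3POnzeFyK9gW2nioK0q2gMjr6CZqsRiX", // would normally come from alphaNumericNanoId
  identity: { name: "ci-runner" },
  options: { toUpperCase: false }
});
// => something like "ci-runne-x9k2m1"; the random suffix differs on every call
```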

View File

@@ -42,6 +42,10 @@ export type TListGroupUsersDTO = {
filter?: EFilterReturnedUsers;
} & TGenericPermission;
export type TListProjectGroupUsersDTO = TListGroupUsersDTO & {
projectId: string;
};
export type TAddUserToGroupDTO = {
id: string;
username: string;

View File

@@ -709,6 +709,10 @@ export const licenseServiceFactory = ({
return licenses;
};
const invalidateGetPlan = async (orgId: string) => {
await keyStore.deleteItem(FEATURE_CACHE_KEY(orgId));
};
return {
generateOrgCustomerId,
removeOrgCustomer,
@@ -723,6 +727,7 @@ export const licenseServiceFactory = ({
return onPremFeatures;
},
getPlan,
invalidateGetPlan,
updateSubscriptionOrgMemberCount,
refreshPlan,
getOrgPlan,

View File

@@ -376,7 +376,8 @@ const DynamicSecretConditionV2Schema = z
.object({
[PermissionConditionOperators.$EQ]: PermissionConditionSchema[PermissionConditionOperators.$EQ],
[PermissionConditionOperators.$NEQ]: PermissionConditionSchema[PermissionConditionOperators.$NEQ],
[PermissionConditionOperators.$IN]: PermissionConditionSchema[PermissionConditionOperators.$IN]
[PermissionConditionOperators.$IN]: PermissionConditionSchema[PermissionConditionOperators.$IN],
[PermissionConditionOperators.$GLOB]: PermissionConditionSchema[PermissionConditionOperators.$GLOB]
})
.partial()
]),
@@ -404,6 +405,23 @@ const DynamicSecretConditionV2Schema = z
})
.partial();
const SecretImportConditionSchema = z
.object({
environment: z.union([
z.string(),
z
.object({
[PermissionConditionOperators.$EQ]: PermissionConditionSchema[PermissionConditionOperators.$EQ],
[PermissionConditionOperators.$NEQ]: PermissionConditionSchema[PermissionConditionOperators.$NEQ],
[PermissionConditionOperators.$IN]: PermissionConditionSchema[PermissionConditionOperators.$IN],
[PermissionConditionOperators.$GLOB]: PermissionConditionSchema[PermissionConditionOperators.$GLOB]
})
.partial()
]),
secretPath: SECRET_PATH_PERMISSION_OPERATOR_SCHEMA
})
.partial();
const SecretConditionV2Schema = z
.object({
environment: z.union([
@@ -741,7 +759,7 @@ export const ProjectPermissionV2Schema = z.discriminatedUnion("subject", [
action: CASL_ACTION_SCHEMA_NATIVE_ENUM(ProjectPermissionActions).describe(
"Describe what action an entity can take."
),
conditions: SecretConditionV1Schema.describe(
conditions: SecretImportConditionSchema.describe(
"When specified, only matching conditions will be allowed to access given resource."
).optional()
}),
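
As a rough illustration of what the new `$GLOB` operator enables for secret imports, a condition object along these lines should satisfy `SecretImportConditionSchema`; the `$glob`/`$eq` key spellings are assumed from the operator names, and the values are made up.

```typescript
// Illustrative only: assumes PermissionConditionOperators.$GLOB / $EQ
// serialize to "$glob" / "$eq" keys in the condition object.
const secretImportCondition = {
  environment: { $glob: "dev-*" }, // any environment slug beginning with "dev-"
  secretPath: { $eq: "/backend" }  // restrict to one import path
};
```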

View File

@@ -89,6 +89,7 @@ export const GROUPS = {
limit: "The number of users to return.",
username: "The username to search for.",
search: "The text string that user email or name will be filtered by.",
projectId: "The ID of the project the group belongs to.",
filterUsers:
"Whether to filter the list of returned users. 'existingMembers' will only return existing users in the group, 'nonMembers' will only return users not in the group, undefined will return all users in the organization."
},
@@ -2276,7 +2277,8 @@ export const SecretSyncs = {
},
GCP: {
scope: "The Google project scope that secrets should be synced to.",
projectId: "The ID of the Google project secrets should be synced to."
projectId: "The ID of the Google project secrets should be synced to.",
locationId: 'The ID of the Google project location secrets should be synced to (ie "us-west4").'
},
DATABRICKS: {
scope: "The Databricks secret scope that secrets should be synced to."

View File

@@ -213,6 +213,12 @@ const envSchema = z
GATEWAY_RELAY_AUTH_SECRET: zpStr(z.string().optional()),
DYNAMIC_SECRET_ALLOW_INTERNAL_IP: zodStrBool.default("false"),
DYNAMIC_SECRET_AWS_ACCESS_KEY_ID: zpStr(z.string().optional()).default(
process.env.INF_APP_CONNECTION_AWS_ACCESS_KEY_ID
),
DYNAMIC_SECRET_AWS_SECRET_ACCESS_KEY: zpStr(z.string().optional()).default(
process.env.INF_APP_CONNECTION_AWS_SECRET_ACCESS_KEY
),
/* ----------------------------------------------------------------------------- */
/* App Connections ----------------------------------------------------------------------------- */

View File

@@ -7,13 +7,24 @@ type SanitizationArg = {
allowedExpressions?: (arg: string) => boolean;
};
const isValidExpression = (expression: string, dto: SanitizationArg): boolean => {
// Allow helper functions (replace, truncate)
const allowedHelpers = ["replace", "truncate", "random"];
if (allowedHelpers.includes(expression)) {
return true;
}
// Check regular allowed expressions
return dto?.allowedExpressions?.(expression) || false;
};
export const validateHandlebarTemplate = (templateName: string, template: string, dto: SanitizationArg) => {
const parsedAst = handlebars.parse(template);
parsedAst.body.forEach((el) => {
if (el.type === "ContentStatement") return;
if (el.type === "MustacheStatement" && "path" in el) {
const { path } = el as { type: "MustacheStatement"; path: { type: "PathExpression"; original: string } };
if (path.type === "PathExpression" && dto?.allowedExpressions?.(path.original)) return;
if (path.type === "PathExpression" && isValidExpression(path.original, dto)) return;
}
logger.error(el, "Template sanitization failed");
throw new BadRequestError({ message: `Template sanitization failed: ${templateName}` });
@@ -26,7 +37,7 @@ export const isValidHandleBarTemplate = (template: string, dto: SanitizationArg)
if (el.type === "ContentStatement") return true;
if (el.type === "MustacheStatement" && "path" in el) {
const { path } = el as { type: "MustacheStatement"; path: { type: "PathExpression"; original: string } };
if (path.type === "PathExpression" && dto?.allowedExpressions?.(path.original)) return true;
if (path.type === "PathExpression" && isValidExpression(path.original, dto)) return true;
}
return false;
});
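
A small sketch of how the relaxed validator behaves, assuming an `allowedExpressions` callback similar to the ones used by the dynamic secret providers; only the helper names on the allow-list above come from the diff.

```typescript
// Sketch: the validator inspects only the top-level path of each mustache,
// so helper calls pass via the allow-list while unknown paths still throw.
import { validateHandlebarTemplate } from "@app/lib/template/validate-handlebars";

const allowedExpressions = (expr: string) =>
  ["randomUsername", "unixTimestamp", "identity.name"].includes(expr);

// Passes: "truncate" and "random" are allow-listed helpers.
validateHandlebarTemplate("username template", "{{truncate identity.name 6}}-{{random 4}}", {
  allowedExpressions
});

// Would throw BadRequestError: "secret.value" is neither a helper nor an allowed expression.
// validateHandlebarTemplate("username template", "{{secret.value}}", { allowedExpressions });
```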

View File

@@ -155,6 +155,12 @@ export const injectIdentity = fp(async (server: FastifyZodProvider) => {
oidc: token?.identityAuth?.oidc
});
}
if (token?.identityAuth?.kubernetes) {
requestContext.set("identityAuthInfo", {
identityId: identity.identityId,
kubernetes: token?.identityAuth?.kubernetes
});
}
break;
}
case AuthMode.SERVICE_TOKEN: {

View File

@@ -1516,7 +1516,9 @@ export const registerRoutes = async (
dynamicSecretProviders,
folderDAL,
licenseService,
kmsService
kmsService,
userDAL,
identityDAL
});
const dailyResourceCleanUp = dailyResourceCleanUpQueueServiceFactory({
auditLogDAL,

View File

@@ -45,4 +45,37 @@ export const registerGcpConnectionRouter = async (server: FastifyZodProvider) =>
return projects;
}
});
server.route({
method: "GET",
url: `/:connectionId/secret-manager-project-locations`,
config: {
rateLimit: readLimit
},
schema: {
params: z.object({
connectionId: z.string().uuid()
}),
querystring: z.object({
projectId: z.string()
}),
response: {
200: z.object({ displayName: z.string(), locationId: z.string() }).array()
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const {
params: { connectionId },
query: { projectId }
} = req;
const locations = await server.services.appConnection.gcp.listSecretManagerProjectLocations(
{ connectionId, projectId },
req.permission
);
return locations;
}
});
};
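
For reference, a client call against the new locations endpoint might look like the sketch below; the `/api/v1/app-connections/gcp` prefix, host, and token are assumptions, while the relative route and response shape come from the schema above.

```typescript
// Hypothetical caller; only the relative route and the response shape are
// taken from the route registration above.
async function listGcpSecretManagerLocations(connectionId: string, projectId: string, token: string) {
  const res = await fetch(
    `https://app.infisical.com/api/v1/app-connections/gcp/${connectionId}/secret-manager-project-locations?projectId=${projectId}`,
    { headers: { Authorization: `Bearer ${token}` } }
  );
  return (await res.json()) as { displayName: string; locationId: string }[];
}
```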

View File

@@ -4,9 +4,11 @@ import {
GroupProjectMembershipsSchema,
GroupsSchema,
ProjectMembershipRole,
ProjectUserMembershipRolesSchema
ProjectUserMembershipRolesSchema,
UsersSchema
} from "@app/db/schemas";
import { ApiDocsTags, PROJECTS } from "@app/lib/api-docs";
import { EFilterReturnedUsers } from "@app/ee/services/group/group-types";
import { ApiDocsTags, GROUPS, PROJECTS } from "@app/lib/api-docs";
import { ms } from "@app/lib/ms";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
@@ -301,4 +303,61 @@ export const registerGroupProjectRouter = async (server: FastifyZodProvider) =>
return { groupMembership };
}
});
server.route({
method: "GET",
url: "/:projectId/groups/:groupId/users",
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
config: {
rateLimit: readLimit
},
schema: {
hide: false,
tags: [ApiDocsTags.ProjectGroups],
description: "Return project group users",
params: z.object({
projectId: z.string().trim().describe(GROUPS.LIST_USERS.projectId),
groupId: z.string().trim().describe(GROUPS.LIST_USERS.id)
}),
querystring: z.object({
offset: z.coerce.number().min(0).max(100).default(0).describe(GROUPS.LIST_USERS.offset),
limit: z.coerce.number().min(1).max(100).default(10).describe(GROUPS.LIST_USERS.limit),
username: z.string().trim().optional().describe(GROUPS.LIST_USERS.username),
search: z.string().trim().optional().describe(GROUPS.LIST_USERS.search),
filter: z.nativeEnum(EFilterReturnedUsers).optional().describe(GROUPS.LIST_USERS.filterUsers)
}),
response: {
200: z.object({
users: UsersSchema.pick({
email: true,
username: true,
firstName: true,
lastName: true,
id: true
})
.merge(
z.object({
isPartOfGroup: z.boolean(),
joinedGroupAt: z.date().nullable()
})
)
.array(),
totalCount: z.number()
})
}
},
handler: async (req) => {
const { users, totalCount } = await server.services.groupProject.listProjectGroupUsers({
id: req.params.groupId,
projectId: req.params.projectId,
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
...req.query
});
return { users, totalCount };
}
});
};

View File

@@ -11,8 +11,10 @@ import { AppConnection } from "../app-connection-enums";
import { GcpConnectionMethod } from "./gcp-connection-enums";
import {
GCPApp,
GCPGetProjectLocationsRes,
GCPGetProjectsRes,
GCPGetServiceRes,
GCPLocation,
TGcpConnection,
TGcpConnectionConfig
} from "./gcp-connection-types";
@@ -145,6 +147,45 @@ export const getGcpSecretManagerProjects = async (appConnection: TGcpConnection)
return projects;
};
export const getGcpSecretManagerProjectLocations = async (projectId: string, appConnection: TGcpConnection) => {
const accessToken = await getGcpConnectionAuthToken(appConnection);
let gcpLocations: GCPLocation[] = [];
const pageSize = 100;
let pageToken: string | undefined;
let hasMorePages = true;
while (hasMorePages) {
const params = new URLSearchParams({
pageSize: String(pageSize),
...(pageToken ? { pageToken } : {})
});
// eslint-disable-next-line no-await-in-loop
const { data } = await request.get<GCPGetProjectLocationsRes>(
`${IntegrationUrls.GCP_SECRET_MANAGER_URL}/v1/projects/${projectId}/locations`,
{
params,
headers: {
Authorization: `Bearer ${accessToken}`,
"Accept-Encoding": "application/json"
}
}
);
gcpLocations = gcpLocations.concat(data.locations);
if (!data.nextPageToken) {
hasMorePages = false;
}
pageToken = data.nextPageToken;
}
return gcpLocations.sort((a, b) => a.displayName.localeCompare(b.displayName));
};
export const validateGcpConnectionCredentials = async (appConnection: TGcpConnectionConfig) => {
// Check if provided service account email suffix matches organization ID.
// We do this to mitigate confused deputy attacks in multi-tenant instances

View File

@@ -1,8 +1,8 @@
import { OrgServiceActor } from "@app/lib/types";
import { AppConnection } from "../app-connection-enums";
import { getGcpSecretManagerProjects } from "./gcp-connection-fns";
import { TGcpConnection } from "./gcp-connection-types";
import { getGcpSecretManagerProjectLocations, getGcpSecretManagerProjects } from "./gcp-connection-fns";
import { TGcpConnection, TGetGCPProjectLocationsDTO } from "./gcp-connection-types";
type TGetAppConnectionFunc = (
app: AppConnection,
@@ -23,7 +23,23 @@ export const gcpConnectionService = (getAppConnection: TGetAppConnectionFunc) =>
}
};
const listSecretManagerProjectLocations = async (
{ connectionId, projectId }: TGetGCPProjectLocationsDTO,
actor: OrgServiceActor
) => {
const appConnection = await getAppConnection(AppConnection.GCP, connectionId, actor);
try {
const locations = await getGcpSecretManagerProjectLocations(projectId, appConnection);
return locations;
} catch (error) {
return [];
}
};
return {
listSecretManagerProjects
listSecretManagerProjects,
listSecretManagerProjectLocations
};
};

View File

@@ -38,6 +38,22 @@ export type GCPGetProjectsRes = {
nextPageToken?: string;
};
export type GCPLocation = {
name: string;
locationId: string;
displayName: string;
};
export type GCPGetProjectLocationsRes = {
locations: GCPLocation[];
nextPageToken?: string;
};
export type TGetGCPProjectLocationsDTO = {
projectId: string;
connectionId: string;
};
export type GCPGetServiceRes = {
name: string;
parent: string;

View File

@@ -397,7 +397,7 @@ export const authLoginServiceFactory = ({
// Check if the user actually has access to the specified organization.
const userOrgs = await orgDAL.findAllOrgsByUserId(user.id);
const hasOrganizationMembership = userOrgs.some((org) => org.id === organizationId);
const hasOrganizationMembership = userOrgs.some((org) => org.id === organizationId && org.userStatus !== "invited");
const selectedOrg = await orgDAL.findById(organizationId);
if (!hasOrganizationMembership) {

View File

@@ -1,6 +1,7 @@
import { ForbiddenError } from "@casl/ability";
import { ActionProjectType, ProjectMembershipRole, SecretKeyEncoding, TGroups } from "@app/db/schemas";
import { TListProjectGroupUsersDTO } from "@app/ee/services/group/group-types";
import {
constructPermissionErrorMessage,
validatePrivilegeChangeOperation
@@ -42,7 +43,7 @@ type TGroupProjectServiceFactoryDep = {
projectKeyDAL: Pick<TProjectKeyDALFactory, "findLatestProjectKey" | "delete" | "insertMany" | "transaction">;
projectRoleDAL: Pick<TProjectRoleDALFactory, "find">;
projectBotDAL: TProjectBotDALFactory;
groupDAL: Pick<TGroupDALFactory, "findOne">;
groupDAL: Pick<TGroupDALFactory, "findOne" | "findAllGroupPossibleMembers">;
permissionService: Pick<TPermissionServiceFactory, "getProjectPermission" | "getProjectPermissionByRole">;
};
@@ -471,11 +472,54 @@ export const groupProjectServiceFactory = ({
return groupMembership;
};
const listProjectGroupUsers = async ({
id,
projectId,
offset,
limit,
username,
actor,
actorId,
actorAuthMethod,
actorOrgId,
search,
filter
}: TListProjectGroupUsersDTO) => {
const project = await projectDAL.findById(projectId);
if (!project) {
throw new NotFoundError({ message: `Failed to find project with ID ${projectId}` });
}
const { permission } = await permissionService.getProjectPermission({
actor,
actorId,
projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.Any
});
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionGroupActions.Read, ProjectPermissionSub.Groups);
const { members, totalCount } = await groupDAL.findAllGroupPossibleMembers({
orgId: project.orgId,
groupId: id,
offset,
limit,
username,
search,
filter
});
return { users: members, totalCount };
};
return {
addGroupToProject,
updateGroupInProject,
removeGroupFromProject,
listGroupsInProject,
getGroupInProject
getGroupInProject,
listProjectGroupUsers
};
};

View File

@@ -11,5 +11,9 @@ export type TIdentityAccessTokenJwtPayload = {
oidc?: {
claims: Record<string, string>;
};
kubernetes?: {
namespace: string;
name: string;
};
};
};

View File

@@ -274,9 +274,27 @@ export const identityKubernetesAuthServiceFactory = ({
if (identityKubernetesAuth.tokenReviewMode === IdentityKubernetesAuthTokenReviewMode.Gateway) {
const { kubernetesHost } = identityKubernetesAuth;
const lastColonIndex = kubernetesHost.lastIndexOf(":");
const k8sHost = kubernetesHost.substring(0, lastColonIndex);
const k8sPort = kubernetesHost.substring(lastColonIndex + 1);
let urlString = kubernetesHost;
if (!kubernetesHost.startsWith("http://") && !kubernetesHost.startsWith("https://")) {
urlString = `https://${kubernetesHost}`;
}
const url = new URL(urlString);
let { port: k8sPort } = url;
const { protocol, hostname: k8sHost } = url;
const cleanedProtocol = new RE2(/[^a-zA-Z0-9]/g).replace(protocol, "").toLowerCase();
if (!["https", "http"].includes(cleanedProtocol)) {
throw new BadRequestError({
message: "Invalid Kubernetes host URL, must start with http:// or https://"
});
}
if (!k8sPort) {
k8sPort = cleanedProtocol === "https" ? "443" : "80";
}
if (!identityKubernetesAuth.gatewayId) {
throw new BadRequestError({
@@ -287,7 +305,7 @@ export const identityKubernetesAuthServiceFactory = ({
data = await $gatewayProxyWrapper(
{
gatewayId: identityKubernetesAuth.gatewayId,
targetHost: k8sHost, // note(daniel): must include the protocol (https|http)
targetHost: `${cleanedProtocol}://${k8sHost}`, // note(daniel): must include the protocol (https|http)
targetPort: k8sPort ? Number(k8sPort) : 443,
caCert,
reviewTokenThroughGateway: true
@@ -398,7 +416,13 @@ export const identityKubernetesAuthServiceFactory = ({
{
identityId: identityKubernetesAuth.identityId,
identityAccessTokenId: identityAccessToken.id,
authTokenType: AuthTokenType.IDENTITY_ACCESS_TOKEN
authTokenType: AuthTokenType.IDENTITY_ACCESS_TOKEN,
identityAuth: {
kubernetes: {
namespace: targetNamespace,
name: targetName
}
}
} as TIdentityAccessTokenJwtPayload,
appCfg.AUTH_SECRET,
// akhilmhdh: for non-expiry tokens you should not even set the value, including undefined. Even for undefined jsonwebtoken throws error
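
The net effect of the host handling above can be summarized with a small sketch; the inputs are illustrative and the RE2-based protocol cleanup is simplified to a plain string replace.

```typescript
// Condensed sketch of the parsing introduced above: prepend https:// when no
// scheme is given, then derive protocol, hostname and port (defaulting the
// port by scheme).
const parseKubernetesHost = (kubernetesHost: string) => {
  let urlString = kubernetesHost;
  if (!kubernetesHost.startsWith("http://") && !kubernetesHost.startsWith("https://")) {
    urlString = `https://${kubernetesHost}`;
  }
  const url = new URL(urlString);
  const protocol = url.protocol.replace(":", "");
  const port = url.port || (protocol === "https" ? "443" : "80");
  return { protocol, host: url.hostname, port };
};

parseKubernetesHost("10.0.0.1:6443");             // { protocol: "https", host: "10.0.0.1", port: "6443" }
parseKubernetesHost("https://kube.example.com");  // { protocol: "https", host: "kube.example.com", port: "443" }
parseKubernetesHost("http://kube.internal:8080"); // { protocol: "http", host: "kube.internal", port: "8080" }
```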

View File

@@ -212,7 +212,7 @@ export const orgDALFactory = (db: TDbClient) => {
// special query
const findAllOrgsByUserId = async (
userId: string
): Promise<(TOrganizations & { orgAuthMethod: string; userRole: string })[]> => {
): Promise<(TOrganizations & { orgAuthMethod: string; userRole: string; userStatus: string })[]> => {
try {
const org = (await db
.replicaNode()(TableName.OrgMembership)
@@ -234,6 +234,7 @@ export const orgDALFactory = (db: TDbClient) => {
})
.select(selectAllTableCols(TableName.Organization))
.select(db.ref("role").withSchema(TableName.OrgMembership).as("userRole"))
.select(db.ref("status").withSchema(TableName.OrgMembership).as("userStatus"))
.select(
db.raw(`
CASE
@@ -242,7 +243,7 @@ export const orgDALFactory = (db: TDbClient) => {
ELSE ''
END as "orgAuthMethod"
`)
)) as (TOrganizations & { orgAuthMethod: string; userRole: string })[];
)) as (TOrganizations & { orgAuthMethod: string; userRole: string; userStatus: string })[];
return org;
} catch (error) {

View File

@@ -183,7 +183,9 @@ export const orgServiceFactory = ({
* */
const findAllOrganizationOfUser = async (userId: string) => {
const orgs = await orgDAL.findAllOrgsByUserId(userId);
return orgs;
// Filter out orgs where the membership object is an invitation
return orgs.filter((org) => org.userStatus !== "invited");
};
/*
* Get all workspace members

View File

@@ -165,7 +165,7 @@ type TProjectServiceFactoryDep = {
sshHostGroupDAL: Pick<TSshHostGroupDALFactory, "find" | "findSshHostGroupsWithLoginMappings">;
permissionService: TPermissionServiceFactory;
orgService: Pick<TOrgServiceFactory, "addGhostUser">;
licenseService: Pick<TLicenseServiceFactory, "getPlan">;
licenseService: Pick<TLicenseServiceFactory, "getPlan" | "invalidateGetPlan">;
queueService: Pick<TQueueServiceFactory, "stopRepeatableJob">;
smtpService: Pick<TSmtpService, "sendMail">;
orgDAL: Pick<TOrgDALFactory, "findOne">;
@@ -494,6 +494,10 @@ export const projectServiceFactory = ({
);
}
// no need to invalidate if there was no limit
if (plan.workspaceLimit) {
await licenseService.invalidateGetPlan(organization.id);
}
return {
...project,
environments: envs,

View File

@@ -1,3 +1,63 @@
export enum GcpSyncScope {
Global = "global"
Global = "global",
Region = "region"
}
export enum GCPSecretManagerLocation {
// Asia Pacific
ASIA_SOUTHEAST3 = "asia-southeast3", // Bangkok
ASIA_SOUTH2 = "asia-south2", // Delhi
ASIA_EAST2 = "asia-east2", // Hong Kong
ASIA_SOUTHEAST2 = "asia-southeast2", // Jakarta
AUSTRALIA_SOUTHEAST2 = "australia-southeast2", // Melbourne
ASIA_SOUTH1 = "asia-south1", // Mumbai
ASIA_NORTHEAST2 = "asia-northeast2", // Osaka
ASIA_NORTHEAST3 = "asia-northeast3", // Seoul
ASIA_SOUTHEAST1 = "asia-southeast1", // Singapore
AUSTRALIA_SOUTHEAST1 = "australia-southeast1", // Sydney
ASIA_EAST1 = "asia-east1", // Taiwan
ASIA_NORTHEAST1 = "asia-northeast1", // Tokyo
// Europe
EUROPE_WEST1 = "europe-west1", // Belgium
EUROPE_WEST10 = "europe-west10", // Berlin
EUROPE_NORTH1 = "europe-north1", // Finland
EUROPE_NORTH2 = "europe-north2", // Stockholm
EUROPE_WEST3 = "europe-west3", // Frankfurt
EUROPE_WEST2 = "europe-west2", // London
EUROPE_SOUTHWEST1 = "europe-southwest1", // Madrid
EUROPE_WEST8 = "europe-west8", // Milan
EUROPE_WEST4 = "europe-west4", // Netherlands
EUROPE_WEST12 = "europe-west12", // Turin
EUROPE_WEST9 = "europe-west9", // Paris
EUROPE_CENTRAL2 = "europe-central2", // Warsaw
EUROPE_WEST6 = "europe-west6", // Zurich
// North America
US_CENTRAL1 = "us-central1", // Iowa
US_WEST4 = "us-west4", // Las Vegas
US_WEST2 = "us-west2", // Los Angeles
NORTHAMERICA_SOUTH1 = "northamerica-south1", // Mexico
NORTHAMERICA_NORTHEAST1 = "northamerica-northeast1", // Montréal
US_EAST4 = "us-east4", // Northern Virginia
US_CENTRAL2 = "us-central2", // Oklahoma
US_WEST1 = "us-west1", // Oregon
US_WEST3 = "us-west3", // Salt Lake City
US_EAST1 = "us-east1", // South Carolina
NORTHAMERICA_NORTHEAST2 = "northamerica-northeast2", // Toronto
US_EAST5 = "us-east5", // Columbus
US_SOUTH1 = "us-south1", // Dallas
US_WEST8 = "us-west8", // Phoenix
// South America
SOUTHAMERICA_EAST1 = "southamerica-east1", // São Paulo
SOUTHAMERICA_WEST1 = "southamerica-west1", // Santiago
// Middle East
ME_CENTRAL2 = "me-central2", // Dammam
ME_CENTRAL1 = "me-central1", // Doha
ME_WEST1 = "me-west1", // Tel Aviv
// Africa
AFRICA_SOUTH1 = "africa-south1" // Johannesburg
}

View File

@@ -4,6 +4,7 @@ import { request } from "@app/lib/config/request";
import { logger } from "@app/lib/logger";
import { getGcpConnectionAuthToken } from "@app/services/app-connection/gcp";
import { IntegrationUrls } from "@app/services/integration-auth/integration-list";
import { GcpSyncScope } from "@app/services/secret-sync/gcp/gcp-sync-enums";
import { matchesSchema } from "@app/services/secret-sync/secret-sync-fns";
import { SecretSyncError } from "../secret-sync-errors";
@@ -15,9 +16,17 @@ import {
TGcpSyncWithCredentials
} from "./gcp-sync-types";
const getGcpSecrets = async (accessToken: string, secretSync: TGcpSyncWithCredentials) => {
const getProjectUrl = (secretSync: TGcpSyncWithCredentials) => {
const { destinationConfig } = secretSync;
if (destinationConfig.scope === GcpSyncScope.Global) {
return `${IntegrationUrls.GCP_SECRET_MANAGER_URL}/v1/projects/${destinationConfig.projectId}`;
}
return `https://secretmanager.${destinationConfig.locationId}.rep.googleapis.com/v1/projects/${destinationConfig.projectId}/locations/${destinationConfig.locationId}`;
};
const getGcpSecrets = async (accessToken: string, secretSync: TGcpSyncWithCredentials) => {
let gcpSecrets: GCPSecret[] = [];
const pageSize = 100;
@@ -31,16 +40,13 @@ const getGcpSecrets = async (accessToken: string, secretSync: TGcpSyncWithCreden
});
// eslint-disable-next-line no-await-in-loop
const { data: secretsRes } = await request.get<GCPSMListSecretsRes>(
`${IntegrationUrls.GCP_SECRET_MANAGER_URL}/v1/projects/${secretSync.destinationConfig.projectId}/secrets`,
{
params,
headers: {
Authorization: `Bearer ${accessToken}`,
"Accept-Encoding": "application/json"
}
const { data: secretsRes } = await request.get<GCPSMListSecretsRes>(`${getProjectUrl(secretSync)}/secrets`, {
params,
headers: {
Authorization: `Bearer ${accessToken}`,
"Accept-Encoding": "application/json"
}
);
});
if (secretsRes.secrets) {
gcpSecrets = gcpSecrets.concat(secretsRes.secrets);
@@ -61,7 +67,7 @@ const getGcpSecrets = async (accessToken: string, secretSync: TGcpSyncWithCreden
try {
const { data: secretLatest } = await request.get<GCPLatestSecretVersionAccess>(
`${IntegrationUrls.GCP_SECRET_MANAGER_URL}/v1/projects/${destinationConfig.projectId}/secrets/${key}/versions/latest:access`,
`${getProjectUrl(secretSync)}/secrets/${key}/versions/latest:access`,
{
headers: {
Authorization: `Bearer ${accessToken}`,
@@ -113,11 +119,14 @@ export const GcpSyncFns = {
if (!(key in gcpSecrets)) {
// case: create secret
await request.post(
`${IntegrationUrls.GCP_SECRET_MANAGER_URL}/v1/projects/${destinationConfig.projectId}/secrets`,
`${getProjectUrl(secretSync)}/secrets`,
{
replication: {
automatic: {}
}
replication:
destinationConfig.scope === GcpSyncScope.Global
? {
automatic: {}
}
: undefined
},
{
params: {
@@ -131,7 +140,7 @@ export const GcpSyncFns = {
);
await request.post(
`${IntegrationUrls.GCP_SECRET_MANAGER_URL}/v1/projects/${destinationConfig.projectId}/secrets/${key}:addVersion`,
`${getProjectUrl(secretSync)}/secrets/${key}:addVersion`,
{
payload: {
data: Buffer.from(secretMap[key].value).toString("base64")
@@ -163,15 +172,12 @@ export const GcpSyncFns = {
if (secretSync.syncOptions.disableSecretDeletion) continue;
// case: delete secret
await request.delete(
`${IntegrationUrls.GCP_SECRET_MANAGER_URL}/v1/projects/${destinationConfig.projectId}/secrets/${key}`,
{
headers: {
Authorization: `Bearer ${accessToken}`,
"Accept-Encoding": "application/json"
}
await request.delete(`${getProjectUrl(secretSync)}/secrets/${key}`, {
headers: {
Authorization: `Bearer ${accessToken}`,
"Accept-Encoding": "application/json"
}
);
});
} else if (secretMap[key].value !== gcpSecrets[key]) {
if (!secretMap[key].value) {
logger.warn(
@@ -180,7 +186,7 @@ export const GcpSyncFns = {
}
await request.post(
`${IntegrationUrls.GCP_SECRET_MANAGER_URL}/v1/projects/${destinationConfig.projectId}/secrets/${key}:addVersion`,
`${getProjectUrl(secretSync)}/secrets/${key}:addVersion`,
{
payload: {
data: Buffer.from(secretMap[key].value).toString("base64")
@@ -212,21 +218,18 @@ export const GcpSyncFns = {
},
removeSecrets: async (secretSync: TGcpSyncWithCredentials, secretMap: TSecretMap) => {
const { destinationConfig, connection } = secretSync;
const { connection } = secretSync;
const accessToken = await getGcpConnectionAuthToken(connection);
const gcpSecrets = await getGcpSecrets(accessToken, secretSync);
for await (const [key] of Object.entries(gcpSecrets)) {
if (key in secretMap) {
await request.delete(
`${IntegrationUrls.GCP_SECRET_MANAGER_URL}/v1/projects/${destinationConfig.projectId}/secrets/${key}`,
{
headers: {
Authorization: `Bearer ${accessToken}`,
"Accept-Encoding": "application/json"
}
await request.delete(`${getProjectUrl(secretSync)}/secrets/${key}`, {
headers: {
Authorization: `Bearer ${accessToken}`,
"Accept-Encoding": "application/json"
}
);
});
}
}
}
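
To make the endpoint selection above concrete, here is a minimal sketch of the two base URLs `getProjectUrl` produces; the global base URL is assumed to be the public Secret Manager endpoint, and the project and location IDs are made up.

```typescript
// Illustrative restatement of getProjectUrl from the diff above.
type GcpDestinationConfig =
  | { scope: "global"; projectId: string }
  | { scope: "region"; projectId: string; locationId: string };

const baseSecretManagerUrl = (config: GcpDestinationConfig): string =>
  config.scope === "global"
    ? `https://secretmanager.googleapis.com/v1/projects/${config.projectId}`
    : `https://secretmanager.${config.locationId}.rep.googleapis.com/v1/projects/${config.projectId}/locations/${config.locationId}`;

// baseSecretManagerUrl({ scope: "global", projectId: "my-project" })
//   -> "https://secretmanager.googleapis.com/v1/projects/my-project"
// baseSecretManagerUrl({ scope: "region", projectId: "my-project", locationId: "us-west4" })
//   -> "https://secretmanager.us-west4.rep.googleapis.com/v1/projects/my-project/locations/us-west4"
```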

View File

@@ -10,14 +10,33 @@ import {
import { TSyncOptionsConfig } from "@app/services/secret-sync/secret-sync-types";
import { SecretSync } from "../secret-sync-enums";
import { GcpSyncScope } from "./gcp-sync-enums";
import { GCPSecretManagerLocation, GcpSyncScope } from "./gcp-sync-enums";
const GcpSyncOptionsConfig: TSyncOptionsConfig = { canImportSecrets: true };
const GcpSyncDestinationConfigSchema = z.object({
scope: z.literal(GcpSyncScope.Global).describe(SecretSyncs.DESTINATION_CONFIG.GCP.scope),
projectId: z.string().min(1, "Project ID is required").describe(SecretSyncs.DESTINATION_CONFIG.GCP.projectId)
});
const GcpSyncDestinationConfigSchema = z.discriminatedUnion("scope", [
z
.object({
scope: z.literal(GcpSyncScope.Global).describe(SecretSyncs.DESTINATION_CONFIG.GCP.scope),
projectId: z.string().min(1, "Project ID is required").describe(SecretSyncs.DESTINATION_CONFIG.GCP.projectId)
})
.describe(
JSON.stringify({
title: "Global"
})
),
z
.object({
scope: z.literal(GcpSyncScope.Region).describe(SecretSyncs.DESTINATION_CONFIG.GCP.scope),
projectId: z.string().min(1, "Project ID is required").describe(SecretSyncs.DESTINATION_CONFIG.GCP.projectId),
locationId: z.nativeEnum(GCPSecretManagerLocation).describe(SecretSyncs.DESTINATION_CONFIG.GCP.locationId)
})
.describe(
JSON.stringify({
title: "Region"
})
)
]);
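
Two destination configs the discriminated union above is intended to accept, shown as a quick sketch with made-up IDs:

```typescript
// Both objects should parse against GcpSyncDestinationConfigSchema;
// the region variant additionally requires a locationId.
const globalDestination = { scope: "global", projectId: "my-project" };
const regionalDestination = { scope: "region", projectId: "my-project", locationId: "us-west4" };
```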
export const GcpSyncSchema = BaseSecretSyncSchema(SecretSync.GCPSecretManager, GcpSyncOptionsConfig).extend({
destination: z.literal(SecretSync.GCPSecretManager),

View File

@@ -43,7 +43,8 @@ func getInfisicalSdkInstance(cmd *cobra.Command) (infisicalSdk.InfisicalClientIn
}
// if the --token param is not set, we use the auth-method flag to determine the authentication method, and perform the appropriate login flow based on that
authMethod, err := cmd.Flags().GetString("auth-method")
authMethod, err := util.GetCmdFlagOrEnv(cmd, "auth-method", []string{util.INFISICAL_AUTH_METHOD_NAME})
if err != nil {
cancel()
return nil, nil, err

View File

@@ -243,6 +243,7 @@ var loginCmd = &cobra.Command{
util.AuthStrategy.GCP_IAM_AUTH: sdkAuthenticator.HandleGcpIamAuthLogin,
util.AuthStrategy.AWS_IAM_AUTH: sdkAuthenticator.HandleAwsIamAuthLogin,
util.AuthStrategy.OIDC_AUTH: sdkAuthenticator.HandleOidcAuthLogin,
util.AuthStrategy.JWT_AUTH: sdkAuthenticator.HandleJwtAuthLogin,
}
credential, err := authStrategies[strategy]()

View File

@@ -12,6 +12,7 @@ import (
"io"
"net"
"net/http"
"net/url"
"os"
"strings"
"sync"
@@ -25,9 +26,13 @@ func handleConnection(ctx context.Context, quicConn quic.Connection) {
log.Info().Msgf("New connection from: %s", quicConn.RemoteAddr().String())
// Use WaitGroup to track all streams
var wg sync.WaitGroup
contextWithTimeout, cancel := context.WithTimeout(ctx, 30*time.Second)
defer cancel()
for {
// Accept the first stream, which we'll use for commands
stream, err := quicConn.AcceptStream(ctx)
stream, err := quicConn.AcceptStream(contextWithTimeout)
if err != nil {
log.Printf("Failed to accept QUIC stream: %v", err)
break
@@ -51,7 +56,12 @@ func handleStream(stream quic.Stream, quicConn quic.Connection) {
// Use buffered reader for better handling of fragmented data
reader := bufio.NewReader(stream)
defer stream.Close()
defer func() {
log.Info().Msgf("Closing stream %d", streamID)
if stream != nil {
stream.Close()
}
}()
for {
msg, err := reader.ReadBytes('\n')
@@ -106,6 +116,11 @@ func handleStream(stream quic.Stream, quicConn quic.Connection) {
targetURL := string(argParts[0])
if !isValidURL(targetURL) {
log.Error().Msgf("Invalid target URL: %s", targetURL)
return
}
// Parse optional parameters
var caCertB64, verifyParam string
for _, part := range argParts[1:] {
@@ -160,7 +175,6 @@ func handleHTTPProxy(stream quic.Stream, reader *bufio.Reader, targetURL string,
}
}
// set certificate verification based on what the gateway client sent
if verifyParam != "" {
tlsConfig.InsecureSkipVerify = verifyParam == "false"
log.Info().Msgf("TLS verification set to: %s", verifyParam)
@@ -169,82 +183,94 @@ func handleHTTPProxy(stream quic.Stream, reader *bufio.Reader, targetURL string,
transport.TLSClientConfig = tlsConfig
}
// read and parse the http request from the stream
req, err := http.ReadRequest(reader)
if err != nil {
return fmt.Errorf("failed to read HTTP request: %v", err)
}
actionHeader := req.Header.Get("x-infisical-action")
if actionHeader != "" {
if actionHeader == "inject-k8s-sa-auth-token" {
token, err := os.ReadFile("/var/run/secrets/kubernetes.io/serviceaccount/token")
if err != nil {
stream.Write([]byte(buildHttpInternalServerError("failed to read k8s sa auth token")))
return fmt.Errorf("failed to read k8s sa auth token: %v", err)
}
req.Header.Set("Authorization", fmt.Sprintf("Bearer %s", string(token)))
log.Info().Msgf("Injected gateway k8s SA auth token in request to %s", targetURL)
}
req.Header.Del("x-infisical-action")
}
var targetFullURL string
if strings.HasPrefix(targetURL, "http://") || strings.HasPrefix(targetURL, "https://") {
baseURL := strings.TrimSuffix(targetURL, "/")
targetFullURL = baseURL + req.URL.Path
if req.URL.RawQuery != "" {
targetFullURL += "?" + req.URL.RawQuery
}
} else {
baseURL := strings.TrimSuffix("http://"+targetURL, "/")
targetFullURL = baseURL + req.URL.Path
if req.URL.RawQuery != "" {
targetFullURL += "?" + req.URL.RawQuery
}
}
// create the request to the target
proxyReq, err := http.NewRequest(req.Method, targetFullURL, req.Body)
proxyReq.Header = req.Header.Clone()
if err != nil {
return fmt.Errorf("failed to create proxy request: %v", err)
}
log.Info().Msgf("Proxying %s %s to %s", req.Method, req.URL.Path, targetFullURL)
client := &http.Client{
Transport: transport,
Timeout: 30 * time.Second,
}
// make the request to the target
resp, err := client.Do(proxyReq)
if err != nil {
stream.Write([]byte(buildHttpInternalServerError(fmt.Sprintf("failed to reach target due to networking error: %s", err.Error()))))
return fmt.Errorf("failed to reach target due to networking error: %v", err)
// Loop to handle multiple HTTP requests on the same stream
for {
req, err := http.ReadRequest(reader)
if err != nil {
if errors.Is(err, io.EOF) {
log.Info().Msg("Client closed HTTP connection")
return nil
}
return fmt.Errorf("failed to read HTTP request: %v", err)
}
log.Info().Msgf("Received HTTP request: %s", req.URL.Path)
actionHeader := req.Header.Get("x-infisical-action")
if actionHeader != "" {
if actionHeader == "inject-k8s-sa-auth-token" {
token, err := os.ReadFile("/var/run/secrets/kubernetes.io/serviceaccount/token")
if err != nil {
stream.Write([]byte(buildHttpInternalServerError("failed to read k8s sa auth token")))
continue // Continue to next request instead of returning
}
req.Header.Set("Authorization", fmt.Sprintf("Bearer %s", string(token)))
log.Info().Msgf("Injected gateway k8s SA auth token in request to %s", targetURL)
}
req.Header.Del("x-infisical-action")
}
// Build full target URL
var targetFullURL string
if strings.HasPrefix(targetURL, "http://") || strings.HasPrefix(targetURL, "https://") {
baseURL := strings.TrimSuffix(targetURL, "/")
targetFullURL = baseURL + req.URL.Path
if req.URL.RawQuery != "" {
targetFullURL += "?" + req.URL.RawQuery
}
} else {
baseURL := strings.TrimSuffix("http://"+targetURL, "/")
targetFullURL = baseURL + req.URL.Path
if req.URL.RawQuery != "" {
targetFullURL += "?" + req.URL.RawQuery
}
}
// create the request to the target
proxyReq, err := http.NewRequest(req.Method, targetFullURL, req.Body)
if err != nil {
log.Error().Msgf("Failed to create proxy request: %v", err)
stream.Write([]byte(buildHttpInternalServerError("failed to create proxy request")))
continue // Continue to next request
}
proxyReq.Header = req.Header.Clone()
log.Info().Msgf("Proxying %s %s to %s", req.Method, req.URL.Path, targetFullURL)
resp, err := client.Do(proxyReq)
if err != nil {
log.Error().Msgf("Failed to reach target: %v", err)
stream.Write([]byte(buildHttpInternalServerError(fmt.Sprintf("failed to reach target due to networking error: %s", err.Error()))))
continue // Continue to next request
}
// Write the entire response (status line, headers, body) to the stream
// http.Response.Write handles this for "Connection: close" correctly.
// For other connection tokens, manual removal might be needed if they cause issues with QUIC.
// For a simple proxy, this is generally sufficient.
resp.Header.Del("Connection") // Good practice for proxies
log.Info().Msgf("Writing response to stream: %s", resp.Status)
if err := resp.Write(stream); err != nil {
log.Error().Err(err).Msg("Failed to write response to stream")
resp.Body.Close()
return fmt.Errorf("failed to write response to stream: %w", err)
}
resp.Body.Close()
// Check if client wants to close connection
if req.Header.Get("Connection") == "close" {
log.Info().Msg("Client requested connection close")
return nil
}
}
defer resp.Body.Close()
// Write the entire response (status line, headers, body) to the stream
// http.Response.Write handles this for "Connection: close" correctly.
// For other connection tokens, manual removal might be needed if they cause issues with QUIC.
// For a simple proxy, this is generally sufficient.
resp.Header.Del("Connection") // Good practice for proxies
log.Info().Msgf("Writing response to stream: %s", resp.Status)
if err := resp.Write(stream); err != nil {
// If writing the response fails, the connection to the client might be broken.
// Logging the error is important. The original error will be returned.
log.Error().Err(err).Msg("Failed to write response to stream")
return fmt.Errorf("failed to write response to stream: %w", err)
}
return nil
}
func buildHttpInternalServerError(message string) string {
@@ -255,6 +281,11 @@ type CloseWrite interface {
CloseWrite() error
}
func isValidURL(str string) bool {
u, err := url.Parse(str)
return err == nil && u.Scheme != "" && u.Host != ""
}
func CopyDataFromQuicToTcp(quicStream quic.Stream, tcpConn net.Conn) {
// Create a WaitGroup to wait for both copy operations
var wg sync.WaitGroup

View File

@@ -13,6 +13,8 @@ const (
VAULT_BACKEND_AUTO_MODE = "auto"
VAULT_BACKEND_FILE_MODE = "file"
INFISICAL_AUTH_METHOD_NAME = "INFISICAL_AUTH_METHOD"
// Universal Auth
INFISICAL_UNIVERSAL_AUTH_CLIENT_ID_NAME = "INFISICAL_UNIVERSAL_AUTH_CLIENT_ID"
INFISICAL_UNIVERSAL_AUTH_CLIENT_SECRET_NAME = "INFISICAL_UNIVERSAL_AUTH_CLIENT_SECRET"
@@ -29,6 +31,8 @@ const (
// JWT AUTH
INFISICAL_JWT_NAME = "INFISICAL_JWT"
INFISICAL_GATEWAY_TOKEN_NAME_LEGACY = "TOKEN" // backwards compatibility with gateway helm chart, where token was the only supported auth method
// Generic env variable used for auth methods that require a machine identity ID
INFISICAL_MACHINE_IDENTITY_ID_NAME = "INFISICAL_MACHINE_IDENTITY_ID"

View File

@@ -96,6 +96,11 @@ func GetInfisicalToken(cmd *cobra.Command) (token *models.TokenDetails, err erro
infisicalToken = os.Getenv(INFISICAL_TOKEN_NAME)
source = fmt.Sprintf("%s environment variable", INFISICAL_TOKEN_NAME)
}
if infisicalToken == "" { // if its still empty, check for the `TOKEN` environment variable (for gateway helm)
infisicalToken = os.Getenv(INFISICAL_GATEWAY_TOKEN_NAME_LEGACY)
source = fmt.Sprintf("%s environment variable", INFISICAL_GATEWAY_TOKEN_NAME_LEGACY)
}
}
if infisicalToken == "" { // If it's empty, we return nothing at all.

View File

@@ -26,28 +26,215 @@ Run the Infisical gateway in the foreground or manage its systemd service instal
Run the Infisical gateway in the foreground. The gateway will connect to the relay service and maintain a persistent connection.
```bash
infisical gateway --token=<token> --domain=<domain>
infisical gateway --domain=<domain> --auth-method=<auth-method>
```
### Flags
### Authentication
<Accordion title="--token">
The machine identity access token to authenticate with Infisical.
The Infisical CLI supports multiple authentication methods. Below are the available methods, along with their respective flags.
<AccordionGroup>
<Accordion title="Universal Auth">
The Universal Auth method is a simple and secure way to authenticate with Infisical. It requires a machine identity client ID and client secret.
<ParamField query="Flags">
<Expandable title="properties">
<ParamField query="client-id" type="string" required>
Your machine identity client ID.
</ParamField>
<ParamField query="client-secret" type="string" required>
Your machine identity client secret.
</ParamField>
<ParamField query="auth-method" type="string" required>
The authentication method to use. Must be `universal-auth` when using Universal Auth.
</ParamField>
</Expandable>
</ParamField>
```bash
# Example
infisical gateway --token=<token>
infisical gateway --auth-method=universal-auth --client-id=<client-id> --client-secret=<client-secret>
```
You may also expose the token to the CLI by setting the environment variable `INFISICAL_TOKEN` before executing the gateway command.
</Accordion>
<Accordion title="Native Kubernetes">
The Native Kubernetes method is used to authenticate with Infisical when running in a Kubernetes environment. It authenticates using a Kubernetes service account token.
<ParamField query="Flags">
<Expandable title="properties">
<ParamField query="machine-identity-id" type="string" required>
Your machine identity ID.
</ParamField>
<ParamField query="service-account-token-path" type="string" optional>
Path to the Kubernetes service account token to use. Default: `/var/run/secrets/kubernetes.io/serviceaccount/token`.
</ParamField>
<ParamField query="auth-method" type="string" required>
The authentication method to use. Must be `kubernetes` when using Native Kubernetes.
</ParamField>
</Expandable>
</ParamField>
```bash
infisical gateway --auth-method=kubernetes --machine-identity-id=<machine-identity-id>
```
</Accordion>
<Accordion title="Native Azure">
The Native Azure method is used to authenticate with Infisical when running in an Azure environment.
<ParamField query="Flags">
<Expandable title="properties">
<ParamField query="machine-identity-id" type="string" required>
Your machine identity ID.
</ParamField>
<ParamField query="auth-method" type="string" required>
The authentication method to use. Must be `azure` when using Native Azure.
</ParamField>
</Expandable>
</ParamField>
```bash
infisical gateway --auth-method=azure --machine-identity-id=<machine-identity-id>
```
</Accordion>
<Accordion title="Native GCP ID Token">
The Native GCP ID Token method is used to authenticate with Infisical when running in a GCP environment.
<ParamField query="Flags">
<Expandable title="properties">
<ParamField query="machine-identity-id" type="string" required>
Your machine identity ID.
</ParamField>
<ParamField query="auth-method" type="string" required>
The authentication method to use. Must be `gcp-id-token` when using Native GCP ID Token.
</ParamField>
</Expandable>
</ParamField>
```bash
infisical gateway --auth-method=gcp-id-token --machine-identity-id=<machine-identity-id>
```
</Accordion>
<Accordion title="GCP IAM">
The GCP IAM method is used to authenticate with Infisical using a GCP service account key.
<ParamField query="Flags">
<Expandable title="properties">
<ParamField query="machine-identity-id" type="string" required>
Your machine identity ID.
</ParamField>
<ParamField query="service-account-key-file-path" type="string" required>
Path to your GCP service account key file _(Must be in JSON format!)_
</ParamField>
<ParamField query="auth-method" type="string" required>
The authentication method to use. Must be `gcp-iam` when using GCP IAM.
</ParamField>
</Expandable>
</ParamField>
```bash
infisical gateway --auth-method=gcp-iam --machine-identity-id=<machine-identity-id> --service-account-key-file-path=<service-account-key-file-path>
```
</Accordion>
<Accordion title="Native AWS IAM">
The AWS IAM method is used to authenticate with Infisical using an AWS IAM role while running in an AWS environment such as EC2 or Lambda.
<ParamField query="Flags">
<Expandable title="properties">
<ParamField query="machine-identity-id" type="string" required>
Your machine identity ID.
</ParamField>
<ParamField query="auth-method" type="string" required>
The authentication method to use. Must be `aws-iam` when using Native AWS IAM.
</ParamField>
</Expandable>
</ParamField>
```bash
infisical gateway --auth-method=aws-iam --machine-identity-id=<machine-identity-id>
```
</Accordion>
<Accordion title="OIDC Auth">
The OIDC Auth method is used to authenticate with Infisical via OIDC identity tokens.
<ParamField query="Flags">
<Expandable title="properties">
<ParamField query="machine-identity-id" type="string" required>
Your machine identity ID.
</ParamField>
<ParamField query="jwt" type="string" required>
The OIDC JWT from the identity provider.
</ParamField>
<ParamField query="auth-method" type="string" required>
The authentication method to use. Must be `oidc-auth` when using OIDC Auth.
</ParamField>
</Expandable>
</ParamField>
```bash
infisical gateway --auth-method=oidc-auth --machine-identity-id=<machine-identity-id> --jwt=<oidc-jwt>
```
</Accordion>
<Accordion title="JWT Auth">
The JWT Auth method is used to authenticate with Infisical via a JWT.
<ParamField query="Flags">
<Expandable title="properties">
<ParamField query="jwt" type="string" required>
The JWT token to use for authentication.
</ParamField>
<ParamField query="machine-identity-id" type="string" required>
Your machine identity ID.
</ParamField>
<ParamField query="auth-method" type="string" required>
The authentication method to use. Must be `jwt-auth` when using JWT Auth.
</ParamField>
</Expandable>
</ParamField>
```bash
infisical gateway --auth-method=jwt-auth --jwt=<jwt> --machine-identity-id=<machine-identity-id>
```
</Accordion>
<Accordion title="Token Auth">
You can use the `INFISICAL_TOKEN` environment variable to authenticate with Infisical using a raw machine identity access token.
<ParamField query="Flags">
<Expandable title="properties">
<ParamField query="token" type="string" required>
The machine identity access token to use for authentication.
</ParamField>
</Expandable>
</ParamField>
```bash
infisical gateway --token=<token>
```
</Accordion>
</AccordionGroup>
### Other Flags
<Accordion title="--domain">
Domain of your self-hosted Infisical instance.
```bash
# Example
sudo infisical gateway install --domain=https://app.your-domain.com
infisical gateway --domain=https://app.your-domain.com
```
</Accordion>
</Accordion>

View File

@@ -190,7 +190,7 @@ The Infisical CLI supports multiple authentication methods. Below are the availa
</Steps>
</Accordion>
<Accordion title="OIDC Auth">
<Accordion title="OIDC Auth">
The OIDC Auth method is used to authenticate with Infisical via OIDC identity tokens.
<ParamField query="Flags">
@@ -198,7 +198,7 @@ The Infisical CLI supports multiple authentication methods. Below are the availa
<ParamField query="machine-identity-id" type="string" required>
Your machine identity ID.
</ParamField>
<ParamField query="oidc-jwt" type="string" required>
<ParamField query="jwt" type="string" required>
The OIDC JWT from the identity provider.
</ParamField>
</Expandable>
@@ -212,11 +212,35 @@ The Infisical CLI supports multiple authentication methods. Below are the availa
Run the `login` command with the following flags to obtain an access token:
```bash
infisical login --method=oidc-auth --machine-identity-id=<machine-identity-id> --oidc-jwt=<oidc-jwt>
infisical login --method=oidc-auth --machine-identity-id=<machine-identity-id> --jwt=<oidc-jwt>
```
</Step>
</Steps>
</Accordion>
<Accordion title="JWT Auth">
The JWT Auth method is used to authenticate with Infisical via a JWT.
<ParamField query="Flags">
<Expandable title="properties">
<ParamField query="jwt" type="string" required>
The JWT token to use for authentication.
</ParamField>
<ParamField query="machine-identity-id" type="string" required>
Your machine identity ID.
</ParamField>
</Expandable>
</ParamField>
<Steps>
<Step title="Obtain an access token">
Run the `login` command with the following flags to obtain an access token:
```bash
infisical login --method=jwt-auth --jwt=<jwt> --machine-identity-id=<machine-identity-id>
```
</Step>
</Steps>
</Accordion>
</AccordionGroup>

View File

@@ -1,5 +1,5 @@
---
title: "Machine identities"
description: "Learn how to set metadata and leverage authentication attributes for machine identities."
---
@@ -25,7 +25,7 @@ Machine identities can have metadata set manually, just like users. In addition,
#### Accessing Attributes From Machine Identity Login
When machine identities authenticate, they may receive additional payloads/attributes from the service provider.
For methods like OIDC, these come as claims in the token and can be made available in your policies.
<Tabs>
@@ -50,17 +50,29 @@ For methods like OIDC, these come as claims in the token and can be made availab
```
You might map:
- **department:** to `user.department`
- **role:** to `user.role`
Once configured, these attributes become available in your policies using the following format:
```
{{ identity.auth.oidc.claims.<permission claim name> }}
```
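For instance, with the `department` claim mapped in the example above, a policy value could reference the claim directly:
```
{{ identity.auth.oidc.claims.department }}
```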
<img src="/images/platform/access-controls/abac-policy-oidc-format.png" />
</Tab>
<Tab title="Kubernetes Login Attributes">
For identities authenticated using Kubernetes, the service account's namespace and name are available in their policy and can be accessed as follows:
```
{{ identity.auth.kubernetes.namespace }}
{{ identity.auth.kubernetes.name }}
```
<img src="/images/platform/access-controls/abac-policy-k8s-format.png" />
</Tab>
<Tab title="Other Authentication Method Attributes">
Apart from OIDC claims and Kubernetes login attributes, payloads on other authentication methods are not yet accessible.

View File

@@ -95,12 +95,28 @@ The Infisical AWS ElastiCache dynamic secret allows you to generate AWS ElastiCa
</Step>
<Step title="(Optional) Modify ElastiCache Statements">
![Modify ElastiCache Statements Modal](/images/platform/dynamic-secrets/modify-elasticache-statement.png)
<ParamField path="Username Template" type="string" default="{{randomUsername}}">
Specifies a template for generating usernames. This field allows customization of how usernames are automatically created.
<ParamField path="Username Template" type="string" default="{{randomUsername}}">
Specifies a template for generating usernames. This field allows customization of how usernames are automatically created.
Allowed template variables are
- `{{randomUsername}}`: Random username string
- `{{unixTimestamp}}`: Current Unix timestamp
- `{{identity.name}}`: Name of the identity that is generating the secret
- `{{random N}}`: Random string of N characters
Allowed template functions are
- `truncate`: Truncates a string to a specified length
- `replace`: Replaces a substring with another value
Examples:
```
{{randomUsername}} // 3POnzeFyK9gW2nioK0q2gMjr6CZqsRiX
{{unixTimestamp}} // 1749064158
{{identity.name}} // testuser
{{random-5}} // x9k2m
{{truncate identity.name 4}} // test
{{replace identity.name 'user' 'replace'}} // testreplace
```
</ParamField>
<ParamField path="Customize ElastiCache Statement" type="string">
If you want to provide specific privileges for the generated dynamic credentials, you can modify the ElastiCache statement to your needs. This is useful if you want to only give access to a specific resource.

View File

@@ -50,110 +50,304 @@ Replace **\<account id\>** with your AWS account id and **\<aws-scope-path\>** w
## Set up Dynamic Secrets with AWS IAM
<Tabs>
<Tab title="Assume Role (Recommended)">
Infisical will assume the provided role in your AWS account securely, without the need to share any credentials.
<Accordion title="Self-Hosted Instance">
To connect your self-hosted Infisical instance with AWS, you need to set up an AWS IAM User account that can assume the configured AWS IAM Role.
<ParamField path="Default TTL" type="string" required>
Default time-to-live for a generated secret (it is possible to modify this value after a secret is generated)
</ParamField>
If your instance is deployed on AWS, the aws-sdk will automatically retrieve the credentials. Ensure that you assign the provided permission policy to your deployed instance, such as ECS or EC2.
<ParamField path="Max TTL" type="string" required>
Maximum time-to-live for a generated secret
</ParamField>
The following steps are for instances not deployed on AWS:
<Steps>
<Step title="Create an IAM User">
Navigate to [Create IAM User](https://console.aws.amazon.com/iamv2/home#/users/create) in your AWS Console.
</Step>
<Step title="Create an Inline Policy">
Attach the following inline permission policy to the IAM User to allow it to assume any IAM Roles:
```json
{
"Version": "2012-10-17",
"Statement": [
{
"Sid": "AllowAssumeAnyRole",
"Effect": "Allow",
"Action": "sts:AssumeRole",
"Resource": "arn:aws:iam::*:role/*"
}
]
}
```
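Once the IAM Role for Infisical has been created (see the steps below), you can optionally sanity-check this setup by assuming the role with the IAM User's credentials; the role ARN and external ID here are placeholders for your own values:
```bash
# Should return temporary credentials if the inline policy and the role's trust policy are correct.
aws sts assume-role \
  --role-arn "arn:aws:iam::<account-id>:role/<role-name>" \
  --role-session-name "infisical-assume-role-check" \
  --external-id "<project-id>"
```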
</Step>
<Step title="Obtain the IAM User Credentials">
Obtain the AWS access key ID and secret access key for your IAM User by navigating to **IAM > Users > [Your User] > Security credentials > Access keys**.
<ParamField path="AWS Access Key" type="string" required>
The managing AWS IAM User Access Key
</ParamField>
![Access Key Step 1](/images/integrations/aws/integrations-aws-access-key-1.png)
![Access Key Step 2](/images/integrations/aws/integrations-aws-access-key-2.png)
![Access Key Step 3](/images/integrations/aws/integrations-aws-access-key-3.png)
</Step>
<Step title="Set Up Connection Keys">
1. Set the access key as **DYNAMIC_SECRET_AWS_ACCESS_KEY_ID**.
2. Set the secret key as **DYNAMIC_SECRET_AWS_SECRET_ACCESS_KEY**.
</Step>
</Steps>
</Accordion>
<ParamField path="AWS Secret Key" type="string" required>
The managing AWS IAM User Secret Key
</ParamField>
<Steps>
<Step title="Create the Managing User IAM Role for Infisical">
1. Navigate to the [Create IAM Role](https://console.aws.amazon.com/iamv2/home#/roles/create?step=selectEntities) page in your AWS Console.
![IAM Role Creation](/images/integrations/aws/integration-aws-iam-assume-role.png)
<ParamField path="AWS IAM Path" type="string">
[IAM AWS Path](https://aws.amazon.com/blogs/security/optimize-aws-administration-with-iam-paths/) to scope created IAM User resource access.
</ParamField>
2. Select **AWS Account** as the **Trusted Entity Type**.
3. Select **Another AWS Account** and provide the appropriate Infisical AWS Account ID: use **381492033652** for the **US region**, and **345594589636** for the **EU region**. This restricts the role to be assumed only by Infisical. If self-hosting, provide your AWS account number instead.
4. (Recommended) <strong>Enable "Require external ID"</strong> and input your **Project ID** to strengthen security and mitigate the [confused deputy problem](https://docs.aws.amazon.com/IAM/latest/UserGuide/confused-deputy.html).
5. Assign the permission policy shared in the prerequisites.
<ParamField path="AWS Region" type="string" required>
The AWS data center region.
</ParamField>
<Warning type="warning" title="Security Best Practice: Use External ID to Prevent Confused Deputy Attacks">
When configuring an IAM Role that Infisical will assume, it's highly recommended to enable the **"Require external ID"** option and specify your **Project ID**.
<ParamField path="IAM User Permission Boundary" type="string" required>
The IAM Policy ARN of the [AWS Permissions Boundary](https://docs.aws.amazon.com/IAM/latest/UserGuide/access_policies_boundaries.html) to attach to IAM users created in the role.
</ParamField>
This precaution helps protect your AWS account against the [confused deputy problem](https://docs.aws.amazon.com/IAM/latest/UserGuide/confused-deputy.html), a potential security vulnerability where Infisical could be tricked into performing actions on your behalf by an unauthorized actor.
<ParamField path="AWS IAM Groups" type="string">
The AWS IAM groups that should be assigned to the created users. Multiple values can be provided by separating them with commas
</ParamField>
<strong>Always enable "Require external ID" and use your Project ID when setting up the IAM Role.</strong>
</Warning>
</Step>
<Step title="Copy the AWS IAM Role ARN">
![Copy IAM Role ARN](/images/integrations/aws/integration-aws-iam-assume-arn.png)
</Step>
<Step title="Secret Overview Dashboard">
Navigate to the Secret Overview dashboard and select the environment to which you would like to add a dynamic secret.
</Step>
<Step title="Click on the 'Add Dynamic Secret' button">
![Add Dynamic Secret Button](../../../images/platform/dynamic-secrets/add-dynamic-secret-button.png)
</Step>
<Step title="Select AWS IAM">
![Dynamic Secret Modal](../../../images/platform/dynamic-secrets/dynamic-secret-modal-aws-iam.png)
</Step>
<Step title="Provide the inputs for dynamic secret parameters">
![Dynamic Secret Setup Modal](../../../images/platform/dynamic-secrets/dynamic-secret-setup-modal-aws-iam-assume-role.png)
<ParamField path="Secret Name" type="string" required>
Name by which you want the secret to be referenced
</ParamField>
<ParamField path="AWS Policy ARNs" type="string">
The AWS IAM managed policies that should be attached to the created users. Multiple values can be provided by separating them with commas
</ParamField>
<ParamField path="Default TTL" type="string" required>
Default time-to-live for a generated secret (it is possible to modify this value after a secret is generated)
</ParamField>
<ParamField path="AWS IAM Policy Document" type="string">
The AWS IAM inline policy that should be attached to the created users. Multiple values can be provided by separating them with commas
</ParamField>
<ParamField path="Max TTL" type="string" required>
Maximum time-to-live for a generated secret
</ParamField>
<ParamField path="Username Template" type="string" default="{{randomUsername}}">
Specifies a template for generating usernames. This field allows customization of how usernames are automatically created.
<ParamField path="Username Template" type="string" default="{{randomUsername}}">
Specifies a template for generating usernames. This field allows customization of how usernames are automatically created.
Allowed template variables are
- `{{randomUsername}}`: Random username string
- `{{unixTimestamp}}`: Current Unix timestamp
</ParamField>
Allowed template variables are
- `{{randomUsername}}`: Random username string
- `{{unixTimestamp}}`: Current Unix timestamp
- `{{identity.name}}`: Name of the identity that is generating the secret
- `{{random N}}`: Random string of N characters
Allowed template functions are
- `truncate`: Truncates a string to a specified length
- `replace`: Replaces a substring with another value
Examples:
```
{{randomUsername}} // 3POnzeFyK9gW2nioK0q2gMjr6CZqsRiX
{{unixTimestamp}} // 1749064158
{{identity.name}} // testuser
{{random-5}} // x9k2m
{{truncate identity.name 4}} // test
{{replace identity.name 'user' 'replace'}} // testreplace
```
</ParamField>
<ParamField path="Method" type="string" required>
Select *Assume Role* method.
</ParamField>
<ParamField path="Aws Role ARN" type="string" required>
The ARN of the AWS Role to assume.
</ParamField>
<ParamField path="AWS IAM Path" type="string">
[IAM AWS Path](https://aws.amazon.com/blogs/security/optimize-aws-administration-with-iam-paths/) to scope created IAM User resource access.
</ParamField>
<ParamField path="AWS Region" type="string" required>
The AWS data center region.
</ParamField>
<ParamField path="IAM User Permission Boundary" type="string" required>
The IAM Policy ARN of the [AWS Permissions Boundary](https://docs.aws.amazon.com/IAM/latest/UserGuide/access_policies_boundaries.html) to attach to IAM users created in the role.
</ParamField>
<ParamField path="AWS IAM Groups" type="string">
The AWS IAM groups that should be assigned to the created users. Multiple values can be provided by separating them with commas
</ParamField>
<ParamField path="AWS Policy ARNs" type="string">
The AWS IAM managed policies that should be attached to the created users. Multiple values can be provided by separating them with commas
</ParamField>
<ParamField path="AWS IAM Policy Document" type="string">
The AWS IAM inline policy that should be attached to the created users.
Multiple values can be provided by separating them with commas
</ParamField>
<ParamField path="Username Template" type="string" default="{{randomUsername}}">
Specifies a template for generating usernames. This field allows customization of how usernames are automatically created.
Allowed template variables are
- `{{randomUsername}}`: Random username string
- `{{unixTimestamp}}`: Current Unix timestamp
</ParamField>
</Step>
<Step title="Click 'Submit'">
After submitting the form, you will see a dynamic secret created in the dashboard.
![Dynamic Secret](../../../images/platform/dynamic-secrets/dynamic-secret.png)
</Step>
<Step title="Generate dynamic secrets">
Once you've successfully configured the dynamic secret, you're ready to generate on-demand credentials.
To do this, simply click on the 'Generate' button which appears when hovering over the dynamic secret item.
Alternatively, you can initiate the creation of a new lease by selecting 'New Lease' from the dynamic secret lease list section.
![Dynamic Secret](/images/platform/dynamic-secrets/dynamic-secret-generate.png)
![Dynamic Secret](/images/platform/dynamic-secrets/dynamic-secret-lease-empty.png)
When generating these secrets, it's important to specify a Time-to-Live (TTL) duration. This will dictate how long the credentials are valid for.
![Provision Lease](/images/platform/dynamic-secrets/provision-lease.png)
<Tip>
Ensure that the TTL for the lease falls within the maximum TTL defined when configuring the dynamic secret in step 4.
</Tip>
Once you click the `Submit` button, a new secret lease will be generated and the credentials for it will be shown to you.
![Provision Lease](/images/platform/dynamic-secrets/lease-values-aws-iam.png)
</Step>
</Steps>
</Tab>
<Tab title="Access Key">
Infisical will use the provided **Access Key ID** and **Secret Key** to connect to your AWS instance.
<Steps>
<Step title="Secret Overview Dashboard">
Navigate to the Secret Overview dashboard and select the environment to which you would like to add a dynamic secret.
</Step>
<Step title="Click on the 'Add Dynamic Secret' button">
![Add Dynamic Secret Button](../../../images/platform/dynamic-secrets/add-dynamic-secret-button.png)
</Step>
<Step title="Select AWS IAM">
![Dynamic Secret Modal](../../../images/platform/dynamic-secrets/dynamic-secret-modal-aws-iam.png)
</Step>
<Step title="Provide the inputs for dynamic secret parameters">
![Dynamic Secret Setup Modal](../../../images/platform/dynamic-secrets/dynamic-secret-setup-modal-aws-iam-access-key.png)
<ParamField path="Secret Name" type="string" required>
Name by which you want the secret to be referenced
</ParamField>
<ParamField path="Default TTL" type="string" required>
Default time-to-live for a generated secret (it is possible to modify this value after a secret is generated)
</ParamField>
<ParamField path="Max TTL" type="string" required>
Maximum time-to-live for a generated secret
</ParamField>
<ParamField path="Method" type="string" required>
Select *Access Key* method.
</ParamField>
<ParamField path="AWS Access Key" type="string" required>
The managing AWS IAM User Access Key
</ParamField>
<ParamField path="AWS Secret Key" type="string" required>
The managing AWS IAM User Secret Key
</ParamField>
<ParamField path="AWS IAM Path" type="string">
[IAM AWS Path](https://aws.amazon.com/blogs/security/optimize-aws-administration-with-iam-paths/) to scope created IAM User resource access.
</ParamField>
<ParamField path="AWS Region" type="string" required>
The AWS data center region.
</ParamField>
<ParamField path="IAM User Permission Boundary" type="string" required>
The IAM Policy ARN of the [AWS Permissions Boundary](https://docs.aws.amazon.com/IAM/latest/UserGuide/access_policies_boundaries.html) to attach to IAM users created in the role.
</ParamField>
<ParamField path="AWS IAM Groups" type="string">
The AWS IAM groups that should be assigned to the created users. Multiple values can be provided by separating them with commas
</ParamField>
<ParamField path="AWS Policy ARNs" type="string">
The AWS IAM managed policies that should be attached to the created users. Multiple values can be provided by separating them with commas
</ParamField>
<ParamField path="AWS IAM Policy Document" type="string">
The AWS IAM inline policy that should be attached to the created users.
Multiple values can be provided by separating them with commas
</ParamField>
<ParamField path="Username Template" type="string" default="{{randomUsername}}">
Specifies a template for generating usernames. This field allows customization of how usernames are automatically created.
Allowed template variables are
- `{{randomUsername}}`: Random username string
- `{{unixTimestamp}}`: Current Unix timestamp
</ParamField>
</Step>
<Step title="Click 'Submit'">
After submitting the form, you will see a dynamic secret created in the dashboard.
![Dynamic Secret](../../../images/platform/dynamic-secrets/dynamic-secret.png)
</Step>
<Step title="Generate dynamic secrets">
Once you've successfully configured the dynamic secret, you're ready to generate on-demand credentials.
To do this, simply click on the 'Generate' button which appears when hovering over the dynamic secret item.
Alternatively, you can initiate the creation of a new lease by selecting 'New Lease' from the dynamic secret lease list section.
![Dynamic Secret](/images/platform/dynamic-secrets/dynamic-secret-generate.png)
![Dynamic Secret](/images/platform/dynamic-secrets/dynamic-secret-lease-empty.png)
When generating these secrets, it's important to specify a Time-to-Live (TTL) duration. This will dictate how long the credentials are valid for.
![Provision Lease](/images/platform/dynamic-secrets/provision-lease.png)
<Tip>
Ensure that the TTL for the lease falls within the maximum TTL defined when configuring the dynamic secret in step 4.
</Tip>
Once you click the `Submit` button, a new secret lease will be generated and the credentials for it will be shown to you.
![Provision Lease](/images/platform/dynamic-secrets/lease-values-aws-iam.png)
</Step>
</Steps>
</Tab>
</Tabs>
## Audit or Revoke Leases
Once you have created one or more leases, you will be able to access them by clicking on the respective dynamic secret item on the dashboard.
This will allow you to see the lease details and delete the lease ahead of its expiration time.
![Provision Lease](/images/platform/dynamic-secrets/lease-data.png)
## Renew Leases
To extend the life of the generated dynamic secret lease past its initial time to live, simply click on the **Renew** button as illustrated below.
![Provision Lease](/images/platform/dynamic-secrets/dynamic-secret-lease-renew.png)
<Warning>
Lease renewals cannot exceed the maximum TTL set when configuring the dynamic secret.
</Warning>

View File

@@ -80,11 +80,27 @@ The above configuration allows user creation and granting permissions.
<Step title="(Optional) Modify CQL Statements">
![Modify CQL Statements Modal](../../../images/platform/dynamic-secrets/modify-cql-statements.png)
<ParamField path="Username Template" type="string" default="{{randomUsername}}">
Specifies a template for generating usernames. This field allows customization of how usernames are automatically created.
Allowed template variables are
- `{{randomUsername}}`: Random username string
- `{{unixTimestamp}}`: Current Unix timestamp
- `{{identity.name}}`: Name of the identity that is generating the secret
- `{{random N}}`: Random string of N characters
Allowed template functions are
- `truncate`: Truncates a string to a specified length
- `replace`: Replaces a substring with another value
Examples:
```
{{randomUsername}} // 3POnzeFyK9gW2nioK0q2gMjr6CZqsRiX
{{unixTimestamp}} // 1749064158
{{identity.name}} // testuser
{{random-5}} // x9k2m
{{truncate identity.name 4}} // test
{{replace identity.name 'user' 'replace'}} // testreplace
```
</ParamField>
<ParamField path="Customize CQL Statement" type="string">
If you want to provide specific privileges for the generated dynamic credentials, you can modify the CQL statement to your needs. This is useful if you want to only give access to a specific key-space(s).

View File

@@ -87,13 +87,29 @@ The port that your Elasticsearch instance is running on. _(Example: 9200)_
<ParamField path="CA(SSL)" type="string">
A CA may be required if your DB requires it for incoming connections. This is often the case when connecting to a managed service.
</ParamField>
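Before submitting, it can help to confirm that the host, port, credentials, and CA actually work together; a quick check against the cluster health endpoint (all values below are placeholders) might look like:
```bash
# Verify the managing Elasticsearch credentials and CA can reach the cluster.
curl --cacert ca.pem -u "<username>:<password>" "https://<host>:<port>/_cluster/health?pretty"
```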
<ParamField path="Username Template" type="string" default="{{randomUsername}}">
Specifies a template for generating usernames. This field allows customization of how usernames are automatically created.
<ParamField path="Username Template" type="string" default="{{randomUsername}}">
Specifies a template for generating usernames. This field allows customization of how usernames are automatically created.
Allowed template variables are
- `{{randomUsername}}`: Random username string
- `{{unixTimestamp}}`: Current Unix timestamp
- `{{identity.name}}`: Name of the identity that is generating the secret
- `{{random N}}`: Random string of N characters
Allowed template functions are
- `truncate`: Truncates a string to a specified length
- `replace`: Replaces a substring with another value
Examples:
```
{{randomUsername}} // 3POnzeFyK9gW2nioK0q2gMjr6CZqsRiX
{{unixTimestamp}} // 1749064158
{{identity.name}} // testuser
{{random-5}} // x9k2m
{{truncate identity.name 4}} // test
{{replace identity.name 'user' 'replace'}} // testreplace
```
</ParamField>
![Dynamic Secret Setup Modal](../../../images/platform/dynamic-secrets/dynamic-secret-input-modal-elastic-search.png)

View File

@@ -33,6 +33,125 @@ This feature is ideal for scenarios where you need to:
- Maintain a secure audit trail of cluster access
- Manage access to multiple Kubernetes clusters
## Prerequisites
- A Kubernetes cluster with a service account
- Cluster access token with permissions to create service account tokens
- (Optional) [Gateway](/documentation/platform/gateways/overview) for private cluster access
## RBAC Configuration
Before you can start generating dynamic service account tokens, you'll need to configure the appropriate permissions in your Kubernetes cluster. This involves setting up Role-Based Access Control (RBAC) to allow the creation and management of service account tokens.
The RBAC configuration serves a crucial security purpose: it creates a dedicated service account with minimal permissions that can only create and manage service account tokens. This follows the principle of least privilege, ensuring that the token generation process is secure and controlled.
The following RBAC configuration creates the necessary permissions for generating service account tokens:
```yaml rbac.yaml
apiVersion: rbac.authorization.k8s.io/v1
kind: ClusterRole
metadata:
name: tokenrequest
rules:
- apiGroups: [""]
resources:
- "serviceaccounts/token"
- "serviceaccounts"
verbs:
- "create"
- "get"
---
apiVersion: rbac.authorization.k8s.io/v1
kind: ClusterRoleBinding
metadata:
name: tokenrequest
roleRef:
apiGroup: rbac.authorization.k8s.io
kind: ClusterRole
name: tokenrequest
subjects:
- kind: ServiceAccount
name: infisical-token-requester
namespace: default
```
```bash
kubectl apply -f rbac.yaml
```
This configuration:
1. Creates a `ClusterRole` named `tokenrequest` that allows:
- Creating and getting service account tokens
- Getting service account information
2. Creates a `ClusterRoleBinding` that binds the role to a service account named `infisical-token-requester` in the `default` namespace
You can customize the service account name and namespace according to your needs.
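A quick way to verify that the binding took effect is to impersonate the service account and check the permission (names below match the example manifests):
```bash
# Should print "yes" once the ClusterRole and ClusterRoleBinding are applied.
kubectl auth can-i create serviceaccounts/token \
  --as=system:serviceaccount:default:infisical-token-requester
```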
## Obtaining the Cluster Token
After setting up the RBAC configuration, you need to obtain a token for the service account that will be used to create dynamic secrets. Here's how to get the token:
1. Create a service account in your Kubernetes cluster that will be used to create service account tokens:
```yaml infisical-service-account.yaml
apiVersion: v1
kind: ServiceAccount
metadata:
name: infisical-token-requester
namespace: default
```
```bash
kubectl apply -f infisical-service-account.yaml
```
2. Create a long-lived service account token using this configuration file:
```yaml service-account-token.yaml
apiVersion: v1
kind: Secret
type: kubernetes.io/service-account-token
metadata:
name: infisical-token-requester-token
annotations:
kubernetes.io/service-account.name: "infisical-token-requester"
```
```bash
kubectl apply -f service-account-token.yaml
```
3. Link the secret to the service account:
```bash
kubectl patch serviceaccount infisical-token-requester -p '{"secrets": [{"name": "infisical-token-requester-token"}]}' -n default
```
4. Retrieve the token:
```bash
kubectl get secret infisical-token-requester-token -n default -o=jsonpath='{.data.token}' | base64 --decode
```
This token will be used as the "Cluster Token" in the dynamic secret configuration.
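To confirm the retrieved token is accepted by the API server and carries the expected permission, you can query the cluster with it directly; the server URL is a placeholder, and TLS verification is skipped here only for a quick check:
```bash
TOKEN=$(kubectl get secret infisical-token-requester-token -n default -o=jsonpath='{.data.token}' | base64 --decode)
# Should print "yes" if the token can create service account tokens.
kubectl --server=https://<cluster-url> --token="$TOKEN" --insecure-skip-tls-verify=true \
  auth can-i create serviceaccounts/token
```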
## Obtaining the Cluster URL
The cluster URL is the address of your Kubernetes API server. The simplest way to find it is to use the `kubectl cluster-info` command:
```bash
kubectl cluster-info
```
This command works for all Kubernetes environments (managed services like GKE, EKS, AKS, or self-hosted clusters) and will show you the Kubernetes control plane address, which is your cluster URL.
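If you prefer a scriptable form, the API server address can also be read straight from your current kubeconfig context:
```bash
# Print just the cluster URL for the current context.
kubectl config view --minify -o jsonpath='{.clusters[0].cluster.server}'
```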
<Note>
Make sure the cluster URL is accessible from where you're running Infisical.
If you're using a private cluster, you'll need to configure a [Gateway](/documentation/platform/gateways/overview) to
access it.
</Note>
## Set up Dynamic Secrets with Kubernetes
<Steps>
@@ -45,348 +164,6 @@ This feature is ideal for scenarios where you need to:
<Step title="Select Kubernetes">
![Dynamic Secret Modal](/images/platform/dynamic-secrets/dynamic-secret-modal-kubernetes.png)
</Step>
<Step title="Choose your configuration options">
Before proceeding with the setup, you'll need to make two key decisions:
1. **Credential Type**: How you want to manage service accounts
- **Static**: Use an existing service account with predefined permissions
- **Dynamic**: Create temporary service accounts with specific role assignments
2. **Authentication Method**: How you want to authenticate with the cluster
- **Token (API)**: Use a service account token for direct API access
- **Gateway**: Use an Infisical Gateway deployed in your cluster
<Tabs>
<Tab title="Static Credentials">
Static credentials generate service account tokens for a predefined service account. This is useful when you want to:
- Generate tokens for an existing service account
- Maintain consistent permissions across token generations
- Use a service account that already has the necessary RBAC permissions
### Prerequisites
- A Kubernetes cluster with a service account
- Cluster access token with permissions to create service account tokens
- (Optional) [Gateway](/documentation/platform/gateways/overview) for private cluster access
### Authentication Setup
Choose your authentication method:
<AccordionGroup>
<Accordion title="Token (API) Authentication">
This method uses a service account token to authenticate with the Kubernetes cluster. It's suitable when:
- You want to use a specific service account token that you've created
- You're working with a public cluster or have network access to the cluster's API server
- You want to explicitly control which service account is used for operations
<Note>
With Token (API) authentication, Infisical uses the provided service account token
to make API calls to your Kubernetes cluster. This token must have the necessary
permissions to generate tokens for the target service account.
</Note>
1. Create a service account:
```yaml infisical-service-account.yaml
apiVersion: v1
kind: ServiceAccount
metadata:
name: infisical-token-requester
namespace: default
```
```bash
kubectl apply -f infisical-service-account.yaml
```
2. Set up RBAC permissions:
```yaml rbac.yaml
apiVersion: rbac.authorization.k8s.io/v1
kind: ClusterRole
metadata:
name: tokenrequest
rules:
- apiGroups: [""]
resources:
- "serviceaccounts/token"
- "serviceaccounts"
verbs:
- "create"
- "get"
---
apiVersion: rbac.authorization.k8s.io/v1
kind: ClusterRoleBinding
metadata:
name: tokenrequest
roleRef:
apiGroup: rbac.authorization.k8s.io
kind: ClusterRole
name: tokenrequest
subjects:
- kind: ServiceAccount
name: infisical-token-requester
namespace: default
```
```bash
kubectl apply -f rbac.yaml
```
3. Create and obtain the token:
```yaml service-account-token.yaml
apiVersion: v1
kind: Secret
type: kubernetes.io/service-account-token
metadata:
name: infisical-token-requester-token
annotations:
kubernetes.io/service-account.name: "infisical-token-requester"
```
```bash
kubectl apply -f service-account-token.yaml
kubectl patch serviceaccount infisical-token-requester -p '{"secrets": [{"name": "infisical-token-requester-token"}]}' -n default
kubectl get secret infisical-token-requester-token -n default -o=jsonpath='{.data.token}' | base64 --decode
```
</Accordion>
<Accordion title="Gateway Authentication">
This method uses an Infisical Gateway deployed in your Kubernetes cluster. It's ideal when:
- You want to avoid storing static service account tokens
- You prefer to use the Gateway's pre-configured service account
- You want centralized management of cluster operations
<Note>
With Gateway authentication, Infisical communicates with the Gateway, which then
uses its own service account to make API calls to the Kubernetes API server.
The Gateway's service account must have the necessary permissions to generate
tokens for the target service account.
</Note>
1. Deploy the Infisical Gateway in your cluster
2. Set up RBAC permissions for the Gateway's service account:
```yaml rbac.yaml
apiVersion: rbac.authorization.k8s.io/v1
kind: ClusterRole
metadata:
name: tokenrequest
rules:
- apiGroups: [""]
resources:
- "serviceaccounts/token"
- "serviceaccounts"
verbs:
- "create"
- "get"
---
apiVersion: rbac.authorization.k8s.io/v1
kind: ClusterRoleBinding
metadata:
name: tokenrequest
roleRef:
apiGroup: rbac.authorization.k8s.io
kind: ClusterRole
name: tokenrequest
subjects:
- kind: ServiceAccount
name: infisical-gateway
namespace: infisical
```
```bash
kubectl apply -f rbac.yaml
```
</Accordion>
</AccordionGroup>
</Tab>
<Tab title="Dynamic Credentials">
Dynamic credentials create a temporary service account, assign it to a defined role/cluster-role, and generate a service account token. This is useful when you want to:
- Create temporary service accounts with specific permissions
- Automatically clean up service accounts after token expiration
- Assign different roles to different users or applications
- Maintain strict control over service account permissions
### Prerequisites
- A Kubernetes cluster with a service account
- Cluster access token with permissions to create service accounts and manage RBAC
- (Optional) [Gateway](/documentation/platform/gateways/overview) for private cluster access
### Authentication Setup
Choose your authentication method:
<AccordionGroup>
<Accordion title="Token (API) Authentication">
This method uses a service account token to authenticate with the Kubernetes cluster. It's suitable when:
- You want to use a specific service account token that you've created
- You're working with a public cluster or have network access to the cluster's API server
- You want to explicitly control which service account is used for operations
<Note>
With Token (API) authentication, Infisical uses the provided service account token
to make API calls to your Kubernetes cluster. This token must have the necessary
permissions to create and manage service accounts, their tokens, and RBAC resources.
</Note>
1. Create a service account:
```yaml service-account.yaml
apiVersion: v1
kind: ServiceAccount
metadata:
name: infisical-token-requester
namespace: default
---
apiVersion: v1
kind: Secret
type: kubernetes.io/service-account-token
metadata:
name: infisical-token-requester-token
annotations:
kubernetes.io/service-account.name: "infisical-token-requester"
```
```bash
kubectl apply -f service-account.yaml
```
2. Set up RBAC permissions:
```yaml rbac.yaml
apiVersion: rbac.authorization.k8s.io/v1
kind: ClusterRole
metadata:
name: tokenrequest
rules:
- apiGroups: [""]
resources:
- "serviceaccounts/token"
- "serviceaccounts"
verbs:
- "create"
- "get"
- "delete"
- apiGroups: ["rbac.authorization.k8s.io"]
resources:
- "rolebindings"
- "clusterrolebindings"
verbs:
- "create"
- "delete"
---
apiVersion: rbac.authorization.k8s.io/v1
kind: ClusterRoleBinding
metadata:
name: tokenrequest
roleRef:
apiGroup: rbac.authorization.k8s.io
kind: ClusterRole
name: tokenrequest
subjects:
- kind: ServiceAccount
name: infisical-token-requester
namespace: default
---
apiVersion: rbac.authorization.k8s.io/v1
kind: ClusterRoleBinding
metadata:
name: infisical-dynamic-role-binding-sa
roleRef:
apiGroup: rbac.authorization.k8s.io
kind: ClusterRole
name: infisical-dynamic-role
subjects:
- kind: ServiceAccount
name: infisical-token-requester
namespace: default
```
```bash
kubectl apply -f rbac.yaml
```
</Accordion>
<Accordion title="Gateway Authentication">
This method uses an Infisical Gateway deployed in your Kubernetes cluster. It's ideal when:
- You want to avoid storing static service account tokens
- You prefer to use the Gateway's pre-configured service account
- You want centralized management of cluster operations
<Note>
With Gateway authentication, Infisical communicates with the Gateway, which then
uses its own service account to make API calls to the Kubernetes API server.
The Gateway's service account must have the necessary permissions to create and
manage service accounts, their tokens, and RBAC resources.
</Note>
1. Deploy the Infisical Gateway in your cluster
2. Set up RBAC permissions for the Gateway's service account:
```yaml rbac.yaml
apiVersion: rbac.authorization.k8s.io/v1
kind: ClusterRole
metadata:
name: tokenrequest
rules:
- apiGroups: [""]
resources:
- "serviceaccounts/token"
- "serviceaccounts"
verbs:
- "create"
- "get"
- "delete"
- apiGroups: ["rbac.authorization.k8s.io"]
resources:
- "rolebindings"
- "clusterrolebindings"
verbs:
- "create"
- "delete"
---
apiVersion: rbac.authorization.k8s.io/v1
kind: ClusterRoleBinding
metadata:
name: tokenrequest
roleRef:
apiGroup: rbac.authorization.k8s.io
kind: ClusterRole
name: tokenrequest
subjects:
- kind: ServiceAccount
name: infisical-gateway
namespace: infisical
---
apiVersion: rbac.authorization.k8s.io/v1
kind: ClusterRoleBinding
metadata:
name: infisical-dynamic-role-binding-sa
roleRef:
apiGroup: rbac.authorization.k8s.io
kind: ClusterRole
name: infisical-dynamic-role
subjects:
- kind: ServiceAccount
name: infisical-gateway
namespace: infisical
```
```bash
kubectl apply -f rbac.yaml
```
</Accordion>
</AccordionGroup>
<Note>
In Kubernetes RBAC, a service account can only create role bindings for resources that it has access to.
This means that if you want to create dynamic service accounts with access to certain resources,
the service account creating these bindings (either the token requester or Gateway service account)
must also have access to those same resources. For example, if you want to create dynamic service
accounts that can access secrets, the token requester service account must also have access to secrets.
</Note>
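As a concrete illustration of this constraint, suppose `infisical-dynamic-role` should grant read access to secrets (a hypothetical permission set, reusing the names from the manifests above); the token requester then needs an equivalent grant itself:
```bash
# Hypothetical example: the role that dynamic service accounts will be bound to.
kubectl create clusterrole infisical-dynamic-role --verb=get,list --resource=secrets

# The token requester must hold the same permissions to be allowed to bind them.
kubectl create clusterrolebinding infisical-token-requester-secrets \
  --clusterrole=infisical-dynamic-role \
  --serviceaccount=default:infisical-token-requester
```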
</Tab>
</Tabs>
</Step>
<Step title="Provide the inputs for dynamic secret parameters">
<ParamField path="Secret Name" type="string" required>
Name by which you want the secret to be referenced
@@ -409,62 +186,48 @@ This feature is ideal for scenarios where you need to:
<ParamField path="CA" type="string">
Custom CA certificate for the Kubernetes API server. Leave blank to use the system/public CA.
</ParamField>
<ParamField path="Auth Method" type="string" required>
Choose between Token (API) or Gateway authentication. If using Gateway, the Gateway must be deployed in your Kubernetes cluster.
</ParamField>
<ParamField path="Cluster Token" type="string" required>
Token with permissions to create service accounts and manage RBAC (required when using Token authentication)
</ParamField>
<ParamField path="Credential Type" type="string" required>
Choose between Static (predefined service account) or Dynamic (temporary service accounts with role assignments)
</ParamField>
<ParamField path="Service Account Name" type="string" required>
Name of the service account to generate tokens for (required for Static credentials)
</ParamField>
<ParamField path="Namespace" type="string" required>
Kubernetes namespace where the service account exists or will be created
</ParamField>
<ParamField path="Role Type" type="string" required>
Type of role to assign (ClusterRole or Role) (required for Dynamic credentials)
</ParamField>
<ParamField path="Role" type="string" required>
Name of the role to assign to the temporary service account (required for Dynamic credentials)
</ParamField>
<ParamField path="Audiences" type="array">
Optional list of audiences to include in the generated token
</ParamField>
![Dynamic Secret Setup Modal](../../../images/platform/dynamic-secrets/dynamic-secret-setup-modal-kubernetes-1.png)
![Dynamic Secret Setup Modal](../../../images/platform/dynamic-secrets/dynamic-secret-setup-modal-kubernetes-2.png)
![Dynamic Secret Setup Modal](../../../images/platform/dynamic-secrets/dynamic-secret-setup-modal-kubernetes.png)
</Step>
<Step title="Click 'Submit'">
After submitting the form, you will see a dynamic secret created in the dashboard.
</Step>
<Step title="Generate dynamic secrets">
Once you've successfully configured the dynamic secret, you're ready to generate on-demand service account tokens.
To do this, simply click on the 'Generate' button which appears when hovering over the dynamic secret item.
Alternatively, you can initiate the creation of a new lease by selecting 'New Lease' from the dynamic secret lease list section.
![Dynamic Secret](/images/platform/dynamic-secrets/dynamic-secret-generate.png)
![Dynamic Secret](/images/platform/dynamic-secrets/dynamic-secret-lease-empty.png)
When generating these secrets, it's important to specify a Time-to-Live (TTL) duration. This will dictate how long the credentials are valid for.
![Provision Lease](/images/platform/dynamic-secrets/provision-lease.png)
<Tip>
Ensure that the TTL for the lease falls within the maximum TTL defined when configuring the dynamic secret.
</Tip>
Once you click the `Submit` button, a new secret lease will be generated and the service account token will be shown to you.
![Provision Lease](/images/platform/dynamic-secrets/kubernetes-lease-value.png)
</Step>
</Steps>
## Generate and Manage Tokens
Once you've successfully configured the dynamic secret, you're ready to generate on-demand service account tokens.
To do this, simply click on the 'Generate' button which appears when hovering over the dynamic secret item.
Alternatively, you can initiate the creation of a new lease by selecting 'New Lease' from the dynamic secret lease list section.
![Dynamic Secret](/images/platform/dynamic-secrets/dynamic-secret-generate.png)
![Dynamic Secret](/images/platform/dynamic-secrets/dynamic-secret-lease-empty.png)
When generating these secrets, it's important to specify a Time-to-Live (TTL) duration. This will dictate how long the credentials are valid for.
![Provision Lease](/images/platform/dynamic-secrets/provision-lease.png)
<Tip>
Ensure that the TTL for the lease falls within the maximum TTL defined when
configuring the dynamic secret.
</Tip>
Once you click the `Submit` button, a new secret lease will be generated and the service account token will be shown to you.
![Provision Lease](/images/platform/dynamic-secrets/kubernetes-lease-value.png)
## Audit or Revoke Leases
Once you have created one or more leases, you will be able to access them by clicking on the respective dynamic secret item on the dashboard.

View File

@@ -123,13 +123,29 @@ The Infisical LDAP dynamic secret allows you to generate user credentials on dem
changetype: delete
```
</ParamField>
<ParamField path="Username Template" type="string" default="{{randomUsername}}">
<ParamField path="Username Template" type="string" default="{{randomUsername}}">
Specifies a template for generating usernames. This field allows customization of how usernames are automatically created.
Allowed template variables are
- `{{randomUsername}}`: Random username string
- `{{unixTimestamp}}`: Current Unix timestamp
- `{{identity.name}}`: Name of the identity that is generating the secret
- `{{random N}}`: Random string of N characters
Allowed template functions are
- `truncate`: Truncates a string to a specified length
- `replace`: Replaces a substring with another value
Examples:
```
{{randomUsername}} // 3POnzeFyK9gW2nioK0q2gMjr6CZqsRiX
{{unixTimestamp}} // 1749064158
{{identity.name}} // testuser
{{random-5}} // x9k2m
{{truncate identity.name 4}} // test
{{replace identity.name 'user' 'replace'}} // testreplace
```
</ParamField>
</Step>
<Step title="Click `Submit`">

View File

@@ -63,13 +63,29 @@ Create a project scoped API Key with the required permission in your Mongo Atlas
![Modify Scope Modal](../../../images/platform/dynamic-secrets/advanced-option-atlas.png)
<ParamField path="Username Template" type="string" default="{{randomUsername}}">
Specifies a template for generating usernames. This field allows customization of how usernames are automatically created.
<ParamField path="Username Template" type="string" default="{{randomUsername}}">
Specifies a template for generating usernames. This field allows customization of how usernames are automatically created.
Allowed template variables are
- `{{randomUsername}}`: Random username string
- `{{unixTimestamp}}`: Current Unix timestamp
- `{{identity.name}}`: Name of the identity that is generating the secret
- `{{random N}}`: Random string of N characters
Allowed template functions are
- `truncate`: Truncates a string to a specified length
- `replace`: Replaces a substring with another value
Examples:
```
{{randomUsername}} // 3POnzeFyK9gW2nioK0q2gMjr6CZqsRiX
{{unixTimestamp}} // 1749064158
{{identity.name}} // testuser
{{random-5}} // x9k2m
{{truncate identity.name 4}} // test
{{replace identity.name 'user' 'replace'}} // testreplace
```
</ParamField>
<ParamField path="Customize Scope" type="string">
List that contains clusters, MongoDB Atlas Data Lakes, and MongoDB Atlas Streams Instances that this database user can access. If omitted, MongoDB Cloud grants the database user access to all the clusters, MongoDB Atlas Data Lakes, and MongoDB Atlas Streams Instances in the project.

View File

@@ -66,12 +66,28 @@ Create a user with the required permission in your MongoDB instance. This user w
<ParamField path="CA(SSL)" type="string">
A CA may be required if your DB requires it for incoming connections.
</ParamField>
<ParamField path="Username Template" type="string" default="{{randomUsername}}">
<ParamField path="Username Template" type="string" default="{{randomUsername}}">
Specifies a template for generating usernames. This field allows customization of how usernames are automatically created.
Allowed template variables are
- `{{randomUsername}}`: Random username string
- `{{unixTimestamp}}`: Current Unix timestamp
- `{{identity.name}}`: Name of the identity that is generating the secret
- `{{random N}}`: Random string of N characters
Allowed template functions are
- `truncate`: Truncates a string to a specified length
- `replace`: Replaces a substring with another value
Examples:
```
{{randomUsername}} // 3POnzeFyK9gW2nioK0q2gMjr6CZqsRiX
{{unixTimestamp}} // 1749064158
{{identity.name}} // testuser
{{random-5}} // x9k2m
{{truncate identity.name 4}} // test
{{replace identity.name 'user' 'replace'}} // testreplace
```
</ParamField>
![Dynamic Secret Setup Modal](../../../images/platform/dynamic-secrets/dynamic-secret-mongodb.png)

View File

@@ -9,7 +9,6 @@ The Infisical MS SQL dynamic secret allows you to generate Microsoft SQL server
Create a user with the required permission in your SQL instance. This user will be used to create new accounts on-demand.
## Set up Dynamic Secrets with MS SQL
<Steps>
@@ -27,104 +26,123 @@ Create a user with the required permission in your SQL instance. This user will
Name by which you want the secret to be referenced
</ParamField>
<ParamField path="Default TTL" type="string" required>
Default time-to-live for a generated secret (it is possible to modify this value after a secret is generated)
</ParamField>
<ParamField path="Default TTL" type="string" required>
Default time-to-live for a generated secret (it is possible to modify this value after a secret is generated)
</ParamField>
<ParamField path="Max TTL" type="string" required>
Maximum time-to-live for a generated secret
</ParamField>
<ParamField path="Max TTL" type="string" required>
Maximum time-to-live for a generated secret
</ParamField>
<ParamField path="Metadata" type="list" required>
List of key/value metadata pairs
</ParamField>
<ParamField path="Metadata" type="list" required>
List of key/value metadata pairs
</ParamField>
<ParamField path="Service" type="string" required>
Choose the service you want to generate dynamic secrets for. This must be selected as **MS SQL**.
</ParamField>
<ParamField path="Service" type="string" required>
Choose the service you want to generate dynamic secrets for. This must be selected as **MS SQL**.
</ParamField>
<ParamField path="Host" type="string" required>
Database host
</ParamField>
<ParamField path="Host" type="string" required>
Database host
</ParamField>
<ParamField path="Port" type="number" required>
Database port
</ParamField>
<ParamField path="Port" type="number" required>
Database port
</ParamField>
<ParamField path="User" type="string" required>
Username that will be used to create dynamic secrets
</ParamField>
<ParamField path="User" type="string" required>
Username that will be used to create dynamic secrets
</ParamField>
<ParamField path="Password" type="string" required>
Password that will be used to create dynamic secrets
</ParamField>
<ParamField path="Password" type="string" required>
Password that will be used to create dynamic secrets
</ParamField>
<ParamField path="Database Name" type="string" required>
Name of the database for which you want to create dynamic secrets
</ParamField>
<ParamField path="Database Name" type="string" required>
Name of the database for which you want to create dynamic secrets
</ParamField>
<ParamField path="CA(SSL)" type="string">
A CA may be required if your DB requires it for incoming connections. AWS RDS instances with default settings will require a CA which can be downloaded [here](https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/UsingWithRDS.SSL.html#UsingWithRDS.SSL.CertificatesAllRegions).
</ParamField>
<ParamField path="CA(SSL)" type="string">
A CA may be required if your DB requires it for incoming connections. AWS RDS instances with default settings will requires a CA which can be downloaded [here](https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/UsingWithRDS.SSL.html#UsingWithRDS.SSL.CertificatesAllRegions).
</ParamField>
![Dynamic Secret Setup Modal](../../../images/platform/dynamic-secrets/dynamic-secret-setup-modal-mssql.png)
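Before submitting, it can be worth confirming that the managing user can actually reach the database from a machine with network access to it; a quick check with `sqlcmd` (all values below are placeholders) might look like:
```bash
# Verify connectivity and credentials for the managing SQL account.
sqlcmd -S <host>,<port> -U <user> -P '<password>' -d <database-name> -Q "SELECT 1"
```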
</Step>
<Step title="(Optional) Modify SQL Statements">
![Modify SQL Statements Modal](../../../images/platform/dynamic-secrets/modify-sql-statements-mssql.png)
<ParamField path="Username Template" type="string" default="{{randomUsername}}">
Specifies a template for generating usernames. This field allows customization of how usernames are automatically created.
<ParamField path="Username Template" type="string" default="{{randomUsername}}">
Specifies a template for generating usernames. This field allows customization of how usernames are automatically created.
Allowed template variables are
- `{{randomUsername}}`: Random username string
- `{{unixTimestamp}}`: Current Unix timestamp
- `{{identity.name}}`: Name of the identity that is generating the secret
- `{{random N}}`: Random string of N characters
Allowed template functions are
- `truncate`: Truncates a string to a specified length
- `replace`: Replaces a substring with another value
Examples:
```
{{randomUsername}} // 3POnzeFyK9gW2nioK0q2gMjr6CZqsRiX
{{unixTimestamp}} // 1749064158
{{identity.name}} // testuser
{{random-5}} // x9k2m
{{truncate identity.name 4}} // test
{{replace identity.name 'user' 'replace'}} // testreplace
```
</ParamField>
<ParamField path="Customize SQL Statement" type="string">
If you want to provide specific privileges for the generated dynamic credentials, you can modify the SQL statement to your needs. This is useful if you want to only give access to a specific table(s).
</ParamField>
</Step>
<Step title="Click 'Submit'">
After submitting the form, you will see a dynamic secret created in the dashboard.
<Note>
If this step fails, you may have to add the CA certificate.
</Note>
![Dynamic Secret](../../../images/platform/dynamic-secrets/dynamic-secret.png)
</Step>
<Step title="Generate dynamic secrets">
Once you've successfully configured the dynamic secret, you're ready to generate on-demand credentials.
To do this, simply click on the 'Generate' button which appears when hovering over the dynamic secret item.
Alternatively, you can initiate the creation of a new lease by selecting 'New Lease' from the dynamic secret lease list section.
![Dynamic Secret](/images/platform/dynamic-secrets/dynamic-secret-generate.png)
![Dynamic Secret](/images/platform/dynamic-secrets/dynamic-secret-lease-empty.png)
When generating these secrets, it's important to specify a Time-to-Live (TTL) duration. This will dictate how long the credentials are valid for.
![Provision Lease](/images/platform/dynamic-secrets/provision-lease.png)
<Tip>
Ensure that the TTL for the lease falls within the maximum TTL defined when configuring the dynamic secret.
</Tip>
Once you click the `Submit` button, a new secret lease will be generated and the credentials for it will be shown to you.
![Provision Lease](/images/platform/dynamic-secrets/lease-values.png)
</Step>
</Steps>
## Audit or Revoke Leases
Once you have created one or more leases, you will be able to access them by clicking on the respective dynamic secret item on the dashboard.
This will allow you to see the expiration time of the lease or delete the lease before it expires.
![Provision Lease](/images/platform/dynamic-secrets/lease-data.png)
## Renew Leases
To extend the life of the generated dynamic secret lease past its initial time to live, simply click on the **Renew** button as illustrated below.
![Provision Lease](/images/platform/dynamic-secrets/dynamic-secret-lease-renew.png)
<Warning>
Lease renewals cannot exceed the maximum TTL set when configuring the dynamic secret.
</Warning>

View File

@@ -69,15 +69,28 @@ Create a user with the required permission in your SQL instance. This user will
</Step>
<Step title="(Optional) Modify SQL Statements">
![Modify SQL Statements Modal](../../../images/platform/dynamic-secrets/modify-sql-statement-mysql.png)
<ParamField path="Username Template" type="string" default="{{randomUsername}}">
Specifies a template for generating usernames. This field allows customization of how usernames are automatically created.
<ParamField path="Username Template" type="string" default="{{randomUsername}}">
Specifies a template for generating usernames. This field allows customization of how usernames are automatically created.
Allowed template variables are
- `{{randomUsername}}`: Random username string
- `{{unixTimestamp}}`: Current Unix timestamp
</ParamField>
<ParamField path="Customize SQL Statement" type="string">
If you want to provide specific privileges for the generated dynamic credentials, you can modify the SQL statement to your needs. This is useful if you want to only give access to a specific table(s).
Allowed template variables are
- `{{randomUsername}}`: Random username string
- `{{unixTimestamp}}`: Current Unix timestamp
- `{{identity.name}}`: Name of the identity that is generating the secret
- `{{random N}}`: Random string of N characters
Allowed template functions are
- `truncate`: Truncates a string to a specified length
- `replace`: Replaces a substring with another value
Examples:
```
{{randomUsername}} // 3POnzeFyK9gW2nioK0q2gMjr6CZqsRiX
{{unixTimestamp}} // 17490641580
{{identity.name}} // testuser
{{random-5}} // x9k2m
{{truncate identity.name 4}} // test
{{replace identity.name 'user' 'replace'}} // testreplace
```
</ParamField>
</Step>
<Step title="Click `Submit`">

View File

@@ -71,15 +71,28 @@ Create a user with the required permission in your SQL instance. This user will
</Step>
<Step title="(Optional) Modify SQL Statements">
![Modify SQL Statements Modal](../../../images/platform/dynamic-secrets/modify-sql-statement-oracle.png)
<ParamField path="Username Template" type="string" default="{{randomUsername}}">
Specifies a template for generating usernames. This field allows customization of how usernames are automatically created.
<ParamField path="Username Template" type="string" default="{{randomUsername}}">
Specifies a template for generating usernames. This field allows customization of how usernames are automatically created.
Allowed template variables are
- `{{randomUsername}}`: Random username string
- `{{unixTimestamp}}`: Current Unix timestamp
</ParamField>
<ParamField path="Customize SQL Statement" type="string">
If you want to provide specific privileges for the generated dynamic credentials, you can modify the SQL statement to your needs. This is useful if you want to only give access to a specific table(s).
Allowed template variables are
- `{{randomUsername}}`: Random username string
- `{{unixTimestamp}}`: Current Unix timestamp
- `{{identity.name}}`: Name of the identity that is generating the secret
- `{{random N}}`: Random string of N characters
Allowed template functions are
- `truncate`: Truncates a string to a specified length
- `replace`: Replaces a substring with another value
Examples:
```
{{randomUsername}} // 3POnzeFyK9gW2nioK0q2gMjr6CZqsRiX
{{unixTimestamp}} // 17490641580
{{identity.name}} // testuser
{{random-5}} // x9k2m
{{truncate identity.name 4}} // test
{{replace identity.name 'user' 'replace'}} // testreplace
```
</ParamField>
</Step>
<Step title="Click 'Submit'">

View File

@@ -72,12 +72,28 @@ Create a user with the required permission in your SQL instance. This user will
</Step>
<Step title="(Optional) Modify SQL Statements">
![Modify SQL Statements Modal](../../../images/platform/dynamic-secrets/modify-sql-statements.png)
<ParamField path="Username Template" type="string" default="{{randomUsername}}">
Specifies a template for generating usernames. This field allows customization of how usernames are automatically created.
<ParamField path="Username Template" type="string" default="{{randomUsername}}">
Specifies a template for generating usernames. This field allows customization of how usernames are automatically created.
Allowed template variables are
- `{{randomUsername}}`: Random username string
- `{{unixTimestamp}}`: Current Unix timestamp
- `{{identity.name}}`: Name of the identity that is generating the secret
- `{{random N}}`: Random string of N characters
Allowed template functions are
- `truncate`: Truncates a string to a specified length
- `replace`: Replaces a substring with another value
Examples:
```
{{randomUsername}} // 3POnzeFyK9gW2nioK0q2gMjr6CZqsRiX
{{unixTimestamp}} // 17490641580
{{identity.name}} // testuser
{{random-5}} // x9k2m
{{truncate identity.name 4}} // test
{{replace identity.name 'user' 'replace'}} // testreplace
```
</ParamField>
<ParamField path="Customize SQL Statement" type="string">
If you want to provide specific privileges for the generated dynamic credentials, you can modify the SQL statement to your needs. This is useful if you want to only give access to a specific table(s).

View File

@@ -66,12 +66,28 @@ The port that the RabbitMQ management plugin is listening on. This is `15672` by
</ParamField>
<ParamField path="Username Template" type="string" default="{{randomUsername}}">
Specifies a template for generating usernames. This field allows customization of how usernames are automatically created.
Allowed template variables are
- `{{randomUsername}}`: Random username string
- `{{unixTimestamp}}`: Current Unix timestamp
- `{{identity.name}}`: Name of the identity that is generating the secret
- `{{random N}}`: Random string of N characters
Allowed template functions are
- `truncate`: Truncates a string to a specified length
- `replace`: Replaces a substring with another value
Examples:
```
{{randomUsername}} // 3POnzeFyK9gW2nioK0q2gMjr6CZqsRiX
{{unixTimestamp}} // 17490641580
{{identity.name}} // testuser
{{random-5}} // x9k2m
{{truncate identity.name 4}} // test
{{replace identity.name 'user' 'replace'}} // testreplace
```
</ParamField>
<ParamField path="CA(SSL)" type="string">
A CA may be required if your DB requires it for incoming connections. This is often the case when connecting to a managed service.

View File

@@ -57,12 +57,28 @@ Create a user with the required permission in your Redis instance. This user wil
</Step>
<Step title="(Optional) Modify Redis Statements">
![Modify Redis Statements Modal](/images/platform/dynamic-secrets/modify-redis-statement.png)
<ParamField path="Username Template" type="string" default="{{randomUsername}}">
Specifies a template for generating usernames. This field allows customization of how usernames are automatically created.
<ParamField path="Username Template" type="string" default="{{randomUsername}}">
Specifies a template for generating usernames. This field allows customization of how usernames are automatically created.
Allowed template variables are
- `{{randomUsername}}`: Random username string
- `{{unixTimestamp}}`: Current Unix timestamp
- `{{identity.name}}`: Name of the identity that is generating the secret
- `{{random N}}`: Random string of N characters
Allowed template functions are
- `truncate`: Truncates a string to a specified length
- `replace`: Replaces a substring with another value
Examples:
```
{{randomUsername}} // 3POnzeFyK9gW2nioK0q2gMjr6CZqsRiX
{{unixTimestamp}} // 17490641580
{{identity.name}} // testuser
{{random-5}} // x9k2m
{{truncate identity.name 4}} // test
{{replace identity.name 'user' 'replace'}} // testreplace
```
</ParamField>
<ParamField path="Customize Redis Statement" type="string">
If you want to provide specific privileges for the generated dynamic credentials, you can modify the Redis statement to your needs. This is useful if you want to only give access to specific keys or commands.

View File

@@ -64,13 +64,29 @@ The Infisical SAP ASE dynamic secret allows you to generate SAP ASE database cre
</Step>
<Step title="(Optional) Modify SAP SQL Statements">
![Modify SQL Statements Modal](../../../images/platform/dynamic-secrets/sap-ase/dynamic-secret-sap-ase-statements.png)
<ParamField path="Username Template" type="string" default="{{randomUsername}}">
Specifies a template for generating usernames. This field allows customization of how usernames are automatically created.
<ParamField path="Username Template" type="string" default="{{randomUsername}}">
Specifies a template for generating usernames. This field allows customization of how usernames are automatically created.
Allowed template variables are
- `{{randomUsername}}`: Random username string
- `{{unixTimestamp}}`: Current Unix timestamp
- `{{identity.name}}`: Name of the identity that is generating the secret
- `{{random N}}`: Random string of N characters
Allowed template functions are
- `truncate`: Truncates a string to a specified length
- `replace`: Replaces a substring with another value
Examples:
```
{{randomUsername}} // 3POnzeFyK9gW2nioK0q2gMjr6CZqsRiX
{{unixTimestamp}} // 17490641580
{{identity.name}} // testuser
{{random-5}} // x9k2m
{{truncate identity.name 4}} // test
{{replace identity.name 'user' 'replace'}} // testreplace
```
</ParamField>
<ParamField path="Customize Statement" type="string">
If you want to provide specific privileges for the generated dynamic credentials, you can modify the SQL statement to your needs.

View File

@@ -64,12 +64,28 @@ The Infisical SAP HANA dynamic secret allows you to generate SAP HANA database c
</Step>
<Step title="(Optional) Modify SAP SQL Statements">
![Modify SQL Statements Modal](../../../images/platform/dynamic-secrets/modify-sap-hana-sql-statements.png)
<ParamField path="Username Template" type="string" default="{{randomUsername}}">
Specifies a template for generating usernames. This field allows customization of how usernames are automatically created.
<ParamField path="Username Template" type="string" default="{{randomUsername}}">
Specifies a template for generating usernames. This field allows customization of how usernames are automatically created.
Allowed template variables are
- `{{randomUsername}}`: Random username string
- `{{unixTimestamp}}`: Current Unix timestamp
- `{{identity.name}}`: Name of the identity that is generating the secret
- `{{random N}}`: Random string of N characters
Allowed template functions are
- `truncate`: Truncates a string to a specified length
- `replace`: Replaces a substring with another value
Examples:
```
{{randomUsername}} // 3POnzeFyK9gW2nioK0q2gMjr6CZqsRiX
{{unixTimestamp}} // 17490641580
{{identity.name}} // testuser
{{random-5}} // x9k2m
{{truncate identity.name 4}} // test
{{replace identity.name 'user' 'replace'}} // testreplace
```
</ParamField>
<ParamField path="Customize Statement" type="string">

View File

@@ -78,12 +78,28 @@ Infisical's Snowflake dynamic secrets allow you to generate Snowflake user crede
<Step title="(Optional) Modify SQL Statements">
![Modify SQL Statements Modal](/images/platform/dynamic-secrets/snowflake/dynamic-secret-snowflake-sql-statements.png)
<ParamField path="Username Template" type="string" default="{{randomUsername}}">
Specifies a template for generating usernames. This field allows customization of how usernames are automatically created.
Allowed template variables are
- `{{randomUsername}}`: Random username string
- `{{unixTimestamp}}`: Current Unix timestamp
- `{{identity.name}}`: Name of the identity that is generating the secret
- `{{random N}}`: Random string of N characters
Allowed template functions are
- `truncate`: Truncates a string to a specified length
- `replace`: Replaces a substring with another value
Examples:
```
{{randomUsername}} // 3POnzeFyK9gW2nioK0q2gMjr6CZqsRiX
{{unixTimestamp}} // 17490641580
{{identity.name}} // testuser
{{random-5}} // x9k2m
{{truncate identity.name 4}} // test
{{replace identity.name 'user' 'replace'}} // testreplace
```
</ParamField>
<ParamField path="Customize Statement" type="string">
If you want to provide specific privileges for the generated dynamic credentials, you can modify the SQL

View File

@@ -89,18 +89,208 @@ Once authenticated, the Gateway establishes a secure connection with Infisical t
helm repo update
```
### Create a Kubernetes Secret containing gateway environment variables
The gateway supports all identity authentication methods through the use of environment variables.
The environment variables must be set in the `infisical-gateway-environment` Kubernetes secret.
```bash
kubectl create secret generic infisical-gateway-environment --from-literal=TOKEN=<your-machine-identity-access-token>
```
<Note>
The secret name is `infisical-gateway-environment` by default. The `TOKEN` key is required, and the `INFISICAL_API_URL` key is optional.
</Note>
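If you prefer a declarative setup, the same secret can be sketched as a manifest (a minimal sketch: the secret name and keys follow the documentation above, and the `INFISICAL_API_URL` entry is optional, shown here only for illustration):
```bash
# Declarative equivalent of the command above; apply with kubectl
kubectl apply -f - <<EOF
apiVersion: v1
kind: Secret
metadata:
  name: infisical-gateway-environment
type: Opaque
stringData:
  TOKEN: <your-machine-identity-access-token>
  # Optional: override the default of https://app.infisical.com
  INFISICAL_API_URL: https://app.infisical.com
EOF
```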
#### Supported authentication methods
<AccordionGroup>
<Accordion title="Universal Auth">
The Universal Auth method is a simple and secure way to authenticate with Infisical. It requires a client ID and a client secret.
<ParamField query="Environment Variables">
<Expandable title="properties">
<ParamField query="INFISICAL_UNIVERSAL_AUTH_CLIENT_ID" type="string" required>
Your machine identity client ID.
</ParamField>
<ParamField query="INFISICAL_UNIVERSAL_AUTH_CLIENT_SECRET" type="string" required>
Your machine identity client secret.
</ParamField>
<ParamField query="INFISICAL_AUTH_METHOD" type="string" required>
The authentication method to use. Must be `universal-auth` when using Universal Auth.
</ParamField>
</Expandable>
</ParamField>
```bash
kubectl create secret generic infisical-gateway-environment --from-literal=INFISICAL_AUTH_METHOD=universal-auth --from-literal=INFISICAL_UNIVERSAL_AUTH_CLIENT_ID=<client-id> --from-literal=INFISICAL_UNIVERSAL_AUTH_CLIENT_SECRET=<client-secret>
```
</Accordion>
<Accordion title="Native Kubernetes">
The Native Kubernetes method is used to authenticate with Infisical when running in a Kubernetes environment. It requires a service account token to authenticate with Infisical.
<ParamField query="Environment Variables">
<Expandable title="properties">
<ParamField query="INFISICAL_MACHINE_IDENTITY_ID" type="string" required>
Your machine identity ID.
</ParamField>
<ParamField query="INFISICAL_KUBERNETES_SERVICE_ACCOUNT_TOKEN_PATH" type="string" optional>
Path to the Kubernetes service account token to use. Default: `/var/run/secrets/kubernetes.io/serviceaccount/token`.
</ParamField>
<ParamField query="INFISICAL_AUTH_METHOD" type="string" required>
The authentication method to use. Must be `kubernetes` when using Native Kubernetes.
</ParamField>
</Expandable>
</ParamField>
```bash
kubectl create secret generic infisical-gateway-environment --from-literal=INFISICAL_AUTH_METHOD=kubernetes --from-literal=INFISICAL_MACHINE_IDENTITY_ID=<machine-identity-id>
```
</Accordion>
<Accordion title="Native Azure">
The Native Azure method is used to authenticate with Infisical when running in an Azure environment.
<ParamField query="Environment Variables">
<Expandable title="properties">
<ParamField query="INFISICAL_MACHINE_IDENTITY_ID" type="string" required>
Your machine identity ID.
</ParamField>
<ParamField query="INFISICAL_AUTH_METHOD" type="string" required>
The authentication method to use. Must be `azure` when using Native Azure.
</ParamField>
</Expandable>
</ParamField>
```bash
kubectl create secret generic infisical-gateway-environment --from-literal=INFISICAL_AUTH_METHOD=azure --from-literal=INFISICAL_MACHINE_IDENTITY_ID=<machine-identity-id>
```
</Accordion>
<Accordion title="Native GCP ID Token">
The Native GCP ID Token method is used to authenticate with Infisical when running in a GCP environment.
<ParamField query="Environment Variables">
<Expandable title="properties">
<ParamField query="INFISICAL_MACHINE_IDENTITY_ID" type="string" required>
Your machine identity ID.
</ParamField>
<ParamField query="INFISICAL_AUTH_METHOD" type="string" required>
The authentication method to use. Must be `gcp-id-token` when using Native GCP ID Token.
</ParamField>
</Expandable>
</ParamField>
```bash
kubectl create secret generic infisical-gateway-environment --from-literal=INFISICAL_AUTH_METHOD=gcp-id-token --from-literal=INFISICAL_MACHINE_IDENTITY_ID=<machine-identity-id>
```
</Accordion>
<Accordion title="GCP IAM">
The GCP IAM method is used to authenticate with Infisical using a GCP service account key.
<ParamField query="Environment Variables">
<Expandable title="properties">
<ParamField query="INFISICAL_MACHINE_IDENTITY_ID" type="string" required>
Your machine identity ID.
</ParamField>
<ParamField query="INFISICAL_GCP_SERVICE_ACCOUNT_KEY_FILE_PATH" type="string" required>
Path to your GCP service account key file _(Must be in JSON format!)_
</ParamField>
<ParamField query="INFISICAL_AUTH_METHOD" type="string" required>
The authentication method to use. Must be `gcp-iam` when using GCP IAM.
</ParamField>
</Expandable>
</ParamField>
```bash
kubectl create secret generic infisical-gateway-environment --from-literal=INFISICAL_AUTH_METHOD=gcp-iam --from-literal=INFISICAL_MACHINE_IDENTITY_ID=<machine-identity-id> --from-literal=INFISICAL_GCP_SERVICE_ACCOUNT_KEY_FILE_PATH=<service-account-key-file-path>
```
</Accordion>
<Accordion title="Native AWS IAM">
The AWS IAM method is used to authenticate with Infisical using an AWS IAM role while running in an AWS environment such as EC2 or Lambda.
<ParamField query="Environment Variables">
<Expandable title="properties">
<ParamField query="INFISICAL_MACHINE_IDENTITY_ID" type="string" required>
Your machine identity ID.
</ParamField>
<ParamField query="INFISICAL_AUTH_METHOD" type="string" required>
The authentication method to use. Must be `aws-iam` when using Native AWS IAM.
</ParamField>
</Expandable>
</ParamField>
```bash
kubectl create secret generic infisical-gateway-environment --from-literal=INFISICAL_AUTH_METHOD=aws-iam --from-literal=INFISICAL_MACHINE_IDENTITY_ID=<machine-identity-id>
```
</Accordion>
<Accordion title="OIDC Auth">
The OIDC Auth method is used to authenticate with Infisical via OIDC identity tokens.
<ParamField query="Environment Variables">
<Expandable title="properties">
<ParamField query="INFISICAL_MACHINE_IDENTITY_ID" type="string" required>
Your machine identity ID.
</ParamField>
<ParamField query="INFISICAL_JWT" type="string" required>
The OIDC JWT from the identity provider.
</ParamField>
<ParamField query="INFISICAL_AUTH_METHOD" type="string" required>
The authentication method to use. Must be `oidc-auth` when using OIDC Auth.
</ParamField>
</Expandable>
</ParamField>
```bash
kubectl create secret generic infisical-gateway-environment --from-literal=INFISICAL_AUTH_METHOD=oidc-auth --from-literal=INFISICAL_MACHINE_IDENTITY_ID=<machine-identity-id> --from-literal=INFISICAL_JWT=<oidc-jwt>
```
</Accordion>
<Accordion title="JWT Auth">
The JWT Auth method is used to authenticate with Infisical via a JWT token.
<ParamField query="Environment Variables">
<Expandable title="properties">
<ParamField query="INFISICAL_JWT" type="string" required>
The JWT token to use for authentication.
</ParamField>
<ParamField query="INFISICAL_MACHINE_IDENTITY_ID" type="string" required>
Your machine identity ID.
</ParamField>
<ParamField query="INFISICAL_AUTH_METHOD" type="string" required>
The authentication method to use. Must be `jwt-auth` when using JWT Auth.
</ParamField>
</Expandable>
</ParamField>
```bash
kubectl create secret generic infisical-gateway-environment --from-literal=INFISICAL_AUTH_METHOD=jwt-auth --from-literal=INFISICAL_JWT=<jwt> --from-literal=INFISICAL_MACHINE_IDENTITY_ID=<machine-identity-id>
```
</Accordion>
<Accordion title="Token Auth">
You can use the `INFISICAL_TOKEN` environment variable to authenticate with Infisical with a raw machine identity access token.
<ParamField query="Environment Variables">
<Expandable title="properties">
<ParamField query="INFISICAL_TOKEN" type="string" required>
The machine identity access token to use for authentication.
</ParamField>
</Expandable>
</ParamField>
```bash
kubectl create secret generic infisical-gateway-environment --from-literal=INFISICAL_TOKEN=<token>
```
</Accordion>
</AccordionGroup>
#### Other environment variables
<AccordionGroup>
<Accordion title="INFISICAL_API_URL">
The API URL to use for the gateway. By default, `INFISICAL_API_URL` is set to `https://app.infisical.com`.
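For example, a token-based secret that also points the gateway at a self-hosted instance might be created like this (a sketch; `https://your-infisical-instance.com` is a placeholder for your own URL):
```bash
kubectl create secret generic infisical-gateway-environment \
  --from-literal=TOKEN=<your-machine-identity-access-token> \
  --from-literal=INFISICAL_API_URL=https://your-infisical-instance.com
```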
</Accordion>
</AccordionGroup>
### Install the Infisical Gateway Helm Chart
```bash

View File

@@ -0,0 +1,157 @@
---
title: Azure
description: "Learn how to authenticate Azure pipelines with Infisical using OpenID Connect (OIDC)."
---
**OIDC Auth** is a platform-agnostic JWT-based authentication method that can be used to authenticate from any platform or environment using an identity provider with OpenID Connect.
## Diagram
The following sequence diagram illustrates the OIDC Auth workflow for authenticating Azure pipelines with Infisical.
```mermaid
sequenceDiagram
participant Client as Azure Pipeline
participant Idp as Identity Provider
participant Infis as Infisical
Client->>Idp: Step 1: Request identity token
Idp-->>Client: Return JWT with verifiable claims
Note over Client,Infis: Step 2: Login Operation
Client->>Infis: Send signed JWT to /api/v1/auth/oidc-auth/login
Note over Infis,Idp: Step 3: Query verification
Infis->>Idp: Request JWT public key using OIDC Discovery
Idp-->>Infis: Return public key
Note over Infis: Step 4: JWT validation
Infis->>Client: Return short-lived access token
Note over Client,Infis: Step 5: Access Infisical API with Token
Client->>Infis: Make authenticated requests using the short-lived access token
```
## Concept
At a high level, Infisical authenticates a client by verifying the JWT and checking that it meets specific requirements (e.g. it is issued by a trusted identity provider) at the `/api/v1/auth/oidc-auth/login` endpoint. If successful,
Infisical returns a short-lived access token that can be used to make authenticated requests to the Infisical API.
To be more specific:
1. The Azure pipeline requests an identity token from Azure's identity provider.
2. The fetched identity token is sent to Infisical at the `/api/v1/auth/oidc-auth/login` endpoint.
3. Infisical fetches the public key that was used to sign the identity token from Azure's identity provider using OIDC Discovery.
4. Infisical validates the JWT using the public key provided by the identity provider and checks that the subject, audience, and claims of the token match the set criteria.
5. If all is well, Infisical returns a short-lived access token that the Azure pipeline can use to make authenticated requests to the Infisical API.
<Note>
Infisical needs network-level access to Azure's identity provider endpoints.
</Note>
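As a sketch of steps 2 and 5 (assuming the default cloud URL `https://app.infisical.com`; `<your-identity-id>`, `<oidc-jwt>`, `<env-slug>`, and `<workspace-slug>` are placeholders), the exchange looks like this with `curl` and `jq`; the full Azure pipeline example later in this guide performs the same calls:
```bash
# Step 2: exchange the identity provider's JWT for a short-lived Infisical access token
ACCESS_TOKEN=$(curl -s -X POST "https://app.infisical.com/api/v1/auth/oidc-auth/login" \
  -H "Content-Type: application/json" \
  -d '{"identityId": "<your-identity-id>", "jwt": "<oidc-jwt>"}' \
  | jq -r '.accessToken')

# Step 5: use the access token to make authenticated requests to the Infisical API
curl -s -H "Authorization: Bearer $ACCESS_TOKEN" \
  "https://app.infisical.com/api/v3/secrets/raw?environment=<env-slug>&workspaceSlug=<workspace-slug>"
```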
## Guide
In the following steps, we explore how to create and use identities to access the Infisical API using the OIDC Auth authentication method.
<Steps>
<Step title="Creating an identity">
To create an identity, head to your Organization Settings > Access Control > Identities and press **Create identity**.
![identities organization](/images/platform/identities/identities-org.png)
When creating an identity, you specify an organization level [role](/documentation/platform/role-based-access-controls) for it to assume; you can configure roles in Organization Settings > Access Control > Organization Roles.
![identities organization create](/images/platform/identities/identities-org-create.png)
Now input a few details for your new identity. Here's some guidance for each field:
- Name (required): A friendly name for the identity.
- Role (required): A role from the **Organization Roles** tab for the identity to assume. The organization role assigned will determine what organization level resources this identity can have access to.
Once you've created an identity, you'll be redirected to a page where you can manage the identity.
![identities page](/images/platform/identities/identities-page.png)
Since the identity has been configured with Universal Auth by default, you should re-configure it to use OIDC Auth instead. To do this, press to edit the **Authentication** section,
remove the existing Universal Auth configuration, and add a new OIDC Auth configuration onto the identity.
![identities page remove default auth](/images/platform/identities/identities-page-remove-default-auth.png)
![identities create oidc auth method](/images/platform/identities/identities-org-create-oidc-auth-method.png)
<Warning>Restrict access by configuring the Subject, Audiences, and Claims fields</Warning>
Here's some more guidance on each field:
- <div style={{ textAlign: 'justify' }}>**OIDC Discovery URL**: The URL used to retrieve the OpenID Connect configuration from the identity provider. This is used to fetch the public keys needed to verify the JWT. For Azure, set this to `https://login.microsoftonline.com/{tenant-id}/v2.0` (replace `{tenant-id}` with your Azure AD tenant ID).</div>
- <div style={{ textAlign: 'justify' }}>**Issuer**: The value of the `iss` claim that the token must match. For Azure, this should be `https://login.microsoftonline.com/{tenant-id}/v2.0`.</div>
- **Subject**: This must match the `sub` claim in the JWT.
- **Audiences**: Values that must match the `aud` claim.
- **Claims**: Additional claims that must be present. Refer to [Azure DevOps docs](https://learn.microsoft.com/en-us/azure/devops/pipelines/library/connect-to-azure?view=azure-devops#workload-identity-federation) for available claims.
- **Access Token TTL**: Lifetime of the issued token (in seconds), e.g., `2592000` (30 days)
- **Access Token Max TTL**: Maximum allowed lifetime of the token
- **Access Token Max Number of Uses**: Max times the token can be used (`0` = unlimited)
- **Access Token Trusted IPs**: List of allowed IP ranges (defaults to `0.0.0.0/0`)
<Tip>If you are unsure about what to configure for the subject, audience, and claims fields, you can inspect the JWT token from your Azure DevOps pipeline by adding a debug step that outputs the token claims.</Tip>
<Info>The `subject`, `audiences`, and `claims` fields support glob pattern matching; however, we highly recommend using hardcoded values whenever possible.</Info>
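For example, a debug step along the following lines can decode the JWT payload so you can read the `sub`, `aud`, and other claims before configuring these fields (a sketch assuming the token is available in `$OIDC_TOKEN` and `jq` is installed on the agent):
```bash
# Decode the JWT payload (second dot-separated segment) to inspect its claims
PAYLOAD=$(echo "$OIDC_TOKEN" | cut -d '.' -f2 | tr '_-' '/+')
# Pad to a multiple of 4 characters so base64 can decode it
while [ $(( ${#PAYLOAD} % 4 )) -ne 0 ]; do PAYLOAD="${PAYLOAD}="; done
echo "$PAYLOAD" | base64 -d | jq '{iss, sub, aud}'
```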
</Step>
<Step title="Adding an identity to a project">
To enable the identity to access project-level resources such as secrets within a specific project, you should add it to that project.
To do this, head over to the project you want to add the identity to and go to Project Settings > Access Control > Machine Identities and press **Add identity**.
Next, select the identity you want to add to the project and the project level role you want to allow it to assume. The project role assigned will determine what project level resources this identity can have access to.
![identities project](/images/platform/identities/identities-project.png)
![identities project create](/images/platform/identities/identities-project-create.png)
</Step>
<Step title="Accessing the Infisical API with the identity">
In Azure DevOps, to authenticate with Infisical using OIDC, you must configure a service connection that enables workload identity federation.
Once set up, the OIDC token can be fetched automatically within the pipeline job context. Here's an example:
```yaml
trigger:
  - main

pool:
  vmImage: ubuntu-latest

steps:
  - task: AzureCLI@2
    displayName: 'Retrieve secrets from Infisical using OIDC'
    inputs:
      azureSubscription: 'your-azure-service-connection-name'
      scriptType: 'bash'
      scriptLocation: 'inlineScript'
      addSpnToEnvironment: true
      inlineScript: |
        # Get OIDC access token
        OIDC_TOKEN=$(az account get-access-token --resource "api://AzureADTokenExchange" --query accessToken -o tsv)
        [ -z "$OIDC_TOKEN" ] && { echo "Failed to get access token"; exit 1; }

        # Exchange for Infisical access token
        ACCESS_TOKEN=$(curl -s -X POST "<YOUR-INFISICAL-INSTANCE-URL>/api/v1/auth/oidc-auth/login" \
          -H "Content-Type: application/json" \
          -d "{\"identityId\":\"{your-identity-id}\",\"jwt\":\"$OIDC_TOKEN\"}" \
          | jq -r '.accessToken')

        # Fetch secrets
        curl -s -H "Authorization: Bearer $ACCESS_TOKEN" \
          "<YOUR-INFISICAL-INSTANCE-URL>/api/v3/secrets/raw?environment={your-environment-slug}&workspaceSlug={your-workspace-slug}"
```
Make sure the service connection is properly configured for workload identity federation and linked to your Azure AD app registration with appropriate claims.
<Note>
Each identity access token has a time-to-live (TTL) which you can infer from the response of the login operation;
the default TTL is `7200` seconds which can be adjusted.
If an identity access token expires, it can no longer authenticate with the Infisical API. In this case,
a new access token should be obtained by performing another login operation.
</Note>
</Step>
</Steps>


View File

@@ -34,6 +34,9 @@ description: "Learn how to configure a GCP Secret Manager Sync for Infisical."
- **GCP Connection**: The GCP Connection to authenticate with.
- **Project**: The GCP project to sync with.
- **Scope**: The GCP project scope that secrets should be synced to:
- **Global**: Secrets will be synced globally; available to all project regions.
- **Region**: Secrets will be synced to the specified region.
5. Configure the **Sync Options** to specify how secrets should be synced, then click **Next**.
![Configure Options](/images/secret-syncs/gcp-secret-manager/gcp-secret-manager-options.png)

View File

@@ -341,6 +341,7 @@
"group": "OIDC Auth",
"pages": [
"documentation/platform/identities/oidc-auth/general",
"documentation/platform/identities/oidc-auth/azure",
"documentation/platform/identities/oidc-auth/github",
"documentation/platform/identities/oidc-auth/circleci",
"documentation/platform/identities/oidc-auth/gitlab",

View File

@@ -5,27 +5,56 @@ import { faCircleInfo } from "@fortawesome/free-solid-svg-icons";
import { FontAwesomeIcon } from "@fortawesome/react-fontawesome";
import { SecretSyncConnectionField } from "@app/components/secret-syncs/forms/SecretSyncConnectionField";
import {
Badge,
FilterableSelect,
FormControl,
Select,
SelectItem,
Tooltip
} from "@app/components/v2";
import { GCP_SYNC_SCOPES } from "@app/helpers/secretSyncs";
import {
useGcpConnectionListProjectLocations,
useGcpConnectionListProjects
} from "@app/hooks/api/appConnections/gcp/queries";
import { TGcpLocation, TGcpProject } from "@app/hooks/api/appConnections/gcp/types";
import { SecretSync } from "@app/hooks/api/secretSyncs";
import { GcpSyncScope } from "@app/hooks/api/secretSyncs/types/gcp-sync";
import { TSecretSyncForm } from "../schemas";
const formatOptionLabel = ({ displayName, locationId }: TGcpLocation) => (
<div className="flex w-full flex-row items-center gap-1">
<span>{displayName}</span>{" "}
<Badge className="h-5 leading-5" variant="success">
{locationId}
</Badge>
</div>
);
export const GcpSyncFields = () => {
const { control, setValue } = useFormContext<
TSecretSyncForm & { destination: SecretSync.GCPSecretManager }
>();
const connectionId = useWatch({ name: "connection.id", control });
const projectId = useWatch({ name: "destinationConfig.projectId", control });
const selectedScope = useWatch({ name: "destinationConfig.scope", control });
const { data: projects, isPending } = useGcpConnectionListProjects(connectionId, {
enabled: Boolean(connectionId)
});
const { data: locations, isPending: areLocationsPending } = useGcpConnectionListProjectLocations(
{ connectionId, projectId },
{
enabled: Boolean(connectionId) && Boolean(projectId)
}
);
useEffect(() => {
setValue("destinationConfig.scope", GcpSyncScope.Global);
if (!selectedScope) setValue("destinationConfig.scope", GcpSyncScope.Global);
}, []);
return (
@@ -33,6 +62,7 @@ export const GcpSyncFields = () => {
<SecretSyncConnectionField
onChange={() => {
setValue("destinationConfig.projectId", "");
setValue("destinationConfig.locationId", "");
}}
/>
<Controller
@@ -60,9 +90,10 @@ export const GcpSyncFields = () => {
isLoading={isPending && Boolean(connectionId)}
isDisabled={!connectionId}
value={projects?.find((project) => project.id === value) ?? null}
onChange={(option) => {
setValue("destinationConfig.locationId", "");
onChange((option as SingleValue<TGcpProject>)?.id ?? null);
}}
options={projects}
placeholder="Select a GCP project..."
getOptionLabel={(option) => option.name}
@@ -71,6 +102,76 @@ export const GcpSyncFields = () => {
</FormControl>
)}
/>
<Controller
name="destinationConfig.scope"
control={control}
render={({ field: { value, onChange }, fieldState: { error } }) => (
<FormControl
isError={Boolean(error?.message)}
tooltipText={
<div className="flex flex-col gap-3">
<p>
Specify how Infisical should sync secrets to GCP. The following options are
available:
</p>
<ul className="flex list-disc flex-col gap-3 pl-4">
{Object.values(GCP_SYNC_SCOPES).map(({ name, description }) => {
return (
<li key={name}>
<p className="text-mineshaft-300">
<span className="font-medium text-bunker-200">{name}</span>: {description}
</p>
</li>
);
})}
</ul>
</div>
}
tooltipClassName="max-w-lg"
label="Scope"
>
<Select
value={value}
onValueChange={(val) => onChange(val)}
className="w-full border border-mineshaft-500 capitalize"
position="popper"
dropdownContainerClassName="max-w-none"
isDisabled={!projectId}
>
{Object.values(GcpSyncScope).map((scope) => {
return (
<SelectItem className="capitalize" value={scope} key={scope}>
{scope}
</SelectItem>
);
})}
</Select>
</FormControl>
)}
/>
{selectedScope === GcpSyncScope.Region && (
<Controller
name="destinationConfig.locationId"
control={control}
render={({ field: { value, onChange }, fieldState: { error } }) => (
<FormControl isError={Boolean(error)} errorText={error?.message} label="Region">
<FilterableSelect
menuPlacement="top"
isLoading={areLocationsPending && Boolean(projectId)}
isDisabled={!projectId}
value={locations?.find((option) => option.locationId === value) ?? null}
onChange={(option) =>
onChange((option as SingleValue<TGcpLocation>)?.locationId ?? null)
}
options={locations}
placeholder="Select a region..."
getOptionValue={(option) => option.locationId}
formatOptionLabel={formatOptionLabel}
/>
</FormControl>
)}
/>
)}
</>
);
};

View File

@@ -3,12 +3,23 @@ import { useFormContext } from "react-hook-form";
import { GenericFieldLabel } from "@app/components/secret-syncs";
import { TSecretSyncForm } from "@app/components/secret-syncs/forms/schemas";
import { SecretSync } from "@app/hooks/api/secretSyncs";
import { GcpSyncScope } from "@app/hooks/api/secretSyncs/types/gcp-sync";
export const GcpSyncReviewFields = () => {
const { watch } = useFormContext<
TSecretSyncForm & { destination: SecretSync.GCPSecretManager }
>();
const destinationConfig = watch("destinationConfig");
return (
<>
<GenericFieldLabel label="Project ID">{destinationConfig.projectId}</GenericFieldLabel>
<GenericFieldLabel label="Scope" className="capitalize">
{destinationConfig.scope}
</GenericFieldLabel>
{destinationConfig.scope === GcpSyncScope.Region && (
<GenericFieldLabel label="Region">{destinationConfig.locationId}</GenericFieldLabel>
)}
</>
);
};

View File

@@ -7,9 +7,16 @@ import { GcpSyncScope } from "@app/hooks/api/secretSyncs/types/gcp-sync";
export const GcpSyncDestinationSchema = BaseSecretSyncSchema().merge(
z.object({
destination: z.literal(SecretSync.GCPSecretManager),
destinationConfig: z.discriminatedUnion("scope", [
z.object({
scope: z.literal(GcpSyncScope.Global),
projectId: z.string().min(1, "Project ID required")
}),
z.object({
scope: z.literal(GcpSyncScope.Region),
projectId: z.string().min(1, "Project ID required"),
locationId: z.string().min(1, "Region required")
})
])
})
);

View File

@@ -4,6 +4,7 @@ import {
SecretSyncImportBehavior,
SecretSyncInitialSyncBehavior
} from "@app/hooks/api/secretSyncs";
import { GcpSyncScope } from "@app/hooks/api/secretSyncs/types/gcp-sync";
import { HumanitecSyncScope } from "@app/hooks/api/secretSyncs/types/humanitec-sync";
export const SECRET_SYNC_MAP: Record<SecretSync, { name: string; image: string }> = {
@@ -124,3 +125,14 @@ export const HUMANITEC_SYNC_SCOPES: Record<
"Infisical will sync secrets as environment level shared values to the specified Humanitec application environment."
}
};
export const GCP_SYNC_SCOPES: Record<GcpSyncScope, { name: string; description: string }> = {
[GcpSyncScope.Global]: {
name: "Global",
description: "Secrets will be synced globally; being available in all project regions."
},
[GcpSyncScope.Region]: {
name: "Region",
description: "Secrets will be synced to the specified region."
}
};

View File

@@ -3,12 +3,14 @@ import { useQuery, UseQueryOptions } from "@tanstack/react-query";
import { apiRequest } from "@app/config/request";
import { appConnectionKeys } from "../queries";
import { TGcpLocation, TGcpProject, TListProjectLocations } from "./types";
const gcpConnectionKeys = {
all: [...appConnectionKeys.all, "gcp"] as const,
listProjects: (connectionId: string) =>
[...gcpConnectionKeys.all, "projects", connectionId] as const,
listProjectLocations: ({ projectId, connectionId }: TListProjectLocations) =>
[...gcpConnectionKeys.all, "project-locations", connectionId, projectId] as const
};
export const useGcpConnectionListProjects = (
@@ -35,3 +37,29 @@ export const useGcpConnectionListProjects = (
...options
});
};
export const useGcpConnectionListProjectLocations = (
{ connectionId, projectId }: TListProjectLocations,
options?: Omit<
UseQueryOptions<
TGcpLocation[],
unknown,
TGcpLocation[],
ReturnType<typeof gcpConnectionKeys.listProjectLocations>
>,
"queryKey" | "queryFn"
>
) => {
return useQuery({
queryKey: gcpConnectionKeys.listProjectLocations({ connectionId, projectId }),
queryFn: async () => {
const { data } = await apiRequest.get<TGcpLocation[]>(
`/api/v1/app-connections/gcp/${connectionId}/secret-manager-project-locations`,
{ params: { projectId } }
);
return data;
},
...options
});
};

View File

@@ -2,3 +2,13 @@ export type TGcpProject = {
id: string;
name: string;
};
export type TListProjectLocations = {
connectionId: string;
projectId: string;
};
export type TGcpLocation = {
displayName: string;
locationId: string;
};

View File

@@ -37,11 +37,6 @@ export enum DynamicSecretProviders {
Vertica = "vertica"
}
export enum KubernetesDynamicSecretCredentialType {
Static = "static",
Dynamic = "dynamic"
}
export enum SqlProviders {
Postgres = "postgres",
MySql = "mysql2",
@@ -49,6 +44,11 @@ export enum SqlProviders {
MsSQL = "mssql"
}
export enum DynamicSecretAwsIamAuth {
AssumeRole = "assume-role",
AccessKey = "access-key"
}
export type TDynamicSecretProvider =
| {
type: DynamicSecretProviders.SqlDatabase;
@@ -83,15 +83,26 @@ export type TDynamicSecretProvider =
}
| {
type: DynamicSecretProviders.AwsIam;
inputs:
| {
method: DynamicSecretAwsIamAuth.AccessKey;
accessKey: string;
secretAccessKey: string;
region: string;
awsPath?: string;
policyDocument?: string;
userGroups?: string;
policyArns?: string;
}
| {
method: DynamicSecretAwsIamAuth.AssumeRole;
roleArn: string;
region: string;
awsPath?: string;
policyDocument?: string;
userGroups?: string;
policyArns?: string;
};
}
| {
type: DynamicSecretProviders.Redis;
@@ -272,32 +283,17 @@ export type TDynamicSecretProvider =
}
| {
type: DynamicSecretProviders.Kubernetes;
inputs:
| {
url: string;
clusterToken?: string;
ca?: string;
serviceAccountName: string;
credentialType: KubernetesDynamicSecretCredentialType.Static;
namespace: string;
gatewayId?: string;
sslEnabled: boolean;
audiences: string[];
authMethod: string;
}
| {
url: string;
clusterToken?: string;
ca?: string;
credentialType: KubernetesDynamicSecretCredentialType.Dynamic;
namespace: string;
gatewayId?: string;
sslEnabled: boolean;
audiences: string[];
roleType: string;
role: string;
authMethod: string;
};
inputs: {
url: string;
clusterToken: string;
ca?: string;
serviceAccountName: string;
credentialType: "dynamic" | "static";
namespace: string;
gatewayId?: string;
sslEnabled: boolean;
audiences: string[];
};
}
| {
type: DynamicSecretProviders.Vertica;

View File

@@ -21,7 +21,27 @@ export const groupKeys = {
limit: number;
search: string;
filter?: EFilterReturnedUsers;
}) => [...groupKeys.forGroupUserMemberships(slug), { offset, limit, search, filter }] as const,
specificProjectGroupUserMemberships: ({
projectId,
slug,
offset,
limit,
search,
filter
}: {
slug: string;
projectId: string;
offset: number;
limit: number;
search: string;
filter?: EFilterReturnedUsers;
}) =>
[
...groupKeys.forGroupUserMemberships(slug),
projectId,
{ offset, limit, search, filter }
] as const
};
export const useGetGroupById = (groupId: string) => {
@@ -80,3 +100,51 @@ export const useListGroupUsers = ({
}
});
};
export const useListProjectGroupUsers = ({
id,
projectId,
groupSlug,
offset = 0,
limit = 10,
search,
filter
}: {
id: string;
groupSlug: string;
projectId: string;
offset: number;
limit: number;
search: string;
filter?: EFilterReturnedUsers;
}) => {
return useQuery({
queryKey: groupKeys.specificProjectGroupUserMemberships({
slug: groupSlug,
projectId,
offset,
limit,
search,
filter
}),
enabled: Boolean(groupSlug),
placeholderData: (previousData) => previousData,
queryFn: async () => {
const params = new URLSearchParams({
offset: String(offset),
limit: String(limit),
search,
...(filter && { filter })
});
const { data } = await apiRequest.get<{ users: TGroupUser[]; totalCount: number }>(
`/api/v2/workspace/${projectId}/groups/${id}/users`,
{
params
}
);
return data;
}
});
};

View File

@@ -3,15 +3,22 @@ import { SecretSync } from "@app/hooks/api/secretSyncs";
import { TRootSecretSync } from "@app/hooks/api/secretSyncs/types/root-sync";
export enum GcpSyncScope {
Global = "global"
Global = "global",
Region = "region"
}
export type TGcpSync = TRootSecretSync & {
destination: SecretSync.GCPSecretManager;
destinationConfig:
| {
scope: GcpSyncScope.Global;
projectId: string;
}
| {
scope: GcpSyncScope.Region;
projectId: string;
locationId: string;
};
connection: {
app: AppConnection.GCP;
name: string;

View File

@@ -50,10 +50,13 @@ export const useUpdateGroupWorkspaceRole = () => {
return groupMembership;
},
onSuccess: (_, { projectId, groupId }) => {
queryClient.invalidateQueries({
queryKey: workspaceKeys.getWorkspaceGroupMemberships(projectId)
});
queryClient.invalidateQueries({
queryKey: workspaceKeys.getWorkspaceGroupMembershipDetails(projectId, groupId)
});
}
});
};

View File

@@ -691,6 +691,21 @@ export const useGetWorkspaceIdentityMembershipDetails = (projectId: string, iden
});
};
export const useGetWorkspaceGroupMembershipDetails = (projectId: string, groupId: string) => {
return useQuery({
enabled: Boolean(projectId && groupId),
queryKey: workspaceKeys.getWorkspaceGroupMembershipDetails(projectId, groupId),
queryFn: async () => {
const {
data: { groupMembership }
} = await apiRequest.get<{ groupMembership: TGroupMembership }>(
`/api/v2/workspace/${projectId}/groups/${groupId}`
);
return groupMembership;
}
});
};
export const useListWorkspaceGroups = (projectId: string) => {
return useQuery({
queryKey: workspaceKeys.getWorkspaceGroupMemberships(projectId),

View File

@@ -36,6 +36,8 @@ export const workspaceKeys = {
searchWorkspace: (dto: TSearchProjectsDTO) => ["search-projects", dto] as const,
getWorkspaceGroupMemberships: (workspaceId: string) =>
[{ workspaceId }, "workspace-groups"] as const,
getWorkspaceGroupMembershipDetails: (workspaceId: string, groupId: string) =>
[{ workspaceId, groupId }, "workspace-group-membership-details"] as const,
getWorkspaceCas: ({ projectSlug }: { projectSlug: string }) =>
[{ projectSlug }, "workspace-cas"] as const,
specificWorkspaceCas: ({ projectSlug, status }: { projectSlug: string; status?: CaStatus }) =>

View File

@@ -3,7 +3,7 @@ import { Helmet } from "react-helmet";
import { useTranslation } from "react-i18next";
import { faArrowRight } from "@fortawesome/free-solid-svg-icons";
import { FontAwesomeIcon } from "@fortawesome/react-fontawesome";
import { Link, useNavigate, useRouter } from "@tanstack/react-router";
import axios from "axios";
import { addSeconds, formatISO } from "date-fns";
import { jwtDecode } from "jwt-decode";
@@ -51,6 +51,7 @@ export const SelectOrganizationSection = () => {
const [mfaSuccessCallback, setMfaSuccessCallback] = useState<() => void>(() => {});
const router = useRouter();
const queryParams = new URLSearchParams(window.location.search);
const orgId = queryParams.get("org_id");
const callbackPort = queryParams.get("callback_port");
@@ -118,6 +119,8 @@ export const SelectOrganizationSection = () => {
})
.finally(() => setIsInitialOrgCheckLoading(false));
await router.invalidate();
if (isMfaEnabled) {
SecurityClient.setMfaToken(token);
if (mfaMethod) {

View File

@@ -7,7 +7,8 @@ import { SelectOrganizationPage } from "./SelectOrgPage";
export const SelectOrganizationPageQueryParams = z.object({
org_id: z.string().optional().catch(""),
callback_port: z.coerce.number().optional().catch(undefined),
is_admin_login: z.boolean().optional().catch(false)
is_admin_login: z.boolean().optional().catch(false),
force: z.boolean().optional()
});
export const Route = createFileRoute("/_restrict-login-signup/login/select-organization")({

View File

@@ -28,8 +28,7 @@ import {
import { MfaMethod } from "@app/hooks/api/auth/types";
import { fetchOrganizations } from "@app/hooks/api/organization/queries";
import { ProjectType } from "@app/hooks/api/workspace/types";
import { navigateUserToOrg } from "../LoginPage/Login.utils";
import { isLoggedIn } from "@app/hooks/api/reactQuery";
// eslint-disable-next-line new-cap
const client = new jsrp.client();
@@ -71,6 +70,8 @@ export const SignupInvitePage = () => {
const { mutateAsync: selectOrganization } = useSelectOrganization();
const loggedIn = isLoggedIn();
// Verifies if the information that the users entered (name, workspace) is there, and if the password matched the criteria.
const signupErrorCheck = async () => {
setIsLoading(true);
@@ -242,29 +243,10 @@ export const SignupInvitePage = () => {
if (response?.token) {
SecurityClient.setSignupToken(response.token);
setStep(2);
} else if (loggedIn) {
navigate({ to: "/login/select-organization", search: { force: true } });
} else {
const redirectExistingUser = async () => {
try {
const { token: mfaToken, isMfaEnabled } = await selectOrganization({
organizationId
});
if (isMfaEnabled) {
SecurityClient.setMfaToken(mfaToken);
toggleShowMfa.on();
setMfaSuccessCallback(() => redirectExistingUser);
return;
}
// user will be redirected to dashboard
// if not logged in gets kicked out to login
await navigateUserToOrg(navigate, organizationId);
} catch (err) {
navigate({ to: "/login" });
}
};
await redirectExistingUser();
navigate({ to: "/login" });
}
}
} catch (err) {

View File

@@ -15,7 +15,8 @@ import { setAuthToken } from "@app/hooks/api/reactQuery";
import { ProjectType } from "@app/hooks/api/workspace/types";
const QueryParamsSchema = z.object({
callback_port: z.coerce.number().optional().catch(undefined),
force: z.boolean().optional()
});
export const AuthConsentWrapper = () => {
@@ -71,7 +72,7 @@ export const AuthConsentWrapper = () => {
export const Route = createFileRoute("/_restrict-login-signup")({
validateSearch: zodValidator(QueryParamsSchema),
search: {
middlewares: [stripSearchParams({ callback_port: undefined, force: undefined })]
},
beforeLoad: async ({ context, location, search }) => {
if (!context.serverConfig.initialized) {
@@ -90,6 +91,12 @@ export const Route = createFileRoute("/_restrict-login-signup")({
if (!data) return;
setAuthToken(data.token);
if (location.pathname === "/signupinvite") return;
// Avoid redirect if on select-organization page with force=true
if (location.pathname.endsWith("select-organization") && search?.force === true) return;
// to do cli login
if (search?.callback_port) {
if (location.pathname.endsWith("select-organization") || location.pathname.endsWith("login"))

View File

@@ -152,7 +152,7 @@ export const GroupMembersTable = ({ groupId, groupSlug, handlePopUpOpen }: Props
</Th>
<Th>Email</Th>
<Th>Added On</Th>
<Th className="w-5" />
</Tr>
</THead>
<TBody>

View File

@@ -1,8 +1,17 @@
import { faEllipsisV, faUserMinus } from "@fortawesome/free-solid-svg-icons";
import { FontAwesomeIcon } from "@fortawesome/react-fontawesome";
import { OrgPermissionCan } from "@app/components/permissions";
import {
DropdownMenu,
DropdownMenuContent,
DropdownMenuItem,
DropdownMenuTrigger,
IconButton,
Td,
Tooltip,
Tr
} from "@app/components/v2";
import { OrgPermissionGroupActions, OrgPermissionSubjects, useOrganization } from "@app/context";
import { useOidcManageGroupMembershipsEnabled } from "@app/hooks/api";
import { TGroupUser } from "@app/hooks/api/groups/types";
@@ -38,30 +47,47 @@ export const GroupMembershipRow = ({
<p>{new Date(joinedGroupAt).toLocaleDateString()}</p>
</Tooltip>
</Td>
<Td className="justify-end">
<OrgPermissionCan I={OrgPermissionGroupActions.Edit} a={OrgPermissionSubjects.Groups}>
{(isAllowed) => {
return (
<Tooltip
content={
isOidcManageGroupMembershipsEnabled
? "OIDC Group Membership Mapping Enabled. Remove user from this group in your OIDC provider."
: "Remove user from group"
}
<Td>
<Tooltip className="max-w-sm text-center" content="Options">
<DropdownMenu>
<DropdownMenuTrigger asChild>
<IconButton
ariaLabel="Options"
colorSchema="secondary"
className="w-6"
variant="plain"
>
<IconButton
isDisabled={!isAllowed || isOidcManageGroupMembershipsEnabled}
ariaLabel="Remove user from group"
onClick={() => handlePopUpOpen("removeMemberFromGroup", { username })}
variant="plain"
colorSchema="danger"
>
<FontAwesomeIcon icon={faUserMinus} />
</IconButton>
</Tooltip>
);
}}
</OrgPermissionCan>
<FontAwesomeIcon icon={faEllipsisV} />
</IconButton>
</DropdownMenuTrigger>
<DropdownMenuContent sideOffset={2} align="end">
<OrgPermissionCan I={OrgPermissionGroupActions.Edit} a={OrgPermissionSubjects.Groups}>
{(isAllowed) => {
return (
<Tooltip
content={
isOidcManageGroupMembershipsEnabled
? "OIDC Group Membership Mapping Enabled. Remove user from this group in your OIDC provider."
: undefined
}
position="left"
>
<div>
<DropdownMenuItem
icon={<FontAwesomeIcon icon={faUserMinus} />}
onClick={() => handlePopUpOpen("removeMemberFromGroup", { username })}
isDisabled={!isAllowed || isOidcManageGroupMembershipsEnabled}
>
Remove User From Group
</DropdownMenuItem>
</div>
</Tooltip>
);
}}
</OrgPermissionCan>
</DropdownMenuContent>
</DropdownMenu>
</Tooltip>
</Td>
</Tr>
);

View File

@@ -3,6 +3,7 @@ import { Controller, useForm } from "react-hook-form";
import { faCheck, faClock, faEdit, faSearch } from "@fortawesome/free-solid-svg-icons";
import { FontAwesomeIcon } from "@fortawesome/react-fontawesome";
import { zodResolver } from "@hookform/resolvers/zod";
import { PopperContentProps } from "@radix-ui/react-popper";
import { twMerge } from "tailwind-merge";
import { z } from "zod";
@@ -28,6 +29,7 @@ import { formatProjectRoleName } from "@app/helpers/roles";
import { usePopUp } from "@app/hooks";
import { useGetProjectRoles, useUpdateGroupWorkspaceRole } from "@app/hooks/api";
import { TGroupMembership } from "@app/hooks/api/groups/types";
import { TProjectRole } from "@app/hooks/api/roles/types";
import { ProjectUserMembershipTemporaryMode } from "@app/hooks/api/workspace/types";
import { groupBy } from "@app/lib/fn/array";
@@ -196,33 +198,38 @@ type TForm = z.infer<typeof formSchema>;
export type TMemberRolesProp = {
disableEdit?: boolean;
groupId: string;
className?: string;
roles: TGroupMembership["roles"];
popperContentProps?: PopperContentProps;
};
const MAX_ROLES_TO_BE_SHOWN_IN_TABLE = 2;
export const GroupRoles = ({ roles = [], disableEdit = false, groupId }: TMemberRolesProp) => {
type FormProps = {
projectRoles: Omit<TProjectRole, "permissions">[] | undefined;
roles: TGroupMembership["roles"];
groupId: string;
onClose: VoidFunction;
};
const GroupRolesForm = ({ projectRoles, roles, groupId, onClose }: FormProps) => {
const { currentWorkspace } = useWorkspace();
const { popUp, handlePopUpToggle } = usePopUp(["editRole"] as const);
const [searchRoles, setSearchRoles] = useState("");
const userRolesGroupBySlug = groupBy(roles, ({ customRoleSlug, role }) => customRoleSlug || role);
const updateGroupWorkspaceRole = useUpdateGroupWorkspaceRole();
const {
handleSubmit,
control,
reset,
setValue,
formState: { isSubmitting, isDirty }
} = useForm<TForm>({
resolver: zodResolver(formSchema)
});
const { data: projectRoles, isPending: isRolesLoading } = useGetProjectRoles(
currentWorkspace?.id ?? ""
);
const userRolesGroupBySlug = groupBy(roles, ({ customRoleSlug, role }) => customRoleSlug || role);
const updateGroupWorkspaceRole = useUpdateGroupWorkspaceRole();
const handleRoleUpdate = async (data: TForm) => {
const selectedRoles = Object.keys(data)
.filter((el) => Boolean(data[el].isChecked))
@@ -253,7 +260,7 @@ export const GroupRoles = ({ roles = [], disableEdit = false, groupId }: TMember
roles: selectedRoles
});
createNotification({ text: "Successfully updated group role", type: "success" });
handlePopUpToggle("editRole");
onClose();
setSearchRoles("");
} catch {
createNotification({ text: "Failed to update group role", type: "error" });
@@ -261,7 +268,120 @@ export const GroupRoles = ({ roles = [], disableEdit = false, groupId }: TMember
};
return (
<div className="flex items-center space-x-2">
<form onSubmit={handleSubmit(handleRoleUpdate)} id="role-update-form">
<div className="thin-scrollbar max-h-80 space-y-4 overflow-y-auto">
{projectRoles
?.filter(
({ name, slug }) =>
name.toLowerCase().includes(searchRoles.toLowerCase()) ||
slug.toLowerCase().includes(searchRoles.toLowerCase())
)
?.map(({ id, name, slug }) => {
const userProjectRoleDetails = userRolesGroupBySlug?.[slug]?.[0];
return (
<div key={id} className="flex items-center space-x-4">
<div className="flex-grow">
<Controller
control={control}
defaultValue={Boolean(userProjectRoleDetails?.id)}
name={`${slug}.isChecked`}
render={({ field }) => (
<Checkbox
id={slug}
isChecked={field.value}
onCheckedChange={(isChecked) => {
field.onChange(isChecked);
setValue(`${slug}.temporaryAccess`, false);
}}
>
{name}
</Checkbox>
)}
/>
</div>
<div>
<Controller
control={control}
name={`${slug}.temporaryAccess`}
defaultValue={
userProjectRoleDetails?.isTemporary
? {
isTemporary: true,
temporaryAccessStartTime:
userProjectRoleDetails.temporaryAccessStartTime as string,
temporaryRange: userProjectRoleDetails.temporaryRange as string,
temporaryAccessEndTime: userProjectRoleDetails.temporaryAccessEndTime
}
: false
}
render={({ field }) => (
<IdentityTemporaryRoleForm
temporaryConfig={
typeof field.value === "boolean"
? { isTemporary: field.value }
: field.value
}
onSetTemporary={(data) => {
setValue(`${slug}.isChecked`, true, { shouldDirty: true });
field.onChange({ isTemporary: true, ...data });
}}
onRemoveTemporary={() => {
setValue(`${slug}.isChecked`, false, { shouldDirty: true });
field.onChange(false);
}}
/>
)}
/>
</div>
</div>
);
})}
</div>
<div className="mt-3 flex items-center space-x-2 border-t border-t-gray-700 pt-3">
<div>
<Input
className="w-full p-1.5 pl-8"
size="xs"
value={searchRoles}
onChange={(el) => setSearchRoles(el.target.value)}
leftIcon={<FontAwesomeIcon icon={faSearch} />}
placeholder="Search roles.."
/>
</div>
<div>
<Button
size="xs"
type="submit"
form="role-update-form"
leftIcon={<FontAwesomeIcon icon={faCheck} />}
isDisabled={!isDirty || isSubmitting}
isLoading={isSubmitting}
>
Save
</Button>
</div>
</div>
</form>
);
};
export const GroupRoles = ({
roles = [],
disableEdit = false,
groupId,
className,
popperContentProps
}: TMemberRolesProp) => {
const { currentWorkspace } = useWorkspace();
const { popUp, handlePopUpToggle } = usePopUp(["editRole"] as const);
const { data: projectRoles, isPending: isRolesLoading } = useGetProjectRoles(
currentWorkspace?.id ?? ""
);
return (
<div className={twMerge("flex items-center space-x-1", className)}>
{roles
.slice(0, MAX_ROLES_TO_BE_SHOWN_IN_TABLE)
.map(({ role, customRoleName, id, isTemporary, temporaryAccessEndTime }) => {
@@ -325,119 +445,32 @@ export const GroupRoles = ({ roles = [], disableEdit = false, groupId }: TMember
open={popUp.editRole.isOpen}
onOpenChange={(isOpen) => {
handlePopUpToggle("editRole", isOpen);
reset();
}}
>
{!disableEdit && (
<PopoverTrigger>
<PopoverTrigger onClick={(e) => e.stopPropagation()}>
<IconButton size="sm" variant="plain" ariaLabel="update">
<FontAwesomeIcon icon={faEdit} />
</IconButton>
</PopoverTrigger>
)}
<PopoverContent hideCloseBtn className="pt-4">
<PopoverContent
{...popperContentProps}
onClick={(e) => e.stopPropagation()}
hideCloseBtn
className="pt-4"
>
{isRolesLoading ? (
<div className="flex h-8 w-full items-center justify-center">
<Spinner />
</div>
) : (
<form onSubmit={handleSubmit(handleRoleUpdate)} id="role-update-form">
<div className="thin-scrollbar max-h-80 space-y-4 overflow-y-auto">
{projectRoles
?.filter(
({ name, slug }) =>
name.toLowerCase().includes(searchRoles.toLowerCase()) ||
slug.toLowerCase().includes(searchRoles.toLowerCase())
)
?.map(({ id, name, slug }) => {
const userProjectRoleDetails = userRolesGroupBySlug?.[slug]?.[0];
return (
<div key={id} className="flex items-center space-x-4">
<div className="flex-grow">
<Controller
control={control}
defaultValue={Boolean(userProjectRoleDetails?.id)}
name={`${slug}.isChecked`}
render={({ field }) => (
<Checkbox
id={slug}
isChecked={field.value}
onCheckedChange={(isChecked) => {
field.onChange(isChecked);
setValue(`${slug}.temporaryAccess`, false);
}}
>
{name}
</Checkbox>
)}
/>
</div>
<div>
<Controller
control={control}
name={`${slug}.temporaryAccess`}
defaultValue={
userProjectRoleDetails?.isTemporary
? {
isTemporary: true,
temporaryAccessStartTime:
userProjectRoleDetails.temporaryAccessStartTime as string,
temporaryRange:
userProjectRoleDetails.temporaryRange as string,
temporaryAccessEndTime:
userProjectRoleDetails.temporaryAccessEndTime
}
: false
}
render={({ field }) => (
<IdentityTemporaryRoleForm
temporaryConfig={
typeof field.value === "boolean"
? { isTemporary: field.value }
: field.value
}
onSetTemporary={(data) => {
setValue(`${slug}.isChecked`, true, { shouldDirty: true });
field.onChange({ isTemporary: true, ...data });
}}
onRemoveTemporary={() => {
setValue(`${slug}.isChecked`, false, { shouldDirty: true });
field.onChange(false);
}}
/>
)}
/>
</div>
</div>
);
})}
</div>
<div className="mt-3 flex items-center space-x-2 border-t border-t-gray-700 pt-3">
<div>
<Input
className="w-full p-1.5 pl-8"
size="xs"
value={searchRoles}
onChange={(el) => setSearchRoles(el.target.value)}
leftIcon={<FontAwesomeIcon icon={faSearch} />}
placeholder="Search roles.."
/>
</div>
<div>
<Button
size="xs"
type="submit"
form="role-update-form"
leftIcon={<FontAwesomeIcon icon={faCheck} />}
isDisabled={!isDirty || isSubmitting}
isLoading={isSubmitting}
>
Save
</Button>
</div>
</div>
</form>
<GroupRolesForm
projectRoles={projectRoles}
groupId={groupId}
roles={roles}
onClose={() => handlePopUpToggle("editRole")}
/>
)}
</PopoverContent>
</Popover>
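With the new className and popperContentProps props on GroupRoles (see the updated props destructuring above), callers can adjust row alignment and popover placement per usage site. A hedged usage sketch, not taken from this diff; the surrounding variables and prop values are illustrative assumptions.

// Hypothetical call site, e.g. inside a group row; `group` and `canEditGroup` are assumed,
// and the side/align values assume PopoverContent forwards Radix positioning props.
<GroupRoles
  roles={group.roles}
  groupId={group.id}
  disableEdit={!canEditGroup}
  className="justify-end"
  popperContentProps={{ side: "left", align: "end" }}
/>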
