Compare commits

...

53 Commits

Author SHA1 Message Date
Daniel Hougaard
c782493704 docs(ansible): fixed inconsistencies 2025-08-24 07:37:49 +04:00
Sheen
5c632db282 Merge pull request #4399 from Infisical/audit-log-transaction-fix
fix(audit-logs): move prune audit log transaction inside while loop
2025-08-23 12:17:14 +08:00
Sid
461deef0d5 feat: support render environment groups (#4327)
* feat: support env groups in render sync

* fix: update doc

* Update backend/src/services/app-connection/render/render-connection-service.ts

Co-authored-by: greptile-apps[bot] <165735046+greptile-apps[bot]@users.noreply.github.com>

* fix: pr changes

* fix: lint and type check

* fix: changes

* fix: remove secrets

* fix: MAX iterations in render sync

* fix: render sync review fields

* fix: pr changes

* fix: lint

* fix: changes

---------

Co-authored-by: greptile-apps[bot] <165735046+greptile-apps[bot]@users.noreply.github.com>
2025-08-19 16:11:51 +05:30
Scott Wilson
7748e03612 Merge pull request #4378 from Infisical/animation-for-commit-popover
improvement(frontend): make commit popover animated
2025-08-19 18:11:13 +08:00
github-actions[bot]
2389c64e69 Update Helm chart to version v0.10.2 (#4400)
Co-authored-by: sidwebworks <sidwebworks@users.noreply.github.com>
2025-08-19 14:58:28 +05:30
Scott Wilson
de5ad47f77 fix: move prune audit log transaction inside while loop 2025-08-19 16:16:26 +08:00
Daniel Hougaard
e0161cd06f Merge pull request #4379 from Infisical/daniel/google-sso-enforcement
feat(sso): enforce google SSO on org-level
2025-08-19 15:30:02 +08:00
Akhil Mohan
7c12fa3a4c Merge pull request #4397 from Infisical/fix/crd-issue
feat: resolved instant update in required
2025-08-19 12:28:22 +05:30
=
0af53e82da feat: nity fix 2025-08-19 12:24:03 +05:30
=
f0c080187e feat: resolved instant update in required 2025-08-19 12:14:32 +05:30
Sheen
47118bcf19 Merge pull request #4396 from Infisical/misc/optimize-partition-script
misc: optimize partition script
2025-08-19 14:41:59 +08:00
Akhil Mohan
bb1975491f Merge pull request #4321 from Infisical/sid/k8s-operator
feat: support `InstantUpdates` in k8s operator
2025-08-19 12:02:59 +05:30
Sheen Capadngan
28cc919ff7 misc: optimize partition script 2025-08-19 14:27:06 +08:00
Scott Wilson
5c21ac3182 Merge pull request #4392 from Infisical/fix-audit-log-prune-infinite-loop
fix(audit-logs): clear deleted audit logs on error to prevent infinite looping of audit log prune
2025-08-18 22:13:01 +08:00
sidwebworks
6204b181e7 fix: log message 2025-08-18 14:03:31 +05:30
Sheen
3cceec86c8 Merge pull request #4391 from Infisical/doc/monitoring-telemetry
doc: monitoring telemetry
2025-08-18 14:25:57 +08:00
Sheen Capadngan
ff043f990f doc: monitoring telemetry 2025-08-18 14:20:45 +08:00
Daniel Hougaard
9e177c1e45 Merge pull request #4389 from Infisical/daniel/check-out-no-org-check
fix(cli): failing tests
2025-08-18 10:41:20 +08:00
Daniel Hougaard
5aeb823c9e Update auth-router.ts 2025-08-18 09:53:08 +08:00
Daniel Hougaard
d587e779f5 requested changes 2025-08-16 00:26:06 +04:00
sidwebworks
f9a9565630 fix: add default roles 2025-08-16 01:26:29 +05:30
sidwebworks
05ba0abadd fix: PR changes 2025-08-16 00:04:18 +05:30
sidwebworks
fff9a96204 fix: revert config 2025-08-15 19:51:29 +05:30
sidwebworks
f78556c85f fix: context 2025-08-14 21:50:03 +05:30
sidwebworks
13aa380cac fix: PR changes 2025-08-14 21:43:49 +05:30
Sid
f2a9a57c95 Update k8-operator/config/samples/crd/infisicalsecret/infisicalSecretCrd.yaml
Co-authored-by: greptile-apps[bot] <165735046+greptile-apps[bot]@users.noreply.github.com>
2025-08-14 17:07:09 +05:30
Sid
6384fa6dba Update k8-operator/config/samples/universalAuthIdentitySecret.yaml
Co-authored-by: greptile-apps[bot] <165735046+greptile-apps[bot]@users.noreply.github.com>
2025-08-14 17:07:01 +05:30
sidwebworks
c34ec8de09 fix: operator changes 2025-08-14 17:05:10 +05:30
sidwebworks
ef8a7f1233 Merge branch 'main' of github.com:Infisical/infisical into sid/k8s-operator 2025-08-14 15:48:20 +05:30
Daniel Hougaard
09db98db50 fix: typescript complaining 2025-08-14 06:58:45 +04:00
Daniel Hougaard
a37f1eb1f8 requested changes & frontend lint 2025-08-14 06:53:57 +04:00
Daniel Hougaard
2113abcfdc Update license-fns.ts 2025-08-14 06:15:25 +04:00
Daniel Hougaard
ea2707651c feat(sso): enforce google SSO on org-level 2025-08-14 06:13:24 +04:00
Scott Wilson
b986ff9a21 improvement: adjust key 2025-08-13 17:51:14 -07:00
Scott Wilson
106833328b improvement: make commit popover animated 2025-08-13 17:48:44 -07:00
sidwebworks
97dac1da94 fix: v4 changes 2025-08-12 18:58:35 +05:30
sidwebworks
f9f989c8af Merge branch 'main' of github.com:Infisical/infisical into sid/k8s-operator 2025-08-12 16:49:27 +05:30
sidwebworks
02ee418763 fix: revert yaml 2025-08-07 10:41:48 +05:30
sidwebworks
faca20c00c Merge branch 'main' of github.com:Infisical/infisical into sid/k8s-operator 2025-08-07 01:07:52 +05:30
sidwebworks
69c3687add fix: revert license fns 2025-08-07 01:05:47 +05:30
sidwebworks
1645534b54 fix: changes 2025-08-07 01:04:29 +05:30
sidwebworks
dca0b0c614 draft: k8s operator changes 2025-08-06 23:31:45 +05:30
sidwebworks
d3d0d44778 wip: sse working 2025-08-05 01:08:29 +05:30
sidwebworks
67abcbfe7a wip: k8s operator changes 2025-08-05 00:16:47 +05:30
sidwebworks
fc772e6b89 chore: remove recursive 2025-08-04 23:31:17 +05:30
sidwebworks
c8108ff49a feat: improve docs 2025-08-04 15:53:32 +05:30
sidwebworks
806165b9e9 fix: pr changes 2025-08-03 02:39:16 +05:30
sidwebworks
9fde0a5787 docs: content 2025-08-02 18:26:21 +05:30
Sid
9ee2581659 Update docs/docs.json 2025-08-01 17:19:12 +05:30
Sid
2deff0ef55 Update backend/src/lib/api-docs/constants.ts
Co-authored-by: greptile-apps[bot] <165735046+greptile-apps[bot]@users.noreply.github.com>
2025-08-01 17:18:15 +05:30
Sid
4312378589 Update backend/src/lib/api-docs/constants.ts 2025-08-01 17:17:03 +05:30
sidwebworks
d749a9621f fix: make the conditions optional in casl check 2025-08-01 17:14:51 +05:30
sidwebworks
9686d14e7f feat: events docs 2025-08-01 17:14:37 +05:30
72 changed files with 2277 additions and 527 deletions

View File

@@ -148,6 +148,7 @@ declare module "fastify" {
interface Session {
callbackPort: string;
isAdminLogin: boolean;
orgSlug?: string;
}
interface FastifyRequest {

View File

@@ -84,6 +84,9 @@ const up = async (knex: Knex): Promise<void> => {
t.index("expiresAt");
t.index("orgId");
t.index("projectId");
t.index("eventType");
t.index("userAgentType");
t.index("actor");
});
console.log("Adding GIN indices...");
@@ -119,8 +122,8 @@ const up = async (knex: Knex): Promise<void> => {
console.log("Creating audit log partitions ahead of time... next date:", nextDateStr);
await createAuditLogPartition(knex, nextDate, new Date(nextDate.getFullYear(), nextDate.getMonth() + 1));
// create partitions 4 years ahead
const partitionMonths = 4 * 12;
// create partitions 20 years ahead
const partitionMonths = 20 * 12;
const partitionPromises: Promise<void>[] = [];
for (let x = 1; x <= partitionMonths; x += 1) {
partitionPromises.push(

View File

@@ -0,0 +1,39 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
const GOOGLE_SSO_AUTH_ENFORCED_COLUMN_NAME = "googleSsoAuthEnforced";
const GOOGLE_SSO_AUTH_LAST_USED_COLUMN_NAME = "googleSsoAuthLastUsed";
export async function up(knex: Knex): Promise<void> {
const hasGoogleSsoAuthEnforcedColumn = await knex.schema.hasColumn(
TableName.Organization,
GOOGLE_SSO_AUTH_ENFORCED_COLUMN_NAME
);
const hasGoogleSsoAuthLastUsedColumn = await knex.schema.hasColumn(
TableName.Organization,
GOOGLE_SSO_AUTH_LAST_USED_COLUMN_NAME
);
await knex.schema.alterTable(TableName.Organization, (table) => {
if (!hasGoogleSsoAuthEnforcedColumn)
table.boolean(GOOGLE_SSO_AUTH_ENFORCED_COLUMN_NAME).defaultTo(false).notNullable();
if (!hasGoogleSsoAuthLastUsedColumn) table.timestamp(GOOGLE_SSO_AUTH_LAST_USED_COLUMN_NAME).nullable();
});
}
export async function down(knex: Knex): Promise<void> {
const hasGoogleSsoAuthEnforcedColumn = await knex.schema.hasColumn(
TableName.Organization,
GOOGLE_SSO_AUTH_ENFORCED_COLUMN_NAME
);
const hasGoogleSsoAuthLastUsedColumn = await knex.schema.hasColumn(
TableName.Organization,
GOOGLE_SSO_AUTH_LAST_USED_COLUMN_NAME
);
await knex.schema.alterTable(TableName.Organization, (table) => {
if (hasGoogleSsoAuthEnforcedColumn) table.dropColumn(GOOGLE_SSO_AUTH_ENFORCED_COLUMN_NAME);
if (hasGoogleSsoAuthLastUsedColumn) table.dropColumn(GOOGLE_SSO_AUTH_LAST_USED_COLUMN_NAME);
});
}

View File

@@ -36,7 +36,9 @@ export const OrganizationsSchema = z.object({
scannerProductEnabled: z.boolean().default(true).nullable().optional(),
shareSecretsProductEnabled: z.boolean().default(true).nullable().optional(),
maxSharedSecretLifetime: z.number().default(2592000).nullable().optional(),
maxSharedSecretViewLimit: z.number().nullable().optional()
maxSharedSecretViewLimit: z.number().nullable().optional(),
googleSsoAuthEnforced: z.boolean().default(false),
googleSsoAuthLastUsed: z.date().nullable().optional()
});
export type TOrganizations = z.infer<typeof OrganizationsSchema>;

View File

@@ -14,7 +14,7 @@ import { ActorType } from "@app/services/auth/auth-type";
import { EventType, filterableSecretEvents } from "./audit-log-types";
export interface TAuditLogDALFactory extends Omit<TOrmify<TableName.AuditLog>, "find"> {
pruneAuditLog: (tx?: knex.Knex) => Promise<void>;
pruneAuditLog: () => Promise<void>;
find: (
arg: Omit<TFindQuery, "actor" | "eventType"> & {
actorId?: string | undefined;
@@ -41,6 +41,10 @@ type TFindQuery = {
offset?: number;
};
const QUERY_TIMEOUT_MS = 10 * 60 * 1000; // 10 minutes
const AUDIT_LOG_PRUNE_BATCH_SIZE = 10000;
const MAX_RETRY_ON_FAILURE = 3;
export const auditLogDALFactory = (db: TDbClient) => {
const auditLogOrm = ormify(db, TableName.AuditLog);
@@ -151,20 +155,20 @@ export const auditLogDALFactory = (db: TDbClient) => {
};
// delete all audit log that have expired
const pruneAuditLog: TAuditLogDALFactory["pruneAuditLog"] = async (tx) => {
const runPrune = async (dbClient: knex.Knex) => {
const AUDIT_LOG_PRUNE_BATCH_SIZE = 10000;
const MAX_RETRY_ON_FAILURE = 3;
const pruneAuditLog: TAuditLogDALFactory["pruneAuditLog"] = async () => {
const today = new Date();
let deletedAuditLogIds: { id: string }[] = [];
let numberOfRetryOnFailure = 0;
let isRetrying = false;
const today = new Date();
let deletedAuditLogIds: { id: string }[] = [];
let numberOfRetryOnFailure = 0;
let isRetrying = false;
logger.info(`${QueueName.DailyResourceCleanUp}: audit log started`);
do {
try {
// eslint-disable-next-line no-await-in-loop
deletedAuditLogIds = await db.transaction(async (trx) => {
await trx.raw(`SET statement_timeout = ${QUERY_TIMEOUT_MS}`);
logger.info(`${QueueName.DailyResourceCleanUp}: audit log started`);
do {
try {
const findExpiredLogSubQuery = dbClient(TableName.AuditLog)
const findExpiredLogSubQuery = trx(TableName.AuditLog)
.where("expiresAt", "<", today)
.where("createdAt", "<", today) // to use audit log partition
.orderBy(`${TableName.AuditLog}.createdAt`, "desc")
@@ -172,35 +176,25 @@ export const auditLogDALFactory = (db: TDbClient) => {
.limit(AUDIT_LOG_PRUNE_BATCH_SIZE);
// eslint-disable-next-line no-await-in-loop
deletedAuditLogIds = await dbClient(TableName.AuditLog)
.whereIn("id", findExpiredLogSubQuery)
.del()
.returning("id");
numberOfRetryOnFailure = 0; // reset
} catch (error) {
numberOfRetryOnFailure += 1;
deletedAuditLogIds = [];
logger.error(error, "Failed to delete audit log on pruning");
} finally {
// eslint-disable-next-line no-await-in-loop
await new Promise((resolve) => {
setTimeout(resolve, 10); // time to breathe for db
});
}
isRetrying = numberOfRetryOnFailure > 0;
} while (deletedAuditLogIds.length > 0 || (isRetrying && numberOfRetryOnFailure < MAX_RETRY_ON_FAILURE));
logger.info(`${QueueName.DailyResourceCleanUp}: audit log completed`);
};
const results = await trx(TableName.AuditLog).whereIn("id", findExpiredLogSubQuery).del().returning("id");
if (tx) {
await runPrune(tx);
} else {
const QUERY_TIMEOUT_MS = 10 * 60 * 1000; // 10 minutes
await db.transaction(async (trx) => {
await trx.raw(`SET statement_timeout = ${QUERY_TIMEOUT_MS}`);
await runPrune(trx);
});
}
return results;
});
numberOfRetryOnFailure = 0; // reset
} catch (error) {
numberOfRetryOnFailure += 1;
deletedAuditLogIds = [];
logger.error(error, "Failed to delete audit log on pruning");
} finally {
// eslint-disable-next-line no-await-in-loop
await new Promise((resolve) => {
setTimeout(resolve, 10); // time to breathe for db
});
}
isRetrying = numberOfRetryOnFailure > 0;
} while (deletedAuditLogIds.length > 0 || (isRetrying && numberOfRetryOnFailure < MAX_RETRY_ON_FAILURE));
logger.info(`${QueueName.DailyResourceCleanUp}: audit log completed`);
};
const create: TAuditLogDALFactory["create"] = async (tx) => {

View File

@@ -123,7 +123,7 @@ export function createEventStreamClient(redis: Redis, options: IEventStreamClien
await redis.set(key, "1", "EX", 60);
stream.push("1");
send({ type: "ping" });
};
const close = () => {

View File

@@ -32,6 +32,7 @@ export const getDefaultOnPremFeatures = (): TFeatureSet => ({
auditLogStreams: false,
auditLogStreamLimit: 3,
samlSSO: false,
enforceGoogleSSO: false,
hsm: false,
oidcSSO: false,
scim: false,

View File

@@ -47,6 +47,7 @@ export type TFeatureSet = {
auditLogStreamLimit: 3;
githubOrgSync: false;
samlSSO: false;
enforceGoogleSSO: false;
hsm: false;
oidcSSO: false;
secretAccessInsights: false;

View File

@@ -13,6 +13,7 @@ import {
ProjectPermissionPkiSubscriberActions,
ProjectPermissionPkiTemplateActions,
ProjectPermissionSecretActions,
ProjectPermissionSecretEventActions,
ProjectPermissionSecretRotationActions,
ProjectPermissionSecretScanningConfigActions,
ProjectPermissionSecretScanningDataSourceActions,
@@ -252,6 +253,16 @@ const buildAdminPermissionRules = () => {
ProjectPermissionSub.SecretScanningConfigs
);
can(
[
ProjectPermissionSecretEventActions.SubscribeCreated,
ProjectPermissionSecretEventActions.SubscribeDeleted,
ProjectPermissionSecretEventActions.SubscribeUpdated,
ProjectPermissionSecretEventActions.SubscribeImportMutations
],
ProjectPermissionSub.SecretEvents
);
return rules;
};
@@ -455,6 +466,16 @@ const buildMemberPermissionRules = () => {
can([ProjectPermissionSecretScanningConfigActions.Read], ProjectPermissionSub.SecretScanningConfigs);
can(
[
ProjectPermissionSecretEventActions.SubscribeCreated,
ProjectPermissionSecretEventActions.SubscribeDeleted,
ProjectPermissionSecretEventActions.SubscribeUpdated,
ProjectPermissionSecretEventActions.SubscribeImportMutations
],
ProjectPermissionSub.SecretEvents
);
return rules;
};
@@ -505,6 +526,16 @@ const buildViewerPermissionRules = () => {
can([ProjectPermissionSecretScanningConfigActions.Read], ProjectPermissionSub.SecretScanningConfigs);
can(
[
ProjectPermissionSecretEventActions.SubscribeCreated,
ProjectPermissionSecretEventActions.SubscribeDeleted,
ProjectPermissionSecretEventActions.SubscribeUpdated,
ProjectPermissionSecretEventActions.SubscribeImportMutations
],
ProjectPermissionSub.SecretEvents
);
return rules;
};

View File

@@ -35,6 +35,7 @@ export interface TPermissionDALFactory {
projectFavorites?: string[] | null | undefined;
customRoleSlug?: string | null | undefined;
orgAuthEnforced?: boolean | null | undefined;
orgGoogleSsoAuthEnforced: boolean;
} & {
groups: {
id: string;
@@ -87,6 +88,7 @@ export interface TPermissionDALFactory {
}[];
orgId: string;
orgAuthEnforced: boolean | null | undefined;
orgGoogleSsoAuthEnforced: boolean;
orgRole: OrgMembershipRole;
userId: string;
projectId: string;
@@ -350,6 +352,7 @@ export const permissionDALFactory = (db: TDbClient): TPermissionDALFactory => {
db.ref("slug").withSchema(TableName.OrgRoles).withSchema(TableName.OrgRoles).as("customRoleSlug"),
db.ref("permissions").withSchema(TableName.OrgRoles),
db.ref("authEnforced").withSchema(TableName.Organization).as("orgAuthEnforced"),
db.ref("googleSsoAuthEnforced").withSchema(TableName.Organization).as("orgGoogleSsoAuthEnforced"),
db.ref("bypassOrgAuthEnabled").withSchema(TableName.Organization).as("bypassOrgAuthEnabled"),
db.ref("groupId").withSchema("userGroups"),
db.ref("groupOrgId").withSchema("userGroups"),
@@ -369,6 +372,7 @@ export const permissionDALFactory = (db: TDbClient): TPermissionDALFactory => {
OrgMembershipsSchema.extend({
permissions: z.unknown(),
orgAuthEnforced: z.boolean().optional().nullable(),
orgGoogleSsoAuthEnforced: z.boolean(),
bypassOrgAuthEnabled: z.boolean(),
customRoleSlug: z.string().optional().nullable(),
shouldUseNewPrivilegeSystem: z.boolean()
@@ -988,6 +992,7 @@ export const permissionDALFactory = (db: TDbClient): TPermissionDALFactory => {
db.ref("key").withSchema(TableName.IdentityMetadata).as("metadataKey"),
db.ref("value").withSchema(TableName.IdentityMetadata).as("metadataValue"),
db.ref("authEnforced").withSchema(TableName.Organization).as("orgAuthEnforced"),
db.ref("googleSsoAuthEnforced").withSchema(TableName.Organization).as("orgGoogleSsoAuthEnforced"),
db.ref("bypassOrgAuthEnabled").withSchema(TableName.Organization).as("bypassOrgAuthEnabled"),
db.ref("role").withSchema(TableName.OrgMembership).as("orgRole"),
db.ref("orgId").withSchema(TableName.Project),
@@ -1003,6 +1008,7 @@ export const permissionDALFactory = (db: TDbClient): TPermissionDALFactory => {
orgId,
username,
orgAuthEnforced,
orgGoogleSsoAuthEnforced,
orgRole,
membershipId,
groupMembershipId,
@@ -1016,6 +1022,7 @@ export const permissionDALFactory = (db: TDbClient): TPermissionDALFactory => {
}) => ({
orgId,
orgAuthEnforced,
orgGoogleSsoAuthEnforced,
orgRole: orgRole as OrgMembershipRole,
userId,
projectId,

View File

@@ -121,6 +121,7 @@ function isAuthMethodSaml(actorAuthMethod: ActorAuthMethod) {
function validateOrgSSO(
actorAuthMethod: ActorAuthMethod,
isOrgSsoEnforced: TOrganizations["authEnforced"],
isOrgGoogleSsoEnforced: TOrganizations["googleSsoAuthEnforced"],
isOrgSsoBypassEnabled: TOrganizations["bypassOrgAuthEnabled"],
orgRole: OrgMembershipRole
) {
@@ -128,10 +129,16 @@ function validateOrgSSO(
throw new UnauthorizedError({ name: "No auth method defined" });
}
if (isOrgSsoEnforced && isOrgSsoBypassEnabled && orgRole === OrgMembershipRole.Admin) {
if ((isOrgSsoEnforced || isOrgGoogleSsoEnforced) && isOrgSsoBypassEnabled && orgRole === OrgMembershipRole.Admin) {
return;
}
// case: google sso is enforced, but the actor is not using google sso
if (isOrgGoogleSsoEnforced && actorAuthMethod !== null && actorAuthMethod !== AuthMethod.GOOGLE) {
throw new ForbiddenRequestError({ name: "Org auth enforced. Cannot access org-scoped resource" });
}
// case: SAML SSO is enforced, but the actor is not using SAML SSO
if (
isOrgSsoEnforced &&
actorAuthMethod !== null &&

View File

@@ -146,6 +146,7 @@ export const permissionServiceFactory = ({
validateOrgSSO(
authMethod,
membership.orgAuthEnforced,
membership.orgGoogleSsoAuthEnforced,
membership.bypassOrgAuthEnabled,
membership.role as OrgMembershipRole
);
@@ -238,6 +239,7 @@ export const permissionServiceFactory = ({
validateOrgSSO(
authMethod,
userProjectPermission.orgAuthEnforced,
userProjectPermission.orgGoogleSsoAuthEnforced,
userProjectPermission.bypassOrgAuthEnabled,
userProjectPermission.orgRole
);

View File

@@ -2491,6 +2491,7 @@ export const SecretSyncs = {
},
RENDER: {
serviceId: "The ID of the Render service to sync secrets to.",
environmentGroupId: "The ID of the Render environment group to sync secrets to.",
scope: "The Render scope that secrets should be synced to.",
type: "The Render resource type to sync secrets to."
},

View File

@@ -49,4 +49,32 @@ export const registerRenderConnectionRouter = async (server: FastifyZodProvider)
return services;
}
});
server.route({
method: "GET",
url: `/:connectionId/environment-groups`,
config: {
rateLimit: readLimit
},
schema: {
params: z.object({
connectionId: z.string().uuid()
}),
response: {
200: z
.object({
id: z.string(),
name: z.string()
})
.array()
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const { connectionId } = req.params;
const groups = await server.services.appConnection.render.listEnvironmentGroups(connectionId, req.permission);
return groups;
}
});
};

View File

@@ -67,7 +67,7 @@ export const registerAuthRoutes = async (server: FastifyZodProvider) => {
})
}
},
onRequest: verifyAuth([AuthMode.JWT]),
onRequest: verifyAuth([AuthMode.JWT], { requireOrg: false }),
handler: () => ({ message: "Authenticated" as const })
});

View File

@@ -279,6 +279,7 @@ export const registerOrgRouter = async (server: FastifyZodProvider) => {
name: GenericResourceNameSchema.optional(),
slug: slugSchema({ max: 64 }).optional(),
authEnforced: z.boolean().optional(),
googleSsoAuthEnforced: z.boolean().optional(),
scimEnabled: z.boolean().optional(),
defaultMembershipRoleSlug: slugSchema({ max: 64, field: "Default Membership Role" }).optional(),
enforceMfa: z.boolean().optional(),

View File

@@ -54,6 +54,8 @@ export const registerOauthMiddlewares = (server: FastifyZodProvider) => {
try {
// @ts-expect-error this is because this is express type and not fastify
const callbackPort = req.session.get("callbackPort");
// @ts-expect-error this is because this is express type and not fastify
const orgSlug = req.session.get("orgSlug");
const email = profile?.emails?.[0]?.value;
if (!email)
@@ -67,7 +69,8 @@ export const registerOauthMiddlewares = (server: FastifyZodProvider) => {
firstName: profile?.name?.givenName || "",
lastName: profile?.name?.familyName || "",
authMethod: AuthMethod.GOOGLE,
callbackPort
callbackPort,
orgSlug
});
cb(null, { isUserCompleted, providerAuthToken });
} catch (error) {
@@ -215,6 +218,7 @@ export const registerSsoRouter = async (server: FastifyZodProvider) => {
schema: {
querystring: z.object({
callback_port: z.string().optional(),
org_slug: z.string().optional(),
is_admin_login: z
.string()
.optional()
@@ -223,12 +227,15 @@ export const registerSsoRouter = async (server: FastifyZodProvider) => {
},
preValidation: [
async (req, res) => {
const { callback_port: callbackPort, is_admin_login: isAdminLogin } = req.query;
const { callback_port: callbackPort, is_admin_login: isAdminLogin, org_slug: orgSlug } = req.query;
// ensure fresh session state per login attempt
await req.session.regenerate();
if (callbackPort) {
req.session.set("callbackPort", callbackPort);
}
if (orgSlug) {
req.session.set("orgSlug", orgSlug);
}
if (isAdminLogin) {
req.session.set("isAdminLogin", isAdminLogin);
}

View File

@@ -8,9 +8,11 @@ import { IntegrationUrls } from "@app/services/integration-auth/integration-list
import { AppConnection } from "../app-connection-enums";
import { RenderConnectionMethod } from "./render-connection-enums";
import {
TRawRenderEnvironmentGroup,
TRawRenderService,
TRenderConnection,
TRenderConnectionConfig,
TRenderEnvironmentGroup,
TRenderService
} from "./render-connection-types";
@@ -32,7 +34,11 @@ export const listRenderServices = async (appConnection: TRenderConnection): Prom
const perPage = 100;
let cursor;
let maxIterations = 10;
while (hasMorePages) {
if (maxIterations <= 0) break;
const res: TRawRenderService[] = (
await request.get<TRawRenderService[]>(`${IntegrationUrls.RENDER_API_URL}/v1/services`, {
params: new URLSearchParams({
@@ -59,6 +65,8 @@ export const listRenderServices = async (appConnection: TRenderConnection): Prom
} else {
cursor = res[res.length - 1].cursor;
}
maxIterations -= 1;
}
return services;
@@ -86,3 +94,52 @@ export const validateRenderConnectionCredentials = async (config: TRenderConnect
return inputCredentials;
};
export const listRenderEnvironmentGroups = async (
appConnection: TRenderConnection
): Promise<TRenderEnvironmentGroup[]> => {
const {
credentials: { apiKey }
} = appConnection;
const groups: TRenderEnvironmentGroup[] = [];
let hasMorePages = true;
const perPage = 100;
let cursor;
let maxIterations = 10;
while (hasMorePages) {
if (maxIterations <= 0) break;
const res: TRawRenderEnvironmentGroup[] = (
await request.get<TRawRenderEnvironmentGroup[]>(`${IntegrationUrls.RENDER_API_URL}/v1/env-groups`, {
params: new URLSearchParams({
...(cursor ? { cursor: String(cursor) } : {}),
limit: String(perPage)
}),
headers: {
Authorization: `Bearer ${apiKey}`,
Accept: "application/json",
"Accept-Encoding": "application/json"
}
})
).data;
res.forEach((item) => {
groups.push({
name: item.envGroup.name,
id: item.envGroup.id
});
});
if (res.length < perPage) {
hasMorePages = false;
} else {
cursor = res[res.length - 1].cursor;
}
maxIterations -= 1;
}
return groups;
};

View File

@@ -2,7 +2,7 @@ import { logger } from "@app/lib/logger";
import { OrgServiceActor } from "@app/lib/types";
import { AppConnection } from "../app-connection-enums";
import { listRenderServices } from "./render-connection-fns";
import { listRenderEnvironmentGroups, listRenderServices } from "./render-connection-fns";
import { TRenderConnection } from "./render-connection-types";
type TGetAppConnectionFunc = (
@@ -24,7 +24,20 @@ export const renderConnectionService = (getAppConnection: TGetAppConnectionFunc)
}
};
const listEnvironmentGroups = async (connectionId: string, actor: OrgServiceActor) => {
const appConnection = await getAppConnection(AppConnection.Render, connectionId, actor);
try {
const groups = await listRenderEnvironmentGroups(appConnection);
return groups;
} catch (error) {
logger.error(error, "Failed to list environment groups for Render connection");
return [];
}
};
return {
listServices
listServices,
listEnvironmentGroups
};
};

View File

@@ -33,3 +33,16 @@ export type TRawRenderService = {
name: string;
};
};
export type TRenderEnvironmentGroup = {
name: string;
id: string;
};
export type TRawRenderEnvironmentGroup = {
cursor: string;
envGroup: {
id: string;
name: string;
};
};

View File

@@ -448,15 +448,34 @@ export const authLoginServiceFactory = ({
// Check if the user actually has access to the specified organization.
const userOrgs = await orgDAL.findAllOrgsByUserId(user.id);
const hasOrganizationMembership = userOrgs.some((org) => org.id === organizationId && org.userStatus !== "invited");
const selectedOrgMembership = userOrgs.find((org) => org.id === organizationId && org.userStatus !== "invited");
const selectedOrg = await orgDAL.findById(organizationId);
if (!hasOrganizationMembership) {
if (!selectedOrgMembership) {
throw new ForbiddenRequestError({
message: `User does not have access to the organization named ${selectedOrg?.name}`
});
}
if (selectedOrg.googleSsoAuthEnforced && decodedToken.authMethod !== AuthMethod.GOOGLE) {
const canBypass = selectedOrg.bypassOrgAuthEnabled && selectedOrgMembership.userRole === OrgMembershipRole.Admin;
if (!canBypass) {
throw new ForbiddenRequestError({
message: "Google SSO is enforced for this organization. Please use Google SSO to login.",
error: "GoogleSsoEnforced"
});
}
}
if (decodedToken.authMethod === AuthMethod.GOOGLE) {
await orgDAL.updateById(selectedOrg.id, {
googleSsoAuthLastUsed: new Date()
});
}
const shouldCheckMfa = selectedOrg.enforceMfa || user.isMfaEnabled;
const orgMfaMethod = selectedOrg.enforceMfa ? (selectedOrg.selectedMfaMethod ?? MfaMethod.EMAIL) : undefined;
const userMfaMethod = user.isMfaEnabled ? (user.selectedMfaMethod ?? MfaMethod.EMAIL) : undefined;
@@ -502,7 +521,8 @@ export const authLoginServiceFactory = ({
selectedOrg.authEnforced &&
selectedOrg.bypassOrgAuthEnabled &&
!isAuthMethodSaml(decodedToken.authMethod) &&
decodedToken.authMethod !== AuthMethod.OIDC
decodedToken.authMethod !== AuthMethod.OIDC &&
decodedToken.authMethod !== AuthMethod.GOOGLE
) {
await auditLogService.createAuditLog({
orgId: organizationId,
@@ -705,7 +725,7 @@ export const authLoginServiceFactory = ({
/*
* OAuth2 login for google,github, and other oauth2 provider
* */
const oauth2Login = async ({ email, firstName, lastName, authMethod, callbackPort }: TOauthLoginDTO) => {
const oauth2Login = async ({ email, firstName, lastName, authMethod, callbackPort, orgSlug }: TOauthLoginDTO) => {
// akhilmhdh: case sensitive email resolution
const usersByUsername = await userDAL.findUserByUsername(email);
let user = usersByUsername?.length > 1 ? usersByUsername.find((el) => el.username === email) : usersByUsername?.[0];
@@ -759,6 +779,8 @@ export const authLoginServiceFactory = ({
const appCfg = getConfig();
let orgId = "";
let orgName: undefined | string;
if (!user) {
// Create a new user based on oAuth
if (!serverCfg?.allowSignUp) throw new BadRequestError({ message: "Sign up disabled", name: "Oauth 2 login" });
@@ -784,7 +806,6 @@ export const authLoginServiceFactory = ({
});
if (authMethod === AuthMethod.GITHUB && serverCfg.defaultAuthOrgId && !appCfg.isCloud) {
let orgId = "";
const defaultOrg = await orgDAL.findOrgById(serverCfg.defaultAuthOrgId);
if (!defaultOrg) {
throw new BadRequestError({
@@ -824,11 +845,39 @@ export const authLoginServiceFactory = ({
}
}
if (!orgId && orgSlug) {
const org = await orgDAL.findOrgBySlug(orgSlug);
if (org) {
// checks for the membership and only sets the orgId / orgName if the user is a member of the specified org
const orgMembership = await orgDAL.findMembership({
[`${TableName.OrgMembership}.userId` as "userId"]: user.id,
[`${TableName.OrgMembership}.orgId` as "orgId"]: org.id,
[`${TableName.OrgMembership}.isActive` as "isActive"]: true,
[`${TableName.OrgMembership}.status` as "status"]: OrgMembershipStatus.Accepted
});
if (orgMembership) {
orgId = org.id;
orgName = org.name;
}
}
}
const isUserCompleted = user.isAccepted;
const providerAuthToken = crypto.jwt().sign(
{
authTokenType: AuthTokenType.PROVIDER_TOKEN,
userId: user.id,
...(orgId && orgSlug && orgName !== undefined
? {
organizationId: orgId,
organizationName: orgName,
organizationSlug: orgSlug
}
: {}),
username: user.username,
email: user.email,
isEmailVerified: user.isEmailVerified,

View File

@@ -32,6 +32,7 @@ export type TOauthLoginDTO = {
lastName?: string;
authMethod: AuthMethod;
callbackPort?: string;
orgSlug?: string;
};
export type TOauthTokenExchangeDTO = {

View File

@@ -8,6 +8,7 @@ export const sanitizedOrganizationSchema = OrganizationsSchema.pick({
createdAt: true,
updatedAt: true,
authEnforced: true,
googleSsoAuthEnforced: true,
scimEnabled: true,
kmsDefaultKeyId: true,
defaultMembershipRole: true,

View File

@@ -364,6 +364,7 @@ export const orgServiceFactory = ({
name,
slug,
authEnforced,
googleSsoAuthEnforced,
scimEnabled,
defaultMembershipRoleSlug,
enforceMfa,
@@ -430,6 +431,21 @@ export const orgServiceFactory = ({
}
}
if (googleSsoAuthEnforced !== undefined) {
if (!plan.enforceGoogleSSO) {
throw new BadRequestError({
message: "Failed to enforce Google SSO due to plan restriction. Upgrade plan to enforce Google SSO."
});
}
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Edit, OrgPermissionSubjects.Sso);
}
if (authEnforced && googleSsoAuthEnforced) {
throw new BadRequestError({
message: "SAML/OIDC auth enforcement and Google SSO auth enforcement cannot be enabled at the same time."
});
}
if (authEnforced) {
const samlCfg = await samlConfigDAL.findOne({
orgId,
@@ -460,6 +476,21 @@ export const orgServiceFactory = ({
}
}
if (googleSsoAuthEnforced) {
if (googleSsoAuthEnforced && currentOrg.authEnforced) {
throw new BadRequestError({
message: "Google SSO auth enforcement cannot be enabled when SAML/OIDC auth enforcement is enabled."
});
}
if (!currentOrg.googleSsoAuthLastUsed) {
throw new BadRequestError({
message:
"Google SSO auth enforcement cannot be enabled because Google SSO has not been used yet. Please log in via Google SSO at least once before enforcing it for your organization."
});
}
}
let defaultMembershipRole: string | undefined;
if (defaultMembershipRoleSlug) {
defaultMembershipRole = await getDefaultOrgMembershipRoleForUpdateOrg({
@@ -474,6 +505,7 @@ export const orgServiceFactory = ({
name,
slug: slug ? slugify(slug) : undefined,
authEnforced,
googleSsoAuthEnforced,
scimEnabled,
defaultMembershipRole,
enforceMfa,

View File

@@ -74,6 +74,7 @@ export type TUpdateOrgDTO = {
name: string;
slug: string;
authEnforced: boolean;
googleSsoAuthEnforced: boolean;
scimEnabled: boolean;
defaultMembershipRoleSlug: string;
enforceMfa: boolean;

View File

@@ -1,5 +1,6 @@
export enum RenderSyncScope {
Service = "service"
Service = "service",
EnvironmentGroup = "environment-group"
}
export enum RenderSyncType {

View File

@@ -1,11 +1,13 @@
/* eslint-disable no-await-in-loop */
import { isAxiosError } from "axios";
import { AxiosRequestConfig, isAxiosError } from "axios";
import { request } from "@app/lib/config/request";
import { BadRequestError } from "@app/lib/errors";
import { IntegrationUrls } from "@app/services/integration-auth/integration-list";
import { matchesSchema } from "@app/services/secret-sync/secret-sync-fns";
import { TSecretMap } from "@app/services/secret-sync/secret-sync-types";
import { RenderSyncScope } from "./render-sync-enums";
import { TRenderSecret, TRenderSyncWithCredentials } from "./render-sync-types";
const MAX_RETRIES = 5;
@@ -27,6 +29,80 @@ const makeRequestWithRetry = async <T>(requestFn: () => Promise<T>, attempt = 0)
}
};
async function getSecrets(input: { destination: TRenderSyncWithCredentials["destinationConfig"]; token: string }) {
const req: AxiosRequestConfig = {
baseURL: `${IntegrationUrls.RENDER_API_URL}/v1`,
method: "GET",
headers: {
Authorization: `Bearer ${input.token}`,
Accept: "application/json"
}
};
switch (input.destination.scope) {
case RenderSyncScope.Service: {
req.url = `/services/${input.destination.serviceId}/env-vars`;
const allSecrets: TRenderSecret[] = [];
let cursor: string | undefined;
do {
// eslint-disable-next-line @typescript-eslint/no-loop-func
const { data } = await makeRequestWithRetry(() =>
request.request<
{
envVar: {
key: string;
value: string;
};
cursor: string;
}[]
>({
...req,
params: {
cursor
}
})
);
const secrets = data.map((item) => ({
key: item.envVar.key,
value: item.envVar.value
}));
allSecrets.push(...secrets);
if (data.length > 0 && data[data.length - 1]?.cursor) {
cursor = data[data.length - 1].cursor;
} else {
cursor = undefined;
}
} while (cursor);
return allSecrets;
}
case RenderSyncScope.EnvironmentGroup: {
req.url = `/env-groups/${input.destination.environmentGroupId}`;
const res = await makeRequestWithRetry(() =>
request.request<{
envVars: {
key: string;
value: string;
}[];
}>(req)
);
return res.data.envVars.map((item) => ({
key: item.key,
value: item.value
}));
}
default:
throw new BadRequestError({ message: "Unknown render sync destination scope" });
}
}
const getRenderEnvironmentSecrets = async (secretSync: TRenderSyncWithCredentials): Promise<TRenderSecret[]> => {
const {
destinationConfig,
@@ -35,45 +111,12 @@ const getRenderEnvironmentSecrets = async (secretSync: TRenderSyncWithCredential
}
} = secretSync;
const baseUrl = `${IntegrationUrls.RENDER_API_URL}/v1/services/${destinationConfig.serviceId}/env-vars`;
const allSecrets: TRenderSecret[] = [];
let cursor: string | undefined;
const secrets = await getSecrets({
destination: destinationConfig,
token: apiKey
});
do {
const url = cursor ? `${baseUrl}?cursor=${cursor}` : baseUrl;
const { data } = await makeRequestWithRetry(() =>
request.get<
{
envVar: {
key: string;
value: string;
};
cursor: string;
}[]
>(url, {
headers: {
Authorization: `Bearer ${apiKey}`,
Accept: "application/json"
}
})
);
const secrets = data.map((item) => ({
key: item.envVar.key,
value: item.envVar.value
}));
allSecrets.push(...secrets);
if (data.length > 0 && data[data.length - 1]?.cursor) {
cursor = data[data.length - 1].cursor;
} else {
cursor = undefined;
}
} while (cursor);
return allSecrets;
return secrets;
};
const batchUpdateEnvironmentSecrets = async (
@@ -87,14 +130,91 @@ const batchUpdateEnvironmentSecrets = async (
}
} = secretSync;
await makeRequestWithRetry(() =>
request.put(`${IntegrationUrls.RENDER_API_URL}/v1/services/${destinationConfig.serviceId}/env-vars`, envVars, {
headers: {
Authorization: `Bearer ${apiKey}`,
Accept: "application/json"
const req: AxiosRequestConfig = {
baseURL: `${IntegrationUrls.RENDER_API_URL}/v1`,
method: "PUT",
headers: {
Authorization: `Bearer ${apiKey}`,
Accept: "application/json"
}
};
switch (destinationConfig.scope) {
case RenderSyncScope.Service: {
await makeRequestWithRetry(() =>
request.request({
...req,
url: `/services/${destinationConfig.serviceId}/env-vars`,
data: envVars
})
);
break;
}
case RenderSyncScope.EnvironmentGroup: {
for await (const variable of envVars) {
await makeRequestWithRetry(() =>
request.request({
...req,
url: `/env-groups/${destinationConfig.environmentGroupId}/env-vars/${variable.key}`,
data: {
value: variable.value
}
})
);
}
})
);
break;
}
default:
throw new BadRequestError({ message: "Unknown render sync destination scope" });
}
};
const deleteEnvironmentSecret = async (
secretSync: TRenderSyncWithCredentials,
envVar: { key: string; value: string }
): Promise<void> => {
const {
destinationConfig,
connection: {
credentials: { apiKey }
}
} = secretSync;
const req: AxiosRequestConfig = {
baseURL: `${IntegrationUrls.RENDER_API_URL}/v1`,
method: "DELETE",
headers: {
Authorization: `Bearer ${apiKey}`,
Accept: "application/json"
}
};
switch (destinationConfig.scope) {
case RenderSyncScope.Service: {
await makeRequestWithRetry(() =>
request.request({
...req,
url: `/services/${destinationConfig.serviceId}/env-vars/${envVar.key}`
})
);
break;
}
case RenderSyncScope.EnvironmentGroup: {
await makeRequestWithRetry(() =>
request.request({
...req,
url: `/env-groups/${destinationConfig.environmentGroupId}/env-vars/${envVar.key}`
})
);
break;
}
default:
throw new BadRequestError({ message: "Unknown render sync destination scope" });
}
};
const redeployService = async (secretSync: TRenderSyncWithCredentials) => {
@@ -105,18 +225,50 @@ const redeployService = async (secretSync: TRenderSyncWithCredentials) => {
}
} = secretSync;
await makeRequestWithRetry(() =>
request.post(
`${IntegrationUrls.RENDER_API_URL}/v1/services/${destinationConfig.serviceId}/deploys`,
{},
{
headers: {
Authorization: `Bearer ${apiKey}`,
Accept: "application/json"
}
const req: AxiosRequestConfig = {
baseURL: `${IntegrationUrls.RENDER_API_URL}/v1`,
headers: {
Authorization: `Bearer ${apiKey}`,
Accept: "application/json"
}
};
switch (destinationConfig.scope) {
case RenderSyncScope.Service: {
await makeRequestWithRetry(() =>
request.request({
...req,
method: "POST",
url: `/services/${destinationConfig.serviceId}/deploys`,
data: {}
})
);
break;
}
case RenderSyncScope.EnvironmentGroup: {
const { data } = await request.request<{ serviceLinks: { id: string }[] }>({
...req,
method: "GET",
url: `/env-groups/${destinationConfig.environmentGroupId}`
});
for await (const link of data.serviceLinks) {
// eslint-disable-next-line @typescript-eslint/no-loop-func
await makeRequestWithRetry(() =>
request.request({
...req,
url: `/services/${link.id}/deploys`,
data: {}
})
);
}
)
);
break;
}
default:
throw new BadRequestError({ message: "Unknown render sync destination scope" });
}
};
export const RenderSyncFns = {
@@ -169,14 +321,15 @@ export const RenderSyncFns = {
const finalEnvVars: Array<{ key: string; value: string }> = [];
for (const renderSecret of renderSecrets) {
if (!(renderSecret.key in secretMap)) {
if (renderSecret.key in secretMap) {
finalEnvVars.push({
key: renderSecret.key,
value: renderSecret.value
});
}
}
await batchUpdateEnvironmentSecrets(secretSync, finalEnvVars);
await Promise.all(finalEnvVars.map((el) => deleteEnvironmentSecret(secretSync, el)));
if (secretSync.syncOptions.autoRedeployServices) {
await redeployService(secretSync);

View File

@@ -17,6 +17,14 @@ const RenderSyncDestinationConfigSchema = z.discriminatedUnion("scope", [
scope: z.literal(RenderSyncScope.Service).describe(SecretSyncs.DESTINATION_CONFIG.RENDER.scope),
serviceId: z.string().min(1, "Service ID is required").describe(SecretSyncs.DESTINATION_CONFIG.RENDER.serviceId),
type: z.nativeEnum(RenderSyncType).describe(SecretSyncs.DESTINATION_CONFIG.RENDER.type)
}),
z.object({
scope: z.literal(RenderSyncScope.EnvironmentGroup).describe(SecretSyncs.DESTINATION_CONFIG.RENDER.scope),
environmentGroupId: z
.string()
.min(1, "Environment Group ID is required")
.describe(SecretSyncs.DESTINATION_CONFIG.RENDER.environmentGroupId),
type: z.nativeEnum(RenderSyncType).describe(SecretSyncs.DESTINATION_CONFIG.RENDER.type)
})
]);

View File

@@ -310,7 +310,8 @@
"self-hosting/guides/mongo-to-postgres",
"self-hosting/guides/custom-certificates",
"self-hosting/guides/automated-bootstrapping",
"self-hosting/guides/production-hardening"
"self-hosting/guides/production-hardening",
"self-hosting/guides/monitoring-telemetry"
]
},
{

View File

@@ -27,7 +27,7 @@ $ ansible-galaxy collection install infisical.vault
The python module dependencies are not installed by ansible-galaxy. They can be manually installed using pip:
```bash
$ pip install infisical-python
$ pip install infisicalsdk
```
## Using this collection
@@ -42,7 +42,7 @@ vars:
# [{ "key": "HOST", "value": "google.com" }, { "key": "SMTP", "value": "gmail.smtp.edu" }]
read_secret_by_name_within_scope: "{{ lookup('infisical.vault.read_secrets', universal_auth_client_id='<>', universal_auth_client_secret='<>', project_id='<>', path='/', env_slug='dev', secret_name='HOST', url='https://spotify.infisical.com') }}"
# [{ "key": "HOST", "value": "google.com" }]
# { "key": "HOST", "value": "google.com" }
```

View File

@@ -30,8 +30,9 @@ description: "Learn how to configure a Render Sync for Infisical."
![Configure Destination](/images/secret-syncs/render/render-sync-destination.png)
- **Render Connection**: The Render Connection to authenticate with.
- **Scope**: Select **Service**.
- **Service**: Choose the Render service you want to sync secrets to.
- **Scope**: Select **Service** or **Environment Group**.
- **Service**: Choose the Render service you want to sync secrets to.
- **Environment Group**: Choose the Render environment group you want to sync secrets to.
5. Configure the **Sync Options** to specify how secrets should be synced, then click **Next**.
![Configure Options](/images/secret-syncs/render/render-sync-options.png)

View File

@@ -0,0 +1,440 @@
---
title: "Monitoring and Telemetry Setup"
description: "Learn how to set up monitoring and telemetry for your self-hosted Infisical instance using Grafana, Prometheus, and OpenTelemetry."
---
Infisical provides comprehensive monitoring and telemetry capabilities to help you monitor the health, performance, and usage of your self-hosted instance. This guide covers setting up monitoring using Grafana with two different telemetry collection approaches.
## Overview
Infisical exports metrics in **OpenTelemetry (OTEL) format**, which provides maximum flexibility for your monitoring infrastructure. While this guide focuses on Grafana, the OTEL format means you can easily integrate with:
- **Cloud-native monitoring**: AWS CloudWatch, Google Cloud Monitoring, Azure Monitor
- **Observability platforms**: Datadog, New Relic, Splunk, Dynatrace
- **Custom backends**: Any system that supports OTEL ingestion
- **Traditional monitoring**: Prometheus, Grafana (as covered in this guide)
Infisical supports two telemetry collection methods:
1. **Pull-based (Prometheus)**: Exposes metrics on a dedicated endpoint for Prometheus to scrape
2. **Push-based (OTLP)**: Sends metrics to an OpenTelemetry Collector via OTLP protocol
Both approaches provide the same metrics data in OTEL format, so you can choose the one that best fits your infrastructure and monitoring strategy.
## Prerequisites
- Self-hosted Infisical instance running
- Access to deploy monitoring services (Prometheus, Grafana, etc.)
- Basic understanding of Prometheus and Grafana
## Environment Variables
Configure the following environment variables in your Infisical backend:
```bash
# Enable telemetry collection
OTEL_TELEMETRY_COLLECTION_ENABLED=true
# Choose export type: "prometheus" or "otlp"
OTEL_EXPORT_TYPE=prometheus
# For OTLP push mode, also configure:
# OTEL_EXPORT_OTLP_ENDPOINT=http://otel-collector:4318/v1/metrics
# OTEL_COLLECTOR_BASIC_AUTH_USERNAME=your_collector_username
# OTEL_COLLECTOR_BASIC_AUTH_PASSWORD=your_collector_password
# OTEL_OTLP_PUSH_INTERVAL=30000
```
**Note**: The `OTEL_COLLECTOR_BASIC_AUTH_USERNAME` and `OTEL_COLLECTOR_BASIC_AUTH_PASSWORD` values must match the credentials configured in your OpenTelemetry Collector's `basicauth/server` extension. These are not hardcoded values; you configure them in your collector configuration file.
## Option 1: Pull-based Monitoring (Prometheus)
This approach exposes metrics on port 9464 at the `/metrics` endpoint, allowing Prometheus to scrape the data. The metrics are exposed in Prometheus format but originate from OpenTelemetry instrumentation.
### Configuration
1. **Enable Prometheus export in Infisical**:
```bash
OTEL_TELEMETRY_COLLECTION_ENABLED=true
OTEL_EXPORT_TYPE=prometheus
```
2. **Expose the metrics port** in your Infisical backend:
- **Docker**: Expose port 9464
- **Kubernetes**: Create a service exposing port 9464
- **Other**: Ensure port 9464 is accessible to your monitoring stack
3. **Create Prometheus configuration** (`prometheus.yml`):
```yaml
global:
scrape_interval: 30s
evaluation_interval: 30s
scrape_configs:
- job_name: "infisical"
scrape_interval: 30s
static_configs:
- targets: ["infisical-backend:9464"] # Adjust hostname/port based on your deployment
metrics_path: "/metrics"
```
**Note**: Replace `infisical-backend:9464` with the actual hostname and port where your Infisical backend is running. This could be:
- **Docker Compose**: `infisical-backend:9464` (service name)
- **Kubernetes**: `infisical-backend.default.svc.cluster.local:9464` (service name)
- **Bare Metal**: `192.168.1.100:9464` (actual IP address)
- **Cloud**: `your-infisical.example.com:9464` (domain name)
### Deployment Options
#### Docker Compose
```yaml
services:
prometheus:
image: prom/prometheus:latest
ports:
- "9090:9090"
volumes:
- ./prometheus.yml:/etc/prometheus/prometheus.yml:ro
command:
- "--config.file=/etc/prometheus/prometheus.yml"
grafana:
image: grafana/grafana:latest
ports:
- "3000:3000"
environment:
- GF_SECURITY_ADMIN_USER=admin
- GF_SECURITY_ADMIN_PASSWORD=admin
```
#### Kubernetes
```yaml
# prometheus-deployment.yaml
apiVersion: apps/v1
kind: Deployment
metadata:
name: prometheus
spec:
replicas: 1
selector:
matchLabels:
app: prometheus
template:
metadata:
labels:
app: prometheus
spec:
containers:
- name: prometheus
image: prom/prometheus:latest
ports:
- containerPort: 9090
volumeMounts:
- name: config
mountPath: /etc/prometheus
volumes:
- name: config
configMap:
name: prometheus-config
---
# prometheus-service.yaml
apiVersion: v1
kind: Service
metadata:
name: prometheus
spec:
selector:
app: prometheus
ports:
- port: 9090
targetPort: 9090
type: ClusterIP
```
#### Helm
```bash
helm repo add prometheus-community https://prometheus-community.github.io/helm-charts
helm install prometheus prometheus-community/prometheus \
--set server.config.global.scrape_interval=30s \
--set server.config.scrape_configs[0].job_name=infisical \
--set server.config.scrape_configs[0].static_configs[0].targets[0]=infisical-backend:9464
```
## Option 2: Push-based Monitoring (OTLP)
This approach sends metrics directly to an OpenTelemetry Collector via the OTLP protocol. It is the most flexible option, since you can configure the collector to export to multiple backends simultaneously.
### Configuration
1. **Enable OTLP export in Infisical**:
```bash
OTEL_TELEMETRY_COLLECTION_ENABLED=true
OTEL_EXPORT_TYPE=otlp
OTEL_EXPORT_OTLP_ENDPOINT=http://otel-collector:4318/v1/metrics
OTEL_COLLECTOR_BASIC_AUTH_USERNAME=infisical
OTEL_COLLECTOR_BASIC_AUTH_PASSWORD=infisical
OTEL_OTLP_PUSH_INTERVAL=30000
```
2. **Create OpenTelemetry Collector configuration** (`otel-collector-config.yaml`):
```yaml
extensions:
health_check:
pprof:
zpages:
basicauth/server:
htpasswd:
inline: |
your_username:your_password
receivers:
otlp:
protocols:
http:
endpoint: 0.0.0.0:4318
auth:
authenticator: basicauth/server
prometheus:
config:
scrape_configs:
- job_name: otel-collector
scrape_interval: 30s
static_configs:
- targets: [infisical-backend:9464]
metric_relabel_configs:
- action: labeldrop
regex: "service_instance_id|service_name"
processors:
batch:
exporters:
prometheus:
endpoint: "0.0.0.0:8889"
auth:
authenticator: basicauth/server
resource_to_telemetry_conversion:
enabled: true
service:
extensions: [basicauth/server, health_check, pprof, zpages]
pipelines:
metrics:
receivers: [otlp]
processors: [batch]
exporters: [prometheus]
```
**Important**: Replace `your_username:your_password` with your chosen credentials. These must match the values you set in Infisical's `OTEL_COLLECTOR_BASIC_AUTH_USERNAME` and `OTEL_COLLECTOR_BASIC_AUTH_PASSWORD` environment variables.
3. **Create Prometheus configuration** for the collector:
```yaml
global:
scrape_interval: 30s
evaluation_interval: 30s
scrape_configs:
- job_name: "otel-collector"
scrape_interval: 30s
static_configs:
- targets: ["otel-collector:8889"] # Adjust hostname/port based on your deployment
metrics_path: "/metrics"
```
**Note**: Replace `otel-collector:8889` with the actual hostname and port where your OpenTelemetry Collector is running. This could be:
- **Docker Compose**: `otel-collector:8889` (service name)
- **Kubernetes**: `otel-collector.default.svc.cluster.local:8889` (service name)
- **Bare Metal**: `192.168.1.100:8889` (actual IP address)
- **Cloud**: `your-collector.example.com:8889` (domain name)
### Deployment Options
#### Docker Compose
```yaml
services:
otel-collector:
image: otel/opentelemetry-collector-contrib:latest
ports:
- 4318:4318 # OTLP http receiver
- 8889:8889 # Prometheus exporter metrics
volumes:
- ./otel-collector-config.yaml:/etc/otelcol-contrib/config.yaml:ro
command:
- "--config=/etc/otelcol-contrib/config.yaml"
```
#### Kubernetes
```yaml
# otel-collector-deployment.yaml
apiVersion: apps/v1
kind: Deployment
metadata:
name: otel-collector
spec:
replicas: 1
selector:
matchLabels:
app: otel-collector
template:
metadata:
labels:
app: otel-collector
spec:
containers:
- name: otel-collector
image: otel/opentelemetry-collector-contrib:latest
ports:
- containerPort: 4318
- containerPort: 8889
volumeMounts:
- name: config
mountPath: /etc/otelcol-contrib
volumes:
- name: config
configMap:
name: otel-collector-config
```
#### Helm
```bash
helm repo add open-telemetry https://open-telemetry.github.io/opentelemetry-helm-charts
helm install otel-collector open-telemetry/opentelemetry-collector \
--set config.receivers.otlp.protocols.http.endpoint=0.0.0.0:4318 \
--set config.exporters.prometheus.endpoint=0.0.0.0:8889
```
## Alternative Backends
Since Infisical exports in OpenTelemetry format, you can easily configure the collector to send metrics to other backends instead of (or in addition to) Prometheus:
### Cloud-Native Examples
```yaml
# Add to your otel-collector-config.yaml exporters section
exporters:
# AWS CloudWatch
awsemf:
region: us-west-2
log_group_name: /aws/emf/infisical
log_stream_name: metrics
# Google Cloud Monitoring
googlecloud:
project_id: your-project-id
# Azure Monitor
azuremonitor:
connection_string: "your-connection-string"
# Datadog
datadog:
api:
key: "your-api-key"
site: "datadoghq.com"
# New Relic
newrelic:
apikey: "your-api-key"
host_override: "otlp.nr-data.net"
```
### Multi-Backend Configuration
```yaml
service:
pipelines:
metrics:
receivers: [otlp]
processors: [batch]
exporters: [prometheus, awsemf, datadog] # Send to multiple backends
```
## Setting Up Grafana
1. **Access Grafana**: Navigate to your Grafana instance
2. **Login**: Use your configured credentials
3. **Add Prometheus Data Source**:
- Go to Configuration → Data Sources
- Click "Add data source"
- Select "Prometheus"
- Set URL to your Prometheus endpoint
- Click "Save & Test"
## Available Metrics
Infisical exposes the following key metrics in OpenTelemetry format:
### API Performance Metrics
- `API_latency` - API request latency histogram in milliseconds
- **Labels**: `route`, `method`, `statusCode`
- **Example**: Monitor response times for specific endpoints
- `API_errors` - API error count histogram
- **Labels**: `route`, `method`, `type`, `name`
- **Example**: Track error rates by endpoint and error type
### Integration & Secret Sync Metrics
- `integration_secret_sync_errors` - Integration secret sync error count
- **Labels**: `version`, `integration`, `integrationId`, `type`, `status`, `name`, `projectId`
- **Example**: Monitor integration sync failures across different services
- `secret_sync_sync_secrets_errors` - Secret sync operation error count
- **Labels**: `version`, `destination`, `syncId`, `projectId`, `type`, `status`, `name`
- **Example**: Track secret sync failures to external systems
- `secret_sync_import_secrets_errors` - Secret import operation error count
- **Labels**: `version`, `destination`, `syncId`, `projectId`, `type`, `status`, `name`
- **Example**: Monitor secret import failures
- `secret_sync_remove_secrets_errors` - Secret removal operation error count
- **Labels**: `version`, `destination`, `syncId`, `projectId`, `type`, `status`, `name`
- **Example**: Track secret removal operation failures
### System Metrics
These metrics are automatically collected by OpenTelemetry's HTTP instrumentation:
- `http_server_duration` - HTTP server request duration metrics (histogram buckets, count, sum)
- `http_client_duration` - HTTP client request duration metrics (histogram buckets, count, sum)
### Custom Business Metrics
- `infisical_secret_operations_total` - Total secret operations
- `infisical_secrets_processed_total` - Total secrets processed
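These metrics can be wired into Prometheus alerting rules. The example below is a sketch, not part of Infisical's shipped configuration; it assumes the histograms are exposed with the standard `_bucket`/`_count` suffixes (e.g. `API_latency_bucket`) and that `API_latency` is measured in milliseconds, as described above.
```yaml
# infisical-alerts.yaml (illustrative)
groups:
  - name: infisical
    rules:
      - alert: InfisicalHighApiLatency
        # p95 latency above 1000 ms for 10 minutes, per route
        expr: histogram_quantile(0.95, sum(rate(API_latency_bucket[5m])) by (le, route)) > 1000
        for: 10m
        labels:
          severity: warning
        annotations:
          summary: "High p95 API latency on {{ $labels.route }}"
      - alert: InfisicalSecretSyncErrors
        # any secret sync errors in the last 15 minutes
        expr: increase(secret_sync_sync_secrets_errors_count[15m]) > 0
        labels:
          severity: warning
        annotations:
          summary: "Secret sync errors for destination {{ $labels.destination }}"
```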
## Troubleshooting
### Common Issues
1. **Metrics not appearing**:
- Check if `OTEL_TELEMETRY_COLLECTION_ENABLED=true`
- Verify the correct `OTEL_EXPORT_TYPE` is set
- Check network connectivity between services
2. **Authentication errors**:
- Verify basic auth credentials in OTLP configuration
- Check if credentials match between Infisical and collector

View File

@@ -5,7 +5,9 @@ import { SecretSyncConnectionField } from "@app/components/secret-syncs/forms/Se
import { FilterableSelect, FormControl, Select, SelectItem } from "@app/components/v2";
import { RENDER_SYNC_SCOPES } from "@app/helpers/secretSyncs";
import {
TRenderEnvironmentGroup,
TRenderService,
useRenderConnectionListEnvironmentGroups,
useRenderConnectionListServices
} from "@app/hooks/api/appConnections/render";
import { SecretSync } from "@app/hooks/api/secretSyncs";
@@ -19,6 +21,7 @@ export const RenderSyncFields = () => {
>();
const connectionId = useWatch({ name: "connection.id", control });
const selectedScope = useWatch({ name: "destinationConfig.scope", control });
const { data: services = [], isPending: isServicesPending } = useRenderConnectionListServices(
connectionId,
@@ -27,11 +30,17 @@ export const RenderSyncFields = () => {
}
);
const { data: groups = [], isPending: isGroupsPending } =
useRenderConnectionListEnvironmentGroups(connectionId, {
enabled: Boolean(connectionId) && selectedScope === RenderSyncScope.EnvironmentGroup
});
return (
<>
<SecretSyncConnectionField
onChange={() => {
setValue("destinationConfig.serviceId", "");
setValue("destinationConfig.environmentGroupId", "");
setValue("destinationConfig.type", RenderSyncType.Env);
setValue("destinationConfig.scope", RenderSyncScope.Service);
}}
@@ -83,30 +92,67 @@ export const RenderSyncFields = () => {
</FormControl>
)}
/>
<Controller
name="destinationConfig.serviceId"
control={control}
render={({ field: { value, onChange }, fieldState: { error } }) => (
<FormControl errorText={error?.message} isError={Boolean(error?.message)} label="Service">
<FilterableSelect
isLoading={isServicesPending && Boolean(connectionId)}
isDisabled={!connectionId}
value={services ? (services.find((service) => service.id === value) ?? []) : []}
onChange={(option) => {
onChange((option as SingleValue<TRenderService>)?.id ?? null);
setValue(
"destinationConfig.serviceName",
(option as SingleValue<TRenderService>)?.name ?? ""
);
}}
options={services}
placeholder="Select a service..."
getOptionLabel={(option) => option.name}
getOptionValue={(option) => option.id.toString()}
/>
</FormControl>
)}
/>
{selectedScope === RenderSyncScope.Service && (
<Controller
name="destinationConfig.serviceId"
control={control}
render={({ field: { value, onChange }, fieldState: { error } }) => (
<FormControl
errorText={error?.message}
isError={Boolean(error?.message)}
label="Service"
>
<FilterableSelect
isLoading={isServicesPending && Boolean(connectionId)}
isDisabled={!connectionId}
value={services ? (services.find((service) => service.id === value) ?? []) : []}
onChange={(option) => {
onChange((option as SingleValue<TRenderService>)?.id ?? null);
setValue(
"destinationConfig.serviceName",
(option as SingleValue<TRenderService>)?.name ?? ""
);
}}
options={services}
placeholder="Select a service..."
getOptionLabel={(option) => option.name}
getOptionValue={(option) => option.id.toString()}
/>
</FormControl>
)}
/>
)}
{selectedScope === RenderSyncScope.EnvironmentGroup && (
<Controller
name="destinationConfig.environmentGroupId"
control={control}
render={({ field: { value, onChange }, fieldState: { error } }) => (
<FormControl
errorText={error?.message}
isError={Boolean(error?.message)}
label="Environment Group"
>
<FilterableSelect
isLoading={isGroupsPending && Boolean(connectionId)}
isDisabled={!connectionId}
value={groups ? (groups.find((g) => g.id === value) ?? []) : []}
onChange={(option) => {
onChange((option as SingleValue<TRenderEnvironmentGroup>)?.id ?? null);
setValue(
"destinationConfig.environmentGroupName",
(option as SingleValue<TRenderEnvironmentGroup>)?.name ?? ""
);
}}
options={groups}
placeholder="Select an environment group..."
getOptionLabel={(option) => option.name}
getOptionValue={(option) => option.id.toString()}
/>
</FormControl>
)}
/>
)}
</>
);
};

View File

@@ -4,6 +4,7 @@ import { GenericFieldLabel } from "@app/components/secret-syncs";
import { TSecretSyncForm } from "@app/components/secret-syncs/forms/schemas";
import { Badge } from "@app/components/v2";
import { SecretSync } from "@app/hooks/api/secretSyncs";
import { RenderSyncScope } from "@app/hooks/api/secretSyncs/types/render-sync";
export const RenderSyncOptionsReviewFields = () => {
const { watch } = useFormContext<TSecretSyncForm & { destination: SecretSync.Render }>();
@@ -27,13 +28,20 @@ export const RenderSyncOptionsReviewFields = () => {
export const RenderSyncReviewFields = () => {
const { watch } = useFormContext<TSecretSyncForm & { destination: SecretSync.Render }>();
const serviceName = watch("destinationConfig.serviceName");
const scope = watch("destinationConfig.scope");
const config = watch("destinationConfig");
return (
<>
<GenericFieldLabel label="Scope">{scope}</GenericFieldLabel>
<GenericFieldLabel label="Service">{serviceName}</GenericFieldLabel>
<GenericFieldLabel label="Scope">{config.scope}</GenericFieldLabel>
{config.scope === RenderSyncScope.Service ? (
<GenericFieldLabel label="Service">
{config.serviceName ?? config.serviceId}
</GenericFieldLabel>
) : (
<GenericFieldLabel label="Service">
{config.environmentGroupName ?? config.environmentGroupId}
</GenericFieldLabel>
)}
</>
);
};

View File

@@ -17,6 +17,12 @@ export const RenderSyncDestinationSchema = BaseSecretSyncSchema(
serviceId: z.string().trim().min(1, "Service is required"),
serviceName: z.string().trim().optional(),
type: z.nativeEnum(RenderSyncType)
}),
z.object({
scope: z.literal(RenderSyncScope.EnvironmentGroup),
environmentGroupId: z.string().trim().min(1, "Environment Group ID is required"),
environmentGroupName: z.string().trim().optional(),
type: z.nativeEnum(RenderSyncType)
})
])
})

View File

@@ -212,5 +212,9 @@ export const RENDER_SYNC_SCOPES: Record<RenderSyncScope, { name: string; descrip
[RenderSyncScope.Service]: {
name: "Service",
description: "Infisical will sync secrets to the specified Render service."
},
[RenderSyncScope.EnvironmentGroup]: {
name: "EnvironmentGroup",
description: "Infisical will sync secrets to the specified Render environment group."
}
};

View File

@@ -3,12 +3,14 @@ import { useQuery, UseQueryOptions } from "@tanstack/react-query";
import { apiRequest } from "@app/config/request";
import { appConnectionKeys } from "../queries";
import { TRenderService } from "./types";
import { TRenderEnvironmentGroup, TRenderService } from "./types";
const renderConnectionKeys = {
all: [...appConnectionKeys.all, "render"] as const,
listServices: (connectionId: string) =>
[...renderConnectionKeys.all, "services", connectionId] as const
[...renderConnectionKeys.all, "services", connectionId] as const,
listEnvironmentGroups: (connectionId: string) =>
[...renderConnectionKeys.all, "environment-groups", connectionId] as const
};
export const useRenderConnectionListServices = (
@@ -35,3 +37,28 @@ export const useRenderConnectionListServices = (
...options
});
};
export const useRenderConnectionListEnvironmentGroups = (
connectionId: string,
options?: Omit<
UseQueryOptions<
TRenderEnvironmentGroup[],
unknown,
TRenderEnvironmentGroup[],
ReturnType<typeof renderConnectionKeys.listEnvironmentGroups>
>,
"queryKey" | "queryFn"
>
) => {
return useQuery({
queryKey: renderConnectionKeys.listEnvironmentGroups(connectionId),
queryFn: async () => {
const { data } = await apiRequest.get<TRenderEnvironmentGroup[]>(
`/api/v1/app-connections/render/${connectionId}/environment-groups`
);
return data;
},
...options
});
};

View File

@@ -2,3 +2,8 @@ export type TRenderService = {
id: string;
name: string;
};
export type TRenderEnvironmentGroup = {
id: string;
name: string;
};

View File

@@ -104,6 +104,7 @@ export const useUpdateOrg = () => {
mutationFn: ({
name,
authEnforced,
googleSsoAuthEnforced,
scimEnabled,
slug,
orgId,
@@ -125,6 +126,7 @@ export const useUpdateOrg = () => {
return apiRequest.patch(`/api/v1/organization/${orgId}`, {
name,
authEnforced,
googleSsoAuthEnforced,
scimEnabled,
slug,
defaultMembershipRoleSlug,

View File

@@ -9,6 +9,7 @@ export type Organization = {
createAt: string;
updatedAt: string;
authEnforced: boolean;
googleSsoAuthEnforced: boolean;
bypassOrgAuthEnabled: boolean;
orgAuthMethod: string;
scimEnabled: boolean;
@@ -34,6 +35,7 @@ export type UpdateOrgDTO = {
orgId: string;
name?: string;
authEnforced?: boolean;
googleSsoAuthEnforced?: boolean;
scimEnabled?: boolean;
slug?: string;
defaultMembershipRoleSlug?: string;

View File

@@ -4,12 +4,19 @@ import { RootSyncOptions, TRootSecretSync } from "@app/hooks/api/secretSyncs/typ
export type TRenderSync = TRootSecretSync & {
destination: SecretSync.Render;
destinationConfig: {
scope: RenderSyncScope.Service;
type: RenderSyncType;
serviceId: string;
serviceName?: string;
};
destinationConfig:
| {
type: RenderSyncType;
scope: RenderSyncScope.Service;
serviceId: string;
serviceName?: string | undefined;
}
| {
type: RenderSyncType;
scope: RenderSyncScope.EnvironmentGroup;
environmentGroupId: string;
environmentGroupName?: string | undefined;
};
connection: {
app: AppConnection.Render;
@@ -23,7 +30,8 @@ export type TRenderSync = TRootSecretSync & {
};
export enum RenderSyncScope {
Service = "service"
Service = "service",
EnvironmentGroup = "environment-group"
}
export enum RenderSyncType {

View File

@@ -48,6 +48,7 @@ export type SubscriptionPlan = {
externalKms: boolean;
pkiEst: boolean;
enforceMfa: boolean;
enforceGoogleSSO: boolean;
projectTemplates: boolean;
kmip: boolean;
secretScanning: boolean;

View File

@@ -240,6 +240,13 @@ export const Navbar = () => {
return;
}
if (org.googleSsoAuthEnforced) {
await logout.mutateAsync();
window.open(`/api/v1/sso/redirect/google?org_slug=${org.slug}`);
window.close();
return;
}
handleOrgChange(org?.id);
}}
variant="plain"

View File

@@ -82,25 +82,40 @@ export const SelectOrganizationSection = () => {
}
}
if (organization.authEnforced && !canBypassOrgAuth) {
if ((organization.authEnforced || organization.googleSsoAuthEnforced) && !canBypassOrgAuth) {
const authToken = jwtDecode(getAuthToken()) as { authMethod: AuthMethod };
// org has an org-level auth method enforced (e.g. SAML, OIDC, or Google SSO)
// -> logout + redirect to the corresponding SSO login
await logout.mutateAsync();
let url = "";
if (organization.orgAuthMethod === AuthMethod.OIDC) {
url = `/api/v1/sso/oidc/login?orgSlug=${organization.slug}${
callbackPort ? `&callbackPort=${callbackPort}` : ""
}`;
} else {
} else if (organization.orgAuthMethod === AuthMethod.SAML) {
url = `/api/v1/sso/redirect/saml2/organizations/${organization.slug}`;
if (callbackPort) {
url += `?callback_port=${callbackPort}`;
}
} else if (
organization.googleSsoAuthEnforced &&
authToken.authMethod !== AuthMethod.GOOGLE
) {
url = `/api/v1/sso/redirect/google?org_slug=${organization.slug}`;
if (callbackPort) {
url += `&callback_port=${callbackPort}`;
}
}
window.location.href = url;
return;
// we only redirect when a url was set; it may be empty when Google SSO is enforced
// but the user is already logged in with Google SSO (see line 103-106)
if (url) {
await logout.mutateAsync();
window.location.href = url;
return;
}
}
const { token, isMfaEnabled, mfaMethod } = await selectOrg

View File

@@ -13,8 +13,23 @@ import {
} from "@app/context";
import { useLogoutUser, useUpdateOrg } from "@app/hooks/api";
import { usePopUp } from "@app/hooks/usePopUp";
import { twMerge } from "tailwind-merge";
export const OrgGeneralAuthSection = () => {
enum EnforceAuthType {
SAML = "saml",
GOOGLE = "google",
OIDC = "oidc"
}
export const OrgGeneralAuthSection = ({
isSamlConfigured,
isOidcConfigured,
isGoogleConfigured
}: {
isSamlConfigured: boolean;
isOidcConfigured: boolean;
isGoogleConfigured: boolean;
}) => {
const { currentOrg } = useOrganization();
const { subscription } = useSubscription();
const { popUp, handlePopUpOpen, handlePopUpToggle } = usePopUp(["upgradePlan"] as const);
@@ -23,27 +38,61 @@ export const OrgGeneralAuthSection = () => {
const logout = useLogoutUser();
const handleEnforceOrgAuthToggle = async (value: boolean) => {
const handleEnforceOrgAuthToggle = async (value: boolean, type: EnforceAuthType) => {
try {
if (!currentOrg?.id) return;
if (!subscription?.samlSSO) {
handlePopUpOpen("upgradePlan");
return;
if (type === EnforceAuthType.SAML) {
if (!subscription?.samlSSO) {
handlePopUpOpen("upgradePlan");
return;
}
await mutateAsync({
orgId: currentOrg?.id,
authEnforced: value
});
} else if (type === EnforceAuthType.GOOGLE) {
if (!subscription?.enforceGoogleSSO) {
handlePopUpOpen("upgradePlan");
return;
}
await mutateAsync({
orgId: currentOrg?.id,
googleSsoAuthEnforced: value
});
} else if (type === EnforceAuthType.OIDC) {
if (!subscription?.oidcSSO) {
handlePopUpOpen("upgradePlan");
return;
}
await mutateAsync({
orgId: currentOrg?.id,
authEnforced: value
});
} else {
createNotification({
text: `Invalid auth enforcement type ${type}`,
type: "error"
});
}
await mutateAsync({
orgId: currentOrg?.id,
authEnforced: value
});
createNotification({
text: `Successfully ${value ? "enforced" : "un-enforced"} org-level auth`,
text: `Successfully ${value ? "enabled" : "disabled"} org-level auth`,
type: "success"
});
if (value) {
await logout.mutateAsync();
window.open(`/api/v1/sso/redirect/saml2/organizations/${currentOrg.slug}`);
if (type === EnforceAuthType.SAML) {
window.open(`/api/v1/sso/redirect/saml2/organizations/${currentOrg.slug}`);
} else if (type === EnforceAuthType.GOOGLE) {
window.open(`/api/v1/sso/redirect/google?org_slug=${currentOrg.slug}`);
}
window.close();
}
} catch (err) {
@@ -78,45 +127,91 @@ export const OrgGeneralAuthSection = () => {
};
return (
<>
{/* <div className="py-4">
<div className="mb-2 flex justify-between">
<h3 className="text-md text-mineshaft-100">Allow users to send invites</h3>
<OrgPermissionCan I={OrgPermissionActions.Edit} a={OrgPermissionSubjects.Sso}>
{(isAllowed) => (
<Switch
id="allow-org-invites"
onCheckedChange={(value) => handleEnforceOrgAuthToggle(value)}
isChecked={currentOrg?.authEnforced ?? false}
isDisabled={!isAllowed}
/>
)}
</OrgPermissionCan>
</div>
<p className="text-sm text-mineshaft-300">Allow members to invite new users to this organization</p>
</div> */}
<div className="py-4">
<div className="mb-2 flex justify-between">
<div className="flex items-center gap-1">
<span className="text-md text-mineshaft-100">Enforce SAML SSO</span>
</div>
<OrgPermissionCan I={OrgPermissionActions.Edit} a={OrgPermissionSubjects.Sso}>
{(isAllowed) => (
<Switch
id="enforce-org-auth"
onCheckedChange={(value) => handleEnforceOrgAuthToggle(value)}
isChecked={currentOrg?.authEnforced ?? false}
isDisabled={!isAllowed}
/>
)}
</OrgPermissionCan>
</div>
<p className="text-sm text-mineshaft-300">
Enforce users to authenticate via SAML to access this organization
<div className="rounded-lg border border-mineshaft-600 bg-mineshaft-900 p-6">
<div>
<p className="text-xl font-semibold text-gray-200">SSO Enforcement</p>
<p className="mb-2 mt-1 text-gray-400">
Manage strict enforcement of specific authentication methods for your organization.
</p>
</div>
{currentOrg?.authEnforced && (
<div className="py-4">
<div className="flex flex-col gap-2 py-4">
<div className={twMerge("mt-4", !isSamlConfigured && "hidden")}>
<div className="mb-2 flex justify-between">
<div className="flex items-center gap-1">
<span className="text-md text-mineshaft-100">Enforce SAML SSO</span>
</div>
<OrgPermissionCan I={OrgPermissionActions.Edit} a={OrgPermissionSubjects.Sso}>
{(isAllowed) => (
<Switch
id="enforce-saml-auth"
onCheckedChange={(value) =>
handleEnforceOrgAuthToggle(value, EnforceAuthType.SAML)
}
isChecked={currentOrg?.authEnforced ?? false}
isDisabled={!isAllowed || currentOrg?.googleSsoAuthEnforced}
/>
)}
</OrgPermissionCan>
</div>
<p className="text-sm text-mineshaft-300">
Require users to authenticate via SAML to access this organization.
<br />
When this is enabled, your organization members will only be able to log in with SAML.
</p>
</div>
<div className={twMerge("mt-4", !isOidcConfigured && "hidden")}>
<div className="mb-2 flex justify-between">
<div className="flex items-center gap-1">
<span className="text-md text-mineshaft-100">Enforce OIDC SSO</span>
</div>
<OrgPermissionCan I={OrgPermissionActions.Edit} a={OrgPermissionSubjects.Sso}>
{(isAllowed) => (
<Switch
id="enforce-oidc-auth"
isChecked={currentOrg?.authEnforced ?? false}
onCheckedChange={(value) =>
handleEnforceOrgAuthToggle(value, EnforceAuthType.OIDC)
}
isDisabled={!isAllowed}
/>
)}
</OrgPermissionCan>
</div>
<p className="text-sm text-mineshaft-300">
Require users to authenticate via OIDC to access this organization.
<br />
When this is enabled, your organization members will only be able to log in with OIDC.
</p>
</div>
<div className={twMerge("mt-2", !isGoogleConfigured && "hidden")}>
<div className="mb-2 flex justify-between">
<div className="flex items-center gap-1">
<span className="text-md text-mineshaft-100">Enforce Google SSO</span>
</div>
<OrgPermissionCan I={OrgPermissionActions.Edit} a={OrgPermissionSubjects.Sso}>
{(isAllowed) => (
<Switch
id="enforce-google-sso"
onCheckedChange={(value) =>
handleEnforceOrgAuthToggle(value, EnforceAuthType.GOOGLE)
}
isChecked={currentOrg?.googleSsoAuthEnforced ?? false}
isDisabled={!isAllowed || currentOrg?.authEnforced}
/>
)}
</OrgPermissionCan>
</div>
<p className="text-sm text-mineshaft-300">
Require users to authenticate via Google to access this organization.
<br />
When this is enabled, your organization members will only be able to log in with Google.
</p>
</div>
</div>
{(currentOrg?.authEnforced || currentOrg?.googleSsoAuthEnforced) && (
<div className="mt-4 py-4">
<div className="mb-2 flex justify-between">
<div className="flex items-center gap-1">
<span className="text-md text-mineshaft-100">Enable Admin SSO Bypass</span>
@@ -125,8 +220,8 @@ export const OrgGeneralAuthSection = () => {
content={
<div>
<span>
When this is enabled, we strongly recommend enforcing MFA at the organization
level.
When enabling admin SSO bypass, we strongly recommend also enforcing MFA
at the organization level for security reasons.
</span>
<p className="mt-4">
In case of a lockout, admins can use the{" "}
@@ -182,6 +277,6 @@ export const OrgGeneralAuthSection = () => {
onOpenChange={(isOpen) => handlePopUpToggle("upgradePlan", isOpen)}
text="You can enforce SAML SSO if you switch to Infisical's Pro plan."
/>
</>
</div>
);
};

View File

@@ -95,43 +95,25 @@ export const OrgLDAPSection = (): JSX.Element => {
};
return (
<div className="mb-4 rounded-lg border border-mineshaft-600 bg-mineshaft-900 p-6">
<div className="mb-4">
<div className="py-4">
<div className="mb-2 flex items-center justify-between">
<h2 className="text-md text-mineshaft-100">LDAP</h2>
<div className="flex">
<OrgPermissionCan I={OrgPermissionActions.Create} a={OrgPermissionSubjects.Ldap}>
{(isAllowed) => (
<Button onClick={addLDAPBtnClick} colorSchema="secondary" isDisabled={!isAllowed}>
Manage
</Button>
)}
</OrgPermissionCan>
<div className="mb-4 flex items-center justify-between">
<div>
<p className="text-xl font-semibold text-gray-200">LDAP</p>
<p className="mb-2 text-gray-400">Manage LDAP authentication configuration</p>
</div>
</div>
<p className="text-sm text-mineshaft-300">Manage LDAP authentication configuration</p>
</div>
<div className="py-4">
<div className="mb-2 flex items-center justify-between">
<h2 className="text-md text-mineshaft-100">LDAP Group Mappings</h2>
<OrgPermissionCan I={OrgPermissionActions.Create} a={OrgPermissionSubjects.Ldap}>
{(isAllowed) => (
<Button
onClick={openLDAPGroupMapModal}
colorSchema="secondary"
isDisabled={!isAllowed}
>
<Button onClick={addLDAPBtnClick} colorSchema="secondary" isDisabled={!isAllowed}>
Manage
</Button>
)}
</OrgPermissionCan>
</div>
<p className="text-sm text-mineshaft-300">
Manage how LDAP groups are mapped to internal groups in Infisical
</p>
</div>
{data && (
<div className="py-4">
<div className="pt-4">
<div className="mb-2 flex items-center justify-between">
<h2 className="text-md text-mineshaft-100">Enable LDAP</h2>
<OrgPermissionCan I={OrgPermissionActions.Edit} a={OrgPermissionSubjects.Ldap}>
@@ -152,6 +134,27 @@ export const OrgLDAPSection = (): JSX.Element => {
</p>
</div>
)}
<div className="py-4">
<div className="mb-2 flex items-center justify-between">
<h2 className="text-md text-mineshaft-100">LDAP Group Mappings</h2>
<OrgPermissionCan I={OrgPermissionActions.Create} a={OrgPermissionSubjects.Ldap}>
{(isAllowed) => (
<Button
onClick={openLDAPGroupMapModal}
colorSchema="secondary"
isDisabled={!isAllowed}
>
Configure
</Button>
)}
</OrgPermissionCan>
</div>
<p className="text-sm text-mineshaft-300">
Manage how LDAP groups are mapped to internal groups in Infisical
</p>
</div>
<LDAPModal
popUp={popUp}
handlePopUpClose={handlePopUpClose}

View File

@@ -11,7 +11,7 @@ import {
useOrganization,
useSubscription
} from "@app/context";
import { useGetOIDCConfig, useLogoutUser, useUpdateOrg } from "@app/hooks/api";
import { useGetOIDCConfig } from "@app/hooks/api";
import { useUpdateOIDCConfig } from "@app/hooks/api/oidcConfig/mutations";
import { usePopUp } from "@app/hooks/usePopUp";
@@ -23,9 +23,7 @@ export const OrgOIDCSection = (): JSX.Element => {
const { data, isPending } = useGetOIDCConfig(currentOrg?.id ?? "");
const { mutateAsync } = useUpdateOIDCConfig();
const { mutateAsync: updateOrg } = useUpdateOrg();
const logout = useLogoutUser();
const { popUp, handlePopUpOpen, handlePopUpClose, handlePopUpToggle } = usePopUp([
"addOIDC",
"upgradePlan"
@@ -54,56 +52,6 @@ export const OrgOIDCSection = (): JSX.Element => {
}
};
const handleEnforceOrgAuthToggle = async (value: boolean) => {
try {
if (!currentOrg?.id) return;
if (!subscription?.oidcSSO) {
handlePopUpOpen("upgradePlan");
return;
}
await updateOrg({
orgId: currentOrg?.id,
authEnforced: value
});
createNotification({
text: `Successfully ${value ? "enforced" : "un-enforced"} org-level auth`,
type: "success"
});
if (value) {
await logout.mutateAsync();
window.open(`/api/v1/sso/oidc/login?orgSlug=${currentOrg.slug}`);
window.close();
}
} catch (err) {
console.error(err);
}
};
const handleEnableBypassOrgAuthToggle = async (value: boolean) => {
try {
if (!currentOrg?.id) return;
if (!subscription?.oidcSSO) {
handlePopUpOpen("upgradePlan");
return;
}
await updateOrg({
orgId: currentOrg?.id,
bypassOrgAuthEnabled: value
});
createNotification({
text: `Successfully ${value ? "enabled" : "disabled"} admin bypassing of org-level auth`,
type: "success"
});
} catch (err) {
console.error(err);
}
};
const handleOIDCGroupManagement = async (value: boolean) => {
try {
if (!currentOrg?.id) return;
@@ -136,25 +84,22 @@ export const OrgOIDCSection = (): JSX.Element => {
};
return (
<div className="mb-4 rounded-lg border border-mineshaft-600 bg-mineshaft-900 p-6">
<div className="py-4">
<div className="mb-2 flex items-center justify-between">
<h2 className="text-md text-mineshaft-100">OIDC</h2>
{!isPending && (
<OrgPermissionCan I={OrgPermissionActions.Create} a={OrgPermissionSubjects.Sso}>
{(isAllowed) => (
<Button
onClick={addOidcButtonClick}
colorSchema="secondary"
isDisabled={!isAllowed}
>
Manage
</Button>
)}
</OrgPermissionCan>
)}
<div className="mb-4 rounded-lg border-mineshaft-600 bg-mineshaft-900">
<div className="mb-4 flex items-center justify-between">
<div>
<p className="text-xl font-semibold text-gray-200">OIDC</p>
<p className="mb-2 text-gray-400">Manage OIDC authentication configuration</p>
</div>
<p className="text-sm text-mineshaft-300">Manage OIDC authentication configuration</p>
{!isPending && (
<OrgPermissionCan I={OrgPermissionActions.Create} a={OrgPermissionSubjects.Sso}>
{(isAllowed) => (
<Button onClick={addOidcButtonClick} colorSchema="secondary" isDisabled={!isAllowed}>
Manage
</Button>
)}
</OrgPermissionCan>
)}
</div>
{data && (
<div className="py-4">
@@ -178,88 +123,6 @@ export const OrgOIDCSection = (): JSX.Element => {
</p>
</div>
)}
<div className="py-4">
<div className="mb-2 flex justify-between">
<div className="flex items-center gap-1">
<span className="text-md text-mineshaft-100">Enforce OIDC SSO</span>
</div>
<OrgPermissionCan I={OrgPermissionActions.Edit} a={OrgPermissionSubjects.Sso}>
{(isAllowed) => (
<Switch
id="enforce-org-auth"
isChecked={currentOrg?.authEnforced ?? false}
onCheckedChange={(value) => handleEnforceOrgAuthToggle(value)}
isDisabled={!isAllowed}
/>
)}
</OrgPermissionCan>
</div>
<p className="text-sm text-mineshaft-300">
<span>Enforce users to authenticate via OIDC to access this organization.</span>
</p>
</div>
{currentOrg?.authEnforced && (
<div className="py-4">
<div className="mb-2 flex justify-between">
<div className="flex items-center gap-1">
<span className="text-md text-mineshaft-100">Enable Admin SSO Bypass</span>
<Tooltip
className="max-w-lg"
content={
<div>
<span>
When this is enabled, we strongly recommend enforcing MFA at the organization
level.
</span>
<p className="mt-4">
In case of a lockout, admins can use the{" "}
<a
target="_blank"
className="underline underline-offset-2 hover:text-mineshaft-300"
href="https://infisical.com/docs/documentation/platform/sso/overview#admin-login-portal"
rel="noreferrer"
>
Admin Login Portal
</a>{" "}
at{" "}
<a
target="_blank"
rel="noopener noreferrer"
className="underline underline-offset-2 hover:text-mineshaft-300"
href={`${window.location.origin}/login/admin`}
>
{window.location.origin}/login/admin
</a>
</p>
</div>
}
>
<FontAwesomeIcon
icon={faInfoCircle}
size="sm"
className="mt-0.5 inline-block text-mineshaft-400"
/>
</Tooltip>
</div>
<OrgPermissionCan I={OrgPermissionActions.Edit} a={OrgPermissionSubjects.Sso}>
{(isAllowed) => (
<Switch
id="allow-admin-bypass"
isChecked={currentOrg?.bypassOrgAuthEnabled ?? false}
onCheckedChange={(value) => handleEnableBypassOrgAuthToggle(value)}
isDisabled={!isAllowed}
/>
)}
</OrgPermissionCan>
</div>
<p className="text-sm text-mineshaft-300">
<span>
Allow organization admins to bypass OIDC enforcement when SSO is unavailable,
misconfigured, or inaccessible.
</span>
</p>
</div>
)}
<div className="py-4">
<div className="mb-2 flex justify-between">
<div className="text-md flex items-center text-mineshaft-100">

View File

@@ -79,25 +79,24 @@ export const OrgSSOSection = (): JSX.Element => {
};
return (
<>
<hr className="border-mineshaft-600" />
<div className="py-4">
<div className="mb-2 flex items-center justify-between">
<h2 className="text-md text-mineshaft-100">SAML</h2>
{!isPending && (
<OrgPermissionCan I={OrgPermissionActions.Create} a={OrgPermissionSubjects.Sso}>
{(isAllowed) => (
<Button onClick={addSSOBtnClick} colorSchema="secondary" isDisabled={!isAllowed}>
Manage
</Button>
)}
</OrgPermissionCan>
)}
<div className="space-y-4">
<div className="mb-4 flex items-center justify-between">
<div>
<p className="text-xl font-semibold text-gray-200">SAML</p>
<p className="mb-2 text-gray-400">Manage SAML authentication configuration</p>
</div>
<p className="text-sm text-mineshaft-300">Manage SAML authentication configuration</p>
{!isPending && (
<OrgPermissionCan I={OrgPermissionActions.Create} a={OrgPermissionSubjects.Sso}>
{(isAllowed) => (
<Button onClick={addSSOBtnClick} colorSchema="secondary" isDisabled={!isAllowed}>
Manage
</Button>
)}
</OrgPermissionCan>
)}
</div>
<div className="py-4">
<div className="mb-2 flex items-center justify-between">
<div>
<div className="mb-2 flex items-center justify-between pt-4">
<h2 className="text-md text-mineshaft-100">Enable SAML</h2>
{!isPending && (
<OrgPermissionCan I={OrgPermissionActions.Edit} a={OrgPermissionSubjects.Sso}>
@@ -126,6 +125,6 @@ export const OrgSSOSection = (): JSX.Element => {
onOpenChange={(isOpen) => handlePopUpToggle("upgradePlan", isOpen)}
text="You can use SAML SSO if you switch to Infisical's Pro plan."
/>
</>
</div>
);
};

View File

@@ -49,13 +49,19 @@ export const OrgSsoTab = withPermission(
);
const areConfigsLoading = isLoadingOidcConfig || isLoadingSamlConfig || isLoadingLdapConfig;
const shouldDisplaySection = (method: LoginMethod) =>
!enabledLoginMethods || enabledLoginMethods.includes(method);
const shouldDisplaySection = (method: LoginMethod[] | LoginMethod) => {
if (Array.isArray(method)) {
return method.some((m) => !enabledLoginMethods || enabledLoginMethods.includes(m));
}
const isOidcConfigured = oidcConfig && (oidcConfig.discoveryURL || oidcConfig.issuer);
return !enabledLoginMethods || enabledLoginMethods.includes(method);
};
const isOidcConfigured = Boolean(oidcConfig && (oidcConfig.discoveryURL || oidcConfig.issuer));
const isSamlConfigured =
samlConfig && (samlConfig.entryPoint || samlConfig.issuer || samlConfig.cert);
const isLdapConfigured = ldapConfig && ldapConfig.url;
const isGoogleConfigured = shouldDisplaySection(LoginMethod.GOOGLE);
const shouldShowCreateIdentityProviderView =
!isOidcConfigured && !isSamlConfigured && !isLdapConfigured;
@@ -65,11 +71,14 @@ export const OrgSsoTab = withPermission(
shouldDisplaySection(LoginMethod.OIDC) ||
shouldDisplaySection(LoginMethod.LDAP) ? (
<>
<div className="mb-6 rounded-lg border border-mineshaft-600 bg-mineshaft-900 p-6">
<p className="text-xl font-semibold text-gray-200">Connect an Identity Provider</p>
<p className="mb-2 mt-1 text-gray-400">
Connect your identity provider to simplify user management
</p>
<div className="mb-4 space-y-6 rounded-lg border border-mineshaft-600 bg-mineshaft-900 p-6">
<div>
<p className="text-xl font-semibold text-gray-200">Connect an Identity Provider</p>
<p className="mb-2 mt-1 text-gray-400">
Connect your identity provider to simplify user management with options like SAML,
OIDC, and LDAP.
</p>
</div>
{shouldDisplaySection(LoginMethod.SAML) && (
<div
className={twMerge(
@@ -169,20 +178,27 @@ export const OrgSsoTab = withPermission(
return (
<>
{shouldShowCreateIdentityProviderView ? (
createIdentityProviderView
) : (
<>
{isSamlConfigured && shouldDisplaySection(LoginMethod.SAML) && (
<div className="mb-4 rounded-lg border border-mineshaft-600 bg-mineshaft-900 p-6">
<OrgGeneralAuthSection />
<OrgSSOSection />
<div className="space-y-4">
{shouldDisplaySection([LoginMethod.SAML, LoginMethod.GOOGLE]) && (
<OrgGeneralAuthSection
isSamlConfigured={isSamlConfigured}
isOidcConfigured={isOidcConfigured}
isGoogleConfigured={isGoogleConfigured}
/>
)}
{shouldShowCreateIdentityProviderView ? (
createIdentityProviderView
) : (
<div className="mb-4 space-y-6 rounded-lg border border-mineshaft-600 bg-mineshaft-900 p-6">
<div>
{isSamlConfigured && shouldDisplaySection(LoginMethod.SAML) && <OrgSSOSection />}
{isOidcConfigured && shouldDisplaySection(LoginMethod.OIDC) && <OrgOIDCSection />}
{isLdapConfigured && shouldDisplaySection(LoginMethod.LDAP) && <OrgLDAPSection />}
</div>
)}
{isOidcConfigured && shouldDisplaySection(LoginMethod.OIDC) && <OrgOIDCSection />}
{isLdapConfigured && shouldDisplaySection(LoginMethod.LDAP) && <OrgLDAPSection />}
</>
)}
</div>
)}
</div>
<UpgradePlanModal
isOpen={popUp.upgradePlan.isOpen}
onOpenChange={(isOpen) => handlePopUpToggle("upgradePlan", isOpen)}

View File

@@ -1,5 +1,8 @@
import { useRenderConnectionListServices } from "@app/hooks/api/appConnections/render";
import { TRenderSync } from "@app/hooks/api/secretSyncs/types/render-sync";
import {
useRenderConnectionListEnvironmentGroups,
useRenderConnectionListServices
} from "@app/hooks/api/appConnections/render";
import { RenderSyncScope, TRenderSync } from "@app/hooks/api/secretSyncs/types/render-sync";
import { getSecretSyncDestinationColValues } from "../helpers";
import { SecretSyncTableCell } from "../SecretSyncTableCell";
@@ -9,21 +12,59 @@ type Props = {
};
export const RenderSyncDestinationCol = ({ secretSync }: Props) => {
const isServiceScope = secretSync.destinationConfig.scope === RenderSyncScope.Service;
const { data: services = [], isPending } = useRenderConnectionListServices(
secretSync.connectionId
secretSync.connectionId,
{
enabled: isServiceScope
}
);
const { primaryText, secondaryText } = getSecretSyncDestinationColValues({
...secretSync,
destinationConfig: {
...secretSync.destinationConfig,
serviceName: services.find((s) => s.id === secretSync.destinationConfig.serviceId)?.name
const { data: groups = [], isPending: isGroupsPending } =
useRenderConnectionListEnvironmentGroups(secretSync.connectionId, { enabled: !isServiceScope });
switch (secretSync.destinationConfig.scope) {
case RenderSyncScope.Service: {
const id = secretSync.destinationConfig.serviceId;
const { primaryText, secondaryText } = getSecretSyncDestinationColValues({
...secretSync,
destinationConfig: {
...secretSync.destinationConfig,
serviceName: services.find((s) => s.id === id)?.name
}
});
if (isPending) {
return (
<SecretSyncTableCell primaryText="Loading service info..." secondaryText="Service" />
);
}
return <SecretSyncTableCell primaryText={primaryText} secondaryText={secondaryText} />;
}
});
case RenderSyncScope.EnvironmentGroup: {
const id = secretSync.destinationConfig.environmentGroupId;
const { primaryText, secondaryText } = getSecretSyncDestinationColValues({
...secretSync,
destinationConfig: {
...secretSync.destinationConfig,
environmentGroupName: groups.find((s) => s.id === id)?.name
}
});
if (isPending) {
return <SecretSyncTableCell primaryText="Loading service info..." secondaryText="Service" />;
if (isGroupsPending) {
return (
<SecretSyncTableCell
primaryText="Loading environment group info..."
secondaryText="Environment Group"
/>
);
}
return <SecretSyncTableCell primaryText={primaryText} secondaryText={secondaryText} />;
}
default:
throw new Error("Unknown render sync destination scope");
}
return <SecretSyncTableCell primaryText={primaryText} secondaryText={secondaryText} />;
};

View File

@@ -8,6 +8,7 @@ import {
} from "@app/hooks/api/secretSyncs/types/github-sync";
import { GitLabSyncScope } from "@app/hooks/api/secretSyncs/types/gitlab-sync";
import { HumanitecSyncScope } from "@app/hooks/api/secretSyncs/types/humanitec-sync";
import { RenderSyncScope } from "@app/hooks/api/secretSyncs/types/render-sync";
// This function ensures parity between what is displayed in the destination column
// and the values used when search filtering
@@ -125,8 +126,15 @@ export const getSecretSyncDestinationColValues = (secretSync: TSecretSync) => {
secondaryText = destinationConfig.app;
break;
case SecretSync.Render:
primaryText = destinationConfig.serviceName ?? destinationConfig.serviceId;
secondaryText = "Service";
if (destinationConfig.scope === RenderSyncScope.Service) {
primaryText = destinationConfig.serviceName ?? destinationConfig.serviceId;
secondaryText = "Service";
} else {
primaryText =
destinationConfig.environmentGroupName ?? destinationConfig.environmentGroupId;
secondaryText = "Environment Group";
}
break;
case SecretSync.Flyio:
primaryText = destinationConfig.appId;

View File

@@ -8,6 +8,7 @@ import {
faSave
} from "@fortawesome/free-solid-svg-icons";
import { FontAwesomeIcon } from "@fortawesome/react-fontawesome";
import { AnimatePresence, motion } from "framer-motion";
import { Badge, Button, Input, Modal, ModalContent } from "@app/components/v2";
import { PendingAction } from "@app/hooks/api/secretFolders/types";
@@ -302,47 +303,61 @@ export const CommitForm: React.FC<CommitFormProps> = ({
<>
{/* Floating Panel */}
{!isModalOpen && (
<div className="fixed bottom-4 left-1/2 z-40 w-full max-w-3xl -translate-x-1/2 self-center rounded-lg border border-yellow/30 bg-mineshaft-800 shadow-2xl lg:left-auto lg:translate-x-0">
<div className="flex items-center justify-between p-4">
{/* Left Content */}
<div className="flex-1">
{/* Header */}
<div className="flex items-center gap-2">
<div className="h-2 w-2 rounded-full bg-yellow-500" />
<span className="font-medium text-mineshaft-100">Pending Changes</span>
<Badge variant="primary" className="text-xs">
{totalChangesCount} Change{totalChangesCount !== 1 ? "s" : ""}
</Badge>
<div className="fixed bottom-4 left-1/2 z-40 w-full max-w-3xl -translate-x-1/2 self-center lg:left-auto lg:translate-x-0">
<AnimatePresence mode="wait">
<motion.div
key="commit-panel"
transition={{ duration: 0.3 }}
initial={{ opacity: 0, translateY: 30 }}
animate={{ opacity: 1, translateY: 0 }}
exit={{ opacity: 0, translateY: -30 }}
>
<div className="rounded-lg border border-yellow/30 bg-mineshaft-800 shadow-2xl">
<div className="flex items-center justify-between p-4">
{/* Left Content */}
<div className="flex-1">
{/* Header */}
<div className="flex items-center gap-2">
<div className="h-2 w-2 rounded-full bg-yellow-500" />
<span className="font-medium text-mineshaft-100">Pending Changes</span>
<Badge variant="primary" className="text-xs">
{totalChangesCount} Change{totalChangesCount !== 1 ? "s" : ""}
</Badge>
</div>
{/* Description */}
<p className="text-sm leading-5 text-mineshaft-400">
Review pending changes and commit them to apply the updates.
</p>
</div>
{/* Right Buttons */}
<div className="ml-6 mt-0.5 flex items-center gap-3">
<Button
size="sm"
onClick={() =>
clearAllPendingChanges({ workspaceId, environment, secretPath })
}
isDisabled={totalChangesCount === 0}
variant="outline_bg"
className="px-4 hover:border-red/40 hover:bg-red/[0.1]"
>
Discard
</Button>
<Button
variant="solid"
leftIcon={<FontAwesomeIcon icon={faSave} />}
onClick={() => setIsModalOpen(true)}
isDisabled={totalChangesCount === 0}
className="px-6"
>
Save Changes
</Button>
</div>
</div>
</div>
{/* Description */}
<p className="text-sm leading-5 text-mineshaft-400">
Review pending changes and commit them to apply the updates.
</p>
</div>
{/* Right Buttons */}
<div className="ml-6 mt-0.5 flex items-center gap-3">
<Button
size="sm"
onClick={() => clearAllPendingChanges({ workspaceId, environment, secretPath })}
isDisabled={totalChangesCount === 0}
variant="outline_bg"
className="px-4 hover:border-red/40 hover:bg-red/[0.1]"
>
Discard
</Button>
<Button
variant="solid"
leftIcon={<FontAwesomeIcon icon={faSave} />}
onClick={() => setIsModalOpen(true)}
isDisabled={totalChangesCount === 0}
className="px-6"
>
Save Changes
</Button>
</div>
</div>
</motion.div>
</AnimatePresence>
</div>
)}

View File

@@ -1,23 +1,50 @@
import { GenericFieldLabel } from "@app/components/secret-syncs";
import { useRenderConnectionListServices } from "@app/hooks/api/appConnections/render";
import { TRenderSync } from "@app/hooks/api/secretSyncs/types/render-sync";
import {
useRenderConnectionListEnvironmentGroups,
useRenderConnectionListServices
} from "@app/hooks/api/appConnections/render";
import { RenderSyncScope, TRenderSync } from "@app/hooks/api/secretSyncs/types/render-sync";
type Props = {
secretSync: TRenderSync;
};
export const RenderSyncDestinationSection = ({ secretSync }: Props) => {
const isServiceScope = secretSync.destinationConfig.scope === RenderSyncScope.Service;
const { data: services = [], isPending } = useRenderConnectionListServices(
secretSync.connectionId
secretSync.connectionId,
{
enabled: isServiceScope
}
);
const {
destinationConfig: { serviceId }
} = secretSync;
if (isPending) {
return <GenericFieldLabel label="Service">Loading...</GenericFieldLabel>;
const { data: groups = [], isPending: isGroupsPending } =
useRenderConnectionListEnvironmentGroups(secretSync.connectionId, { enabled: !isServiceScope });
switch (secretSync.destinationConfig.scope) {
case RenderSyncScope.Service: {
const id = secretSync.destinationConfig.serviceId;
if (isPending) {
return <GenericFieldLabel label="Service">Loading...</GenericFieldLabel>;
}
const serviceName = services.find((service) => service.id === id)?.name;
return <GenericFieldLabel label="Service">{serviceName ?? id}</GenericFieldLabel>;
}
case RenderSyncScope.EnvironmentGroup: {
const id = secretSync.destinationConfig.environmentGroupId;
if (isGroupsPending) {
return <GenericFieldLabel label="Environment Group">Loading...</GenericFieldLabel>;
}
const envName = groups.find((g) => g.id === id)?.name;
return <GenericFieldLabel label="Environment Group">{envName ?? id}</GenericFieldLabel>;
}
default:
throw new Error("Unknown render sync destination scope");
}
const serviceName = services.find((service) => service.id === serviceId)?.name;
return <GenericFieldLabel label="Service">{serviceName ?? serviceId}</GenericFieldLabel>;
};

View File

@@ -13,9 +13,9 @@ type: application
# This is the chart version. This version number should be incremented each time you make changes
# to the chart and its templates, including the app version.
# Versions are expected to follow Semantic Versioning (https://semver.org/)
version: v0.10.1
version: v0.10.2
# This is the version number of the application being deployed. This version number should be
# incremented each time you make changes to the application. Versions are not expected to
# follow Semantic Versioning. They should reflect the version the application is using.
# It is recommended to use it with quotes.
appVersion: "v0.10.1"
appVersion: "v0.10.2"

View File

@@ -316,6 +316,8 @@ spec:
hostAPI:
description: Infisical host to pull secrets from
type: string
instantUpdates:
type: boolean
managedKubeConfigMapReferences:
items:
properties:

View File

@@ -12,7 +12,7 @@ controllerManager:
readOnlyRootFilesystem: true
image:
repository: infisical/kubernetes-operator
tag: v0.10.1
tag: v0.10.2
resources:
limits:
cpu: 500m

View File

@@ -160,6 +160,9 @@ type InfisicalSecretSpec struct {
// +kubebuilder:validation:Optional
TLS TLSConfig `json:"tls"`
// +kubebuilder:validation:Optional
InstantUpdates bool `json:"instantUpdates"`
}
// InfisicalSecretStatus defines the observed state of InfisicalSecret

View File

@@ -314,6 +314,8 @@ spec:
hostAPI:
description: Infisical host to pull secrets from
type: string
instantUpdates:
type: boolean
managedKubeConfigMapReferences:
items:
properties:

View File

@@ -9,6 +9,7 @@ metadata:
spec:
hostAPI: http://localhost:8080/api
resyncInterval: 10
instantUpdates: false
# tls:
# caRef:
# secretName: custom-ca-certificate

View File

@@ -29,4 +29,4 @@ spec:
secretName: managed-secret-k8s
secretNamespace: default
creationPolicy: "Orphan" ## Owner | Orphan
# secretType: kubernetes.io/dockerconfigjson
# secretType: kubernetes.io/dockerconfigjson

View File

@@ -1,7 +1,7 @@
apiVersion: v1
kind: Secret
metadata:
name: service-token
type: Opaque
data:
infisicalToken: <base64 infisical token here>
# apiVersion: v1
# kind: Secret
# metadata:
# name: service-token
# type: Opaque
# data:
# infisicalToken: <base64 infisical token here>

View File

@@ -4,5 +4,5 @@ metadata:
name: universal-auth-credentials
type: Opaque
stringData:
clientId: da81e27e-1885-47d9-9ea3-ec7d4d807bb6
clientSecret: 2772414d440fe04d8b975f5fe25acd0fbfe71b2a4a420409eb9ac6f5ae6c1e98
clientId: your-client-id-here
clientSecret: your-client-secret-here

View File

@@ -1,8 +1,11 @@
package api
import (
"encoding/json"
"fmt"
"net/http"
"github.com/Infisical/infisical/k8-operator/internal/model"
"github.com/go-resty/resty/v2"
)
@@ -146,3 +149,85 @@ func CallGetProjectByID(httpClient *resty.Client, request GetProjectByIDRequest)
return projectResponse, nil
}
func CallGetProjectByIDv2(httpClient *resty.Client, request GetProjectByIDRequest) (model.Project, error) {
var projectResponse model.Project
response, err := httpClient.
R().SetResult(&projectResponse).
SetHeader("User-Agent", USER_AGENT_NAME).
Get(fmt.Sprintf("%s/v2/workspace/%s", API_HOST_URL, request.ProjectID))
if err != nil {
return model.Project{}, fmt.Errorf("CallGetProject: Unable to complete api request [err=%s]", err)
}
if response.IsError() {
return model.Project{}, fmt.Errorf("CallGetProject: Unsuccessful response: [response=%s]", response)
}
return projectResponse, nil
}
func CallSubscribeProjectEvents(httpClient *resty.Client, projectId, secretsPath, envSlug, token string) (*http.Response, error) {
conditions := &SubscribeProjectEventsRequestCondition{
SecretPath: secretsPath,
EnvironmentSlug: envSlug,
}
body, err := json.Marshal(&SubscribeProjectEventsRequest{
ProjectID: projectId,
Register: []SubscribeProjectEventsRequestRegister{
{
Event: "secret:create",
Conditions: conditions,
},
{
Event: "secret:update",
Conditions: conditions,
},
{
Event: "secret:delete",
Conditions: conditions,
},
{
Event: "secret:import-mutation",
Conditions: conditions,
},
},
})
if err != nil {
return nil, fmt.Errorf("CallSubscribeProjectEvents: Unable to marshal body [err=%s]", err)
}
response, err := httpClient.
R().
SetDoNotParseResponse(true).
SetHeader("User-Agent", USER_AGENT_NAME).
SetHeader("Content-Type", "application/json").
SetHeader("Accept", "text/event-stream").
SetHeader("Connection", "keep-alive").
SetHeader("Authorization", fmt.Sprint("Bearer ", token)).
SetBody(body).
Post(fmt.Sprintf("%s/v1/events/subscribe/project-events", API_HOST_URL))
if err != nil {
return nil, fmt.Errorf("CallSubscribeProjectEvents: Unable to complete api request [err=%s]", err)
}
if response.IsError() {
data := struct {
Message string `json:"message"`
}{}
if err := json.NewDecoder(response.RawBody()).Decode(&data); err != nil {
return nil, err
}
return nil, fmt.Errorf("CallSubscribeProjectEvents: Unsuccessful response: [message=%s]", data.Message)
}
return response.RawResponse, nil
}

View File

@@ -206,3 +206,20 @@ type GetProjectByIDRequest struct {
type GetProjectByIDResponse struct {
Project model.Project `json:"workspace"`
}
type SubscribeProjectEventsRequestRegister struct {
Event string `json:"event"`
Conditions *SubscribeProjectEventsRequestCondition `json:"conditions"`
}
type SubscribeProjectEventsRequestCondition struct {
EnvironmentSlug string `json:"environmentSlug"`
SecretPath string `json:"secretPath"`
}
type SubscribeProjectEventsRequest struct {
ProjectID string `json:"projectId"`
Register []SubscribeProjectEventsRequestRegister `json:"register"`
}
type SubscribeProjectEventsResponse struct{}
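For reference, the structs above marshal to a payload like the one produced by the following minimal, self-contained sketch. It only mirrors the JSON shape implied by the struct tags; the project ID, environment slug, and secret path are placeholders, and the real body is assembled in CallSubscribeProjectEvents.
package main
import (
	"encoding/json"
	"fmt"
)
// Sketch: reproduces the wire shape of SubscribeProjectEventsRequest using plain maps
// so it runs standalone; all values below are placeholders.
func main() {
	body, err := json.MarshalIndent(map[string]any{
		"projectId": "<project-id>",
		"register": []map[string]any{
			{
				"event": "secret:update",
				"conditions": map[string]string{
					"environmentSlug": "<env-slug>",
					"secretPath":      "<secret-path>",
				},
			},
		},
	}, "", "  ")
	if err != nil {
		panic(err)
	}
	fmt.Println(string(body))
}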

View File

@@ -231,7 +231,6 @@ func (r *InfisicalPushSecretReconciler) Reconcile(ctx context.Context, req ctrl.
}
func (r *InfisicalPushSecretReconciler) SetupWithManager(mgr ctrl.Manager) error {
// Custom predicate that allows both spec changes and deletions
specChangeOrDelete := predicate.Funcs{
UpdateFunc: func(e event.UpdateEvent) bool {

View File

@@ -31,6 +31,7 @@ import (
"sigs.k8s.io/controller-runtime/pkg/client"
"sigs.k8s.io/controller-runtime/pkg/event"
"sigs.k8s.io/controller-runtime/pkg/predicate"
"sigs.k8s.io/controller-runtime/pkg/source"
secretsv1alpha1 "github.com/Infisical/infisical/k8-operator/api/v1alpha1"
"github.com/Infisical/infisical/k8-operator/internal/controllerhelpers"
@@ -41,8 +42,10 @@ import (
// InfisicalSecretReconciler reconciles a InfisicalSecret object
type InfisicalSecretReconciler struct {
client.Client
BaseLogger logr.Logger
Scheme *runtime.Scheme
BaseLogger logr.Logger
Scheme *runtime.Scheme
SourceCh chan event.TypedGenericEvent[client.Object]
Namespace string
IsNamespaceScoped bool
}
@@ -74,7 +77,6 @@ func (r *InfisicalSecretReconciler) GetLogger(req ctrl.Request) logr.Logger {
// For more details, check Reconcile and its Result here:
// - https://pkg.go.dev/sigs.k8s.io/controller-runtime@v0.21.0/pkg/reconcile
func (r *InfisicalSecretReconciler) Reconcile(ctx context.Context, req ctrl.Request) (ctrl.Result, error) {
logger := r.GetLogger(req)
var infisicalSecretCRD secretsv1alpha1.InfisicalSecret
@@ -196,6 +198,20 @@ func (r *InfisicalSecretReconciler) Reconcile(ctx context.Context, req ctrl.Requ
}, nil
}
if infisicalSecretCRD.Spec.InstantUpdates {
if err := handler.OpenInstantUpdatesStream(ctx, logger, &infisicalSecretCRD, infisicalSecretResourceVariablesMap, r.SourceCh); err != nil {
requeueTime = time.Second * 10
logger.Info(fmt.Sprintf("event stream failed. Will requeue after [requeueTime=%v] [error=%s]", requeueTime, err.Error()))
return ctrl.Result{
RequeueAfter: requeueTime,
}, nil
}
logger.Info("Instant updates are enabled")
} else {
handler.CloseInstantUpdatesStream(ctx, logger, &infisicalSecretCRD, infisicalSecretResourceVariablesMap)
}
// Sync again after the specified time
logger.Info(fmt.Sprintf("Successfully synced %d secrets. Operator will requeue after [%v]", secretsCount, requeueTime))
return ctrl.Result{
@@ -204,7 +220,12 @@ func (r *InfisicalSecretReconciler) Reconcile(ctx context.Context, req ctrl.Requ
}
func (r *InfisicalSecretReconciler) SetupWithManager(mgr ctrl.Manager) error {
r.SourceCh = make(chan event.TypedGenericEvent[client.Object])
return ctrl.NewControllerManagedBy(mgr).
WatchesRawSource(
source.Channel[client.Object](r.SourceCh, &util.EnqueueDelayedEventHandler{Delay: time.Second * 10}),
).
For(&secretsv1alpha1.InfisicalSecret{}, builder.WithPredicates(predicate.Funcs{
UpdateFunc: func(e event.UpdateEvent) bool {
if e.ObjectOld.GetGeneration() == e.ObjectNew.GetGeneration() {
@@ -230,4 +251,5 @@ func (r *InfisicalSecretReconciler) SetupWithManager(mgr ctrl.Manager) error {
},
})).
Complete(r)
}

View File

@@ -7,6 +7,7 @@ import (
"k8s.io/apimachinery/pkg/runtime"
"k8s.io/apimachinery/pkg/types"
"sigs.k8s.io/controller-runtime/pkg/client"
"sigs.k8s.io/controller-runtime/pkg/event"
"github.com/Infisical/infisical/k8-operator/api/v1alpha1"
"github.com/Infisical/infisical/k8-operator/internal/api"
@@ -100,3 +101,22 @@ func (h *InfisicalSecretHandler) SetInfisicalAutoRedeploymentReady(ctx context.C
}
reconciler.SetInfisicalAutoRedeploymentReady(ctx, logger, infisicalSecret, numDeployments, errorToConditionOn)
}
func (h *InfisicalSecretHandler) CloseInstantUpdatesStream(ctx context.Context, logger logr.Logger, infisicalSecret *v1alpha1.InfisicalSecret, resourceVariablesMap map[string]util.ResourceVariables) error {
reconciler := &InfisicalSecretReconciler{
Client: h.Client,
Scheme: h.Scheme,
IsNamespaceScoped: h.IsNamespaceScoped,
}
return reconciler.CloseInstantUpdatesStream(ctx, logger, infisicalSecret, resourceVariablesMap)
}
// Ensures that the SSE stream is open; if the stream is already open, this is a no-op
func (h *InfisicalSecretHandler) OpenInstantUpdatesStream(ctx context.Context, logger logr.Logger, infisicalSecret *v1alpha1.InfisicalSecret, resourceVariablesMap map[string]util.ResourceVariables, eventCh chan<- event.TypedGenericEvent[client.Object]) error {
reconciler := &InfisicalSecretReconciler{
Client: h.Client,
Scheme: h.Scheme,
IsNamespaceScoped: h.IsNamespaceScoped,
}
return reconciler.OpenInstantUpdatesStream(ctx, logger, infisicalSecret, resourceVariablesMap, eventCh)
}

View File

@@ -5,6 +5,7 @@ import (
"context"
"errors"
"fmt"
"net/http"
"strings"
tpl "text/template"
@@ -15,11 +16,14 @@ import (
"github.com/Infisical/infisical/k8-operator/internal/model"
"github.com/Infisical/infisical/k8-operator/internal/template"
"github.com/Infisical/infisical/k8-operator/internal/util"
"github.com/Infisical/infisical/k8-operator/internal/util/sse"
"github.com/go-logr/logr"
"github.com/go-resty/resty/v2"
"k8s.io/apimachinery/pkg/runtime"
"k8s.io/apimachinery/pkg/types"
"sigs.k8s.io/controller-runtime/pkg/client"
"sigs.k8s.io/controller-runtime/pkg/event"
infisicalSdk "github.com/infisical/go-sdk"
corev1 "k8s.io/api/core/v1"
@@ -409,9 +413,10 @@ func (r *InfisicalSecretReconciler) getResourceVariables(infisicalSecret v1alpha
})
resourceVariablesMap[string(infisicalSecret.UID)] = util.ResourceVariables{
InfisicalClient: client,
CancelCtx: cancel,
AuthDetails: util.AuthenticationDetails{},
InfisicalClient: client,
CancelCtx: cancel,
AuthDetails: util.AuthenticationDetails{},
ServerSentEvents: sse.NewConnectionRegistry(ctx),
}
resourceVariables = resourceVariablesMap[string(infisicalSecret.UID)]
@@ -421,7 +426,6 @@ func (r *InfisicalSecretReconciler) getResourceVariables(infisicalSecret v1alpha
}
return resourceVariables
}
func (r *InfisicalSecretReconciler) updateResourceVariables(infisicalSecret v1alpha1.InfisicalSecret, resourceVariables util.ResourceVariables, resourceVariablesMap map[string]util.ResourceVariables) {
@@ -454,9 +458,10 @@ func (r *InfisicalSecretReconciler) ReconcileInfisicalSecret(ctx context.Context
}
r.updateResourceVariables(*infisicalSecret, util.ResourceVariables{
InfisicalClient: infisicalClient,
CancelCtx: cancelCtx,
AuthDetails: authDetails,
InfisicalClient: infisicalClient,
CancelCtx: cancelCtx,
AuthDetails: authDetails,
ServerSentEvents: sse.NewConnectionRegistry(ctx),
}, resourceVariablesMap)
}
@@ -525,3 +530,94 @@ func (r *InfisicalSecretReconciler) ReconcileInfisicalSecret(ctx context.Context
return secretsCount, nil
}
func (r *InfisicalSecretReconciler) CloseInstantUpdatesStream(ctx context.Context, logger logr.Logger, infisicalSecret *v1alpha1.InfisicalSecret, resourceVariablesMap map[string]util.ResourceVariables) error {
if infisicalSecret == nil {
return fmt.Errorf("infisicalSecret is nil")
}
variables := r.getResourceVariables(*infisicalSecret, resourceVariablesMap)
if !variables.AuthDetails.IsMachineIdentityAuth {
return fmt.Errorf("only machine identity is supported for subscriptions")
}
conn := variables.ServerSentEvents
if _, ok := conn.Get(); ok {
conn.Close()
}
return nil
}
func (r *InfisicalSecretReconciler) OpenInstantUpdatesStream(ctx context.Context, logger logr.Logger, infisicalSecret *v1alpha1.InfisicalSecret, resourceVariablesMap map[string]util.ResourceVariables, eventCh chan<- event.TypedGenericEvent[client.Object]) error {
if infisicalSecret == nil {
return fmt.Errorf("infisicalSecret is nil")
}
variables := r.getResourceVariables(*infisicalSecret, resourceVariablesMap)
if !variables.AuthDetails.IsMachineIdentityAuth {
return fmt.Errorf("only machine identity is supported for subscriptions")
}
projectSlug := variables.AuthDetails.MachineIdentityScope.ProjectSlug
secretsPath := variables.AuthDetails.MachineIdentityScope.SecretsPath
envSlug := variables.AuthDetails.MachineIdentityScope.EnvSlug
infisicalClient := variables.InfisicalClient
sseRegistry := variables.ServerSentEvents
token := infisicalClient.Auth().GetAccessToken()
project, err := util.GetProjectBySlug(token, projectSlug)
if err != nil {
return fmt.Errorf("failed to get project [err=%s]", err)
}
if variables.AuthDetails.MachineIdentityScope.Recursive {
secretsPath = fmt.Sprint(secretsPath, "**")
}
events, errors, err := sseRegistry.Subscribe(func() (*http.Response, error) {
httpClient := resty.New()
req, err := api.CallSubscribeProjectEvents(httpClient, project.ID, secretsPath, envSlug, token)
if err != nil {
return nil, err
}
return req, nil
})
if err != nil {
return fmt.Errorf("unable to connect sse [err=%s]", err)
}
go func() {
outer:
for {
select {
case ev := <-events:
logger.Info("Received SSE Event", "event", ev)
eventCh <- event.TypedGenericEvent[client.Object]{
Object: infisicalSecret,
}
case err := <-errors:
logger.Error(err, "Error occurred")
break outer
case <-ctx.Done():
break outer
}
}
}()
return nil
}

View File

@@ -0,0 +1,59 @@
package util
import (
"context"
"math/rand"
"time"
"k8s.io/apimachinery/pkg/types"
"k8s.io/client-go/util/workqueue"
"sigs.k8s.io/controller-runtime/pkg/client"
"sigs.k8s.io/controller-runtime/pkg/event"
"sigs.k8s.io/controller-runtime/pkg/reconcile"
)
// computeMaxJitterDuration returns the maximum duration along with a random
// jitter between 0 and max. The jitter helps spread out event processing.
func computeMaxJitterDuration(max time.Duration) (time.Duration, time.Duration) {
if max <= 0 {
return 0, 0
}
jitter := time.Duration(rand.Int63n(int64(max)))
return max, jitter
}
// EnqueueDelayedEventHandler enqueues reconcile requests with a random delay (jitter)
// to spread the load and avoid thundering herd issues.
type EnqueueDelayedEventHandler struct {
Delay time.Duration
}
func (e *EnqueueDelayedEventHandler) Create(_ context.Context, _ event.TypedCreateEvent[client.Object], _ workqueue.TypedRateLimitingInterface[reconcile.Request]) {
}
func (e *EnqueueDelayedEventHandler) Update(_ context.Context, _ event.TypedUpdateEvent[client.Object], _ workqueue.TypedRateLimitingInterface[reconcile.Request]) {
}
func (e *EnqueueDelayedEventHandler) Delete(_ context.Context, _ event.TypedDeleteEvent[client.Object], _ workqueue.TypedRateLimitingInterface[reconcile.Request]) {
}
func (e *EnqueueDelayedEventHandler) Generic(_ context.Context, evt event.TypedGenericEvent[client.Object], q workqueue.TypedRateLimitingInterface[reconcile.Request]) {
if evt.Object == nil {
return
}
req := reconcile.Request{
NamespacedName: types.NamespacedName{
Namespace: evt.Object.GetNamespace(),
Name: evt.Object.GetName(),
},
}
_, delay := computeMaxJitterDuration(e.Delay)
if delay > 0 {
q.AddAfter(req, delay)
} else {
q.Add(req)
}
}
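// Usage note (sketch): this handler is registered in the InfisicalSecret controller's
// SetupWithManager elsewhere in this change via
//
//	WatchesRawSource(source.Channel[client.Object](r.SourceCh, &util.EnqueueDelayedEventHandler{Delay: time.Second * 10}))
//
// so generic events emitted from the SSE listener are re-enqueued with up to 10s of
// random jitter instead of all reconciling at once.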

View File

@@ -3,11 +3,13 @@ package util
import (
"context"
"github.com/Infisical/infisical/k8-operator/internal/util/sse"
infisicalSdk "github.com/infisical/go-sdk"
)
type ResourceVariables struct {
InfisicalClient infisicalSdk.InfisicalClientInterface
CancelCtx context.CancelFunc
AuthDetails AuthenticationDetails
InfisicalClient infisicalSdk.InfisicalClientInterface
CancelCtx context.CancelFunc
AuthDetails AuthenticationDetails
ServerSentEvents *sse.ConnectionRegistry
}

View File

@@ -0,0 +1,331 @@
package sse
import (
"bufio"
"context"
"io"
"net/http"
"strings"
"sync"
"sync/atomic"
"time"
)
// Event represents a Server-Sent Event
type Event struct {
ID string
Event string
Data string
Retry int
}
// ConnectionMeta holds metadata about an SSE connection
type ConnectionMeta struct {
EventChan <-chan Event
ErrorChan <-chan error
lastPingAt atomic.Value // stores time.Time
cancel context.CancelFunc
}
// LastPing returns the last ping time
func (c *ConnectionMeta) LastPing() time.Time {
if t, ok := c.lastPingAt.Load().(time.Time); ok {
return t
}
return time.Time{}
}
// UpdateLastPing atomically updates the last ping time
func (c *ConnectionMeta) UpdateLastPing() {
c.lastPingAt.Store(time.Now())
}
// Cancel terminates the connection
func (c *ConnectionMeta) Cancel() {
if c.cancel != nil {
c.cancel()
}
}
// ConnectionRegistry manages SSE connections with high performance
type ConnectionRegistry struct {
mu sync.RWMutex
conn *ConnectionMeta
monitorOnce sync.Once
monitorStop chan struct{}
onPing func() // Callback for ping events
}
// NewConnectionRegistry creates a new high-performance connection registry
func NewConnectionRegistry(ctx context.Context) *ConnectionRegistry {
r := &ConnectionRegistry{
monitorStop: make(chan struct{}),
}
// Configure ping handler
r.onPing = func() {
r.UpdateLastPing()
}
return r
}
// Subscribe provides SSE events, creating a connection if needed
func (r *ConnectionRegistry) Subscribe(request func() (*http.Response, error)) (<-chan Event, <-chan error, error) {
// Fast path: check if connection exists
if conn := r.getConnection(); conn != nil {
return conn.EventChan, conn.ErrorChan, nil
}
// Slow path: create new connection under lock
r.mu.Lock()
defer r.mu.Unlock()
// Double-check after acquiring lock
if r.conn != nil {
return r.conn.EventChan, r.conn.ErrorChan, nil
}
res, err := request()
if err != nil {
return nil, nil, err
}
conn, err := r.createStream(res)
if err != nil {
return nil, nil, err
}
r.conn = conn
// Start monitor once
r.monitorOnce.Do(func() {
go r.monitorConnections()
})
return conn.EventChan, conn.ErrorChan, nil
}
// Get retrieves the current connection
func (r *ConnectionRegistry) Get() (*ConnectionMeta, bool) {
conn := r.getConnection()
return conn, conn != nil
}
// IsConnected checks if there's an active connection
func (r *ConnectionRegistry) IsConnected() bool {
return r.getConnection() != nil
}
// UpdateLastPing updates the last ping time for the current connection
func (r *ConnectionRegistry) UpdateLastPing() {
if conn := r.getConnection(); conn != nil {
conn.UpdateLastPing()
}
}
// Close gracefully shuts down the registry
func (r *ConnectionRegistry) Close() {
// Stop monitor first
select {
case <-r.monitorStop:
// Already closed
default:
close(r.monitorStop)
}
// Close connection
r.mu.Lock()
if r.conn != nil {
r.conn.Cancel()
r.conn = nil
}
r.mu.Unlock()
}
// getConnection returns the current connection without locking
func (r *ConnectionRegistry) getConnection() *ConnectionMeta {
r.mu.RLock()
conn := r.conn
r.mu.RUnlock()
return conn
}
func (r *ConnectionRegistry) createStream(res *http.Response) (*ConnectionMeta, error) {
ctx, cancel := context.WithCancel(context.Background())
eventChan, errorChan, err := r.stream(ctx, res)
if err != nil {
cancel()
return nil, err
}
meta := &ConnectionMeta{
EventChan: eventChan,
ErrorChan: errorChan,
cancel: cancel,
}
meta.UpdateLastPing()
return meta, nil
}
// stream processes SSE data from an HTTP response
func (r *ConnectionRegistry) stream(ctx context.Context, res *http.Response) (<-chan Event, <-chan error, error) {
eventChan := make(chan Event, 10)
errorChan := make(chan error, 1)
go r.processStream(ctx, res.Body, eventChan, errorChan)
return eventChan, errorChan, nil
}
// processStream reads and parses SSE events from the response body
func (r *ConnectionRegistry) processStream(ctx context.Context, body io.ReadCloser, eventChan chan<- Event, errorChan chan<- error) {
defer body.Close()
defer close(eventChan)
defer close(errorChan)
scanner := bufio.NewScanner(body)
var currentEvent Event
var dataBuilder strings.Builder
for scanner.Scan() {
select {
case <-ctx.Done():
return
default:
}
line := scanner.Text()
// Empty line indicates end of event
if len(line) == 0 {
// Dispatch if any data was buffered or an event name was set
// (currentEvent.Data is only populated from dataBuilder below)
if dataBuilder.Len() > 0 || currentEvent.Event != "" {
// Finalize data
if dataBuilder.Len() > 0 {
currentEvent.Data = dataBuilder.String()
dataBuilder.Reset()
}
// Handle ping events
if r.isPingEvent(currentEvent) {
if r.onPing != nil {
r.onPing()
}
} else {
// Send non-ping events
select {
case eventChan <- currentEvent:
case <-ctx.Done():
return
}
}
// Reset for next event
currentEvent = Event{}
}
continue
}
// Parse line efficiently
r.parseLine(line, &currentEvent, &dataBuilder)
}
if err := scanner.Err(); err != nil {
select {
case errorChan <- err:
case <-ctx.Done():
}
}
}
// parseLine efficiently parses SSE protocol lines
func (r *ConnectionRegistry) parseLine(line string, event *Event, dataBuilder *strings.Builder) {
colonIndex := strings.IndexByte(line, ':')
if colonIndex == -1 {
return // Invalid line format
}
field := line[:colonIndex]
value := line[colonIndex+1:]
// Trim leading space from value (SSE spec)
if len(value) > 0 && value[0] == ' ' {
value = value[1:]
}
switch field {
case "data":
if dataBuilder.Len() > 0 {
dataBuilder.WriteByte('\n')
}
dataBuilder.WriteString(value)
case "event":
event.Event = value
case "id":
event.ID = value
case "retry":
// Parse retry value if needed
// This could be used to configure reconnection delay
case "":
// Comment line, ignore
}
}
// isPingEvent checks if an event is a ping/keepalive
func (r *ConnectionRegistry) isPingEvent(event Event) bool {
// Check for common ping patterns
if event.Event == "ping" {
return true
}
// Check for heartbeat data (common pattern is "1" or similar)
if event.Event == "" && strings.TrimSpace(event.Data) == "1" {
return true
}
return false
}
// monitorConnections checks connection health periodically
func (r *ConnectionRegistry) monitorConnections() {
const (
checkInterval = 30 * time.Second
pingTimeout = 2 * time.Minute
)
ticker := time.NewTicker(checkInterval)
defer ticker.Stop()
for {
select {
case <-r.monitorStop:
return
case <-ticker.C:
r.checkConnectionHealth(pingTimeout)
}
}
}
// checkConnectionHealth verifies connection is still alive
func (r *ConnectionRegistry) checkConnectionHealth(timeout time.Duration) {
conn := r.getConnection()
if conn == nil {
return
}
if time.Since(conn.LastPing()) > timeout {
// Connection is stale, close it
r.mu.Lock()
if r.conn == conn { // Verify it's still the same connection
r.conn.Cancel()
// Do not signal monitorStop here: its only receiver is the monitor
// goroutine currently running this check, so a send would block forever
// while holding r.mu. Clearing r.conn is enough; the monitor no-ops
// until a new connection is established.
r.conn = nil
}
r.mu.Unlock()
}
}
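
The registry above is consumed through Subscribe, which lazily opens one shared stream per registry and hands back read-only event and error channels. A minimal usage sketch follows; the import path, endpoint URL, and bearer token are placeholders, and only the exported API shown above plus the standard library is used.

	package main

	import (
		"context"
		"fmt"
		"net/http"

		// assumption: module path of the operator's sse package shown above
		"github.com/Infisical/infisical/k8-operator/packages/sse"
	)

	func main() {
		registry := sse.NewConnectionRegistry(context.Background())
		defer registry.Close()

		// The request factory runs only when no live connection exists yet.
		events, errs, err := registry.Subscribe(func() (*http.Response, error) {
			req, reqErr := http.NewRequest(http.MethodGet, "https://example.invalid/v1/events", nil) // placeholder URL
			if reqErr != nil {
				return nil, reqErr
			}
			req.Header.Set("Accept", "text/event-stream")
			req.Header.Set("Authorization", "Bearer <machine-identity-token>") // placeholder token
			return http.DefaultClient.Do(req)
		})
		if err != nil {
			panic(err)
		}

		for {
			select {
			case ev, ok := <-events:
				if !ok {
					return // stream ended
				}
				fmt.Printf("event=%q data=%q\n", ev.Event, ev.Data)
			case streamErr := <-errs:
				fmt.Println("stream error:", streamErr)
				return
			}
		}
	}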

View File

@@ -9,7 +9,6 @@ import (
)
func GetProjectByID(accessToken string, projectId string) (model.Project, error) {
httpClient := resty.New()
httpClient.
SetAuthScheme("Bearer").
@@ -25,3 +24,21 @@ func GetProjectByID(accessToken string, projectId string) (model.Project, error)
return projectDetails.Project, nil
}
func GetProjectBySlug(accessToken string, projectSlug string) (model.Project, error) {
httpClient := resty.New()
httpClient.
SetAuthScheme("Bearer").
SetAuthToken(accessToken).
SetHeader("Accept", "application/json")
project, err := api.CallGetProjectByIDv2(httpClient, api.GetProjectByIDRequest{
ProjectID: projectSlug,
})
if err != nil {
return model.Project{}, fmt.Errorf("unable to get project by slug. [err=%v]", err)
}
return project, nil
}
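
A minimal, hypothetical call site for the new helper; the access token and project slug are placeholders, and error handling mirrors the pattern used above.

	// Sketch: resolve a project by slug with an already-obtained token.
	project, err := GetProjectBySlug("<machine-identity-access-token>", "example-project") // placeholders
	if err != nil {
		return err // propagate to the caller (e.g. the CLI command)
	}
	fmt.Printf("resolved project: %+v\n", project)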