Compare commits


10 Commits

Author SHA1 Message Date
Sid
fa00847071 Update docs/docs.json (Co-authored-by: greptile-apps[bot] <165735046+greptile-apps[bot]@users.noreply.github.com>) 2025-07-10 22:42:05 +05:30
3c50c2d00a fix: query typo 2025-07-10 21:53:53 +05:30
3d8405d648 fix: sync fields 2025-07-10 21:53:53 +05:30
31a4d1bf3d fix: retry and doc images 2025-07-10 21:53:53 +05:30
05ab55a21b fix: cleanup railway integration 2025-07-10 21:53:53 +05:30
cc8febab41 lint: fix 2025-07-10 21:53:37 +05:30
8aa06302ad fix: undo mock on-prem change 2025-07-10 21:49:33 +05:30
abd23078b8 feat: add documentation on railway 2025-07-10 21:49:33 +05:30
9f34c61c2b fix: railway sync config 2025-07-10 21:49:33 +05:30
0796ee3e5f feat: implement railway secret sync 2025-07-10 21:49:33 +05:30
150 changed files with 676 additions and 1871 deletions

View File

@ -23,7 +23,7 @@ REDIS_URL=redis://redis:6379
# Required
SITE_URL=http://localhost:8080
# Mail/SMTP
# Mail/SMTP
SMTP_HOST=
SMTP_PORT=
SMTP_FROM_ADDRESS=
@ -132,6 +132,3 @@ DATADOG_PROFILING_ENABLED=
DATADOG_ENV=
DATADOG_SERVICE=
DATADOG_HOSTNAME=
# kubernetes
KUBERNETES_AUTO_FETCH_SERVICE_ACCOUNT_TOKEN=false
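
Note: this flag is consumed by the backend env schema (KUBERNETES_AUTO_FETCH_SERVICE_ACCOUNT_TOKEN: zodStrBool.default("false") in the env.ts hunk further down). A minimal sketch of how such a string-boolean flag can be parsed with Zod; the zodStrBool stand-in below is an assumption for illustration, not the actual helper from the codebase:

import { z } from "zod";

// Assumed stand-in for zodStrBool: coerce "true"/"false" env strings into booleans.
const zodStrBool = z.enum(["true", "false"]).transform((v) => v === "true");

const envSchema = z.object({
  KUBERNETES_AUTO_FETCH_SERVICE_ACCOUNT_TOKEN: zodStrBool.default("false")
});

envSchema.parse({});                                                       // => { KUBERNETES_AUTO_FETCH_SERVICE_ACCOUNT_TOKEN: false }
envSchema.parse({ KUBERNETES_AUTO_FETCH_SERVICE_ACCOUNT_TOKEN: "true" }); // => { KUBERNETES_AUTO_FETCH_SERVICE_ACCOUNT_TOKEN: true }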

View File

@ -34,7 +34,6 @@ ARG INFISICAL_PLATFORM_VERSION
ENV VITE_INFISICAL_PLATFORM_VERSION $INFISICAL_PLATFORM_VERSION
ARG CAPTCHA_SITE_KEY
ENV VITE_CAPTCHA_SITE_KEY $CAPTCHA_SITE_KEY
ENV NODE_OPTIONS="--max-old-space-size=8192"
# Build
RUN npm run build
@ -78,7 +77,6 @@ RUN npm ci --only-production
COPY /backend .
COPY --chown=non-root-user:nodejs standalone-entrypoint.sh standalone-entrypoint.sh
RUN npm i -D tsconfig-paths
ENV NODE_OPTIONS="--max-old-space-size=8192"
RUN npm run build
# Production stage

View File

@ -1,55 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const existingSecretApprovalPolicies = await knex(TableName.SecretApprovalPolicy)
.whereNull("secretPath")
.orWhere("secretPath", "");
const existingAccessApprovalPolicies = await knex(TableName.AccessApprovalPolicy)
.whereNull("secretPath")
.orWhere("secretPath", "");
// update all the secret approval policies secretPath to be "/**"
if (existingSecretApprovalPolicies.length) {
await knex(TableName.SecretApprovalPolicy)
.whereIn(
"id",
existingSecretApprovalPolicies.map((el) => el.id)
)
.update({
secretPath: "/**"
});
}
// update all the access approval policies secretPath to be "/**"
if (existingAccessApprovalPolicies.length) {
await knex(TableName.AccessApprovalPolicy)
.whereIn(
"id",
existingAccessApprovalPolicies.map((el) => el.id)
)
.update({
secretPath: "/**"
});
}
await knex.schema.alterTable(TableName.SecretApprovalPolicy, (table) => {
table.string("secretPath").notNullable().alter();
});
await knex.schema.alterTable(TableName.AccessApprovalPolicy, (table) => {
table.string("secretPath").notNullable().alter();
});
}
export async function down(knex: Knex): Promise<void> {
await knex.schema.alterTable(TableName.SecretApprovalPolicy, (table) => {
table.string("secretPath").nullable().alter();
});
await knex.schema.alterTable(TableName.AccessApprovalPolicy, (table) => {
table.string("secretPath").nullable().alter();
});
}

View File

@ -1,35 +0,0 @@
import { Knex } from "knex";
import { TableName } from "@app/db/schemas";
export async function up(knex: Knex): Promise<void> {
const hasCommitterCol = await knex.schema.hasColumn(TableName.SecretApprovalRequest, "committerUserId");
if (hasCommitterCol) {
await knex.schema.alterTable(TableName.SecretApprovalRequest, (tb) => {
tb.uuid("committerUserId").nullable().alter();
});
}
const hasRequesterCol = await knex.schema.hasColumn(TableName.AccessApprovalRequest, "requestedByUserId");
if (hasRequesterCol) {
await knex.schema.alterTable(TableName.AccessApprovalRequest, (tb) => {
tb.dropForeign("requestedByUserId");
tb.foreign("requestedByUserId").references("id").inTable(TableName.Users).onDelete("CASCADE");
});
}
}
export async function down(knex: Knex): Promise<void> {
// can't undo committer nullable
const hasRequesterCol = await knex.schema.hasColumn(TableName.AccessApprovalRequest, "requestedByUserId");
if (hasRequesterCol) {
await knex.schema.alterTable(TableName.AccessApprovalRequest, (tb) => {
tb.dropForeign("requestedByUserId");
tb.foreign("requestedByUserId").references("id").inTable(TableName.Users).onDelete("SET NULL");
});
}
}

View File

@ -1,66 +0,0 @@
import { Knex } from "knex";
import { inMemoryKeyStore } from "@app/keystore/memory";
import { selectAllTableCols } from "@app/lib/knex";
import { TableName } from "../schemas";
import { getMigrationEnvConfig } from "./utils/env-config";
import { getMigrationEncryptionServices } from "./utils/services";
export async function up(knex: Knex) {
const existingSuperAdminsWithGithubConnection = await knex(TableName.SuperAdmin)
.select(selectAllTableCols(TableName.SuperAdmin))
.whereNotNull(`${TableName.SuperAdmin}.encryptedGitHubAppConnectionClientId`);
const envConfig = getMigrationEnvConfig();
const keyStore = inMemoryKeyStore();
const { kmsService } = await getMigrationEncryptionServices({ envConfig, keyStore, db: knex });
const decryptor = kmsService.decryptWithRootKey();
const encryptor = kmsService.encryptWithRootKey();
const tasks = existingSuperAdminsWithGithubConnection.map(async (admin) => {
const overrides = (
admin.encryptedEnvOverrides ? JSON.parse(decryptor(Buffer.from(admin.encryptedEnvOverrides)).toString()) : {}
) as Record<string, string>;
if (admin.encryptedGitHubAppConnectionClientId) {
overrides.INF_APP_CONNECTION_GITHUB_APP_CLIENT_ID = decryptor(
admin.encryptedGitHubAppConnectionClientId
).toString();
}
if (admin.encryptedGitHubAppConnectionClientSecret) {
overrides.INF_APP_CONNECTION_GITHUB_APP_CLIENT_SECRET = decryptor(
admin.encryptedGitHubAppConnectionClientSecret
).toString();
}
if (admin.encryptedGitHubAppConnectionPrivateKey) {
overrides.INF_APP_CONNECTION_GITHUB_APP_PRIVATE_KEY = decryptor(
admin.encryptedGitHubAppConnectionPrivateKey
).toString();
}
if (admin.encryptedGitHubAppConnectionSlug) {
overrides.INF_APP_CONNECTION_GITHUB_APP_SLUG = decryptor(admin.encryptedGitHubAppConnectionSlug).toString();
}
if (admin.encryptedGitHubAppConnectionId) {
overrides.INF_APP_CONNECTION_GITHUB_APP_ID = decryptor(admin.encryptedGitHubAppConnectionId).toString();
}
const encryptedEnvOverrides = encryptor(Buffer.from(JSON.stringify(overrides)));
await knex(TableName.SuperAdmin).where({ id: admin.id }).update({
encryptedEnvOverrides
});
});
await Promise.all(tasks);
}
export async function down() {
// No down migration needed as this migration is only for data transformation
// and does not change the schema.
}

View File

@ -14,8 +14,8 @@ export const AccessApprovalPoliciesApproversSchema = z.object({
updatedAt: z.date(),
approverUserId: z.string().uuid().nullable().optional(),
approverGroupId: z.string().uuid().nullable().optional(),
sequence: z.number().default(1).nullable().optional(),
approvalsRequired: z.number().nullable().optional()
sequence: z.number().default(0).nullable().optional(),
approvalsRequired: z.number().default(1).nullable().optional()
});
export type TAccessApprovalPoliciesApprovers = z.infer<typeof AccessApprovalPoliciesApproversSchema>;
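
For reference, a small sketch (assuming Zod 3 semantics) of how fields shaped like the ones above parse; the wrapper order matters because .default() sits inside .nullable().optional():

import { z } from "zod";

const approvalsRequired = z.number().default(1).nullable().optional();

approvalsRequired.parse(3);         // => 3
approvalsRequired.parse(null);      // => null (nullable passes null through)
approvalsRequired.parse(undefined); // => undefined (the outer .optional() returns before .default(1) applies)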

View File

@ -11,7 +11,7 @@ export const AccessApprovalPoliciesSchema = z.object({
id: z.string().uuid(),
name: z.string(),
approvals: z.number().default(1),
secretPath: z.string(),
secretPath: z.string().nullable().optional(),
envId: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date(),

View File

@ -12,8 +12,8 @@ export const CertificateAuthoritiesSchema = z.object({
createdAt: z.date(),
updatedAt: z.date(),
projectId: z.string(),
status: z.string(),
enableDirectIssuance: z.boolean().default(true),
status: z.string(),
name: z.string()
});

View File

@ -25,8 +25,8 @@ export const CertificatesSchema = z.object({
certificateTemplateId: z.string().uuid().nullable().optional(),
keyUsages: z.string().array().nullable().optional(),
extendedKeyUsages: z.string().array().nullable().optional(),
projectId: z.string(),
pkiSubscriberId: z.string().uuid().nullable().optional()
pkiSubscriberId: z.string().uuid().nullable().optional(),
projectId: z.string()
});
export type TCertificates = z.infer<typeof CertificatesSchema>;

View File

@ -10,7 +10,7 @@ import { TImmutableDBKeys } from "./models";
export const SecretApprovalPoliciesSchema = z.object({
id: z.string().uuid(),
name: z.string(),
secretPath: z.string(),
secretPath: z.string().nullable().optional(),
approvals: z.number().default(1),
envId: z.string().uuid(),
createdAt: z.date(),

View File

@ -18,7 +18,7 @@ export const SecretApprovalRequestsSchema = z.object({
createdAt: z.date(),
updatedAt: z.date(),
isReplicated: z.boolean().nullable().optional(),
committerUserId: z.string().uuid().nullable().optional(),
committerUserId: z.string().uuid(),
statusChangedByUserId: z.string().uuid().nullable().optional(),
bypassReason: z.string().nullable().optional()
});

View File

@ -2,7 +2,6 @@ import { nanoid } from "nanoid";
import { z } from "zod";
import { ApproverType, BypasserType } from "@app/ee/services/access-approval-policy/access-approval-policy-types";
import { removeTrailingSlash } from "@app/lib/fn";
import { EnforcementLevel } from "@app/lib/types";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
@ -20,7 +19,7 @@ export const registerAccessApprovalPolicyRouter = async (server: FastifyZodProvi
body: z.object({
projectSlug: z.string().trim(),
name: z.string().optional(),
secretPath: z.string().trim().min(1, { message: "Secret path cannot be empty" }).transform(removeTrailingSlash),
secretPath: z.string().trim().default("/"),
environment: z.string(),
approvers: z
.discriminatedUnion("type", [
@ -175,9 +174,8 @@ export const registerAccessApprovalPolicyRouter = async (server: FastifyZodProvi
secretPath: z
.string()
.trim()
.min(1, { message: "Secret path cannot be empty" })
.optional()
.transform((val) => (val ? removeTrailingSlash(val) : val)),
.transform((val) => (val === "" ? "/" : val)),
approvers: z
.discriminatedUnion("type", [
z.object({

View File

@ -23,8 +23,10 @@ export const registerSecretApprovalPolicyRouter = async (server: FastifyZodProvi
environment: z.string(),
secretPath: z
.string()
.min(1, { message: "Secret path cannot be empty" })
.transform((val) => removeTrailingSlash(val)),
.optional()
.nullable()
.default("/")
.transform((val) => (val ? removeTrailingSlash(val) : val)),
approvers: z
.discriminatedUnion("type", [
z.object({ type: z.literal(ApproverType.Group), id: z.string() }),
@ -98,10 +100,10 @@ export const registerSecretApprovalPolicyRouter = async (server: FastifyZodProvi
approvals: z.number().min(1).default(1),
secretPath: z
.string()
.trim()
.min(1, { message: "Secret path cannot be empty" })
.optional()
.transform((val) => (val ? removeTrailingSlash(val) : undefined)),
.nullable()
.transform((val) => (val ? removeTrailingSlash(val) : val))
.transform((val) => (val === "" ? "/" : val)),
enforcementLevel: z.nativeEnum(EnforcementLevel).optional(),
allowedSelfApprovals: z.boolean().default(true)
}),
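
A hedged sketch of how the secretPath chain in this hunk normalizes input; removeTrailingSlash below is a stand-in for the helper imported from @app/lib/fn, assumed to strip a single trailing slash except for the root path:

import { z } from "zod";

// Assumed behavior of removeTrailingSlash from @app/lib/fn.
const removeTrailingSlash = (path: string) =>
  path.length > 1 && path.endsWith("/") ? path.slice(0, -1) : path;

const secretPath = z
  .string()
  .optional()
  .nullable()
  .default("/")
  .transform((val) => (val ? removeTrailingSlash(val) : val));

secretPath.parse(undefined);   // => "/"      (omitted field falls back to the default)
secretPath.parse("/nested/");  // => "/nested" (trailing slash stripped)
secretPath.parse(null);        // => null      (explicit null preserved)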

View File

@ -58,7 +58,7 @@ export const registerSecretApprovalRequestRouter = async (server: FastifyZodProv
deletedAt: z.date().nullish(),
allowedSelfApprovals: z.boolean()
}),
committerUser: approvalRequestUser.nullish(),
committerUser: approvalRequestUser,
commits: z.object({ op: z.string(), secretId: z.string().nullable().optional() }).array(),
environment: z.string(),
reviewers: z.object({ userId: z.string(), status: z.string() }).array(),
@ -308,7 +308,7 @@ export const registerSecretApprovalRequestRouter = async (server: FastifyZodProv
}),
environment: z.string(),
statusChangedByUser: approvalRequestUser.optional(),
committerUser: approvalRequestUser.nullish(),
committerUser: approvalRequestUser,
reviewers: approvalRequestUser.extend({ status: z.string(), comment: z.string().optional() }).array(),
secretPath: z.string(),
commits: secretRawSchema

View File

@ -53,7 +53,7 @@ export interface TAccessApprovalPolicyDALFactory
envId: string;
enforcementLevel: string;
allowedSelfApprovals: boolean;
secretPath: string;
secretPath?: string | null | undefined;
deletedAt?: Date | null | undefined;
environment: {
id: string;
@ -93,7 +93,7 @@ export interface TAccessApprovalPolicyDALFactory
envId: string;
enforcementLevel: string;
allowedSelfApprovals: boolean;
secretPath: string;
secretPath?: string | null | undefined;
deletedAt?: Date | null | undefined;
environment: {
id: string;
@ -116,7 +116,7 @@ export interface TAccessApprovalPolicyDALFactory
envId: string;
enforcementLevel: string;
allowedSelfApprovals: boolean;
secretPath: string;
secretPath?: string | null | undefined;
deletedAt?: Date | null | undefined;
}>;
findLastValidPolicy: (
@ -138,7 +138,7 @@ export interface TAccessApprovalPolicyDALFactory
envId: string;
enforcementLevel: string;
allowedSelfApprovals: boolean;
secretPath: string;
secretPath?: string | null | undefined;
deletedAt?: Date | null | undefined;
}
| undefined
@ -190,7 +190,7 @@ export interface TAccessApprovalPolicyServiceFactory {
envId: string;
enforcementLevel: string;
allowedSelfApprovals: boolean;
secretPath: string;
secretPath?: string | null | undefined;
deletedAt?: Date | null | undefined;
}>;
deleteAccessApprovalPolicy: ({
@ -214,7 +214,7 @@ export interface TAccessApprovalPolicyServiceFactory {
envId: string;
enforcementLevel: string;
allowedSelfApprovals: boolean;
secretPath: string;
secretPath?: string | null | undefined;
deletedAt?: Date | null | undefined;
environment: {
id: string;
@ -252,7 +252,7 @@ export interface TAccessApprovalPolicyServiceFactory {
envId: string;
enforcementLevel: string;
allowedSelfApprovals: boolean;
secretPath: string;
secretPath?: string | null | undefined;
deletedAt?: Date | null | undefined;
}>;
getAccessApprovalPolicyByProjectSlug: ({
@ -286,7 +286,7 @@ export interface TAccessApprovalPolicyServiceFactory {
envId: string;
enforcementLevel: string;
allowedSelfApprovals: boolean;
secretPath: string;
secretPath?: string | null | undefined;
deletedAt?: Date | null | undefined;
environment: {
id: string;
@ -337,7 +337,7 @@ export interface TAccessApprovalPolicyServiceFactory {
envId: string;
enforcementLevel: string;
allowedSelfApprovals: boolean;
secretPath: string;
secretPath?: string | null | undefined;
deletedAt?: Date | null | undefined;
environment: {
id: string;

View File

@ -60,26 +60,6 @@ export const accessApprovalPolicyServiceFactory = ({
accessApprovalRequestReviewerDAL,
orgMembershipDAL
}: TAccessApprovalPolicyServiceFactoryDep): TAccessApprovalPolicyServiceFactory => {
const $policyExists = async ({
envId,
secretPath,
policyId
}: {
envId: string;
secretPath: string;
policyId?: string;
}) => {
const policy = await accessApprovalPolicyDAL
.findOne({
envId,
secretPath,
deletedAt: null
})
.catch(() => null);
return policyId ? policy && policy.id !== policyId : Boolean(policy);
};
const createAccessApprovalPolicy: TAccessApprovalPolicyServiceFactory["createAccessApprovalPolicy"] = async ({
name,
actor,
@ -126,12 +106,6 @@ export const accessApprovalPolicyServiceFactory = ({
const env = await projectEnvDAL.findOne({ slug: environment, projectId: project.id });
if (!env) throw new NotFoundError({ message: `Environment with slug '${environment}' not found` });
if (await $policyExists({ envId: env.id, secretPath })) {
throw new BadRequestError({
message: `A policy for secret path '${secretPath}' already exists in environment '${environment}'`
});
}
let approverUserIds = userApprovers;
if (userApproverNames.length) {
const approverUsersInDB = await userDAL.find({
@ -305,11 +279,7 @@ export const accessApprovalPolicyServiceFactory = ({
) as { username: string; sequence?: number }[];
const accessApprovalPolicy = await accessApprovalPolicyDAL.findById(policyId);
if (!accessApprovalPolicy) {
throw new NotFoundError({
message: `Access approval policy with ID '${policyId}' not found`
});
}
if (!accessApprovalPolicy) throw new BadRequestError({ message: "Approval policy not found" });
const currentApprovals = approvals || accessApprovalPolicy.approvals;
if (
@ -320,18 +290,9 @@ export const accessApprovalPolicyServiceFactory = ({
throw new BadRequestError({ message: "Approvals cannot be greater than approvers" });
}
if (
await $policyExists({
envId: accessApprovalPolicy.envId,
secretPath: secretPath || accessApprovalPolicy.secretPath,
policyId: accessApprovalPolicy.id
})
) {
throw new BadRequestError({
message: `A policy for secret path '${secretPath}' already exists in environment '${accessApprovalPolicy.environment.slug}'`
});
if (!accessApprovalPolicy) {
throw new NotFoundError({ message: `Secret approval policy with ID '${policyId}' not found` });
}
const { permission } = await permissionService.getProjectPermission({
actor,
actorId,

View File

@ -122,7 +122,7 @@ export interface TAccessApprovalPolicyServiceFactory {
envId: string;
enforcementLevel: string;
allowedSelfApprovals: boolean;
secretPath: string;
secretPath?: string | null | undefined;
deletedAt?: Date | null | undefined;
}>;
deleteAccessApprovalPolicy: ({
@ -146,7 +146,7 @@ export interface TAccessApprovalPolicyServiceFactory {
envId: string;
enforcementLevel: string;
allowedSelfApprovals: boolean;
secretPath: string;
secretPath?: string | null | undefined;
deletedAt?: Date | null | undefined;
environment: {
id: string;
@ -218,7 +218,7 @@ export interface TAccessApprovalPolicyServiceFactory {
envId: string;
enforcementLevel: string;
allowedSelfApprovals: boolean;
secretPath: string;
secretPath?: string | null | undefined;
deletedAt?: Date | null | undefined;
environment: {
id: string;
@ -269,7 +269,7 @@ export interface TAccessApprovalPolicyServiceFactory {
envId: string;
enforcementLevel: string;
allowedSelfApprovals: boolean;
secretPath: string;
secretPath?: string | null | undefined;
deletedAt?: Date | null | undefined;
environment: {
id: string;

View File

@ -1711,7 +1711,7 @@ interface SecretApprovalReopened {
interface SecretApprovalRequest {
type: EventType.SECRET_APPROVAL_REQUEST;
metadata: {
committedBy?: string | null;
committedBy: string;
secretApprovalRequestSlug: string;
secretApprovalRequestId: string;
eventType: SecretApprovalEvent;

View File

@ -21,7 +21,7 @@ import { randomUUID } from "crypto";
import { z } from "zod";
import { getConfig } from "@app/lib/config/env";
import { BadRequestError, UnauthorizedError } from "@app/lib/errors";
import { BadRequestError } from "@app/lib/errors";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { AwsIamAuthType, DynamicSecretAwsIamSchema, TDynamicProviderFns } from "./models";
@ -81,21 +81,6 @@ export const AwsIamProvider = (): TDynamicProviderFns => {
return client;
}
if (providerInputs.method === AwsIamAuthType.IRSA) {
// Allow instances to disable automatic service account token fetching (e.g. for shared cloud)
if (!appCfg.KUBERNETES_AUTO_FETCH_SERVICE_ACCOUNT_TOKEN) {
throw new UnauthorizedError({
message: "Failed to get AWS credentials via IRSA: KUBERNETES_AUTO_FETCH_SERVICE_ACCOUNT_TOKEN is not enabled."
});
}
// The SDK will automatically pick up credentials from the environment
const client = new IAMClient({
region: providerInputs.region
});
return client;
}
const client = new IAMClient({
region: providerInputs.region,
credentials: {
@ -116,7 +101,7 @@ export const AwsIamProvider = (): TDynamicProviderFns => {
.catch((err) => {
const message = (err as Error)?.message;
if (
(providerInputs.method === AwsIamAuthType.AssumeRole || providerInputs.method === AwsIamAuthType.IRSA) &&
providerInputs.method === AwsIamAuthType.AssumeRole &&
// assume role will throw an error asking to provider username, but if so this has access in aws correctly
message.includes("Must specify userName when calling with non-User credentials")
) {

View File

@ -28,8 +28,7 @@ export enum SqlProviders {
export enum AwsIamAuthType {
AssumeRole = "assume-role",
AccessKey = "access-key",
IRSA = "irsa"
AccessKey = "access-key"
}
export enum ElasticSearchAuthTypes {
@ -222,16 +221,6 @@ export const DynamicSecretAwsIamSchema = z.preprocess(
userGroups: z.string().trim().optional(),
policyArns: z.string().trim().optional(),
tags: ResourceMetadataSchema.optional()
}),
z.object({
method: z.literal(AwsIamAuthType.IRSA),
region: z.string().trim().min(1),
awsPath: z.string().trim().optional(),
permissionBoundaryPolicyArn: z.string().trim().optional(),
policyDocument: z.string().trim().optional(),
userGroups: z.string().trim().optional(),
policyArns: z.string().trim().optional(),
tags: ResourceMetadataSchema.optional()
})
])
);

View File

@ -361,6 +361,13 @@ export const ldapConfigServiceFactory = ({
});
} else {
const plan = await licenseService.getPlan(orgId);
if (plan?.slug !== "enterprise" && plan?.memberLimit && plan.membersUsed >= plan.memberLimit) {
// limit imposed on number of members allowed / number of members used exceeds the number of members allowed
throw new BadRequestError({
message: "Failed to create new member via LDAP due to member limit reached. Upgrade plan to add more members."
});
}
if (plan?.slug !== "enterprise" && plan?.identityLimit && plan.identitiesUsed >= plan.identityLimit) {
// limit imposed on number of identities allowed / number of identities used exceeds the number of identities allowed
throw new BadRequestError({

View File

@ -1,4 +1,5 @@
export const BillingPlanRows = {
MemberLimit: { name: "Organization member limit", field: "memberLimit" },
IdentityLimit: { name: "Organization identity limit", field: "identityLimit" },
WorkspaceLimit: { name: "Project limit", field: "workspaceLimit" },
EnvironmentLimit: { name: "Environment limit", field: "environmentLimit" },

View File

@ -442,7 +442,9 @@ export const licenseServiceFactory = ({
rows: data.rows.map((el) => {
let used = "-";
if (el.name === BillingPlanRows.WorkspaceLimit.name) {
if (el.name === BillingPlanRows.MemberLimit.name) {
used = orgMembersUsed.toString();
} else if (el.name === BillingPlanRows.WorkspaceLimit.name) {
used = projectCount.toString();
} else if (el.name === BillingPlanRows.IdentityLimit.name) {
used = (identityUsed + orgMembersUsed).toString();
@ -462,10 +464,12 @@ export const licenseServiceFactory = ({
const allowed = onPremFeatures[field as keyof TFeatureSet];
let used = "-";
if (field === BillingPlanRows.WorkspaceLimit.field) {
if (field === BillingPlanRows.MemberLimit.field) {
used = orgMembersUsed.toString();
} else if (field === BillingPlanRows.WorkspaceLimit.field) {
used = projectCount.toString();
} else if (field === BillingPlanRows.IdentityLimit.field) {
used = (identityUsed + orgMembersUsed).toString();
used = identityUsed.toString();
}
return {

View File

@ -311,6 +311,13 @@ export const samlConfigServiceFactory = ({
});
} else {
const plan = await licenseService.getPlan(orgId);
if (plan?.slug !== "enterprise" && plan?.memberLimit && plan.membersUsed >= plan.memberLimit) {
// limit imposed on number of members allowed / number of members used exceeds the number of members allowed
throw new BadRequestError({
message: "Failed to create new member via SAML due to member limit reached. Upgrade plan to add more members."
});
}
if (plan?.slug !== "enterprise" && plan?.identityLimit && plan.identitiesUsed >= plan.identityLimit) {
// limit imposed on number of identities allowed / number of identities used exceeds the number of identities allowed
throw new BadRequestError({

View File

@ -55,26 +55,6 @@ export const secretApprovalPolicyServiceFactory = ({
licenseService,
secretApprovalRequestDAL
}: TSecretApprovalPolicyServiceFactoryDep) => {
const $policyExists = async ({
envId,
secretPath,
policyId
}: {
envId: string;
secretPath: string;
policyId?: string;
}) => {
const policy = await secretApprovalPolicyDAL
.findOne({
envId,
secretPath,
deletedAt: null
})
.catch(() => null);
return policyId ? policy && policy.id !== policyId : Boolean(policy);
};
const createSecretApprovalPolicy = async ({
name,
actor,
@ -126,17 +106,10 @@ export const secretApprovalPolicyServiceFactory = ({
}
const env = await projectEnvDAL.findOne({ slug: environment, projectId });
if (!env) {
if (!env)
throw new NotFoundError({
message: `Environment with slug '${environment}' not found in project with ID ${projectId}`
});
}
if (await $policyExists({ envId: env.id, secretPath })) {
throw new BadRequestError({
message: `A policy for secret path '${secretPath}' already exists in environment '${environment}'`
});
}
let groupBypassers: string[] = [];
let bypasserUserIds: string[] = [];
@ -287,18 +260,6 @@ export const secretApprovalPolicyServiceFactory = ({
});
}
if (
await $policyExists({
envId: secretApprovalPolicy.envId,
secretPath: secretPath || secretApprovalPolicy.secretPath,
policyId: secretApprovalPolicy.id
})
) {
throw new BadRequestError({
message: `A policy for secret path '${secretPath}' already exists in environment '${secretApprovalPolicy.environment.slug}'`
});
}
const { permission } = await permissionService.getProjectPermission({
actor,
actorId,

View File

@ -4,7 +4,7 @@ import { ApproverType, BypasserType } from "../access-approval-policy/access-app
export type TCreateSapDTO = {
approvals: number;
secretPath: string;
secretPath?: string | null;
environment: string;
approvers: ({ type: ApproverType.Group; id: string } | { type: ApproverType.User; id?: string; username?: string })[];
bypassers?: (
@ -20,7 +20,7 @@ export type TCreateSapDTO = {
export type TUpdateSapDTO = {
secretPolicyId: string;
approvals?: number;
secretPath?: string;
secretPath?: string | null;
approvers: ({ type: ApproverType.Group; id: string } | { type: ApproverType.User; id?: string; username?: string })[];
bypassers?: (
| { type: BypasserType.Group; id: string }

View File

@ -45,7 +45,7 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
`${TableName.SecretApprovalRequest}.statusChangedByUserId`,
`statusChangedByUser.id`
)
.leftJoin<TUsers>(
.join<TUsers>(
db(TableName.Users).as("committerUser"),
`${TableName.SecretApprovalRequest}.committerUserId`,
`committerUser.id`
@ -173,15 +173,13 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
username: el.statusChangedByUserUsername
}
: undefined,
committerUser: el.committerUserId
? {
userId: el.committerUserId,
email: el.committerUserEmail,
firstName: el.committerUserFirstName,
lastName: el.committerUserLastName,
username: el.committerUserUsername
}
: null,
committerUser: {
userId: el.committerUserId,
email: el.committerUserEmail,
firstName: el.committerUserFirstName,
lastName: el.committerUserLastName,
username: el.committerUserUsername
},
policy: {
id: el.policyId,
name: el.policyName,
@ -379,7 +377,7 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
`${TableName.SecretApprovalPolicyBypasser}.bypasserGroupId`,
`bypasserUserGroupMembership.groupId`
)
.leftJoin<TUsers>(
.join<TUsers>(
db(TableName.Users).as("committerUser"),
`${TableName.SecretApprovalRequest}.committerUserId`,
`committerUser.id`
@ -490,15 +488,13 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
enforcementLevel: el.policyEnforcementLevel,
allowedSelfApprovals: el.policyAllowedSelfApprovals
},
committerUser: el.committerUserId
? {
userId: el.committerUserId,
email: el.committerUserEmail,
firstName: el.committerUserFirstName,
lastName: el.committerUserLastName,
username: el.committerUserUsername
}
: null
committerUser: {
userId: el.committerUserId,
email: el.committerUserEmail,
firstName: el.committerUserFirstName,
lastName: el.committerUserLastName,
username: el.committerUserUsername
}
}),
childrenMapper: [
{
@ -585,7 +581,7 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
`${TableName.SecretApprovalPolicyBypasser}.bypasserGroupId`,
`bypasserUserGroupMembership.groupId`
)
.leftJoin<TUsers>(
.join<TUsers>(
db(TableName.Users).as("committerUser"),
`${TableName.SecretApprovalRequest}.committerUserId`,
`committerUser.id`
@ -697,15 +693,13 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
enforcementLevel: el.policyEnforcementLevel,
allowedSelfApprovals: el.policyAllowedSelfApprovals
},
committerUser: el.committerUserId
? {
userId: el.committerUserId,
email: el.committerUserEmail,
firstName: el.committerUserFirstName,
lastName: el.committerUserLastName,
username: el.committerUserUsername
}
: null
committerUser: {
userId: el.committerUserId,
email: el.committerUserEmail,
firstName: el.committerUserFirstName,
lastName: el.committerUserLastName,
username: el.committerUserUsername
}
}),
childrenMapper: [
{

View File

@ -1320,7 +1320,7 @@ export const secretApprovalRequestServiceFactory = ({
});
const env = await projectEnvDAL.findOne({ id: policy.envId });
const user = await userDAL.findById(actorId);
const user = await userDAL.findById(secretApprovalRequest.committerUserId);
await triggerWorkflowIntegrationNotification({
input: {
@ -1657,7 +1657,7 @@ export const secretApprovalRequestServiceFactory = ({
return { ...doc, commits: approvalCommits };
});
const user = await userDAL.findById(actorId);
const user = await userDAL.findById(secretApprovalRequest.committerUserId);
const env = await projectEnvDAL.findOne({ id: policy.envId });
await triggerWorkflowIntegrationNotification({

View File

@ -37,8 +37,7 @@ import {
TQueueSecretScanningDataSourceFullScan,
TQueueSecretScanningResourceDiffScan,
TQueueSecretScanningSendNotification,
TSecretScanningDataSourceWithConnection,
TSecretScanningFinding
TSecretScanningDataSourceWithConnection
} from "./secret-scanning-v2-types";
type TSecretRotationV2QueueServiceFactoryDep = {
@ -460,16 +459,13 @@ export const secretScanningV2QueueServiceFactory = async ({
const newFindings = allFindings.filter((finding) => finding.scanId === scanId);
if (newFindings.length) {
const finding = newFindings[0] as TSecretScanningFinding;
await queueService.queuePg(QueueJobs.SecretScanningV2SendNotification, {
status: SecretScanningScanStatus.Completed,
resourceName: resource.name,
isDiffScan: true,
dataSource,
numberOfSecrets: newFindings.length,
scanId,
authorName: finding?.details?.author,
authorEmail: finding?.details?.email
scanId
});
}
@ -586,8 +582,8 @@ export const secretScanningV2QueueServiceFactory = async ({
substitutions:
payload.status === SecretScanningScanStatus.Completed
? {
authorName: payload.authorName,
authorEmail: payload.authorEmail,
authorName: "Jim",
authorEmail: "jim@infisical.com",
resourceName,
numberOfSecrets: payload.numberOfSecrets,
isDiffScan: payload.isDiffScan,

View File

@ -119,14 +119,7 @@ export type TQueueSecretScanningSendNotification = {
resourceName: string;
} & (
| { status: SecretScanningScanStatus.Failed; errorMessage: string }
| {
status: SecretScanningScanStatus.Completed;
numberOfSecrets: number;
scanId: string;
isDiffScan: boolean;
authorName?: string;
authorEmail?: string;
}
| { status: SecretScanningScanStatus.Completed; numberOfSecrets: number; scanId: string; isDiffScan: boolean }
);
export type TCloneRepository = {

View File

@ -2472,9 +2472,6 @@ export const SecretSyncs = {
projectName: "The name of the Cloudflare Pages project to sync secrets to.",
environment: "The environment of the Cloudflare Pages project to sync secrets to."
},
CLOUDFLARE_WORKERS: {
scriptId: "The ID of the Cloudflare Workers script to sync secrets to."
},
ZABBIX: {
scope: "The Zabbix scope that secrets should be synced to.",
hostId: "The ID of the Zabbix host to sync secrets to.",

View File

@ -28,7 +28,6 @@ const databaseReadReplicaSchema = z
const envSchema = z
.object({
INFISICAL_PLATFORM_VERSION: zpStr(z.string().optional()),
KUBERNETES_AUTO_FETCH_SERVICE_ACCOUNT_TOKEN: zodStrBool.default("false"),
PORT: z.coerce.number().default(IS_PACKAGED ? 8080 : 4000),
DISABLE_SECRET_SCANNING: z
.enum(["true", "false"])
@ -374,19 +373,6 @@ export const overwriteSchema: {
fields: { key: keyof TEnvConfig; description?: string }[];
};
} = {
aws: {
name: "AWS",
fields: [
{
key: "INF_APP_CONNECTION_AWS_ACCESS_KEY_ID",
description: "The Access Key ID of your AWS account."
},
{
key: "INF_APP_CONNECTION_AWS_SECRET_ACCESS_KEY",
description: "The Client Secret of your AWS application."
}
]
},
azure: {
name: "Azure",
fields: [
@ -400,79 +386,16 @@ export const overwriteSchema: {
}
]
},
gcp: {
name: "GCP",
google_sso: {
name: "Google SSO",
fields: [
{
key: "INF_APP_CONNECTION_GCP_SERVICE_ACCOUNT_CREDENTIAL",
description: "The GCP Service Account JSON credentials."
}
]
},
github_app: {
name: "GitHub App",
fields: [
{
key: "INF_APP_CONNECTION_GITHUB_APP_CLIENT_ID",
description: "The Client ID of your GitHub application."
key: "CLIENT_ID_GOOGLE_LOGIN",
description: "The Client ID of your GCP OAuth2 application."
},
{
key: "INF_APP_CONNECTION_GITHUB_APP_CLIENT_SECRET",
description: "The Client Secret of your GitHub application."
},
{
key: "INF_APP_CONNECTION_GITHUB_APP_SLUG",
description: "The Slug of your GitHub application. This is the one found in the URL."
},
{
key: "INF_APP_CONNECTION_GITHUB_APP_ID",
description: "The App ID of your GitHub application."
},
{
key: "INF_APP_CONNECTION_GITHUB_APP_PRIVATE_KEY",
description: "The Private Key of your GitHub application."
}
]
},
github_oauth: {
name: "GitHub OAuth",
fields: [
{
key: "INF_APP_CONNECTION_GITHUB_OAUTH_CLIENT_ID",
description: "The Client ID of your GitHub OAuth application."
},
{
key: "INF_APP_CONNECTION_GITHUB_OAUTH_CLIENT_SECRET",
description: "The Client Secret of your GitHub OAuth application."
}
]
},
github_radar_app: {
name: "GitHub Radar App",
fields: [
{
key: "INF_APP_CONNECTION_GITHUB_RADAR_APP_CLIENT_ID",
description: "The Client ID of your GitHub application."
},
{
key: "INF_APP_CONNECTION_GITHUB_RADAR_APP_CLIENT_SECRET",
description: "The Client Secret of your GitHub application."
},
{
key: "INF_APP_CONNECTION_GITHUB_RADAR_APP_SLUG",
description: "The Slug of your GitHub application. This is the one found in the URL."
},
{
key: "INF_APP_CONNECTION_GITHUB_RADAR_APP_ID",
description: "The App ID of your GitHub application."
},
{
key: "INF_APP_CONNECTION_GITHUB_RADAR_APP_PRIVATE_KEY",
description: "The Private Key of your GitHub application."
},
{
key: "INF_APP_CONNECTION_GITHUB_RADAR_APP_WEBHOOK_SECRET",
description: "The Webhook Secret of your GitHub application."
key: "CLIENT_SECRET_GOOGLE_LOGIN",
description: "The Client Secret of your GCP OAuth2 application."
}
]
},
@ -489,19 +412,6 @@ export const overwriteSchema: {
}
]
},
gitlab_oauth: {
name: "GitLab OAuth",
fields: [
{
key: "INF_APP_CONNECTION_GITLAB_OAUTH_CLIENT_ID",
description: "The Client ID of your GitLab OAuth application."
},
{
key: "INF_APP_CONNECTION_GITLAB_OAUTH_CLIENT_SECRET",
description: "The Client Secret of your GitLab OAuth application."
}
]
},
gitlab_sso: {
name: "GitLab SSO",
fields: [
@ -519,19 +429,6 @@ export const overwriteSchema: {
"The URL of your self-hosted instance of GitLab where the OAuth application is registered. If no URL is passed in, this will default to https://gitlab.com."
}
]
},
google_sso: {
name: "Google SSO",
fields: [
{
key: "CLIENT_ID_GOOGLE_LOGIN",
description: "The Client ID of your GCP OAuth2 application."
},
{
key: "CLIENT_SECRET_GOOGLE_LOGIN",
description: "The Client Secret of your GCP OAuth2 application."
}
]
}
};

View File

@ -49,8 +49,7 @@ export const registerAdminRouter = async (server: FastifyZodProvider) => {
defaultAuthOrgSlug: z.string().nullable(),
defaultAuthOrgAuthEnforced: z.boolean().nullish(),
defaultAuthOrgAuthMethod: z.string().nullish(),
isSecretScanningDisabled: z.boolean(),
kubernetesAutoFetchServiceAccountToken: z.boolean()
isSecretScanningDisabled: z.boolean()
})
})
}
@ -62,8 +61,7 @@ export const registerAdminRouter = async (server: FastifyZodProvider) => {
config: {
...config,
isMigrationModeOn: serverEnvs.MAINTENANCE_MODE,
isSecretScanningDisabled: serverEnvs.DISABLE_SECRET_SCANNING,
kubernetesAutoFetchServiceAccountToken: serverEnvs.KUBERNETES_AUTO_FETCH_SERVICE_ACCOUNT_TOKEN
isSecretScanningDisabled: serverEnvs.DISABLE_SECRET_SCANNING
}
};
}

View File

@ -50,32 +50,4 @@ export const registerCloudflareConnectionRouter = async (server: FastifyZodProvi
return projects;
}
});
server.route({
method: "GET",
url: `/:connectionId/cloudflare-workers-scripts`,
config: {
rateLimit: readLimit
},
schema: {
params: z.object({
connectionId: z.string().uuid()
}),
response: {
200: z
.object({
id: z.string()
})
.array()
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const { connectionId } = req.params;
const projects = await server.services.appConnection.cloudflare.listWorkersScripts(connectionId, req.permission);
return projects;
}
});
};

View File

@ -1,17 +0,0 @@
import {
CloudflareWorkersSyncSchema,
CreateCloudflareWorkersSyncSchema,
UpdateCloudflareWorkersSyncSchema
} from "@app/services/secret-sync/cloudflare-workers/cloudflare-workers-schemas";
import { SecretSync } from "@app/services/secret-sync/secret-sync-enums";
import { registerSyncSecretsEndpoints } from "./secret-sync-endpoints";
export const registerCloudflareWorkersSyncRouter = async (server: FastifyZodProvider) =>
registerSyncSecretsEndpoints({
destination: SecretSync.CloudflareWorkers,
server,
responseSchema: CloudflareWorkersSyncSchema,
createSchema: CreateCloudflareWorkersSyncSchema,
updateSchema: UpdateCloudflareWorkersSyncSchema
});

View File

@ -9,7 +9,6 @@ import { registerAzureDevOpsSyncRouter } from "./azure-devops-sync-router";
import { registerAzureKeyVaultSyncRouter } from "./azure-key-vault-sync-router";
import { registerCamundaSyncRouter } from "./camunda-sync-router";
import { registerCloudflarePagesSyncRouter } from "./cloudflare-pages-sync-router";
import { registerCloudflareWorkersSyncRouter } from "./cloudflare-workers-sync-router";
import { registerDatabricksSyncRouter } from "./databricks-sync-router";
import { registerFlyioSyncRouter } from "./flyio-sync-router";
import { registerGcpSyncRouter } from "./gcp-sync-router";
@ -51,8 +50,6 @@ export const SECRET_SYNC_REGISTER_ROUTER_MAP: Record<SecretSync, (server: Fastif
[SecretSync.Flyio]: registerFlyioSyncRouter,
[SecretSync.GitLab]: registerGitLabSyncRouter,
[SecretSync.CloudflarePages]: registerCloudflarePagesSyncRouter,
[SecretSync.CloudflareWorkers]: registerCloudflareWorkersSyncRouter,
[SecretSync.Zabbix]: registerZabbixSyncRouter,
[SecretSync.Railway]: registerRailwaySyncRouter
};

View File

@ -26,10 +26,6 @@ import {
CloudflarePagesSyncListItemSchema,
CloudflarePagesSyncSchema
} from "@app/services/secret-sync/cloudflare-pages/cloudflare-pages-schema";
import {
CloudflareWorkersSyncListItemSchema,
CloudflareWorkersSyncSchema
} from "@app/services/secret-sync/cloudflare-workers/cloudflare-workers-schemas";
import { DatabricksSyncListItemSchema, DatabricksSyncSchema } from "@app/services/secret-sync/databricks";
import { FlyioSyncListItemSchema, FlyioSyncSchema } from "@app/services/secret-sync/flyio";
import { GcpSyncListItemSchema, GcpSyncSchema } from "@app/services/secret-sync/gcp";
@ -69,8 +65,6 @@ const SecretSyncSchema = z.discriminatedUnion("destination", [
FlyioSyncSchema,
GitLabSyncSchema,
CloudflarePagesSyncSchema,
CloudflareWorkersSyncSchema,
ZabbixSyncSchema,
RailwaySyncSchema
]);
@ -98,8 +92,6 @@ const SecretSyncOptionsSchema = z.discriminatedUnion("destination", [
FlyioSyncListItemSchema,
GitLabSyncListItemSchema,
CloudflarePagesSyncListItemSchema,
CloudflareWorkersSyncListItemSchema,
ZabbixSyncListItemSchema,
RailwaySyncListItemSchema
]);

View File

@ -9,8 +9,7 @@ import { CloudflareConnectionMethod } from "./cloudflare-connection-enum";
import {
TCloudflareConnection,
TCloudflareConnectionConfig,
TCloudflarePagesProject,
TCloudflareWorkersScript
TCloudflarePagesProject
} from "./cloudflare-connection-types";
export const getCloudflareConnectionListItem = () => {
@ -44,28 +43,6 @@ export const listCloudflarePagesProjects = async (
}));
};
export const listCloudflareWorkersScripts = async (
appConnection: TCloudflareConnection
): Promise<TCloudflareWorkersScript[]> => {
const {
credentials: { apiToken, accountId }
} = appConnection;
const { data } = await request.get<{ result: { id: string }[] }>(
`${IntegrationUrls.CLOUDFLARE_API_URL}/client/v4/accounts/${accountId}/workers/scripts`,
{
headers: {
Authorization: `Bearer ${apiToken}`,
Accept: "application/json"
}
}
);
return data.result.map((a) => ({
id: a.id
}));
};
export const validateCloudflareConnectionCredentials = async (config: TCloudflareConnectionConfig) => {
const { apiToken, accountId } = config.credentials;

View File

@ -2,7 +2,7 @@ import { logger } from "@app/lib/logger";
import { OrgServiceActor } from "@app/lib/types";
import { AppConnection } from "../app-connection-enums";
import { listCloudflarePagesProjects, listCloudflareWorkersScripts } from "./cloudflare-connection-fns";
import { listCloudflarePagesProjects } from "./cloudflare-connection-fns";
import { TCloudflareConnection } from "./cloudflare-connection-types";
type TGetAppConnectionFunc = (
@ -19,31 +19,12 @@ export const cloudflareConnectionService = (getAppConnection: TGetAppConnectionF
return projects;
} catch (error) {
logger.error(
error,
`Failed to list Cloudflare Pages projects for Cloudflare connection [connectionId=${connectionId}]`
);
return [];
}
};
const listWorkersScripts = async (connectionId: string, actor: OrgServiceActor) => {
const appConnection = await getAppConnection(AppConnection.Cloudflare, connectionId, actor);
try {
const projects = await listCloudflareWorkersScripts(appConnection);
return projects;
} catch (error) {
logger.error(
error,
`Failed to list Cloudflare Workers scripts for Cloudflare connection [connectionId=${connectionId}]`
);
logger.error(error, "Failed to list Cloudflare Pages projects for Cloudflare connection");
return [];
}
};
return {
listPagesProjects,
listWorkersScripts
listPagesProjects
};
};

View File

@ -28,7 +28,3 @@ export type TCloudflarePagesProject = {
id: string;
name: string;
};
export type TCloudflareWorkersScript = {
id: string;
};

View File

@ -7,6 +7,7 @@ import { request } from "@app/lib/config/request";
import { BadRequestError, ForbiddenRequestError, InternalServerError } from "@app/lib/errors";
import { getAppConnectionMethodName } from "@app/services/app-connection/app-connection-fns";
import { IntegrationUrls } from "@app/services/integration-auth/integration-list";
import { getInstanceIntegrationsConfig } from "@app/services/super-admin/super-admin-service";
import { AppConnection } from "../app-connection-enums";
import { GitHubConnectionMethod } from "./github-connection-enums";
@ -14,13 +15,14 @@ import { TGitHubConnection, TGitHubConnectionConfig } from "./github-connection-
export const getGitHubConnectionListItem = () => {
const { INF_APP_CONNECTION_GITHUB_OAUTH_CLIENT_ID, INF_APP_CONNECTION_GITHUB_APP_SLUG } = getConfig();
const { gitHubAppConnection } = getInstanceIntegrationsConfig();
return {
name: "GitHub" as const,
app: AppConnection.GitHub as const,
methods: Object.values(GitHubConnectionMethod) as [GitHubConnectionMethod.App, GitHubConnectionMethod.OAuth],
oauthClientId: INF_APP_CONNECTION_GITHUB_OAUTH_CLIENT_ID,
appClientSlug: INF_APP_CONNECTION_GITHUB_APP_SLUG
appClientSlug: gitHubAppConnection.appSlug || INF_APP_CONNECTION_GITHUB_APP_SLUG
};
};
@ -30,9 +32,10 @@ export const getGitHubClient = (appConnection: TGitHubConnection) => {
const { method, credentials } = appConnection;
let client: Octokit;
const { gitHubAppConnection } = getInstanceIntegrationsConfig();
const appId = appCfg.INF_APP_CONNECTION_GITHUB_APP_ID;
const appPrivateKey = appCfg.INF_APP_CONNECTION_GITHUB_APP_PRIVATE_KEY;
const appId = gitHubAppConnection.appId || appCfg.INF_APP_CONNECTION_GITHUB_APP_ID;
const appPrivateKey = gitHubAppConnection.privateKey || appCfg.INF_APP_CONNECTION_GITHUB_APP_PRIVATE_KEY;
switch (method) {
case GitHubConnectionMethod.App:
@ -154,6 +157,8 @@ type TokenRespData = {
export const validateGitHubConnectionCredentials = async (config: TGitHubConnectionConfig) => {
const { credentials, method } = config;
const { gitHubAppConnection } = getInstanceIntegrationsConfig();
const {
INF_APP_CONNECTION_GITHUB_OAUTH_CLIENT_ID,
INF_APP_CONNECTION_GITHUB_OAUTH_CLIENT_SECRET,
@ -165,8 +170,8 @@ export const validateGitHubConnectionCredentials = async (config: TGitHubConnect
const { clientId, clientSecret } =
method === GitHubConnectionMethod.App
? {
clientId: INF_APP_CONNECTION_GITHUB_APP_CLIENT_ID,
clientSecret: INF_APP_CONNECTION_GITHUB_APP_CLIENT_SECRET
clientId: gitHubAppConnection.clientId || INF_APP_CONNECTION_GITHUB_APP_CLIENT_ID,
clientSecret: gitHubAppConnection.clientSecret || INF_APP_CONNECTION_GITHUB_APP_CLIENT_SECRET
}
: // oauth
{

View File

@ -912,6 +912,14 @@ export const orgServiceFactory = ({
// if there exist no org membership we set is as given by the request
if (!inviteeOrgMembership) {
if (plan?.slug !== "enterprise" && plan?.memberLimit && plan.membersUsed >= plan.memberLimit) {
// limit imposed on number of members allowed / number of members used exceeds the number of members allowed
throw new BadRequestError({
name: "InviteUser",
message: "Failed to invite member due to member limit reached. Upgrade plan to invite more members."
});
}
if (plan?.slug !== "enterprise" && plan?.identityLimit && plan.identitiesUsed >= plan.identityLimit) {
// limit imposed on number of identities allowed / number of identities used exceeds the number of identities allowed
throw new BadRequestError({

View File

@ -214,7 +214,7 @@ export const secretFolderServiceFactory = ({
}
},
message: "Folder created",
folderId: parentFolder.id,
folderId: doc.id,
changes: [
{
type: CommitType.ADD,

View File

@ -1,10 +0,0 @@
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
import { SecretSync } from "@app/services/secret-sync/secret-sync-enums";
import { TSecretSyncListItem } from "@app/services/secret-sync/secret-sync-types";
export const CLOUDFLARE_WORKERS_SYNC_LIST_OPTION: TSecretSyncListItem = {
name: "Cloudflare Workers",
destination: SecretSync.CloudflareWorkers,
connection: AppConnection.Cloudflare,
canImportSecrets: false
};

View File

@ -1,121 +0,0 @@
import { request } from "@app/lib/config/request";
import { applyJitter } from "@app/lib/dates";
import { delay as delayMs } from "@app/lib/delay";
import { IntegrationUrls } from "@app/services/integration-auth/integration-list";
import { matchesSchema } from "@app/services/secret-sync/secret-sync-fns";
import { TSecretMap } from "@app/services/secret-sync/secret-sync-types";
import { SECRET_SYNC_NAME_MAP } from "../secret-sync-maps";
import { TCloudflareWorkersSyncWithCredentials } from "./cloudflare-workers-types";
const getSecretKeys = async (secretSync: TCloudflareWorkersSyncWithCredentials): Promise<string[]> => {
const {
destinationConfig,
connection: {
credentials: { apiToken, accountId }
}
} = secretSync;
const { data } = await request.get<{
result: Array<{ name: string }>;
}>(
`${IntegrationUrls.CLOUDFLARE_WORKERS_API_URL}/client/v4/accounts/${accountId}/workers/scripts/${destinationConfig.scriptId}/secrets`,
{
headers: {
Authorization: `Bearer ${apiToken}`,
Accept: "application/json"
}
}
);
return data.result.map((s) => s.name);
};
export const CloudflareWorkersSyncFns = {
syncSecrets: async (secretSync: TCloudflareWorkersSyncWithCredentials, secretMap: TSecretMap) => {
const {
connection: {
credentials: { apiToken, accountId }
},
destinationConfig: { scriptId }
} = secretSync;
const existingSecretNames = await getSecretKeys(secretSync);
const secretMapKeys = new Set(Object.keys(secretMap));
for await (const [key, val] of Object.entries(secretMap)) {
await delayMs(Math.max(0, applyJitter(100, 200)));
await request.put(
`${IntegrationUrls.CLOUDFLARE_WORKERS_API_URL}/client/v4/accounts/${accountId}/workers/scripts/${scriptId}/secrets`,
{ name: key, text: val.value, type: "secret_text" },
{
headers: {
Authorization: `Bearer ${apiToken}`,
"Content-Type": "application/json"
}
}
);
}
if (!secretSync.syncOptions.disableSecretDeletion) {
const secretsToDelete = existingSecretNames.filter((existingKey) => {
const isManagedBySchema = matchesSchema(
existingKey,
secretSync.environment?.slug || "",
secretSync.syncOptions.keySchema
);
const isInNewSecretMap = secretMapKeys.has(existingKey);
return !isInNewSecretMap && isManagedBySchema;
});
for await (const key of secretsToDelete) {
await delayMs(Math.max(0, applyJitter(100, 200)));
await request.delete(
`${IntegrationUrls.CLOUDFLARE_WORKERS_API_URL}/client/v4/accounts/${accountId}/workers/scripts/${scriptId}/secrets/${key}`,
{
headers: {
Authorization: `Bearer ${apiToken}`
}
}
);
}
}
},
getSecrets: async (secretSync: TCloudflareWorkersSyncWithCredentials): Promise<TSecretMap> => {
throw new Error(`${SECRET_SYNC_NAME_MAP[secretSync.destination]} does not support importing secrets.`);
},
removeSecrets: async (secretSync: TCloudflareWorkersSyncWithCredentials, secretMap: TSecretMap) => {
const {
connection: {
credentials: { apiToken, accountId }
},
destinationConfig: { scriptId }
} = secretSync;
const existingSecretNames = await getSecretKeys(secretSync);
const secretMapToRemoveKeys = new Set(Object.keys(secretMap));
for await (const existingKey of existingSecretNames) {
const isManagedBySchema = matchesSchema(
existingKey,
secretSync.environment?.slug || "",
secretSync.syncOptions.keySchema
);
const isInSecretMapToRemove = secretMapToRemoveKeys.has(existingKey);
if (isInSecretMapToRemove && isManagedBySchema) {
await delayMs(Math.max(0, applyJitter(100, 200)));
await request.delete(
`${IntegrationUrls.CLOUDFLARE_WORKERS_API_URL}/client/v4/accounts/${accountId}/workers/scripts/${scriptId}/secrets/${existingKey}`,
{
headers: {
Authorization: `Bearer ${apiToken}`
}
}
);
}
}
}
};

View File

@ -1,55 +0,0 @@
import RE2 from "re2";
import { z } from "zod";
import { SecretSyncs } from "@app/lib/api-docs";
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
import { SecretSync } from "@app/services/secret-sync/secret-sync-enums";
import {
BaseSecretSyncSchema,
GenericCreateSecretSyncFieldsSchema,
GenericUpdateSecretSyncFieldsSchema
} from "@app/services/secret-sync/secret-sync-schemas";
import { TSyncOptionsConfig } from "@app/services/secret-sync/secret-sync-types";
const CloudflareWorkersSyncDestinationConfigSchema = z.object({
scriptId: z
.string()
.min(1, "Script ID is required")
.max(64)
.refine((val) => {
const re2 = new RE2(/^[a-z0-9]([a-z0-9-]*[a-z0-9])?$/);
return re2.test(val);
}, "Invalid script ID format")
.describe(SecretSyncs.DESTINATION_CONFIG.CLOUDFLARE_WORKERS.scriptId)
});
const CloudflareWorkersSyncOptionsConfig: TSyncOptionsConfig = { canImportSecrets: false };
export const CloudflareWorkersSyncSchema = BaseSecretSyncSchema(
SecretSync.CloudflareWorkers,
CloudflareWorkersSyncOptionsConfig
).extend({
destination: z.literal(SecretSync.CloudflareWorkers),
destinationConfig: CloudflareWorkersSyncDestinationConfigSchema
});
export const CreateCloudflareWorkersSyncSchema = GenericCreateSecretSyncFieldsSchema(
SecretSync.CloudflareWorkers,
CloudflareWorkersSyncOptionsConfig
).extend({
destinationConfig: CloudflareWorkersSyncDestinationConfigSchema
});
export const UpdateCloudflareWorkersSyncSchema = GenericUpdateSecretSyncFieldsSchema(
SecretSync.CloudflareWorkers,
CloudflareWorkersSyncOptionsConfig
).extend({
destinationConfig: CloudflareWorkersSyncDestinationConfigSchema.optional()
});
export const CloudflareWorkersSyncListItemSchema = z.object({
name: z.literal("Cloudflare Workers"),
connection: z.literal(AppConnection.Cloudflare),
destination: z.literal(SecretSync.CloudflareWorkers),
canImportSecrets: z.literal(false)
});

View File

@ -1,19 +0,0 @@
import z from "zod";
import { TCloudflareConnection } from "@app/services/app-connection/cloudflare/cloudflare-connection-types";
import {
CloudflareWorkersSyncListItemSchema,
CloudflareWorkersSyncSchema,
CreateCloudflareWorkersSyncSchema
} from "./cloudflare-workers-schemas";
export type TCloudflareWorkersSyncListItem = z.infer<typeof CloudflareWorkersSyncListItemSchema>;
export type TCloudflareWorkersSync = z.infer<typeof CloudflareWorkersSyncSchema>;
export type TCloudflareWorkersSyncInput = z.infer<typeof CreateCloudflareWorkersSyncSchema>;
export type TCloudflareWorkersSyncWithCredentials = TCloudflareWorkersSync & {
connection: TCloudflareConnection;
};

View File

@ -1,4 +0,0 @@
export * from "./cloudflare-workers-constants";
export * from "./cloudflare-workers-fns";
export * from "./cloudflare-workers-schemas";
export * from "./cloudflare-workers-types";

View File

@ -21,8 +21,6 @@ export enum SecretSync {
Flyio = "flyio",
GitLab = "gitlab",
CloudflarePages = "cloudflare-pages",
CloudflareWorkers = "cloudflare-workers",
Zabbix = "zabbix",
Railway = "railway"
}

View File

@ -31,7 +31,6 @@ import { AZURE_KEY_VAULT_SYNC_LIST_OPTION, azureKeyVaultSyncFactory } from "./az
import { CAMUNDA_SYNC_LIST_OPTION, camundaSyncFactory } from "./camunda";
import { CLOUDFLARE_PAGES_SYNC_LIST_OPTION } from "./cloudflare-pages/cloudflare-pages-constants";
import { CloudflarePagesSyncFns } from "./cloudflare-pages/cloudflare-pages-fns";
import { CLOUDFLARE_WORKERS_SYNC_LIST_OPTION, CloudflareWorkersSyncFns } from "./cloudflare-workers";
import { FLYIO_SYNC_LIST_OPTION, FlyioSyncFns } from "./flyio";
import { GCP_SYNC_LIST_OPTION } from "./gcp";
import { GcpSyncFns } from "./gcp/gcp-sync-fns";
@ -73,8 +72,6 @@ const SECRET_SYNC_LIST_OPTIONS: Record<SecretSync, TSecretSyncListItem> = {
[SecretSync.Flyio]: FLYIO_SYNC_LIST_OPTION,
[SecretSync.GitLab]: GITLAB_SYNC_LIST_OPTION,
[SecretSync.CloudflarePages]: CLOUDFLARE_PAGES_SYNC_LIST_OPTION,
[SecretSync.CloudflareWorkers]: CLOUDFLARE_WORKERS_SYNC_LIST_OPTION,
[SecretSync.Zabbix]: ZABBIX_SYNC_LIST_OPTION,
[SecretSync.Railway]: RAILWAY_SYNC_LIST_OPTION
};
@ -244,8 +241,6 @@ export const SecretSyncFns = {
return GitLabSyncFns.syncSecrets(secretSync, schemaSecretMap, { appConnectionDAL, kmsService });
case SecretSync.CloudflarePages:
return CloudflarePagesSyncFns.syncSecrets(secretSync, schemaSecretMap);
case SecretSync.CloudflareWorkers:
return CloudflareWorkersSyncFns.syncSecrets(secretSync, schemaSecretMap);
case SecretSync.Zabbix:
return ZabbixSyncFns.syncSecrets(secretSync, schemaSecretMap);
case SecretSync.Railway:
@ -342,9 +337,6 @@ export const SecretSyncFns = {
case SecretSync.CloudflarePages:
secretMap = await CloudflarePagesSyncFns.getSecrets(secretSync);
break;
case SecretSync.CloudflareWorkers:
secretMap = await CloudflareWorkersSyncFns.getSecrets(secretSync);
break;
case SecretSync.Zabbix:
secretMap = await ZabbixSyncFns.getSecrets(secretSync);
break;
@ -428,8 +420,6 @@ export const SecretSyncFns = {
return GitLabSyncFns.removeSecrets(secretSync, schemaSecretMap, { appConnectionDAL, kmsService });
case SecretSync.CloudflarePages:
return CloudflarePagesSyncFns.removeSecrets(secretSync, schemaSecretMap);
case SecretSync.CloudflareWorkers:
return CloudflareWorkersSyncFns.removeSecrets(secretSync, schemaSecretMap);
case SecretSync.Zabbix:
return ZabbixSyncFns.removeSecrets(secretSync, schemaSecretMap);
case SecretSync.Railway:

View File

@ -24,8 +24,6 @@ export const SECRET_SYNC_NAME_MAP: Record<SecretSync, string> = {
[SecretSync.Flyio]: "Fly.io",
[SecretSync.GitLab]: "GitLab",
[SecretSync.CloudflarePages]: "Cloudflare Pages",
[SecretSync.CloudflareWorkers]: "Cloudflare Workers",
[SecretSync.Zabbix]: "Zabbix",
[SecretSync.Railway]: "Railway"
};
@ -53,8 +51,6 @@ export const SECRET_SYNC_CONNECTION_MAP: Record<SecretSync, AppConnection> = {
[SecretSync.Flyio]: AppConnection.Flyio,
[SecretSync.GitLab]: AppConnection.GitLab,
[SecretSync.CloudflarePages]: AppConnection.Cloudflare,
[SecretSync.CloudflareWorkers]: AppConnection.Cloudflare,
[SecretSync.Zabbix]: AppConnection.Zabbix,
[SecretSync.Railway]: AppConnection.Railway
};
@ -82,8 +78,6 @@ export const SECRET_SYNC_PLAN_MAP: Record<SecretSync, SecretSyncPlanType> = {
[SecretSync.Flyio]: SecretSyncPlanType.Regular,
[SecretSync.GitLab]: SecretSyncPlanType.Regular,
[SecretSync.CloudflarePages]: SecretSyncPlanType.Regular,
[SecretSync.CloudflareWorkers]: SecretSyncPlanType.Regular,
[SecretSync.Zabbix]: SecretSyncPlanType.Regular,
[SecretSync.Railway]: SecretSyncPlanType.Regular
};

View File

@ -78,12 +78,6 @@ import {
TCloudflarePagesSyncListItem,
TCloudflarePagesSyncWithCredentials
} from "./cloudflare-pages/cloudflare-pages-types";
import {
TCloudflareWorkersSync,
TCloudflareWorkersSyncInput,
TCloudflareWorkersSyncListItem,
TCloudflareWorkersSyncWithCredentials
} from "./cloudflare-workers";
import { TFlyioSync, TFlyioSyncInput, TFlyioSyncListItem, TFlyioSyncWithCredentials } from "./flyio/flyio-sync-types";
import { TGcpSync, TGcpSyncInput, TGcpSyncListItem, TGcpSyncWithCredentials } from "./gcp";
import { TGitLabSync, TGitLabSyncInput, TGitLabSyncListItem, TGitLabSyncWithCredentials } from "./gitlab";
@ -150,7 +144,6 @@ export type TSecretSync =
| TFlyioSync
| TGitLabSync
| TCloudflarePagesSync
| TCloudflareWorkersSync
| TZabbixSync
| TRailwaySync;
@ -177,7 +170,6 @@ export type TSecretSyncWithCredentials =
| TFlyioSyncWithCredentials
| TGitLabSyncWithCredentials
| TCloudflarePagesSyncWithCredentials
| TCloudflareWorkersSyncWithCredentials
| TZabbixSyncWithCredentials
| TRailwaySyncWithCredentials;
@ -204,7 +196,6 @@ export type TSecretSyncInput =
| TFlyioSyncInput
| TGitLabSyncInput
| TCloudflarePagesSyncInput
| TCloudflareWorkersSyncInput
| TZabbixSyncInput
| TRailwaySyncInput;
@ -231,7 +222,6 @@ export type TSecretSyncListItem =
| TFlyioSyncListItem
| TGitLabSyncListItem
| TCloudflarePagesSyncListItem
| TCloudflareWorkersSyncListItem
| TZabbixSyncListItem
| TRailwaySyncListItem;

View File

@ -1,36 +1,6 @@
FROM node:20-alpine AS builder
WORKDIR /app
RUN npm install -g mint@4.2.13
COPY . .
# Install a local version of our OpenAPI spec
RUN apk add --no-cache wget jq && \
wget -O spec.json https://app.infisical.com/api/docs/json && \
jq '.api.openapi = "./spec.json"' docs.json > temp.json && \
mv temp.json docs.json
# Run mint dev briefly to download the web client
RUN timeout 30 mint dev || true
FROM node:20-alpine
WORKDIR /app
RUN addgroup -g 1001 -S mintuser && \
adduser -S -D -H -u 1001 -s /sbin/nologin -G mintuser mintuser && \
npm install -g mint@4.2.13
COPY --chown=mintuser:mintuser . .
COPY --from=builder --chown=mintuser:mintuser /root/.mintlify /home/mintuser/.mintlify
COPY --from=builder --chown=mintuser:mintuser /app/docs.json /app/docs.json
COPY --from=builder --chown=mintuser:mintuser /app/spec.json /app/spec.json
USER mintuser
RUN npm install -g mint
COPY . .
EXPOSE 3000
CMD ["mint", "dev"]

View File

@ -1,4 +0,0 @@
---
title: "Create"
openapi: "POST /api/v1/secret-syncs/cloudflare-workers"
---

View File

@ -1,4 +0,0 @@
---
title: "Delete"
openapi: "DELETE /api/v1/secret-syncs/cloudflare-workers/{syncId}"
---

View File

@ -1,4 +0,0 @@
---
title: "Get by ID"
openapi: "GET /api/v1/secret-syncs/cloudflare-workers/{syncId}"
---

View File

@ -1,4 +0,0 @@
---
title: "Get by Name"
openapi: "GET /api/v1/secret-syncs/cloudflare-workers/sync-name/{syncName}"
---

View File

@ -1,4 +0,0 @@
---
title: "List"
openapi: "GET /api/v1/secret-syncs/cloudflare-workers"
---

View File

@ -1,4 +0,0 @@
---
title: "Remove Secrets"
openapi: "POST /api/v1/secret-syncs/cloudflare-workers/{syncId}/remove-secrets"
---

View File

@ -1,4 +0,0 @@
---
title: "Sync Secrets"
openapi: "POST /api/v1/secret-syncs/cloudflare-workers/{syncId}/sync-secrets"
---

View File

@ -1,4 +0,0 @@
---
title: "Update"
openapi: "PATCH /api/v1/secret-syncs/cloudflare-workers/{syncId}"
---

View File

@ -4,61 +4,6 @@ title: "Changelog"
The changelog below reflects new product developments and updates on a monthly basis.
## July 2025
- Improved speed performance of audit log filtering.
- Revamped password reset flow pages.
- Added support for [Bitbucket for Secret Scanning](https://infisical.com/docs/documentation/platform/secret-scanning/bitbucket).
- Released Secret Sync for [Zabbix](https://infisical.com/docs/integrations/secret-syncs/zabbix).
## June 2025
- Released Secret Sync for [1Password](https://infisical.com/docs/integrations/secret-syncs/1password), [Heroku](https://infisical.com/docs/integrations/secret-syncs/heroku), [Fly.io](https://infisical.com/docs/integrations/secret-syncs/flyio), and [Render](https://infisical.com/docs/integrations/secret-syncs/render).
- Added support for [Kubernetes dynamic secrets](https://infisical.com/docs/documentation/platform/dynamic-secrets/kubernetes) to generate service account tokens.
- Released Secret Rotation for [MySQL](https://infisical.com/docs/documentation/platform/secret-rotation/mysql-credentials) and [OracleDB](https://infisical.com/docs/documentation/platform/secret-rotation/oracledb-credentials) as well as Dynamic Secrets for [Vertica](https://infisical.com/docs/documentation/platform/dynamic-secrets/vertica) and [GitHub App Tokens](https://infisical.com/docs/documentation/platform/dynamic-secrets/github).
- Added support for Azure Auth in ESO.
- [Kubernetes auth](https://infisical.com/docs/documentation/platform/identities/kubernetes-auth) now supports gateway as a token reviewer.
- Revamped [Infisical CLI](https://infisical.com/docs/cli/commands/login) to auto-open login link.
- Rolled out [Infisical Packer integration](https://infisical.com/docs/integrations/frameworks/packer).
- Released [AliCloud Authentication method](https://infisical.com/docs/documentation/platform/identities/alicloud-auth).
- Added support for [multi-step approval workflows](https://infisical.com/docs/documentation/platform/pr-workflows).
- Revamped UI for Access Controls, Access Tree, Policies, and Approval Workflows.
- Released [TLS Certificate Authentication method](https://infisical.com/docs/documentation/platform/identities/tls-cert-auth).
- Added ability to copy session tokens in the Infisical Dashboard.
- Expanded resource support for [Infisical Terraform Provider](https://infisical.com/docs/integrations/frameworks/terraform).
## May 2025
- Added support for [Microsoft Teams integration](https://infisical.com/docs/documentation/platform/workflow-integrations/microsoft-teams-integration).
- Released [Infisical Gateway](https://infisical.com/docs/documentation/platform/gateways/overview) for accessing private network resources from Infisical.
- Added support for [Host Groups](https://infisical.com/docs/documentation/platform/ssh/host-groups) in Infisical SSH.
- Updated the designs of all emails sent by Infisical.
- Added secret rotation support for [Azure Client](https://infisical.com/docs/documentation/platform/secret-rotation/azure-client-secret).
- Released secret sync for [HashiCorp Vault](https://infisical.com/docs/integrations/secret-syncs/hashicorp-vault).
- Made significant improvements to [Infisical Secret Scanning](https://infisical.com/docs/documentation/platform/secret-scanning/overview).
- Released [Infisical ACME Client](https://infisical.com/docs/documentation/platform/pki/acme-ca#certificates-with-acme-ca).
- [Access requests](https://infisical.com/docs/documentation/platform/access-controls/access-requests) now support "break-glass" policies.
- Updated [Point-in-time Recovery](https://infisical.com/docs/documentation/platform/pit-recovery) UI/UX.
- Redesigned [Approval Workflows and Change Requests](https://infisical.com/docs/documentation/platform/pr-workflows) user interface.
## April 2025
- Released ability to [request access to projects](https://infisical.com/docs/documentation/platform/access-controls/project-access-requests#project-access-requests).
- Updated UI for Audit Logs and Log Filtering.
- Launched [Infisical SSH V2](https://infisical.com/docs/documentation/platform/ssh/overview).
- Developed [Infisical MCP](https://github.com/Infisical/infisical-mcp-server).
- Added support for [Spotify Backstage Infisical plugin](https://infisical.com/docs/integrations/external/backstage).
- Added secret syncs for Terraform Cloud, Vercel, Windmill, TeamCity, and Camunda.
- Released [Auth0 Client Secret Rotation](https://infisical.com/docs/documentation/platform/secret-rotation/auth0-client-secret).
- Launched [Infisical C++ SDK](https://github.com/Infisical/infisical-cpp-sdk).
- Service tokens will now get expiry notifications.
- Added Infisical [Linux binary](https://infisical.com/docs/self-hosting/reference-architectures/linux-deployment-ha#linux-ha).
- Released ability to perform user impersonation.
- Added support for [LDAP password rotation](https://infisical.com/docs/documentation/platform/secret-rotation/ldap-password).
## March 2025
- Released [Infisical Gateway](https://infisical.com/docs/documentation/platform/gateways/overview) for secure access to private resources without needing direct inbound connections to private networks.

View File

@ -78,10 +78,7 @@
},
{
"group": "Infisical SSH",
"pages": [
"documentation/platform/ssh/overview",
"documentation/platform/ssh/host-groups"
]
"pages": ["documentation/platform/ssh/overview", "documentation/platform/ssh/host-groups"]
},
{
"group": "Key Management (KMS)",
@ -378,10 +375,7 @@
},
{
"group": "Architecture",
"pages": [
"internals/architecture/components",
"internals/architecture/cloud"
]
"pages": ["internals/architecture/components", "internals/architecture/cloud"]
},
"internals/security",
"internals/service-tokens"
@ -514,7 +508,6 @@
"integrations/secret-syncs/azure-key-vault",
"integrations/secret-syncs/camunda",
"integrations/secret-syncs/cloudflare-pages",
"integrations/secret-syncs/cloudflare-workers",
"integrations/secret-syncs/databricks",
"integrations/secret-syncs/flyio",
"integrations/secret-syncs/gcp-secret-manager",
@ -553,10 +546,7 @@
"integrations/cloud/gcp-secret-manager",
{
"group": "Cloudflare",
"pages": [
"integrations/cloud/cloudflare-pages",
"integrations/cloud/cloudflare-workers"
]
"pages": ["integrations/cloud/cloudflare-pages", "integrations/cloud/cloudflare-workers"]
},
"integrations/cloud/terraform-cloud",
"integrations/cloud/databricks",
@ -668,11 +658,7 @@
"cli/commands/reset",
{
"group": "infisical scan",
"pages": [
"cli/commands/scan",
"cli/commands/scan-git-changes",
"cli/commands/scan-install"
]
"pages": ["cli/commands/scan", "cli/commands/scan-git-changes", "cli/commands/scan-install"]
}
]
},
@ -996,9 +982,7 @@
"pages": [
{
"group": "Kubernetes",
"pages": [
"api-reference/endpoints/dynamic-secrets/kubernetes/create-lease"
]
"pages": ["api-reference/endpoints/dynamic-secrets/kubernetes/create-lease"]
},
"api-reference/endpoints/dynamic-secrets/create",
"api-reference/endpoints/dynamic-secrets/update",
@ -1721,19 +1705,6 @@
"api-reference/endpoints/secret-syncs/cloudflare-pages/remove-secrets"
]
},
{
"group": "Cloudflare Workers",
"pages": [
"api-reference/endpoints/secret-syncs/cloudflare-workers/list",
"api-reference/endpoints/secret-syncs/cloudflare-workers/get-by-id",
"api-reference/endpoints/secret-syncs/cloudflare-workers/get-by-name",
"api-reference/endpoints/secret-syncs/cloudflare-workers/create",
"api-reference/endpoints/secret-syncs/cloudflare-workers/update",
"api-reference/endpoints/secret-syncs/cloudflare-workers/delete",
"api-reference/endpoints/secret-syncs/cloudflare-workers/sync-secrets",
"api-reference/endpoints/secret-syncs/cloudflare-workers/remove-secrets"
]
},
{
"group": "Databricks",
"pages": [
@ -2189,7 +2160,6 @@
"sdks/languages/python",
"sdks/languages/java",
"sdks/languages/csharp",
"sdks/languages/cpp",
"sdks/languages/go",
"sdks/languages/ruby"
]

View File

@ -3,13 +3,13 @@ title: "AWS IAM"
description: "Learn how to dynamically generate AWS IAM Users."
---
The Infisical AWS IAM dynamic secret allows you to generate AWS IAM Users on demand based on a configured AWS policy. Infisical supports several authentication methods to connect to your AWS account, including assuming an IAM Role, using IAM Roles for Service Accounts (IRSA) on EKS, or static Access Keys.
The Infisical AWS IAM dynamic secret allows you to generate AWS IAM Users on demand based on configured AWS policy.
## Prerequisite
Infisical needs an AWS IAM principal (a user or a role) with the required permissions to create and manage other IAM users. This principal will be responsible for the lifecycle of the dynamically generated users.
Infisical needs an initial AWS IAM user with the required permissions to create sub IAM users. This IAM user will be responsible for managing the lifecycle of new IAM users.
<Accordion title="Required IAM Permissions">
<Accordion title="Managing AWS IAM User minimum permission policy">
```json
{
@ -235,169 +235,7 @@ Replace **\<account id\>** with your AWS account id and **\<aws-scope-path\>** w
![Provision Lease](/images/platform/dynamic-secrets/lease-values-aws-iam.png)
</Step>
</Steps>
</Tab>
<Tab title="IRSA (EKS)">
This method is recommended for self-hosted Infisical instances running on AWS EKS. It uses [IAM Roles for Service Accounts (IRSA)](https://docs.aws.amazon.com/eks/latest/userguide/iam-roles-for-service-accounts.html) to securely grant permissions to the Infisical pods without managing static credentials.
<Warning type="warning" title="IRSA Configuration Prerequisite">
In order to use IRSA, the `KUBERNETES_AUTO_FETCH_SERVICE_ACCOUNT_TOKEN` environment variable must be set to `true` for your self-hosted Infisical instance.
</Warning>
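For a self-hosted instance configured through an environment file, enabling the flag is a one-line change; the snippet below is a minimal sketch and assumes your deployment reads variables from an `.env`-style file (adapt it for Helm values or whichever mechanism your deployment uses).
```bash
# Illustrative only: enable automatic service account token fetching so the
# IRSA method can read the pod's projected token. Set this wherever your
# deployment defines environment variables for the Infisical backend.
KUBERNETES_AUTO_FETCH_SERVICE_ACCOUNT_TOKEN=true
```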
<Steps>
<Step title="Create an IAM OIDC provider for your cluster">
If you don't already have one, you need to create an IAM OIDC provider for your EKS cluster. This allows IAM to trust authentication tokens from your Kubernetes cluster.
1. Find your cluster's OIDC provider URL from the EKS console or by using the AWS CLI:
`aws eks describe-cluster --name <your-cluster-name> --query "cluster.identity.oidc.issuer" --output text`
2. Navigate to the [IAM Identity Providers](https://console.aws.amazon.com/iam/home#/providers) page in your AWS Console and create a new OpenID Connect provider with the URL and `sts.amazonaws.com` as the audience.
![Create OIDC Provider Placeholder](/images/integrations/aws/irsa-create-oidc-provider.png)
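If you prefer the command line to the console, the same provider can typically be associated in one step with `eksctl`; this is an optional alternative sketch and assumes `eksctl` is installed and authenticated against your AWS account.
```bash
# Optional CLI alternative: create and associate the IAM OIDC provider
# for the cluster in a single command (assumes eksctl is available).
eksctl utils associate-iam-oidc-provider --cluster <your-cluster-name> --approve
```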
</Step>
<Step title="Create the Managing User IAM Role for Infisical">
1. Navigate to the [Create IAM Role](https://console.aws.amazon.com/iamv2/home#/roles/create?step=selectEntities) page in your AWS Console.
2. Select **Web identity** as the **Trusted Entity Type**.
3. Choose the OIDC provider you created in the previous step.
4. For the **Audience**, select `sts.amazonaws.com`.
![IAM Role Creation for IRSA](/images/integrations/aws/irsa-iam-role-creation.png)
5. Attach the permission policy detailed in the **Prerequisite** section at the top of this page.
6. After creating the role, edit its **Trust relationship** to specify the service account Infisical is using in your cluster. This ensures only the Infisical pod can assume this role.
```json
{
"Version": "2012-10-17",
"Statement": [
{
"Effect": "Allow",
"Principal": {
"Federated": "arn:aws:iam::<ACCOUNT_ID>:oidc-provider/oidc.eks.<REGION>.amazonaws.com/id/<OIDC_ID>"
},
"Action": "sts:AssumeRoleWithWebIdentity",
"Condition": {
"StringEquals": {
"oidc.eks.<REGION>.amazonaws.com/id/<OIDC_ID>:sub": "system:serviceaccount:<K8S_NAMESPACE>:<INFISICAL_SERVICE_ACCOUNT_NAME>",
"oidc.eks.<REGION>.amazonaws.com/id/<OIDC_ID>:aud": "sts.amazonaws.com"
}
}
}
]
}
```
Replace `<ACCOUNT_ID>`, `<REGION>`, `<OIDC_ID>`, `<K8S_NAMESPACE>`, and `<INFISICAL_SERVICE_ACCOUNT_NAME>` with your specific values.
</Step>
<Step title="Annotate the Infisical Kubernetes Service Account">
For the IRSA mechanism to work, the Infisical service account in your Kubernetes cluster must be annotated with the ARN of the IAM role you just created.
Run the following command, replacing the placeholders with your values:
```bash
kubectl annotate serviceaccount -n <infisical-namespace> <infisical-service-account> \
eks.amazonaws.com/role-arn=arn:aws:iam::<account-id>:role/<iam-role-name>
```
This annotation tells the EKS Pod Identity Webhook to inject the necessary environment variables and tokens into the Infisical pod, allowing it to assume the specified IAM role.
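As a quick sanity check, you can confirm that the webhook injected the expected variables into the running pod; `AWS_ROLE_ARN` and `AWS_WEB_IDENTITY_TOKEN_FILE` are the standard variables set by the EKS webhook, and the namespace and deployment names below are placeholders.
```bash
# Verify the IRSA credentials were injected into the Infisical pod
# (both variables are added automatically by the EKS Pod Identity Webhook).
kubectl exec -n <infisical-namespace> deploy/<infisical-deployment> -- \
  env | grep -E 'AWS_ROLE_ARN|AWS_WEB_IDENTITY_TOKEN_FILE'
```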
</Step>
<Step title="Secret Overview Dashboard">
Navigate to the Secret Overview dashboard and select the environment in which you would like to add a dynamic secret.
</Step>
<Step title="Click on the 'Add Dynamic Secret' button">
![Add Dynamic Secret Button](../../../images/platform/dynamic-secrets/add-dynamic-secret-button.png)
</Step>
<Step title="Select AWS IAM">
![Dynamic Secret Modal](../../../images/platform/dynamic-secrets/dynamic-secret-modal-aws-iam.png)
</Step>
<Step title="Provide the inputs for dynamic secret parameters">
![Dynamic Secret Setup Modal for IRSA](/images/platform/dynamic-secrets/dynamic-secret-setup-modal-aws-iam-irsa.png)
<ParamField path="Secret Name" type="string" required>
Name by which you want the secret to be referenced
</ParamField>
<ParamField path="Default TTL" type="string" required>
Default time-to-live for a generated secret (it is possible to modify this value after a secret is generated)
</ParamField>
<ParamField path="Max TTL" type="string" required>
Maximum time-to-live for a generated secret
</ParamField>
<ParamField path="Username Template" type="string" default="{{randomUsername}}">
Specifies a template for generating usernames. This field allows customization of how usernames are automatically created.
Allowed template variables are
- `{{randomUsername}}`: Random username string
- `{{unixTimestamp}}`: Current Unix timestamp
- `{{identity.name}}`: Name of the identity that is generating the secret
- `{{random N}}`: Random string of N characters
Allowed template functions are
- `truncate`: Truncates a string to a specified length
- `replace`: Replaces a substring with another value
Examples:
```
{{randomUsername}} // 3POnzeFyK9gW2nioK0q2gMjr6CZqsRiX
{{unixTimestamp}} // 17490641580
{{identity.name}} // testuser
{{random-5}} // x9k2m
{{truncate identity.name 4}} // test
{{replace identity.name 'user' 'replace'}} // testreplace
```
</ParamField>
<ParamField path="Tags" type="map<string, string>[]">
Tags to be added to the created IAM User resource.
</ParamField>
<ParamField path="Method" type="string" required>
Select *IRSA* method.
</ParamField>
<ParamField path="Aws Role ARN" type="string" required>
The ARN of the AWS IAM Role for the service account to assume.
</ParamField>
<ParamField path="AWS IAM Path" type="string">
[IAM AWS Path](https://aws.amazon.com/blogs/security/optimize-aws-administration-with-iam-paths/) to scope created IAM User resource access.
</ParamField>
<ParamField path="AWS Region" type="string" required>
The AWS data center region.
</ParamField>
<ParamField path="IAM User Permission Boundary" type="string" required>
The IAM Policy ARN of the [AWS Permissions Boundary](https://docs.aws.amazon.com/IAM/latest/UserGuide/access_policies_boundaries.html) to attach to IAM users created in the role.
</ParamField>
<ParamField path="AWS IAM Groups" type="string">
The AWS IAM groups that should be assigned to the created users. Multiple values can be provided by separating them with commas
</ParamField>
<ParamField path="AWS Policy ARNs" type="string">
The AWS IAM managed policies that should be attached to the created users. Multiple values can be provided by separating them with commas
</ParamField>
<ParamField path="AWS IAM Policy Document" type="string">
The AWS IAM inline policy that should be attached to the created users.
Multiple values can be provided by separating them with commas
</ParamField>
<ParamField path="Username Template" type="string" default="{{randomUsername}}">
Specifies a template for generating usernames. This field allows customization of how usernames are automatically created.
Allowed template variables are
- `{{randomUsername}}`: Random username string
- `{{unixTimestamp}}`: Current Unix timestamp
</ParamField>
</Step>
<Step title="Click 'Submit'">
After submitting the form, you will see a dynamic secret created in the dashboard.
![Dynamic Secret](../../../images/platform/dynamic-secrets/dynamic-secret.png)
</Step>
<Step title="Generate dynamic secrets">
Once you've successfully configured the dynamic secret, you're ready to generate on-demand credentials.
To do this, simply click on the 'Generate' button which appears when hovering over the dynamic secret item.
Alternatively, you can initiate the creation of a new lease by selecting 'New Lease' from the dynamic secret lease list section.
![Dynamic Secret](/images/platform/dynamic-secrets/dynamic-secret-generate.png)
![Dynamic Secret](/images/platform/dynamic-secrets/dynamic-secret-lease-empty.png)
When generating these secrets, it's important to specify a Time-to-Live (TTL) duration. This will dictate how long the credentials are valid for.
![Provision Lease](/images/platform/dynamic-secrets/provision-lease.png)
<Tip>
Ensure that the TTL for the lease falls within the maximum TTL defined when configuring the dynamic secret in step 4.
</Tip>
Once you click the `Submit` button, a new secret lease will be generated and the credentials for it will be shown to you.
![Provision Lease](/images/platform/dynamic-secrets/lease-values-aws-iam.png)
</Step>
</Steps>
</Tab>
<Tab title="Access Key">
Infisical will use the provided **Access Key ID** and **Secret Key** to connect to your AWS instance.
@ -425,9 +263,9 @@ Replace **\<account id\>** with your AWS account id and **\<aws-scope-path\>** w
Maximum time-to-live for a generated secret
</ParamField>
<ParamField path="Method" type="string" required>
Select *Access Key* method.
</ParamField>
<ParamField path="Method" type="string" required>
Select *Access Key* method.
</ParamField>
<ParamField path="AWS Access Key" type="string" required>
The managing AWS IAM User Access Key

Binary image files not shown (11 files, 325–966 KiB).

View File

@ -35,17 +35,6 @@ Infisical supports connecting to Cloudflare using API tokens and Account ID for
- **Account** - **Cloudflare Pages** - **Edit**
- **Account** - **Account Settings** - **Read**
Add these permissions to your API token and click **Continue to summary**, then **Create Token** to generate your API token.
</Accordion>
<Accordion title="Cloudflare Workers">
Use the following permissions to grant Infisical access to sync secrets to Cloudflare Workers:
![Configure Token](/images/app-connections/cloudflare/cloudflare-workers-configure-permissions.png)
**Required Permissions:**
- **Account** - **Workers Scripts** - **Edit**
- **Account** - **Account Settings** - **Read**
Add these permissions to your API token and click **Continue to summary**, then **Create Token** to generate your API token.
</Accordion>
</AccordionGroup>
@ -55,7 +44,7 @@ Infisical supports connecting to Cloudflare using API tokens and Account ID for
</Step>
<Step title="Save Your API Token">
After creation, copy and securely store your API token as it will not be shown again.
![Generated API Token](/images/app-connections/cloudflare/cloudflare-generated-token.png)
<Warning>

View File

@ -1,128 +0,0 @@
---
title: "Cloudflare Workers Sync"
description: "Learn how to configure a Cloudflare Workers Sync for Infisical."
---
**Prerequisites:**
- Set up and add secrets to [Infisical Cloud](https://app.infisical.com)
- Create a [Cloudflare Connection](/integrations/app-connections/cloudflare)
<Tabs>
<Tab title="Infisical UI">
1. Navigate to **Project** > **Integrations** and select the **Secret Syncs** tab. Click on the **Add Sync** button.
![Secret Syncs Tab](/images/secret-syncs/general/secret-sync-tab.png)
2. Select the **Cloudflare Workers** option.
![Select Cloudflare Workers](/images/secret-syncs/cloudflare-workers/select-cloudflare-workers-option.png)
3. Configure the **Source** from where secrets should be retrieved, then click **Next**.
![Configure Source](/images/secret-syncs/cloudflare-workers/cloudflare-workers-sync-source.png)
- **Environment**: The project environment to retrieve secrets from.
- **Secret Path**: The folder path to retrieve secrets from.
<Tip>
If you need to sync secrets from multiple folder locations, check out [secret imports](/documentation/platform/secret-reference#secret-imports).
</Tip>
4. Configure the **Destination** to where secrets should be deployed, then click **Next**.
![Configure Destination](/images/secret-syncs/cloudflare-workers/cloudflare-workers-sync-destination.png)
- **Cloudflare Connection**: The Cloudflare Connection to authenticate with.
- **Cloudflare Workers Script**: Choose the Cloudflare Workers script you want to sync secrets to.
5. Configure the **Sync Options** to specify how secrets should be synced, then click **Next**.
![Configure Options](/images/secret-syncs/cloudflare-workers/cloudflare-workers-sync-options.png)
- **Initial Sync Behavior**: Determines how Infisical should resolve the initial sync.
- **Overwrite Destination Secrets**: Removes any secrets at the destination endpoint not present in Infisical.
- **Key Schema**: Template that determines how secret names are transformed when syncing, using `{{secretKey}}` as a placeholder for the original secret name and `{{environment}}` for the environment.
- **Auto-Sync Enabled**: If enabled, secrets will automatically be synced from the source location when changes occur. Disable to enforce manual syncing only.
- **Disable Secret Deletion**: If enabled, Infisical will not remove secrets from the sync destination. Enable this option if you intend to manage some secrets manually outside of Infisical.
6. Configure the **Details** of your Cloudflare Workers Sync, then click **Next**.
![Configure Details](/images/secret-syncs/cloudflare-workers/cloudflare-workers-sync-details.png)
- **Name**: The name of your sync. Must be slug-friendly.
- **Description**: An optional description for your sync.
7. Review your Cloudflare Workers Sync configuration, then click **Create Sync**.
![Confirm Configuration](/images/secret-syncs/cloudflare-workers/cloudflare-workers-sync-review.png)
8. If enabled, your Cloudflare Workers Sync will begin syncing your secrets to the destination endpoint.
![Sync Secrets](/images/secret-syncs/cloudflare-workers/cloudflare-workers-sync-created.png)
</Tab>
<Tab title="API">
To create a **Cloudflare Workers Sync**, make an API request to the [Create Cloudflare Workers Sync](/api-reference/endpoints/secret-syncs/cloudflare-workers/create) API endpoint.
### Sample request
```bash Request
curl --request POST \
--url https://app.infisical.com/api/v1/secret-syncs/cloudflare-workers \
--header 'Content-Type: application/json' \
--data '{
"name": "my-cloudflare-workers-sync",
"projectId": "your-project-id",
"description": "an example sync",
"connectionId": "your-cloudflare-connection-id",
"environment": "production",
"secretPath": "/my-secrets",
"isEnabled": true,
"syncOptions": {
"initialSyncBehavior": "overwrite-destination"
},
"destinationConfig": {
"scriptId": "my-workers-script"
}
}'
```
### Sample response
```bash Response
{
"secretSync": {
"id": "your-sync-id",
"name": "my-cloudflare-workers-sync",
"description": "an example sync",
"isEnabled": true,
"version": 1,
"folderId": "your-folder-id",
"connectionId": "your-cloudflare-connection-id",
"createdAt": "2024-05-01T12:00:00Z",
"updatedAt": "2024-05-01T12:00:00Z",
"syncStatus": "succeeded",
"lastSyncJobId": "123",
"lastSyncMessage": null,
"lastSyncedAt": "2024-05-01T12:00:00Z",
"syncOptions": {
"initialSyncBehavior": "overwrite-destination"
},
"projectId": "your-project-id",
"connection": {
"app": "cloudflare",
"name": "my-cloudflare-connection",
"id": "your-cloudflare-connection-id"
},
"environment": {
"slug": "production",
"name": "Production",
"id": "your-env-id"
},
"folder": {
"id": "your-folder-id",
"path": "/my-secrets"
},
"destination": "cloudflare-workers",
"destinationConfig": {
"scriptId": "my-workers-script"
}
}
}
```
</Tab>
</Tabs>

View File

@ -1,6 +0,0 @@
---
title: "Infisical C++ SDK"
sidebarTitle: "C++"
url: "https://github.com/Infisical/infisical-cpp-sdk/?tab=readme-ov-file#infisical-c-sdk"
icon: "c"
---

View File

@ -25,9 +25,6 @@ From local development to production, Infisical SDKs provide the easiest way for
<Card href="https://github.com/Infisical/infisical-dotnet-sdk?tab=readme-ov-file#infisical-net-sdk" title=".NET" icon="bars" color="#368833">
Manage secrets for your .NET application on demand
</Card>
<Card href="https://github.com/Infisical/infisical-cpp-sdk/?tab=readme-ov-file#infisical-c-sdk" title="C++" icon="c" color="#b00dd1">
Manage secrets for your C++ application on demand
</Card>
<Card href="/sdks/languages/ruby" title="Ruby" icon="diamond" color="#367B99">
Manage secrets for your Ruby application on demand
</Card>

View File

@ -59,15 +59,6 @@ Example values:
connect with internal/private IP addresses.
</ParamField>
<ParamField
query="KUBERNETES_AUTO_FETCH_SERVICE_ACCOUNT_TOKEN"
type="bool"
default="false"
optional
>
Determines whether your Infisical instance can automatically read the service account token of the pod it's running on. Used for features such as the IRSA auth method.
</ParamField>
## CORS
Cross-Origin Resource Sharing (CORS) is a security feature that allows web applications running on one domain to access resources from another domain.

View File

@ -4,20 +4,17 @@ description: "Read how to run Infisical with Docker Compose template."
---
This self-hosting guide will walk you through the steps to self-host Infisical using Docker Compose.
## Prerequisites
- [Docker](https://docs.docker.com/engine/install/)
- [Docker compose](https://docs.docker.com/compose/install/)
<Tabs>
<Tab title="Docker Compose">
## Prerequisites
- [Docker](https://docs.docker.com/engine/install/)
- [Docker compose](https://docs.docker.com/compose/install/)
<Warning>
This Docker Compose configuration is not designed for high-availability production scenarios.
It includes just the essential components needed to set up an Infisical proof of concept (POC).
To run Infisical in a highly available manner, follow the [Docker Swarm guide](/self-hosting/deployment-options/docker-swarm).
</Warning>
<Warning>
This Docker Compose configuration is not designed for high-availability production scenarios.
It includes just the essential components needed to set up an Infisical proof of concept (POC).
To run Infisical in a highly available manner, follow the [Docker Swarm guide](/self-hosting/deployment-options/docker-swarm).
</Warning>
## Verify prerequisites
## Verify prerequisites
To verify that Docker compose and Docker are installed on the machine where you plan to install Infisical, run the following commands.
Check for docker installation
@ -30,145 +27,55 @@ This self-hosting guide will walk you through the steps to self-host Infisical u
docker-compose
```
## Download docker compose file
You can obtain the Infisical docker compose file by using a command-line downloader such as `wget` or `curl`.
If your system doesn't have either of these, you can use an equivalent command that works with your machine.
<Tabs>
<Tab title="curl">
```bash
curl -o docker-compose.prod.yml https://raw.githubusercontent.com/Infisical/infisical/main/docker-compose.prod.yml
```
</Tab>
<Tab title="wget">
```bash
wget -O docker-compose.prod.yml https://raw.githubusercontent.com/Infisical/infisical/main/docker-compose.prod.yml
```
</Tab>
</Tabs>
## Configure instance credentials
Infisical requires a set of credentials used for connecting to dependent services such as Postgres, Redis, etc.
The default credentials can be downloaded using one of the commands listed below.
<Tabs>
<Tab title="curl">
```bash
curl -o .env https://raw.githubusercontent.com/Infisical/infisical/main/.env.example
```
</Tab>
<Tab title="wget">
```bash
wget -O .env https://raw.githubusercontent.com/Infisical/infisical/main/.env.example
```
</Tab>
</Tabs>
Once downloaded, the credentials file will be saved to your working directory as a `.env` file.
View all available configurations [here](/self-hosting/configuration/envars).
<Warning>
The default .env file contains credentials that are intended solely for testing purposes.
Please generate a new `ENCRYPTION_KEY` and `AUTH_SECRET` for use outside of testing.
Instructions to do so can be found [here](/self-hosting/configuration/envars).
</Warning>
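One common way to generate replacement values is with `openssl`; the commands below are an illustrative sketch, so confirm the exact length and format requirements in the configuration reference linked above.
```bash
# Illustrative: generate fresh values to replace the defaults in .env.
openssl rand -hex 16      # candidate ENCRYPTION_KEY
openssl rand -base64 32   # candidate AUTH_SECRET
```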
## Start Infisical
Run the command below to start Infisical and all related services.
## Download docker compose file
You can obtain the Infisical docker compose file by using a command-line downloader such as `wget` or `curl`.
If your system doesn't have either of these, you can use an equivalent command that works with your machine.
<Tabs>
<Tab title="curl">
```bash
docker-compose -f docker-compose.prod.yml up
curl -o docker-compose.prod.yml https://raw.githubusercontent.com/Infisical/infisical/main/docker-compose.prod.yml
```
</Tab>
<Tab title="Podman Compose">
Podman Compose is an alternative way to run Infisical using Podman as a replacement for Docker. Podman is backwards compatible with Docker Compose files.
## Prerequisites
- [Podman](https://podman-desktop.io/docs/installation)
- [Podman Compose](https://podman-desktop.io/docs/compose)
<Warning>
This Docker Compose configuration is not designed for high-availability production scenarios.
It includes just the essential components needed to set up an Infisical proof of concept (POC).
To run Infisical in a highly available manner, follow the [Docker Swarm guide](/self-hosting/deployment-options/docker-swarm).
</Warning>
## Verify prerequisites
To verify that Podman compose and Podman are installed on the machine where you plan to install Infisical, run the following commands.
Check for podman installation
```bash
podman version
```
Check for podman compose installation
```bash
podman-compose version
```
## Download Docker Compose file
You can obtain the Infisical docker compose file by using a command-line downloader such as `wget` or `curl`.
If your system doesn't have either of these, you can use an equivalent command that works with your machine.
<Tabs>
<Tab title="curl">
```bash
curl -o docker-compose.prod.yml https://raw.githubusercontent.com/Infisical/infisical/main/docker-compose.prod.yml
```
</Tab>
<Tab title="wget">
```bash
wget -O docker-compose.prod.yml https://raw.githubusercontent.com/Infisical/infisical/main/docker-compose.prod.yml
```
</Tab>
</Tabs>
## Configure instance credentials
Infisical requires a set of credentials used for connecting to dependent services such as Postgres, Redis, etc.
The default credentials can be downloaded using one of the commands listed below.
<Tabs>
<Tab title="curl">
```bash
curl -o .env https://raw.githubusercontent.com/Infisical/infisical/main/.env.example
```
</Tab>
<Tab title="wget">
```bash
wget -O .env https://raw.githubusercontent.com/Infisical/infisical/main/.env.example
```
</Tab>
</Tabs>
<Note>
Make sure to rename the `.env.example` file to `.env` before starting Infisical. Additionally, it's important that the `.env` file is in the same directory as the `docker-compose.prod.yml` file.
</Note>
## Setup Podman
Run the commands below to setup Podman for first time use.
```bash
podman machine init --now
podman machine set --rootful
podman machine start
```
<Note>
If you are using a rootless podman installation, you can skip the `podman machine set --rootful` command.
</Note>
## Start Infisical
Run the command below to start Infisical and all related services.
```bash
podman-compose -f docker-compose.prod.yml up
<Tab title="wget">
```bash
wget -O docker-compose.prod.yml https://raw.githubusercontent.com/Infisical/infisical/main/docker-compose.prod.yml
```
</Tab>
</Tabs>
## Configure instance credentials
Infisical requires a set of credentials used for connecting to dependent services such as Postgres, Redis, etc.
The default credentials can be downloaded using one of the commands listed below.
<Tabs>
<Tab title="curl">
```bash
curl -o .env https://raw.githubusercontent.com/Infisical/infisical/main/.env.example
```
</Tab>
<Tab title="wget">
```bash
wget -O .env https://raw.githubusercontent.com/Infisical/infisical/main/.env.example
```
</Tab>
</Tabs>
Once downloaded, the credentials file will be saved to your working directory as a `.env` file.
View all available configurations [here](/self-hosting/configuration/envars).
<Warning>
The default .env file contains credentials that are intended solely for testing purposes.
Please generate a new `ENCRYPTION_KEY` and `AUTH_SECRET` for use outside of testing.
Instructions to do so can be found [here](/self-hosting/configuration/envars).
</Warning>
## Start Infisical
Run the command below to start Infisical and all related services.
```bash
docker-compose -f docker-compose.prod.yml up
```
Your Infisical instance should now be running on port `80`. To access your instance, visit `http://localhost:80`.
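To quickly confirm the stack is up and the web server is answering, a plain HTTP request against the exposed port is enough; no specific health endpoint is assumed here.
```bash
# Expect an HTTP response (e.g. 200 or a redirect) once the containers are ready.
curl -I http://localhost:80
```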

File diff suppressed because one or more lines are too long

View File

@ -4,7 +4,7 @@ import { SingleValue } from "react-select";
import { SecretSyncConnectionField } from "@app/components/secret-syncs/forms/SecretSyncConnectionField";
import { FilterableSelect, FormControl, Select, SelectItem } from "@app/components/v2";
import {
TCloudflarePagesProject,
TCloudflareProject,
useCloudflareConnectionListPagesProjects
} from "@app/hooks/api/appConnections/cloudflare";
import { SecretSync } from "@app/hooks/api/secretSyncs";
@ -52,7 +52,7 @@ export const CloudflarePagesSyncFields = () => {
isDisabled={!connectionId}
value={projects ? (projects.find((project) => project.name === value) ?? []) : []}
onChange={(option) => {
onChange((option as SingleValue<TCloudflarePagesProject>)?.name ?? null);
onChange((option as SingleValue<TCloudflareProject>)?.name ?? null);
}}
options={projects}
placeholder="Select a project..."

View File

@ -1,59 +0,0 @@
import { Controller, useFormContext, useWatch } from "react-hook-form";
import { SingleValue } from "react-select";
import { SecretSyncConnectionField } from "@app/components/secret-syncs/forms/SecretSyncConnectionField";
import { FilterableSelect, FormControl } from "@app/components/v2";
import {
TCloudflareWorkersScript,
useCloudflareConnectionListWorkersScripts
} from "@app/hooks/api/appConnections/cloudflare";
import { SecretSync } from "@app/hooks/api/secretSyncs";
import { TSecretSyncForm } from "../schemas";
export const CloudflareWorkersSyncFields = () => {
const { control, setValue } = useFormContext<
TSecretSyncForm & { destination: SecretSync.CloudflareWorkers }
>();
const connectionId = useWatch({ name: "connection.id", control });
const { data: scripts = [], isPending: isScriptsPending } =
useCloudflareConnectionListWorkersScripts(connectionId, {
enabled: Boolean(connectionId)
});
return (
<>
<SecretSyncConnectionField
onChange={() => {
setValue("destinationConfig.scriptId", "");
}}
/>
<Controller
name="destinationConfig.scriptId"
control={control}
render={({ field: { value, onChange }, fieldState: { error } }) => (
<FormControl
errorText={error?.message}
isError={Boolean(error?.message)}
label="Worker Script"
>
<FilterableSelect
isLoading={isScriptsPending && Boolean(connectionId)}
isDisabled={!connectionId}
value={scripts?.find((script) => script.id === value) || []}
onChange={(option) => {
onChange((option as SingleValue<TCloudflareWorkersScript>)?.id ?? null);
}}
options={scripts}
placeholder="Select a worker script..."
getOptionLabel={(option) => option.id}
getOptionValue={(option) => option.id}
/>
</FormControl>
)}
/>
</>
);
};

View File

@ -11,7 +11,6 @@ import { AzureDevOpsSyncFields } from "./AzureDevOpsSyncFields";
import { AzureKeyVaultSyncFields } from "./AzureKeyVaultSyncFields";
import { CamundaSyncFields } from "./CamundaSyncFields";
import { CloudflarePagesSyncFields } from "./CloudflarePagesSyncFields";
import { CloudflareWorkersSyncFields } from "./CloudflareWorkersSyncFields";
import { DatabricksSyncFields } from "./DatabricksSyncFields";
import { FlyioSyncFields } from "./FlyioSyncFields";
import { GcpSyncFields } from "./GcpSyncFields";
@ -79,8 +78,6 @@ export const SecretSyncDestinationFields = () => {
return <GitLabSyncFields />;
case SecretSync.CloudflarePages:
return <CloudflarePagesSyncFields />;
case SecretSync.CloudflareWorkers:
return <CloudflareWorkersSyncFields />;
case SecretSync.Zabbix:
return <ZabbixSyncFields />;
case SecretSync.Railway:

View File

@ -58,7 +58,6 @@ export const SecretSyncOptionsFields = ({ hideInitialSync }: Props) => {
case SecretSync.Flyio:
case SecretSync.GitLab:
case SecretSync.CloudflarePages:
case SecretSync.CloudflareWorkers:
case SecretSync.Zabbix:
case SecretSync.Railway:
AdditionalSyncOptionsFieldsComponent = null;

View File

@ -1,14 +0,0 @@
import { useFormContext } from "react-hook-form";
import { TSecretSyncForm } from "@app/components/secret-syncs/forms/schemas";
import { GenericFieldLabel } from "@app/components/v2";
import { SecretSync } from "@app/hooks/api/secretSyncs";
export const CloudflareWorkersSyncReviewFields = () => {
const { watch } = useFormContext<
TSecretSyncForm & { destination: SecretSync.CloudflareWorkers }
>();
const scriptId = watch("destinationConfig.scriptId");
return <GenericFieldLabel label="Script">{scriptId}</GenericFieldLabel>;
};

View File

@ -20,7 +20,6 @@ import { AzureDevOpsSyncReviewFields } from "./AzureDevOpsSyncReviewFields";
import { AzureKeyVaultSyncReviewFields } from "./AzureKeyVaultSyncReviewFields";
import { CamundaSyncReviewFields } from "./CamundaSyncReviewFields";
import { CloudflarePagesSyncReviewFields } from "./CloudflarePagesReviewFields";
import { CloudflareWorkersSyncReviewFields } from "./CloudflareWorkersReviewFields";
import { DatabricksSyncReviewFields } from "./DatabricksSyncReviewFields";
import { FlyioSyncReviewFields } from "./FlyioSyncReviewFields";
import { GcpSyncReviewFields } from "./GcpSyncReviewFields";
@ -127,9 +126,6 @@ export const SecretSyncReviewFields = () => {
case SecretSync.CloudflarePages:
DestinationFieldsComponent = <CloudflarePagesSyncReviewFields />;
break;
case SecretSync.CloudflareWorkers:
DestinationFieldsComponent = <CloudflareWorkersSyncReviewFields />;
break;
case SecretSync.Zabbix:
DestinationFieldsComponent = <ZabbixSyncReviewFields />;
break;

View File

@ -1,18 +0,0 @@
import { z } from "zod";
import { BaseSecretSyncSchema } from "@app/components/secret-syncs/forms/schemas/base-secret-sync-schema";
import { SecretSync } from "@app/hooks/api/secretSyncs";
export const CloudflareWorkersSyncDestinationSchema = BaseSecretSyncSchema().merge(
z.object({
destination: z.literal(SecretSync.CloudflareWorkers),
destinationConfig: z.object({
scriptId: z
.string()
.trim()
.min(1, "Script ID is required")
.max(64)
.regex(/^[a-z0-9]([a-z0-9-]*[a-z0-9])?$/, "Invalid script ID format")
})
})
);

View File

@ -8,7 +8,6 @@ import { AzureDevOpsSyncDestinationSchema } from "./azure-devops-sync-destinatio
import { AzureKeyVaultSyncDestinationSchema } from "./azure-key-vault-sync-destination-schema";
import { CamundaSyncDestinationSchema } from "./camunda-sync-destination-schema";
import { CloudflarePagesSyncDestinationSchema } from "./cloudflare-pages-sync-destination-schema";
import { CloudflareWorkersSyncDestinationSchema } from "./cloudflare-workers-sync-destination-schema";
import { DatabricksSyncDestinationSchema } from "./databricks-sync-destination-schema";
import { FlyioSyncDestinationSchema } from "./flyio-sync-destination-schema";
import { GcpSyncDestinationSchema } from "./gcp-sync-destination-schema";
@ -49,8 +48,6 @@ const SecretSyncUnionSchema = z.discriminatedUnion("destination", [
FlyioSyncDestinationSchema,
GitlabSyncDestinationSchema,
CloudflarePagesSyncDestinationSchema,
CloudflareWorkersSyncDestinationSchema,
ZabbixSyncDestinationSchema,
RailwaySyncDestinationSchema
]);

View File

@ -26,7 +26,6 @@ export const envConfig = {
import.meta.env.VITE_TELEMETRY_CAPTURING_ENABLED === true
);
},
get PLATFORM_VERSION() {
return import.meta.env.VITE_INFISICAL_PLATFORM_VERSION;
}

View File

@ -29,6 +29,10 @@ export const ROUTE_PATHS = Object.freeze({
"/_authenticate/_inject-org-details/_org-layout/organization/settings/oauth/callback"
)
},
SsoPage: setRoute(
"/organization/sso",
"/_authenticate/_inject-org-details/_org-layout/organization/sso"
),
SecretSharing: setRoute(
"/organization/secret-sharing",
"/_authenticate/_inject-org-details/_org-layout/organization/secret-sharing/"

View File

@ -82,10 +82,6 @@ export const SECRET_SYNC_MAP: Record<SecretSync, { name: string; image: string }
name: "Cloudflare Pages",
image: "Cloudflare.png"
},
[SecretSync.CloudflareWorkers]: {
name: "Cloudflare Workers",
image: "Cloudflare.png"
},
[SecretSync.Zabbix]: {
name: "Zabbix",
image: "Zabbix.png"
@ -119,8 +115,6 @@ export const SECRET_SYNC_CONNECTION_MAP: Record<SecretSync, AppConnection> = {
[SecretSync.Flyio]: AppConnection.Flyio,
[SecretSync.GitLab]: AppConnection.Gitlab,
[SecretSync.CloudflarePages]: AppConnection.Cloudflare,
[SecretSync.CloudflareWorkers]: AppConnection.Cloudflare,
[SecretSync.Zabbix]: AppConnection.Zabbix,
[SecretSync.Railway]: AppConnection.Railway
};

View File

@ -170,7 +170,7 @@ export type TCreateAccessPolicyDTO = {
approvers?: Approver[];
bypassers?: Bypasser[];
approvals?: number;
secretPath: string;
secretPath?: string;
enforcementLevel?: EnforcementLevel;
allowedSelfApprovals: boolean;
approvalsRequired?: { numberOfApprovals: number; stepNumber: number }[];

View File

@ -40,7 +40,6 @@ export type TServerConfig = {
trustLdapEmails: boolean;
trustOidcEmails: boolean;
isSecretScanningDisabled: boolean;
kubernetesAutoFetchServiceAccountToken: boolean;
defaultAuthOrgSlug: string | null;
defaultAuthOrgId: string | null;
defaultAuthOrgAuthMethod?: string | null;

View File

@ -3,23 +3,21 @@ import { useQuery, UseQueryOptions } from "@tanstack/react-query";
import { apiRequest } from "@app/config/request";
import { appConnectionKeys } from "../queries";
import { TCloudflarePagesProject, TCloudflareWorkersScript } from "./types";
import { TCloudflareProject } from "./types";
const cloudflareConnectionKeys = {
all: [...appConnectionKeys.all, "cloudflare"] as const,
listPagesProjects: (connectionId: string) =>
[...cloudflareConnectionKeys.all, "pages-projects", connectionId] as const,
listWorkersScripts: (connectionId: string) =>
[...cloudflareConnectionKeys.all, "workers-scripts", connectionId] as const
[...cloudflareConnectionKeys.all, "pages-projects", connectionId] as const
};
export const useCloudflareConnectionListPagesProjects = (
connectionId: string,
options?: Omit<
UseQueryOptions<
TCloudflarePagesProject[],
TCloudflareProject[],
unknown,
TCloudflarePagesProject[],
TCloudflareProject[],
ReturnType<typeof cloudflareConnectionKeys.listPagesProjects>
>,
"queryKey" | "queryFn"
@ -28,7 +26,7 @@ export const useCloudflareConnectionListPagesProjects = (
return useQuery({
queryKey: cloudflareConnectionKeys.listPagesProjects(connectionId),
queryFn: async () => {
const { data } = await apiRequest.get<TCloudflarePagesProject[]>(
const { data } = await apiRequest.get<TCloudflareProject[]>(
`/api/v1/app-connections/cloudflare/${connectionId}/cloudflare-pages-projects`
);
@ -37,28 +35,3 @@ export const useCloudflareConnectionListPagesProjects = (
...options
});
};
export const useCloudflareConnectionListWorkersScripts = (
connectionId: string,
options?: Omit<
UseQueryOptions<
TCloudflareWorkersScript[],
unknown,
TCloudflareWorkersScript[],
ReturnType<typeof cloudflareConnectionKeys.listWorkersScripts>
>,
"queryKey" | "queryFn"
>
) => {
return useQuery({
queryKey: cloudflareConnectionKeys.listWorkersScripts(connectionId),
queryFn: async () => {
const { data } = await apiRequest.get<TCloudflareWorkersScript[]>(
`/api/v1/app-connections/cloudflare/${connectionId}/cloudflare-workers-scripts`
);
return data;
},
...options
});
};

View File

@ -1,8 +1,4 @@
export type TCloudflarePagesProject = {
export type TCloudflareProject = {
id: string;
name: string;
};
export type TCloudflareWorkersScript = {
id: string;
};

View File

@ -54,8 +54,7 @@ export enum SqlProviders {
export enum DynamicSecretAwsIamAuth {
AssumeRole = "assume-role",
AccessKey = "access-key",
IRSA = "irsa"
AccessKey = "access-key"
}
export type TDynamicSecretProvider =
@ -112,14 +111,6 @@ export type TDynamicSecretProvider =
policyDocument?: string;
userGroups?: string;
policyArns?: string;
}
| {
method: DynamicSecretAwsIamAuth.IRSA;
region: string;
awsPath?: string;
policyDocument?: string;
userGroups?: string;
policyArns?: string;
};
}
| {

View File

@ -3,7 +3,6 @@ import { useMutation, useQueryClient } from "@tanstack/react-query";
import { apiRequest } from "@app/config/request";
import { organizationKeys } from "../organization/queries";
import { subscriptionQueryKeys } from "../subscriptions/queries";
import { identitiesKeys } from "./queries";
import {
AddIdentityAliCloudAuthDTO,
@ -83,9 +82,6 @@ export const useCreateIdentity = () => {
queryClient.invalidateQueries({
queryKey: organizationKeys.getOrgIdentityMemberships(organizationId)
});
queryClient.invalidateQueries({
queryKey: subscriptionQueryKeys.getOrgSubsription(organizationId)
});
}
});
};
@ -127,9 +123,6 @@ export const useDeleteIdentity = () => {
queryClient.invalidateQueries({
queryKey: organizationKeys.getOrgIdentityMemberships(organizationId)
});
queryClient.invalidateQueries({
queryKey: subscriptionQueryKeys.getOrgSubsription(organizationId)
});
}
});
};

View File

@ -49,7 +49,7 @@ export type TCreateSecretPolicyDTO = {
workspaceId: string;
name?: string;
environment: string;
secretPath: string;
secretPath?: string | null;
approvers?: Approver[];
bypassers?: Bypasser[];
approvals?: number;
@ -62,7 +62,7 @@ export type TUpdateSecretPolicyDTO = {
name?: string;
approvers?: Approver[];
bypassers?: Bypasser[];
secretPath?: string;
secretPath?: string | null;
approvals?: number;
allowedSelfApprovals?: boolean;
enforcementLevel?: EnforcementLevel;

Some files were not shown because too many files have changed in this diff.