Compare commits

..

18 Commits

Author SHA1 Message Date
30f3dac35f rephrase input and filter for reserved folder 2024-06-12 15:56:47 -04:00
87df5a2749 misc: addressed PR comments 2024-06-11 21:54:14 +08:00
e696bff004 misc: optimized prune orphan snapshots 2024-06-10 21:08:14 +08:00
d9c4c332ea feat: added handling of versioned folders and cleanup script 2024-06-10 20:40:19 +08:00
7fe7056af4 Merge remote-tracking branch 'origin/main' into feat/add-version-limits 2024-06-08 01:37:14 +08:00
ee152f2d20 misc: added cleanup frequency note for pit versions 2024-06-04 23:50:10 +08:00
f21a13f388 adjustment: removed artificial limiting of pit versions 2024-06-04 23:46:02 +08:00
68a30f4212 misc: removed transactional 2024-06-03 22:06:32 +08:00
4d830f1d1a misc: added outer try catch block 2024-06-03 17:58:39 +08:00
cd6caab508 misc: migrated to using keyset pagination 2024-06-03 17:56:00 +08:00
ab093dfc85 misc: simplified delete query for secret folder version 2024-06-03 12:49:40 +08:00
b8e9417466 misc: modified pruning sql logic 2024-06-01 03:26:35 +08:00
4eb08c64d4 misc: updated error message 2024-06-01 01:07:25 +08:00
d76760fa9c misc: updated schema 2024-05-31 23:42:49 +08:00
4d8f94a9dc feat: added version prune to daily resource queue 2024-05-31 23:25:56 +08:00
abd8d6aa8a feat: added support for version limit update 2024-05-31 23:18:02 +08:00
9117067ab5 feat: finalized pruning logic 2024-05-31 21:38:16 +08:00
3a1168c7e8 feat: added initial version pruning and result limiting 2024-05-31 19:12:55 +08:00
55 changed files with 603 additions and 386 deletions

View File

@ -63,7 +63,3 @@ CLIENT_SECRET_GITHUB_LOGIN=
CLIENT_ID_GITLAB_LOGIN=
CLIENT_SECRET_GITLAB_LOGIN=
CAPTCHA_SECRET=
NEXT_PUBLIC_CAPTCHA_SITE_KEY=

View File

@ -1,7 +1,7 @@
ARG POSTHOG_HOST=https://app.posthog.com
ARG POSTHOG_API_KEY=posthog-api-key
ARG INTERCOM_ID=intercom-id
ARG CAPTCHA_SITE_KEY=captcha-site-key
ARG SAML_ORG_SLUG=saml-org-slug-default
FROM node:20-alpine AS base
@ -36,8 +36,8 @@ ARG INTERCOM_ID
ENV NEXT_PUBLIC_INTERCOM_ID $INTERCOM_ID
ARG INFISICAL_PLATFORM_VERSION
ENV NEXT_PUBLIC_INFISICAL_PLATFORM_VERSION $INFISICAL_PLATFORM_VERSION
ARG CAPTCHA_SITE_KEY
ENV NEXT_PUBLIC_CAPTCHA_SITE_KEY $CAPTCHA_SITE_KEY
ARG SAML_ORG_SLUG
ENV NEXT_PUBLIC_SAML_ORG_SLUG=$SAML_ORG_SLUG
# Build
RUN npm run build
@ -113,9 +113,9 @@ ENV NEXT_PUBLIC_POSTHOG_API_KEY=$POSTHOG_API_KEY \
ARG INTERCOM_ID=intercom-id
ENV NEXT_PUBLIC_INTERCOM_ID=$INTERCOM_ID \
BAKED_NEXT_PUBLIC_INTERCOM_ID=$INTERCOM_ID
ARG CAPTCHA_SITE_KEY
ENV NEXT_PUBLIC_CAPTCHA_SITE_KEY=$CAPTCHA_SITE_KEY \
BAKED_NEXT_PUBLIC_CAPTCHA_SITE_KEY=$CAPTCHA_SITE_KEY
ARG SAML_ORG_SLUG
ENV NEXT_PUBLIC_SAML_ORG_SLUG=$SAML_ORG_SLUG \
BAKED_NEXT_PUBLIC_SAML_ORG_SLUG=$SAML_ORG_SLUG
WORKDIR /

View File

@ -85,13 +85,13 @@ To set up and run Infisical locally, make sure you have Git and Docker installed
Linux/macOS:
```console
git clone https://github.com/Infisical/infisical && cd "$(basename $_ .git)" && cp .env.example .env && docker compose -f docker-compose.prod.yml up
git clone https://github.com/Infisical/infisical && cd "$(basename $_ .git)" && cp .env.example .env && docker-compose -f docker-compose.prod.yml up
```
Windows Command Prompt:
```console
git clone https://github.com/Infisical/infisical && cd infisical && copy .env.example .env && docker compose -f docker-compose.prod.yml up
git clone https://github.com/Infisical/infisical && cd infisical && copy .env.example .env && docker-compose -f docker-compose.prod.yml up
```
Create an account at `http://localhost:80`

View File

@ -0,0 +1,21 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const hasPitVersionLimitColumn = await knex.schema.hasColumn(TableName.Project, "pitVersionLimit");
await knex.schema.alterTable(TableName.Project, (tb) => {
if (!hasPitVersionLimitColumn) {
tb.integer("pitVersionLimit").notNullable().defaultTo(10);
}
});
}
export async function down(knex: Knex): Promise<void> {
const hasPitVersionLimitColumn = await knex.schema.hasColumn(TableName.Project, "pitVersionLimit");
await knex.schema.alterTable(TableName.Project, (tb) => {
if (hasPitVersionLimitColumn) {
tb.dropColumn("pitVersionLimit");
}
});
}

View File

@ -1,29 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const hasConsecutiveFailedPasswordAttempts = await knex.schema.hasColumn(
TableName.Users,
"consecutiveFailedPasswordAttempts"
);
await knex.schema.alterTable(TableName.Users, (tb) => {
if (!hasConsecutiveFailedPasswordAttempts) {
tb.integer("consecutiveFailedPasswordAttempts").defaultTo(0);
}
});
}
export async function down(knex: Knex): Promise<void> {
const hasConsecutiveFailedPasswordAttempts = await knex.schema.hasColumn(
TableName.Users,
"consecutiveFailedPasswordAttempts"
);
await knex.schema.alterTable(TableName.Users, (tb) => {
if (hasConsecutiveFailedPasswordAttempts) {
tb.dropColumn("consecutiveFailedPasswordAttempts");
}
});
}

View File

@ -16,7 +16,8 @@ export const ProjectsSchema = z.object({
createdAt: z.date(),
updatedAt: z.date(),
version: z.number().default(1),
upgradeStatus: z.string().nullable().optional()
upgradeStatus: z.string().nullable().optional(),
pitVersionLimit: z.number().default(10)
});
export type TProjects = z.infer<typeof ProjectsSchema>;

View File

@ -25,8 +25,7 @@ export const UsersSchema = z.object({
isEmailVerified: z.boolean().default(false).nullable().optional(),
consecutiveFailedMfaAttempts: z.number().default(0).nullable().optional(),
isLocked: z.boolean().default(false).nullable().optional(),
temporaryLockDateEnd: z.date().nullable().optional(),
consecutiveFailedPasswordAttempts: z.number().default(0).nullable().optional()
temporaryLockDateEnd: z.date().nullable().optional()
});
export type TUsers = z.infer<typeof UsersSchema>;

View File

@ -81,8 +81,7 @@ export const secretSnapshotServiceFactory = ({
const folder = await folderDAL.findBySecretPath(projectId, environment, path);
if (!folder) throw new BadRequestError({ message: "Folder not found" });
const count = await snapshotDAL.countOfSnapshotsByFolderId(folder.id);
return count;
return snapshotDAL.countOfSnapshotsByFolderId(folder.id);
};
const listSnapshots = async ({

View File

@ -1,3 +1,4 @@
/* eslint-disable no-await-in-loop */
import { Knex } from "knex";
import { TDbClient } from "@app/db";
@ -11,6 +12,7 @@ import {
} from "@app/db/schemas";
import { DatabaseError } from "@app/lib/errors";
import { ormify, selectAllTableCols, sqlNestRelationships } from "@app/lib/knex";
import { logger } from "@app/lib/logger";
export type TSnapshotDALFactory = ReturnType<typeof snapshotDALFactory>;
@ -325,12 +327,152 @@ export const snapshotDALFactory = (db: TDbClient) => {
}
};
/**
* Prunes excess snapshots from the database to ensure only a specified number of recent snapshots are retained for each folder.
*
* This function operates in three main steps:
* 1. Pruning snapshots from root/non-versioned folders.
* 2. Pruning snapshots from versioned folders.
* 3. Removing orphaned snapshots that do not belong to any existing folder or folder version.
*
* The function processes snapshots in batches, determined by the `PRUNE_FOLDER_BATCH_SIZE` constant,
* to manage the large datasets without overwhelming the DB.
*
* Steps:
* - Fetch a batch of folder IDs.
* - For each batch, use a Common Table Expression (CTE) to rank snapshots within each folder by their creation date.
* - Identify and delete snapshots that exceed the project's point-in-time version limit (`pitVersionLimit`).
* - Repeat the process for versioned folders.
* - Finally, delete orphaned snapshots that do not have an associated folder.
*/
const pruneExcessSnapshots = async () => {
const PRUNE_FOLDER_BATCH_SIZE = 10000;
try {
let uuidOffset = "00000000-0000-0000-0000-000000000000";
// cleanup snapshots from root/non-versioned folders
// eslint-disable-next-line no-constant-condition, no-unreachable-loop
while (true) {
const folderBatch = await db(TableName.SecretFolder)
.where("id", ">", uuidOffset)
.where("isReserved", false)
.orderBy("id", "asc")
.limit(PRUNE_FOLDER_BATCH_SIZE)
.select("id");
const batchEntries = folderBatch.map((folder) => folder.id);
if (folderBatch.length) {
try {
logger.info(`Pruning snapshots in [range=${batchEntries[0]}:${batchEntries[batchEntries.length - 1]}]`);
await db(TableName.Snapshot)
.with("snapshot_cte", (qb) => {
void qb
.from(TableName.Snapshot)
.whereIn(`${TableName.Snapshot}.folderId`, batchEntries)
.select(
"folderId",
`${TableName.Snapshot}.id as id`,
db.raw(
`ROW_NUMBER() OVER (PARTITION BY ${TableName.Snapshot}."folderId" ORDER BY ${TableName.Snapshot}."createdAt" DESC) AS row_num`
)
);
})
.join(TableName.SecretFolder, `${TableName.SecretFolder}.id`, `${TableName.Snapshot}.folderId`)
.join(TableName.Environment, `${TableName.Environment}.id`, `${TableName.SecretFolder}.envId`)
.join(TableName.Project, `${TableName.Project}.id`, `${TableName.Environment}.projectId`)
.join("snapshot_cte", "snapshot_cte.id", `${TableName.Snapshot}.id`)
.whereNull(`${TableName.SecretFolder}.parentId`)
.whereRaw(`snapshot_cte.row_num > ${TableName.Project}."pitVersionLimit"`)
.delete();
} catch (err) {
logger.error(
`Failed to prune snapshots from root/non-versioned folders in range ${batchEntries[0]}:${
batchEntries[batchEntries.length - 1]
}`
);
} finally {
uuidOffset = batchEntries[batchEntries.length - 1];
}
} else {
break;
}
}
// cleanup snapshots from versioned folders
uuidOffset = "00000000-0000-0000-0000-000000000000";
// eslint-disable-next-line no-constant-condition
while (true) {
const folderBatch = await db(TableName.SecretFolderVersion)
.select("folderId")
.distinct("folderId")
.where("folderId", ">", uuidOffset)
.orderBy("folderId", "asc")
.limit(PRUNE_FOLDER_BATCH_SIZE);
const batchEntries = folderBatch.map((folder) => folder.folderId);
if (folderBatch.length) {
try {
logger.info(`Pruning snapshots in range ${batchEntries[0]}:${batchEntries[batchEntries.length - 1]}`);
await db(TableName.Snapshot)
.with("snapshot_cte", (qb) => {
void qb
.from(TableName.Snapshot)
.whereIn(`${TableName.Snapshot}.folderId`, batchEntries)
.select(
"folderId",
`${TableName.Snapshot}.id as id`,
db.raw(
`ROW_NUMBER() OVER (PARTITION BY ${TableName.Snapshot}."folderId" ORDER BY ${TableName.Snapshot}."createdAt" DESC) AS row_num`
)
);
})
.join(
TableName.SecretFolderVersion,
`${TableName.SecretFolderVersion}.folderId`,
`${TableName.Snapshot}.folderId`
)
.join(TableName.Environment, `${TableName.Environment}.id`, `${TableName.SecretFolderVersion}.envId`)
.join(TableName.Project, `${TableName.Project}.id`, `${TableName.Environment}.projectId`)
.join("snapshot_cte", "snapshot_cte.id", `${TableName.Snapshot}.id`)
.whereRaw(`snapshot_cte.row_num > ${TableName.Project}."pitVersionLimit"`)
.delete();
} catch (err) {
logger.error(
`Failed to prune snapshots from versioned folders in range ${batchEntries[0]}:${
batchEntries[batchEntries.length - 1]
}`
);
} finally {
uuidOffset = batchEntries[batchEntries.length - 1];
}
} else {
break;
}
}
// cleanup orphaned snapshots (those that don't belong to an existing folder and folder version)
await db(TableName.Snapshot)
.whereNotIn("folderId", (qb) => {
void qb
.select("folderId")
.from(TableName.SecretFolderVersion)
.union((qb1) => void qb1.select("id").from(TableName.SecretFolder));
})
.delete();
} catch (error) {
throw new DatabaseError({ error, name: "SnapshotPrune" });
}
};
return {
...secretSnapshotOrm,
findById,
findLatestSnapshotByFolderId,
findRecursivelySnapshots,
countOfSnapshotsByFolderId,
findSecretSnapshotDataById
findSecretSnapshotDataById,
pruneExcessSnapshots
};
};
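
Note: the pruning queries above all follow the same window-function pattern described in the doc comment — rank each folder's snapshots by creation date with `ROW_NUMBER()`, then delete everything ranked past the project's `pitVersionLimit`. Below is a minimal, hedged Knex/TypeScript sketch of that pattern only; the table and column names are placeholders, and it omits the per-project limit join and the keyset batching used in the real query.

```ts
import knex, { Knex } from "knex";

// Placeholder connection; the real factory receives an already-configured TDbClient.
const db: Knex = knex({ client: "pg", connection: process.env.DB_CONNECTION_URI });

// Keep only the `keep` newest rows per folder in a hypothetical "snapshots" table.
const pruneKeepingNewest = async (keep = 10) =>
  db("snapshots")
    .with("ranked", (qb) => {
      void qb
        .from("snapshots")
        .select(
          "id",
          db.raw(`ROW_NUMBER() OVER (PARTITION BY "folderId" ORDER BY "createdAt" DESC) AS row_num`)
        );
    })
    // join the ranked CTE back onto the base table and drop rows past the limit
    .join("ranked", "ranked.id", "snapshots.id")
    .where("ranked.row_num", ">", keep)
    .delete();
```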

View File

@ -386,8 +386,6 @@ export const SECRET_IMPORTS = {
environment: "The slug of the environment to import into.",
path: "The path to import into.",
workspaceId: "The ID of the project you are working in.",
isReplication:
"When true, secrets from the source will be automatically sent to the destination. If approval policies exist at the destination, the secrets will be sent as approval requests instead of being applied immediately.",
import: {
environment: "The slug of the environment to import from.",
path: "The path to import from."
@ -676,8 +674,7 @@ export const INTEGRATION = {
secretGCPLabel: "The label for GCP secrets.",
secretAWSTag: "The tags for AWS secrets.",
kmsKeyId: "The ID of the encryption key from AWS KMS.",
shouldDisableDelete: "The flag to disable deletion of secrets in AWS Parameter Store.",
shouldEnableDelete: "The flag to enable deletion of secrets"
shouldDisableDelete: "The flag to disable deletion of secrets in AWS Parameter Store."
}
},
UPDATE: {

View File

@ -75,7 +75,6 @@ const envSchema = z
.optional()
.default(process.env.URL_GITLAB_LOGIN ?? GITLAB_URL)
), // fallback since URL_GITLAB_LOGIN has been renamed
DEFAULT_SAML_ORG_SLUG: zpStr(z.string().optional()).default(process.env.NEXT_PUBLIC_SAML_ORG_SLUG),
// integration client secrets
// heroku
CLIENT_ID_HEROKU: zpStr(z.string().optional()),
@ -120,8 +119,7 @@ const envSchema = z
.transform((val) => val === "true")
.optional(),
INFISICAL_CLOUD: zodStrBool.default("false"),
MAINTENANCE_MODE: zodStrBool.default("false"),
CAPTCHA_SECRET: zpStr(z.string().optional())
MAINTENANCE_MODE: zodStrBool.default("false")
})
.transform((data) => ({
...data,
@ -133,8 +131,7 @@ const envSchema = z
isSecretScanningConfigured:
Boolean(data.SECRET_SCANNING_GIT_APP_ID) &&
Boolean(data.SECRET_SCANNING_PRIVATE_KEY) &&
Boolean(data.SECRET_SCANNING_WEBHOOK_SECRET),
samlDefaultOrgSlug: data.DEFAULT_SAML_ORG_SLUG
Boolean(data.SECRET_SCANNING_WEBHOOK_SECRET)
}));
let envCfg: Readonly<z.infer<typeof envSchema>>;

View File

@ -824,6 +824,9 @@ export const registerRoutes = async (
const dailyResourceCleanUp = dailyResourceCleanUpQueueServiceFactory({
auditLogDAL,
queueService,
secretVersionDAL,
secretFolderVersionDAL: folderVersionDAL,
snapshotDAL,
identityAccessTokenDAL,
secretSharingDAL
});
@ -919,8 +922,7 @@ export const registerRoutes = async (
emailConfigured: z.boolean().optional(),
inviteOnlySignup: z.boolean().optional(),
redisConfigured: z.boolean().optional(),
secretScanningConfigured: z.boolean().optional(),
samlDefaultOrgSlug: z.string().optional()
secretScanningConfigured: z.boolean().optional()
})
}
},
@ -933,8 +935,7 @@ export const registerRoutes = async (
emailConfigured: cfg.isSmtpConfigured,
inviteOnlySignup: Boolean(serverCfg.allowSignUp),
redisConfigured: cfg.isRedisConfigured,
secretScanningConfigured: cfg.isSecretScanningConfigured,
samlDefaultOrgSlug: cfg.samlDefaultOrgSlug
secretScanningConfigured: cfg.isSecretScanningConfigured
};
}
});

View File

@ -8,7 +8,7 @@ import { writeLimit } from "@app/server/config/rateLimiter";
import { getTelemetryDistinctId } from "@app/server/lib/telemetry";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
import { IntegrationMetadataSchema } from "@app/services/integration/integration-schema";
import { IntegrationMappingBehavior } from "@app/services/integration-auth/integration-list";
import { PostHogEventTypes, TIntegrationCreatedEvent } from "@app/services/telemetry/telemetry-types";
export const registerIntegrationRouter = async (server: FastifyZodProvider) => {
@ -46,7 +46,36 @@ export const registerIntegrationRouter = async (server: FastifyZodProvider) => {
path: z.string().trim().optional().describe(INTEGRATION.CREATE.path),
region: z.string().trim().optional().describe(INTEGRATION.CREATE.region),
scope: z.string().trim().optional().describe(INTEGRATION.CREATE.scope),
metadata: IntegrationMetadataSchema.default({})
metadata: z
.object({
secretPrefix: z.string().optional().describe(INTEGRATION.CREATE.metadata.secretPrefix),
secretSuffix: z.string().optional().describe(INTEGRATION.CREATE.metadata.secretSuffix),
initialSyncBehavior: z.string().optional().describe(INTEGRATION.CREATE.metadata.initialSyncBehavoir),
mappingBehavior: z
.nativeEnum(IntegrationMappingBehavior)
.optional()
.describe(INTEGRATION.CREATE.metadata.mappingBehavior),
shouldAutoRedeploy: z.boolean().optional().describe(INTEGRATION.CREATE.metadata.shouldAutoRedeploy),
secretGCPLabel: z
.object({
labelName: z.string(),
labelValue: z.string()
})
.optional()
.describe(INTEGRATION.CREATE.metadata.secretGCPLabel),
secretAWSTag: z
.array(
z.object({
key: z.string(),
value: z.string()
})
)
.optional()
.describe(INTEGRATION.CREATE.metadata.secretAWSTag),
kmsKeyId: z.string().optional().describe(INTEGRATION.CREATE.metadata.kmsKeyId),
shouldDisableDelete: z.boolean().optional().describe(INTEGRATION.CREATE.metadata.shouldDisableDelete)
})
.default({})
}),
response: {
200: z.object({
@ -132,7 +161,33 @@ export const registerIntegrationRouter = async (server: FastifyZodProvider) => {
targetEnvironment: z.string().trim().describe(INTEGRATION.UPDATE.targetEnvironment),
owner: z.string().trim().describe(INTEGRATION.UPDATE.owner),
environment: z.string().trim().describe(INTEGRATION.UPDATE.environment),
metadata: IntegrationMetadataSchema.optional()
metadata: z
.object({
secretPrefix: z.string().optional().describe(INTEGRATION.CREATE.metadata.secretPrefix),
secretSuffix: z.string().optional().describe(INTEGRATION.CREATE.metadata.secretSuffix),
initialSyncBehavior: z.string().optional().describe(INTEGRATION.CREATE.metadata.initialSyncBehavoir),
mappingBehavior: z.string().optional().describe(INTEGRATION.CREATE.metadata.mappingBehavior),
shouldAutoRedeploy: z.boolean().optional().describe(INTEGRATION.CREATE.metadata.shouldAutoRedeploy),
secretGCPLabel: z
.object({
labelName: z.string(),
labelValue: z.string()
})
.optional()
.describe(INTEGRATION.CREATE.metadata.secretGCPLabel),
secretAWSTag: z
.array(
z.object({
key: z.string(),
value: z.string()
})
)
.optional()
.describe(INTEGRATION.CREATE.metadata.secretAWSTag),
kmsKeyId: z.string().optional().describe(INTEGRATION.CREATE.metadata.kmsKeyId),
shouldDisableDelete: z.boolean().optional().describe(INTEGRATION.CREATE.metadata.shouldDisableDelete)
})
.optional()
}),
response: {
200: z.object({

View File

@ -334,6 +334,44 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
}
});
server.route({
method: "PUT",
url: "/:workspaceSlug/version-limit",
config: {
rateLimit: writeLimit
},
schema: {
params: z.object({
workspaceSlug: z.string().trim()
}),
body: z.object({
pitVersionLimit: z.number().min(1).max(100)
}),
response: {
200: z.object({
message: z.string(),
workspace: ProjectsSchema
})
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const workspace = await server.services.project.updateVersionLimit({
actorId: req.permission.id,
actor: req.permission.type,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
pitVersionLimit: req.body.pitVersionLimit,
workspaceSlug: req.params.workspaceSlug
});
return {
message: "Successfully changed workspace version limit",
workspace
};
}
});
server.route({
method: "GET",
url: "/:workspaceId/integrations",

View File

@ -30,7 +30,7 @@ export const registerSecretImportRouter = async (server: FastifyZodProvider) =>
environment: z.string().trim().describe(SECRET_IMPORTS.CREATE.import.environment),
path: z.string().trim().transform(removeTrailingSlash).describe(SECRET_IMPORTS.CREATE.import.path)
}),
isReplication: z.boolean().default(false).describe(SECRET_IMPORTS.CREATE.isReplication)
isReplication: z.boolean().default(false)
}),
response: {
200: z.object({

View File

@ -80,8 +80,7 @@ export const registerLoginRouter = async (server: FastifyZodProvider) => {
body: z.object({
email: z.string().trim(),
providerAuthToken: z.string().trim().optional(),
clientProof: z.string().trim(),
captchaToken: z.string().trim().optional()
clientProof: z.string().trim()
}),
response: {
200: z.discriminatedUnion("mfaEnabled", [
@ -107,7 +106,6 @@ export const registerLoginRouter = async (server: FastifyZodProvider) => {
const appCfg = getConfig();
const data = await server.services.login.loginExchangeClientProof({
captchaToken: req.body.captchaToken,
email: req.body.email,
ip: req.realIp,
userAgent,

View File

@ -3,7 +3,6 @@ import jwt from "jsonwebtoken";
import { TUsers, UserDeviceSchema } from "@app/db/schemas";
import { isAuthMethodSaml } from "@app/ee/services/permission/permission-fns";
import { getConfig } from "@app/lib/config/env";
import { request } from "@app/lib/config/request";
import { generateSrpServerKey, srpCheckClientProof } from "@app/lib/crypto";
import { BadRequestError, DatabaseError, UnauthorizedError } from "@app/lib/errors";
import { getServerCfg } from "@app/services/super-admin/super-admin-service";
@ -177,16 +176,12 @@ export const authLoginServiceFactory = ({
clientProof,
ip,
userAgent,
providerAuthToken,
captchaToken
providerAuthToken
}: TLoginClientProofDTO) => {
const appCfg = getConfig();
const userEnc = await userDAL.findUserEncKeyByUsername({
username: email
});
if (!userEnc) throw new Error("Failed to find user");
const user = await userDAL.findById(userEnc.userId);
const cfg = getConfig();
let authMethod = AuthMethod.EMAIL;
@ -201,31 +196,6 @@ export const authLoginServiceFactory = ({
}
}
if (
user.consecutiveFailedPasswordAttempts &&
user.consecutiveFailedPasswordAttempts >= 10 &&
Boolean(appCfg.CAPTCHA_SECRET)
) {
if (!captchaToken) {
throw new BadRequestError({
name: "Captcha Required",
message: "Accomplish the required captcha by logging in via Web"
});
}
// validate captcha token
const response = await request.postForm<{ success: boolean }>("https://api.hcaptcha.com/siteverify", {
response: captchaToken,
secret: appCfg.CAPTCHA_SECRET
});
if (!response.data.success) {
throw new BadRequestError({
name: "Invalid Captcha"
});
}
}
if (!userEnc.serverPrivateKey || !userEnc.clientPublicKey) throw new Error("Failed to authenticate. Try again?");
const isValidClientProof = await srpCheckClientProof(
userEnc.salt,
@ -234,31 +204,15 @@ export const authLoginServiceFactory = ({
userEnc.clientPublicKey,
clientProof
);
if (!isValidClientProof) {
await userDAL.update(
{ id: userEnc.userId },
{
$incr: {
consecutiveFailedPasswordAttempts: 1
}
}
);
throw new Error("Failed to authenticate. Try again?");
}
if (!isValidClientProof) throw new Error("Failed to authenticate. Try again?");
await userDAL.updateUserEncryptionByUserId(userEnc.userId, {
serverPrivateKey: null,
clientPublicKey: null
});
await userDAL.updateById(userEnc.userId, {
consecutiveFailedPasswordAttempts: 0
});
// send multi factor auth token if they have it enabled
if (userEnc.isMfaEnabled && userEnc.email) {
const user = await userDAL.findById(userEnc.userId);
enforceUserLockStatus(Boolean(user.isLocked), user.temporaryLockDateEnd);
const mfaToken = jwt.sign(

View File

@ -12,7 +12,6 @@ export type TLoginClientProofDTO = {
providerAuthToken?: string;
ip: string;
userAgent: string;
captchaToken?: string;
};
export type TVerifyMfaTokenDTO = {

View File

@ -31,7 +31,6 @@ import { logger } from "@app/lib/logger";
import { TCreateManySecretsRawFn, TUpdateManySecretsRawFn } from "@app/services/secret/secret-types";
import { TIntegrationDALFactory } from "../integration/integration-dal";
import { IntegrationMetadataSchema } from "../integration/integration-schema";
import {
IntegrationInitialSyncBehavior,
IntegrationMappingBehavior,
@ -1364,8 +1363,6 @@ const syncSecretsGitHub = async ({
}
}
const metadata = IntegrationMetadataSchema.parse(integration.metadata);
if (metadata.shouldEnableDelete) {
for await (const encryptedSecret of encryptedSecrets) {
if (
!(encryptedSecret.name in secrets) &&
@ -1402,7 +1399,6 @@ const syncSecretsGitHub = async ({
}
}
}
}
await sodium.ready.then(async () => {
for await (const key of Object.keys(secrets)) {

View File

@ -1,35 +0,0 @@
import { z } from "zod";
import { INTEGRATION } from "@app/lib/api-docs";
import { IntegrationMappingBehavior } from "../integration-auth/integration-list";
export const IntegrationMetadataSchema = z.object({
secretPrefix: z.string().optional().describe(INTEGRATION.CREATE.metadata.secretPrefix),
secretSuffix: z.string().optional().describe(INTEGRATION.CREATE.metadata.secretSuffix),
initialSyncBehavior: z.string().optional().describe(INTEGRATION.CREATE.metadata.initialSyncBehavoir),
mappingBehavior: z
.nativeEnum(IntegrationMappingBehavior)
.optional()
.describe(INTEGRATION.CREATE.metadata.mappingBehavior),
shouldAutoRedeploy: z.boolean().optional().describe(INTEGRATION.CREATE.metadata.shouldAutoRedeploy),
secretGCPLabel: z
.object({
labelName: z.string(),
labelValue: z.string()
})
.optional()
.describe(INTEGRATION.CREATE.metadata.secretGCPLabel),
secretAWSTag: z
.array(
z.object({
key: z.string(),
value: z.string()
})
)
.optional()
.describe(INTEGRATION.CREATE.metadata.secretAWSTag),
kmsKeyId: z.string().optional().describe(INTEGRATION.CREATE.metadata.kmsKeyId),
shouldDisableDelete: z.boolean().optional().describe(INTEGRATION.CREATE.metadata.shouldDisableDelete),
shouldEnableDelete: z.boolean().optional().describe(INTEGRATION.CREATE.metadata.shouldEnableDelete)
});

View File

@ -29,7 +29,6 @@ export type TCreateIntegrationDTO = {
}[];
kmsKeyId?: string;
shouldDisableDelete?: boolean;
shouldEnableDelete?: boolean;
};
} & Omit<TProjectPermission, "projectId">;
@ -55,7 +54,6 @@ export type TUpdateIntegrationDTO = {
}[];
kmsKeyId?: string;
shouldDisableDelete?: boolean;
shouldEnableDelete?: boolean;
};
} & Omit<TProjectPermission, "projectId">;

View File

@ -77,7 +77,7 @@ export const kmsServiceFactory = ({ kmsDAL, kmsRootConfigDAL, keyStore }: TKmsSe
// This will switch to a seal process and HMS flow in future
const encryptionKey = appCfg.ENCRYPTION_KEY || appCfg.ROOT_ENCRYPTION_KEY;
// if the root key is used, it is base64 encoded
const isBase64 = !appCfg.ENCRYPTION_KEY;
const isBase64 = Boolean(appCfg.ROOT_ENCRYPTION_KEY);
if (!encryptionKey) throw new Error("Root encryption key not found for KMS service.");
const encryptionKeyBuffer = Buffer.from(encryptionKey, isBase64 ? "base64" : "utf8");

View File

@ -39,6 +39,7 @@ import {
TToggleProjectAutoCapitalizationDTO,
TUpdateProjectDTO,
TUpdateProjectNameDTO,
TUpdateProjectVersionLimitDTO,
TUpgradeProjectDTO
} from "./project-types";
@ -133,7 +134,8 @@ export const projectServiceFactory = ({
name: workspaceName,
orgId: organization.id,
slug: projectSlug || slugify(`${workspaceName}-${alphaNumericNanoId(4)}`),
version: ProjectVersion.V2
version: ProjectVersion.V2,
pitVersionLimit: 10
},
tx
);
@ -406,6 +408,35 @@ export const projectServiceFactory = ({
return updatedProject;
};
const updateVersionLimit = async ({
actor,
actorId,
actorOrgId,
actorAuthMethod,
pitVersionLimit,
workspaceSlug
}: TUpdateProjectVersionLimitDTO) => {
const project = await projectDAL.findProjectBySlug(workspaceSlug, actorOrgId);
if (!project) {
throw new BadRequestError({
message: "Project not found"
});
}
const { hasRole } = await permissionService.getProjectPermission(
actor,
actorId,
project.id,
actorAuthMethod,
actorOrgId
);
if (!hasRole(ProjectMembershipRole.Admin))
throw new BadRequestError({ message: "Only admins are allowed to take this action" });
return projectDAL.updateById(project.id, { pitVersionLimit });
};
const updateName = async ({
projectId,
actor,
@ -501,6 +532,7 @@ export const projectServiceFactory = ({
getAProject,
toggleAutoCapitalization,
updateName,
upgradeProject
upgradeProject,
updateVersionLimit
};
};

View File

@ -43,6 +43,11 @@ export type TToggleProjectAutoCapitalizationDTO = {
autoCapitalization: boolean;
} & TProjectPermission;
export type TUpdateProjectVersionLimitDTO = {
pitVersionLimit: number;
workspaceSlug: string;
} & Omit<TProjectPermission, "projectId">;
export type TUpdateProjectNameDTO = {
name: string;
} & TProjectPermission;

View File

@ -1,13 +1,19 @@
import { TAuditLogDALFactory } from "@app/ee/services/audit-log/audit-log-dal";
import { TSnapshotDALFactory } from "@app/ee/services/secret-snapshot/snapshot-dal";
import { logger } from "@app/lib/logger";
import { QueueJobs, QueueName, TQueueServiceFactory } from "@app/queue";
import { TIdentityAccessTokenDALFactory } from "../identity-access-token/identity-access-token-dal";
import { TSecretVersionDALFactory } from "../secret/secret-version-dal";
import { TSecretFolderVersionDALFactory } from "../secret-folder/secret-folder-version-dal";
import { TSecretSharingDALFactory } from "../secret-sharing/secret-sharing-dal";
type TDailyResourceCleanUpQueueServiceFactoryDep = {
auditLogDAL: Pick<TAuditLogDALFactory, "pruneAuditLog">;
identityAccessTokenDAL: Pick<TIdentityAccessTokenDALFactory, "removeExpiredTokens">;
secretVersionDAL: Pick<TSecretVersionDALFactory, "pruneExcessVersions">;
secretFolderVersionDAL: Pick<TSecretFolderVersionDALFactory, "pruneExcessVersions">;
snapshotDAL: Pick<TSnapshotDALFactory, "pruneExcessSnapshots">;
secretSharingDAL: Pick<TSecretSharingDALFactory, "pruneExpiredSharedSecrets">;
queueService: TQueueServiceFactory;
};
@ -17,6 +23,9 @@ export type TDailyResourceCleanUpQueueServiceFactory = ReturnType<typeof dailyRe
export const dailyResourceCleanUpQueueServiceFactory = ({
auditLogDAL,
queueService,
snapshotDAL,
secretVersionDAL,
secretFolderVersionDAL,
identityAccessTokenDAL,
secretSharingDAL
}: TDailyResourceCleanUpQueueServiceFactoryDep) => {
@ -25,6 +34,9 @@ export const dailyResourceCleanUpQueueServiceFactory = ({
await auditLogDAL.pruneAuditLog();
await identityAccessTokenDAL.removeExpiredTokens();
await secretSharingDAL.pruneExpiredSharedSecrets();
await snapshotDAL.pruneExcessSnapshots();
await secretVersionDAL.pruneExcessVersions();
await secretFolderVersionDAL.pruneExcessVersions();
logger.info(`${QueueName.DailyResourceCleanUp}: queue task completed`);
});

View File

@ -62,5 +62,32 @@ export const secretFolderVersionDALFactory = (db: TDbClient) => {
}
};
return { ...secretFolderVerOrm, findLatestFolderVersions, findLatestVersionByFolderId };
const pruneExcessVersions = async () => {
try {
await db(TableName.SecretFolderVersion)
.with("folder_cte", (qb) => {
void qb
.from(TableName.SecretFolderVersion)
.select(
"id",
"folderId",
db.raw(
`ROW_NUMBER() OVER (PARTITION BY ${TableName.SecretFolderVersion}."folderId" ORDER BY ${TableName.SecretFolderVersion}."createdAt" DESC) AS row_num`
)
);
})
.join(TableName.Environment, `${TableName.Environment}.id`, `${TableName.SecretFolderVersion}.envId`)
.join(TableName.Project, `${TableName.Project}.id`, `${TableName.Environment}.projectId`)
.join("folder_cte", "folder_cte.id", `${TableName.SecretFolderVersion}.id`)
.whereRaw(`folder_cte.row_num > ${TableName.Project}."pitVersionLimit"`)
.delete();
} catch (error) {
throw new DatabaseError({
error,
name: "Secret Folder Version Prune"
});
}
};
return { ...secretFolderVerOrm, findLatestFolderVersions, findLatestVersionByFolderId, pruneExcessVersions };
};

View File

@ -309,7 +309,7 @@ export const interpolateSecrets = ({ projectId, secretEncKey, secretDAL, folderD
};
const expandSecrets = async (
secrets: Record<string, { value: string; comment?: string; skipMultilineEncoding?: boolean | null }>
secrets: Record<string, { value: string; comment?: string; skipMultilineEncoding?: boolean }>
) => {
const expandedSec: Record<string, string> = {};
const interpolatedSec: Record<string, string> = {};
@ -329,8 +329,8 @@ export const interpolateSecrets = ({ projectId, secretEncKey, secretDAL, folderD
// should not do multi line encoding if user has set it to skip
// eslint-disable-next-line
secrets[key].value = secrets[key].skipMultilineEncoding
? formatMultiValueEnv(expandedSec[key])
: expandedSec[key];
? expandedSec[key]
: formatMultiValueEnv(expandedSec[key]);
// eslint-disable-next-line
continue;
}
@ -347,7 +347,7 @@ export const interpolateSecrets = ({ projectId, secretEncKey, secretDAL, folderD
);
// eslint-disable-next-line
secrets[key].value = secrets[key].skipMultilineEncoding ? formatMultiValueEnv(expandedVal) : expandedVal;
secrets[key].value = secrets[key].skipMultilineEncoding ? expandedVal : formatMultiValueEnv(expandedVal);
}
return secrets;
@ -395,8 +395,7 @@ export const decryptSecretRaw = (
type: secret.type,
_id: secret.id,
id: secret.id,
user: secret.userId,
skipMultilineEncoding: secret.skipMultilineEncoding
user: secret.userId
};
};

View File

@ -67,10 +67,7 @@ const MAX_SYNC_SECRET_DEPTH = 5;
export const uniqueSecretQueueKey = (environment: string, secretPath: string) =>
`secret-queue-dedupe-${environment}-${secretPath}`;
type TIntegrationSecret = Record<
string,
{ value: string; comment?: string; skipMultilineEncoding?: boolean | null | undefined }
>;
type TIntegrationSecret = Record<string, { value: string; comment?: string; skipMultilineEncoding?: boolean }>;
export const secretQueueFactory = ({
queueService,
integrationDAL,

View File

@ -971,24 +971,10 @@ export const secretServiceFactory = ({
});
const batchSecretsExpand = async (
secretBatch: {
secretKey: string;
secretValue: string;
secretComment?: string;
secretPath: string;
skipMultilineEncoding: boolean | null | undefined;
}[]
secretBatch: { secretKey: string; secretValue: string; secretComment?: string; secretPath: string }[]
) => {
// Group secrets by secretPath
const secretsByPath: Record<
string,
{
secretKey: string;
secretValue: string;
secretComment?: string;
skipMultilineEncoding: boolean | null | undefined;
}[]
> = {};
const secretsByPath: Record<string, { secretKey: string; secretValue: string; secretComment?: string }[]> = {};
secretBatch.forEach((secret) => {
if (!secretsByPath[secret.secretPath]) {
@ -1004,15 +990,11 @@ export const secretServiceFactory = ({
continue;
}
const secretRecord: Record<
string,
{ value: string; comment?: string; skipMultilineEncoding: boolean | null | undefined }
> = {};
const secretRecord: Record<string, { value: string; comment?: string; skipMultilineEncoding?: boolean }> = {};
secretsByPath[secPath].forEach((decryptedSecret) => {
secretRecord[decryptedSecret.secretKey] = {
value: decryptedSecret.secretValue,
comment: decryptedSecret.secretComment,
skipMultilineEncoding: decryptedSecret.skipMultilineEncoding
comment: decryptedSecret.secretComment
};
});

View File

@ -111,8 +111,37 @@ export const secretVersionDALFactory = (db: TDbClient) => {
}
};
const pruneExcessVersions = async () => {
try {
await db(TableName.SecretVersion)
.with("version_cte", (qb) => {
void qb
.from(TableName.SecretVersion)
.select(
"id",
"folderId",
db.raw(
`ROW_NUMBER() OVER (PARTITION BY ${TableName.SecretVersion}."secretId" ORDER BY ${TableName.SecretVersion}."createdAt" DESC) AS row_num`
)
);
})
.join(TableName.SecretFolder, `${TableName.SecretFolder}.id`, `${TableName.SecretVersion}.folderId`)
.join(TableName.Environment, `${TableName.Environment}.id`, `${TableName.SecretFolder}.envId`)
.join(TableName.Project, `${TableName.Project}.id`, `${TableName.Environment}.projectId`)
.join("version_cte", "version_cte.id", `${TableName.SecretVersion}.id`)
.whereRaw(`version_cte.row_num > ${TableName.Project}."pitVersionLimit"`)
.delete();
} catch (error) {
throw new DatabaseError({
error,
name: "Secret Version Prune"
});
}
};
return {
...secretVersionOrm,
pruneExcessVersions,
findLatestVersionMany,
bulkUpdate,
findLatestVersionByFolderId,

View File

@ -318,11 +318,6 @@ SMTP_FROM_NAME=Infisical
By default, users can only log in via the email/password login method.
To log into Infisical with OAuth providers such as Google, configure the associated variables.
<ParamField query="DEFAULT_SAML_ORG_SLUG" type="string">
When set, all visits to the Infisical login page will automatically redirect users of your Infisical instance to the SAML identity provider associated with the specified organization slug.
</ParamField>
<Accordion title="Google">
Follow detailed guide to configure [Google SSO](/documentation/platform/sso/google)
@ -374,6 +369,11 @@ To login into Infisical with OAuth providers such as Google, configure the assoc
information.
</Accordion>
<ParamField query="NEXT_PUBLIC_SAML_ORG_SLUG" type="string">
Configure SAML organization slug to automatically redirect all users of your
Infisical instance to the identity provider.
</ParamField>
## Native secret integrations
To help you sync secrets from Infisical to services such as Github and Gitlab, Infisical provides native integrations out of the box.

View File

@ -2,7 +2,6 @@ ARG POSTHOG_HOST=https://app.posthog.com
ARG POSTHOG_API_KEY=posthog-api-key
ARG INTERCOM_ID=intercom-id
ARG NEXT_INFISICAL_PLATFORM_VERSION=next-infisical-platform-version
ARG CAPTCHA_SITE_KEY=captcha-site-key
FROM node:16-alpine AS deps
# Install dependencies only when needed. Check https://github.com/nodejs/docker-node/tree/b4117f9333da4138b03a546ec926ef50a31506c3#nodealpine to understand why libc6-compat might be needed.
@ -32,8 +31,6 @@ ARG POSTHOG_API_KEY
ENV NEXT_PUBLIC_POSTHOG_API_KEY $POSTHOG_API_KEY
ARG INTERCOM_ID
ENV NEXT_PUBLIC_INTERCOM_ID $INTERCOM_ID
ARG CAPTCHA_SITE_KEY
ENV NEXT_PUBLIC_CAPTCHA_SITE_KEY $CAPTCHA_SITE_KEY
# Build
RUN npm run build
@ -60,9 +57,7 @@ ENV NEXT_PUBLIC_SAML_ORG_SLUG=$SAML_ORG_SLUG \
BAKED_NEXT_PUBLIC_SAML_ORG_SLUG=$SAML_ORG_SLUG
ARG NEXT_INFISICAL_PLATFORM_VERSION
ENV NEXT_PUBLIC_INFISICAL_PLATFORM_VERSION=$NEXT_INFISICAL_PLATFORM_VERSION
ARG CAPTCHA_SITE_KEY
ENV NEXT_PUBLIC_CAPTCHA_SITE_KEY=$CAPTCHA_SITE_KEY \
BAKED_NEXT_PUBLIC_CAPTCHA_SITE_KEY=$CAPTCHA_SITE_KEY
COPY --chown=nextjs:nodejs --chmod=555 scripts ./scripts
COPY --from=builder /app/public ./public
RUN chown nextjs:nodejs ./public/data

View File

@ -1,12 +1,13 @@
const path = require("path");
const ContentSecurityPolicy = `
default-src 'self';
script-src 'self' https://app.posthog.com https://js.stripe.com https://api.stripe.com https://widget.intercom.io https://js.intercomcdn.com https://hcaptcha.com https://*.hcaptcha.com 'unsafe-inline' 'unsafe-eval';
style-src 'self' https://rsms.me 'unsafe-inline' https://hcaptcha.com https://*.hcaptcha.com;
script-src 'self' https://app.posthog.com https://js.stripe.com https://api.stripe.com https://widget.intercom.io https://js.intercomcdn.com 'unsafe-inline' 'unsafe-eval';
style-src 'self' https://rsms.me 'unsafe-inline';
child-src https://api.stripe.com;
frame-src https://js.stripe.com/ https://api.stripe.com https://www.youtube.com/ https://hcaptcha.com https://*.hcaptcha.com;
connect-src 'self' wss://nexus-websocket-a.intercom.io https://api-iam.intercom.io https://api.heroku.com/ https://id.heroku.com/oauth/authorize https://id.heroku.com/oauth/token https://checkout.stripe.com https://app.posthog.com https://api.stripe.com https://api.pwnedpasswords.com http://127.0.0.1:* https://hcaptcha.com https://*.hcaptcha.com;
frame-src https://js.stripe.com/ https://api.stripe.com https://www.youtube.com/;
connect-src 'self' wss://nexus-websocket-a.intercom.io https://api-iam.intercom.io https://api.heroku.com/ https://id.heroku.com/oauth/authorize https://id.heroku.com/oauth/token https://checkout.stripe.com https://app.posthog.com https://api.stripe.com https://api.pwnedpasswords.com http://127.0.0.1:*;
img-src 'self' https://static.intercomassets.com https://js.intercomcdn.com https://downloads.intercomcdn.com https://*.stripe.com https://i.ytimg.com/ data:;
media-src https://js.intercomcdn.com;
font-src 'self' https://fonts.intercomcdn.com/ https://maxcdn.bootstrapcdn.com https://rsms.me https://fonts.gstatic.com;

View File

@ -4,6 +4,7 @@
"requires": true,
"packages": {
"": {
"name": "frontend",
"dependencies": {
"@casl/ability": "^6.5.0",
"@casl/react": "^3.1.0",
@ -18,7 +19,6 @@
"@fortawesome/free-regular-svg-icons": "^6.1.1",
"@fortawesome/free-solid-svg-icons": "^6.1.2",
"@fortawesome/react-fontawesome": "^0.2.0",
"@hcaptcha/react-hcaptcha": "^1.10.1",
"@headlessui/react": "^1.7.7",
"@hookform/resolvers": "^2.9.10",
"@octokit/rest": "^19.0.7",
@ -3200,24 +3200,6 @@
"react": ">=16.3"
}
},
"node_modules/@hcaptcha/loader": {
"version": "1.2.4",
"resolved": "https://registry.npmjs.org/@hcaptcha/loader/-/loader-1.2.4.tgz",
"integrity": "sha512-3MNrIy/nWBfyVVvMPBKdKrX7BeadgiimW0AL/a/8TohNtJqxoySKgTJEXOQvYwlHemQpUzFrIsK74ody7JiMYw=="
},
"node_modules/@hcaptcha/react-hcaptcha": {
"version": "1.10.1",
"resolved": "https://registry.npmjs.org/@hcaptcha/react-hcaptcha/-/react-hcaptcha-1.10.1.tgz",
"integrity": "sha512-P0en4gEZAecah7Pt3WIaJO2gFlaLZKkI0+Tfdg8fNqsDxqT9VytZWSkH4WAkiPRULK1QcGgUZK+J56MXYmPifw==",
"dependencies": {
"@babel/runtime": "^7.17.9",
"@hcaptcha/loader": "^1.2.1"
},
"peerDependencies": {
"react": ">= 16.3.0",
"react-dom": ">= 16.3.0"
}
},
"node_modules/@headlessui/react": {
"version": "1.7.18",
"resolved": "https://registry.npmjs.org/@headlessui/react/-/react-1.7.18.tgz",

View File

@ -26,7 +26,6 @@
"@fortawesome/free-regular-svg-icons": "^6.1.1",
"@fortawesome/free-solid-svg-icons": "^6.1.2",
"@fortawesome/react-fontawesome": "^0.2.0",
"@hcaptcha/react-hcaptcha": "^1.10.1",
"@headlessui/react": "^1.7.7",
"@hookform/resolvers": "^2.9.10",
"@octokit/rest": "^19.0.7",

View File

@ -4,7 +4,7 @@ scripts/replace-standalone-build-variable.sh "$BAKED_NEXT_PUBLIC_POSTHOG_API_KEY
scripts/replace-standalone-build-variable.sh "$BAKED_NEXT_PUBLIC_INTERCOM_ID" "$NEXT_PUBLIC_INTERCOM_ID"
scripts/replace-standalone-build-variable.sh "$BAKED_NEXT_PUBLIC_CAPTCHA_SITE_KEY" "$NEXT_PUBLIC_CAPTCHA_SITE_KEY"
scripts/replace-standalone-build-variable.sh "$BAKED_NEXT_PUBLIC_SAML_ORG_SLUG" "$NEXT_PUBLIC_SAML_ORG_SLUG"
if [ "$TELEMETRY_ENABLED" != "false" ]; then
echo "Telemetry is enabled"

View File

@ -6,8 +6,6 @@ scripts/replace-variable.sh "$BAKED_NEXT_PUBLIC_INTERCOM_ID" "$NEXT_PUBLIC_INTER
scripts/replace-variable.sh "$BAKED_NEXT_SAML_ORG_SLUG" "$NEXT_PUBLIC_SAML_ORG_SLUG"
scripts/replace-variable.sh "$BAKED_NEXT_PUBLIC_CAPTCHA_SITE_KEY" "$NEXT_PUBLIC_CAPTCHA_SITE_KEY"
if [ "$TELEMETRY_ENABLED" != "false" ]; then
echo "Telemetry is enabled"
scripts/set-telemetry.sh true

View File

@ -30,13 +30,11 @@ export interface IsCliLoginSuccessful {
const attemptLogin = async ({
email,
password,
providerAuthToken,
captchaToken
providerAuthToken
}: {
email: string;
password: string;
providerAuthToken?: string;
captchaToken?: string;
}): Promise<IsCliLoginSuccessful> => {
const telemetry = new Telemetry().getInstance();
return new Promise((resolve, reject) => {
@ -72,8 +70,7 @@ const attemptLogin = async ({
} = await login2({
email,
clientProof,
providerAuthToken,
captchaToken
providerAuthToken
});
if (mfaEnabled) {
// case: MFA is enabled

View File

@ -22,13 +22,11 @@ interface IsLoginSuccessful {
const attemptLogin = async ({
email,
password,
providerAuthToken,
captchaToken
providerAuthToken
}: {
email: string;
password: string;
providerAuthToken?: string;
captchaToken?: string;
}): Promise<IsLoginSuccessful> => {
const telemetry = new Telemetry().getInstance();
// eslint-disable-next-line new-cap
@ -60,7 +58,6 @@ const attemptLogin = async ({
iv,
tag
} = await login2({
captchaToken,
email,
clientProof,
providerAuthToken

View File

@ -2,6 +2,5 @@ const ENV = process.env.NEXT_PUBLIC_ENV! || "development"; // investigate
const POSTHOG_API_KEY = process.env.NEXT_PUBLIC_POSTHOG_API_KEY!;
const POSTHOG_HOST = process.env.NEXT_PUBLIC_POSTHOG_HOST! || "https://app.posthog.com";
const INTERCOMid = process.env.NEXT_PUBLIC_INTERCOMid!;
const CAPTCHA_SITE_KEY = process.env.NEXT_PUBLIC_CAPTCHA_SITE_KEY!;
export { CAPTCHA_SITE_KEY, ENV, INTERCOMid, POSTHOG_API_KEY, POSTHOG_HOST };
export { ENV, INTERCOMid, POSTHOG_API_KEY, POSTHOG_HOST };

View File

@ -30,7 +30,6 @@ export type Login1DTO = {
};
export type Login2DTO = {
captchaToken?: string;
email: string;
clientProof: string;
providerAuthToken?: string;

View File

@ -73,7 +73,6 @@ export const useCreateIntegration = () => {
}[];
kmsKeyId?: string;
shouldDisableDelete?: boolean;
shouldEnableDelete?: boolean;
};
}) => {
const {

View File

@ -4,5 +4,4 @@ export type ServerStatus = {
emailConfigured: boolean;
secretScanningConfigured: boolean;
redisConfigured: boolean;
samlDefaultOrgSlug: boolean
};

View File

@ -20,6 +20,7 @@ import {
TUpdateWorkspaceIdentityRoleDTO,
TUpdateWorkspaceUserRoleDTO,
UpdateEnvironmentDTO,
UpdatePitVersionLimitDTO,
Workspace
} from "./types";
@ -249,6 +250,21 @@ export const useToggleAutoCapitalization = () => {
});
};
export const useUpdateWorkspaceVersionLimit = () => {
const queryClient = useQueryClient();
return useMutation<{}, {}, UpdatePitVersionLimitDTO>({
mutationFn: ({ projectSlug, pitVersionLimit }) => {
return apiRequest.put(`/api/v1/workspace/${projectSlug}/version-limit`, {
pitVersionLimit
});
},
onSuccess: () => {
queryClient.invalidateQueries(workspaceKeys.getAllUserWorkspace);
}
});
};
export const useDeleteWorkspace = () => {
const queryClient = useQueryClient();
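
A minimal usage sketch for the new `useUpdateWorkspaceVersionLimit` mutation, assuming the `useWorkspace` context wiring shown elsewhere in this diff; the settings section added below does the same thing with a full form, and the limit value here is hypothetical.

```tsx
import { useWorkspace } from "@app/context";
import { useUpdateWorkspaceVersionLimit } from "@app/hooks/api/workspace/queries";

export const VersionLimitButton = () => {
  const { currentWorkspace } = useWorkspace();
  const { mutateAsync: updateVersionLimit, isLoading } = useUpdateWorkspaceVersionLimit();

  // Keep only the 20 most recent versions per secret/folder (placeholder value, 1-100 allowed).
  const handleSave = async () => {
    if (!currentWorkspace) return;
    await updateVersionLimit({ projectSlug: currentWorkspace.slug, pitVersionLimit: 20 });
  };

  return (
    <button type="button" disabled={isLoading} onClick={handleSave}>
      Keep last 20 versions
    </button>
  );
};
```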

View File

@ -16,6 +16,7 @@ export type Workspace = {
upgradeStatus: string | null;
autoCapitalization: boolean;
environments: WorkspaceEnv[];
pitVersionLimit: number;
slug: string;
};
@ -48,6 +49,7 @@ export type CreateWorkspaceDTO = {
};
export type RenameWorkspaceDTO = { workspaceID: string; newWorkspaceName: string };
export type UpdatePitVersionLimitDTO = { projectSlug: string; pitVersionLimit: number };
export type ToggleAutoCapitalizationDTO = { workspaceID: string; state: boolean };
export type DeleteWorkspaceDTO = { workspaceID: string };

View File

@ -33,7 +33,6 @@ import {
Input,
Select,
SelectItem,
Switch,
Tab,
TabList,
TabPanel,
@ -60,7 +59,7 @@ const schema = yup.object({
selectedSourceEnvironment: yup.string().trim().required("Project Environment is required"),
secretPath: yup.string().trim().required("Secrets Path is required"),
secretSuffix: yup.string().trim().optional(),
shouldEnableDelete: yup.boolean().optional(),
scope: yup.mixed<TargetEnv>().oneOf(targetEnv.slice()).required(),
repoIds: yup.mixed().when("scope", {
@ -100,6 +99,7 @@ export default function GitHubCreateIntegrationPage() {
const router = useRouter();
const { mutateAsync } = useCreateIntegration();
const integrationAuthId =
(queryString.parse(router.asPath.split("?")[1]).integrationAuthId as string) ?? "";
@ -120,8 +120,7 @@ export default function GitHubCreateIntegrationPage() {
defaultValues: {
secretPath: "/",
scope: "github-repo",
repoIds: [],
shouldEnableDelete: false
repoIds: []
}
});
@ -178,8 +177,7 @@ export default function GitHubCreateIntegrationPage() {
app: targetApp.name, // repo name
owner: targetApp.owner, // repo owner
metadata: {
secretSuffix: data.secretSuffix,
shouldEnableDelete: data.shouldEnableDelete
secretSuffix: data.secretSuffix
}
});
})
@ -196,8 +194,7 @@ export default function GitHubCreateIntegrationPage() {
scope: data.scope,
owner: integrationAuthOrgs?.find((e) => e.orgId === data.orgId)?.name,
metadata: {
secretSuffix: data.secretSuffix,
shouldEnableDelete: data.shouldEnableDelete
secretSuffix: data.secretSuffix
}
});
break;
@ -214,8 +211,7 @@ export default function GitHubCreateIntegrationPage() {
owner: repoOwner,
targetEnvironmentId: data.envId,
metadata: {
secretSuffix: data.secretSuffix,
shouldEnableDelete: data.shouldEnableDelete
secretSuffix: data.secretSuffix
}
});
break;
@ -550,21 +546,6 @@ export default function GitHubCreateIntegrationPage() {
animate={{ opacity: 1, translateX: 0 }}
exit={{ opacity: 0, translateX: 30 }}
>
<div className="ml-1 mb-5">
<Controller
control={control}
name="shouldEnableDelete"
render={({ field: { onChange, value } }) => (
<Switch
id="delete-github-option"
onCheckedChange={(isChecked) => onChange(isChecked)}
isChecked={value}
>
Delete secrets in Github that are not in Infisical
</Switch>
)}
/>
</div>
<Controller
control={control}
name="secretSuffix"

View File

@ -1,20 +1,17 @@
import { FormEvent, useEffect, useRef, useState } from "react";
import { FormEvent, useEffect, useState } from "react";
import { useTranslation } from "react-i18next";
import Link from "next/link";
import { useRouter } from "next/router";
import { faGithub, faGitlab, faGoogle } from "@fortawesome/free-brands-svg-icons";
import { faLock } from "@fortawesome/free-solid-svg-icons";
import { FontAwesomeIcon } from "@fortawesome/react-fontawesome";
import HCaptcha from "@hcaptcha/react-hcaptcha";
import Error from "@app/components/basic/Error";
import { createNotification } from "@app/components/notifications";
import attemptCliLogin from "@app/components/utilities/attemptCliLogin";
import attemptLogin from "@app/components/utilities/attemptLogin";
import { CAPTCHA_SITE_KEY } from "@app/components/utilities/config";
import { Button, Input } from "@app/components/v2";
import { useServerConfig } from "@app/context";
import { useFetchServerStatus } from "@app/hooks/api";
import { navigateUserToSelectOrg } from "../../Login.utils";
@ -34,18 +31,21 @@ export const InitialStep = ({ setStep, email, setEmail, password, setPassword }:
const [loginError, setLoginError] = useState(false);
const { config } = useServerConfig();
const queryParams = new URLSearchParams(window.location.search);
const [captchaToken, setCaptchaToken] = useState("");
const [shouldShowCaptcha, setShouldShowCaptcha] = useState(false);
const captchaRef = useRef<HCaptcha>(null);
const { data: serverDetails } = useFetchServerStatus();
useEffect(() => {
if (serverDetails?.samlDefaultOrgSlug){
if (
process.env.NEXT_PUBLIC_SAML_ORG_SLUG &&
process.env.NEXT_PUBLIC_SAML_ORG_SLUG !== "saml-org-slug-default"
) {
const callbackPort = queryParams.get("callback_port");
const redirectUrl = `/api/v1/sso/redirect/saml2/organizations/${serverDetails?.samlDefaultOrgSlug}${callbackPort ? `?callback_port=${callbackPort}` : ""}`
router.push(redirectUrl);
window.open(
`/api/v1/sso/redirect/saml2/organizations/${process.env.NEXT_PUBLIC_SAML_ORG_SLUG}${
callbackPort ? `?callback_port=${callbackPort}` : ""
}`
);
window.close();
}
}, [serverDetails?.samlDefaultOrgSlug]);
}, []);
const handleLogin = async (e: FormEvent<HTMLFormElement>) => {
e.preventDefault();
@ -61,8 +61,7 @@ export const InitialStep = ({ setStep, email, setEmail, password, setPassword }:
// attemptCliLogin
const isCliLoginSuccessful = await attemptCliLogin({
email: email.toLowerCase(),
password,
captchaToken
password
});
if (isCliLoginSuccessful && isCliLoginSuccessful.success) {
@ -84,8 +83,7 @@ export const InitialStep = ({ setStep, email, setEmail, password, setPassword }:
} else {
const isLoginSuccessful = await attemptLogin({
email: email.toLowerCase(),
password,
captchaToken
password
});
if (isLoginSuccessful && isLoginSuccessful.success) {
@ -119,12 +117,6 @@ export const InitialStep = ({ setStep, email, setEmail, password, setPassword }:
return;
}
if (err.response.data.error === "Captcha Required") {
setShouldShowCaptcha(true);
setIsLoading(false);
return;
}
setLoginError(true);
createNotification({
text: "Login unsuccessful. Double-check your credentials and try again.",
@ -132,11 +124,6 @@ export const InitialStep = ({ setStep, email, setEmail, password, setPassword }:
});
}
if (captchaRef.current) {
captchaRef.current.resetCaptcha();
}
setCaptchaToken("");
setIsLoading(false);
};
@ -258,19 +245,8 @@ export const InitialStep = ({ setStep, email, setEmail, password, setPassword }:
className="select:-webkit-autofill:focus h-10"
/>
</div>
{shouldShowCaptcha && (
<div className="mt-4">
<HCaptcha
theme="dark"
sitekey={CAPTCHA_SITE_KEY}
onVerify={(token) => setCaptchaToken(token)}
ref={captchaRef}
/>
</div>
)}
<div className="mt-3 w-1/4 min-w-[21.2rem] rounded-md text-center md:min-w-[20.1rem] lg:w-1/6">
<Button
disabled={shouldShowCaptcha && captchaToken === ""}
type="submit"
size="sm"
isFullWidth

View File

@ -1,15 +1,13 @@
import { useRef, useState } from "react";
import { useState } from "react";
import { useTranslation } from "react-i18next";
import Link from "next/link";
import { useRouter } from "next/router";
import HCaptcha from "@hcaptcha/react-hcaptcha";
import axios from "axios";
import jwt_decode from "jwt-decode";
import { createNotification } from "@app/components/notifications";
import attemptCliLogin from "@app/components/utilities/attemptCliLogin";
import attemptLogin from "@app/components/utilities/attemptLogin";
import { CAPTCHA_SITE_KEY } from "@app/components/utilities/config";
import { Button, Input } from "@app/components/v2";
import { useUpdateUserAuthMethods } from "@app/hooks/api";
import { useSelectOrganization } from "@app/hooks/api/auth/queries";
@ -43,10 +41,6 @@ export const PasswordStep = ({
providerAuthToken
) as any;
const [captchaToken, setCaptchaToken] = useState("");
const [shouldShowCaptcha, setShouldShowCaptcha] = useState(false);
const captchaRef = useRef<HCaptcha>(null);
const handleLogin = async (e: React.FormEvent) => {
e.preventDefault();
try {
@ -57,8 +51,7 @@ export const PasswordStep = ({
const isCliLoginSuccessful = await attemptCliLogin({
email,
password,
providerAuthToken,
captchaToken
providerAuthToken
});
if (isCliLoginSuccessful && isCliLoginSuccessful.success) {
@ -106,8 +99,7 @@ export const PasswordStep = ({
const loginAttempt = await attemptLogin({
email,
password,
providerAuthToken,
captchaToken
providerAuthToken
});
if (loginAttempt && loginAttempt.success) {
@ -166,21 +158,11 @@ export const PasswordStep = ({
return;
}
if (err.response.data.error === "Captcha Required") {
setShouldShowCaptcha(true);
return;
}
createNotification({
text: "Login unsuccessful. Double-check your master password and try again.",
type: "error"
});
}
if (captchaRef.current) {
captchaRef.current.resetCaptcha();
}
setCaptchaToken("");
};
return (
@ -212,19 +194,8 @@ export const PasswordStep = ({
/>
</div>
</div>
{shouldShowCaptcha && (
<div className="mx-auto mt-4 flex w-full min-w-[22rem] items-center justify-center lg:w-1/6">
<HCaptcha
theme="dark"
sitekey={CAPTCHA_SITE_KEY}
onVerify={(token) => setCaptchaToken(token)}
ref={captchaRef}
/>
</div>
)}
<div className="mx-auto mt-4 flex w-1/4 w-full min-w-[22rem] items-center justify-center rounded-md text-center lg:w-1/6">
<Button
disabled={shouldShowCaptcha && captchaToken === ""}
type="submit"
colorSchema="primary"
variant="outline_bg"

View File

@ -393,15 +393,15 @@ export const SecretDetailSidebar = ({
{(isAllowed) => (
<Switch
id="skipmultiencoding-option"
onCheckedChange={(isChecked) => onChange(isChecked)}
isChecked={value}
onCheckedChange={(isChecked) => onChange(!isChecked)}
isChecked={!value}
onBlur={onBlur}
isDisabled={!isAllowed}
className="items-center"
>
Multi line encoding
Enable multi line encoding
<Tooltip
content="When enabled, multiline secrets will be handled by escaping newlines and enclosing the entire value in double quotes."
content="Infisical encodes multiline secrets by escaping newlines and wrapping in quotes. To disable, enable this option"
className="z-[100]"
>
<FontAwesomeIcon icon={faCircleQuestion} className="ml-1" size="sm" />

View File

@ -454,12 +454,12 @@ export const SecretOverviewPage = () => {
const filteredSecretNames = secKeys
?.filter((name) => name.toUpperCase().includes(searchFilter.toUpperCase()))
.sort((a, b) => (sortDir === "asc" ? a.localeCompare(b) : b.localeCompare(a)));
const filteredFolderNames = folderNames
?.filter((name) => name.toLowerCase().includes(searchFilter.toLowerCase()))
.sort((a, b) => (sortDir === "asc" ? a.localeCompare(b) : b.localeCompare(a)));
const filteredDynamicSecrets = dynamicSecretNames
?.filter((name) => name.toLowerCase().includes(searchFilter.toLowerCase()))
.sort((a, b) => (sortDir === "asc" ? a.localeCompare(b) : b.localeCompare(a)));
const filteredFolderNames = folderNames?.filter((name) =>
name.toLowerCase().includes(searchFilter.toLowerCase())
);
const filteredDynamicSecrets = dynamicSecretNames?.filter((name) =>
name.toLowerCase().includes(searchFilter.toLowerCase())
);
const isTableEmpty =
!(

View File

@ -0,0 +1,92 @@
import { Controller, useForm } from "react-hook-form";
import { zodResolver } from "@hookform/resolvers/zod";
import { z } from "zod";
import { createNotification } from "@app/components/notifications";
import { Button, FormControl, Input } from "@app/components/v2";
import { useProjectPermission, useWorkspace } from "@app/context";
import { ProjectMembershipRole } from "@app/hooks/api/roles/types";
import { useUpdateWorkspaceVersionLimit } from "@app/hooks/api/workspace/queries";
const formSchema = z.object({
pitVersionLimit: z.coerce.number().min(1).max(100)
});
type TForm = z.infer<typeof formSchema>;
export const PointInTimeVersionLimitSection = () => {
const { mutateAsync: updatePitVersion } = useUpdateWorkspaceVersionLimit();
const { currentWorkspace } = useWorkspace();
const { membership } = useProjectPermission();
const {
control,
formState: { isSubmitting, isDirty },
handleSubmit
} = useForm<TForm>({
resolver: zodResolver(formSchema),
values: {
pitVersionLimit: currentWorkspace?.pitVersionLimit || 10
}
});
if (!currentWorkspace) return null;
const handleVersionLimitSubmit = async ({ pitVersionLimit }: TForm) => {
try {
await updatePitVersion({
pitVersionLimit,
projectSlug: currentWorkspace.slug
});
createNotification({
text: "Successfully updated version limit",
type: "success"
});
} catch (err) {
createNotification({
text: "Failed updating project's version limit",
type: "error"
});
}
};
const isAdmin = membership.roles.includes(ProjectMembershipRole.Admin);
return (
<div className="mb-6 rounded-lg border border-mineshaft-600 bg-mineshaft-900 p-4">
<div className="flex w-full items-center justify-between">
<p className="text-xl font-semibold">Version Retention</p>
</div>
<p className="mb-4 mt-2 max-w-2xl text-sm text-gray-400">
This defines the maximum number of recent secret versions to keep per folder. Excess versions will be removed at midnight (UTC) each day.
</p>
<form onSubmit={handleSubmit(handleVersionLimitSubmit)} autoComplete="off">
<div className="max-w-xs">
<Controller
control={control}
defaultValue={0}
name="pitVersionLimit"
render={({ field, fieldState: { error } }) => (
<FormControl
isError={Boolean(error)}
errorText={error?.message}
label="Recent versions to keep"
>
<Input {...field} type="number" min={1} step={1} isDisabled={!isAdmin} />
</FormControl>
)}
/>
</div>
<Button
colorSchema="secondary"
type="submit"
isLoading={isSubmitting}
disabled={!isAdmin || !isDirty}
>
Save
</Button>
</form>
</div>
);
};

View File

@ -0,0 +1 @@
export { PointInTimeVersionLimitSection } from "./PointInTimeVersionLimitSection";

View File

@ -3,6 +3,7 @@ import { BackfillSecretReferenceSecretion } from "../BackfillSecretReferenceSect
import { DeleteProjectSection } from "../DeleteProjectSection";
import { E2EESection } from "../E2EESection";
import { EnvironmentSection } from "../EnvironmentSection";
import { PointInTimeVersionLimitSection } from "../PointInTimeVersionLimitSection";
import { ProjectNameChangeSection } from "../ProjectNameChangeSection";
import { SecretTagsSection } from "../SecretTagsSection";
@ -14,6 +15,7 @@ export const ProjectGeneralTab = () => {
<SecretTagsSection />
<AutoCapitalizationSection />
<E2EESection />
<PointInTimeVersionLimitSection />
<BackfillSecretReferenceSecretion />
<DeleteProjectSection />
</div>

View File

@ -178,7 +178,7 @@ export const AddShareSecretModal = ({ popUp, handlePopUpToggle }: Props) => {
errorText={error?.message}
>
<SecretInput
isVisible={false}
isVisible
{...field}
containerClassName="py-1.5 rounded-md transition-all group-hover:mr-2 text-bunker-300 hover:border-primary-400/50 border border-mineshaft-600 bg-mineshaft-900 px-2 min-h-[100px]"
/>

View File

@ -232,6 +232,7 @@ func (r *InfisicalSecretReconciler) UpdateInfisicalManagedKubeSecret(ctx context
}
managedKubeSecret.Data = plainProcessedSecrets
managedKubeSecret.ObjectMeta.Annotations = map[string]string{}
managedKubeSecret.ObjectMeta.Annotations[SECRET_VERSION_ANNOTATION] = ETag
err := r.Client.Update(ctx, &managedKubeSecret)