Mirror of https://github.com/Infisical/infisical.git, synced 2025-08-22 10:12:15 +00:00

Compare commits: feat/add-v ... daniel/go-

12 Commits

| Author | SHA1 | Date |
| --- | --- | --- |
| | a61e92c49c | |
| | fa05639592 | |
| | 6c596092b0 | |
| | fcd13eac8a | |
| | 1fb653754c | |
| | bb1d73b0f5 | |
| | 59e9226d85 | |
| | e6f42e1231 | |
| | 06e7a90a44 | |
| | f075ff23a9 | |
| | e5b7ebbabf | |
| | 9d46c269d4 | |
@@ -1,7 +1,6 @@
ARG POSTHOG_HOST=https://app.posthog.com
ARG POSTHOG_API_KEY=posthog-api-key
ARG INTERCOM_ID=intercom-id
ARG SAML_ORG_SLUG=saml-org-slug-default

FROM node:20-alpine AS base

@@ -35,9 +34,7 @@ ENV NEXT_PUBLIC_POSTHOG_API_KEY $POSTHOG_API_KEY
ARG INTERCOM_ID
ENV NEXT_PUBLIC_INTERCOM_ID $INTERCOM_ID
ARG INFISICAL_PLATFORM_VERSION
ENV NEXT_PUBLIC_INFISICAL_PLATFORM_VERSION $INFISICAL_PLATFORM_VERSION
ARG SAML_ORG_SLUG
ENV NEXT_PUBLIC_SAML_ORG_SLUG=$SAML_ORG_SLUG
ENV NEXT_PUBLIC_INFISICAL_PLATFORM_VERSION $INFISICAL_PLATFORM_VERSION

# Build
RUN npm run build

@@ -113,9 +110,6 @@ ENV NEXT_PUBLIC_POSTHOG_API_KEY=$POSTHOG_API_KEY \
ARG INTERCOM_ID=intercom-id
ENV NEXT_PUBLIC_INTERCOM_ID=$INTERCOM_ID \
    BAKED_NEXT_PUBLIC_INTERCOM_ID=$INTERCOM_ID
ARG SAML_ORG_SLUG
ENV NEXT_PUBLIC_SAML_ORG_SLUG=$SAML_ORG_SLUG \
    BAKED_NEXT_PUBLIC_SAML_ORG_SLUG=$SAML_ORG_SLUG

WORKDIR /
@@ -1,21 +0,0 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  const hasPitVersionLimitColumn = await knex.schema.hasColumn(TableName.Project, "pitVersionLimit");
  await knex.schema.alterTable(TableName.Project, (tb) => {
    if (!hasPitVersionLimitColumn) {
      tb.integer("pitVersionLimit").notNullable().defaultTo(10);
    }
  });
}

export async function down(knex: Knex): Promise<void> {
  const hasPitVersionLimitColumn = await knex.schema.hasColumn(TableName.Project, "pitVersionLimit");
  await knex.schema.alterTable(TableName.Project, (tb) => {
    if (hasPitVersionLimitColumn) {
      tb.dropColumn("pitVersionLimit");
    }
  });
}
@@ -16,8 +16,7 @@ export const ProjectsSchema = z.object({
  createdAt: z.date(),
  updatedAt: z.date(),
  version: z.number().default(1),
  upgradeStatus: z.string().nullable().optional(),
  pitVersionLimit: z.number().default(10)
  upgradeStatus: z.string().nullable().optional()
});

export type TProjects = z.infer<typeof ProjectsSchema>;
@@ -81,7 +81,8 @@ export const secretSnapshotServiceFactory = ({
    const folder = await folderDAL.findBySecretPath(projectId, environment, path);
    if (!folder) throw new BadRequestError({ message: "Folder not found" });

    return snapshotDAL.countOfSnapshotsByFolderId(folder.id);
    const count = await snapshotDAL.countOfSnapshotsByFolderId(folder.id);
    return count;
  };

  const listSnapshots = async ({
@@ -1,4 +1,3 @@
/* eslint-disable no-await-in-loop */
import { Knex } from "knex";

import { TDbClient } from "@app/db";

@@ -12,7 +11,6 @@ import {
} from "@app/db/schemas";
import { DatabaseError } from "@app/lib/errors";
import { ormify, selectAllTableCols, sqlNestRelationships } from "@app/lib/knex";
import { logger } from "@app/lib/logger";

export type TSnapshotDALFactory = ReturnType<typeof snapshotDALFactory>;

@@ -327,152 +325,12 @@ export const snapshotDALFactory = (db: TDbClient) => {
    }
  };

  /**
   * Prunes excess snapshots from the database to ensure only a specified number of recent snapshots are retained for each folder.
   *
   * This function operates in three main steps:
   * 1. Pruning snapshots from root/non-versioned folders.
   * 2. Pruning snapshots from versioned folders.
   * 3. Removing orphaned snapshots that do not belong to any existing folder or folder version.
   *
   * The function processes snapshots in batches, determined by the `PRUNE_FOLDER_BATCH_SIZE` constant,
   * to manage the large datasets without overwhelming the DB.
   *
   * Steps:
   * - Fetch a batch of folder IDs.
   * - For each batch, use a Common Table Expression (CTE) to rank snapshots within each folder by their creation date.
   * - Identify and delete snapshots that exceed the project's point-in-time version limit (`pitVersionLimit`).
   * - Repeat the process for versioned folders.
   * - Finally, delete orphaned snapshots that do not have an associated folder.
   */
  const pruneExcessSnapshots = async () => {
    const PRUNE_FOLDER_BATCH_SIZE = 10000;

    try {
      let uuidOffset = "00000000-0000-0000-0000-000000000000";
      // cleanup snapshots from root/non-versioned folders
      // eslint-disable-next-line no-constant-condition, no-unreachable-loop
      while (true) {
        const folderBatch = await db(TableName.SecretFolder)
          .where("id", ">", uuidOffset)
          .where("isReserved", false)
          .orderBy("id", "asc")
          .limit(PRUNE_FOLDER_BATCH_SIZE)
          .select("id");

        const batchEntries = folderBatch.map((folder) => folder.id);

        if (folderBatch.length) {
          try {
            logger.info(`Pruning snapshots in [range=${batchEntries[0]}:${batchEntries[batchEntries.length - 1]}]`);
            await db(TableName.Snapshot)
              .with("snapshot_cte", (qb) => {
                void qb
                  .from(TableName.Snapshot)
                  .whereIn(`${TableName.Snapshot}.folderId`, batchEntries)
                  .select(
                    "folderId",
                    `${TableName.Snapshot}.id as id`,
                    db.raw(
                      `ROW_NUMBER() OVER (PARTITION BY ${TableName.Snapshot}."folderId" ORDER BY ${TableName.Snapshot}."createdAt" DESC) AS row_num`
                    )
                  );
              })
              .join(TableName.SecretFolder, `${TableName.SecretFolder}.id`, `${TableName.Snapshot}.folderId`)
              .join(TableName.Environment, `${TableName.Environment}.id`, `${TableName.SecretFolder}.envId`)
              .join(TableName.Project, `${TableName.Project}.id`, `${TableName.Environment}.projectId`)
              .join("snapshot_cte", "snapshot_cte.id", `${TableName.Snapshot}.id`)
              .whereNull(`${TableName.SecretFolder}.parentId`)
              .whereRaw(`snapshot_cte.row_num > ${TableName.Project}."pitVersionLimit"`)
              .delete();
          } catch (err) {
            logger.error(
              `Failed to prune snapshots from root/non-versioned folders in range ${batchEntries[0]}:${
                batchEntries[batchEntries.length - 1]
              }`
            );
          } finally {
            uuidOffset = batchEntries[batchEntries.length - 1];
          }
        } else {
          break;
        }
      }

      // cleanup snapshots from versioned folders
      uuidOffset = "00000000-0000-0000-0000-000000000000";
      // eslint-disable-next-line no-constant-condition
      while (true) {
        const folderBatch = await db(TableName.SecretFolderVersion)
          .select("folderId")
          .distinct("folderId")
          .where("folderId", ">", uuidOffset)
          .orderBy("folderId", "asc")
          .limit(PRUNE_FOLDER_BATCH_SIZE);

        const batchEntries = folderBatch.map((folder) => folder.folderId);

        if (folderBatch.length) {
          try {
            logger.info(`Pruning snapshots in range ${batchEntries[0]}:${batchEntries[batchEntries.length - 1]}`);
            await db(TableName.Snapshot)
              .with("snapshot_cte", (qb) => {
                void qb
                  .from(TableName.Snapshot)
                  .whereIn(`${TableName.Snapshot}.folderId`, batchEntries)
                  .select(
                    "folderId",
                    `${TableName.Snapshot}.id as id`,
                    db.raw(
                      `ROW_NUMBER() OVER (PARTITION BY ${TableName.Snapshot}."folderId" ORDER BY ${TableName.Snapshot}."createdAt" DESC) AS row_num`
                    )
                  );
              })
              .join(
                TableName.SecretFolderVersion,
                `${TableName.SecretFolderVersion}.folderId`,
                `${TableName.Snapshot}.folderId`
              )
              .join(TableName.Environment, `${TableName.Environment}.id`, `${TableName.SecretFolderVersion}.envId`)
              .join(TableName.Project, `${TableName.Project}.id`, `${TableName.Environment}.projectId`)
              .join("snapshot_cte", "snapshot_cte.id", `${TableName.Snapshot}.id`)
              .whereRaw(`snapshot_cte.row_num > ${TableName.Project}."pitVersionLimit"`)
              .delete();
          } catch (err) {
            logger.error(
              `Failed to prune snapshots from versioned folders in range ${batchEntries[0]}:${
                batchEntries[batchEntries.length - 1]
              }`
            );
          } finally {
            uuidOffset = batchEntries[batchEntries.length - 1];
          }
        } else {
          break;
        }
      }

      // cleanup orphaned snapshots (those that don't belong to an existing folder and folder version)
      await db(TableName.Snapshot)
        .whereNotIn("folderId", (qb) => {
          void qb
            .select("folderId")
            .from(TableName.SecretFolderVersion)
            .union((qb1) => void qb1.select("id").from(TableName.SecretFolder));
        })
        .delete();
    } catch (error) {
      throw new DatabaseError({ error, name: "SnapshotPrune" });
    }
  };

  return {
    ...secretSnapshotOrm,
    findById,
    findLatestSnapshotByFolderId,
    findRecursivelySnapshots,
    countOfSnapshotsByFolderId,
    findSecretSnapshotDataById,
    pruneExcessSnapshots
    findSecretSnapshotDataById
  };
};
@@ -75,6 +75,7 @@ const envSchema = z
      .optional()
      .default(process.env.URL_GITLAB_LOGIN ?? GITLAB_URL)
  ), // fallback since URL_GITLAB_LOGIN has been renamed
  DEFAULT_SAML_ORG_SLUG: zpStr(z.string().optional()).default(process.env.NEXT_PUBLIC_SAML_ORG_SLUG),
  // integration client secrets
  // heroku
  CLIENT_ID_HEROKU: zpStr(z.string().optional()),

@@ -131,7 +132,8 @@ const envSchema = z
    isSecretScanningConfigured:
      Boolean(data.SECRET_SCANNING_GIT_APP_ID) &&
      Boolean(data.SECRET_SCANNING_PRIVATE_KEY) &&
      Boolean(data.SECRET_SCANNING_WEBHOOK_SECRET)
      Boolean(data.SECRET_SCANNING_WEBHOOK_SECRET),
    samlDefaultOrgSlug: data.DEFAULT_SAML_ORG_SLUG
  }));

let envCfg: Readonly<z.infer<typeof envSchema>>;
@@ -824,9 +824,6 @@ export const registerRoutes = async (
  const dailyResourceCleanUp = dailyResourceCleanUpQueueServiceFactory({
    auditLogDAL,
    queueService,
    secretVersionDAL,
    secretFolderVersionDAL: folderVersionDAL,
    snapshotDAL,
    identityAccessTokenDAL,
    secretSharingDAL
  });

@@ -922,7 +919,8 @@ export const registerRoutes = async (
            emailConfigured: z.boolean().optional(),
            inviteOnlySignup: z.boolean().optional(),
            redisConfigured: z.boolean().optional(),
            secretScanningConfigured: z.boolean().optional()
            secretScanningConfigured: z.boolean().optional(),
            samlDefaultOrgSlug: z.string().optional()
          })
        }
      },

@@ -935,7 +933,8 @@ export const registerRoutes = async (
        emailConfigured: cfg.isSmtpConfigured,
        inviteOnlySignup: Boolean(serverCfg.allowSignUp),
        redisConfigured: cfg.isRedisConfigured,
        secretScanningConfigured: cfg.isSecretScanningConfigured
        secretScanningConfigured: cfg.isSecretScanningConfigured,
        samlDefaultOrgSlug: cfg.samlDefaultOrgSlug
      };
    }
  });
@@ -334,44 +334,6 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
    }
  });

  server.route({
    method: "PUT",
    url: "/:workspaceSlug/version-limit",
    config: {
      rateLimit: writeLimit
    },
    schema: {
      params: z.object({
        workspaceSlug: z.string().trim()
      }),
      body: z.object({
        pitVersionLimit: z.number().min(1).max(100)
      }),
      response: {
        200: z.object({
          message: z.string(),
          workspace: ProjectsSchema
        })
      }
    },
    onRequest: verifyAuth([AuthMode.JWT]),
    handler: async (req) => {
      const workspace = await server.services.project.updateVersionLimit({
        actorId: req.permission.id,
        actor: req.permission.type,
        actorAuthMethod: req.permission.authMethod,
        actorOrgId: req.permission.orgId,
        pitVersionLimit: req.body.pitVersionLimit,
        workspaceSlug: req.params.workspaceSlug
      });

      return {
        message: "Successfully changed workspace version limit",
        workspace
      };
    }
  });

  server.route({
    method: "GET",
    url: "/:workspaceId/integrations",
@@ -77,7 +77,7 @@ export const kmsServiceFactory = ({ kmsDAL, kmsRootConfigDAL, keyStore }: TKmsSe
    // This will switch to a seal process and HMS flow in future
    const encryptionKey = appCfg.ENCRYPTION_KEY || appCfg.ROOT_ENCRYPTION_KEY;
    // if root key its base64 encoded
    const isBase64 = Boolean(appCfg.ROOT_ENCRYPTION_KEY);
    const isBase64 = !appCfg.ENCRYPTION_KEY;
    if (!encryptionKey) throw new Error("Root encryption key not found for KMS service.");
    const encryptionKeyBuffer = Buffer.from(encryptionKey, isBase64 ? "base64" : "utf8");
@@ -39,7 +39,6 @@ import {
  TToggleProjectAutoCapitalizationDTO,
  TUpdateProjectDTO,
  TUpdateProjectNameDTO,
  TUpdateProjectVersionLimitDTO,
  TUpgradeProjectDTO
} from "./project-types";

@@ -134,8 +133,7 @@ export const projectServiceFactory = ({
          name: workspaceName,
          orgId: organization.id,
          slug: projectSlug || slugify(`${workspaceName}-${alphaNumericNanoId(4)}`),
          version: ProjectVersion.V2,
          pitVersionLimit: 10
          version: ProjectVersion.V2
        },
        tx
      );

@@ -408,35 +406,6 @@ export const projectServiceFactory = ({
    return updatedProject;
  };

  const updateVersionLimit = async ({
    actor,
    actorId,
    actorOrgId,
    actorAuthMethod,
    pitVersionLimit,
    workspaceSlug
  }: TUpdateProjectVersionLimitDTO) => {
    const project = await projectDAL.findProjectBySlug(workspaceSlug, actorOrgId);
    if (!project) {
      throw new BadRequestError({
        message: "Project not found"
      });
    }

    const { hasRole } = await permissionService.getProjectPermission(
      actor,
      actorId,
      project.id,
      actorAuthMethod,
      actorOrgId
    );

    if (!hasRole(ProjectMembershipRole.Admin))
      throw new BadRequestError({ message: "Only admins are allowed to take this action" });

    return projectDAL.updateById(project.id, { pitVersionLimit });
  };

  const updateName = async ({
    projectId,
    actor,

@@ -532,7 +501,6 @@ export const projectServiceFactory = ({
    getAProject,
    toggleAutoCapitalization,
    updateName,
    upgradeProject,
    updateVersionLimit
    upgradeProject
  };
};
@@ -43,11 +43,6 @@ export type TToggleProjectAutoCapitalizationDTO = {
  autoCapitalization: boolean;
} & TProjectPermission;

export type TUpdateProjectVersionLimitDTO = {
  pitVersionLimit: number;
  workspaceSlug: string;
} & Omit<TProjectPermission, "projectId">;

export type TUpdateProjectNameDTO = {
  name: string;
} & TProjectPermission;
@@ -1,19 +1,13 @@
import { TAuditLogDALFactory } from "@app/ee/services/audit-log/audit-log-dal";
import { TSnapshotDALFactory } from "@app/ee/services/secret-snapshot/snapshot-dal";
import { logger } from "@app/lib/logger";
import { QueueJobs, QueueName, TQueueServiceFactory } from "@app/queue";

import { TIdentityAccessTokenDALFactory } from "../identity-access-token/identity-access-token-dal";
import { TSecretVersionDALFactory } from "../secret/secret-version-dal";
import { TSecretFolderVersionDALFactory } from "../secret-folder/secret-folder-version-dal";
import { TSecretSharingDALFactory } from "../secret-sharing/secret-sharing-dal";

type TDailyResourceCleanUpQueueServiceFactoryDep = {
  auditLogDAL: Pick<TAuditLogDALFactory, "pruneAuditLog">;
  identityAccessTokenDAL: Pick<TIdentityAccessTokenDALFactory, "removeExpiredTokens">;
  secretVersionDAL: Pick<TSecretVersionDALFactory, "pruneExcessVersions">;
  secretFolderVersionDAL: Pick<TSecretFolderVersionDALFactory, "pruneExcessVersions">;
  snapshotDAL: Pick<TSnapshotDALFactory, "pruneExcessSnapshots">;
  secretSharingDAL: Pick<TSecretSharingDALFactory, "pruneExpiredSharedSecrets">;
  queueService: TQueueServiceFactory;
};

@@ -23,9 +17,6 @@ export type TDailyResourceCleanUpQueueServiceFactory = ReturnType<typeof dailyRe
export const dailyResourceCleanUpQueueServiceFactory = ({
  auditLogDAL,
  queueService,
  snapshotDAL,
  secretVersionDAL,
  secretFolderVersionDAL,
  identityAccessTokenDAL,
  secretSharingDAL
}: TDailyResourceCleanUpQueueServiceFactoryDep) => {

@@ -34,9 +25,6 @@ export const dailyResourceCleanUpQueueServiceFactory = ({
    await auditLogDAL.pruneAuditLog();
    await identityAccessTokenDAL.removeExpiredTokens();
    await secretSharingDAL.pruneExpiredSharedSecrets();
    await snapshotDAL.pruneExcessSnapshots();
    await secretVersionDAL.pruneExcessVersions();
    await secretFolderVersionDAL.pruneExcessVersions();
    logger.info(`${QueueName.DailyResourceCleanUp}: queue task completed`);
  });
@@ -62,32 +62,5 @@ export const secretFolderVersionDALFactory = (db: TDbClient) => {
    }
  };

  const pruneExcessVersions = async () => {
    try {
      await db(TableName.SecretFolderVersion)
        .with("folder_cte", (qb) => {
          void qb
            .from(TableName.SecretFolderVersion)
            .select(
              "id",
              "folderId",
              db.raw(
                `ROW_NUMBER() OVER (PARTITION BY ${TableName.SecretFolderVersion}."folderId" ORDER BY ${TableName.SecretFolderVersion}."createdAt" DESC) AS row_num`
              )
            );
        })
        .join(TableName.Environment, `${TableName.Environment}.id`, `${TableName.SecretFolderVersion}.envId`)
        .join(TableName.Project, `${TableName.Project}.id`, `${TableName.Environment}.projectId`)
        .join("folder_cte", "folder_cte.id", `${TableName.SecretFolderVersion}.id`)
        .whereRaw(`folder_cte.row_num > ${TableName.Project}."pitVersionLimit"`)
        .delete();
    } catch (error) {
      throw new DatabaseError({
        error,
        name: "Secret Folder Version Prune"
      });
    }
  };

  return { ...secretFolderVerOrm, findLatestFolderVersions, findLatestVersionByFolderId, pruneExcessVersions };
  return { ...secretFolderVerOrm, findLatestFolderVersions, findLatestVersionByFolderId };
};
@@ -111,37 +111,8 @@ export const secretVersionDALFactory = (db: TDbClient) => {
    }
  };

  const pruneExcessVersions = async () => {
    try {
      await db(TableName.SecretVersion)
        .with("version_cte", (qb) => {
          void qb
            .from(TableName.SecretVersion)
            .select(
              "id",
              "folderId",
              db.raw(
                `ROW_NUMBER() OVER (PARTITION BY ${TableName.SecretVersion}."secretId" ORDER BY ${TableName.SecretVersion}."createdAt" DESC) AS row_num`
              )
            );
        })
        .join(TableName.SecretFolder, `${TableName.SecretFolder}.id`, `${TableName.SecretVersion}.folderId`)
        .join(TableName.Environment, `${TableName.Environment}.id`, `${TableName.SecretFolder}.envId`)
        .join(TableName.Project, `${TableName.Project}.id`, `${TableName.Environment}.projectId`)
        .join("version_cte", "version_cte.id", `${TableName.SecretVersion}.id`)
        .whereRaw(`version_cte.row_num > ${TableName.Project}."pitVersionLimit"`)
        .delete();
    } catch (error) {
      throw new DatabaseError({
        error,
        name: "Secret Version Prune"
      });
    }
  };

  return {
    ...secretVersionOrm,
    pruneExcessVersions,
    findLatestVersionMany,
    bulkUpdate,
    findLatestVersionByFolderId,
@@ -384,6 +384,7 @@
      "pages": [
        "sdks/languages/node",
        "sdks/languages/python",
        "sdks/languages/go",
        "sdks/languages/java",
        "sdks/languages/csharp"
      ]
docs/sdks/languages/go.mdx (new file, 438 lines)
@@ -0,0 +1,438 @@
---
title: "Infisical Go SDK"
sidebarTitle: "Go"
icon: "golang"
---

If you're working with Go, the official [Infisical Go SDK](https://github.com/infisical/go-sdk) package is the easiest way to fetch and work with secrets for your application.

- [Package](https://pkg.go.dev/github.com/infisical/go-sdk)
- [Github Repository](https://github.com/infisical/go-sdk)

## Basic Usage

```go
package main

import (
    "fmt"
    "os"

    infisical "github.com/infisical/go-sdk"
)

func main() {

    client, err := infisical.NewInfisicalClient(infisical.Config{
        SiteUrl: "https://app.infisical.com", // Optional, default is https://app.infisical.com
    })

    if err != nil {
        fmt.Printf("Error: %v", err)
        os.Exit(1)
    }

    _, err = client.Auth().UniversalAuthLogin("YOUR_CLIENT_ID", "YOUR_CLIENT_SECRET")

    if err != nil {
        fmt.Printf("Authentication failed: %v", err)
        os.Exit(1)
    }

    apiKeySecret, err := client.Secrets().Retrieve(infisical.RetrieveSecretOptions{
        SecretKey: "API_KEY",
        Environment: "dev",
        ProjectID: "YOUR_PROJECT_ID",
        SecretPath: "/",
    })

    if err != nil {
        fmt.Printf("Error: %v", err)
        os.Exit(1)
    }

    fmt.Printf("API Key Secret: %v", apiKeySecret)

}
```

This example demonstrates how to use the Infisical Go SDK in a simple Go application. The application retrieves a secret named `API_KEY` from the `dev` environment of the `YOUR_PROJECT_ID` project.

<Warning>
  We do not recommend hardcoding your [Machine Identity Tokens](/platform/identities/overview). Setting them as environment variables would be best.
</Warning>
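
For instance, rather than hardcoding credentials, you can read them from your own environment at startup and pass them to the login call. A minimal sketch (the variable names `INFISICAL_CLIENT_ID` and `INFISICAL_CLIENT_SECRET` are arbitrary placeholders, not names required by the SDK):

```go
package main

import (
    "fmt"
    "os"

    infisical "github.com/infisical/go-sdk"
)

func main() {
    client, err := infisical.NewInfisicalClient(infisical.Config{})
    if err != nil {
        fmt.Printf("Error: %v", err)
        os.Exit(1)
    }

    // Read the machine identity credentials from the process environment
    // instead of committing them to source control.
    clientID := os.Getenv("INFISICAL_CLIENT_ID")         // placeholder name
    clientSecret := os.Getenv("INFISICAL_CLIENT_SECRET") // placeholder name

    if _, err := client.Auth().UniversalAuthLogin(clientID, clientSecret); err != nil {
        fmt.Printf("Authentication failed: %v", err)
        os.Exit(1)
    }
}
```
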
# Installation

```console
$ go get github.com/infisical/go-sdk
```

# Configuration

Import the SDK and create a client instance.

```go
client, err := infisical.NewInfisicalClient(infisical.Config{
    SiteUrl: "https://app.infisical.com", // Optional, default is https://api.infisical.com
})

if err != nil {
    fmt.Printf("Error: %v", err)
    os.Exit(1)
}
```

### ClientSettings methods

<ParamField query="options" type="object">
  <Expandable title="properties">
    <ParamField query="SiteUrl" type="string" optional>
      The URL of the Infisical API. Default is `https://api.infisical.com`.
    </ParamField>

    <ParamField query="UserAgent" type="string" optional>
      Optionally set the user agent that will be used for HTTP requests. _(Not recommended)_
    </ParamField>
  </Expandable>
</ParamField>

### Authentication

The SDK supports a variety of authentication methods. The most common authentication method is Universal Auth, which uses a client ID and client secret to authenticate.

#### Universal Auth

**Using environment variables**

Call `.Auth().UniversalAuthLogin()` with empty arguments to use the following environment variables:

- `INFISICAL_UNIVERSAL_AUTH_CLIENT_ID` - Your machine identity client ID.
- `INFISICAL_UNIVERSAL_AUTH_CLIENT_SECRET` - Your machine identity client secret.
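
A minimal sketch of the environment-variable flow, assuming (per the description above) that empty string arguments tell the SDK to fall back to these variables:

```go
// INFISICAL_UNIVERSAL_AUTH_CLIENT_ID and INFISICAL_UNIVERSAL_AUTH_CLIENT_SECRET
// are expected to already be set in the process environment.
_, err := client.Auth().UniversalAuthLogin("", "")

if err != nil {
    fmt.Println(err)
    os.Exit(1)
}
```
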
**Using the SDK directly**

```go
_, err := client.Auth().UniversalAuthLogin("CLIENT_ID", "CLIENT_SECRET")

if err != nil {
    fmt.Println(err)
    os.Exit(1)
}
```

#### GCP ID Token Auth

<Info>
  Please note that this authentication method will only work if you're running your application on Google Cloud Platform.
  Please [read more](/documentation/platform/identities/gcp-auth) about this authentication method.
</Info>

**Using environment variables**

Call `.Auth().GcpIdTokenAuthLogin()` with empty arguments to use the following environment variables:

- `INFISICAL_GCP_AUTH_IDENTITY_ID` - Your Infisical Machine Identity ID.

**Using the SDK directly**

```go
_, err := client.Auth().GcpIdTokenAuthLogin("YOUR_MACHINE_IDENTITY_ID")

if err != nil {
    fmt.Println(err)
    os.Exit(1)
}
```

#### GCP IAM Auth

**Using environment variables**

Call `.Auth().GcpIamAuthLogin()` with empty arguments to use the following environment variables:

- `INFISICAL_GCP_IAM_AUTH_IDENTITY_ID` - Your Infisical Machine Identity ID.
- `INFISICAL_GCP_IAM_SERVICE_ACCOUNT_KEY_FILE_PATH` - The path to your GCP service account key file.

**Using the SDK directly**

```go
_, err = client.Auth().GcpIamAuthLogin("MACHINE_IDENTITY_ID", "SERVICE_ACCOUNT_KEY_FILE_PATH")

if err != nil {
    fmt.Println(err)
    os.Exit(1)
}
```

#### AWS IAM Auth

<Info>
  Please note that this authentication method will only work if you're running your application on AWS.
  Please [read more](/documentation/platform/identities/aws-auth) about this authentication method.
</Info>

**Using environment variables**

Call `.Auth().AwsIamAuthLogin()` with empty arguments to use the following environment variables:

- `INFISICAL_AWS_IAM_AUTH_IDENTITY_ID` - Your Infisical Machine Identity ID.

**Using the SDK directly**

```go
_, err = client.Auth().AwsIamAuthLogin("MACHINE_IDENTITY_ID")

if err != nil {
    fmt.Println(err)
    os.Exit(1)
}
```

#### Azure Auth

<Info>
  Please note that this authentication method will only work if you're running your application on Azure.
  Please [read more](/documentation/platform/identities/azure-auth) about this authentication method.
</Info>

**Using environment variables**

Call `.Auth().AzureAuthLogin()` with empty arguments to use the following environment variables:

- `INFISICAL_AZURE_AUTH_IDENTITY_ID` - Your Infisical Machine Identity ID.

**Using the SDK directly**

```go
_, err = client.Auth().AzureAuthLogin("MACHINE_IDENTITY_ID")

if err != nil {
    fmt.Println(err)
    os.Exit(1)
}
```

#### Kubernetes Auth

<Info>
  Please note that this authentication method will only work if you're running your application on Kubernetes.
  Please [read more](/documentation/platform/identities/kubernetes-auth) about this authentication method.
</Info>

**Using environment variables**

Call `.Auth().KubernetesAuthLogin()` with empty arguments to use the following environment variables:

- `INFISICAL_KUBERNETES_IDENTITY_ID` - Your Infisical Machine Identity ID.
- `INFISICAL_KUBERNETES_SERVICE_ACCOUNT_TOKEN_PATH_ENV_NAME` - The environment variable name that contains the path to the service account token. This is optional and will default to `/var/run/secrets/kubernetes.io/serviceaccount/token`.

**Using the SDK directly**

```go
// Service account token path will default to /var/run/secrets/kubernetes.io/serviceaccount/token if empty value is passed
_, err = client.Auth().KubernetesAuthLogin("MACHINE_IDENTITY_ID", "SERVICE_ACCOUNT_TOKEN_PATH")

if err != nil {
    fmt.Println(err)
    os.Exit(1)
}
```

## Working with Secrets

### client.Secrets().List(options)

```go
secrets, err := client.Secrets().List(infisical.ListSecretsOptions{
    ProjectID: "PROJECT_ID",
    Environment: "dev",
    SecretPath: "/foo/bar",
    AttachToProcessEnv: false,
})
```

Retrieve all secrets within the Infisical project and environment that the client is connected to.
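
The call returns a slice of secret objects along with an error. A usage sketch, assuming each returned secret exposes `SecretKey` and `SecretValue` fields:

```go
if err != nil {
    fmt.Println(err)
    os.Exit(1)
}

for _, secret := range secrets {
    // Print only the key names; avoid logging secret values outside local debugging.
    fmt.Println(secret.SecretKey)
}
```
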
#### Parameters

<ParamField query="Parameters" type="object">
  <Expandable title="properties">
    <ParamField query="Environment" type="string" required>
      The slug name (dev, prod, etc.) of the environment from which secrets should be fetched.
    </ParamField>

    <ParamField query="ProjectID" type="string">
      The project ID where the secrets live.
    </ParamField>

    <ParamField query="SecretPath" type="string" optional>
      The path from which secrets should be fetched.
    </ParamField>

    <ParamField query="AttachToProcessEnv" type="boolean" default="false" optional>
      Whether or not to inject the fetched secrets into the process environment. If true, you can access the secrets like so: `os.Getenv("SECRET_NAME")`.
    </ParamField>

    <ParamField query="IncludeImports" type="boolean" default="false" optional>
      Whether or not to include imported secrets from the current path. Read about [secret import](/documentation/platform/secret-reference).
    </ParamField>

    <ParamField query="Recursive" type="boolean" default="false" optional>
      Whether or not to fetch secrets recursively from the specified path. Please note that there's a 20-depth limit for recursive fetching.
    </ParamField>

    <ParamField query="ExpandSecretReferences" type="boolean" default="true" optional>
      Whether or not to expand secret references in the fetched secrets. Read about [secret reference](/documentation/platform/secret-reference).
    </ParamField>
  </Expandable>
</ParamField>

### client.Secrets().Get(options)

```go
secret, err := client.Secrets().Retrieve(infisical.RetrieveSecretOptions{
    SecretKey: "API_KEY",
    ProjectID: "PROJECT_ID",
    Environment: "dev",
})
```

Retrieve a secret from Infisical.

By default, `Secrets().Get()` fetches and returns a shared secret.
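
To fetch a personal secret instead, a sketch would be to set the `Type` option documented below (assuming the options struct exposes it as a plain string field):

```go
personalSecret, err := client.Secrets().Retrieve(infisical.RetrieveSecretOptions{
    SecretKey: "API_KEY",
    ProjectID: "PROJECT_ID",
    Environment: "dev",
    Type: "personal", // defaults to "shared" when omitted
})
```
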
#### Parameters

<ParamField query="Parameters" type="object" optional>
  <Expandable title="properties">
    <ParamField query="SecretKey" type="string" required>
      The key of the secret to retrieve.
    </ParamField>
    <ParamField query="ProjectID" type="string" required>
      The project ID where the secret lives.
    </ParamField>
    <ParamField query="Environment" type="string" required>
      The slug name (dev, prod, etc.) of the environment from which secrets should be fetched.
    </ParamField>
    <ParamField query="SecretPath" type="string" optional>
      The path from which the secret should be fetched.
    </ParamField>
    <ParamField query="Type" type="string" optional>
      The type of the secret. Valid options are "shared" or "personal". If not specified, the default value is "shared".
    </ParamField>
  </Expandable>
</ParamField>

### client.Secrets().Create(options)

```go
secret, err := client.Secrets().Create(infisical.CreateSecretOptions{
    ProjectID: "PROJECT_ID",
    Environment: "dev",

    SecretKey: "NEW_SECRET_KEY",
    SecretValue: "NEW_SECRET_VALUE",
    SecretComment: "This is a new secret",
})
```

Create a new secret in Infisical.

#### Parameters

<ParamField query="Parameters" type="object" optional>
  <Expandable title="properties">
    <ParamField query="SecretKey" type="string" required>
      The key of the secret to create.
    </ParamField>
    <ParamField query="SecretValue" type="string" required>
      The value of the secret.
    </ParamField>
    <ParamField query="SecretComment" type="string" optional>
      A comment for the secret.
    </ParamField>
    <ParamField query="ProjectID" type="string" required>
      The project ID where the secret lives.
    </ParamField>
    <ParamField query="Environment" type="string" required>
      The slug name (dev, prod, etc.) of the environment from which secrets should be fetched.
    </ParamField>
    <ParamField query="SecretPath" type="string" optional>
      The path where the secret should be created.
    </ParamField>
    <ParamField query="Type" type="string" optional>
      The type of the secret. Valid options are "shared" or "personal". If not specified, the default value is "shared".
    </ParamField>
  </Expandable>
</ParamField>

### client.Secrets().Update(options)

```go
secret, err := client.Secrets().Update(infisical.UpdateSecretOptions{
    ProjectID: "PROJECT_ID",
    Environment: "dev",
    SecretKey: "NEW_SECRET_KEY",
    NewSecretValue: "NEW_SECRET_VALUE",
    NewSkipMultilineEncoding: false,
})
```

Update an existing secret in Infisical.

#### Parameters

<ParamField query="Parameters" type="object" optional>
  <Expandable title="properties">
    <ParamField query="SecretKey" type="string" required>
      The key of the secret to update.
    </ParamField>
    <ParamField query="NewSecretValue" type="string" required>
      The new value of the secret.
    </ParamField>
    <ParamField query="NewSkipMultilineEncoding" type="boolean" default="false" optional>
      Whether or not to skip multiline encoding for the new secret value.
    </ParamField>
    <ParamField query="ProjectID" type="string" required>
      The project ID where the secret lives.
    </ParamField>
    <ParamField query="Environment" type="string" required>
      The slug name (dev, prod, etc.) of the environment from which secrets should be fetched.
    </ParamField>
    <ParamField query="SecretPath" type="string" optional>
      The path where the secret should be updated.
    </ParamField>
    <ParamField query="Type" type="string" optional>
      The type of the secret. Valid options are "shared" or "personal". If not specified, the default value is "shared".
    </ParamField>
  </Expandable>
</ParamField>

### client.Secrets().Delete(options)

```go
secret, err := client.Secrets().Delete(infisical.DeleteSecretOptions{
    ProjectID: "PROJECT_ID",
    Environment: "dev",
    SecretKey: "SECRET_KEY",
})
```

Delete a secret in Infisical.

#### Parameters

<ParamField query="Parameters" type="object" optional>
  <Expandable title="properties">
    <ParamField query="SecretKey" type="string">
      The key of the secret to delete.
    </ParamField>
    <ParamField query="ProjectID" type="string" required>
      The project ID where the secret lives.
    </ParamField>
    <ParamField query="Environment" type="string" required>
      The slug name (dev, prod, etc.) of the environment from which secrets should be fetched.
    </ParamField>
    <ParamField query="SecretPath" type="string" optional>
      The path where the secret should be deleted.
    </ParamField>
    <ParamField query="Type" type="string" optional>
      The type of the secret. Valid options are "shared" or "personal". If not specified, the default value is "shared".
    </ParamField>
  </Expandable>
</ParamField>
@@ -318,6 +318,11 @@ SMTP_FROM_NAME=Infisical
By default, users can only login via email/password based login method.
To login into Infisical with OAuth providers such as Google, configure the associated variables.

<ParamField query="DEFAULT_SAML_ORG_SLUG" type="string">
  When set, all visits to the Infisical login page will automatically redirect users of your Infisical instance to the SAML identity provider associated with the specified organization slug.
</ParamField>

<Accordion title="Google">
  Follow detailed guide to configure [Google SSO](/documentation/platform/sso/google)

@@ -369,11 +374,6 @@ To login into Infisical with OAuth providers such as Google, configure the assoc
  information.
</Accordion>

<ParamField query="NEXT_PUBLIC_SAML_ORG_SLUG" type="string">
  Configure SAML organization slug to automatically redirect all users of your
  Infisical instance to the identity provider.
</ParamField>

## Native secret integrations

To help you sync secrets from Infisical to services such as Github and Gitlab, Infisical provides native integrations out of the box.
@@ -4,8 +4,6 @@ scripts/replace-standalone-build-variable.sh "$BAKED_NEXT_PUBLIC_POSTHOG_API_KEY

scripts/replace-standalone-build-variable.sh "$BAKED_NEXT_PUBLIC_INTERCOM_ID" "$NEXT_PUBLIC_INTERCOM_ID"

scripts/replace-standalone-build-variable.sh "$BAKED_NEXT_PUBLIC_SAML_ORG_SLUG" "$NEXT_PUBLIC_SAML_ORG_SLUG"

if [ "$TELEMETRY_ENABLED" != "false" ]; then
  echo "Telemetry is enabled"
  scripts/set-standalone-build-telemetry.sh true
@@ -4,4 +4,5 @@ export type ServerStatus = {
  emailConfigured: boolean;
  secretScanningConfigured: boolean;
  redisConfigured: boolean;
  samlDefaultOrgSlug: boolean
};
@@ -20,7 +20,6 @@ import {
  TUpdateWorkspaceIdentityRoleDTO,
  TUpdateWorkspaceUserRoleDTO,
  UpdateEnvironmentDTO,
  UpdatePitVersionLimitDTO,
  Workspace
} from "./types";

@@ -250,21 +249,6 @@ export const useToggleAutoCapitalization = () => {
  });
};

export const useUpdateWorkspaceVersionLimit = () => {
  const queryClient = useQueryClient();

  return useMutation<{}, {}, UpdatePitVersionLimitDTO>({
    mutationFn: ({ projectSlug, pitVersionLimit }) => {
      return apiRequest.put(`/api/v1/workspace/${projectSlug}/version-limit`, {
        pitVersionLimit
      });
    },
    onSuccess: () => {
      queryClient.invalidateQueries(workspaceKeys.getAllUserWorkspace);
    }
  });
};

export const useDeleteWorkspace = () => {
  const queryClient = useQueryClient();
@@ -16,7 +16,6 @@ export type Workspace = {
  upgradeStatus: string | null;
  autoCapitalization: boolean;
  environments: WorkspaceEnv[];
  pitVersionLimit: number;
  slug: string;
};

@@ -49,7 +48,6 @@ export type CreateWorkspaceDTO = {
};

export type RenameWorkspaceDTO = { workspaceID: string; newWorkspaceName: string };
export type UpdatePitVersionLimitDTO = { projectSlug: string; pitVersionLimit: number };
export type ToggleAutoCapitalizationDTO = { workspaceID: string; state: boolean };

export type DeleteWorkspaceDTO = { workspaceID: string };

@@ -130,4 +128,4 @@ export type TUpdateWorkspaceGroupRoleDTO = {
      temporaryAccessStartTime: string;
    }
  )[];
};
};
@@ -12,6 +12,7 @@ import attemptCliLogin from "@app/components/utilities/attemptCliLogin";
import attemptLogin from "@app/components/utilities/attemptLogin";
import { Button, Input } from "@app/components/v2";
import { useServerConfig } from "@app/context";
import { useFetchServerStatus } from "@app/hooks/api";

import { navigateUserToSelectOrg } from "../../Login.utils";

@@ -31,21 +32,15 @@ export const InitialStep = ({ setStep, email, setEmail, password, setPassword }:
  const [loginError, setLoginError] = useState(false);
  const { config } = useServerConfig();
  const queryParams = new URLSearchParams(window.location.search);
  const { data: serverDetails } = useFetchServerStatus();

  useEffect(() => {
    if (
      process.env.NEXT_PUBLIC_SAML_ORG_SLUG &&
      process.env.NEXT_PUBLIC_SAML_ORG_SLUG !== "saml-org-slug-default"
    ) {
      const callbackPort = queryParams.get("callback_port");
      window.open(
        `/api/v1/sso/redirect/saml2/organizations/${process.env.NEXT_PUBLIC_SAML_ORG_SLUG}${
          callbackPort ? `?callback_port=${callbackPort}` : ""
        }`
      );
      window.close();
    }
  }, []);
    if (serverDetails?.samlDefaultOrgSlug){
      const callbackPort = queryParams.get("callback_port");
      const redirectUrl = `/api/v1/sso/redirect/saml2/organizations/${serverDetails?.samlDefaultOrgSlug}${callbackPort ? `?callback_port=${callbackPort}` : ""}`
      router.push(redirectUrl);
    }
  }, [serverDetails?.samlDefaultOrgSlug]);

  const handleLogin = async (e: FormEvent<HTMLFormElement>) => {
    e.preventDefault();
@@ -454,12 +454,12 @@ export const SecretOverviewPage = () => {
  const filteredSecretNames = secKeys
    ?.filter((name) => name.toUpperCase().includes(searchFilter.toUpperCase()))
    .sort((a, b) => (sortDir === "asc" ? a.localeCompare(b) : b.localeCompare(a)));
  const filteredFolderNames = folderNames?.filter((name) =>
    name.toLowerCase().includes(searchFilter.toLowerCase())
  );
  const filteredDynamicSecrets = dynamicSecretNames?.filter((name) =>
    name.toLowerCase().includes(searchFilter.toLowerCase())
  );
  const filteredFolderNames = folderNames
    ?.filter((name) => name.toLowerCase().includes(searchFilter.toLowerCase()))
    .sort((a, b) => (sortDir === "asc" ? a.localeCompare(b) : b.localeCompare(a)));
  const filteredDynamicSecrets = dynamicSecretNames
    ?.filter((name) => name.toLowerCase().includes(searchFilter.toLowerCase()))
    .sort((a, b) => (sortDir === "asc" ? a.localeCompare(b) : b.localeCompare(a)));

  const isTableEmpty =
    !(
@@ -1,92 +0,0 @@
import { Controller, useForm } from "react-hook-form";
import { zodResolver } from "@hookform/resolvers/zod";
import { z } from "zod";

import { createNotification } from "@app/components/notifications";
import { Button, FormControl, Input } from "@app/components/v2";
import { useProjectPermission, useWorkspace } from "@app/context";
import { ProjectMembershipRole } from "@app/hooks/api/roles/types";
import { useUpdateWorkspaceVersionLimit } from "@app/hooks/api/workspace/queries";

const formSchema = z.object({
  pitVersionLimit: z.coerce.number().min(1).max(100)
});

type TForm = z.infer<typeof formSchema>;

export const PointInTimeVersionLimitSection = () => {
  const { mutateAsync: updatePitVersion } = useUpdateWorkspaceVersionLimit();

  const { currentWorkspace } = useWorkspace();
  const { membership } = useProjectPermission();

  const {
    control,
    formState: { isSubmitting, isDirty },
    handleSubmit
  } = useForm<TForm>({
    resolver: zodResolver(formSchema),
    values: {
      pitVersionLimit: currentWorkspace?.pitVersionLimit || 10
    }
  });

  if (!currentWorkspace) return null;

  const handleVersionLimitSubmit = async ({ pitVersionLimit }: TForm) => {
    try {
      await updatePitVersion({
        pitVersionLimit,
        projectSlug: currentWorkspace.slug
      });

      createNotification({
        text: "Successfully updated version limit",
        type: "success"
      });
    } catch (err) {
      createNotification({
        text: "Failed updating project's version limit",
        type: "error"
      });
    }
  };

  const isAdmin = membership.roles.includes(ProjectMembershipRole.Admin);
  return (
    <div className="mb-6 rounded-lg border border-mineshaft-600 bg-mineshaft-900 p-4">
      <div className="flex w-full items-center justify-between">
        <p className="text-xl font-semibold">Version Retention</p>
      </div>
      <p className="mb-4 mt-2 max-w-2xl text-sm text-gray-400">
        This defines the maximum number of recent secret versions to keep per folder. Excess versions will be removed at midnight (UTC) each day.
      </p>
      <form onSubmit={handleSubmit(handleVersionLimitSubmit)} autoComplete="off">
        <div className="max-w-xs">
          <Controller
            control={control}
            defaultValue={0}
            name="pitVersionLimit"
            render={({ field, fieldState: { error } }) => (
              <FormControl
                isError={Boolean(error)}
                errorText={error?.message}
                label="Recent versions to keep"
              >
                <Input {...field} type="number" min={1} step={1} isDisabled={!isAdmin} />
              </FormControl>
            )}
          />
        </div>
        <Button
          colorSchema="secondary"
          type="submit"
          isLoading={isSubmitting}
          disabled={!isAdmin || !isDirty}
        >
          Save
        </Button>
      </form>
    </div>
  );
};
@@ -1 +0,0 @@
export { PointInTimeVersionLimitSection } from "./PointInTimeVersionLimitSection";
@@ -3,7 +3,6 @@ import { BackfillSecretReferenceSecretion } from "../BackfillSecretReferenceSect
import { DeleteProjectSection } from "../DeleteProjectSection";
import { E2EESection } from "../E2EESection";
import { EnvironmentSection } from "../EnvironmentSection";
import { PointInTimeVersionLimitSection } from "../PointInTimeVersionLimitSection";
import { ProjectNameChangeSection } from "../ProjectNameChangeSection";
import { SecretTagsSection } from "../SecretTagsSection";

@@ -15,7 +14,6 @@ export const ProjectGeneralTab = () => {
      <SecretTagsSection />
      <AutoCapitalizationSection />
      <E2EESection />
      <PointInTimeVersionLimitSection />
      <BackfillSecretReferenceSecretion />
      <DeleteProjectSection />
    </div>
@@ -178,7 +178,7 @@ export const AddShareSecretModal = ({ popUp, handlePopUpToggle }: Props) => {
              errorText={error?.message}
            >
              <SecretInput
                isVisible
                isVisible={false}
                {...field}
                containerClassName="py-1.5 rounded-md transition-all group-hover:mr-2 text-bunker-300 hover:border-primary-400/50 border border-mineshaft-600 bg-mineshaft-900 px-2 min-h-[100px]"
              />