Mirror of https://github.com/Infisical/infisical.git (synced 2025-08-31 15:32:32 +00:00)
Compare commits: project-gr ... misc/cli-d
135 Commits
SHA1:
4303547d8c
f1c8a66d31
0c21c19c95
62308fb0a3
55aa1e87c0
f686882ce6
e35417e11b
ff0f4cf46a
64093e9175
78fd852588
0c1f761a9a
c363f485eb
433d83641d
35bb7f299c
160e2b773b
f0a70e23ac
a6271a6187
b2fbec740f
26bed22b94
86e5f46d89
720789025c
811b3d5934
cac702415f
dbe7acdc80
b33985b338
670376336e
c59eddb00a
fe40ba497b
c5b7e3d8be
47e778a0b8
8b443e0957
f7fb015bd8
0d7cd357c3
e40f65836f
2d3c63e8b9
bdb36d6be4
3ee8f7aa20
36a5291dc3
977fd7a057
bf413c75bc
3250a18050
2eb1451c56
a24158b187
4cc80e38f4
d5ee74bb1a
ec776b94ae
14be4eb601
d1faed5672
9c6b300ad4
210ddf506a
33d740a4de
86dee1ec5d
6dfe2851e1
95b843779b
219aa3c641
cf5391d6d4
2ca476f21e
bf81469341
8445127fad
fb1cf3eb02
f8c822eda7
f20e4e189d
c7ec6236e1
c4dea2d51f
e89b0fdf3f
d57f76d230
55efa00b8c
29ba92dadb
7ba79dec19
6ea8bff224
65f4e1bea1
73ce3b8bb7
e63af81e60
6c2c2b319b
82c2be64a1
a064e31117
051d0780a8
5c9563f18b
5406871c30
8b89edc277
b394e191a8
92030884ec
4583eb1732
4c8bf9bd92
a6554deb80
ae00e74c17
adfd5a1b59
d6c321d34d
09a7346f32
1ae82dc460
80fada6b55
e4abac91b4
b4f37193ac
c8be5a637a
45485f8bd3
545df3bf28
766254c4e3
4bd1eb6f70
6847e5bb89
022ecf75e1
5d35ce6c6c
635f027752
6334ad0d07
89e8f200e9
e57935a7d3
617d07177c
ac5bfbb6c9
1f80ff040d
f8939835e1
d2b0ca94d8
5255f0ac17
4f67834eaa
952e60f08a
5367d1ac2e
92b9abb52b
e2680d9aee
aa049dc43b
419e9ac755
b7b36a475d
9159a9fa36
d5f5abef8e
f711f8a35c
9c8bb71878
d0547c354a
88abdd9529
f3a04f1a2f
082d6c44c4
a0aafcc1bf
b350841b86
ad623f8753
9cedae61a9
f7a4731565
a70aff5f31
d1d5dd29c6
41d7987a6e
.github/workflows/check-non-re2-regex.yml (new file, 53 lines, vendored)
@@ -0,0 +1,53 @@
name: Detect Non-RE2 Regex
on:
  pull_request:
    types: [opened, synchronize]

jobs:
  check-non-re2-regex:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout code
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Get diff of backend/*
        run: |
          git diff --unified=0 "origin/${{ github.base_ref }}"...HEAD -- backend/ > diff.txt

      - name: Scan backend diff for non-RE2 regex
        run: |
          # Extract only added lines (excluding file headers)
          grep '^+' diff.txt | grep -v '^+++' | sed 's/^\+//' > added_lines.txt

          if [ ! -s added_lines.txt ]; then
            echo "✅ No added lines in backend/ to check for regex usage."
            exit 0
          fi

          regex_usage_pattern='(^|[^A-Za-z0-9_"'"'"'`\.\/\\])(\/(?:\\.|[^\/\n\\])+\/[gimsuyv]*(?=\s*[\.\(;,)\]}:]|$)|new RegExp\()'

          # Find all added lines that contain regex patterns
          if grep -E "$regex_usage_pattern" added_lines.txt > potential_violations.txt 2>/dev/null; then
            # Filter out lines that contain 'new RE2' (allowing for whitespace variations)
            if grep -v -E 'new\s+RE2\s*\(' potential_violations.txt > actual_violations.txt 2>/dev/null && [ -s actual_violations.txt ]; then
              echo "🚨 ERROR: Found forbidden regex pattern in added/modified backend code."
              echo ""
              echo "The following lines use raw regex literals (/.../) or new RegExp(...):"
              echo "Please replace with 'new RE2(...)' for RE2 compatibility."
              echo ""
              echo "Offending lines:"
              cat actual_violations.txt
              exit 1
            else
              echo "✅ All identified regex usages are correctly using 'new RE2(...)'."
            fi
          else
            echo "✅ No regex patterns found in added/modified backend lines."
          fi

      - name: Cleanup temporary files
        if: always()
        run: |
          rm -f diff.txt added_lines.txt potential_violations.txt actual_violations.txt
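For reference, the substitution this check pushes contributors toward, sketched in TypeScript (assuming the `re2` npm package, whose class mirrors the built-in RegExp interface):

// Sketch of the substitution the workflow enforces; assumes the `re2`
// npm package, which mirrors the built-in RegExp API.
import RE2 from "re2";

// Flagged by the check: a raw literal or `new RegExp(...)` can backtrack
// catastrophically on crafted input (ReDoS).
// const oldPattern = /^(a+)+$/;

// Accepted: RE2 compiles to an automaton with linear-time matching.
const pattern = new RE2("^[a-z0-9-]+$");
console.log(pattern.test("folder-commits")); // true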
@@ -84,6 +84,11 @@ const getZodDefaultValue = (type: unknown, value: string | number | boolean | Ob
  }
};

const bigIntegerColumns: Record<string, string[]> = {
  "folder_commits": ["commitId"]
};

const main = async () => {
  const tables = (
    await db("information_schema.tables")

@@ -108,6 +113,9 @@ const main = async () => {
      const columnName = columnNames[colNum];
      const colInfo = columns[columnName];
      let ztype = getZodPrimitiveType(colInfo.type);
      if (bigIntegerColumns[tableName]?.includes(columnName)) {
        ztype = "z.coerce.bigint()";
      }
      if (["zodBuffer"].includes(ztype)) {
        zodImportSet.add(ztype);
      }
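For reference, why a bigint column needs coercion in the generated Zod schema: the Postgres driver returns BIGINT (int8) values as strings, so a plain z.bigint() would reject them. A minimal sketch:

import { z } from "zod";

// node-postgres hands BIGINT columns back as strings by default, so the
// generated schema must coerce before validating.
const commitId = z.coerce.bigint();

console.log(commitId.parse("9007199254740993")); // 9007199254740993n
console.log(z.bigint().safeParse("1").success);  // false - no coercion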
backend/src/@types/fastify.d.ts (vendored, 8 changes)

@@ -26,6 +26,7 @@ import { TLdapConfigServiceFactory } from "@app/ee/services/ldap-config/ldap-con
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
import { TOidcConfigServiceFactory } from "@app/ee/services/oidc/oidc-config-service";
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
import { TPitServiceFactory } from "@app/ee/services/pit/pit-service";
import { TProjectTemplateServiceFactory } from "@app/ee/services/project-template/project-template-service";
import { TProjectUserAdditionalPrivilegeServiceFactory } from "@app/ee/services/project-user-additional-privilege/project-user-additional-privilege-service";
import { TRateLimitServiceFactory } from "@app/ee/services/rate-limit/rate-limit-service";

@@ -59,6 +60,7 @@ import { TCertificateTemplateServiceFactory } from "@app/services/certificate-te
import { TCmekServiceFactory } from "@app/services/cmek/cmek-service";
import { TExternalGroupOrgRoleMappingServiceFactory } from "@app/services/external-group-org-role-mapping/external-group-org-role-mapping-service";
import { TExternalMigrationServiceFactory } from "@app/services/external-migration/external-migration-service";
import { TFolderCommitServiceFactory } from "@app/services/folder-commit/folder-commit-service";
import { TGroupProjectServiceFactory } from "@app/services/group-project/group-project-service";
import { THsmServiceFactory } from "@app/services/hsm/hsm-service";
import { TIdentityServiceFactory } from "@app/services/identity/identity-service";

@@ -119,6 +121,10 @@ declare module "@fastify/request-context" {
    oidc?: {
      claims: Record<string, string>;
    };
    kubernetes?: {
      namespace: string;
      name: string;
    };
  };
  identityPermissionMetadata?: Record<string, unknown>; // filled by permission service
  assumedPrivilegeDetails?: { requesterId: string; actorId: string; actorType: ActorType; projectId: string };

@@ -272,6 +278,8 @@
    microsoftTeams: TMicrosoftTeamsServiceFactory;
    assumePrivileges: TAssumePrivilegeServiceFactory;
    githubOrgSync: TGithubOrgSyncServiceFactory;
    folderCommit: TFolderCommitServiceFactory;
    pit: TPitServiceFactory;
    secretScanningV2: TSecretScanningV2ServiceFactory;
    internalCertificateAuthority: TInternalCertificateAuthorityServiceFactory;
    pkiTemplate: TPkiTemplatesServiceFactory;
backend/src/@types/knex.d.ts (vendored, 48 changes)

@@ -80,6 +80,24 @@ import {
  TExternalKms,
  TExternalKmsInsert,
  TExternalKmsUpdate,
  TFolderCheckpointResources,
  TFolderCheckpointResourcesInsert,
  TFolderCheckpointResourcesUpdate,
  TFolderCheckpoints,
  TFolderCheckpointsInsert,
  TFolderCheckpointsUpdate,
  TFolderCommitChanges,
  TFolderCommitChangesInsert,
  TFolderCommitChangesUpdate,
  TFolderCommits,
  TFolderCommitsInsert,
  TFolderCommitsUpdate,
  TFolderTreeCheckpointResources,
  TFolderTreeCheckpointResourcesInsert,
  TFolderTreeCheckpointResourcesUpdate,
  TFolderTreeCheckpoints,
  TFolderTreeCheckpointsInsert,
  TFolderTreeCheckpointsUpdate,
  TGateways,
  TGatewaysInsert,
  TGatewaysUpdate,

@@ -1122,6 +1140,36 @@ declare module "knex/types/tables" {
      TGithubOrgSyncConfigsInsert,
      TGithubOrgSyncConfigsUpdate
    >;
    [TableName.FolderCommit]: KnexOriginal.CompositeTableType<
      TFolderCommits,
      TFolderCommitsInsert,
      TFolderCommitsUpdate
    >;
    [TableName.FolderCommitChanges]: KnexOriginal.CompositeTableType<
      TFolderCommitChanges,
      TFolderCommitChangesInsert,
      TFolderCommitChangesUpdate
    >;
    [TableName.FolderCheckpoint]: KnexOriginal.CompositeTableType<
      TFolderCheckpoints,
      TFolderCheckpointsInsert,
      TFolderCheckpointsUpdate
    >;
    [TableName.FolderCheckpointResources]: KnexOriginal.CompositeTableType<
      TFolderCheckpointResources,
      TFolderCheckpointResourcesInsert,
      TFolderCheckpointResourcesUpdate
    >;
    [TableName.FolderTreeCheckpoint]: KnexOriginal.CompositeTableType<
      TFolderTreeCheckpoints,
      TFolderTreeCheckpointsInsert,
      TFolderTreeCheckpointsUpdate
    >;
    [TableName.FolderTreeCheckpointResources]: KnexOriginal.CompositeTableType<
      TFolderTreeCheckpointResources,
      TFolderTreeCheckpointResourcesInsert,
      TFolderTreeCheckpointResourcesUpdate
    >;
    [TableName.SecretScanningDataSource]: KnexOriginal.CompositeTableType<
      TSecretScanningDataSources,
      TSecretScanningDataSourcesInsert,
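A sketch of what these CompositeTableType registrations enable: knex calls keyed by the new TableName entries now infer row, insert, and update types. The helper below is hypothetical, but its return type would be inferred from the registration:

import { Knex } from "knex";
import { TableName, TFolderCommits } from "@app/db/schemas";

// Hypothetical helper: the row type of FolderCommit queries is inferred
// as TFolderCommits via the module augmentation above.
const latestCommitForFolder = async (knex: Knex, folderId: string): Promise<TFolderCommits | undefined> =>
  knex(TableName.FolderCommit).where({ folderId }).orderBy("createdAt", "desc").first();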
@@ -0,0 +1,166 @@
import { Knex } from "knex";

import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";

export async function up(knex: Knex): Promise<void> {
  const hasFolderCommitTable = await knex.schema.hasTable(TableName.FolderCommit);
  if (!hasFolderCommitTable) {
    await knex.schema.createTable(TableName.FolderCommit, (t) => {
      t.uuid("id").primary().defaultTo(knex.fn.uuid());
      t.bigIncrements("commitId");
      t.jsonb("actorMetadata").notNullable();
      t.string("actorType").notNullable();
      t.string("message");
      t.uuid("folderId").notNullable();
      t.uuid("envId").notNullable();
      t.foreign("envId").references("id").inTable(TableName.Environment).onDelete("CASCADE");
      t.timestamps(true, true, true);

      t.index("folderId");
      t.index("envId");
    });
  }

  const hasFolderCommitChangesTable = await knex.schema.hasTable(TableName.FolderCommitChanges);
  if (!hasFolderCommitChangesTable) {
    await knex.schema.createTable(TableName.FolderCommitChanges, (t) => {
      t.uuid("id").primary().defaultTo(knex.fn.uuid());
      t.uuid("folderCommitId").notNullable();
      t.foreign("folderCommitId").references("id").inTable(TableName.FolderCommit).onDelete("CASCADE");
      t.string("changeType").notNullable();
      t.boolean("isUpdate").notNullable().defaultTo(false);
      t.uuid("secretVersionId");
      t.foreign("secretVersionId").references("id").inTable(TableName.SecretVersionV2).onDelete("CASCADE");
      t.uuid("folderVersionId");
      t.foreign("folderVersionId").references("id").inTable(TableName.SecretFolderVersion).onDelete("CASCADE");
      t.timestamps(true, true, true);

      t.index("folderCommitId");
      t.index("secretVersionId");
      t.index("folderVersionId");
    });
  }

  const hasFolderCheckpointTable = await knex.schema.hasTable(TableName.FolderCheckpoint);
  if (!hasFolderCheckpointTable) {
    await knex.schema.createTable(TableName.FolderCheckpoint, (t) => {
      t.uuid("id").primary().defaultTo(knex.fn.uuid());
      t.uuid("folderCommitId").notNullable();
      t.foreign("folderCommitId").references("id").inTable(TableName.FolderCommit).onDelete("CASCADE");
      t.timestamps(true, true, true);

      t.index("folderCommitId");
    });
  }

  const hasFolderCheckpointResourcesTable = await knex.schema.hasTable(TableName.FolderCheckpointResources);
  if (!hasFolderCheckpointResourcesTable) {
    await knex.schema.createTable(TableName.FolderCheckpointResources, (t) => {
      t.uuid("id").primary().defaultTo(knex.fn.uuid());
      t.uuid("folderCheckpointId").notNullable();
      t.foreign("folderCheckpointId").references("id").inTable(TableName.FolderCheckpoint).onDelete("CASCADE");
      t.uuid("secretVersionId");
      t.foreign("secretVersionId").references("id").inTable(TableName.SecretVersionV2).onDelete("CASCADE");
      t.uuid("folderVersionId");
      t.foreign("folderVersionId").references("id").inTable(TableName.SecretFolderVersion).onDelete("CASCADE");
      t.timestamps(true, true, true);

      t.index("folderCheckpointId");
      t.index("secretVersionId");
      t.index("folderVersionId");
    });
  }

  const hasFolderTreeCheckpointTable = await knex.schema.hasTable(TableName.FolderTreeCheckpoint);
  if (!hasFolderTreeCheckpointTable) {
    await knex.schema.createTable(TableName.FolderTreeCheckpoint, (t) => {
      t.uuid("id").primary().defaultTo(knex.fn.uuid());
      t.uuid("folderCommitId").notNullable();
      t.foreign("folderCommitId").references("id").inTable(TableName.FolderCommit).onDelete("CASCADE");
      t.timestamps(true, true, true);

      t.index("folderCommitId");
    });
  }

  const hasFolderTreeCheckpointResourcesTable = await knex.schema.hasTable(TableName.FolderTreeCheckpointResources);
  if (!hasFolderTreeCheckpointResourcesTable) {
    await knex.schema.createTable(TableName.FolderTreeCheckpointResources, (t) => {
      t.uuid("id").primary().defaultTo(knex.fn.uuid());
      t.uuid("folderTreeCheckpointId").notNullable();
      t.foreign("folderTreeCheckpointId").references("id").inTable(TableName.FolderTreeCheckpoint).onDelete("CASCADE");
      t.uuid("folderId").notNullable();
      t.uuid("folderCommitId").notNullable();
      t.foreign("folderCommitId").references("id").inTable(TableName.FolderCommit).onDelete("CASCADE");
      t.timestamps(true, true, true);

      t.index("folderTreeCheckpointId");
      t.index("folderId");
      t.index("folderCommitId");
    });
  }

  if (!hasFolderCommitTable) {
    await createOnUpdateTrigger(knex, TableName.FolderCommit);
  }

  if (!hasFolderCommitChangesTable) {
    await createOnUpdateTrigger(knex, TableName.FolderCommitChanges);
  }

  if (!hasFolderCheckpointTable) {
    await createOnUpdateTrigger(knex, TableName.FolderCheckpoint);
  }

  if (!hasFolderCheckpointResourcesTable) {
    await createOnUpdateTrigger(knex, TableName.FolderCheckpointResources);
  }

  if (!hasFolderTreeCheckpointTable) {
    await createOnUpdateTrigger(knex, TableName.FolderTreeCheckpoint);
  }

  if (!hasFolderTreeCheckpointResourcesTable) {
    await createOnUpdateTrigger(knex, TableName.FolderTreeCheckpointResources);
  }
}

export async function down(knex: Knex): Promise<void> {
  const hasFolderCheckpointResourcesTable = await knex.schema.hasTable(TableName.FolderCheckpointResources);
  const hasFolderTreeCheckpointResourcesTable = await knex.schema.hasTable(TableName.FolderTreeCheckpointResources);
  const hasFolderCommitTable = await knex.schema.hasTable(TableName.FolderCommit);
  const hasFolderCommitChangesTable = await knex.schema.hasTable(TableName.FolderCommitChanges);
  const hasFolderTreeCheckpointTable = await knex.schema.hasTable(TableName.FolderTreeCheckpoint);
  const hasFolderCheckpointTable = await knex.schema.hasTable(TableName.FolderCheckpoint);

  if (hasFolderTreeCheckpointResourcesTable) {
    await dropOnUpdateTrigger(knex, TableName.FolderTreeCheckpointResources);
    await knex.schema.dropTableIfExists(TableName.FolderTreeCheckpointResources);
  }

  if (hasFolderCheckpointResourcesTable) {
    await dropOnUpdateTrigger(knex, TableName.FolderCheckpointResources);
    await knex.schema.dropTableIfExists(TableName.FolderCheckpointResources);
  }

  if (hasFolderTreeCheckpointTable) {
    await dropOnUpdateTrigger(knex, TableName.FolderTreeCheckpoint);
    await knex.schema.dropTableIfExists(TableName.FolderTreeCheckpoint);
  }

  if (hasFolderCheckpointTable) {
    await dropOnUpdateTrigger(knex, TableName.FolderCheckpoint);
    await knex.schema.dropTableIfExists(TableName.FolderCheckpoint);
  }

  if (hasFolderCommitChangesTable) {
    await dropOnUpdateTrigger(knex, TableName.FolderCommitChanges);
    await knex.schema.dropTableIfExists(TableName.FolderCommitChanges);
  }

  if (hasFolderCommitTable) {
    await dropOnUpdateTrigger(knex, TableName.FolderCommit);
    await knex.schema.dropTableIfExists(TableName.FolderCommit);
  }
}
@@ -0,0 +1,19 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  if (!(await knex.schema.hasColumn(TableName.SecretFolderVersion, "description"))) {
    await knex.schema.alterTable(TableName.SecretFolderVersion, (t) => {
      t.string("description").nullable();
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  if (await knex.schema.hasColumn(TableName.SecretFolderVersion, "description")) {
    await knex.schema.alterTable(TableName.SecretFolderVersion, (t) => {
      t.dropColumn("description");
    });
  }
}
backend/src/db/migrations/20250602155451_fix-secret-versions.ts (new file, 139 lines)

@@ -0,0 +1,139 @@
/* eslint-disable no-await-in-loop */
import { Knex } from "knex";

import { chunkArray } from "@app/lib/fn";
import { selectAllTableCols } from "@app/lib/knex";
import { logger } from "@app/lib/logger";

import { SecretType, TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  logger.info("Starting secret version fix migration");

  // Get all shared secret IDs first to optimize versions query
  const secretIds = await knex(TableName.SecretV2)
    .where("type", SecretType.Shared)
    .select("id")
    .then((rows) => rows.map((row) => row.id));

  logger.info(`Found ${secretIds.length} shared secrets to process`);

  if (secretIds.length === 0) {
    logger.info("No shared secrets found");
    return;
  }

  const secretIdChunks = chunkArray(secretIds, 5000);

  for (let chunkIndex = 0; chunkIndex < secretIdChunks.length; chunkIndex += 1) {
    const currentSecretIds = secretIdChunks[chunkIndex];
    logger.info(`Processing chunk ${chunkIndex + 1} of ${secretIdChunks.length}`);

    // Get secrets and versions for current chunk
    const [sharedSecrets, allVersions] = await Promise.all([
      knex(TableName.SecretV2).whereIn("id", currentSecretIds).select(selectAllTableCols(TableName.SecretV2)),
      knex(TableName.SecretVersionV2).whereIn("secretId", currentSecretIds).select("secretId", "version")
    ]);

    const versionsBySecretId = new Map<string, number[]>();

    allVersions.forEach((v) => {
      const versions = versionsBySecretId.get(v.secretId);
      if (versions) {
        versions.push(v.version);
      } else {
        versionsBySecretId.set(v.secretId, [v.version]);
      }
    });

    const versionsToAdd = [];
    const secretsToUpdate = [];

    // Process each shared secret
    for (const secret of sharedSecrets) {
      const existingVersions = versionsBySecretId.get(secret.id) || [];

      if (existingVersions.length === 0) {
        // No versions exist - add current version
        versionsToAdd.push({
          secretId: secret.id,
          version: secret.version,
          key: secret.key,
          encryptedValue: secret.encryptedValue,
          encryptedComment: secret.encryptedComment,
          reminderNote: secret.reminderNote,
          reminderRepeatDays: secret.reminderRepeatDays,
          skipMultilineEncoding: secret.skipMultilineEncoding,
          metadata: secret.metadata,
          folderId: secret.folderId,
          actorType: "platform"
        });
      } else {
        const latestVersion = Math.max(...existingVersions);

        if (latestVersion !== secret.version) {
          // Latest version doesn't match - create new version and update secret
          const nextVersion = latestVersion + 1;

          versionsToAdd.push({
            secretId: secret.id,
            version: nextVersion,
            key: secret.key,
            encryptedValue: secret.encryptedValue,
            encryptedComment: secret.encryptedComment,
            reminderNote: secret.reminderNote,
            reminderRepeatDays: secret.reminderRepeatDays,
            skipMultilineEncoding: secret.skipMultilineEncoding,
            metadata: secret.metadata,
            folderId: secret.folderId,
            actorType: "platform"
          });

          secretsToUpdate.push({
            id: secret.id,
            newVersion: nextVersion
          });
        }
      }
    }

    logger.info(
      `Chunk ${chunkIndex + 1}: Adding ${versionsToAdd.length} versions, updating ${secretsToUpdate.length} secrets`
    );

    // Batch insert new versions
    if (versionsToAdd.length > 0) {
      const insertBatches = chunkArray(versionsToAdd, 9000);
      for (let i = 0; i < insertBatches.length; i += 1) {
        await knex.batchInsert(TableName.SecretVersionV2, insertBatches[i]);
      }
    }

    if (secretsToUpdate.length > 0) {
      const updateBatches = chunkArray(secretsToUpdate, 1000);

      for (const updateBatch of updateBatches) {
        const ids = updateBatch.map((u) => u.id);
        const versionCases = updateBatch.map((u) => `WHEN '${u.id}' THEN ${u.newVersion}`).join(" ");

        await knex.raw(
          `
          UPDATE ${TableName.SecretV2}
          SET version = CASE id ${versionCases} END,
              "updatedAt" = NOW()
          WHERE id IN (${ids.map(() => "?").join(",")})
          `,
          ids
        );
      }
    }
  }

  logger.info("Secret version fix migration completed");
}

export async function down(): Promise<void> {
  logger.info("Rollback not implemented for secret version fix migration");
  // Note: Rolling back this migration would be complex and potentially destructive
  // as it would require tracking which version entries were added
}
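The batching above leans on chunkArray from @app/lib/fn. An assumed implementation, shown only to make the chunk math concrete (not the actual source):

// Assumed shape of chunkArray from @app/lib/fn (not the actual source).
const chunkArray = <T>(items: T[], size: number): T[][] => {
  const chunks: T[][] = [];
  for (let i = 0; i < items.length; i += size) {
    chunks.push(items.slice(i, i + size));
  }
  return chunks;
};

chunkArray([1, 2, 3, 4, 5], 2); // [[1, 2], [3, 4], [5]]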
@@ -0,0 +1,345 @@
import { Knex } from "knex";

import { chunkArray } from "@app/lib/fn";
import { selectAllTableCols } from "@app/lib/knex";
import { logger } from "@app/lib/logger";
import { ActorType } from "@app/services/auth/auth-type";
import { ChangeType } from "@app/services/folder-commit/folder-commit-service";

import {
  ProjectType,
  SecretType,
  TableName,
  TFolderCheckpoints,
  TFolderCommits,
  TFolderTreeCheckpoints,
  TSecretFolders
} from "../schemas";

const sortFoldersByHierarchy = (folders: TSecretFolders[]) => {
  // Create a map for quick lookup of children by parent ID
  const childrenMap = new Map<string, TSecretFolders[]>();

  // Set of all folder IDs
  const allFolderIds = new Set<string>();

  // Build the set of all folder IDs
  folders.forEach((folder) => {
    if (folder.id) {
      allFolderIds.add(folder.id);
    }
  });

  // Group folders by their parentId
  folders.forEach((folder) => {
    if (folder.parentId) {
      const children = childrenMap.get(folder.parentId) || [];
      children.push(folder);
      childrenMap.set(folder.parentId, children);
    }
  });

  // Find root folders - those with no parentId or with a parentId that doesn't exist
  const rootFolders = folders.filter((folder) => !folder.parentId || !allFolderIds.has(folder.parentId));

  // Process each level of the hierarchy
  const result = [];
  let currentLevel = rootFolders;

  while (currentLevel.length > 0) {
    result.push(...currentLevel);

    const nextLevel = [];
    for (const folder of currentLevel) {
      if (folder.id) {
        const children = childrenMap.get(folder.id) || [];
        nextLevel.push(...children);
      }
    }

    currentLevel = nextLevel;
  }

  return result.reverse();
};

const getSecretsByFolderIds = async (knex: Knex, folderIds: string[]): Promise<Record<string, string[]>> => {
  const secrets = await knex(TableName.SecretV2)
    .whereIn(`${TableName.SecretV2}.folderId`, folderIds)
    .where(`${TableName.SecretV2}.type`, SecretType.Shared)
    .join<TableName.SecretVersionV2>(TableName.SecretVersionV2, (queryBuilder) => {
      void queryBuilder
        .on(`${TableName.SecretVersionV2}.secretId`, `${TableName.SecretV2}.id`)
        .andOn(`${TableName.SecretVersionV2}.version`, `${TableName.SecretV2}.version`);
    })
    .select(selectAllTableCols(TableName.SecretV2))
    .select(knex.ref("id").withSchema(TableName.SecretVersionV2).as("secretVersionId"));

  const secretsMap: Record<string, string[]> = {};

  secrets.forEach((secret) => {
    if (!secretsMap[secret.folderId]) {
      secretsMap[secret.folderId] = [];
    }
    secretsMap[secret.folderId].push(secret.secretVersionId);
  });

  return secretsMap;
};

const getFoldersByParentIds = async (knex: Knex, parentIds: string[]): Promise<Record<string, string[]>> => {
  const folders = await knex(TableName.SecretFolder)
    .whereIn(`${TableName.SecretFolder}.parentId`, parentIds)
    .where(`${TableName.SecretFolder}.isReserved`, false)
    .join<TableName.SecretFolderVersion>(TableName.SecretFolderVersion, (queryBuilder) => {
      void queryBuilder
        .on(`${TableName.SecretFolderVersion}.folderId`, `${TableName.SecretFolder}.id`)
        .andOn(`${TableName.SecretFolderVersion}.version`, `${TableName.SecretFolder}.version`);
    })
    .select(selectAllTableCols(TableName.SecretFolder))
    .select(knex.ref("id").withSchema(TableName.SecretFolderVersion).as("folderVersionId"));

  const foldersMap: Record<string, string[]> = {};

  folders.forEach((folder) => {
    if (!folder.parentId) {
      return;
    }
    if (!foldersMap[folder.parentId]) {
      foldersMap[folder.parentId] = [];
    }
    foldersMap[folder.parentId].push(folder.folderVersionId);
  });

  return foldersMap;
};

export async function up(knex: Knex): Promise<void> {
  logger.info("Initializing folder commits");
  const hasFolderCommitTable = await knex.schema.hasTable(TableName.FolderCommit);
  if (hasFolderCommitTable) {
    // Get Projects to Initialize
    const projects = await knex(TableName.Project)
      .where(`${TableName.Project}.version`, 3)
      .where(`${TableName.Project}.type`, ProjectType.SecretManager)
      .select(selectAllTableCols(TableName.Project));
    logger.info(`Found ${projects.length} projects to initialize`);

    // Process Projects in batches of 100
    const batches = chunkArray(projects, 100);
    let i = 0;
    for (const batch of batches) {
      i += 1;
      logger.info(`Processing project batch ${i} of ${batches.length}`);
      let foldersCommitsList = [];

      const rootFoldersMap: Record<string, string> = {};
      const envRootFoldersMap: Record<string, string> = {};

      // Get All Folders for the Project
      // eslint-disable-next-line no-await-in-loop
      const folders = await knex(TableName.SecretFolder)
        .join(TableName.Environment, `${TableName.SecretFolder}.envId`, `${TableName.Environment}.id`)
        .whereIn(
          `${TableName.Environment}.projectId`,
          batch.map((project) => project.id)
        )
        .where(`${TableName.SecretFolder}.isReserved`, false)
        .select(selectAllTableCols(TableName.SecretFolder));
      logger.info(`Found ${folders.length} folders to initialize in project batch ${i} of ${batches.length}`);

      // Sort Folders by Hierarchy (parents before nested folders)
      const sortedFolders = sortFoldersByHierarchy(folders);

      // eslint-disable-next-line no-await-in-loop
      const folderSecretsMap = await getSecretsByFolderIds(
        knex,
        sortedFolders.map((folder) => folder.id)
      );
      // eslint-disable-next-line no-await-in-loop
      const folderFoldersMap = await getFoldersByParentIds(
        knex,
        sortedFolders.map((folder) => folder.id)
      );

      // Get folder commit changes
      for (const folder of sortedFolders) {
        const subFolderVersionIds = folderFoldersMap[folder.id];
        const secretVersionIds = folderSecretsMap[folder.id];
        const changes = [];
        if (subFolderVersionIds) {
          changes.push(
            ...subFolderVersionIds.map((folderVersionId) => ({
              folderId: folder.id,
              changeType: ChangeType.ADD,
              secretVersionId: undefined,
              folderVersionId,
              isUpdate: false
            }))
          );
        }
        if (secretVersionIds) {
          changes.push(
            ...secretVersionIds.map((secretVersionId) => ({
              folderId: folder.id,
              changeType: ChangeType.ADD,
              secretVersionId,
              folderVersionId: undefined,
              isUpdate: false
            }))
          );
        }
        if (changes.length > 0) {
          const folderCommit = {
            commit: {
              actorMetadata: {},
              actorType: ActorType.PLATFORM,
              message: "Initialized folder",
              folderId: folder.id,
              envId: folder.envId
            },
            changes
          };
          foldersCommitsList.push(folderCommit);
          if (!folder.parentId) {
            rootFoldersMap[folder.id] = folder.envId;
            envRootFoldersMap[folder.envId] = folder.id;
          }
        }
      }
      logger.info(`Retrieved folder changes for project batch ${i} of ${batches.length}`);

      const filteredBrokenProjectFolders: string[] = [];

      foldersCommitsList = foldersCommitsList.filter((folderCommit) => {
        if (!envRootFoldersMap[folderCommit.commit.envId]) {
          filteredBrokenProjectFolders.push(folderCommit.commit.folderId);
          return false;
        }
        return true;
      });

      logger.info(
        `Filtered ${filteredBrokenProjectFolders.length} broken project folders: ${JSON.stringify(filteredBrokenProjectFolders)}`
      );

      // Insert New Commits in batches of 9000
      const newCommits = foldersCommitsList.map((folderCommit) => folderCommit.commit);
      const commitBatches = chunkArray(newCommits, 9000);

      let j = 0;
      for (const commitBatch of commitBatches) {
        j += 1;
        logger.info(`Inserting folder commits - batch ${j} of ${commitBatches.length}`);
        // Create folder commit
        // eslint-disable-next-line no-await-in-loop
        const newCommitsInserted = (await knex
          .batchInsert(TableName.FolderCommit, commitBatch)
          .returning("*")) as TFolderCommits[];

        logger.info(`Finished inserting folder commits - batch ${j} of ${commitBatches.length}`);

        const newCommitsMap: Record<string, string> = {};
        const newCommitsMapInverted: Record<string, string> = {};
        const newCheckpointsMap: Record<string, string> = {};
        newCommitsInserted.forEach((commit) => {
          newCommitsMap[commit.folderId] = commit.id;
          newCommitsMapInverted[commit.id] = commit.folderId;
        });

        // Create folder checkpoints
        // eslint-disable-next-line no-await-in-loop
        const newCheckpoints = (await knex
          .batchInsert(
            TableName.FolderCheckpoint,
            Object.values(newCommitsMap).map((commitId) => ({
              folderCommitId: commitId
            }))
          )
          .returning("*")) as TFolderCheckpoints[];

        logger.info(`Finished inserting folder checkpoints - batch ${j} of ${commitBatches.length}`);

        newCheckpoints.forEach((checkpoint) => {
          newCheckpointsMap[newCommitsMapInverted[checkpoint.folderCommitId]] = checkpoint.id;
        });

        // Create folder commit changes
        // eslint-disable-next-line no-await-in-loop
        await knex.batchInsert(
          TableName.FolderCommitChanges,
          foldersCommitsList
            .map((folderCommit) => folderCommit.changes)
            .flat()
            .map((change) => ({
              folderCommitId: newCommitsMap[change.folderId],
              changeType: change.changeType,
              secretVersionId: change.secretVersionId,
              folderVersionId: change.folderVersionId,
              isUpdate: false
            }))
        );

        logger.info(`Finished inserting folder commit changes - batch ${j} of ${commitBatches.length}`);

        // Create folder checkpoint resources
        // eslint-disable-next-line no-await-in-loop
        await knex.batchInsert(
          TableName.FolderCheckpointResources,
          foldersCommitsList
            .map((folderCommit) => folderCommit.changes)
            .flat()
            .map((change) => ({
              folderCheckpointId: newCheckpointsMap[change.folderId],
              folderVersionId: change.folderVersionId,
              secretVersionId: change.secretVersionId
            }))
        );

        logger.info(`Finished inserting folder checkpoint resources - batch ${j} of ${commitBatches.length}`);

        // Create Folder Tree Checkpoint
        // eslint-disable-next-line no-await-in-loop
        const newTreeCheckpoints = (await knex
          .batchInsert(
            TableName.FolderTreeCheckpoint,
            Object.keys(rootFoldersMap).map((folderId) => ({
              folderCommitId: newCommitsMap[folderId]
            }))
          )
          .returning("*")) as TFolderTreeCheckpoints[];

        logger.info(`Finished inserting folder tree checkpoints - batch ${j} of ${commitBatches.length}`);

        const newTreeCheckpointsMap: Record<string, string> = {};
        newTreeCheckpoints.forEach((checkpoint) => {
          newTreeCheckpointsMap[rootFoldersMap[newCommitsMapInverted[checkpoint.folderCommitId]]] = checkpoint.id;
        });

        // Create Folder Tree Checkpoint Resources
        // eslint-disable-next-line no-await-in-loop
        await knex
          .batchInsert(
            TableName.FolderTreeCheckpointResources,
            newCommitsInserted.map((folderCommit) => ({
              folderTreeCheckpointId: newTreeCheckpointsMap[folderCommit.envId],
              folderId: folderCommit.folderId,
              folderCommitId: folderCommit.id
            }))
          )
          .returning("*");

        logger.info(`Finished inserting folder tree checkpoint resources - batch ${j} of ${commitBatches.length}`);
      }
    }
  }
  logger.info("Folder commits initialized");
}

export async function down(knex: Knex): Promise<void> {
  const hasFolderCommitTable = await knex.schema.hasTable(TableName.FolderCommit);
  if (hasFolderCommitTable) {
    // delete all existing entries
    await knex(TableName.FolderCommit).del();
  }
}
@@ -0,0 +1,21 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  const hasShowSnapshotsLegacyColumn = await knex.schema.hasColumn(TableName.Project, "showSnapshotsLegacy");
  if (!hasShowSnapshotsLegacyColumn) {
    await knex.schema.table(TableName.Project, (table) => {
      table.boolean("showSnapshotsLegacy").notNullable().defaultTo(false);
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  const hasShowSnapshotsLegacyColumn = await knex.schema.hasColumn(TableName.Project, "showSnapshotsLegacy");
  if (hasShowSnapshotsLegacyColumn) {
    await knex.schema.table(TableName.Project, (table) => {
      table.dropColumn("showSnapshotsLegacy");
    });
  }
}
@@ -0,0 +1,21 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  const hasConfigColumn = await knex.schema.hasColumn(TableName.DynamicSecretLease, "config");
  if (!hasConfigColumn) {
    await knex.schema.alterTable(TableName.DynamicSecretLease, (table) => {
      table.jsonb("config");
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  const hasConfigColumn = await knex.schema.hasColumn(TableName.DynamicSecretLease, "config");
  if (hasConfigColumn) {
    await knex.schema.alterTable(TableName.DynamicSecretLease, (table) => {
      table.dropColumn("config");
    });
  }
}
@@ -0,0 +1,45 @@
import { Knex } from "knex";

import { selectAllTableCols } from "@app/lib/knex";

import { TableName } from "../schemas";

const BATCH_SIZE = 1000;

export async function up(knex: Knex): Promise<void> {
  const hasKubernetesHostColumn = await knex.schema.hasColumn(TableName.IdentityKubernetesAuth, "kubernetesHost");

  if (hasKubernetesHostColumn) {
    await knex.schema.alterTable(TableName.IdentityKubernetesAuth, (table) => {
      table.string("kubernetesHost").nullable().alter();
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  const hasKubernetesHostColumn = await knex.schema.hasColumn(TableName.IdentityKubernetesAuth, "kubernetesHost");

  // find all rows where kubernetesHost is null
  const rows = await knex(TableName.IdentityKubernetesAuth)
    .whereNull("kubernetesHost")
    .select(selectAllTableCols(TableName.IdentityKubernetesAuth));

  if (rows.length > 0) {
    for (let i = 0; i < rows.length; i += BATCH_SIZE) {
      const batch = rows.slice(i, i + BATCH_SIZE);
      // eslint-disable-next-line no-await-in-loop
      await knex(TableName.IdentityKubernetesAuth)
        .whereIn(
          "id",
          batch.map((row) => row.id)
        )
        .update({ kubernetesHost: "" });
    }
  }

  if (hasKubernetesHostColumn) {
    await knex.schema.alterTable(TableName.IdentityKubernetesAuth, (table) => {
      table.string("kubernetesHost").notNullable().alter();
    });
  }
}
@@ -3,12 +3,27 @@ import { Knex } from "knex";
import { initializeHsmModule } from "@app/ee/services/hsm/hsm-fns";
import { hsmServiceFactory } from "@app/ee/services/hsm/hsm-service";
import { TKeyStoreFactory } from "@app/keystore/keystore";
import { folderCheckpointDALFactory } from "@app/services/folder-checkpoint/folder-checkpoint-dal";
import { folderCheckpointResourcesDALFactory } from "@app/services/folder-checkpoint-resources/folder-checkpoint-resources-dal";
import { folderCommitDALFactory } from "@app/services/folder-commit/folder-commit-dal";
import { folderCommitServiceFactory } from "@app/services/folder-commit/folder-commit-service";
import { folderCommitChangesDALFactory } from "@app/services/folder-commit-changes/folder-commit-changes-dal";
import { folderTreeCheckpointDALFactory } from "@app/services/folder-tree-checkpoint/folder-tree-checkpoint-dal";
import { folderTreeCheckpointResourcesDALFactory } from "@app/services/folder-tree-checkpoint-resources/folder-tree-checkpoint-resources-dal";
import { identityDALFactory } from "@app/services/identity/identity-dal";
import { internalKmsDALFactory } from "@app/services/kms/internal-kms-dal";
import { kmskeyDALFactory } from "@app/services/kms/kms-key-dal";
import { kmsRootConfigDALFactory } from "@app/services/kms/kms-root-config-dal";
import { kmsServiceFactory } from "@app/services/kms/kms-service";
import { orgDALFactory } from "@app/services/org/org-dal";
import { projectDALFactory } from "@app/services/project/project-dal";
import { resourceMetadataDALFactory } from "@app/services/resource-metadata/resource-metadata-dal";
import { secretFolderDALFactory } from "@app/services/secret-folder/secret-folder-dal";
import { secretFolderVersionDALFactory } from "@app/services/secret-folder/secret-folder-version-dal";
import { secretTagDALFactory } from "@app/services/secret-tag/secret-tag-dal";
import { secretV2BridgeDALFactory } from "@app/services/secret-v2-bridge/secret-v2-bridge-dal";
import { secretVersionV2BridgeDALFactory } from "@app/services/secret-v2-bridge/secret-version-dal";
import { userDALFactory } from "@app/services/user/user-dal";

import { TMigrationEnvConfig } from "./env-config";

@@ -50,3 +65,77 @@ export const getMigrationEncryptionServices = async ({ envConfig, db, keyStore }

  return { kmsService };
};

export const getMigrationPITServices = async ({
  db,
  keyStore,
  envConfig
}: {
  db: Knex;
  keyStore: TKeyStoreFactory;
  envConfig: TMigrationEnvConfig;
}) => {
  const projectDAL = projectDALFactory(db);
  const folderCommitDAL = folderCommitDALFactory(db);
  const folderCommitChangesDAL = folderCommitChangesDALFactory(db);
  const folderCheckpointDAL = folderCheckpointDALFactory(db);
  const folderTreeCheckpointDAL = folderTreeCheckpointDALFactory(db);
  const userDAL = userDALFactory(db);
  const identityDAL = identityDALFactory(db);
  const folderDAL = secretFolderDALFactory(db);
  const folderVersionDAL = secretFolderVersionDALFactory(db);
  const secretVersionV2BridgeDAL = secretVersionV2BridgeDALFactory(db);
  const folderCheckpointResourcesDAL = folderCheckpointResourcesDALFactory(db);
  const secretV2BridgeDAL = secretV2BridgeDALFactory({ db, keyStore });
  const folderTreeCheckpointResourcesDAL = folderTreeCheckpointResourcesDALFactory(db);
  const secretTagDAL = secretTagDALFactory(db);

  const orgDAL = orgDALFactory(db);
  const kmsRootConfigDAL = kmsRootConfigDALFactory(db);
  const kmsDAL = kmskeyDALFactory(db);
  const internalKmsDAL = internalKmsDALFactory(db);
  const resourceMetadataDAL = resourceMetadataDALFactory(db);

  const hsmModule = initializeHsmModule(envConfig);
  hsmModule.initialize();

  const hsmService = hsmServiceFactory({
    hsmModule: hsmModule.getModule(),
    envConfig
  });

  const kmsService = kmsServiceFactory({
    kmsRootConfigDAL,
    keyStore,
    kmsDAL,
    internalKmsDAL,
    orgDAL,
    projectDAL,
    hsmService,
    envConfig
  });

  await hsmService.startService();
  await kmsService.startService();

  const folderCommitService = folderCommitServiceFactory({
    folderCommitDAL,
    folderCommitChangesDAL,
    folderCheckpointDAL,
    folderTreeCheckpointDAL,
    userDAL,
    identityDAL,
    folderDAL,
    folderVersionDAL,
    secretVersionV2BridgeDAL,
    projectDAL,
    folderCheckpointResourcesDAL,
    secretV2BridgeDAL,
    folderTreeCheckpointResourcesDAL,
    kmsService,
    secretTagDAL,
    resourceMetadataDAL
  });

  return { folderCommitService };
};
@@ -16,7 +16,8 @@ export const DynamicSecretLeasesSchema = z.object({
  statusDetails: z.string().nullable().optional(),
  dynamicSecretId: z.string().uuid(),
  createdAt: z.date(),
-  updatedAt: z.date()
+  updatedAt: z.date(),
+  config: z.unknown().nullable().optional()
});

export type TDynamicSecretLeases = z.infer<typeof DynamicSecretLeasesSchema>;
backend/src/db/schemas/folder-checkpoint-resources.ts (new file, 23 lines)

@@ -0,0 +1,23 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.

import { z } from "zod";

import { TImmutableDBKeys } from "./models";

export const FolderCheckpointResourcesSchema = z.object({
  id: z.string().uuid(),
  folderCheckpointId: z.string().uuid(),
  secretVersionId: z.string().uuid().nullable().optional(),
  folderVersionId: z.string().uuid().nullable().optional(),
  createdAt: z.date(),
  updatedAt: z.date()
});

export type TFolderCheckpointResources = z.infer<typeof FolderCheckpointResourcesSchema>;
export type TFolderCheckpointResourcesInsert = Omit<z.input<typeof FolderCheckpointResourcesSchema>, TImmutableDBKeys>;
export type TFolderCheckpointResourcesUpdate = Partial<
  Omit<z.input<typeof FolderCheckpointResourcesSchema>, TImmutableDBKeys>
>;
backend/src/db/schemas/folder-checkpoints.ts (new file, 19 lines)

@@ -0,0 +1,19 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.

import { z } from "zod";

import { TImmutableDBKeys } from "./models";

export const FolderCheckpointsSchema = z.object({
  id: z.string().uuid(),
  folderCommitId: z.string().uuid(),
  createdAt: z.date(),
  updatedAt: z.date()
});

export type TFolderCheckpoints = z.infer<typeof FolderCheckpointsSchema>;
export type TFolderCheckpointsInsert = Omit<z.input<typeof FolderCheckpointsSchema>, TImmutableDBKeys>;
export type TFolderCheckpointsUpdate = Partial<Omit<z.input<typeof FolderCheckpointsSchema>, TImmutableDBKeys>>;
backend/src/db/schemas/folder-commit-changes.ts (new file, 23 lines)

@@ -0,0 +1,23 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.

import { z } from "zod";

import { TImmutableDBKeys } from "./models";

export const FolderCommitChangesSchema = z.object({
  id: z.string().uuid(),
  folderCommitId: z.string().uuid(),
  changeType: z.string(),
  isUpdate: z.boolean().default(false),
  secretVersionId: z.string().uuid().nullable().optional(),
  folderVersionId: z.string().uuid().nullable().optional(),
  createdAt: z.date(),
  updatedAt: z.date()
});

export type TFolderCommitChanges = z.infer<typeof FolderCommitChangesSchema>;
export type TFolderCommitChangesInsert = Omit<z.input<typeof FolderCommitChangesSchema>, TImmutableDBKeys>;
export type TFolderCommitChangesUpdate = Partial<Omit<z.input<typeof FolderCommitChangesSchema>, TImmutableDBKeys>>;
backend/src/db/schemas/folder-commits.ts (new file, 24 lines)

@@ -0,0 +1,24 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.

import { z } from "zod";

import { TImmutableDBKeys } from "./models";

export const FolderCommitsSchema = z.object({
  id: z.string().uuid(),
  commitId: z.coerce.bigint(),
  actorMetadata: z.unknown(),
  actorType: z.string(),
  message: z.string().nullable().optional(),
  folderId: z.string().uuid(),
  envId: z.string().uuid(),
  createdAt: z.date(),
  updatedAt: z.date()
});

export type TFolderCommits = z.infer<typeof FolderCommitsSchema>;
export type TFolderCommitsInsert = Omit<z.input<typeof FolderCommitsSchema>, TImmutableDBKeys>;
export type TFolderCommitsUpdate = Partial<Omit<z.input<typeof FolderCommitsSchema>, TImmutableDBKeys>>;
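A sketch of the resulting insert type: since TImmutableDBKeys (extended later in this diff to include "commitId") covers id, createdAt, updatedAt, and commitId, none of those DB-assigned fields may appear in an insert. All values below are invented:

import { TFolderCommitsInsert } from "@app/db/schemas";

// commitId is a bigserial assigned by Postgres, so the insert type
// excludes it; the UUIDs here are purely illustrative.
const insertRow: TFolderCommitsInsert = {
  actorMetadata: { name: "platform" },
  actorType: "platform",
  folderId: "5f0c6e0a-0000-0000-0000-000000000000",
  envId: "9b1deb4d-0000-0000-0000-000000000000"
};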
backend/src/db/schemas/folder-tree-checkpoint-resources.ts (new file, 26 lines)

@@ -0,0 +1,26 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.

import { z } from "zod";

import { TImmutableDBKeys } from "./models";

export const FolderTreeCheckpointResourcesSchema = z.object({
  id: z.string().uuid(),
  folderTreeCheckpointId: z.string().uuid(),
  folderId: z.string().uuid(),
  folderCommitId: z.string().uuid(),
  createdAt: z.date(),
  updatedAt: z.date()
});

export type TFolderTreeCheckpointResources = z.infer<typeof FolderTreeCheckpointResourcesSchema>;
export type TFolderTreeCheckpointResourcesInsert = Omit<
  z.input<typeof FolderTreeCheckpointResourcesSchema>,
  TImmutableDBKeys
>;
export type TFolderTreeCheckpointResourcesUpdate = Partial<
  Omit<z.input<typeof FolderTreeCheckpointResourcesSchema>, TImmutableDBKeys>
>;
backend/src/db/schemas/folder-tree-checkpoints.ts (new file, 19 lines)

@@ -0,0 +1,19 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.

import { z } from "zod";

import { TImmutableDBKeys } from "./models";

export const FolderTreeCheckpointsSchema = z.object({
  id: z.string().uuid(),
  folderCommitId: z.string().uuid(),
  createdAt: z.date(),
  updatedAt: z.date()
});

export type TFolderTreeCheckpoints = z.infer<typeof FolderTreeCheckpointsSchema>;
export type TFolderTreeCheckpointsInsert = Omit<z.input<typeof FolderTreeCheckpointsSchema>, TImmutableDBKeys>;
export type TFolderTreeCheckpointsUpdate = Partial<Omit<z.input<typeof FolderTreeCheckpointsSchema>, TImmutableDBKeys>>;
@@ -18,7 +18,7 @@ export const IdentityKubernetesAuthsSchema = z.object({
  createdAt: z.date(),
  updatedAt: z.date(),
  identityId: z.string().uuid(),
-  kubernetesHost: z.string(),
+  kubernetesHost: z.string().nullable().optional(),
  encryptedCaCert: z.string().nullable().optional(),
  caCertIV: z.string().nullable().optional(),
  caCertTag: z.string().nullable().optional(),
@@ -24,6 +24,12 @@ export * from "./dynamic-secrets";
export * from "./external-certificate-authorities";
export * from "./external-group-org-role-mappings";
export * from "./external-kms";
export * from "./folder-checkpoint-resources";
export * from "./folder-checkpoints";
export * from "./folder-commit-changes";
export * from "./folder-commits";
export * from "./folder-tree-checkpoint-resources";
export * from "./folder-tree-checkpoints";
export * from "./gateways";
export * from "./git-app-install-sessions";
export * from "./git-app-org";
@@ -160,6 +160,12 @@ export enum TableName {
  ProjectMicrosoftTeamsConfigs = "project_microsoft_teams_configs",
  SecretReminderRecipients = "secret_reminder_recipients",
  GithubOrgSyncConfig = "github_org_sync_configs",
  FolderCommit = "folder_commits",
  FolderCommitChanges = "folder_commit_changes",
  FolderCheckpoint = "folder_checkpoints",
  FolderCheckpointResources = "folder_checkpoint_resources",
  FolderTreeCheckpoint = "folder_tree_checkpoints",
  FolderTreeCheckpointResources = "folder_tree_checkpoint_resources",
  SecretScanningDataSource = "secret_scanning_data_sources",
  SecretScanningResource = "secret_scanning_resources",
  SecretScanningScan = "secret_scanning_scans",

@@ -167,7 +173,7 @@ export enum TableName {
  SecretScanningConfig = "secret_scanning_configs"
}

-export type TImmutableDBKeys = "id" | "createdAt" | "updatedAt";
+export type TImmutableDBKeys = "id" | "createdAt" | "updatedAt" | "commitId";

export const UserDeviceSchema = z
  .object({
@@ -28,7 +28,8 @@ export const ProjectsSchema = z.object({
  type: z.string(),
  enforceCapitalization: z.boolean().default(false),
  hasDeleteProtection: z.boolean().default(false).nullable().optional(),
-  secretSharing: z.boolean().default(true)
+  secretSharing: z.boolean().default(true),
+  showSnapshotsLegacy: z.boolean().default(false)
});

export type TProjects = z.infer<typeof ProjectsSchema>;
@@ -14,7 +14,8 @@ export const SecretFolderVersionsSchema = z.object({
  createdAt: z.date(),
  updatedAt: z.date(),
  envId: z.string().uuid(),
-  folderId: z.string().uuid()
+  folderId: z.string().uuid(),
+  description: z.string().nullable().optional()
});

export type TSecretFolderVersions = z.infer<typeof SecretFolderVersionsSchema>;
@@ -36,7 +36,8 @@ export const registerDynamicSecretLeaseRouter = async (server: FastifyZodProvide
          ctx.addIssue({ code: z.ZodIssueCode.custom, message: "TTL must be less than a day" });
        }),
      path: z.string().trim().default("/").transform(removeTrailingSlash).describe(DYNAMIC_SECRET_LEASES.CREATE.path),
-      environmentSlug: z.string().min(1).describe(DYNAMIC_SECRET_LEASES.CREATE.path)
+      environmentSlug: z.string().min(1).describe(DYNAMIC_SECRET_LEASES.CREATE.environmentSlug),
+      config: z.any().optional()
    }),
    response: {
      200: z.object({
@@ -0,0 +1,67 @@
+import { z } from "zod";
+
+import { DynamicSecretLeasesSchema } from "@app/db/schemas";
+import { ApiDocsTags, DYNAMIC_SECRET_LEASES } from "@app/lib/api-docs";
+import { daysToMillisecond } from "@app/lib/dates";
+import { removeTrailingSlash } from "@app/lib/fn";
+import { ms } from "@app/lib/ms";
+import { writeLimit } from "@app/server/config/rateLimiter";
+import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
+import { SanitizedDynamicSecretSchema } from "@app/server/routes/sanitizedSchemas";
+import { AuthMode } from "@app/services/auth/auth-type";
+
+export const registerKubernetesDynamicSecretLeaseRouter = async (server: FastifyZodProvider) => {
+  server.route({
+    method: "POST",
+    url: "/",
+    config: {
+      rateLimit: writeLimit
+    },
+    schema: {
+      hide: false,
+      tags: [ApiDocsTags.DynamicSecrets],
+      body: z.object({
+        dynamicSecretName: z.string().min(1).describe(DYNAMIC_SECRET_LEASES.CREATE.dynamicSecretName).toLowerCase(),
+        projectSlug: z.string().min(1).describe(DYNAMIC_SECRET_LEASES.CREATE.projectSlug),
+        ttl: z
+          .string()
+          .optional()
+          .describe(DYNAMIC_SECRET_LEASES.CREATE.ttl)
+          .superRefine((val, ctx) => {
+            if (!val) return;
+            const valMs = ms(val);
+            if (valMs < 60 * 1000)
+              ctx.addIssue({ code: z.ZodIssueCode.custom, message: "TTL must be greater than 1min" });
+            if (valMs > daysToMillisecond(1))
+              ctx.addIssue({ code: z.ZodIssueCode.custom, message: "TTL must be less than a day" });
+          }),
+        path: z.string().trim().default("/").transform(removeTrailingSlash).describe(DYNAMIC_SECRET_LEASES.CREATE.path),
+        environmentSlug: z.string().min(1).describe(DYNAMIC_SECRET_LEASES.CREATE.environmentSlug),
+        config: z
+          .object({
+            namespace: z.string().min(1).optional().describe(DYNAMIC_SECRET_LEASES.KUBERNETES.CREATE.config.namespace)
+          })
+          .optional()
+      }),
+      response: {
+        200: z.object({
+          lease: DynamicSecretLeasesSchema,
+          dynamicSecret: SanitizedDynamicSecretSchema,
+          data: z.unknown()
+        })
+      }
+    },
+    onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
+    handler: async (req) => {
+      const { data, lease, dynamicSecret } = await server.services.dynamicSecretLease.create({
+        actor: req.permission.type,
+        actorId: req.permission.id,
+        actorAuthMethod: req.permission.authMethod,
+        actorOrgId: req.permission.orgId,
+        name: req.body.dynamicSecretName,
+        ...req.body
+      });
+      return { lease, data, dynamicSecret };
+    }
+  });
+};
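For orientation, a hedged sketch of calling the endpoint registered above. The URL assumes the /dynamic-secrets and /leases/kubernetes prefixes from the router registration later in this diff; the host, token, and all identifiers are placeholders:

// Editor's sketch, not from the repo: create a Kubernetes dynamic secret lease.
const token = process.env.INFISICAL_TOKEN ?? "";
const res = await fetch("https://app.infisical.com/api/v1/dynamic-secrets/leases/kubernetes", {
  method: "POST",
  headers: { "Content-Type": "application/json", Authorization: `Bearer ${token}` },
  body: JSON.stringify({
    dynamicSecretName: "k8s-sa", // placeholder name
    projectSlug: "my-project", // placeholder slug
    environmentSlug: "dev",
    path: "/",
    ttl: "1h", // must be >= 1min and < 1 day per the schema above
    config: { namespace: "team-a" } // optional namespace override
  })
});
const { lease, dynamicSecret, data } = await res.json();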
@@ -23,7 +23,10 @@ const validateUsernameTemplateCharacters = characterValidator([
   CharacterType.CloseBrace,
   CharacterType.CloseBracket,
   CharacterType.OpenBracket,
-  CharacterType.Fullstop
+  CharacterType.Fullstop,
+  CharacterType.SingleQuote,
+  CharacterType.Spaces,
+  CharacterType.Pipe
 ]);
 
 const userTemplateSchema = z
@@ -33,7 +36,7 @@ const userTemplateSchema = z
   .refine((el) => validateUsernameTemplateCharacters(el))
   .refine((el) =>
     isValidHandleBarTemplate(el, {
-      allowedExpressions: (val) => ["randomUsername", "unixTimestamp"].includes(val)
+      allowedExpressions: (val) => ["randomUsername", "unixTimestamp", "identity.name"].includes(val)
     })
   );
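With identity.name now an allowed expression (and single quotes, spaces, and pipes allowed characters), a template such as the following should pass this schema. The template string itself is illustrative, not taken from the repo:

// Illustrative only: templates may now reference the acting user or machine identity.
const usernameTemplate = "{{identity.name}}-{{randomUsername}}";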
@@ -6,6 +6,7 @@ import { registerAssumePrivilegeRouter } from "./assume-privilege-router";
 import { registerAuditLogStreamRouter } from "./audit-log-stream-router";
 import { registerCaCrlRouter } from "./certificate-authority-crl-router";
 import { registerDynamicSecretLeaseRouter } from "./dynamic-secret-lease-router";
+import { registerKubernetesDynamicSecretLeaseRouter } from "./dynamic-secret-lease-routers/kubernetes-lease-router";
 import { registerDynamicSecretRouter } from "./dynamic-secret-router";
 import { registerExternalKmsRouter } from "./external-kms-router";
 import { registerGatewayRouter } from "./gateway-router";
@@ -18,6 +19,7 @@ import { registerLdapRouter } from "./ldap-router";
 import { registerLicenseRouter } from "./license-router";
 import { registerOidcRouter } from "./oidc-router";
 import { registerOrgRoleRouter } from "./org-role-router";
+import { registerPITRouter } from "./pit-router";
 import { registerProjectRoleRouter } from "./project-role-router";
 import { registerProjectRouter } from "./project-router";
 import { registerRateLimitRouter } from "./rate-limit-router";
@@ -53,6 +55,7 @@ export const registerV1EERoutes = async (server: FastifyZodProvider) => {
     { prefix: "/workspace" }
   );
   await server.register(registerSnapshotRouter, { prefix: "/secret-snapshot" });
+  await server.register(registerPITRouter, { prefix: "/pit" });
   await server.register(registerSecretApprovalPolicyRouter, { prefix: "/secret-approvals" });
   await server.register(registerSecretApprovalRequestRouter, {
     prefix: "/secret-approval-requests"
@@ -69,6 +72,7 @@ export const registerV1EERoutes = async (server: FastifyZodProvider) => {
     async (dynamicSecretRouter) => {
       await dynamicSecretRouter.register(registerDynamicSecretRouter);
       await dynamicSecretRouter.register(registerDynamicSecretLeaseRouter, { prefix: "/leases" });
+      await dynamicSecretRouter.register(registerKubernetesDynamicSecretLeaseRouter, { prefix: "/leases/kubernetes" });
     },
     { prefix: "/dynamic-secrets" }
   );

backend/src/ee/routes/v1/pit-router.ts (new file, 416 lines)
@@ -0,0 +1,416 @@
+/* eslint-disable @typescript-eslint/no-base-to-string */
+import { z } from "zod";
+
+import { EventType } from "@app/ee/services/audit-log/audit-log-types";
+import { removeTrailingSlash } from "@app/lib/fn";
+import { readLimit } from "@app/server/config/rateLimiter";
+import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
+import { booleanSchema } from "@app/server/routes/sanitizedSchemas";
+import { AuthMode } from "@app/services/auth/auth-type";
+import { commitChangesResponseSchema, resourceChangeSchema } from "@app/services/folder-commit/folder-commit-schemas";
+
+const commitHistoryItemSchema = z.object({
+  id: z.string(),
+  folderId: z.string(),
+  actorType: z.string(),
+  actorMetadata: z.unknown().optional(),
+  message: z.string().optional().nullable(),
+  commitId: z.string(),
+  createdAt: z.string().or(z.date()),
+  envId: z.string()
+});
+
+const folderStateSchema = z.array(
+  z.object({
+    type: z.string(),
+    id: z.string(),
+    versionId: z.string(),
+    secretKey: z.string().optional(),
+    secretVersion: z.number().optional(),
+    folderName: z.string().optional(),
+    folderVersion: z.number().optional()
+  })
+);
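As a reading aid, an object literal that would satisfy one element of folderStateSchema — all values are invented placeholders:

// Invented sample matching the element shape of folderStateSchema above.
const sampleFolderStateEntry = {
  type: "secret", // assumed discriminator value
  id: "<resource-id>", // placeholder
  versionId: "<version-row-id>", // placeholder
  secretKey: "DATABASE_URL",
  secretVersion: 3
};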
+
+export const registerPITRouter = async (server: FastifyZodProvider) => {
+  // Get commits count for a folder
+  server.route({
+    method: "GET",
+    url: "/commits/count",
+    config: {
+      rateLimit: readLimit
+    },
+    schema: {
+      querystring: z.object({
+        environment: z.string().trim(),
+        path: z.string().trim().default("/").transform(removeTrailingSlash),
+        projectId: z.string().trim()
+      }),
+      response: {
+        200: z.object({
+          count: z.number(),
+          folderId: z.string()
+        })
+      }
+    },
+    onRequest: verifyAuth([AuthMode.JWT]),
+    handler: async (req) => {
+      const result = await server.services.pit.getCommitsCount({
+        actor: req.permission?.type,
+        actorId: req.permission?.id,
+        actorOrgId: req.permission?.orgId,
+        actorAuthMethod: req.permission?.authMethod,
+        projectId: req.query.projectId,
+        environment: req.query.environment,
+        path: req.query.path
+      });
+
+      await server.services.auditLog.createAuditLog({
+        ...req.auditLogInfo,
+        projectId: req.query.projectId,
+        event: {
+          type: EventType.GET_PROJECT_PIT_COMMIT_COUNT,
+          metadata: {
+            environment: req.query.environment,
+            path: req.query.path,
+            commitCount: result.count.toString()
+          }
+        }
+      });
+
+      return result;
+    }
+  });
+
+  // Get all commits for a folder
+  server.route({
+    method: "GET",
+    url: "/commits",
+    config: {
+      rateLimit: readLimit
+    },
+    schema: {
+      querystring: z.object({
+        environment: z.string().trim(),
+        path: z.string().trim().default("/").transform(removeTrailingSlash),
+        projectId: z.string().trim(),
+        offset: z.coerce.number().min(0).default(0),
+        limit: z.coerce.number().min(1).max(100).default(20),
+        search: z.string().trim().optional(),
+        sort: z.enum(["asc", "desc"]).default("desc")
+      }),
+      response: {
+        200: z.object({
+          commits: commitHistoryItemSchema.array(),
+          total: z.number(),
+          hasMore: z.boolean()
+        })
+      }
+    },
+    onRequest: verifyAuth([AuthMode.JWT]),
+    handler: async (req) => {
+      const result = await server.services.pit.getCommitsForFolder({
+        actor: req.permission?.type,
+        actorId: req.permission?.id,
+        actorOrgId: req.permission?.orgId,
+        actorAuthMethod: req.permission?.authMethod,
+        projectId: req.query.projectId,
+        environment: req.query.environment,
+        path: req.query.path,
+        offset: req.query.offset,
+        limit: req.query.limit,
+        search: req.query.search,
+        sort: req.query.sort
+      });
+
+      await server.services.auditLog.createAuditLog({
+        ...req.auditLogInfo,
+        projectId: req.query.projectId,
+        event: {
+          type: EventType.GET_PROJECT_PIT_COMMITS,
+          metadata: {
+            environment: req.query.environment,
+            path: req.query.path,
+            commitCount: result.commits.length.toString(),
+            offset: req.query.offset.toString(),
+            limit: req.query.limit.toString(),
+            search: req.query.search,
+            sort: req.query.sort
+          }
+        }
+      });
+
+      return result;
+    }
+  });
+
+  // Get commit changes for a specific commit
+  server.route({
+    method: "GET",
+    url: "/commits/:commitId/changes",
+    config: {
+      rateLimit: readLimit
+    },
+    schema: {
+      params: z.object({
+        commitId: z.string().trim()
+      }),
+      querystring: z.object({
+        projectId: z.string().trim()
+      }),
+      response: {
+        200: commitChangesResponseSchema
+      }
+    },
+    onRequest: verifyAuth([AuthMode.JWT]),
+    handler: async (req) => {
+      const result = await server.services.pit.getCommitChanges({
+        actor: req.permission?.type,
+        actorId: req.permission?.id,
+        actorOrgId: req.permission?.orgId,
+        actorAuthMethod: req.permission?.authMethod,
+        projectId: req.query.projectId,
+        commitId: req.params.commitId
+      });
+
+      await server.services.auditLog.createAuditLog({
+        ...req.auditLogInfo,
+        projectId: req.query.projectId,
+        event: {
+          type: EventType.GET_PROJECT_PIT_COMMIT_CHANGES,
+          metadata: {
+            commitId: req.params.commitId,
+            changesCount: (result.changes.changes?.length || 0).toString()
+          }
+        }
+      });
+
+      return result;
+    }
+  });
+
+  // Retrieve rollback changes for a commit
+  server.route({
+    method: "GET",
+    url: "/commits/:commitId/compare",
+    config: {
+      rateLimit: readLimit
+    },
+    schema: {
+      params: z.object({
+        commitId: z.string().trim()
+      }),
+      querystring: z.object({
+        folderId: z.string().trim(),
+        environment: z.string().trim(),
+        deepRollback: booleanSchema.default(false),
+        secretPath: z.string().trim().default("/").transform(removeTrailingSlash),
+        projectId: z.string().trim()
+      }),
+      response: {
+        200: z.array(
+          z.object({
+            folderId: z.string(),
+            folderName: z.string(),
+            folderPath: z.string().optional(),
+            changes: z.array(resourceChangeSchema)
+          })
+        )
+      }
+    },
+    onRequest: verifyAuth([AuthMode.JWT]),
+    handler: async (req) => {
+      const result = await server.services.pit.compareCommitChanges({
+        actor: req.permission?.type,
+        actorId: req.permission?.id,
+        actorOrgId: req.permission?.orgId,
+        actorAuthMethod: req.permission?.authMethod,
+        projectId: req.query.projectId,
+        commitId: req.params.commitId,
+        folderId: req.query.folderId,
+        environment: req.query.environment,
+        deepRollback: req.query.deepRollback,
+        secretPath: req.query.secretPath
+      });
+
+      await server.services.auditLog.createAuditLog({
+        ...req.auditLogInfo,
+        projectId: req.query.projectId,
+        event: {
+          type: EventType.PIT_COMPARE_FOLDER_STATES,
+          metadata: {
+            targetCommitId: req.params.commitId,
+            folderId: req.query.folderId,
+            deepRollback: req.query.deepRollback,
+            diffsCount: result.length.toString(),
+            environment: req.query.environment,
+            folderPath: req.query.secretPath
+          }
+        }
+      });
+
+      return result;
+    }
+  });
+
+  // Rollback to a previous commit
+  server.route({
+    method: "POST",
+    url: "/commits/:commitId/rollback",
+    config: {
+      rateLimit: readLimit
+    },
+    schema: {
+      params: z.object({
+        commitId: z.string().trim()
+      }),
+      body: z.object({
+        folderId: z.string().trim(),
+        deepRollback: z.boolean().default(false),
+        message: z.string().max(256).trim().optional(),
+        environment: z.string().trim(),
+        projectId: z.string().trim()
+      }),
+      response: {
+        200: z.object({
+          success: z.boolean(),
+          secretChangesCount: z.number().optional(),
+          folderChangesCount: z.number().optional(),
+          totalChanges: z.number().optional()
+        })
+      }
+    },
+    onRequest: verifyAuth([AuthMode.JWT]),
+    handler: async (req) => {
+      const result = await server.services.pit.rollbackToCommit({
+        actor: req.permission?.type,
+        actorId: req.permission?.id,
+        actorOrgId: req.permission?.orgId,
+        actorAuthMethod: req.permission?.authMethod,
+        projectId: req.body.projectId,
+        commitId: req.params.commitId,
+        folderId: req.body.folderId,
+        deepRollback: req.body.deepRollback,
+        message: req.body.message,
+        environment: req.body.environment
+      });
+
+      await server.services.auditLog.createAuditLog({
+        ...req.auditLogInfo,
+        projectId: req.body.projectId,
+        event: {
+          type: EventType.PIT_ROLLBACK_COMMIT,
+          metadata: {
+            targetCommitId: req.params.commitId,
+            environment: req.body.environment,
+            folderId: req.body.folderId,
+            deepRollback: req.body.deepRollback,
+            message: req.body.message || "Rollback to previous commit",
+            totalChanges: result.totalChanges?.toString() || "0"
+          }
+        }
+      });
+
+      return result;
+    }
+  });
+
+  // Revert commit
+  server.route({
+    method: "POST",
+    url: "/commits/:commitId/revert",
+    config: {
+      rateLimit: readLimit
+    },
+    schema: {
+      params: z.object({
+        commitId: z.string().trim()
+      }),
+      body: z.object({
+        projectId: z.string().trim()
+      }),
+      response: {
+        200: z.object({
+          success: z.boolean(),
+          message: z.string(),
+          originalCommitId: z.string(),
+          revertCommitId: z.string().optional(),
+          changesReverted: z.number().optional()
+        })
+      }
+    },
+    onRequest: verifyAuth([AuthMode.JWT]),
+    handler: async (req) => {
+      const result = await server.services.pit.revertCommit({
+        actor: req.permission?.type,
+        actorId: req.permission?.id,
+        actorOrgId: req.permission?.orgId,
+        actorAuthMethod: req.permission?.authMethod,
+        projectId: req.body.projectId,
+        commitId: req.params.commitId
+      });
+
+      await server.services.auditLog.createAuditLog({
+        ...req.auditLogInfo,
+        projectId: req.body.projectId,
+        event: {
+          type: EventType.PIT_REVERT_COMMIT,
+          metadata: {
+            commitId: req.params.commitId,
+            revertCommitId: result.revertCommitId,
+            changesReverted: result.changesReverted?.toString()
+          }
+        }
+      });
+
+      return result;
+    }
+  });
+
+  // Folder state at commit
+  server.route({
+    method: "GET",
+    url: "/commits/:commitId",
+    config: {
+      rateLimit: readLimit
+    },
+    schema: {
+      params: z.object({
+        commitId: z.string().trim()
+      }),
+      querystring: z.object({
+        folderId: z.string().trim(),
+        projectId: z.string().trim()
+      }),
+      response: {
+        200: folderStateSchema
+      }
+    },
+    onRequest: verifyAuth([AuthMode.JWT]),
+    handler: async (req) => {
+      const result = await server.services.pit.getFolderStateAtCommit({
+        actor: req.permission?.type,
+        actorId: req.permission?.id,
+        actorOrgId: req.permission?.orgId,
+        actorAuthMethod: req.permission?.authMethod,
+        projectId: req.query.projectId,
+        commitId: req.params.commitId
+      });
+
+      await server.services.auditLog.createAuditLog({
+        ...req.auditLogInfo,
+        projectId: req.query.projectId,
+        event: {
+          type: EventType.PIT_GET_FOLDER_STATE,
+          metadata: {
+            commitId: req.params.commitId,
+            folderId: req.query.folderId,
+            resourceCount: result.length.toString()
+          }
+        }
+      });
+
+      return result;
+    }
+  });
+};
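A hedged end-to-end sketch of the new PIT read endpoints. The /pit prefix comes from the registration earlier in this diff; the host and all identifiers are placeholders:

// Editor's sketch: list commits for a folder, then fetch one commit's changes.
const base = "https://app.infisical.com/api/v1/pit";
const pitToken = process.env.INFISICAL_TOKEN ?? "";
const pitHeaders = { Authorization: `Bearer ${pitToken}` };

const qs = new URLSearchParams({ projectId: "<project-id>", environment: "dev", path: "/" });
const { commits } = await (await fetch(`${base}/commits?${qs}`, { headers: pitHeaders })).json();

if (commits.length) {
  // Whether the route expects the row id or the commit number isn't pinned down
  // by this diff alone; adjust if needed.
  const commitId = commits[0].id;
  const changes = await (
    await fetch(`${base}/commits/${commitId}/changes?projectId=<project-id>`, { headers: pitHeaders })
  ).json();
  console.log(changes);
}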
@@ -65,9 +65,10 @@ export const registerSnapshotRouter = async (server: FastifyZodProvider) => {
       rateLimit: writeLimit
     },
     schema: {
-      hide: false,
+      hide: true,
+      deprecated: true,
       tags: [ApiDocsTags.Projects],
-      description: "Roll back project secrets to those captured in a secret snapshot version.",
+      description: "(Deprecated) Roll back project secrets to those captured in a secret snapshot version.",
       security: [
         {
           bearerAuth: []
@@ -84,6 +85,10 @@ export const registerSnapshotRouter = async (server: FastifyZodProvider) => {
     },
     onRequest: verifyAuth([AuthMode.JWT, AuthMode.API_KEY, AuthMode.IDENTITY_ACCESS_TOKEN]),
     handler: async (req) => {
+      throw new Error(
+        "This endpoint is deprecated. Please use the new PIT recovery system. More information is available at: https://infisical.com/docs/documentation/platform/pit-recovery."
+      );
+
      const secretSnapshot = await server.services.snapshot.rollbackSnapshot({
        actor: req.permission.type,
        actorId: req.permission.id,
@@ -44,6 +44,7 @@ import {
   TSecretSyncRaw,
   TUpdateSecretSyncDTO
 } from "@app/services/secret-sync/secret-sync-types";
+import { TWebhookPayloads } from "@app/services/webhook/webhook-types";
 import { WorkflowIntegration } from "@app/services/workflow-integration/workflow-integration-types";
 
 import { KmipPermission } from "../kmip/kmip-enum";
@@ -206,6 +207,7 @@ export enum EventType {
   CREATE_WEBHOOK = "create-webhook",
   UPDATE_WEBHOOK_STATUS = "update-webhook-status",
   DELETE_WEBHOOK = "delete-webhook",
+  WEBHOOK_TRIGGERED = "webhook-triggered",
   GET_SECRET_IMPORTS = "get-secret-imports",
   GET_SECRET_IMPORT = "get-secret-import",
   CREATE_SECRET_IMPORT = "create-secret-import",
@@ -393,6 +395,13 @@ export enum EventType {
   PROJECT_ASSUME_PRIVILEGE_SESSION_START = "project-assume-privileges-session-start",
   PROJECT_ASSUME_PRIVILEGE_SESSION_END = "project-assume-privileges-session-end",
 
+  GET_PROJECT_PIT_COMMITS = "get-project-pit-commits",
+  GET_PROJECT_PIT_COMMIT_CHANGES = "get-project-pit-commit-changes",
+  GET_PROJECT_PIT_COMMIT_COUNT = "get-project-pit-commit-count",
+  PIT_ROLLBACK_COMMIT = "pit-rollback-commit",
+  PIT_REVERT_COMMIT = "pit-revert-commit",
+  PIT_GET_FOLDER_STATE = "pit-get-folder-state",
+  PIT_COMPARE_FOLDER_STATES = "pit-compare-folder-states",
   SECRET_SCANNING_DATA_SOURCE_LIST = "secret-scanning-data-source-list",
   SECRET_SCANNING_DATA_SOURCE_CREATE = "secret-scanning-data-source-create",
   SECRET_SCANNING_DATA_SOURCE_UPDATE = "secret-scanning-data-source-update",
@@ -1440,6 +1449,14 @@ interface DeleteWebhookEvent {
   };
 }
 
+export interface WebhookTriggeredEvent {
+  type: EventType.WEBHOOK_TRIGGERED;
+  metadata: {
+    webhookId: string;
+    status: string;
+  } & TWebhookPayloads;
+}
+
 interface GetSecretImportsEvent {
   type: EventType.GET_SECRET_IMPORTS;
   metadata: {
@@ -2979,6 +2996,78 @@ interface MicrosoftTeamsWorkflowIntegrationUpdateEvent {
   };
 }
 
+interface GetProjectPitCommitsEvent {
+  type: EventType.GET_PROJECT_PIT_COMMITS;
+  metadata: {
+    commitCount: string;
+    environment: string;
+    path: string;
+    offset: string;
+    limit: string;
+    search?: string;
+    sort: string;
+  };
+}
+
+interface GetProjectPitCommitChangesEvent {
+  type: EventType.GET_PROJECT_PIT_COMMIT_CHANGES;
+  metadata: {
+    changesCount: string;
+    commitId: string;
+  };
+}
+
+interface GetProjectPitCommitCountEvent {
+  type: EventType.GET_PROJECT_PIT_COMMIT_COUNT;
+  metadata: {
+    environment: string;
+    path: string;
+    commitCount: string;
+  };
+}
+
+interface PitRollbackCommitEvent {
+  type: EventType.PIT_ROLLBACK_COMMIT;
+  metadata: {
+    targetCommitId: string;
+    folderId: string;
+    deepRollback: boolean;
+    message: string;
+    totalChanges: string;
+    environment: string;
+  };
+}
+
+interface PitRevertCommitEvent {
+  type: EventType.PIT_REVERT_COMMIT;
+  metadata: {
+    commitId: string;
+    revertCommitId?: string;
+    changesReverted?: string;
+  };
+}
+
+interface PitGetFolderStateEvent {
+  type: EventType.PIT_GET_FOLDER_STATE;
+  metadata: {
+    commitId: string;
+    folderId: string;
+    resourceCount: string;
+  };
+}
+
+interface PitCompareFolderStatesEvent {
+  type: EventType.PIT_COMPARE_FOLDER_STATES;
+  metadata: {
+    targetCommitId: string;
+    folderId: string;
+    deepRollback: boolean;
+    diffsCount: string;
+    environment: string;
+    folderPath: string;
+  };
+}
+
 interface SecretScanningDataSourceListEvent {
   type: EventType.SECRET_SCANNING_DATA_SOURCE_LIST;
   metadata: {
@@ -3221,6 +3310,7 @@ export type Event =
   | CreateWebhookEvent
   | UpdateWebhookStatusEvent
   | DeleteWebhookEvent
  | WebhookTriggeredEvent
   | GetSecretImportsEvent
   | GetSecretImportEvent
   | CreateSecretImportEvent
@@ -3397,6 +3487,13 @@ export type Event =
   | MicrosoftTeamsWorkflowIntegrationGetEvent
   | MicrosoftTeamsWorkflowIntegrationListEvent
   | MicrosoftTeamsWorkflowIntegrationUpdateEvent
+  | GetProjectPitCommitsEvent
+  | GetProjectPitCommitChangesEvent
+  | PitRollbackCommitEvent
+  | GetProjectPitCommitCountEvent
+  | PitRevertCommitEvent
+  | PitCompareFolderStatesEvent
+  | PitGetFolderStateEvent
   | SecretScanningDataSourceListEvent
   | SecretScanningDataSourceGetEvent
   | SecretScanningDataSourceCreateEvent
@@ -10,6 +10,7 @@ import { TDynamicSecretDALFactory } from "../dynamic-secret/dynamic-secret-dal";
 import { DynamicSecretStatus } from "../dynamic-secret/dynamic-secret-types";
 import { DynamicSecretProviders, TDynamicProviderFns } from "../dynamic-secret/providers/models";
 import { TDynamicSecretLeaseDALFactory } from "./dynamic-secret-lease-dal";
+import { TDynamicSecretLeaseConfig } from "./dynamic-secret-lease-types";
 
 type TDynamicSecretLeaseQueueServiceFactoryDep = {
   queueService: TQueueServiceFactory;
@@ -99,7 +100,9 @@ export const dynamicSecretLeaseQueueServiceFactory = ({
         secretManagerDecryptor({ cipherTextBlob: dynamicSecretCfg.encryptedInput }).toString()
       ) as object;
 
-      await selectedProvider.revoke(decryptedStoredInput, dynamicSecretLease.externalEntityId);
+      await selectedProvider.revoke(decryptedStoredInput, dynamicSecretLease.externalEntityId, {
+        projectId: folder.projectId
+      });
       await dynamicSecretLeaseDAL.deleteById(dynamicSecretLease.id);
       return;
     }
@@ -132,8 +135,15 @@ export const dynamicSecretLeaseQueueServiceFactory = ({
 
       await Promise.all(dynamicSecretLeases.map(({ id }) => unsetLeaseRevocation(id)));
      await Promise.all(
-        dynamicSecretLeases.map(({ externalEntityId }) =>
-          selectedProvider.revoke(decryptedStoredInput, externalEntityId)
+        dynamicSecretLeases.map(({ externalEntityId, config }) =>
+          selectedProvider.revoke(
+            decryptedStoredInput,
+            externalEntityId,
+            {
+              projectId: folder.projectId
+            },
+            config as TDynamicSecretLeaseConfig
+          )
        )
      );
    }
@@ -1,4 +1,5 @@
 import { ForbiddenError, subject } from "@casl/ability";
+import RE2 from "re2";
 
 import { ActionProjectType } from "@app/db/schemas";
 import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
@@ -11,10 +12,13 @@ import { getConfig } from "@app/lib/config/env";
 import { BadRequestError, NotFoundError } from "@app/lib/errors";
 import { logger } from "@app/lib/logger";
 import { ms } from "@app/lib/ms";
+import { ActorType } from "@app/services/auth/auth-type";
+import { TIdentityDALFactory } from "@app/services/identity/identity-dal";
 import { TKmsServiceFactory } from "@app/services/kms/kms-service";
 import { KmsDataKey } from "@app/services/kms/kms-types";
 import { TProjectDALFactory } from "@app/services/project/project-dal";
 import { TSecretFolderDALFactory } from "@app/services/secret-folder/secret-folder-dal";
+import { TUserDALFactory } from "@app/services/user/user-dal";
 
 import { TDynamicSecretDALFactory } from "../dynamic-secret/dynamic-secret-dal";
 import { DynamicSecretProviders, TDynamicProviderFns } from "../dynamic-secret/providers/models";
@@ -25,6 +29,7 @@ import {
   TCreateDynamicSecretLeaseDTO,
   TDeleteDynamicSecretLeaseDTO,
   TDetailsDynamicSecretLeaseDTO,
+  TDynamicSecretLeaseConfig,
   TListDynamicSecretLeasesDTO,
   TRenewDynamicSecretLeaseDTO
 } from "./dynamic-secret-lease-types";
@@ -39,6 +44,8 @@ type TDynamicSecretLeaseServiceFactoryDep = {
   permissionService: Pick<TPermissionServiceFactory, "getProjectPermission">;
   projectDAL: Pick<TProjectDALFactory, "findProjectBySlug">;
   kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">;
+  userDAL: Pick<TUserDALFactory, "findById">;
+  identityDAL: TIdentityDALFactory;
 };
 
 export type TDynamicSecretLeaseServiceFactory = ReturnType<typeof dynamicSecretLeaseServiceFactory>;
@@ -52,8 +59,16 @@ export const dynamicSecretLeaseServiceFactory = ({
   dynamicSecretQueueService,
   projectDAL,
   licenseService,
-  kmsService
+  kmsService,
+  userDAL,
+  identityDAL
 }: TDynamicSecretLeaseServiceFactoryDep) => {
+  const extractEmailUsername = (email: string) => {
+    const regex = new RE2(/^([^@]+)/);
+    const match = email.match(regex);
+    return match ? match[1] : email;
+  };
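The helper above uses RE2 (the linear-time regex engine this codebase standardizes on) to grab everything before the "@". A quick behavior sketch, not a test from the repo:

import RE2 from "re2";
// "jane.doe@acme.io" -> "jane.doe"; strings without "@" pass through unchanged,
// since /^([^@]+)/ matches the whole input when no "@" is present.
console.log("jane.doe@acme.io".match(new RE2(/^([^@]+)/))?.[1]); // "jane.doe"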
+
   const create = async ({
     environmentSlug,
     path,
@@ -63,7 +78,8 @@ export const dynamicSecretLeaseServiceFactory = ({
     actorId,
     actorOrgId,
     actorAuthMethod,
-    ttl
+    ttl,
+    config
   }: TCreateDynamicSecretLeaseDTO) => {
     const appCfg = getConfig();
     const project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId);
@@ -132,10 +148,25 @@ export const dynamicSecretLeaseServiceFactory = ({
 
     let result;
     try {
+      const identity: { name: string } = { name: "" };
+      if (actor === ActorType.USER) {
+        const user = await userDAL.findById(actorId);
+        if (user) {
+          identity.name = extractEmailUsername(user.username);
+        }
+      } else if (actor === ActorType.Machine) {
+        const machineIdentity = await identityDAL.findById(actorId);
+        if (machineIdentity) {
+          identity.name = machineIdentity.name;
+        }
+      }
       result = await selectedProvider.create({
         inputs: decryptedStoredInput,
         expireAt: expireAt.getTime(),
-        usernameTemplate: dynamicSecretCfg.usernameTemplate
+        usernameTemplate: dynamicSecretCfg.usernameTemplate,
+        identity,
+        metadata: { projectId },
+        config
       });
     } catch (error: unknown) {
       if (error && typeof error === "object" && error !== null && "sqlMessage" in error) {
@@ -149,8 +180,10 @@ export const dynamicSecretLeaseServiceFactory = ({
       expireAt,
       version: 1,
       dynamicSecretId: dynamicSecretCfg.id,
-      externalEntityId: entityId
+      externalEntityId: entityId,
+      config
     });
 
     await dynamicSecretQueueService.setLeaseRevocation(dynamicSecretLease.id, Number(expireAt) - Number(new Date()));
     return { lease: dynamicSecretLease, dynamicSecret: dynamicSecretCfg, data };
   };
@@ -237,7 +270,8 @@ export const dynamicSecretLeaseServiceFactory = ({
     const { entityId } = await selectedProvider.renew(
       decryptedStoredInput,
       dynamicSecretLease.externalEntityId,
-      expireAt.getTime()
+      expireAt.getTime(),
+      { projectId }
     );
 
     await dynamicSecretQueueService.unsetLeaseRevocation(dynamicSecretLease.id);
@@ -313,7 +347,12 @@ export const dynamicSecretLeaseServiceFactory = ({
     ) as object;
 
     const revokeResponse = await selectedProvider
-      .revoke(decryptedStoredInput, dynamicSecretLease.externalEntityId)
+      .revoke(
+        decryptedStoredInput,
+        dynamicSecretLease.externalEntityId,
+        { projectId },
+        dynamicSecretLease.config as TDynamicSecretLeaseConfig
+      )
       .catch(async (err) => {
        // only propagate this error if forced is false
        if (!isForced) return { error: err as Error };
@@ -10,6 +10,7 @@ export type TCreateDynamicSecretLeaseDTO = {
   environmentSlug: string;
   ttl?: string;
   projectSlug: string;
+  config?: TDynamicSecretLeaseConfig;
 } & Omit<TProjectPermission, "projectId">;
 
 export type TDetailsDynamicSecretLeaseDTO = {
@@ -41,3 +42,9 @@ export type TRenewDynamicSecretLeaseDTO = {
   ttl?: string;
   projectSlug: string;
 } & Omit<TProjectPermission, "projectId">;
+
+export type TDynamicSecretKubernetesLeaseConfig = {
+  namespace?: string;
+};
+
+export type TDynamicSecretLeaseConfig = TDynamicSecretKubernetesLeaseConfig;
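The config captured at lease creation is persisted on the lease row and handed back to the provider at revocation (see the queue-service hunk above). A hedged sketch of that round trip:

// Editor's sketch: a Kubernetes lease config survives from create to revoke.
const leaseConfig: TDynamicSecretLeaseConfig = { namespace: "team-a" }; // optional override
// create(...) persists it:  dynamicSecretLeaseDAL.create({ ..., config: leaseConfig })
// revoke(...) replays it:   selectedProvider.revoke(input, entityId, { projectId }, leaseConfig)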
@@ -116,7 +116,7 @@ export const dynamicSecretServiceFactory = ({
       throw new BadRequestError({ message: "Provided dynamic secret already exist under the folder" });
 
     const selectedProvider = dynamicSecretProviders[provider.type];
-    const inputs = await selectedProvider.validateProviderInputs(provider.inputs);
+    const inputs = await selectedProvider.validateProviderInputs(provider.inputs, { projectId });
 
     let selectedGatewayId: string | null = null;
     if (inputs && typeof inputs === "object" && "gatewayId" in inputs && inputs.gatewayId) {
@@ -146,7 +146,7 @@ export const dynamicSecretServiceFactory = ({
       selectedGatewayId = gateway.id;
     }
 
-    const isConnected = await selectedProvider.validateConnection(provider.inputs);
+    const isConnected = await selectedProvider.validateConnection(provider.inputs, { projectId });
     if (!isConnected) throw new BadRequestError({ message: "Provider connection failed" });
 
     const { encryptor: secretManagerEncryptor } = await kmsService.createCipherPairWithDataKey({
@@ -272,7 +272,7 @@ export const dynamicSecretServiceFactory = ({
       secretManagerDecryptor({ cipherTextBlob: dynamicSecretCfg.encryptedInput }).toString()
     ) as object;
     const newInput = { ...decryptedStoredInput, ...(inputs || {}) };
-    const updatedInput = await selectedProvider.validateProviderInputs(newInput);
+    const updatedInput = await selectedProvider.validateProviderInputs(newInput, { projectId });
 
     let selectedGatewayId: string | null = null;
     if (updatedInput && typeof updatedInput === "object" && "gatewayId" in updatedInput && updatedInput?.gatewayId) {
@@ -301,7 +301,7 @@ export const dynamicSecretServiceFactory = ({
       selectedGatewayId = gateway.id;
     }
 
-    const isConnected = await selectedProvider.validateConnection(newInput);
+    const isConnected = await selectedProvider.validateConnection(newInput, { projectId });
     if (!isConnected) throw new BadRequestError({ message: "Provider connection failed" });
 
     const updatedDynamicCfg = await dynamicSecretDAL.transaction(async (tx) => {
@@ -472,7 +472,9 @@ export const dynamicSecretServiceFactory = ({
       secretManagerDecryptor({ cipherTextBlob: dynamicSecretCfg.encryptedInput }).toString()
     ) as object;
     const selectedProvider = dynamicSecretProviders[dynamicSecretCfg.type as DynamicSecretProviders];
-    const providerInputs = (await selectedProvider.validateProviderInputs(decryptedStoredInput)) as object;
+    const providerInputs = (await selectedProvider.validateProviderInputs(decryptedStoredInput, {
+      projectId
+    })) as object;
 
     return { ...dynamicSecretCfg, inputs: providerInputs };
   };
@@ -16,6 +16,7 @@ import { BadRequestError } from "@app/lib/errors";
 import { validateHandlebarTemplate } from "@app/lib/template/validate-handlebars";
 
 import { DynamicSecretAwsElastiCacheSchema, TDynamicProviderFns } from "./models";
+import { compileUsernameTemplate } from "./templateUtils";
 
 const CreateElastiCacheUserSchema = z.object({
   UserId: z.string().trim().min(1),
@@ -132,14 +133,14 @@ const generatePassword = () => {
   return customAlphabet(charset, 64)();
 };
 
-const generateUsername = (usernameTemplate?: string | null) => {
+const generateUsername = (usernameTemplate?: string | null, identity?: { name: string }) => {
   const charset = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-";
   const randomUsername = `inf-${customAlphabet(charset, 32)()}`;
   if (!usernameTemplate) return randomUsername;
 
-  return handlebars.compile(usernameTemplate)({
+  return compileUsernameTemplate({
+    usernameTemplate,
     randomUsername,
-    unixTimestamp: Math.floor(Date.now() / 100)
+    identity
   });
 };
 
@@ -174,14 +175,21 @@ export const AwsElastiCacheDatabaseProvider = (): TDynamicProviderFns => {
     return true;
   };
 
-  const create = async (data: { inputs: unknown; expireAt: number; usernameTemplate?: string | null }) => {
-    const { inputs, expireAt, usernameTemplate } = data;
+  const create = async (data: {
+    inputs: unknown;
+    expireAt: number;
+    usernameTemplate?: string | null;
+    identity?: {
+      name: string;
+    };
+  }) => {
+    const { inputs, expireAt, usernameTemplate, identity } = data;
     const providerInputs = await validateProviderInputs(inputs);
     if (!(await validateConnection(providerInputs))) {
       throw new BadRequestError({ message: "Failed to establish connection" });
     }
 
-    const leaseUsername = generateUsername(usernameTemplate);
+    const leaseUsername = generateUsername(usernameTemplate, identity);
     const leasePassword = generatePassword();
     const leaseExpiration = new Date(expireAt).toISOString();
 
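Every provider now routes template rendering through compileUsernameTemplate from ./templateUtils, a file this diff does not show. Based on the call sites, a plausible shape is the following — an assumption inferred for readability, not the actual implementation:

import handlebars from "handlebars";

// Assumed signature inferred from call sites; the real ./templateUtils may differ.
export const compileUsernameTemplate = ({
  usernameTemplate,
  randomUsername,
  identity
}: {
  usernameTemplate: string;
  randomUsername: string;
  identity?: { name: string };
}) =>
  handlebars.compile(usernameTemplate)({
    randomUsername,
    unixTimestamp: Math.floor(Date.now() / 1000), // seconds; the old inline code divided by 100
    identity
  });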
@@ -16,21 +16,25 @@ import {
   PutUserPolicyCommand,
   RemoveUserFromGroupCommand
 } from "@aws-sdk/client-iam";
-import handlebars from "handlebars";
+import { AssumeRoleCommand, STSClient } from "@aws-sdk/client-sts";
+import { randomUUID } from "crypto";
 import { z } from "zod";
 
+import { getConfig } from "@app/lib/config/env";
 import { BadRequestError } from "@app/lib/errors";
 import { alphaNumericNanoId } from "@app/lib/nanoid";
 
-import { DynamicSecretAwsIamSchema, TDynamicProviderFns } from "./models";
+import { AwsIamAuthType, DynamicSecretAwsIamSchema, TDynamicProviderFns } from "./models";
+import { compileUsernameTemplate } from "./templateUtils";
 
-const generateUsername = (usernameTemplate?: string | null) => {
+const generateUsername = (usernameTemplate?: string | null, identity?: { name: string }) => {
   const randomUsername = alphaNumericNanoId(32);
   if (!usernameTemplate) return randomUsername;
 
-  return handlebars.compile(usernameTemplate)({
+  return compileUsernameTemplate({
+    usernameTemplate,
     randomUsername,
-    unixTimestamp: Math.floor(Date.now() / 100)
+    identity
   });
 };
 
@@ -40,7 +44,43 @@ export const AwsIamProvider = (): TDynamicProviderFns => {
     return providerInputs;
   };
 
-  const $getClient = async (providerInputs: z.infer<typeof DynamicSecretAwsIamSchema>) => {
+  const $getClient = async (providerInputs: z.infer<typeof DynamicSecretAwsIamSchema>, projectId: string) => {
+    const appCfg = getConfig();
+    if (providerInputs.method === AwsIamAuthType.AssumeRole) {
+      const stsClient = new STSClient({
+        region: providerInputs.region,
+        credentials:
+          appCfg.DYNAMIC_SECRET_AWS_ACCESS_KEY_ID && appCfg.DYNAMIC_SECRET_AWS_SECRET_ACCESS_KEY
+            ? {
+                accessKeyId: appCfg.DYNAMIC_SECRET_AWS_ACCESS_KEY_ID,
+                secretAccessKey: appCfg.DYNAMIC_SECRET_AWS_SECRET_ACCESS_KEY
+              }
+            : undefined // if hosting on AWS
+      });
+
+      const command = new AssumeRoleCommand({
+        RoleArn: providerInputs.roleArn,
+        RoleSessionName: `infisical-dynamic-secret-${randomUUID()}`,
+        DurationSeconds: 900, // 15 mins
+        ExternalId: projectId
+      });
+
+      const assumeRes = await stsClient.send(command);
+
+      if (!assumeRes.Credentials?.AccessKeyId || !assumeRes.Credentials?.SecretAccessKey) {
+        throw new BadRequestError({ message: "Failed to assume role - verify credentials and role configuration" });
+      }
+      const client = new IAMClient({
+        region: providerInputs.region,
+        credentials: {
+          accessKeyId: assumeRes.Credentials?.AccessKeyId,
+          secretAccessKey: assumeRes.Credentials?.SecretAccessKey,
+          sessionToken: assumeRes.Credentials?.SessionToken
+        }
+      });
+      return client;
+    }
+
     const client = new IAMClient({
       region: providerInputs.region,
       credentials: {
@@ -52,21 +92,41 @@ export const AwsIamProvider = (): TDynamicProviderFns => {
     return client;
   };
 
-  const validateConnection = async (inputs: unknown) => {
+  const validateConnection = async (inputs: unknown, { projectId }: { projectId: string }) => {
     const providerInputs = await validateProviderInputs(inputs);
-    const client = await $getClient(providerInputs);
-
-    const isConnected = await client.send(new GetUserCommand({})).then(() => true);
+    const client = await $getClient(providerInputs, projectId);
+    const isConnected = await client
+      .send(new GetUserCommand({}))
+      .then(() => true)
+      .catch((err) => {
+        const message = (err as Error)?.message;
+        if (
+          providerInputs.method === AwsIamAuthType.AssumeRole &&
+          // assume role will throw an error asking to provide a username, but receiving that error means the credentials do have access in AWS
+          message.includes("Must specify userName when calling with non-User credentials")
+        ) {
+          return true;
+        }
+        throw err;
+      });
     return isConnected;
   };
 
-  const create = async (data: { inputs: unknown; expireAt: number; usernameTemplate?: string | null }) => {
-    const { inputs, usernameTemplate } = data;
+  const create = async (data: {
+    inputs: unknown;
+    expireAt: number;
+    usernameTemplate?: string | null;
+    identity?: {
+      name: string;
+    };
+    metadata: { projectId: string };
+  }) => {
+    const { inputs, usernameTemplate, metadata, identity } = data;
 
     const providerInputs = await validateProviderInputs(inputs);
-    const client = await $getClient(providerInputs);
+    const client = await $getClient(providerInputs, metadata.projectId);
 
-    const username = generateUsername(usernameTemplate);
+    const username = generateUsername(usernameTemplate, identity);
     const { policyArns, userGroups, policyDocument, awsPath, permissionBoundaryPolicyArn } = providerInputs;
     const createUserRes = await client.send(
       new CreateUserCommand({
@@ -76,6 +136,7 @@ export const AwsIamProvider = (): TDynamicProviderFns => {
         UserName: username
       })
     );
+
     if (!createUserRes.User) throw new BadRequestError({ message: "Failed to create AWS IAM User" });
     if (userGroups) {
       await Promise.all(
@@ -125,9 +186,9 @@ export const AwsIamProvider = (): TDynamicProviderFns => {
     };
   };
 
-  const revoke = async (inputs: unknown, entityId: string) => {
+  const revoke = async (inputs: unknown, entityId: string, metadata: { projectId: string }) => {
     const providerInputs = await validateProviderInputs(inputs);
-    const client = await $getClient(providerInputs);
+    const client = await $getClient(providerInputs, metadata.projectId);
 
     const username = entityId;
 
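Passing the project ID as ExternalId lets the assumed role pin itself to a single Infisical project. A matching trust policy would look roughly like this — illustrative JSON expressed as a TypeScript constant, with placeholder account and project values:

// Illustrative trust policy for the assume-role method; not taken from the repo.
const trustPolicy = {
  Version: "2012-10-17",
  Statement: [
    {
      Effect: "Allow",
      Principal: { AWS: "arn:aws:iam::<infisical-account-id>:root" }, // placeholder principal
      Action: "sts:AssumeRole",
      Condition: { StringEquals: { "sts:ExternalId": "<your-project-id>" } }
    }
  ]
};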
@@ -8,19 +8,20 @@ import { validateHandlebarTemplate } from "@app/lib/template/validate-handlebars";
 
 import { verifyHostInputValidity } from "../dynamic-secret-fns";
 import { DynamicSecretCassandraSchema, TDynamicProviderFns } from "./models";
+import { compileUsernameTemplate } from "./templateUtils";
 
 const generatePassword = (size = 48) => {
   const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*";
   return customAlphabet(charset, 48)(size);
 };
 
-const generateUsername = (usernameTemplate?: string | null) => {
+const generateUsername = (usernameTemplate?: string | null, identity?: { name: string }) => {
   const randomUsername = alphaNumericNanoId(32); // Username must start with an ascii letter, so we prepend the username with "inf-"
   if (!usernameTemplate) return randomUsername;
 
-  return handlebars.compile(usernameTemplate)({
+  return compileUsernameTemplate({
+    usernameTemplate,
     randomUsername,
-    unixTimestamp: Math.floor(Date.now() / 100)
+    identity
   });
 };
 
@@ -75,12 +76,17 @@ export const CassandraProvider = (): TDynamicProviderFns => {
     return isConnected;
   };
 
-  const create = async (data: { inputs: unknown; expireAt: number; usernameTemplate?: string | null }) => {
-    const { inputs, expireAt, usernameTemplate } = data;
+  const create = async (data: {
+    inputs: unknown;
+    expireAt: number;
+    usernameTemplate?: string | null;
+    identity?: { name: string };
+  }) => {
+    const { inputs, expireAt, usernameTemplate, identity } = data;
     const providerInputs = await validateProviderInputs(inputs);
     const client = await $getClient(providerInputs);
 
-    const username = generateUsername(usernameTemplate);
+    const username = generateUsername(usernameTemplate, identity);
     const password = generatePassword();
     const { keyspace } = providerInputs;
     const expiration = new Date(expireAt).toISOString();
@@ -1,5 +1,4 @@
 import { Client as ElasticSearchClient } from "@elastic/elasticsearch";
-import handlebars from "handlebars";
 import { customAlphabet } from "nanoid";
 import { z } from "zod";
 
@@ -7,19 +6,20 @@ import { alphaNumericNanoId } from "@app/lib/nanoid";
 
 import { verifyHostInputValidity } from "../dynamic-secret-fns";
 import { DynamicSecretElasticSearchSchema, ElasticSearchAuthTypes, TDynamicProviderFns } from "./models";
+import { compileUsernameTemplate } from "./templateUtils";
 
 const generatePassword = () => {
   const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*";
   return customAlphabet(charset, 64)();
 };
 
-const generateUsername = (usernameTemplate?: string | null) => {
+const generateUsername = (usernameTemplate?: string | null, identity?: { name: string }) => {
   const randomUsername = alphaNumericNanoId(32); // Username must start with an ascii letter, so we prepend the username with "inf-"
   if (!usernameTemplate) return randomUsername;
 
-  return handlebars.compile(usernameTemplate)({
+  return compileUsernameTemplate({
+    usernameTemplate,
     randomUsername,
-    unixTimestamp: Math.floor(Date.now() / 100)
+    identity
   });
 };
 
@@ -71,12 +71,12 @@ export const ElasticSearchProvider = (): TDynamicProviderFns => {
     return infoResponse;
   };
 
-  const create = async (data: { inputs: unknown; usernameTemplate?: string | null }) => {
-    const { inputs, usernameTemplate } = data;
+  const create = async (data: { inputs: unknown; usernameTemplate?: string | null; identity?: { name: string } }) => {
+    const { inputs, usernameTemplate, identity } = data;
     const providerInputs = await validateProviderInputs(inputs);
     const connection = await $getClient(providerInputs);
 
-    const username = generateUsername(usernameTemplate);
+    const username = generateUsername(usernameTemplate, identity);
     const password = generatePassword();
 
     await connection.security.putUser({
@@ -1,24 +1,46 @@
-import axios from "axios";
+import axios, { AxiosError } from "axios";
 import handlebars from "handlebars";
 import https from "https";
 
-import { InternalServerError } from "@app/lib/errors";
-import { GatewayProxyProtocol, withGatewayProxy } from "@app/lib/gateway";
+import { BadRequestError, InternalServerError } from "@app/lib/errors";
+import { GatewayHttpProxyActions, GatewayProxyProtocol, withGatewayProxy } from "@app/lib/gateway";
 import { alphaNumericNanoId } from "@app/lib/nanoid";
 import { blockLocalAndPrivateIpAddresses } from "@app/lib/validator";
 import { TKubernetesTokenRequest } from "@app/services/identity-kubernetes-auth/identity-kubernetes-auth-types";
 
+import { TDynamicSecretKubernetesLeaseConfig } from "../../dynamic-secret-lease/dynamic-secret-lease-types";
 import { TGatewayServiceFactory } from "../../gateway/gateway-service";
-import { DynamicSecretKubernetesSchema, TDynamicProviderFns } from "./models";
+import {
+  DynamicSecretKubernetesSchema,
+  KubernetesAuthMethod,
+  KubernetesCredentialType,
+  KubernetesRoleType,
+  TDynamicProviderFns
+} from "./models";
 
 const EXTERNAL_REQUEST_TIMEOUT = 10 * 1000;
 
+// This value is just a placeholder. When using gateway auth method, the url is irrelevant.
+const GATEWAY_AUTH_DEFAULT_URL = "https://kubernetes.default.svc.cluster.local";
+
 type TKubernetesProviderDTO = {
   gatewayService: Pick<TGatewayServiceFactory, "fnGetGatewayClientTlsByGatewayId">;
 };
 
 const generateUsername = (usernameTemplate?: string | null) => {
   const randomUsername = `dynamic-secret-sa-${alphaNumericNanoId(10).toLowerCase()}`;
   if (!usernameTemplate) return randomUsername;
 
   return handlebars.compile(usernameTemplate)({
     randomUsername,
     unixTimestamp: Math.floor(Date.now() / 100)
   });
 };
 
 export const KubernetesProvider = ({ gatewayService }: TKubernetesProviderDTO): TDynamicProviderFns => {
   const validateProviderInputs = async (inputs: unknown) => {
     const providerInputs = await DynamicSecretKubernetesSchema.parseAsync(inputs);
-    if (!providerInputs.gatewayId) {
+    if (!providerInputs.gatewayId && providerInputs.url) {
       await blockLocalAndPrivateIpAddresses(providerInputs.url);
     }
@@ -30,20 +52,27 @@ export const KubernetesProvider = ({ gatewayService }: TKubernetesProviderDTO):
       gatewayId: string;
       targetHost: string;
       targetPort: number;
+      caCert?: string;
+      reviewTokenThroughGateway: boolean;
+      enableSsl: boolean;
     },
-    gatewayCallback: (host: string, port: number) => Promise<T>
+    gatewayCallback: (host: string, port: number, httpsAgent?: https.Agent) => Promise<T>
   ): Promise<T> => {
     const relayDetails = await gatewayService.fnGetGatewayClientTlsByGatewayId(inputs.gatewayId);
     const [relayHost, relayPort] = relayDetails.relayAddress.split(":");
 
     const callbackResult = await withGatewayProxy(
-      async (port) => {
+      async (port, httpsAgent) => {
         // Needs to be https protocol or the kubernetes API server will fail with "Client sent an HTTP request to an HTTPS server"
-        const res = await gatewayCallback("https://localhost", port);
+        const res = await gatewayCallback(
+          inputs.reviewTokenThroughGateway ? "http://localhost" : "https://localhost",
+          port,
+          httpsAgent
+        );
         return res;
       },
       {
-        protocol: GatewayProxyProtocol.Tcp,
+        protocol: inputs.reviewTokenThroughGateway ? GatewayProxyProtocol.Http : GatewayProxyProtocol.Tcp,
         targetHost: inputs.targetHost,
         targetPort: inputs.targetPort,
         relayHost,
@@ -54,7 +83,12 @@ export const KubernetesProvider = ({ gatewayService }: TKubernetesProviderDTO):
           ca: relayDetails.certChain,
           cert: relayDetails.certificate,
           key: relayDetails.privateKey.toString()
-        }
+        },
+        // we always pass this, because it's needed for both tcp and http protocol
+        httpsAgent: new https.Agent({
+          ca: inputs.caCert,
+          rejectUnauthorized: inputs.enableSsl
+        })
       }
    );
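The agent is built unconditionally because both the TCP path (end-to-end TLS to the API server) and the HTTP path (gateway-terminated token review) may need the cluster CA. A standalone sketch of what that agent does, assuming a self-signed cluster CA supplied via an environment variable:

import https from "https";
import axios from "axios";

// Editor's sketch: trust a cluster-specific CA instead of disabling verification.
const exampleAgent = new https.Agent({
  ca: process.env.K8S_CA_PEM, // PEM bundle; placeholder source
  rejectUnauthorized: true // the equivalent of enableSsl: true above
});
// axios accepts the agent per request:
// await axios.get("https://kubernetes.example:6443/version", { httpsAgent: exampleAgent });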
|
||||
|
||||
@@ -64,7 +98,189 @@ export const KubernetesProvider = ({ gatewayService }: TKubernetesProviderDTO):
|
||||
const validateConnection = async (inputs: unknown) => {
|
||||
const providerInputs = await validateProviderInputs(inputs);
|
||||
|
||||
const serviceAccountGetCallback = async (host: string, port: number) => {
|
||||
const serviceAccountDynamicCallback = async (host: string, port: number, httpsAgent?: https.Agent) => {
|
||||
if (providerInputs.credentialType !== KubernetesCredentialType.Dynamic) {
|
||||
throw new Error("invalid callback");
|
||||
}
|
||||
|
||||
const baseUrl = port ? `${host}:${port}` : host;
|
||||
const serviceAccountName = generateUsername();
|
||||
const roleBindingName = `${serviceAccountName}-role-binding`;
|
||||
|
||||
const namespaces = providerInputs.namespace.split(",").map((namespace) => namespace.trim());
|
||||
|
||||
// Test each namespace sequentially instead of in parallel to simplify cleanup
|
||||
for await (const namespace of namespaces) {
|
||||
try {
|
||||
// 1. Create a test service account
|
||||
await axios.post(
|
||||
`${baseUrl}/api/v1/namespaces/${namespace}/serviceaccounts`,
|
||||
{
|
||||
metadata: {
|
||||
name: serviceAccountName,
|
||||
namespace
|
||||
}
|
||||
},
|
||||
{
|
||||
headers: {
|
||||
"Content-Type": "application/json",
|
||||
...(providerInputs.authMethod === KubernetesAuthMethod.Gateway
|
||||
? { "x-infisical-action": GatewayHttpProxyActions.UseGatewayK8sServiceAccount }
|
||||
: { Authorization: `Bearer ${providerInputs.clusterToken}` })
|
||||
},
|
||||
...(providerInputs.authMethod === KubernetesAuthMethod.Api
|
||||
? {
|
||||
httpsAgent
|
||||
}
|
||||
: {}),
|
||||
signal: AbortSignal.timeout(EXTERNAL_REQUEST_TIMEOUT),
|
||||
timeout: EXTERNAL_REQUEST_TIMEOUT
|
||||
}
|
||||
);
|
||||
|
||||
// 2. Create a test role binding
|
||||
const roleBindingUrl =
|
||||
providerInputs.roleType === KubernetesRoleType.ClusterRole
|
||||
? `${baseUrl}/apis/rbac.authorization.k8s.io/v1/clusterrolebindings`
|
||||
: `${baseUrl}/apis/rbac.authorization.k8s.io/v1/namespaces/${namespace}/rolebindings`;
|
||||
|
||||
const roleBindingMetadata = {
|
||||
name: roleBindingName,
|
||||
        ...(providerInputs.roleType !== KubernetesRoleType.ClusterRole && { namespace })
      };

      await axios.post(
        roleBindingUrl,
        {
          metadata: roleBindingMetadata,
          roleRef: {
            kind: providerInputs.roleType === KubernetesRoleType.ClusterRole ? "ClusterRole" : "Role",
            name: providerInputs.role,
            apiGroup: "rbac.authorization.k8s.io"
          },
          subjects: [
            {
              kind: "ServiceAccount",
              name: serviceAccountName,
              namespace
            }
          ]
        },
        {
          headers: {
            "Content-Type": "application/json",
            ...(providerInputs.authMethod === KubernetesAuthMethod.Gateway
              ? { "x-infisical-action": GatewayHttpProxyActions.UseGatewayK8sServiceAccount }
              : { Authorization: `Bearer ${providerInputs.clusterToken}` })
          },
          ...(providerInputs.authMethod === KubernetesAuthMethod.Api
            ? {
                httpsAgent
              }
            : {}),
          signal: AbortSignal.timeout(EXTERNAL_REQUEST_TIMEOUT),
          timeout: EXTERNAL_REQUEST_TIMEOUT
        }
      );

      // 3. Request a token for the test service account
      await axios.post(
        `${baseUrl}/api/v1/namespaces/${namespace}/serviceaccounts/${serviceAccountName}/token`,
        {
          spec: {
            expirationSeconds: 600, // 10 minutes
            ...(providerInputs.audiences?.length ? { audiences: providerInputs.audiences } : {})
          }
        },
        {
          headers: {
            "Content-Type": "application/json",
            ...(providerInputs.authMethod === KubernetesAuthMethod.Gateway
              ? { "x-infisical-action": GatewayHttpProxyActions.UseGatewayK8sServiceAccount }
              : { Authorization: `Bearer ${providerInputs.clusterToken}` })
          },
          ...(providerInputs.authMethod === KubernetesAuthMethod.Api
            ? {
                httpsAgent
              }
            : {}),
          signal: AbortSignal.timeout(EXTERNAL_REQUEST_TIMEOUT),
          timeout: EXTERNAL_REQUEST_TIMEOUT
        }
      );

      // 4. Cleanup: delete role binding and service account
      if (providerInputs.roleType === KubernetesRoleType.Role) {
        await axios.delete(
          `${baseUrl}/apis/rbac.authorization.k8s.io/v1/namespaces/${namespace}/rolebindings/${roleBindingName}`,
          {
            headers: {
              "Content-Type": "application/json",
              ...(providerInputs.authMethod === KubernetesAuthMethod.Gateway
                ? { "x-infisical-action": GatewayHttpProxyActions.UseGatewayK8sServiceAccount }
                : { Authorization: `Bearer ${providerInputs.clusterToken}` })
            },
            ...(providerInputs.authMethod === KubernetesAuthMethod.Api
              ? {
                  httpsAgent
                }
              : {}),
            signal: AbortSignal.timeout(EXTERNAL_REQUEST_TIMEOUT),
            timeout: EXTERNAL_REQUEST_TIMEOUT
          }
        );
      } else {
        await axios.delete(`${baseUrl}/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/${roleBindingName}`, {
          headers: {
            "Content-Type": "application/json",
            ...(providerInputs.authMethod === KubernetesAuthMethod.Gateway
              ? { "x-infisical-action": GatewayHttpProxyActions.UseGatewayK8sServiceAccount }
              : { Authorization: `Bearer ${providerInputs.clusterToken}` })
          },
          ...(providerInputs.authMethod === KubernetesAuthMethod.Api
            ? {
                httpsAgent
              }
            : {}),
          signal: AbortSignal.timeout(EXTERNAL_REQUEST_TIMEOUT),
          timeout: EXTERNAL_REQUEST_TIMEOUT
        });
      }

      await axios.delete(`${baseUrl}/api/v1/namespaces/${namespace}/serviceaccounts/${serviceAccountName}`, {
        headers: {
          "Content-Type": "application/json",
          ...(providerInputs.authMethod === KubernetesAuthMethod.Gateway
            ? { "x-infisical-action": GatewayHttpProxyActions.UseGatewayK8sServiceAccount }
            : { Authorization: `Bearer ${providerInputs.clusterToken}` })
        },
        ...(providerInputs.authMethod === KubernetesAuthMethod.Api
          ? {
              httpsAgent
            }
          : {}),
        signal: AbortSignal.timeout(EXTERNAL_REQUEST_TIMEOUT),
        timeout: EXTERNAL_REQUEST_TIMEOUT
      });
    } catch (error) {
      const cleanupInfo = `You may need to manually clean up the following resources in namespace "${namespace}": Service Account - ${serviceAccountName}, ${providerInputs.roleType === KubernetesRoleType.Role ? "Role" : "Cluster Role"} Binding - ${roleBindingName}.`;
      let mainErrorMessage = "Unknown error";
      if (error instanceof AxiosError) {
        mainErrorMessage = (error.response?.data as { message: string })?.message;
      } else if (error instanceof Error) {
        mainErrorMessage = error.message;
      }

      throw new Error(`${mainErrorMessage}. ${cleanupInfo}`);
    }
  }
};

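// Illustrative sketch (assumed helper, not part of this diff): the axios options
// above repeat verbatim for every cluster request; a builder of this shape could
// factor them out. The parameter type is a minimal stand-in for the example.
type TK8sRequestAuth = { authMethod: KubernetesAuthMethod; clusterToken?: string };
const buildK8sRequestConfig = (auth: TK8sRequestAuth, httpsAgent?: https.Agent) => ({
  headers: {
    "Content-Type": "application/json",
    ...(auth.authMethod === KubernetesAuthMethod.Gateway
      ? { "x-infisical-action": GatewayHttpProxyActions.UseGatewayK8sServiceAccount }
      : { Authorization: `Bearer ${auth.clusterToken}` })
  },
  ...(auth.authMethod === KubernetesAuthMethod.Api ? { httpsAgent } : {}),
  signal: AbortSignal.timeout(EXTERNAL_REQUEST_TIMEOUT),
  timeout: EXTERNAL_REQUEST_TIMEOUT
});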
  const serviceAccountStaticCallback = async (host: string, port: number, httpsAgent?: https.Agent) => {
    if (providerInputs.credentialType !== KubernetesCredentialType.Static) {
      throw new Error("invalid callback");
    }

    const baseUrl = port ? `${host}:${port}` : host;

    await axios.get(
@@ -72,36 +288,63 @@ export const KubernetesProvider = ({ gatewayService }: TKubernetesProviderDTO):
      {
        headers: {
          "Content-Type": "application/json",
          Authorization: `Bearer ${providerInputs.clusterToken}`
          ...(providerInputs.authMethod === KubernetesAuthMethod.Gateway
            ? { "x-infisical-action": GatewayHttpProxyActions.UseGatewayK8sServiceAccount }
            : { Authorization: `Bearer ${providerInputs.clusterToken}` })
        },
        ...(providerInputs.authMethod === KubernetesAuthMethod.Api
          ? {
              httpsAgent
            }
          : {}),
        signal: AbortSignal.timeout(EXTERNAL_REQUEST_TIMEOUT),
        timeout: EXTERNAL_REQUEST_TIMEOUT,
        httpsAgent: new https.Agent({
          ca: providerInputs.ca,
          rejectUnauthorized: providerInputs.sslEnabled
        })
        timeout: EXTERNAL_REQUEST_TIMEOUT
      }
    );
  };

  const url = new URL(providerInputs.url);
  const rawUrl =
    providerInputs.authMethod === KubernetesAuthMethod.Gateway ? GATEWAY_AUTH_DEFAULT_URL : providerInputs.url || "";
  const url = new URL(rawUrl);
  const k8sGatewayHost = url.hostname;
  const k8sPort = url.port ? Number(url.port) : 443;
  const k8sHost = `${url.protocol}//${url.hostname}`;

  try {
    if (providerInputs.gatewayId) {
      const k8sHost = url.hostname;

      await $gatewayProxyWrapper(
        {
          gatewayId: providerInputs.gatewayId,
          targetHost: k8sHost,
          targetPort: k8sPort
        },
        serviceAccountGetCallback
      );
      if (providerInputs.authMethod === KubernetesAuthMethod.Gateway) {
        await $gatewayProxyWrapper(
          {
            gatewayId: providerInputs.gatewayId,
            targetHost: k8sHost,
            targetPort: k8sPort,
            enableSsl: providerInputs.sslEnabled,
            caCert: providerInputs.ca,
            reviewTokenThroughGateway: true
          },
          providerInputs.credentialType === KubernetesCredentialType.Static
            ? serviceAccountStaticCallback
            : serviceAccountDynamicCallback
        );
      } else {
        await $gatewayProxyWrapper(
          {
            gatewayId: providerInputs.gatewayId,
            targetHost: k8sGatewayHost,
            targetPort: k8sPort,
            enableSsl: providerInputs.sslEnabled,
            caCert: providerInputs.ca,
            reviewTokenThroughGateway: false
          },
          providerInputs.credentialType === KubernetesCredentialType.Static
            ? serviceAccountStaticCallback
            : serviceAccountDynamicCallback
        );
      }
    } else if (providerInputs.credentialType === KubernetesCredentialType.Static) {
      await serviceAccountStaticCallback(k8sHost, k8sPort);
    } else {
      const k8sHost = `${url.protocol}//${url.hostname}`;
      await serviceAccountGetCallback(k8sHost, k8sPort);
      await serviceAccountDynamicCallback(k8sHost, k8sPort);
    }

    return true;
@@ -117,10 +360,153 @@ export const KubernetesProvider = ({ gatewayService }: TKubernetesProviderDTO):
  }
};

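// Illustrative sketch: the gateway branching above reduces to one rule — when the
// auth method is Gateway, the token review runs through the gateway's own service
// account (reviewTokenThroughGateway: true); otherwise the gateway only tunnels to
// the cluster host and the cluster token is used directly.
const shouldReviewThroughGateway = (authMethod: KubernetesAuthMethod): boolean =>
  authMethod === KubernetesAuthMethod.Gateway;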
const create = async ({ inputs, expireAt }: { inputs: unknown; expireAt: number }) => {
const create = async ({
  inputs,
  expireAt,
  usernameTemplate,
  config
}: {
  inputs: unknown;
  expireAt: number;
  usernameTemplate?: string | null;
  config?: TDynamicSecretKubernetesLeaseConfig;
}) => {
  const providerInputs = await validateProviderInputs(inputs);

  const tokenRequestCallback = async (host: string, port: number) => {
  const serviceAccountDynamicCallback = async (host: string, port: number, httpsAgent?: https.Agent) => {
    if (providerInputs.credentialType !== KubernetesCredentialType.Dynamic) {
      throw new Error("invalid callback");
    }

    const baseUrl = port ? `${host}:${port}` : host;
    const serviceAccountName = generateUsername(usernameTemplate);
    const roleBindingName = `${serviceAccountName}-role-binding`;
    const allowedNamespaces = providerInputs.namespace.split(",").map((namespace) => namespace.trim());

    if (config?.namespace && !allowedNamespaces?.includes(config?.namespace)) {
      throw new BadRequestError({
        message: `Namespace ${config?.namespace} is not allowed. Allowed namespaces: ${allowedNamespaces?.join(", ")}`
      });
    }

    const namespace = config?.namespace || allowedNamespaces[0];
    if (!namespace) {
      throw new BadRequestError({
        message: "No namespace provided"
      });
    }

    // 1. Create the service account
    await axios.post(
      `${baseUrl}/api/v1/namespaces/${namespace}/serviceaccounts`,
      {
        metadata: {
          name: serviceAccountName,
          namespace
        }
      },
      {
        headers: {
          "Content-Type": "application/json",
          ...(providerInputs.authMethod === KubernetesAuthMethod.Gateway
            ? { "x-infisical-action": GatewayHttpProxyActions.UseGatewayK8sServiceAccount }
            : { Authorization: `Bearer ${providerInputs.clusterToken}` })
        },
        ...(providerInputs.authMethod === KubernetesAuthMethod.Api
          ? {
              httpsAgent
            }
          : {}),
        signal: AbortSignal.timeout(EXTERNAL_REQUEST_TIMEOUT),
        timeout: EXTERNAL_REQUEST_TIMEOUT
      }
    );

    // 2. Create the role binding
    const roleBindingUrl =
      providerInputs.roleType === KubernetesRoleType.ClusterRole
        ? `${baseUrl}/apis/rbac.authorization.k8s.io/v1/clusterrolebindings`
        : `${baseUrl}/apis/rbac.authorization.k8s.io/v1/namespaces/${namespace}/rolebindings`;

    const roleBindingMetadata = {
      name: roleBindingName,
      ...(providerInputs.roleType !== KubernetesRoleType.ClusterRole && { namespace })
    };

    await axios.post(
      roleBindingUrl,
      {
        metadata: roleBindingMetadata,
        roleRef: {
          kind: providerInputs.roleType === KubernetesRoleType.ClusterRole ? "ClusterRole" : "Role",
          name: providerInputs.role,
          apiGroup: "rbac.authorization.k8s.io"
        },
        subjects: [
          {
            kind: "ServiceAccount",
            name: serviceAccountName,
            namespace
          }
        ]
      },
      {
        headers: {
          "Content-Type": "application/json",
          ...(providerInputs.authMethod === KubernetesAuthMethod.Gateway
            ? { "x-infisical-action": GatewayHttpProxyActions.UseGatewayK8sServiceAccount }
            : { Authorization: `Bearer ${providerInputs.clusterToken}` })
        },
        ...(providerInputs.authMethod === KubernetesAuthMethod.Api
          ? {
              httpsAgent
            }
          : {}),
        signal: AbortSignal.timeout(EXTERNAL_REQUEST_TIMEOUT),
        timeout: EXTERNAL_REQUEST_TIMEOUT
      }
    );

    // 3. Request a token for the service account
    const res = await axios.post<TKubernetesTokenRequest>(
      `${baseUrl}/api/v1/namespaces/${namespace}/serviceaccounts/${serviceAccountName}/token`,
      {
        spec: {
          expirationSeconds: Math.floor((expireAt - Date.now()) / 1000),
          ...(providerInputs.audiences?.length ? { audiences: providerInputs.audiences } : {})
        }
      },
      {
        headers: {
          "Content-Type": "application/json",
          ...(providerInputs.authMethod === KubernetesAuthMethod.Gateway
            ? { "x-infisical-action": GatewayHttpProxyActions.UseGatewayK8sServiceAccount }
            : { Authorization: `Bearer ${providerInputs.clusterToken}` })
        },
        ...(providerInputs.authMethod === KubernetesAuthMethod.Api
          ? {
              httpsAgent
            }
          : {}),
        signal: AbortSignal.timeout(EXTERNAL_REQUEST_TIMEOUT),
        timeout: EXTERNAL_REQUEST_TIMEOUT
      }
    );

    return { ...res.data, serviceAccountName };
  };

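// Illustrative example: the TokenRequest TTL above is derived from the lease expiry.
// For a lease expiring 15 minutes from now, expirationSeconds comes out to 900.
const exampleExpirationSeconds = (expireAtMs: number, nowMs: number): number =>
  Math.floor((expireAtMs - nowMs) / 1000); // e.g. (now + 900_000, now) -> 900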
  const tokenRequestStaticCallback = async (host: string, port: number, httpsAgent?: https.Agent) => {
    if (providerInputs.credentialType !== KubernetesCredentialType.Static) {
      throw new Error("invalid callback");
    }

    if (config?.namespace && config.namespace !== providerInputs.namespace) {
      throw new BadRequestError({
        message: `Namespace ${config?.namespace} is not allowed. Allowed namespace: ${providerInputs.namespace}.`
      });
    }

    const baseUrl = port ? `${host}:${port}` : host;

    const res = await axios.post<TKubernetesTokenRequest>(
@@ -134,39 +520,71 @@ export const KubernetesProvider = ({ gatewayService }: TKubernetesProviderDTO):
      {
        headers: {
          "Content-Type": "application/json",
          Authorization: `Bearer ${providerInputs.clusterToken}`
          ...(providerInputs.authMethod === KubernetesAuthMethod.Gateway
            ? { "x-infisical-action": GatewayHttpProxyActions.UseGatewayK8sServiceAccount }
            : { Authorization: `Bearer ${providerInputs.clusterToken}` })
        },
        ...(providerInputs.authMethod === KubernetesAuthMethod.Api
          ? {
              httpsAgent
            }
          : {}),
        signal: AbortSignal.timeout(EXTERNAL_REQUEST_TIMEOUT),
        timeout: EXTERNAL_REQUEST_TIMEOUT,
        httpsAgent: new https.Agent({
          ca: providerInputs.ca,
          rejectUnauthorized: providerInputs.sslEnabled
        })
        timeout: EXTERNAL_REQUEST_TIMEOUT
      }
    );

    return res.data;
    return { ...res.data, serviceAccountName: providerInputs.serviceAccountName };
  };

  const url = new URL(providerInputs.url);
  const rawUrl =
    providerInputs.authMethod === KubernetesAuthMethod.Gateway ? GATEWAY_AUTH_DEFAULT_URL : providerInputs.url || "";
  const url = new URL(rawUrl);
  const k8sHost = `${url.protocol}//${url.hostname}`;
  const k8sGatewayHost = url.hostname;
  const k8sPort = url.port ? Number(url.port) : 443;

  try {
    const tokenData = providerInputs.gatewayId
      ? await $gatewayProxyWrapper(
    let tokenData;
    if (providerInputs.gatewayId) {
      if (providerInputs.authMethod === KubernetesAuthMethod.Gateway) {
        tokenData = await $gatewayProxyWrapper(
          {
            gatewayId: providerInputs.gatewayId,
            targetHost: k8sHost,
            targetPort: k8sPort,
            enableSsl: providerInputs.sslEnabled,
            caCert: providerInputs.ca,
            reviewTokenThroughGateway: true
          },
          providerInputs.credentialType === KubernetesCredentialType.Static
            ? tokenRequestStaticCallback
            : serviceAccountDynamicCallback
        );
      } else {
        tokenData = await $gatewayProxyWrapper(
          {
            gatewayId: providerInputs.gatewayId,
            targetHost: k8sGatewayHost,
            targetPort: k8sPort
            targetPort: k8sPort,
            enableSsl: providerInputs.sslEnabled,
            caCert: providerInputs.ca,
            reviewTokenThroughGateway: false
          },
          tokenRequestCallback
        )
      : await tokenRequestCallback(k8sHost, k8sPort);
          providerInputs.credentialType === KubernetesCredentialType.Static
            ? tokenRequestStaticCallback
            : serviceAccountDynamicCallback
        );
      }
    } else {
      tokenData =
        providerInputs.credentialType === KubernetesCredentialType.Static
          ? await tokenRequestStaticCallback(k8sHost, k8sPort)
          : await serviceAccountDynamicCallback(k8sHost, k8sPort);
    }

    return {
      entityId: providerInputs.serviceAccountName,
      entityId: tokenData.serviceAccountName,
      data: { TOKEN: tokenData.status.token }
    };
  } catch (error) {
@@ -181,7 +599,122 @@ export const KubernetesProvider = ({ gatewayService }: TKubernetesProviderDTO):
  }
};

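// Illustrative shape of the lease payload returned by create() above; the lease
// service persists entityId so revoke() can later rebuild the resource names.
type TK8sLeaseExampleShape = {
  entityId: string; // service account name (dynamic) or configured name (static)
  data: { TOKEN: string }; // short-lived bearer token from the TokenRequest API
};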
const revoke = async (_inputs: unknown, entityId: string) => {
const revoke = async (
  inputs: unknown,
  entityId: string,
  // eslint-disable-next-line @typescript-eslint/no-unused-vars
  _metadata: { projectId: string },
  config?: TDynamicSecretKubernetesLeaseConfig
) => {
  const providerInputs = await validateProviderInputs(inputs);

  const serviceAccountDynamicCallback = async (host: string, port: number, httpsAgent?: https.Agent) => {
    if (providerInputs.credentialType !== KubernetesCredentialType.Dynamic) {
      throw new Error("invalid callback");
    }

    const baseUrl = port ? `${host}:${port}` : host;
    const roleBindingName = `${entityId}-role-binding`;

    const namespace = config?.namespace ?? providerInputs.namespace.split(",")[0].trim();

    if (providerInputs.roleType === KubernetesRoleType.Role) {
      await axios.delete(
        `${baseUrl}/apis/rbac.authorization.k8s.io/v1/namespaces/${namespace}/rolebindings/${roleBindingName}`,
        {
          headers: {
            "Content-Type": "application/json",
            ...(providerInputs.authMethod === KubernetesAuthMethod.Gateway
              ? { "x-infisical-action": GatewayHttpProxyActions.UseGatewayK8sServiceAccount }
              : { Authorization: `Bearer ${providerInputs.clusterToken}` })
          },
          ...(providerInputs.authMethod === KubernetesAuthMethod.Api
            ? {
                httpsAgent
              }
            : {}),
          signal: AbortSignal.timeout(EXTERNAL_REQUEST_TIMEOUT),
          timeout: EXTERNAL_REQUEST_TIMEOUT
        }
      );
    } else {
      await axios.delete(`${baseUrl}/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/${roleBindingName}`, {
        headers: {
          "Content-Type": "application/json",
          ...(providerInputs.authMethod === KubernetesAuthMethod.Gateway
            ? { "x-infisical-action": GatewayHttpProxyActions.UseGatewayK8sServiceAccount }
            : { Authorization: `Bearer ${providerInputs.clusterToken}` })
        },
        ...(providerInputs.authMethod === KubernetesAuthMethod.Api
          ? {
              httpsAgent
            }
          : {}),
        signal: AbortSignal.timeout(EXTERNAL_REQUEST_TIMEOUT),
        timeout: EXTERNAL_REQUEST_TIMEOUT
      });
    }

    // Delete the service account
    await axios.delete(`${baseUrl}/api/v1/namespaces/${namespace}/serviceaccounts/${entityId}`, {
      headers: {
        "Content-Type": "application/json",
        ...(providerInputs.authMethod === KubernetesAuthMethod.Gateway
          ? { "x-infisical-action": GatewayHttpProxyActions.UseGatewayK8sServiceAccount }
          : { Authorization: `Bearer ${providerInputs.clusterToken}` })
      },
      ...(providerInputs.authMethod === KubernetesAuthMethod.Api
        ? {
            httpsAgent
          }
        : {}),
      signal: AbortSignal.timeout(EXTERNAL_REQUEST_TIMEOUT),
      timeout: EXTERNAL_REQUEST_TIMEOUT
    });
  };

  if (providerInputs.credentialType === KubernetesCredentialType.Dynamic) {
    const rawUrl =
      providerInputs.authMethod === KubernetesAuthMethod.Gateway
        ? GATEWAY_AUTH_DEFAULT_URL
        : providerInputs.url || "";

    const url = new URL(rawUrl);
    const k8sGatewayHost = url.hostname;
    const k8sPort = url.port ? Number(url.port) : 443;
    const k8sHost = `${url.protocol}//${url.hostname}`;

    if (providerInputs.gatewayId) {
      if (providerInputs.authMethod === KubernetesAuthMethod.Gateway) {
        await $gatewayProxyWrapper(
          {
            gatewayId: providerInputs.gatewayId,
            targetHost: k8sHost,
            targetPort: k8sPort,
            enableSsl: providerInputs.sslEnabled,
            caCert: providerInputs.ca,
            reviewTokenThroughGateway: true
          },
          serviceAccountDynamicCallback
        );
      } else {
        await $gatewayProxyWrapper(
          {
            gatewayId: providerInputs.gatewayId,
            targetHost: k8sGatewayHost,
            targetPort: k8sPort,
            enableSsl: providerInputs.sslEnabled,
            caCert: providerInputs.ca,
            reviewTokenThroughGateway: false
          },
          serviceAccountDynamicCallback
        );
      }
    } else {
      await serviceAccountDynamicCallback(k8sHost, k8sPort);
    }
  }

  return { entityId };
};

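// Illustrative sketch: revoke() derives everything it deletes from the lease's
// entityId, mirroring the naming convention used in create().
const exampleRevokeTargets = (entityId: string, namespace: string) => ({
  serviceAccount: `${namespace}/${entityId}`,
  roleBinding: `${entityId}-role-binding`
});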
@@ -9,6 +9,7 @@ import { BadRequestError } from "@app/lib/errors";
import { alphaNumericNanoId } from "@app/lib/nanoid";

import { LdapCredentialType, LdapSchema, TDynamicProviderFns } from "./models";
import { compileUsernameTemplate } from "./templateUtils";

const generatePassword = () => {
  const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*$#";
@@ -22,13 +23,13 @@ const encodePassword = (password?: string) => {
  return base64Password;
};

const generateUsername = (usernameTemplate?: string | null) => {
const generateUsername = (usernameTemplate?: string | null, identity?: { name: string }) => {
  const randomUsername = alphaNumericNanoId(32); // Username must start with an ascii letter, so we prepend the username with "inf-"
  if (!usernameTemplate) return randomUsername;

  return handlebars.compile(usernameTemplate)({
  return compileUsernameTemplate({
    usernameTemplate,
    randomUsername,
    unixTimestamp: Math.floor(Date.now() / 100)
    identity
  });
};
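// Illustrative usage (values are placeholders): with a template, the identity name
// becomes available to the compiled context, e.g.
//   generateUsername("{{identity.name}}-{{randomUsername}}", { name: "ci-runner" })
// might yield "ci-runner-<32-char-random-id>".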

@@ -196,8 +197,8 @@ export const LdapProvider = (): TDynamicProviderFns => {
  return dnArray;
};

const create = async (data: { inputs: unknown; usernameTemplate?: string | null }) => {
  const { inputs, usernameTemplate } = data;
const create = async (data: { inputs: unknown; usernameTemplate?: string | null; identity?: { name: string } }) => {
  const { inputs, usernameTemplate, identity } = data;
  const providerInputs = await validateProviderInputs(inputs);
  const client = await $getClient(providerInputs);

@@ -224,7 +225,7 @@ export const LdapProvider = (): TDynamicProviderFns => {
      });
    }
  } else {
    const username = generateUsername(usernameTemplate);
    const username = generateUsername(usernameTemplate, identity);
    const password = generatePassword();
    const generatedLdif = generateLDIF({ username, password, ldifTemplate: providerInputs.creationLdif });

@@ -1,5 +1,10 @@
import RE2 from "re2";
import { z } from "zod";

import { CharacterType, characterValidator } from "@app/lib/validator/validate-string";

import { TDynamicSecretLeaseConfig } from "../../dynamic-secret-lease/dynamic-secret-lease-types";

export type PasswordRequirements = {
  length: number;
  required: {
@@ -20,6 +25,11 @@ export enum SqlProviders {
  Vertica = "vertica"
}

export enum AwsIamAuthType {
  AssumeRole = "assume-role",
  AccessKey = "access-key"
}

export enum ElasticSearchAuthTypes {
  User = "user",
  ApiKey = "api-key"
@@ -31,7 +41,18 @@ export enum LdapCredentialType {
}

export enum KubernetesCredentialType {
  Static = "static"
  Static = "static",
  Dynamic = "dynamic"
}

export enum KubernetesRoleType {
  ClusterRole = "cluster-role",
  Role = "role"
}

export enum KubernetesAuthMethod {
  Gateway = "gateway",
  Api = "api"
}

export enum TotpConfigType {
@@ -168,16 +189,38 @@ export const DynamicSecretSapAseSchema = z.object({
  revocationStatement: z.string().trim()
});

export const DynamicSecretAwsIamSchema = z.object({
  accessKey: z.string().trim().min(1),
  secretAccessKey: z.string().trim().min(1),
  region: z.string().trim().min(1),
  awsPath: z.string().trim().optional(),
  permissionBoundaryPolicyArn: z.string().trim().optional(),
  policyDocument: z.string().trim().optional(),
  userGroups: z.string().trim().optional(),
  policyArns: z.string().trim().optional()
});
export const DynamicSecretAwsIamSchema = z.preprocess(
  (val) => {
    if (typeof val === "object" && val !== null && !Object.hasOwn(val, "method")) {
      // eslint-disable-next-line no-param-reassign
      (val as { method: string }).method = AwsIamAuthType.AccessKey;
    }
    return val;
  },
  z.discriminatedUnion("method", [
    z.object({
      method: z.literal(AwsIamAuthType.AccessKey),
      accessKey: z.string().trim().min(1),
      secretAccessKey: z.string().trim().min(1),
      region: z.string().trim().min(1),
      awsPath: z.string().trim().optional(),
      permissionBoundaryPolicyArn: z.string().trim().optional(),
      policyDocument: z.string().trim().optional(),
      userGroups: z.string().trim().optional(),
      policyArns: z.string().trim().optional()
    }),
    z.object({
      method: z.literal(AwsIamAuthType.AssumeRole),
      roleArn: z.string().trim().min(1, "Role ARN required"),
      region: z.string().trim().min(1),
      awsPath: z.string().trim().optional(),
      permissionBoundaryPolicyArn: z.string().trim().optional(),
      policyDocument: z.string().trim().optional(),
      userGroups: z.string().trim().optional(),
      policyArns: z.string().trim().optional()
    })
  ])
);
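// Illustrative example (placeholder values): the preprocess step above defaults
// legacy payloads, which predate the "method" discriminator, to the access-key
// variant, so both of these parse:
const exampleLegacyAwsInput = { accessKey: "AKIA-EXAMPLE", secretAccessKey: "example", region: "us-east-1" };
const exampleAssumeRoleAwsInput = {
  method: AwsIamAuthType.AssumeRole,
  roleArn: "arn:aws:iam::000000000000:role/example",
  region: "us-east-1"
};
// DynamicSecretAwsIamSchema.parse(exampleLegacyAwsInput).method === AwsIamAuthType.AccessKey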

export const DynamicSecretMongoAtlasSchema = z.object({
  adminPublicKey: z.string().trim().min(1).describe("Admin user public api key"),
@@ -282,17 +325,89 @@ export const LdapSchema = z.union([
  })
]);

export const DynamicSecretKubernetesSchema = z.object({
  url: z.string().url().trim().min(1),
  gatewayId: z.string().nullable().optional(),
  sslEnabled: z.boolean().default(true),
  clusterToken: z.string().trim().min(1),
  ca: z.string().optional(),
  serviceAccountName: z.string().trim().min(1),
  credentialType: z.literal(KubernetesCredentialType.Static),
  namespace: z.string().trim().min(1),
  audiences: z.array(z.string().trim().min(1))
});
export const DynamicSecretKubernetesSchema = z
  .discriminatedUnion("credentialType", [
    z.object({
      url: z
        .string()
        .optional()
        .refine((val: string | undefined) => !val || new RE2(/^https?:\/\/.+/).test(val), {
          message: "Invalid URL. Must start with http:// or https:// (e.g. https://example.com)"
        }),
      clusterToken: z.string().trim().optional(),
      ca: z.string().optional(),
      sslEnabled: z.boolean().default(false),
      credentialType: z.literal(KubernetesCredentialType.Static),
      serviceAccountName: z.string().trim().min(1),
      namespace: z
        .string()
        .trim()
        .min(1)
        .refine((val) => !val.includes(","), "Namespace must be a single value, not a comma-separated list")
        .refine(
          (val) => characterValidator([CharacterType.AlphaNumeric, CharacterType.Hyphen])(val),
          "Invalid namespace format"
        ),
      gatewayId: z.string().optional(),
      audiences: z.array(z.string().trim().min(1)),
      authMethod: z.nativeEnum(KubernetesAuthMethod).default(KubernetesAuthMethod.Api)
    }),
    z.object({
      url: z
        .string()
        .url()
        .optional()
        .refine((val: string | undefined) => !val || new RE2(/^https?:\/\/.+/).test(val), {
          message: "Invalid URL. Must start with http:// or https:// (e.g. https://example.com)"
        }),
      clusterToken: z.string().trim().optional(),
      ca: z.string().optional(),
      sslEnabled: z.boolean().default(false),
      credentialType: z.literal(KubernetesCredentialType.Dynamic),
      namespace: z
        .string()
        .trim()
        .min(1)
        .refine((val) => {
          const namespaces = val.split(",").map((ns) => ns.trim());
          return (
            namespaces.length > 0 &&
            namespaces.every((ns) => ns.length > 0) &&
            namespaces.every((ns) => characterValidator([CharacterType.AlphaNumeric, CharacterType.Hyphen])(ns))
          );
        }, "Must be a valid comma-separated list of namespace values"),
      gatewayId: z.string().optional(),
      audiences: z.array(z.string().trim().min(1)),
      roleType: z.nativeEnum(KubernetesRoleType),
      role: z.string().trim().min(1),
      authMethod: z.nativeEnum(KubernetesAuthMethod).default(KubernetesAuthMethod.Api)
    })
  ])
  .superRefine((data, ctx) => {
    if (data.authMethod === KubernetesAuthMethod.Gateway && !data.gatewayId) {
      ctx.addIssue({
        path: ["gatewayId"],
        code: z.ZodIssueCode.custom,
        message: "When auth method is set to Gateway, a gateway must be selected"
      });
    }
    if (data.authMethod === KubernetesAuthMethod.Api || !data.authMethod) {
      if (!data.clusterToken) {
        ctx.addIssue({
          path: ["clusterToken"],
          code: z.ZodIssueCode.custom,
          message: "When auth method is set to Token, a cluster token must be provided"
        });
      }
      if (!data.url) {
        ctx.addIssue({
          path: ["url"],
          code: z.ZodIssueCode.custom,
          message: "When auth method is set to Token, a cluster URL must be provided"
        });
      }
    }
  });
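// Illustrative example (placeholder values): the superRefine above enforces
// cross-field rules that a plain discriminated union cannot, e.g. this input
// fails with a custom issue on ["gatewayId"]:
const exampleInvalidK8sInput = {
  credentialType: KubernetesCredentialType.Static,
  serviceAccountName: "example-sa",
  namespace: "default",
  audiences: [],
  authMethod: KubernetesAuthMethod.Gateway
  // gatewayId missing -> "When auth method is set to Gateway, a gateway must be selected"
};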

export const DynamicSecretVerticaSchema = z.object({
  host: z.string().trim().toLowerCase(),
@@ -400,9 +515,24 @@ export type TDynamicProviderFns = {
    inputs: unknown;
    expireAt: number;
    usernameTemplate?: string | null;
    identity?: {
      name: string;
    };
    metadata: { projectId: string };
    config?: TDynamicSecretLeaseConfig;
  }) => Promise<{ entityId: string; data: unknown }>;
  validateConnection: (inputs: unknown) => Promise<boolean>;
  validateProviderInputs: (inputs: object) => Promise<unknown>;
  revoke: (inputs: unknown, entityId: string) => Promise<{ entityId: string }>;
  renew: (inputs: unknown, entityId: string, expireAt: number) => Promise<{ entityId: string }>;
  validateConnection: (inputs: unknown, metadata: { projectId: string }) => Promise<boolean>;
  validateProviderInputs: (inputs: object, metadata: { projectId: string }) => Promise<unknown>;
  revoke: (
    inputs: unknown,
    entityId: string,
    metadata: { projectId: string },
    config?: TDynamicSecretLeaseConfig
  ) => Promise<{ entityId: string }>;
  renew: (
    inputs: unknown,
    entityId: string,
    expireAt: number,
    metadata: { projectId: string }
  ) => Promise<{ entityId: string }>;
};

@@ -1,5 +1,4 @@
import axios, { AxiosError } from "axios";
import handlebars from "handlebars";
import { customAlphabet } from "nanoid";
import { z } from "zod";

@@ -7,19 +6,20 @@ import { createDigestAuthRequestInterceptor } from "@app/lib/axios/digest-auth";
import { alphaNumericNanoId } from "@app/lib/nanoid";

import { DynamicSecretMongoAtlasSchema, TDynamicProviderFns } from "./models";
import { compileUsernameTemplate } from "./templateUtils";

const generatePassword = (size = 48) => {
  const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*";
  return customAlphabet(charset, 48)(size);
};

const generateUsername = (usernameTemplate?: string | null) => {
const generateUsername = (usernameTemplate?: string | null, identity?: { name: string }) => {
  const randomUsername = alphaNumericNanoId(32);
  if (!usernameTemplate) return randomUsername;

  return handlebars.compile(usernameTemplate)({
  return compileUsernameTemplate({
    usernameTemplate,
    randomUsername,
    unixTimestamp: Math.floor(Date.now() / 100)
    identity
  });
};

@@ -64,12 +64,17 @@ export const MongoAtlasProvider = (): TDynamicProviderFns => {
  return isConnected;
};

const create = async (data: { inputs: unknown; expireAt: number; usernameTemplate?: string | null }) => {
  const { inputs, expireAt, usernameTemplate } = data;
const create = async (data: {
  inputs: unknown;
  expireAt: number;
  usernameTemplate?: string | null;
  identity?: { name: string };
}) => {
  const { inputs, expireAt, usernameTemplate, identity } = data;
  const providerInputs = await validateProviderInputs(inputs);
  const client = await $getClient(providerInputs);

  const username = generateUsername(usernameTemplate);
  const username = generateUsername(usernameTemplate, identity);
  const password = generatePassword();
  const expiration = new Date(expireAt).toISOString();
  await client({

@@ -1,4 +1,3 @@
import handlebars from "handlebars";
import { MongoClient } from "mongodb";
import { customAlphabet } from "nanoid";
import { z } from "zod";
@@ -7,19 +6,20 @@ import { alphaNumericNanoId } from "@app/lib/nanoid";

import { verifyHostInputValidity } from "../dynamic-secret-fns";
import { DynamicSecretMongoDBSchema, TDynamicProviderFns } from "./models";
import { compileUsernameTemplate } from "./templateUtils";

const generatePassword = (size = 48) => {
  const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*";
  return customAlphabet(charset, 48)(size);
};

const generateUsername = (usernameTemplate?: string | null) => {
const generateUsername = (usernameTemplate?: string | null, identity?: { name: string }) => {
  const randomUsername = alphaNumericNanoId(32);
  if (!usernameTemplate) return randomUsername;

  return handlebars.compile(usernameTemplate)({
  return compileUsernameTemplate({
    usernameTemplate,
    randomUsername,
    unixTimestamp: Math.floor(Date.now() / 100)
    identity
  });
};

@@ -60,12 +60,12 @@ export const MongoDBProvider = (): TDynamicProviderFns => {
  return isConnected;
};

const create = async (data: { inputs: unknown; usernameTemplate?: string | null }) => {
  const { inputs, usernameTemplate } = data;
const create = async (data: { inputs: unknown; usernameTemplate?: string | null; identity?: { name: string } }) => {
  const { inputs, usernameTemplate, identity } = data;
  const providerInputs = await validateProviderInputs(inputs);
  const client = await $getClient(providerInputs);

  const username = generateUsername(usernameTemplate);
  const username = generateUsername(usernameTemplate, identity);
  const password = generatePassword();

  const db = client.db(providerInputs.database);

@@ -1,5 +1,4 @@
import axios, { Axios } from "axios";
import handlebars from "handlebars";
import https from "https";
import { customAlphabet } from "nanoid";
import { z } from "zod";
@@ -9,19 +8,20 @@ import { alphaNumericNanoId } from "@app/lib/nanoid";

import { verifyHostInputValidity } from "../dynamic-secret-fns";
import { DynamicSecretRabbitMqSchema, TDynamicProviderFns } from "./models";
import { compileUsernameTemplate } from "./templateUtils";

const generatePassword = () => {
  const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*";
  return customAlphabet(charset, 64)();
};

const generateUsername = (usernameTemplate?: string | null) => {
const generateUsername = (usernameTemplate?: string | null, identity?: { name: string }) => {
  const randomUsername = alphaNumericNanoId(32); // Username must start with an ascii letter, so we prepend the username with "inf-"
  if (!usernameTemplate) return randomUsername;

  return handlebars.compile(usernameTemplate)({
  return compileUsernameTemplate({
    usernameTemplate,
    randomUsername,
    unixTimestamp: Math.floor(Date.now() / 100)
    identity
  });
};

@@ -117,12 +117,12 @@ export const RabbitMqProvider = (): TDynamicProviderFns => {
  return infoResponse;
};

const create = async (data: { inputs: unknown; usernameTemplate?: string | null }) => {
  const { inputs, usernameTemplate } = data;
const create = async (data: { inputs: unknown; usernameTemplate?: string | null; identity?: { name: string } }) => {
  const { inputs, usernameTemplate, identity } = data;
  const providerInputs = await validateProviderInputs(inputs);
  const connection = await $getClient(providerInputs);

  const username = generateUsername(usernameTemplate);
  const username = generateUsername(usernameTemplate, identity);
  const password = generatePassword();

  await createRabbitMqUser({

@@ -9,19 +9,20 @@ import { validateHandlebarTemplate } from "@app/lib/template/validate-handlebars

import { verifyHostInputValidity } from "../dynamic-secret-fns";
import { DynamicSecretRedisDBSchema, TDynamicProviderFns } from "./models";
import { compileUsernameTemplate } from "./templateUtils";

const generatePassword = () => {
  const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*";
  return customAlphabet(charset, 64)();
};

const generateUsername = (usernameTemplate?: string | null) => {
const generateUsername = (usernameTemplate?: string | null, identity?: { name: string }) => {
  const randomUsername = alphaNumericNanoId(32); // Username must start with an ascii letter, so we prepend the username with "inf-"
  if (!usernameTemplate) return randomUsername;

  return handlebars.compile(usernameTemplate)({
  return compileUsernameTemplate({
    usernameTemplate,
    randomUsername,
    unixTimestamp: Math.floor(Date.now() / 100)
    identity
  });
};

@@ -121,12 +122,17 @@ export const RedisDatabaseProvider = (): TDynamicProviderFns => {
  return pingResponse;
};

const create = async (data: { inputs: unknown; expireAt: number; usernameTemplate?: string | null }) => {
  const { inputs, expireAt, usernameTemplate } = data;
const create = async (data: {
  inputs: unknown;
  expireAt: number;
  usernameTemplate?: string | null;
  identity?: { name: string };
}) => {
  const { inputs, expireAt, usernameTemplate, identity } = data;
  const providerInputs = await validateProviderInputs(inputs);
  const connection = await $getClient(providerInputs);

  const username = generateUsername(usernameTemplate);
  const username = generateUsername(usernameTemplate, identity);
  const password = generatePassword();
  const expiration = new Date(expireAt).toISOString();

@@ -9,19 +9,20 @@ import { validateHandlebarTemplate } from "@app/lib/template/validate-handlebars

import { verifyHostInputValidity } from "../dynamic-secret-fns";
import { DynamicSecretSapAseSchema, TDynamicProviderFns } from "./models";
import { compileUsernameTemplate } from "./templateUtils";

const generatePassword = (size = 48) => {
  const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789";
  return customAlphabet(charset, 48)(size);
};

const generateUsername = (usernameTemplate?: string | null) => {
const generateUsername = (usernameTemplate?: string | null, identity?: { name: string }) => {
  const randomUsername = `inf_${alphaNumericNanoId(25)}`; // Username must start with an ascii letter, so we prepend the username with "inf-"
  if (!usernameTemplate) return randomUsername;

  return handlebars.compile(usernameTemplate)({
  return compileUsernameTemplate({
    usernameTemplate,
    randomUsername,
    unixTimestamp: Math.floor(Date.now() / 100)
    identity
  });
};

@@ -87,11 +88,11 @@ export const SapAseProvider = (): TDynamicProviderFns => {
  return true;
};

const create = async (data: { inputs: unknown; usernameTemplate?: string | null }) => {
  const { inputs, usernameTemplate } = data;
const create = async (data: { inputs: unknown; usernameTemplate?: string | null; identity?: { name: string } }) => {
  const { inputs, usernameTemplate, identity } = data;
  const providerInputs = await validateProviderInputs(inputs);

  const username = generateUsername(usernameTemplate);
  const username = generateUsername(usernameTemplate, identity);
  const password = generatePassword();

  const client = await $getClient(providerInputs);

@@ -15,19 +15,20 @@ import { validateHandlebarTemplate } from "@app/lib/template/validate-handlebars

import { verifyHostInputValidity } from "../dynamic-secret-fns";
import { DynamicSecretSapHanaSchema, TDynamicProviderFns } from "./models";
import { compileUsernameTemplate } from "./templateUtils";

const generatePassword = (size = 48) => {
  const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789";
  return customAlphabet(charset, 48)(size);
};

const generateUsername = (usernameTemplate?: string | null) => {
const generateUsername = (usernameTemplate?: string | null, identity?: { name: string }) => {
  const randomUsername = alphaNumericNanoId(32); // Username must start with an ascii letter, so we prepend the username with "inf-"
  if (!usernameTemplate) return randomUsername;

  return handlebars.compile(usernameTemplate)({
  return compileUsernameTemplate({
    usernameTemplate,
    randomUsername,
    unixTimestamp: Math.floor(Date.now() / 100)
    identity
  });
};

@@ -97,11 +98,16 @@ export const SapHanaProvider = (): TDynamicProviderFns => {
  return testResult;
};

const create = async (data: { inputs: unknown; expireAt: number; usernameTemplate?: string | null }) => {
  const { inputs, expireAt, usernameTemplate } = data;
const create = async (data: {
  inputs: unknown;
  expireAt: number;
  usernameTemplate?: string | null;
  identity?: { name: string };
}) => {
  const { inputs, expireAt, usernameTemplate, identity } = data;
  const providerInputs = await validateProviderInputs(inputs);

  const username = generateUsername(usernameTemplate);
  const username = generateUsername(usernameTemplate, identity);
  const password = generatePassword();
  const expiration = new Date(expireAt).toISOString();

@@ -8,6 +8,7 @@ import { alphaNumericNanoId } from "@app/lib/nanoid";
import { validateHandlebarTemplate } from "@app/lib/template/validate-handlebars";

import { DynamicSecretSnowflakeSchema, TDynamicProviderFns } from "./models";
import { compileUsernameTemplate } from "./templateUtils";

// destroy client requires callback...
const noop = () => {};
@@ -17,13 +18,13 @@ const generatePassword = (size = 48) => {
  return customAlphabet(charset, 48)(size);
};

const generateUsername = (usernameTemplate?: string | null) => {
const generateUsername = (usernameTemplate?: string | null, identity?: { name: string }) => {
  const randomUsername = `infisical_${alphaNumericNanoId(32)}`; // Username must start with an ascii letter, so we prepend the username with "inf-"
  if (!usernameTemplate) return randomUsername;

  return handlebars.compile(usernameTemplate)({
  return compileUsernameTemplate({
    usernameTemplate,
    randomUsername,
    unixTimestamp: Math.floor(Date.now() / 100)
    identity
  });
};

@@ -88,13 +89,18 @@ export const SnowflakeProvider = (): TDynamicProviderFns => {
  return isValidConnection;
};

const create = async (data: { inputs: unknown; expireAt: number; usernameTemplate?: string | null }) => {
  const { inputs, expireAt, usernameTemplate } = data;
const create = async (data: {
  inputs: unknown;
  expireAt: number;
  usernameTemplate?: string | null;
  identity?: { name: string };
}) => {
  const { inputs, expireAt, usernameTemplate, identity } = data;
  const providerInputs = await validateProviderInputs(inputs);

  const client = await $getClient(providerInputs);

  const username = generateUsername(usernameTemplate);
  const username = generateUsername(usernameTemplate, identity);
  const password = generatePassword();

  try {

@@ -10,6 +10,7 @@ import { validateHandlebarTemplate } from "@app/lib/template/validate-handlebars
import { TGatewayServiceFactory } from "../../gateway/gateway-service";
import { verifyHostInputValidity } from "../dynamic-secret-fns";
import { DynamicSecretSqlDBSchema, PasswordRequirements, SqlProviders, TDynamicProviderFns } from "./models";
import { compileUsernameTemplate } from "./templateUtils";

const EXTERNAL_REQUEST_TIMEOUT = 10 * 1000;

@@ -104,9 +105,8 @@ const generatePassword = (provider: SqlProviders, requirements?: PasswordRequire
  }
};

const generateUsername = (provider: SqlProviders, usernameTemplate?: string | null) => {
const generateUsername = (provider: SqlProviders, usernameTemplate?: string | null, identity?: { name: string }) => {
  let randomUsername = "";

  // For oracle, the client assumes everything is upper case when not using quotes around the password
  if (provider === SqlProviders.Oracle) {
    randomUsername = alphaNumericNanoId(32).toUpperCase();
@@ -114,10 +114,13 @@ const generateUsername = (provider: SqlProviders, usernameTemplate?: string | nu
    randomUsername = alphaNumericNanoId(32);
  }
  if (!usernameTemplate) return randomUsername;

  return handlebars.compile(usernameTemplate)({
  return compileUsernameTemplate({
    usernameTemplate,
    randomUsername,
    unixTimestamp: Math.floor(Date.now() / 100)
    identity,
    options: {
      toUpperCase: provider === SqlProviders.Oracle
    }
  });
};

@@ -221,11 +224,16 @@ export const SqlDatabaseProvider = ({ gatewayService }: TSqlDatabaseProviderDTO)
  return isConnected;
};

const create = async (data: { inputs: unknown; expireAt: number; usernameTemplate?: string | null }) => {
  const { inputs, expireAt, usernameTemplate } = data;
const create = async (data: {
  inputs: unknown;
  expireAt: number;
  usernameTemplate?: string | null;
  identity?: { name: string };
}) => {
  const { inputs, expireAt, usernameTemplate, identity } = data;

  const providerInputs = await validateProviderInputs(inputs);
  const username = generateUsername(providerInputs.client, usernameTemplate);
  const username = generateUsername(providerInputs.client, usernameTemplate, identity);

  const password = generatePassword(providerInputs.client, providerInputs.passwordRequirements);
  const gatewayCallback = async (host = providerInputs.host, port = providerInputs.port) => {

@@ -0,0 +1,80 @@
/* eslint-disable func-names */
import handlebars from "handlebars";
import RE2 from "re2";

import { logger } from "@app/lib/logger";
import { alphaNumericNanoId } from "@app/lib/nanoid";

export const compileUsernameTemplate = ({
  usernameTemplate,
  randomUsername,
  identity,
  unixTimestamp,
  options
}: {
  usernameTemplate: string;
  randomUsername: string;
  identity?: { name: string };
  unixTimestamp?: number;
  options?: {
    toUpperCase?: boolean;
  };
}): string => {
  // Create isolated handlebars instance
  const hbs = handlebars.create();

  // Register random helper on local instance
  hbs.registerHelper("random", function (length: number) {
    if (typeof length !== "number" || length <= 0 || length > 100) {
      return "";
    }
    return alphaNumericNanoId(length);
  });

  // Register replace helper on local instance
  hbs.registerHelper("replace", function (text: string, searchValue: string, replaceValue: string) {
    // Convert to string if it's not already
    const textStr = String(text || "");
    if (!textStr) {
      return textStr;
    }

    try {
      const re2Pattern = new RE2(searchValue, "g");
      // Replace all occurrences
      return re2Pattern.replace(textStr, replaceValue);
    } catch (error) {
      logger.error(error, "RE2 pattern failed, using original template");
      return textStr;
    }
  });

  // Register truncate helper on local instance
  hbs.registerHelper("truncate", function (text: string, length: number) {
    // Convert to string if it's not already
    const textStr = String(text || "");
    if (!textStr) {
      return textStr;
    }

    if (typeof length !== "number" || length <= 0) return textStr;
    return textStr.substring(0, length);
  });

  // Compile template with context using local instance
  const context = {
    randomUsername,
    unixTimestamp: unixTimestamp || Math.floor(Date.now() / 100),
    identity: {
      name: identity?.name
    }
  };

  const result = hbs.compile(usernameTemplate)(context);

  if (options?.toUpperCase) {
    return result.toUpperCase();
  }

  return result;
};
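// Illustrative usage of the helpers registered above (values are placeholders):
//   {{random 6}}                      -> six random alphanumeric characters
//   {{replace identity.name "_" "-"}} -> RE2-based global replace
//   {{truncate identity.name 8}}      -> first eight characters
const exampleCompiledUsername = compileUsernameTemplate({
  usernameTemplate: "{{truncate identity.name 8}}-{{random 6}}",
  randomUsername: "fallback-not-used-here",
  identity: { name: "deploy-bot" }
}); // e.g. "deploy-b-a1B2c3"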
@@ -709,6 +709,10 @@ export const licenseServiceFactory = ({
    return licenses;
  };

  const invalidateGetPlan = async (orgId: string) => {
    await keyStore.deleteItem(FEATURE_CACHE_KEY(orgId));
  };
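  // Illustrative usage (caller-side, names are placeholders): after a subscription
  // change, dropping the cached plan forces the next getPlan() to refetch:
  //   await licenseService.invalidateGetPlan(orgId);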

  return {
    generateOrgCustomerId,
    removeOrgCustomer,
@@ -723,6 +727,7 @@ export const licenseServiceFactory = ({
      return onPremFeatures;
    },
    getPlan,
    invalidateGetPlan,
    updateSubscriptionOrgMemberCount,
    refreshPlan,
    getOrgPlan,

@@ -4,6 +4,7 @@ import {
  ProjectPermissionActions,
  ProjectPermissionCertificateActions,
  ProjectPermissionCmekActions,
  ProjectPermissionCommitsActions,
  ProjectPermissionDynamicSecretActions,
  ProjectPermissionGroupActions,
  ProjectPermissionIdentityActions,
@@ -90,6 +91,11 @@ const buildAdminPermissionRules = () => {
    ProjectPermissionSub.Certificates
  );

  can(
    [ProjectPermissionCommitsActions.Read, ProjectPermissionCommitsActions.PerformRollback],
    ProjectPermissionSub.Commits
  );

  can(
    [
      ProjectPermissionSshHostActions.Edit,
@@ -292,6 +298,11 @@ const buildMemberPermissionRules = () => {
    ProjectPermissionSub.SecretImports
  );

  can(
    [ProjectPermissionCommitsActions.Read, ProjectPermissionCommitsActions.PerformRollback],
    ProjectPermissionSub.Commits
  );

  can([ProjectPermissionActions.Read], ProjectPermissionSub.SecretApproval);
  can([ProjectPermissionSecretRotationActions.Read], ProjectPermissionSub.SecretRotation);

@@ -479,6 +490,7 @@ const buildViewerPermissionRules = () => {
  can(ProjectPermissionActions.Read, ProjectPermissionSub.SshCertificates);
  can(ProjectPermissionActions.Read, ProjectPermissionSub.SshCertificateTemplates);
  can(ProjectPermissionSecretSyncActions.Read, ProjectPermissionSub.SecretSyncs);
  can(ProjectPermissionCommitsActions.Read, ProjectPermissionSub.Commits);

  can(
    [

@@ -17,6 +17,11 @@ export enum ProjectPermissionActions {
  Delete = "delete"
}

export enum ProjectPermissionCommitsActions {
  Read = "read",
  PerformRollback = "perform-rollback"
}

export enum ProjectPermissionCertificateActions {
  Read = "read",
  Create = "create",
@@ -172,6 +177,7 @@ export enum ProjectPermissionSub {
  SecretRollback = "secret-rollback",
  SecretApproval = "secret-approval",
  SecretRotation = "secret-rotation",
  Commits = "commits",
  Identity = "identity",
  CertificateAuthorities = "certificate-authorities",
  Certificates = "certificates",
@@ -325,6 +331,7 @@ export type ProjectPermissionSet =
  | [ProjectPermissionActions.Read, ProjectPermissionSub.SecretRollback]
  | [ProjectPermissionActions.Create, ProjectPermissionSub.SecretRollback]
  | [ProjectPermissionActions.Edit, ProjectPermissionSub.Kms]
  | [ProjectPermissionCommitsActions, ProjectPermissionSub.Commits]
  | [ProjectPermissionSecretScanningDataSourceActions, ProjectPermissionSub.SecretScanningDataSources]
  | [ProjectPermissionSecretScanningFindingActions, ProjectPermissionSub.SecretScanningFindings]
  | [ProjectPermissionSecretScanningConfigActions, ProjectPermissionSub.SecretScanningConfigs];
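// Illustrative usage (permission object is a placeholder): with the union member
// above, a CASL ability built from these rules can be queried for commit access:
//   permission.can(ProjectPermissionCommitsActions.PerformRollback, ProjectPermissionSub.Commits)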
@@ -376,7 +383,8 @@ const DynamicSecretConditionV2Schema = z
      .object({
        [PermissionConditionOperators.$EQ]: PermissionConditionSchema[PermissionConditionOperators.$EQ],
        [PermissionConditionOperators.$NEQ]: PermissionConditionSchema[PermissionConditionOperators.$NEQ],
        [PermissionConditionOperators.$IN]: PermissionConditionSchema[PermissionConditionOperators.$IN]
        [PermissionConditionOperators.$IN]: PermissionConditionSchema[PermissionConditionOperators.$IN],
        [PermissionConditionOperators.$GLOB]: PermissionConditionSchema[PermissionConditionOperators.$GLOB]
      })
      .partial()
  ]),
@@ -404,6 +412,23 @@ const DynamicSecretConditionV2Schema = z
  })
  .partial();

const SecretImportConditionSchema = z
  .object({
    environment: z.union([
      z.string(),
      z
        .object({
          [PermissionConditionOperators.$EQ]: PermissionConditionSchema[PermissionConditionOperators.$EQ],
          [PermissionConditionOperators.$NEQ]: PermissionConditionSchema[PermissionConditionOperators.$NEQ],
          [PermissionConditionOperators.$IN]: PermissionConditionSchema[PermissionConditionOperators.$IN],
          [PermissionConditionOperators.$GLOB]: PermissionConditionSchema[PermissionConditionOperators.$GLOB]
        })
        .partial()
    ]),
    secretPath: SECRET_PATH_PERMISSION_OPERATOR_SCHEMA
  })
  .partial();

const SecretConditionV2Schema = z
  .object({
    environment: z.union([
@@ -658,6 +683,12 @@ const GeneralPermissionSchema = [
      "Describe what action an entity can take."
    )
  }),
  z.object({
    subject: z.literal(ProjectPermissionSub.Commits).describe("The entity this permission pertains to."),
    action: CASL_ACTION_SCHEMA_NATIVE_ENUM(ProjectPermissionCommitsActions).describe(
      "Describe what action an entity can take."
    )
  }),
  z.object({
    subject: z
      .literal(ProjectPermissionSub.SecretScanningDataSources)
@@ -741,7 +772,7 @@ export const ProjectPermissionV2Schema = z.discriminatedUnion("subject", [
    action: CASL_ACTION_SCHEMA_NATIVE_ENUM(ProjectPermissionActions).describe(
      "Describe what action an entity can take."
    ),
    conditions: SecretConditionV1Schema.describe(
    conditions: SecretImportConditionSchema.describe(
      "When specified, only matching conditions will be allowed to access given resource."
    ).optional()
  }),

485
backend/src/ee/services/pit/pit-service.ts
Normal file
@@ -0,0 +1,485 @@
/* eslint-disable no-await-in-loop */
import { ForbiddenError } from "@casl/ability";

import { ActionProjectType } from "@app/db/schemas";
import { ProjectPermissionCommitsActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission";
import { NotFoundError } from "@app/lib/errors";
import { logger } from "@app/lib/logger";
import { ActorAuthMethod, ActorType } from "@app/services/auth/auth-type";
import { ResourceType, TFolderCommitServiceFactory } from "@app/services/folder-commit/folder-commit-service";
import {
  isFolderCommitChange,
  isSecretCommitChange
} from "@app/services/folder-commit-changes/folder-commit-changes-dal";
import { TProjectEnvDALFactory } from "@app/services/project-env/project-env-dal";
import { TSecretServiceFactory } from "@app/services/secret/secret-service";
import { TSecretFolderDALFactory } from "@app/services/secret-folder/secret-folder-dal";
import { TSecretFolderServiceFactory } from "@app/services/secret-folder/secret-folder-service";

import { TPermissionServiceFactory } from "../permission/permission-service";

type TPitServiceFactoryDep = {
  folderCommitService: TFolderCommitServiceFactory;
  secretService: Pick<TSecretServiceFactory, "getSecretVersionsV2ByIds" | "getChangeVersions">;
  folderService: Pick<TSecretFolderServiceFactory, "getFolderById" | "getFolderVersions">;
  permissionService: Pick<TPermissionServiceFactory, "getProjectPermission">;
  folderDAL: Pick<TSecretFolderDALFactory, "findSecretPathByFolderIds">;
  projectEnvDAL: Pick<TProjectEnvDALFactory, "findOne">;
};

export type TPitServiceFactory = ReturnType<typeof pitServiceFactory>;

export const pitServiceFactory = ({
  folderCommitService,
  secretService,
  folderService,
  permissionService,
  folderDAL,
  projectEnvDAL
}: TPitServiceFactoryDep) => {
  const getCommitsCount = async ({
    actor,
    actorId,
    actorOrgId,
    actorAuthMethod,
    projectId,
    environment,
    path
  }: {
    actor: ActorType;
    actorId: string;
    actorOrgId: string;
    actorAuthMethod: ActorAuthMethod;
    projectId: string;
    environment: string;
    path: string;
  }) => {
    const result = await folderCommitService.getCommitsCount({
      actor,
      actorId,
      actorOrgId,
      actorAuthMethod,
      projectId,
      environment,
      path
    });

    return result;
  };

  const getCommitsForFolder = async ({
    actor,
    actorId,
    actorOrgId,
    actorAuthMethod,
    projectId,
    environment,
    path,
    offset,
    limit,
    search,
    sort
  }: {
    actor: ActorType;
    actorId: string;
    actorOrgId: string;
    actorAuthMethod: ActorAuthMethod;
    projectId: string;
    environment: string;
    path: string;
    offset: number;
    limit: number;
    search?: string;
    sort: "asc" | "desc";
  }) => {
    const result = await folderCommitService.getCommitsForFolder({
      actor,
      actorId,
      actorOrgId,
      actorAuthMethod,
      projectId,
      environment,
      path,
      offset,
      limit,
      search,
      sort
    });

    return {
      commits: result.commits.map((commit) => ({
        ...commit,
        commitId: commit.commitId.toString()
      })),
      total: result.total,
      hasMore: result.hasMore
    };
  };
|
||||
const getCommitChanges = async ({
|
||||
actor,
|
||||
actorId,
|
||||
actorOrgId,
|
||||
actorAuthMethod,
|
||||
projectId,
|
||||
commitId
|
||||
}: {
|
||||
actor: ActorType;
|
||||
actorId: string;
|
||||
actorOrgId: string;
|
||||
actorAuthMethod: ActorAuthMethod;
|
||||
projectId: string;
|
||||
commitId: string;
|
||||
}) => {
|
||||
const changes = await folderCommitService.getCommitChanges({
|
||||
actor,
|
||||
actorId,
|
||||
actorOrgId,
|
||||
actorAuthMethod,
|
||||
projectId,
|
||||
commitId
|
||||
});
|
||||
|
||||
const [folderWithPath] = await folderDAL.findSecretPathByFolderIds(projectId, [changes.folderId]);
|
||||
|
||||
for (const change of changes.changes) {
|
||||
if (isSecretCommitChange(change)) {
|
||||
change.versions = await secretService.getChangeVersions(
|
||||
{
|
||||
secretVersion: change.secretVersion,
|
||||
secretId: change.secretId,
|
||||
id: change.id,
|
||||
isUpdate: change.isUpdate,
|
||||
changeType: change.changeType
|
||||
},
|
||||
(Number.parseInt(change.secretVersion, 10) - 1).toString(),
|
||||
actorId,
|
||||
actor,
|
||||
actorOrgId,
|
||||
actorAuthMethod,
|
||||
changes.envId,
|
||||
projectId,
|
||||
folderWithPath?.path || ""
|
||||
);
|
||||
} else if (isFolderCommitChange(change)) {
|
||||
change.versions = await folderService.getFolderVersions(
|
||||
change,
|
||||
(Number.parseInt(change.folderVersion, 10) - 1).toString(),
|
||||
change.folderChangeId
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
changes: {
|
||||
...changes,
|
||||
commitId: changes.commitId.toString()
|
||||
}
|
||||
};
|
||||
};
|
||||
|
||||
const compareCommitChanges = async ({
|
||||
actor,
|
||||
actorId,
|
||||
actorOrgId,
|
||||
actorAuthMethod,
|
||||
projectId,
|
||||
commitId,
|
||||
folderId,
|
||||
environment,
|
||||
deepRollback,
|
||||
secretPath
|
||||
}: {
|
||||
actor: ActorType;
|
||||
actorId: string;
|
||||
actorOrgId: string;
|
||||
actorAuthMethod: ActorAuthMethod;
|
||||
projectId: string;
|
||||
commitId: string;
|
||||
folderId: string;
|
||||
environment: string;
|
||||
deepRollback: boolean;
|
||||
secretPath: string;
|
||||
}) => {
|
||||
const latestCommit = await folderCommitService.getLatestCommit({
|
||||
folderId,
|
||||
actor,
|
||||
actorId,
|
||||
actorOrgId,
|
||||
actorAuthMethod,
|
||||
projectId
|
||||
});
|
||||
|
||||
const targetCommit = await folderCommitService.getCommitById({
|
||||
commitId,
|
||||
actor,
|
||||
actorId,
|
||||
actorOrgId,
|
||||
actorAuthMethod,
|
||||
projectId
|
||||
});
|
||||
|
||||
const env = await projectEnvDAL.findOne({
|
||||
projectId,
|
||||
slug: environment
|
||||
});
|
||||
|
||||
if (!latestCommit) {
|
||||
throw new NotFoundError({ message: "Latest commit not found" });
|
||||
}
|
||||
|
||||
let diffs;
|
||||
if (deepRollback) {
|
||||
diffs = await folderCommitService.deepCompareFolder({
|
||||
targetCommitId: targetCommit.id,
|
||||
envId: env.id,
|
||||
projectId
|
||||
});
|
||||
} else {
|
||||
const folderData = await folderService.getFolderById({
|
||||
actor,
|
||||
actorId,
|
||||
actorOrgId,
|
||||
actorAuthMethod,
|
||||
id: folderId
|
||||
});
|
||||
|
||||
diffs = [
|
||||
{
|
||||
folderId: folderData.id,
|
||||
folderName: folderData.name,
|
||||
folderPath: secretPath,
|
||||
changes: await folderCommitService.compareFolderStates({
|
||||
targetCommitId: commitId,
|
||||
currentCommitId: latestCommit.id
|
||||
})
|
||||
}
|
||||
];
|
||||
}
|
||||
|
||||
for (const diff of diffs) {
|
||||
for (const change of diff.changes) {
|
||||
// Use discriminated union type checking
|
||||
if (change.type === ResourceType.SECRET) {
|
||||
// TypeScript now knows this is a SecretChange
|
||||
if (change.secretKey && change.secretVersion && change.secretId) {
|
||||
change.versions = await secretService.getChangeVersions(
|
||||
{
|
||||
secretVersion: change.secretVersion,
|
||||
secretId: change.secretId,
|
||||
id: change.id,
|
||||
isUpdate: change.isUpdate,
|
||||
changeType: change.changeType
|
||||
},
|
||||
change.fromVersion || "1",
|
||||
actorId,
|
||||
actor,
|
||||
actorOrgId,
|
||||
actorAuthMethod,
|
||||
env.id,
|
||||
projectId,
|
||||
diff.folderPath || ""
|
||||
);
|
||||
}
|
||||
} else if (change.type === ResourceType.FOLDER) {
|
||||
// TypeScript now knows this is a FolderChange
|
||||
if (change.folderVersion) {
|
||||
change.versions = await folderService.getFolderVersions(change, change.fromVersion || "1", change.id);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return diffs;
|
||||
};
|
||||
|
||||
const rollbackToCommit = async ({
|
||||
actor,
|
||||
actorId,
|
||||
actorOrgId,
|
||||
actorAuthMethod,
|
||||
projectId,
|
||||
commitId,
|
||||
folderId,
|
||||
deepRollback,
|
||||
message,
|
||||
environment
|
||||
}: {
|
||||
actor: ActorType;
|
||||
actorId: string;
|
||||
actorOrgId: string;
|
||||
actorAuthMethod: ActorAuthMethod;
|
||||
projectId: string;
|
||||
commitId: string;
|
||||
folderId: string;
|
||||
deepRollback: boolean;
|
||||
message?: string;
|
||||
environment: string;
|
||||
}) => {
|
||||
const { permission: userPermission } = await permissionService.getProjectPermission({
|
||||
actor,
|
||||
actorId,
|
||||
projectId,
|
||||
actorAuthMethod,
|
||||
actorOrgId,
|
||||
actionProjectType: ActionProjectType.SecretManager
|
||||
});
|
||||
|
||||
ForbiddenError.from(userPermission).throwUnlessCan(
|
||||
ProjectPermissionCommitsActions.PerformRollback,
|
||||
ProjectPermissionSub.Commits
|
||||
);
|
||||
|
||||
const latestCommit = await folderCommitService.getLatestCommit({
|
||||
folderId,
|
||||
actor,
|
||||
actorId,
|
||||
actorOrgId,
|
||||
actorAuthMethod,
|
||||
projectId
|
||||
});
|
||||
|
||||
if (!latestCommit) {
|
||||
throw new NotFoundError({ message: "Latest commit not found" });
|
||||
}
|
||||
|
||||
logger.info(`PIT - Attempting to rollback folder ${folderId} from commit ${latestCommit.id} to commit ${commitId}`);
|
||||
|
||||
const targetCommit = await folderCommitService.getCommitById({
|
||||
commitId,
|
||||
actor,
|
||||
actorId,
|
||||
actorAuthMethod,
|
||||
actorOrgId,
|
||||
projectId
|
||||
});
|
||||
|
||||
const env = await projectEnvDAL.findOne({
|
||||
projectId,
|
||||
slug: environment
|
||||
});
|
||||
|
||||
if (!targetCommit || targetCommit.folderId !== folderId || targetCommit.envId !== env.id) {
|
||||
throw new NotFoundError({ message: "Target commit not found" });
|
||||
}
|
||||
|
||||
if (!latestCommit || latestCommit.envId !== env.id) {
|
||||
throw new NotFoundError({ message: "Latest commit not found" });
|
||||
}
|
||||
|
||||
if (deepRollback) {
|
||||
await folderCommitService.deepRollbackFolder(commitId, env.id, actorId, actor, projectId, message);
|
||||
return { success: true };
|
||||
}
|
||||
|
||||
const diff = await folderCommitService.compareFolderStates({
|
||||
currentCommitId: latestCommit.id,
|
||||
targetCommitId: commitId
|
||||
});
|
||||
|
||||
const response = await folderCommitService.applyFolderStateDifferences({
|
||||
differences: diff,
|
||||
actorInfo: {
|
||||
actorType: actor,
|
||||
actorId,
|
||||
message: message || "Rollback to previous commit"
|
||||
},
|
||||
folderId,
|
||||
projectId,
|
||||
reconstructNewFolders: deepRollback
|
||||
});
|
||||
|
||||
return {
|
||||
success: true,
|
||||
secretChangesCount: response.secretChangesCount,
|
||||
folderChangesCount: response.folderChangesCount,
|
||||
totalChanges: response.totalChanges
|
||||
};
|
||||
};
|
||||
|
||||
const revertCommit = async ({
|
||||
actor,
|
||||
actorId,
|
||||
actorOrgId,
|
||||
actorAuthMethod,
|
||||
projectId,
|
||||
commitId
|
||||
}: {
|
||||
actor: ActorType;
|
||||
actorId: string;
|
||||
actorOrgId: string;
|
||||
actorAuthMethod: ActorAuthMethod;
|
||||
projectId: string;
|
||||
commitId: string;
|
||||
}) => {
|
||||
const response = await folderCommitService.revertCommitChanges({
|
||||
commitId,
|
||||
actor,
|
||||
actorId,
|
||||
actorAuthMethod,
|
||||
actorOrgId,
|
||||
projectId
|
||||
});
|
||||
|
||||
return response;
|
||||
};
|
||||
|
||||
const getFolderStateAtCommit = async ({
|
||||
actor,
|
||||
actorId,
|
||||
actorOrgId,
|
||||
actorAuthMethod,
|
||||
projectId,
|
||||
commitId
|
||||
}: {
|
||||
actor: ActorType;
|
||||
actorId: string;
|
||||
actorOrgId: string;
|
||||
actorAuthMethod: ActorAuthMethod;
|
||||
projectId: string;
|
||||
commitId: string;
|
||||
}) => {
|
||||
const commit = await folderCommitService.getCommitById({
|
||||
commitId,
|
||||
actor,
|
||||
actorId,
|
||||
actorOrgId,
|
||||
actorAuthMethod,
|
||||
projectId
|
||||
});
|
||||
|
||||
if (!commit) {
|
||||
throw new NotFoundError({ message: `Commit with ID ${commitId} not found` });
|
||||
}
|
||||
|
||||
const response = await folderCommitService.reconstructFolderState(commitId);
|
||||
|
||||
return response.map((item) => {
|
||||
if (item.type === ResourceType.SECRET) {
|
||||
return {
|
||||
...item,
|
||||
secretVersion: Number(item.secretVersion)
|
||||
};
|
||||
}
|
||||
|
||||
if (item.type === ResourceType.FOLDER) {
|
||||
return {
|
||||
...item,
|
||||
folderVersion: Number(item.folderVersion)
|
||||
};
|
||||
}
|
||||
|
||||
return item;
|
||||
});
|
||||
};
|
||||
|
||||
return {
|
||||
getCommitsCount,
|
||||
getCommitsForFolder,
|
||||
getCommitChanges,
|
||||
compareCommitChanges,
|
||||
rollbackToCommit,
|
||||
revertCommit,
|
||||
getFolderStateAtCommit
|
||||
};
|
||||
};
|
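A minimal usage sketch of the service above, assuming a wired-up `pitService`; all IDs, the environment slug, and the `actorAuthMethod` cast are placeholders, not values from this diff:

```ts
// Sketch only: compare a folder against an older commit, then roll back.
const diffs = await pitService.compareCommitChanges({
  actor: ActorType.USER,
  actorId: "user-id", // placeholder
  actorOrgId: "org-id", // placeholder
  actorAuthMethod: null as unknown as ActorAuthMethod, // placeholder
  projectId: "project-id",
  commitId: "target-commit-id",
  folderId: "folder-id",
  environment: "dev",
  deepRollback: false,
  secretPath: "/"
});

// Only roll back when the comparison actually found drift.
if (diffs.some((diff) => diff.changes.length > 0)) {
  await pitService.rollbackToCommit({
    actor: ActorType.USER,
    actorId: "user-id", // placeholder
    actorOrgId: "org-id", // placeholder
    actorAuthMethod: null as unknown as ActorAuthMethod, // placeholder
    projectId: "project-id",
    commitId: "target-commit-id",
    folderId: "folder-id",
    deepRollback: false,
    message: "Restore folder to known-good state",
    environment: "dev"
  });
}
```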
@@ -20,6 +20,7 @@ import { EnforcementLevel } from "@app/lib/types";
 import { triggerWorkflowIntegrationNotification } from "@app/lib/workflow-integrations/trigger-notification";
 import { TriggerFeature } from "@app/lib/workflow-integrations/types";
 import { ActorType } from "@app/services/auth/auth-type";
+import { TFolderCommitServiceFactory } from "@app/services/folder-commit/folder-commit-service";
 import { TKmsServiceFactory } from "@app/services/kms/kms-service";
 import { KmsDataKey } from "@app/services/kms/kms-types";
 import { TMicrosoftTeamsServiceFactory } from "@app/services/microsoft-teams/microsoft-teams-service";
@@ -130,6 +131,7 @@ type TSecretApprovalRequestServiceFactoryDep = {
   licenseService: Pick<TLicenseServiceFactory, "getPlan">;
   projectMicrosoftTeamsConfigDAL: Pick<TProjectMicrosoftTeamsConfigDALFactory, "getIntegrationDetailsByProject">;
   microsoftTeamsService: Pick<TMicrosoftTeamsServiceFactory, "sendNotification">;
+  folderCommitService: Pick<TFolderCommitServiceFactory, "createCommit">;
 };

 export type TSecretApprovalRequestServiceFactory = ReturnType<typeof secretApprovalRequestServiceFactory>;
@@ -161,7 +163,8 @@ export const secretApprovalRequestServiceFactory = ({
   projectSlackConfigDAL,
   resourceMetadataDAL,
   projectMicrosoftTeamsConfigDAL,
-  microsoftTeamsService
+  microsoftTeamsService,
+  folderCommitService
 }: TSecretApprovalRequestServiceFactoryDep) => {
   const requestCount = async ({ projectId, actor, actorId, actorOrgId, actorAuthMethod }: TApprovalRequestCountDTO) => {
     if (actor === ActorType.SERVICE) throw new BadRequestError({ message: "Cannot use service token" });
@@ -597,6 +600,10 @@ export const secretApprovalRequestServiceFactory = ({
         ? await fnSecretV2BridgeBulkInsert({
             tx,
             folderId,
+            actor: {
+              actorId,
+              type: actor
+            },
+            orgId: actorOrgId,
             inputSecrets: secretCreationCommits.map((el) => ({
               tagIds: el?.tags.map(({ id }) => id),
@@ -619,13 +626,18 @@ export const secretApprovalRequestServiceFactory = ({
             secretDAL: secretV2BridgeDAL,
             secretVersionDAL: secretVersionV2BridgeDAL,
             secretTagDAL,
-            secretVersionTagDAL: secretVersionTagV2BridgeDAL
+            secretVersionTagDAL: secretVersionTagV2BridgeDAL,
+            folderCommitService
           })
         : [];
       const updatedSecrets = secretUpdationCommits.length
        ? await fnSecretV2BridgeBulkUpdate({
            folderId,
+           orgId: actorOrgId,
+           actor: {
+             actorId,
+             type: actor
+           },
           tx,
           inputSecrets: secretUpdationCommits.map((el) => {
             const encryptedValue =
@@ -659,7 +671,8 @@ export const secretApprovalRequestServiceFactory = ({
            secretVersionDAL: secretVersionV2BridgeDAL,
            secretTagDAL,
            secretVersionTagDAL: secretVersionTagV2BridgeDAL,
-           resourceMetadataDAL
+           resourceMetadataDAL,
+           folderCommitService
          })
        : [];
      const deletedSecret = secretDeletionCommits.length
@@ -667,10 +680,13 @@ export const secretApprovalRequestServiceFactory = ({
            projectId,
            folderId,
            tx,
-           actorId: "",
+           actorId,
            actorType: actor,
            secretDAL: secretV2BridgeDAL,
            secretQueueService,
-           inputSecrets: secretDeletionCommits.map(({ key }) => ({ secretKey: key, type: SecretType.Shared }))
+           inputSecrets: secretDeletionCommits.map(({ key }) => ({ secretKey: key, type: SecretType.Shared })),
+           folderCommitService,
+           secretVersionDAL: secretVersionV2BridgeDAL
          })
        : [];
      const updatedSecretApproval = await secretApprovalRequestDAL.updateById(
@@ -10,6 +10,7 @@ import { logger } from "@app/lib/logger";
 import { alphaNumericNanoId } from "@app/lib/nanoid";
 import { QueueName, TQueueServiceFactory } from "@app/queue";
 import { ActorType } from "@app/services/auth/auth-type";
+import { TFolderCommitServiceFactory } from "@app/services/folder-commit/folder-commit-service";
 import { TKmsServiceFactory } from "@app/services/kms/kms-service";
 import { KmsDataKey } from "@app/services/kms/kms-types";
 import { TProjectBotServiceFactory } from "@app/services/project-bot/project-bot-service";
@@ -87,6 +88,7 @@ type TSecretReplicationServiceFactoryDep = {

   projectBotService: Pick<TProjectBotServiceFactory, "getBotKey">;
   kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">;
+  folderCommitService: Pick<TFolderCommitServiceFactory, "createCommit">;
 };

 export type TSecretReplicationServiceFactory = ReturnType<typeof secretReplicationServiceFactory>;
@@ -132,6 +134,7 @@ export const secretReplicationServiceFactory = ({
   secretVersionV2BridgeDAL,
   secretV2BridgeDAL,
   kmsService,
+  folderCommitService,
   resourceMetadataDAL
 }: TSecretReplicationServiceFactoryDep) => {
   const $getReplicatedSecrets = (
@@ -419,7 +422,7 @@ export const secretReplicationServiceFactory = ({
           return {
             op: operation,
             requestId: approvalRequestDoc.id,
-            metadata: doc.metadata,
+            metadata: doc.metadata ? JSON.stringify(doc.metadata) : [],
             secretMetadata: JSON.stringify(doc.secretMetadata),
             key: doc.key,
             encryptedValue: doc.encryptedValue,
@@ -446,11 +449,12 @@ export const secretReplicationServiceFactory = ({
             tx,
             secretTagDAL,
             resourceMetadataDAL,
+            folderCommitService,
             secretVersionTagDAL: secretVersionV2TagBridgeDAL,
             inputSecrets: locallyCreatedSecrets.map((doc) => {
               return {
                 type: doc.type,
-                metadata: doc.metadata,
+                metadata: doc.metadata ? JSON.stringify(doc.metadata) : [],
                 key: doc.key,
                 encryptedValue: doc.encryptedValue,
                 encryptedComment: doc.encryptedComment,
@@ -466,6 +470,7 @@ export const secretReplicationServiceFactory = ({
             orgId,
             folderId: destinationReplicationFolderId,
             secretVersionDAL: secretVersionV2BridgeDAL,
+            folderCommitService,
             secretDAL: secretV2BridgeDAL,
             tx,
             resourceMetadataDAL,
@@ -479,7 +484,7 @@ export const secretReplicationServiceFactory = ({
               },
               data: {
                 type: doc.type,
-                metadata: doc.metadata,
+                metadata: doc.metadata ? JSON.stringify(doc.metadata) : [],
                 key: doc.key,
                 encryptedValue: doc.encryptedValue as Buffer,
                 encryptedComment: doc.encryptedComment,
@@ -63,6 +63,7 @@ import { TAppConnectionDALFactory } from "@app/services/app-connection/app-conne
 import { decryptAppConnection } from "@app/services/app-connection/app-connection-fns";
 import { TAppConnectionServiceFactory } from "@app/services/app-connection/app-connection-service";
 import { ActorType } from "@app/services/auth/auth-type";
+import { TFolderCommitServiceFactory } from "@app/services/folder-commit/folder-commit-service";
 import { TKmsServiceFactory } from "@app/services/kms/kms-service";
 import { KmsDataKey } from "@app/services/kms/kms-types";
 import { TProjectBotServiceFactory } from "@app/services/project-bot/project-bot-service";
@@ -98,7 +99,7 @@ export type TSecretRotationV2ServiceFactoryDep = {
     TSecretV2BridgeDALFactory,
     "bulkUpdate" | "insertMany" | "deleteMany" | "upsertSecretReferences" | "find" | "invalidateSecretCacheByProjectId"
   >;
-  secretVersionV2BridgeDAL: Pick<TSecretVersionV2DALFactory, "insertMany">;
+  secretVersionV2BridgeDAL: Pick<TSecretVersionV2DALFactory, "insertMany" | "findLatestVersionMany">;
   secretVersionTagV2BridgeDAL: Pick<TSecretVersionV2TagDALFactory, "insertMany">;
   resourceMetadataDAL: Pick<TResourceMetadataDALFactory, "insertMany" | "delete">;
   secretTagDAL: Pick<TSecretTagDALFactory, "saveTagsToSecretV2" | "deleteTagsToSecretV2" | "find">;
@@ -106,6 +107,7 @@ export type TSecretRotationV2ServiceFactoryDep = {
   snapshotService: Pick<TSecretSnapshotServiceFactory, "performSnapshot">;
   queueService: Pick<TQueueServiceFactory, "queuePg">;
   appConnectionDAL: Pick<TAppConnectionDALFactory, "findById" | "update" | "updateById">;
+  folderCommitService: Pick<TFolderCommitServiceFactory, "createCommit">;
 };

 export type TSecretRotationV2ServiceFactory = ReturnType<typeof secretRotationV2ServiceFactory>;
@@ -145,6 +147,7 @@ export const secretRotationV2ServiceFactory = ({
   snapshotService,
   keyStore,
   queueService,
+  folderCommitService,
   appConnectionDAL
 }: TSecretRotationV2ServiceFactoryDep) => {
   const $queueSendSecretRotationStatusNotification = async (secretRotation: TSecretRotationV2Raw) => {
@@ -538,7 +541,12 @@ export const secretRotationV2ServiceFactory = ({
         secretVersionDAL: secretVersionV2BridgeDAL,
         secretVersionTagDAL: secretVersionTagV2BridgeDAL,
         secretTagDAL,
-        resourceMetadataDAL
+        folderCommitService,
+        resourceMetadataDAL,
+        actor: {
+          type: actor.type,
+          actorId: actor.id
+        }
       });

       await secretRotationV2DAL.insertSecretMappings(
@@ -674,7 +682,12 @@ export const secretRotationV2ServiceFactory = ({
         secretVersionDAL: secretVersionV2BridgeDAL,
         secretVersionTagDAL: secretVersionTagV2BridgeDAL,
         secretTagDAL,
-        resourceMetadataDAL
+        folderCommitService,
+        resourceMetadataDAL,
+        actor: {
+          type: actor.type,
+          actorId: actor.id
+        }
       });

       secretsMappingUpdated = true;
@@ -792,6 +805,9 @@ export const secretRotationV2ServiceFactory = ({
           projectId,
           folderId,
           actorId: actor.id, // not actually used since rotated secrets are shared
           actorType: actor.type,
+          folderCommitService,
+          secretVersionDAL: secretVersionV2BridgeDAL,
           tx
         });
       }
@@ -935,6 +951,10 @@ export const secretRotationV2ServiceFactory = ({
           secretDAL: secretV2BridgeDAL,
           secretVersionDAL: secretVersionV2BridgeDAL,
           secretVersionTagDAL: secretVersionTagV2BridgeDAL,
+          folderCommitService,
+          actor: {
+            type: ActorType.PLATFORM
+          },
          secretTagDAL,
          resourceMetadataDAL
        });
@@ -14,6 +14,7 @@ import { logger } from "@app/lib/logger";
 import { alphaNumericNanoId } from "@app/lib/nanoid";
 import { QueueJobs, QueueName, TQueueServiceFactory } from "@app/queue";
 import { ActorType } from "@app/services/auth/auth-type";
+import { CommitType, TFolderCommitServiceFactory } from "@app/services/folder-commit/folder-commit-service";
 import { TKmsServiceFactory } from "@app/services/kms/kms-service";
 import { KmsDataKey } from "@app/services/kms/kms-types";
 import { TProjectBotServiceFactory } from "@app/services/project-bot/project-bot-service";
@@ -53,6 +54,7 @@ type TSecretRotationQueueFactoryDep = {
   secretVersionV2BridgeDAL: Pick<TSecretVersionV2DALFactory, "insertMany" | "findLatestVersionMany">;
   telemetryService: Pick<TTelemetryServiceFactory, "sendPostHogEvents">;
   kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">;
+  folderCommitService: Pick<TFolderCommitServiceFactory, "createCommit">;
 };

 // These error should stop the repeatable job and ask user to reconfigure rotation
@@ -77,6 +79,7 @@ export const secretRotationQueueFactory = ({
   telemetryService,
   secretV2BridgeDAL,
   secretVersionV2BridgeDAL,
+  folderCommitService,
   kmsService
 }: TSecretRotationQueueFactoryDep) => {
   const addToQueue = async (rotationId: string, interval: number) => {
@@ -330,7 +333,7 @@ export const secretRotationQueueFactory = ({
           })),
           tx
         );
-        await secretVersionV2BridgeDAL.insertMany(
+        const secretVersions = await secretVersionV2BridgeDAL.insertMany(
           updatedSecrets.map(({ id, updatedAt, createdAt, ...el }) => ({
             ...el,
             actorType: ActorType.PLATFORM,
@@ -338,6 +341,22 @@ export const secretRotationQueueFactory = ({
           })),
           tx
         );
+
+        await folderCommitService.createCommit(
+          {
+            actor: {
+              type: ActorType.PLATFORM
+            },
+            message: "Changed by Secret rotation",
+            folderId: secretVersions[0].folderId,
+            changes: secretVersions.map((sv) => ({
+              type: CommitType.ADD,
+              isUpdate: true,
+              secretVersionId: sv.id
+            }))
+          },
+          tx
+        );
       });

       await secretV2BridgeDAL.invalidateSecretCacheByProjectId(secretRotation.projectId);
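The hunk above establishes the pattern used throughout this changeset: every bulk write of secret versions is followed by one folder commit describing those versions. A condensed sketch of the commit payload being built (the helper name is illustrative, not part of this diff):

```ts
// Illustrative only: the commit payload shape used above, factored out.
// CommitType.ADD with isUpdate=true is how the commit system records updates.
const buildRotationCommit = (secretVersions: { id: string; folderId: string }[]) => ({
  actor: { type: ActorType.PLATFORM },
  message: "Changed by Secret rotation",
  folderId: secretVersions[0].folderId, // all rotated versions share one folder
  changes: secretVersions.map((sv) => ({
    type: CommitType.ADD,
    isUpdate: true,
    secretVersionId: sv.id
  }))
});
```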
@@ -8,6 +8,7 @@ import { InternalServerError, NotFoundError } from "@app/lib/errors";
 import { groupBy } from "@app/lib/fn";
 import { logger } from "@app/lib/logger";
 import { ActorType } from "@app/services/auth/auth-type";
+import { CommitType, TFolderCommitServiceFactory } from "@app/services/folder-commit/folder-commit-service";
 import { TKmsServiceFactory } from "@app/services/kms/kms-service";
 import { KmsDataKey } from "@app/services/kms/kms-types";
 import { TProjectBotServiceFactory } from "@app/services/project-bot/project-bot-service";
@@ -51,8 +52,8 @@ type TSecretSnapshotServiceFactoryDep = {
   snapshotSecretV2BridgeDAL: TSnapshotSecretV2DALFactory;
   snapshotFolderDAL: TSnapshotFolderDALFactory;
   secretVersionDAL: Pick<TSecretVersionDALFactory, "insertMany" | "findLatestVersionByFolderId">;
-  secretVersionV2BridgeDAL: Pick<TSecretVersionV2DALFactory, "insertMany" | "findLatestVersionByFolderId">;
-  folderVersionDAL: Pick<TSecretFolderVersionDALFactory, "findLatestVersionByFolderId" | "insertMany">;
+  secretVersionV2BridgeDAL: Pick<TSecretVersionV2DALFactory, "insertMany" | "findLatestVersionByFolderId" | "findOne">;
+  folderVersionDAL: Pick<TSecretFolderVersionDALFactory, "findLatestVersionByFolderId" | "insertMany" | "findOne">;
   secretDAL: Pick<TSecretDALFactory, "delete" | "insertMany">;
   secretV2BridgeDAL: Pick<TSecretV2BridgeDALFactory, "delete" | "insertMany">;
   secretTagDAL: Pick<TSecretTagDALFactory, "saveTagsToSecret" | "saveTagsToSecretV2">;
@@ -63,6 +64,7 @@ type TSecretSnapshotServiceFactoryDep = {
   licenseService: Pick<TLicenseServiceFactory, "isValidLicense">;
   kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">;
   projectBotService: Pick<TProjectBotServiceFactory, "getBotKey">;
+  folderCommitService: Pick<TFolderCommitServiceFactory, "createCommit">;
 };

 export type TSecretSnapshotServiceFactory = ReturnType<typeof secretSnapshotServiceFactory>;
@@ -84,7 +86,8 @@ export const secretSnapshotServiceFactory = ({
   snapshotSecretV2BridgeDAL,
   secretVersionV2TagBridgeDAL,
   kmsService,
-  projectBotService
+  projectBotService,
+  folderCommitService
 }: TSecretSnapshotServiceFactoryDep) => {
   const projectSecretSnapshotCount = async ({
     environment,
@@ -403,6 +406,18 @@ export const secretSnapshotServiceFactory = ({
         .filter((el) => el.isRotatedSecret)
         .map((el) => el.secretId);

+      const deletedSecretsChanges = new Map(); // secretId -> version info
+      const deletedFoldersChanges = new Map(); // folderId -> version info
+      const addedSecretsChanges = new Map(); // secretId -> version info
+      const addedFoldersChanges = new Map(); // folderId -> version info
+      const commitChanges: {
+        type: string;
+        secretVersionId?: string;
+        folderVersionId?: string;
+        isUpdate?: boolean;
+        folderId?: string;
+      }[] = [];
+
       // this will remove all secrets in current folder except rotated secrets which we ignore
       const deletedTopLevelSecs = await secretV2BridgeDAL.delete(
         {
@@ -424,7 +439,35 @@ export const secretSnapshotServiceFactory = ({
         },
         tx
       );

+      await Promise.all(
+        deletedTopLevelSecs.map(async (sec) => {
+          const version = await secretVersionV2BridgeDAL.findOne({ secretId: sec.id, version: sec.version }, tx);
+          deletedSecretsChanges.set(sec.id, {
+            id: sec.id,
+            version: sec.version,
+            // Store the version ID if available from the snapshot
+            versionId: version?.id
+          });
+        })
+      );

       const deletedTopLevelSecsGroupById = groupBy(deletedTopLevelSecs, (item) => item.id);

+      const deletedFoldersData = await folderDAL.delete({ parentId: snapshot.folderId, isReserved: false }, tx);

+      await Promise.all(
+        deletedFoldersData.map(async (folder) => {
+          const version = await folderVersionDAL.findOne({ folderId: folder.id, version: folder.version }, tx);
+          deletedFoldersChanges.set(folder.id, {
+            id: folder.id,
+            version: folder.version,
+            // Store the version ID if available
+            versionId: version?.id
+          });
+        })
+      );

       // this will remove all secrets and folders on child
       // due to sql foreign key and link list connection removing the folders removes everything below too
       const deletedFolders = await folderDAL.delete({ parentId: snapshot.folderId, isReserved: false }, tx);
@@ -489,14 +532,21 @@ export const secretSnapshotServiceFactory = ({
       });
       await secretTagDAL.saveTagsToSecretV2(secretTagsToBeInsert, tx);
       const folderVersions = await folderVersionDAL.insertMany(
-        folders.map(({ version, name, id, envId }) => ({
+        folders.map(({ version, name, id, envId, description }) => ({
           name,
           version,
           folderId: id,
-          envId
+          envId,
+          description
         })),
         tx
       );

+      // Track added folders
+      folderVersions.forEach((fv) => {
+        addedFoldersChanges.set(fv.folderId, fv);
+      });

       const userActorId = actor === ActorType.USER ? actorId : undefined;
       const identityActorId = actor !== ActorType.USER ? actorId : undefined;
       const actorType = actor || ActorType.PLATFORM;
@@ -511,6 +561,11 @@ export const secretSnapshotServiceFactory = ({
         })),
         tx
       );

+      secretVersions.forEach((sv) => {
+        addedSecretsChanges.set(sv.secretId, sv);
+      });

       await secretVersionV2TagBridgeDAL.insertMany(
         secretVersions.flatMap(({ secretId, id }) =>
           secretVerTagToBeInsert?.[secretId]?.length
@@ -522,6 +577,70 @@ export const secretSnapshotServiceFactory = ({
         ),
         tx
       );

+      // Compute commit changes
+      // Handle secrets
+      deletedSecretsChanges.forEach((deletedInfo, secretId) => {
+        const addedSecret = addedSecretsChanges.get(secretId);
+        if (addedSecret) {
+          // Secret was deleted and re-added - this is an update only if versions are different
+          if (deletedInfo.versionId !== addedSecret.id) {
+            commitChanges.push({
+              type: CommitType.ADD, // In the commit system, updates are tracked as "add" with isUpdate=true
+              secretVersionId: addedSecret.id,
+              isUpdate: true
+            });
+          }
+          // Remove from addedSecrets since we've handled it
+          addedSecretsChanges.delete(secretId);
+        } else if (deletedInfo.versionId) {
+          // Secret was only deleted
+          commitChanges.push({
+            type: CommitType.DELETE,
+            secretVersionId: deletedInfo.versionId
+          });
+        }
+      });
+      // Add remaining new secrets (not updates)
+      addedSecretsChanges.forEach((addedSecret) => {
+        commitChanges.push({
+          type: CommitType.ADD,
+          secretVersionId: addedSecret.id
+        });
+      });

+      // Handle folders
+      deletedFoldersChanges.forEach((deletedInfo, folderId) => {
+        const addedFolder = addedFoldersChanges.get(folderId);
+        if (addedFolder) {
+          // Folder was deleted and re-added - this is an update only if versions are different
+          if (deletedInfo.versionId !== addedFolder.id) {
+            commitChanges.push({
+              type: CommitType.ADD,
+              folderVersionId: addedFolder.id,
+              isUpdate: true
+            });
+          }
+          // Remove from addedFolders since we've handled it
+          addedFoldersChanges.delete(folderId);
+        } else if (deletedInfo.versionId) {
+          // Folder was only deleted
+          commitChanges.push({
+            type: CommitType.DELETE,
+            folderVersionId: deletedInfo.versionId,
+            folderId: deletedInfo.id
+          });
+        }
+      });

+      // Add remaining new folders (not updates)
+      addedFoldersChanges.forEach((addedFolder) => {
+        commitChanges.push({
+          type: CommitType.ADD,
+          folderVersionId: addedFolder.id
+        });
+      });

       const newSnapshot = await snapshotDAL.create(
         {
           folderId: snapshot.folderId,
@@ -550,6 +669,22 @@ export const secretSnapshotServiceFactory = ({
         })),
         tx
       );
+      if (commitChanges.length > 0) {
+        await folderCommitService.createCommit(
+          {
+            actor: {
+              type: actorType,
+              metadata: {
+                id: userActorId || identityActorId
+              }
+            },
+            message: "Rollback to snapshot",
+            folderId: snapshot.folderId,
+            changes: commitChanges
+          },
+          tx
+        );
+      }

       return { ...newSnapshot, snapshotSecrets, snapshotFolders };
     });
@@ -609,11 +744,12 @@ export const secretSnapshotServiceFactory = ({
       });
       await secretTagDAL.saveTagsToSecret(secretTagsToBeInsert, tx);
       const folderVersions = await folderVersionDAL.insertMany(
-        folders.map(({ version, name, id, envId }) => ({
+        folders.map(({ version, name, id, envId, description }) => ({
          name,
          version,
          folderId: id,
-          envId
+          envId,
+          description
        })),
        tx
      );
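The snapshot-rollback hunks above classify each resource by diffing the deleted map against the re-added map. A condensed restatement of that rule, with simplified types (the function and type names here are illustrative, not from this diff):

```ts
// Deleted and re-added with a different version ID -> update (ADD + isUpdate);
// deleted only -> DELETE; added only -> plain ADD.
type TVersionInfo = { id: string; versionId?: string };
type TChange = { type: string; versionId: string; isUpdate?: boolean };

const classifyChanges = (
  deleted: Map<string, TVersionInfo>,
  added: Map<string, { id: string }>
): TChange[] => {
  const changes: TChange[] = [];
  deleted.forEach((deletedInfo, resourceId) => {
    const readded = added.get(resourceId);
    if (readded) {
      if (deletedInfo.versionId !== readded.id) {
        changes.push({ type: CommitType.ADD, versionId: readded.id, isUpdate: true });
      }
      added.delete(resourceId); // handled; don't double-count as a plain add
    } else if (deletedInfo.versionId) {
      changes.push({ type: CommitType.DELETE, versionId: deletedInfo.versionId });
    }
  });
  added.forEach((readded) => {
    changes.push({ type: CommitType.ADD, versionId: readded.id });
  });
  return changes;
};
```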
@@ -27,6 +27,7 @@ export const KeyStorePrefixes = {
   KmsOrgDataKeyCreation: "kms-org-data-key-creation-lock",
   WaitUntilReadyKmsOrgKeyCreation: "wait-until-ready-kms-org-key-creation-",
   WaitUntilReadyKmsOrgDataKeyCreation: "wait-until-ready-kms-org-data-key-creation-",
+  FolderTreeCheckpoint: (envId: string) => `folder-tree-checkpoint-${envId}`,

   WaitUntilReadyProjectEnvironmentOperation: (projectId: string) =>
     `wait-until-ready-project-environments-operation-${projectId}`,
@@ -626,7 +626,8 @@ export const PROJECTS = {
     autoCapitalization: "Disable or enable auto-capitalization for the project.",
     slug: "An optional slug for the project. (must be unique within the organization)",
     hasDeleteProtection: "Enable or disable delete protection for the project.",
-    secretSharing: "Enable or disable secret sharing for the project."
+    secretSharing: "Enable or disable secret sharing for the project.",
+    showSnapshotsLegacy: "Enable or disable legacy snapshots for the project."
   },
   GET_KEY: {
     workspaceId: "The ID of the project to get the key from."
@@ -1112,6 +1113,14 @@ export const DYNAMIC_SECRET_LEASES = {
     leaseId: "The ID of the dynamic secret lease.",
     isForced:
       "A boolean flag to delete the the dynamic secret from Infisical without trying to remove it from external provider. Used when the dynamic secret got modified externally."
   },
+  KUBERNETES: {
+    CREATE: {
+      config: {
+        namespace:
+          "The Kubernetes namespace to create the lease in. If not specified, the first namespace defined in the configuration will be used."
+      }
+    }
+  }
 } as const;
 export const SECRET_TAGS = {
@@ -2277,7 +2286,8 @@ export const SecretSyncs = {
   },
   GCP: {
     scope: "The Google project scope that secrets should be synced to.",
-    projectId: "The ID of the Google project secrets should be synced to."
+    projectId: "The ID of the Google project secrets should be synced to.",
+    locationId: 'The ID of the Google project location secrets should be synced to (ie "us-west4").'
   },
   DATABRICKS: {
     scope: "The Databricks secret scope that secrets should be synced to."
@@ -213,6 +213,12 @@ const envSchema = z
   GATEWAY_RELAY_AUTH_SECRET: zpStr(z.string().optional()),

   DYNAMIC_SECRET_ALLOW_INTERNAL_IP: zodStrBool.default("false"),
+  DYNAMIC_SECRET_AWS_ACCESS_KEY_ID: zpStr(z.string().optional()).default(
+    process.env.INF_APP_CONNECTION_AWS_ACCESS_KEY_ID
+  ),
+  DYNAMIC_SECRET_AWS_SECRET_ACCESS_KEY: zpStr(z.string().optional()).default(
+    process.env.INF_APP_CONNECTION_AWS_SECRET_ACCESS_KEY
+  ),
   /* ----------------------------------------------------------------------------- */

   /* App Connections ----------------------------------------------------------------------------- */
@@ -255,6 +261,10 @@ const envSchema = z
   DATADOG_SERVICE: zpStr(z.string().optional().default("infisical-core")),
   DATADOG_HOSTNAME: zpStr(z.string().optional()),

+  // PIT
+  PIT_CHECKPOINT_WINDOW: zpStr(z.string().optional().default("2")),
+  PIT_TREE_CHECKPOINT_WINDOW: zpStr(z.string().optional().default("30")),

   /* CORS ----------------------------------------------------------------------------- */
   CORS_ALLOWED_ORIGINS: zpStr(
     z
@@ -149,8 +149,8 @@ const setupProxyServer = async ({
   protocol = GatewayProxyProtocol.Tcp,
   httpsAgent
 }: {
-  targetHost: string;
-  targetPort: number;
+  targetHost?: string;
+  targetPort?: number;
   relayPort: number;
   relayHost: string;
   tlsOptions: TGatewayTlsOptions;
@@ -183,27 +183,44 @@ const setupProxyServer = async ({
   let command: string;

   if (protocol === GatewayProxyProtocol.Http) {
-    const targetUrl = `${targetHost}:${targetPort}`; // note(daniel): targetHost MUST include the scheme (https|http)
-    command = `FORWARD-HTTP ${targetUrl}`;
-    logger.debug(`Using HTTP proxy mode: ${command.trim()}`);
+    if (!targetHost && !targetPort) {
+      command = `FORWARD-HTTP`;
+      logger.debug(`Using HTTP proxy mode, no target URL provided [command=${command.trim()}]`);
+    } else {
+      if (!targetHost || targetPort === undefined) {
+        throw new BadRequestError({
+          message: `Target host and port are required for HTTP proxy mode with custom target`
+        });
+      }

-    // extract ca certificate from httpsAgent if present
-    if (httpsAgent && targetHost.startsWith("https://")) {
-      const agentOptions = httpsAgent.options;
-      if (agentOptions && agentOptions.ca) {
-        const caCert = Array.isArray(agentOptions.ca) ? agentOptions.ca.join("\n") : agentOptions.ca;
-        const caB64 = Buffer.from(caCert as string).toString("base64");
-        command += ` ca=${caB64}`;
+      const targetUrl = `${targetHost}:${targetPort}`; // note(daniel): targetHost MUST include the scheme (https|http)
+      command = `FORWARD-HTTP ${targetUrl}`;
+      logger.debug(`Using HTTP proxy mode, custom target URL provided [command=${command.trim()}]`);

-        const rejectUnauthorized = agentOptions.rejectUnauthorized !== false;
-        command += ` verify=${rejectUnauthorized}`;
+      // extract ca certificate from httpsAgent if present
+      if (httpsAgent && targetHost.startsWith("https://")) {
+        const agentOptions = httpsAgent.options;
+        if (agentOptions && agentOptions.ca) {
+          const caCert = Array.isArray(agentOptions.ca) ? agentOptions.ca.join("\n") : agentOptions.ca;
+          const caB64 = Buffer.from(caCert as string).toString("base64");
+          command += ` ca=${caB64}`;

-        logger.debug(`Using HTTP proxy mode [command=${command.trim()}]`);
+          const rejectUnauthorized = agentOptions.rejectUnauthorized !== false;
+          command += ` verify=${rejectUnauthorized}`;

+          logger.debug(`Using HTTP proxy mode, custom target URL provided [command=${command.trim()}]`);
+        }
       }
     }

     command += "\n";
   } else if (protocol === GatewayProxyProtocol.Tcp) {
+    if (!targetHost || !targetPort) {
+      throw new BadRequestError({
+        message: `Target host and port are required for TCP proxy mode`
+      });
+    }

     // For TCP mode, send FORWARD-TCP with host:port
     command = `FORWARD-TCP ${targetHost}:${targetPort}\n`;
     logger.debug(`Using TCP proxy mode: ${command.trim()}`);
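Read straight off the construction above, the relay commands come out looking roughly like the following; the host, port, and certificate values are made up for illustration:

```ts
// Illustrative command strings produced by the logic above (values made up).
const caPem = "-----BEGIN CERTIFICATE-----\n..."; // placeholder PEM

// FORWARD-HTTP with no target: the gateway side decides the destination.
const cmd1 = "FORWARD-HTTP\n";

// FORWARD-HTTP with a custom HTTPS target: base64 CA and verify flag appended.
const cmd2 = `FORWARD-HTTP https://10.0.0.5:8443 ca=${Buffer.from(caPem).toString("base64")} verify=true\n`;

// FORWARD-TCP always requires host:port.
const cmd3 = "FORWARD-TCP db.internal:5432\n";
```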
@@ -10,12 +10,13 @@ export enum GatewayProxyProtocol {
 }

 export enum GatewayHttpProxyActions {
-  InjectGatewayK8sServiceAccountToken = "inject-k8s-sa-auth-token"
+  InjectGatewayK8sServiceAccountToken = "inject-k8s-sa-auth-token",
+  UseGatewayK8sServiceAccount = "use-k8s-sa"
 }

 export interface IGatewayProxyOptions {
-  targetHost: string;
-  targetPort: number;
+  targetHost?: string;
+  targetPort?: number;
   relayHost: string;
   relayPort: number;
   tlsOptions: TGatewayTlsOptions;
@@ -7,13 +7,24 @@ type SanitizationArg = {
   allowedExpressions?: (arg: string) => boolean;
 };

+const isValidExpression = (expression: string, dto: SanitizationArg): boolean => {
+  // Allow helper functions (replace, truncate)
+  const allowedHelpers = ["replace", "truncate", "random"];
+  if (allowedHelpers.includes(expression)) {
+    return true;
+  }

+  // Check regular allowed expressions
+  return dto?.allowedExpressions?.(expression) || false;
+};

 export const validateHandlebarTemplate = (templateName: string, template: string, dto: SanitizationArg) => {
   const parsedAst = handlebars.parse(template);
   parsedAst.body.forEach((el) => {
     if (el.type === "ContentStatement") return;
     if (el.type === "MustacheStatement" && "path" in el) {
       const { path } = el as { type: "MustacheStatement"; path: { type: "PathExpression"; original: string } };
-      if (path.type === "PathExpression" && dto?.allowedExpressions?.(path.original)) return;
+      if (path.type === "PathExpression" && isValidExpression(path.original, dto)) return;
     }
     logger.error(el, "Template sanitization failed");
     throw new BadRequestError({ message: `Template sanitization failed: ${templateName}` });
@@ -26,7 +37,7 @@ export const isValidHandleBarTemplate = (template: string, dto: SanitizationArg)
     if (el.type === "ContentStatement") return true;
     if (el.type === "MustacheStatement" && "path" in el) {
       const { path } = el as { type: "MustacheStatement"; path: { type: "PathExpression"; original: string } };
-      if (path.type === "PathExpression" && dto?.allowedExpressions?.(path.original)) return true;
+      if (path.type === "PathExpression" && isValidExpression(path.original, dto)) return true;
     }
     return false;
   });
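A hedged example of exercising `validateHandlebarTemplate` with the new helper allow-list; the template string and the `username` expression are made-up inputs, not values from this diff:

```ts
// "random" passes via the built-in helper allow-list added above;
// "username" passes via allowedExpressions; anything else throws.
validateHandlebarTemplate("db-username-template", "{{random 8}}-{{username}}", {
  allowedExpressions: (expr) => expr === "username"
});
```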
@@ -60,6 +60,7 @@ export enum QueueName {
   ImportSecretsFromExternalSource = "import-secrets-from-external-source",
   AppConnectionSecretSync = "app-connection-secret-sync",
   SecretRotationV2 = "secret-rotation-v2",
+  FolderTreeCheckpoint = "folder-tree-checkpoint",
   InvalidateCache = "invalidate-cache",
   SecretScanningV2 = "secret-scanning-v2"
 }
@@ -94,6 +95,7 @@ export enum QueueJobs {
   SecretRotationV2QueueRotations = "secret-rotation-v2-queue-rotations",
   SecretRotationV2RotateSecrets = "secret-rotation-v2-rotate-secrets",
   SecretRotationV2SendNotification = "secret-rotation-v2-send-notification",
+  CreateFolderTreeCheckpoint = "create-folder-tree-checkpoint",
   InvalidateCache = "invalidate-cache",
   SecretScanningV2FullScan = "secret-scanning-v2-full-scan",
   SecretScanningV2DiffScan = "secret-scanning-v2-diff-scan",
@@ -209,6 +211,12 @@ export type TQueueJobTypes = {
     name: QueueJobs.ProjectV3Migration;
     payload: { projectId: string };
   };
+  [QueueName.FolderTreeCheckpoint]: {
+    name: QueueJobs.CreateFolderTreeCheckpoint;
+    payload: {
+      envId: string;
+    };
+  };
   [QueueName.ImportSecretsFromExternalSource]: {
     name: QueueJobs.ImportSecretsFromExternalSource;
     payload: {
@@ -155,6 +155,12 @@ export const injectIdentity = fp(async (server: FastifyZodProvider) => {
           oidc: token?.identityAuth?.oidc
         });
       }
+      if (token?.identityAuth?.kubernetes) {
+        requestContext.set("identityAuthInfo", {
+          identityId: identity.identityId,
+          kubernetes: token?.identityAuth?.kubernetes
+        });
+      }
       break;
     }
     case AuthMode.SERVICE_TOKEN: {
@@ -57,9 +57,12 @@ export const registerServeUI = async (
         reply.callNotFound();
         return;
       }
-      // reference: https://github.com/fastify/fastify-static?tab=readme-ov-file#managing-cache-control-headers
-      // to avoid ui bundle skew on new deployment
-      return reply.sendFile("index.html", { maxAge: 0, immutable: false });
+
+      // This should help avoid caching any chunks (temp fix)
+      void reply.header("Cache-Control", "no-cache, no-store, must-revalidate, private, max-age=0");
+      void reply.header("Pragma", "no-cache");
+      void reply.header("Expires", "0");
+      return reply.sendFile("index.html");
     }
   });
 }
@@ -60,6 +60,7 @@ import { oidcConfigDALFactory } from "@app/ee/services/oidc/oidc-config-dal";
 import { oidcConfigServiceFactory } from "@app/ee/services/oidc/oidc-config-service";
 import { permissionDALFactory } from "@app/ee/services/permission/permission-dal";
 import { permissionServiceFactory } from "@app/ee/services/permission/permission-service";
+import { pitServiceFactory } from "@app/ee/services/pit/pit-service";
 import { projectTemplateDALFactory } from "@app/ee/services/project-template/project-template-dal";
 import { projectTemplateServiceFactory } from "@app/ee/services/project-template/project-template-service";
 import { projectUserAdditionalPrivilegeDALFactory } from "@app/ee/services/project-user-additional-privilege/project-user-additional-privilege-dal";
@@ -154,6 +155,14 @@ import { externalGroupOrgRoleMappingDALFactory } from "@app/services/external-gr
 import { externalGroupOrgRoleMappingServiceFactory } from "@app/services/external-group-org-role-mapping/external-group-org-role-mapping-service";
 import { externalMigrationQueueFactory } from "@app/services/external-migration/external-migration-queue";
 import { externalMigrationServiceFactory } from "@app/services/external-migration/external-migration-service";
+import { folderCheckpointDALFactory } from "@app/services/folder-checkpoint/folder-checkpoint-dal";
+import { folderCheckpointResourcesDALFactory } from "@app/services/folder-checkpoint-resources/folder-checkpoint-resources-dal";
+import { folderCommitDALFactory } from "@app/services/folder-commit/folder-commit-dal";
+import { folderCommitQueueServiceFactory } from "@app/services/folder-commit/folder-commit-queue";
+import { folderCommitServiceFactory } from "@app/services/folder-commit/folder-commit-service";
+import { folderCommitChangesDALFactory } from "@app/services/folder-commit-changes/folder-commit-changes-dal";
+import { folderTreeCheckpointDALFactory } from "@app/services/folder-tree-checkpoint/folder-tree-checkpoint-dal";
+import { folderTreeCheckpointResourcesDALFactory } from "@app/services/folder-tree-checkpoint-resources/folder-tree-checkpoint-resources-dal";
 import { groupProjectDALFactory } from "@app/services/group-project/group-project-dal";
 import { groupProjectMembershipRoleDALFactory } from "@app/services/group-project/group-project-membership-role-dal";
 import { groupProjectServiceFactory } from "@app/services/group-project/group-project-service";
@@ -583,6 +592,41 @@ export const registerRoutes = async (
     projectRoleDAL,
     permissionService
   });

+  const folderCommitChangesDAL = folderCommitChangesDALFactory(db);
+  const folderCheckpointDAL = folderCheckpointDALFactory(db);
+  const folderCheckpointResourcesDAL = folderCheckpointResourcesDALFactory(db);
+  const folderTreeCheckpointDAL = folderTreeCheckpointDALFactory(db);
+  const folderCommitDAL = folderCommitDALFactory(db);
+  const folderTreeCheckpointResourcesDAL = folderTreeCheckpointResourcesDALFactory(db);
+  const folderCommitQueueService = folderCommitQueueServiceFactory({
+    queueService,
+    folderTreeCheckpointDAL,
+    keyStore,
+    folderTreeCheckpointResourcesDAL,
+    folderCommitDAL,
+    folderDAL
+  });
+  const folderCommitService = folderCommitServiceFactory({
+    folderCommitDAL,
+    folderCommitChangesDAL,
+    folderCheckpointDAL,
+    folderTreeCheckpointDAL,
+    userDAL,
+    identityDAL,
+    folderDAL,
+    folderVersionDAL,
+    secretVersionV2BridgeDAL,
+    projectDAL,
+    folderCheckpointResourcesDAL,
+    secretV2BridgeDAL,
+    folderTreeCheckpointResourcesDAL,
+    folderCommitQueueService,
+    permissionService,
+    kmsService,
+    secretTagDAL,
+    resourceMetadataDAL
+  });
   const scimService = scimServiceFactory({
     licenseService,
     scimDAL,
@@ -987,6 +1031,7 @@ export const registerRoutes = async (
     projectMembershipDAL,
     projectBotDAL,
     secretDAL,
+    folderCommitService,
     secretBlindIndexDAL,
     secretVersionDAL,
     secretTagDAL,
@@ -1034,6 +1079,7 @@ export const registerRoutes = async (
     secretReminderRecipientsDAL,
     orgService,
     resourceMetadataDAL,
+    folderCommitService,
     secretSyncQueue
   });

@@ -1110,6 +1156,7 @@ export const registerRoutes = async (
     snapshotDAL,
     snapshotFolderDAL,
     snapshotSecretDAL,
+    folderCommitService,
     secretVersionDAL,
     folderVersionDAL,
     secretTagDAL,
@@ -1136,7 +1183,8 @@ export const registerRoutes = async (
     folderVersionDAL,
     projectEnvDAL,
     snapshotService,
-    projectDAL
+    projectDAL,
+    folderCommitService
   });

   const secretImportService = secretImportServiceFactory({
@@ -1161,6 +1209,7 @@ export const registerRoutes = async (
   const secretV2BridgeService = secretV2BridgeServiceFactory({
     folderDAL,
     secretVersionDAL: secretVersionV2BridgeDAL,
+    folderCommitService,
     secretQueueService,
     secretDAL: secretV2BridgeDAL,
     permissionService,
@@ -1204,7 +1253,8 @@ export const registerRoutes = async (
     projectSlackConfigDAL,
     resourceMetadataDAL,
     projectMicrosoftTeamsConfigDAL,
-    microsoftTeamsService
+    microsoftTeamsService,
+    folderCommitService
   });

   const secretService = secretServiceFactory({
@@ -1291,7 +1341,8 @@ export const registerRoutes = async (
     secretV2BridgeDAL,
     secretVersionV2TagBridgeDAL: secretVersionTagV2BridgeDAL,
     secretVersionV2BridgeDAL,
-    resourceMetadataDAL
+    resourceMetadataDAL,
+    folderCommitService
   });

   const secretRotationQueue = secretRotationQueueFactory({
@@ -1303,6 +1354,7 @@ export const registerRoutes = async (
     projectBotService,
     secretVersionV2BridgeDAL,
     secretV2BridgeDAL,
+    folderCommitService,
     kmsService
   });

@@ -1454,6 +1506,15 @@ export const registerRoutes = async (
     permissionService
   });

+  const pitService = pitServiceFactory({
+    folderCommitService,
+    secretService,
+    folderService,
+    permissionService,
+    folderDAL,
+    projectEnvDAL
+  });

   const identityOidcAuthService = identityOidcAuthServiceFactory({
     identityOidcAuthDAL,
     identityOrgMembershipDAL,
@@ -1516,7 +1577,9 @@ export const registerRoutes = async (
     dynamicSecretProviders,
     folderDAL,
     licenseService,
-    kmsService
+    kmsService,
+    userDAL,
+    identityDAL
   });
   const dailyResourceCleanUp = dailyResourceCleanUpQueueServiceFactory({
     auditLogDAL,
@@ -1595,7 +1658,9 @@ export const registerRoutes = async (
     secretDAL: secretV2BridgeDAL,
     queueService,
     secretV2BridgeService,
-    resourceMetadataDAL
+    resourceMetadataDAL,
+    folderCommitService,
+    folderVersionDAL
   });

   const migrationService = externalMigrationServiceFactory({
@@ -1705,6 +1770,7 @@ export const registerRoutes = async (
     auditLogService,
     secretV2BridgeDAL,
     secretTagDAL,
+    folderCommitService,
     secretVersionTagV2BridgeDAL,
     secretVersionV2BridgeDAL,
     keyStore,
@@ -1893,6 +1959,7 @@ export const registerRoutes = async (
     certificateTemplate: certificateTemplateService,
     certificateAuthorityCrl: certificateAuthorityCrlService,
     certificateEst: certificateEstService,
+    pit: pitService,
     pkiAlert: pkiAlertService,
     pkiCollection: pkiCollectionService,
     pkiSubscriber: pkiSubscriberService,
@@ -1927,6 +1994,7 @@ export const registerRoutes = async (
     microsoftTeams: microsoftTeamsService,
     assumePrivileges: assumePrivilegeService,
     githubOrgSync: githubOrgSyncConfigService,
+    folderCommit: folderCommitService,
     secretScanningV2: secretScanningV2Service
   });
@@ -262,7 +262,8 @@ export const SanitizedProjectSchema = ProjectsSchema.pick({
   kmsCertificateKeyId: true,
   auditLogsRetentionDays: true,
   hasDeleteProtection: true,
-  secretSharing: true
+  secretSharing: true,
+  showSnapshotsLegacy: true
 });

 export const SanitizedTagSchema = SecretTagsSchema.pick({
@@ -45,4 +45,37 @@ export const registerGcpConnectionRouter = async (server: FastifyZodProvider) =>
       return projects;
     }
   });

+  server.route({
+    method: "GET",
+    url: `/:connectionId/secret-manager-project-locations`,
+    config: {
+      rateLimit: readLimit
+    },
+    schema: {
+      params: z.object({
+        connectionId: z.string().uuid()
+      }),
+      querystring: z.object({
+        projectId: z.string()
+      }),
+      response: {
+        200: z.object({ displayName: z.string(), locationId: z.string() }).array()
+      }
+    },
+    onRequest: verifyAuth([AuthMode.JWT]),
+    handler: async (req) => {
+      const {
+        params: { connectionId },
+        query: { projectId }
+      } = req;

+      const locations = await server.services.appConnection.gcp.listSecretManagerProjectLocations(
+        { connectionId, projectId },
+        req.permission
+      );

+      return locations;
+    }
+  });
 };
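A hedged sketch of consuming the new route from a client; the `/api/v1/app-connections/gcp` mount point and the bearer token are assumptions, not confirmed by this diff:

```ts
// Hypothetical client call; the base path is assumed, not shown in this diff.
// The route requires JWT auth (verifyAuth([AuthMode.JWT])).
const res = await fetch(
  `/api/v1/app-connections/gcp/${connectionId}/secret-manager-project-locations?projectId=${projectId}`,
  { headers: { Authorization: `Bearer ${jwt}` } }
);
const locations: { displayName: string; locationId: string }[] = await res.json();
```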
@@ -108,17 +108,21 @@ export const registerIdentityKubernetesRouter = async (server: FastifyZodProvide
          .string()
          .trim()
          .min(1)
+         .nullable()
          .describe(KUBERNETES_AUTH.ATTACH.kubernetesHost)
          .refine(
-           (val) =>
-             characterValidator([
+           (val) => {
+             if (val === null) return true;
+
+             return characterValidator([
              CharacterType.Alphabets,
              CharacterType.Numbers,
              CharacterType.Colon,
              CharacterType.Period,
              CharacterType.ForwardSlash,
              CharacterType.Hyphen
-             ])(val),
+             ])(val);
+           },
            {
              message:
                "Kubernetes host must only contain alphabets, numbers, colons, periods, hyphen, and forward slashes."
@@ -164,6 +168,13 @@ export const registerIdentityKubernetesRouter = async (server: FastifyZodProvide
          .describe(KUBERNETES_AUTH.ATTACH.accessTokenNumUsesLimit)
      })
      .superRefine((data, ctx) => {
+       if (data.tokenReviewMode === IdentityKubernetesAuthTokenReviewMode.Api && !data.kubernetesHost) {
+         ctx.addIssue({
+           path: ["kubernetesHost"],
+           code: z.ZodIssueCode.custom,
+           message: "When token review mode is set to API, a Kubernetes host must be provided"
+         });
+       }
        if (data.tokenReviewMode === IdentityKubernetesAuthTokenReviewMode.Gateway && !data.gatewayId) {
          ctx.addIssue({
            path: ["gatewayId"],
@@ -171,6 +182,7 @@ export const registerIdentityKubernetesRouter = async (server: FastifyZodProvide
            message: "When token review mode is set to Gateway, a gateway must be selected"
          });
        }
+
        if (data.accessTokenTTL > data.accessTokenMaxTTL) {
          ctx.addIssue({
            path: ["accessTokenTTL"],
@@ -203,7 +215,7 @@ export const registerIdentityKubernetesRouter = async (server: FastifyZodProvide
        type: EventType.ADD_IDENTITY_KUBERNETES_AUTH,
        metadata: {
          identityId: identityKubernetesAuth.identityId,
-         kubernetesHost: identityKubernetesAuth.kubernetesHost,
+         kubernetesHost: identityKubernetesAuth.kubernetesHost ?? "",
          allowedNamespaces: identityKubernetesAuth.allowedNamespaces,
          allowedNames: identityKubernetesAuth.allowedNames,
          accessTokenTTL: identityKubernetesAuth.accessTokenTTL,
@@ -243,6 +255,7 @@ export const registerIdentityKubernetesRouter = async (server: FastifyZodProvide
          .string()
          .trim()
          .min(1)
+         .nullable()
          .optional()
          .describe(KUBERNETES_AUTH.UPDATE.kubernetesHost)
          .refine(
@@ -345,7 +358,7 @@ export const registerIdentityKubernetesRouter = async (server: FastifyZodProvide
        type: EventType.UPDATE_IDENTITY_KUBENETES_AUTH,
        metadata: {
          identityId: identityKubernetesAuth.identityId,
-         kubernetesHost: identityKubernetesAuth.kubernetesHost,
+         kubernetesHost: identityKubernetesAuth.kubernetesHost ?? "",
          allowedNamespaces: identityKubernetesAuth.allowedNamespaces,
          allowedNames: identityKubernetesAuth.allowedNames,
          accessTokenTTL: identityKubernetesAuth.accessTokenTTL,
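The hunks above layer two validation mechanisms: a per-field refine that now tolerates null, and a schema-level superRefine for cross-field rules. A standalone sketch of the same pattern, with simplified names; only the shape mirrors the diff.

import { z } from "zod";

// Sketch of the nullable-refine plus superRefine pattern; field names are illustrative.
const schema = z
  .object({
    tokenReviewMode: z.enum(["api", "gateway"]),
    kubernetesHost: z.string().trim().min(1).nullable(),
    gatewayId: z.string().nullable().optional()
  })
  .superRefine((data, ctx) => {
    // Cross-field rule: API mode needs a host; gateway mode needs a gateway.
    if (data.tokenReviewMode === "api" && !data.kubernetesHost) {
      ctx.addIssue({
        path: ["kubernetesHost"],
        code: z.ZodIssueCode.custom,
        message: "kubernetesHost is required when tokenReviewMode is api"
      });
    }
    if (data.tokenReviewMode === "gateway" && !data.gatewayId) {
      ctx.addIssue({
        path: ["gatewayId"],
        code: z.ZodIssueCode.custom,
        message: "gatewayId is required when tokenReviewMode is gateway"
      });
    }
  });

// schema.parse({ tokenReviewMode: "api", kubernetesHost: null }) throws with a kubernetesHost issue.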
@@ -376,7 +376,8 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
          })
          .optional()
          .describe(PROJECTS.UPDATE.slug),
-       secretSharing: z.boolean().optional().describe(PROJECTS.UPDATE.secretSharing)
+       secretSharing: z.boolean().optional().describe(PROJECTS.UPDATE.secretSharing),
+       showSnapshotsLegacy: z.boolean().optional().describe(PROJECTS.UPDATE.showSnapshotsLegacy)
      }),
      response: {
        200: z.object({
@@ -397,7 +398,8 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
          autoCapitalization: req.body.autoCapitalization,
          hasDeleteProtection: req.body.hasDeleteProtection,
          slug: req.body.slug,
-         secretSharing: req.body.secretSharing
+         secretSharing: req.body.secretSharing,
+         showSnapshotsLegacy: req.body.showSnapshotsLegacy
        },
        actorAuthMethod: req.permission.authMethod,
        actorId: req.permission.id,
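A hedged sketch of what a client update exercising the new flag might look like; the route path and auth header are assumptions for illustration, not taken from this diff.

// Hypothetical helper; the route path is an assumption.
async function disableLegacySnapshots(baseUrl: string, token: string, projectId: string) {
  const res = await fetch(`${baseUrl}/api/v1/workspace/${projectId}`, {
    method: "PATCH",
    headers: { Authorization: `Bearer ${token}`, "Content-Type": "application/json" },
    body: JSON.stringify({ showSnapshotsLegacy: false }) // the new optional flag
  });
  if (!res.ok) throw new Error(`Update failed with status ${res.status}`);
  return res.json();
}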
@@ -11,8 +11,10 @@ import { AppConnection } from "../app-connection-enums";
import { GcpConnectionMethod } from "./gcp-connection-enums";
import {
  GCPApp,
+ GCPGetProjectLocationsRes,
  GCPGetProjectsRes,
  GCPGetServiceRes,
+ GCPLocation,
  TGcpConnection,
  TGcpConnectionConfig
} from "./gcp-connection-types";
@@ -145,6 +147,45 @@ export const getGcpSecretManagerProjects = async (appConnection: TGcpConnection)
  return projects;
};

+export const getGcpSecretManagerProjectLocations = async (projectId: string, appConnection: TGcpConnection) => {
+  const accessToken = await getGcpConnectionAuthToken(appConnection);
+
+  let gcpLocations: GCPLocation[] = [];
+
+  const pageSize = 100;
+  let pageToken: string | undefined;
+  let hasMorePages = true;
+
+  while (hasMorePages) {
+    const params = new URLSearchParams({
+      pageSize: String(pageSize),
+      ...(pageToken ? { pageToken } : {})
+    });
+
+    // eslint-disable-next-line no-await-in-loop
+    const { data } = await request.get<GCPGetProjectLocationsRes>(
+      `${IntegrationUrls.GCP_SECRET_MANAGER_URL}/v1/projects/${projectId}/locations`,
+      {
+        params,
+        headers: {
+          Authorization: `Bearer ${accessToken}`,
+          "Accept-Encoding": "application/json"
+        }
+      }
+    );
+
+    gcpLocations = gcpLocations.concat(data.locations);
+
+    if (!data.nextPageToken) {
+      hasMorePages = false;
+    }
+
+    pageToken = data.nextPageToken;
+  }
+
+  return gcpLocations.sort((a, b) => a.displayName.localeCompare(b.displayName));
+};
+
export const validateGcpConnectionCredentials = async (appConnection: TGcpConnectionConfig) => {
  // Check if provided service account email suffix matches organization ID.
  // We do this to mitigate confused deputy attacks in multi-tenant instances
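getGcpSecretManagerProjectLocations above follows the standard GCP nextPageToken loop: request a page, accumulate results, and stop once the response omits nextPageToken. The same control flow, distilled into a reusable sketch; fetchPage stands in for the axios call in the diff.

// Generic page-token collector; Page<T> is an assumed stand-in shape.
type Page<T> = { items: T[]; nextPageToken?: string };

async function collectAllPages<T>(fetchPage: (pageToken?: string) => Promise<Page<T>>): Promise<T[]> {
  const all: T[] = [];
  let pageToken: string | undefined;
  do {
    // Pages must be fetched sequentially: each request needs the previous token.
    // eslint-disable-next-line no-await-in-loop
    const page = await fetchPage(pageToken);
    all.push(...page.items);
    pageToken = page.nextPageToken; // undefined ends the loop, mirroring hasMorePages
  } while (pageToken);
  return all;
}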
@@ -1,8 +1,8 @@
import { OrgServiceActor } from "@app/lib/types";

import { AppConnection } from "../app-connection-enums";
-import { getGcpSecretManagerProjects } from "./gcp-connection-fns";
-import { TGcpConnection } from "./gcp-connection-types";
+import { getGcpSecretManagerProjectLocations, getGcpSecretManagerProjects } from "./gcp-connection-fns";
+import { TGcpConnection, TGetGCPProjectLocationsDTO } from "./gcp-connection-types";

type TGetAppConnectionFunc = (
  app: AppConnection,
@@ -23,7 +23,23 @@ export const gcpConnectionService = (getAppConnection: TGetAppConnectionFunc) =>
    }
  };

+  const listSecretManagerProjectLocations = async (
+    { connectionId, projectId }: TGetGCPProjectLocationsDTO,
+    actor: OrgServiceActor
+  ) => {
+    const appConnection = await getAppConnection(AppConnection.GCP, connectionId, actor);
+
+    try {
+      const locations = await getGcpSecretManagerProjectLocations(projectId, appConnection);
+
+      return locations;
+    } catch (error) {
+      return [];
+    }
+  };
+
  return {
-   listSecretManagerProjects
+   listSecretManagerProjects,
+   listSecretManagerProjectLocations
  };
};
@@ -38,6 +38,22 @@ export type GCPGetProjectsRes = {
  nextPageToken?: string;
};

+export type GCPLocation = {
+  name: string;
+  locationId: string;
+  displayName: string;
+};
+
+export type GCPGetProjectLocationsRes = {
+  locations: GCPLocation[];
+  nextPageToken?: string;
+};
+
+export type TGetGCPProjectLocationsDTO = {
+  projectId: string;
+  connectionId: string;
+};
+
export type GCPGetServiceRes = {
  name: string;
  parent: string;
@@ -397,7 +397,7 @@ export const authLoginServiceFactory = ({

  // Check if the user actually has access to the specified organization.
  const userOrgs = await orgDAL.findAllOrgsByUserId(user.id);
- const hasOrganizationMembership = userOrgs.some((org) => org.id === organizationId);
+ const hasOrganizationMembership = userOrgs.some((org) => org.id === organizationId && org.userStatus !== "invited");
  const selectedOrg = await orgDAL.findById(organizationId);

  if (!hasOrganizationMembership) {
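The one-line change above tightens login: an organization the user has merely been invited to no longer counts as membership. A toy reproduction of the predicate; the org shape is a trimmed assumption.

// Trimmed stand-in for the org membership row.
type UserOrg = { id: string; userStatus: "invited" | "accepted" };

const hasActiveMembership = (orgs: UserOrg[], organizationId: string) =>
  orgs.some((org) => org.id === organizationId && org.userStatus !== "invited");

// hasActiveMembership([{ id: "org1", userStatus: "invited" }], "org1") === false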
@@ -10,6 +10,7 @@ import { chunkArray } from "@app/lib/fn";
import { logger } from "@app/lib/logger";
import { alphaNumericNanoId } from "@app/lib/nanoid";

+import { CommitType, TFolderCommitServiceFactory } from "../folder-commit/folder-commit-service";
import { TKmsServiceFactory } from "../kms/kms-service";
import { KmsDataKey } from "../kms/kms-types";
import { TProjectDALFactory } from "../project/project-dal";
@@ -18,6 +19,7 @@ import { TProjectEnvDALFactory } from "../project-env/project-env-dal";
import { TProjectEnvServiceFactory } from "../project-env/project-env-service";
import { TResourceMetadataDALFactory } from "../resource-metadata/resource-metadata-dal";
import { TSecretFolderDALFactory } from "../secret-folder/secret-folder-dal";
+import { TSecretFolderVersionDALFactory } from "../secret-folder/secret-folder-version-dal";
import { TSecretTagDALFactory } from "../secret-tag/secret-tag-dal";
import { TSecretV2BridgeDALFactory } from "../secret-v2-bridge/secret-v2-bridge-dal";
import { fnSecretBulkInsert, getAllSecretReferences } from "../secret-v2-bridge/secret-v2-bridge-fns";
@@ -42,6 +44,8 @@ export type TImportDataIntoInfisicalDTO = {
  projectService: Pick<TProjectServiceFactory, "createProject">;
  projectEnvService: Pick<TProjectEnvServiceFactory, "createEnvironment">;
  secretV2BridgeService: Pick<TSecretV2BridgeServiceFactory, "createManySecret">;
+ folderCommitService: Pick<TFolderCommitServiceFactory, "createCommit">;
+ folderVersionDAL: Pick<TSecretFolderVersionDALFactory, "create">;

  input: TImportInfisicalDataCreate;
};
@@ -507,6 +511,8 @@ export const importDataIntoInfisicalFn = async ({
  secretVersionTagDAL,
  folderDAL,
  resourceMetadataDAL,
+ folderVersionDAL,
+ folderCommitService,
  input: { data, actor, actorId, actorOrgId, actorAuthMethod }
}: TImportDataIntoInfisicalDTO) => {
  // Import data to infisical
@@ -599,6 +605,36 @@ export const importDataIntoInfisicalFn = async ({
          tx
        );

+       const newFolderVersion = await folderVersionDAL.create(
+         {
+           name: newFolder.name,
+           envId: newFolder.envId,
+           version: newFolder.version,
+           folderId: newFolder.id
+         },
+         tx
+       );
+
+       await folderCommitService.createCommit(
+         {
+           actor: {
+             type: actor,
+             metadata: {
+               id: actorId
+             }
+           },
+           message: "Changed by external migration",
+           folderId: parentEnv.rootFolderId,
+           changes: [
+             {
+               type: CommitType.ADD,
+               folderVersionId: newFolderVersion.id
+             }
+           ]
+         },
+         tx
+       );
+
        originalToNewFolderId.set(folder.id, {
          folderId: newFolder.id,
          projectId: parentEnv.projectId
@@ -772,6 +808,7 @@ export const importDataIntoInfisicalFn = async ({
        secretVersionDAL,
        secretTagDAL,
        secretVersionTagDAL,
+       folderCommitService,
        actor: {
          type: actor,
          actorId
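The new block above follows a create-version-then-commit pattern: each imported folder gets a version row, and a commit recording an ADD change references that version, all within the surrounding transaction. A trimmed sketch of the pattern with stand-in interfaces, not the real service types.

// Stand-in interfaces; only the call shape mirrors the diff.
type Tx = unknown;

interface FolderVersionDAL {
  create(data: { name: string; envId: string; version: number; folderId: string }, tx?: Tx): Promise<{ id: string }>;
}
interface FolderCommitService {
  createCommit(
    data: {
      actor: { type: string; metadata: { id: string } };
      message: string;
      folderId: string;
      changes: { type: "add"; folderVersionId: string }[];
    },
    tx?: Tx
  ): Promise<void>;
}

async function recordImportedFolder(
  deps: { folderVersionDAL: FolderVersionDAL; folderCommitService: FolderCommitService },
  folder: { id: string; name: string; envId: string; version: number },
  rootFolderId: string,
  actorId: string,
  tx?: Tx
) {
  // 1) Snapshot the folder as a version row.
  const version = await deps.folderVersionDAL.create(
    { name: folder.name, envId: folder.envId, version: folder.version, folderId: folder.id },
    tx
  );
  // 2) Record an ADD change pointing at that version on a new commit.
  await deps.folderCommitService.createCommit(
    {
      actor: { type: "platform", metadata: { id: actorId } }, // actor type is an assumption
      message: "Changed by external migration",
      folderId: rootFolderId,
      changes: [{ type: "add", folderVersionId: version.id }]
    },
    tx
  );
}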
@@ -3,6 +3,7 @@ import { infisicalSymmetricDecrypt } from "@app/lib/crypto/encryption";
import { logger } from "@app/lib/logger";
import { QueueJobs, QueueName, TQueueServiceFactory } from "@app/queue";

+import { TFolderCommitServiceFactory } from "../folder-commit/folder-commit-service";
import { TKmsServiceFactory } from "../kms/kms-service";
import { TProjectDALFactory } from "../project/project-dal";
import { TProjectServiceFactory } from "../project/project-service";
@@ -10,6 +11,7 @@ import { TProjectEnvDALFactory } from "../project-env/project-env-dal";
import { TProjectEnvServiceFactory } from "../project-env/project-env-service";
import { TResourceMetadataDALFactory } from "../resource-metadata/resource-metadata-dal";
import { TSecretFolderDALFactory } from "../secret-folder/secret-folder-dal";
+import { TSecretFolderVersionDALFactory } from "../secret-folder/secret-folder-version-dal";
import { TSecretTagDALFactory } from "../secret-tag/secret-tag-dal";
import { TSecretV2BridgeDALFactory } from "../secret-v2-bridge/secret-v2-bridge-dal";
import { TSecretV2BridgeServiceFactory } from "../secret-v2-bridge/secret-v2-bridge-service";
@@ -36,6 +38,8 @@ export type TExternalMigrationQueueFactoryDep = {
  projectService: Pick<TProjectServiceFactory, "createProject">;
  projectEnvService: Pick<TProjectEnvServiceFactory, "createEnvironment">;
  secretV2BridgeService: Pick<TSecretV2BridgeServiceFactory, "createManySecret">;
+ folderCommitService: Pick<TFolderCommitServiceFactory, "createCommit">;
+ folderVersionDAL: Pick<TSecretFolderVersionDALFactory, "create">;

  resourceMetadataDAL: Pick<TResourceMetadataDALFactory, "insertMany" | "delete">;
};
@@ -56,6 +60,8 @@ export const externalMigrationQueueFactory = ({
  secretTagDAL,
  secretVersionTagDAL,
  folderDAL,
+ folderCommitService,
+ folderVersionDAL,
  resourceMetadataDAL
}: TExternalMigrationQueueFactoryDep) => {
  const startImport = async (dto: {
@@ -114,6 +120,8 @@ export const externalMigrationQueueFactory = ({
        projectService,
        projectEnvService,
        secretV2BridgeService,
+       folderCommitService,
+       folderVersionDAL,
        resourceMetadataDAL
      });

@@ -0,0 +1,118 @@
import { Knex } from "knex";

import { TDbClient } from "@app/db";
import {
  TableName,
  TFolderCheckpointResources,
  TFolderCheckpoints,
  TSecretFolderVersions,
  TSecretVersionsV2
} from "@app/db/schemas";
import { DatabaseError } from "@app/lib/errors";
import { ormify, selectAllTableCols } from "@app/lib/knex";

export type TFolderCheckpointResourcesDALFactory = ReturnType<typeof folderCheckpointResourcesDALFactory>;

export type ResourceWithCheckpointInfo = TFolderCheckpointResources & {
  folderCommitId: string;
};

export const folderCheckpointResourcesDALFactory = (db: TDbClient) => {
  const folderCheckpointResourcesOrm = ormify(db, TableName.FolderCheckpointResources);

  const findByCheckpointId = async (
    folderCheckpointId: string,
    tx?: Knex
  ): Promise<
    (TFolderCheckpointResources & {
      referencedSecretId?: string;
      referencedFolderId?: string;
      folderName?: string;
      folderVersion?: string;
      secretKey?: string;
      secretVersion?: string;
    })[]
  > => {
    try {
      const docs = await (tx || db.replicaNode())<TFolderCheckpointResources>(TableName.FolderCheckpointResources)
        .where({ folderCheckpointId })
        .leftJoin<TSecretVersionsV2>(
          TableName.SecretVersionV2,
          `${TableName.FolderCheckpointResources}.secretVersionId`,
          `${TableName.SecretVersionV2}.id`
        )
        .leftJoin<TSecretFolderVersions>(
          TableName.SecretFolderVersion,
          `${TableName.FolderCheckpointResources}.folderVersionId`,
          `${TableName.SecretFolderVersion}.id`
        )
        .select(selectAllTableCols(TableName.FolderCheckpointResources))
        .select(
          db.ref("secretId").withSchema(TableName.SecretVersionV2).as("referencedSecretId"),
          db.ref("folderId").withSchema(TableName.SecretFolderVersion).as("referencedFolderId"),
          db.ref("name").withSchema(TableName.SecretFolderVersion).as("folderName"),
          db.ref("version").withSchema(TableName.SecretFolderVersion).as("folderVersion"),
          db.ref("key").withSchema(TableName.SecretVersionV2).as("secretKey"),
          db.ref("version").withSchema(TableName.SecretVersionV2).as("secretVersion")
        );
      return docs.map((doc) => ({
        ...doc,
        folderVersion: doc.folderVersion?.toString(),
        secretVersion: doc.secretVersion?.toString()
      }));
    } catch (error) {
      throw new DatabaseError({ error, name: "FindByCheckpointId" });
    }
  };

  const findBySecretVersionId = async (secretVersionId: string, tx?: Knex): Promise<ResourceWithCheckpointInfo[]> => {
    try {
      const docs = await (tx || db.replicaNode())<
        TFolderCheckpointResources & Pick<TFolderCheckpoints, "folderCommitId" | "createdAt">
      >(TableName.FolderCheckpointResources)
        .where({ secretVersionId })
        .select(selectAllTableCols(TableName.FolderCheckpointResources))
        .join(
          TableName.FolderCheckpoint,
          `${TableName.FolderCheckpointResources}.folderCheckpointId`,
          `${TableName.FolderCheckpoint}.id`
        )
        .select(
          db.ref("folderCommitId").withSchema(TableName.FolderCheckpoint),
          db.ref("createdAt").withSchema(TableName.FolderCheckpoint)
        );
      return docs;
    } catch (error) {
      throw new DatabaseError({ error, name: "FindBySecretVersionId" });
    }
  };

  const findByFolderVersionId = async (folderVersionId: string, tx?: Knex): Promise<ResourceWithCheckpointInfo[]> => {
    try {
      const docs = await (tx || db.replicaNode())<
        TFolderCheckpointResources & Pick<TFolderCheckpoints, "folderCommitId" | "createdAt">
      >(TableName.FolderCheckpointResources)
        .where({ folderVersionId })
        .select(selectAllTableCols(TableName.FolderCheckpointResources))
        .join(
          TableName.FolderCheckpoint,
          `${TableName.FolderCheckpointResources}.folderCheckpointId`,
          `${TableName.FolderCheckpoint}.id`
        )
        .select(
          db.ref("folderCommitId").withSchema(TableName.FolderCheckpoint),
          db.ref("createdAt").withSchema(TableName.FolderCheckpoint)
        );
      return docs;
    } catch (error) {
      throw new DatabaseError({ error, name: "FindByFolderVersionId" });
    }
  };

  return {
    ...folderCheckpointResourcesOrm,
    findByCheckpointId,
    findBySecretVersionId,
    findByFolderVersionId
  };
};
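This file follows the DAL factory convention used throughout the backend: ormify supplies generic CRUD, the factory layers typed finders on top, and `tx || db.replicaNode()` routes reads to a replica unless a transaction is in flight. A minimal sketch of the convention; the table name and the db type intersection are placeholders, not the real @app/db types.

// Minimal sketch of the DAL convention; "example_table" is a placeholder.
import { Knex } from "knex";

const exampleDALFactory = (db: Knex & { replicaNode: () => Knex }) => {
  // Prefer the transaction when given; otherwise read from a replica.
  const reader = (tx?: Knex) => tx || db.replicaNode();

  const findByParentId = async (parentId: string, tx?: Knex) =>
    reader(tx)("example_table").where({ parentId }).select("*");

  return { findByParentId };
};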
backend/src/services/folder-checkpoint/folder-checkpoint-dal.ts (new file, 129 lines)
@@ -0,0 +1,129 @@
import { Knex } from "knex";

import { TDbClient } from "@app/db";
import { TableName, TFolderCheckpoints, TFolderCommits } from "@app/db/schemas";
import { DatabaseError } from "@app/lib/errors";
import { buildFindFilter, ormify, selectAllTableCols } from "@app/lib/knex";

export type TFolderCheckpointDALFactory = ReturnType<typeof folderCheckpointDALFactory>;

type CheckpointWithCommitInfo = TFolderCheckpoints & {
  actorMetadata: unknown;
  actorType: string;
  message?: string | null;
  commitDate: Date;
  folderId: string;
};

export const folderCheckpointDALFactory = (db: TDbClient) => {
  const folderCheckpointOrm = ormify(db, TableName.FolderCheckpoint);

  const findByCommitId = async (folderCommitId: string, tx?: Knex): Promise<TFolderCheckpoints | undefined> => {
    try {
      const doc = await (tx || db.replicaNode())<TFolderCheckpoints>(TableName.FolderCheckpoint)
        // eslint-disable-next-line @typescript-eslint/no-misused-promises
        .where(buildFindFilter({ folderCommitId }, TableName.FolderCheckpoint))
        .select(selectAllTableCols(TableName.FolderCheckpoint))
        .first();
      return doc;
    } catch (error) {
      throw new DatabaseError({ error, name: "FindByCommitId" });
    }
  };

  const findByFolderId = async (folderId: string, limit?: number, tx?: Knex): Promise<CheckpointWithCommitInfo[]> => {
    try {
      let query = (tx || db.replicaNode())(TableName.FolderCheckpoint)
        .join<TFolderCommits>(
          TableName.FolderCommit,
          `${TableName.FolderCheckpoint}.folderCommitId`,
          `${TableName.FolderCommit}.id`
        )
        // eslint-disable-next-line @typescript-eslint/no-misused-promises
        .where(buildFindFilter({ folderId }, TableName.FolderCommit))
        .select(selectAllTableCols(TableName.FolderCheckpoint))
        .select(
          db.ref("actorMetadata").withSchema(TableName.FolderCommit),
          db.ref("actorType").withSchema(TableName.FolderCommit),
          db.ref("message").withSchema(TableName.FolderCommit),
          db.ref("createdAt").withSchema(TableName.FolderCommit).as("commitDate"),
          db.ref("folderId").withSchema(TableName.FolderCommit)
        )
        .orderBy(`${TableName.FolderCheckpoint}.createdAt`, "desc");

      if (limit !== undefined) {
        query = query.limit(limit);
      }

      return await query;
    } catch (error) {
      throw new DatabaseError({ error, name: "FindByFolderId" });
    }
  };

  const findLatestByFolderId = async (folderId: string, tx?: Knex): Promise<CheckpointWithCommitInfo | undefined> => {
    try {
      const doc = await (tx || db.replicaNode())(TableName.FolderCheckpoint)
        .join<TFolderCommits>(
          TableName.FolderCommit,
          `${TableName.FolderCheckpoint}.folderCommitId`,
          `${TableName.FolderCommit}.id`
        )
        // eslint-disable-next-line @typescript-eslint/no-misused-promises
        .where(buildFindFilter({ folderId }, TableName.FolderCommit))
        .select(selectAllTableCols(TableName.FolderCheckpoint))
        .select(
          db.ref("actorMetadata").withSchema(TableName.FolderCommit),
          db.ref("actorType").withSchema(TableName.FolderCommit),
          db.ref("message").withSchema(TableName.FolderCommit),
          db.ref("createdAt").withSchema(TableName.FolderCommit).as("commitDate"),
          db.ref("folderId").withSchema(TableName.FolderCommit)
        )
        .orderBy(`${TableName.FolderCheckpoint}.createdAt`, "desc")
        .first();
      return doc;
    } catch (error) {
      throw new DatabaseError({ error, name: "FindLatestByFolderId" });
    }
  };

  const findNearestCheckpoint = async (
    folderCommitId: bigint,
    folderId: string,
    tx?: Knex
  ): Promise<(CheckpointWithCommitInfo & { commitId: bigint }) | undefined> => {
    try {
      // Get the checkpoint with the highest commitId that's still less than or equal to our commit
      const nearestCheckpoint = await (tx || db.replicaNode())(TableName.FolderCheckpoint)
        .join<TFolderCommits>(
          TableName.FolderCommit,
          `${TableName.FolderCheckpoint}.folderCommitId`,
          `${TableName.FolderCommit}.id`
        )
        .where(`${TableName.FolderCommit}.folderId`, "=", folderId)
        .where(`${TableName.FolderCommit}.commitId`, "<=", folderCommitId.toString())
        .select(selectAllTableCols(TableName.FolderCheckpoint))
        .select(
          db.ref("actorMetadata").withSchema(TableName.FolderCommit),
          db.ref("actorType").withSchema(TableName.FolderCommit),
          db.ref("message").withSchema(TableName.FolderCommit),
          db.ref("commitId").withSchema(TableName.FolderCommit),
          db.ref("createdAt").withSchema(TableName.FolderCommit).as("commitDate"),
          db.ref("folderId").withSchema(TableName.FolderCommit)
        )
        .orderBy(`${TableName.FolderCommit}.commitId`, "desc")
        .first();
      return nearestCheckpoint;
    } catch (error) {
      throw new DatabaseError({ error, name: "FindNearestCheckpoint" });
    }
  };

  return {
    ...folderCheckpointOrm,
    findByCommitId,
    findByFolderId,
    findLatestByFolderId,
    findNearestCheckpoint
  };
};
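findNearestCheckpoint is the key to point-in-time reconstruction: jump to the newest checkpoint at or before a target commit, then replay only the commits after it. A sketch of how a caller might combine it with findCommitsToRecreate from the commit DAL; the interfaces are trimmed to the relevant methods and are not the real factory types.

// Trimmed stand-ins for the two DALs involved in a replay.
interface CheckpointDAL {
  findNearestCheckpoint(commitId: bigint, folderId: string): Promise<{ commitId: bigint } | undefined>;
}
interface CommitDAL {
  findCommitsToRecreate(folderId: string, target: bigint, checkpoint: bigint): Promise<unknown[]>;
}

async function planReconstruction(
  deps: { checkpointDAL: CheckpointDAL; commitDAL: CommitDAL },
  folderId: string,
  targetCommitId: bigint
) {
  const checkpoint = await deps.checkpointDAL.findNearestCheckpoint(targetCommitId, folderId);
  const base = checkpoint?.commitId ?? 0n; // no checkpoint yet: replay from the beginning
  const toReplay = await deps.commitDAL.findCommitsToRecreate(folderId, targetCommitId, base);
  return { base, toReplay };
}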
@@ -0,0 +1,233 @@
/* eslint-disable @typescript-eslint/no-misused-promises */
import { Knex } from "knex";

import { TDbClient } from "@app/db";
import {
  TableName,
  TFolderCommitChanges,
  TFolderCommits,
  TProjectEnvironments,
  TSecretFolderVersions,
  TSecretVersionsV2
} from "@app/db/schemas";
import { DatabaseError } from "@app/lib/errors";
import { buildFindFilter, ormify, selectAllTableCols } from "@app/lib/knex";

export type TFolderCommitChangesDALFactory = ReturnType<typeof folderCommitChangesDALFactory>;

// Base type with common fields
type BaseCommitChangeInfo = TFolderCommitChanges & {
  actorMetadata: unknown;
  actorType: string;
  message?: string | null;
  folderId: string;
  createdAt: Date;
};

// Secret-specific change
export type SecretCommitChange = BaseCommitChangeInfo & {
  resourceType: "secret";
  secretKey: string;
  changeType: string;
  secretVersionId?: string | null;
  secretVersion: string;
  secretId: string;
  versions?: {
    secretKey: string;
    secretComment: string;
    skipMultilineEncoding?: boolean | null;
    secretReminderRepeatDays?: number | null;
    secretReminderNote?: string | null;
    metadata?: unknown;
    tags?: string[] | null;
    secretReminderRecipients?: string[] | null;
    secretValue: string;
  }[];
};

// Folder-specific change
export type FolderCommitChange = BaseCommitChangeInfo & {
  resourceType: "folder";
  folderName: string;
  folderVersion: string;
  folderChangeId: string;
  versions?: {
    version: string;
    name?: string;
  }[];
};

// Discriminated union
export type CommitChangeWithCommitInfo = SecretCommitChange | FolderCommitChange;

// Type guards
export const isSecretCommitChange = (change: CommitChangeWithCommitInfo): change is SecretCommitChange =>
  change.resourceType === "secret";

export const isFolderCommitChange = (change: CommitChangeWithCommitInfo): change is FolderCommitChange =>
  change.resourceType === "folder";

export const folderCommitChangesDALFactory = (db: TDbClient) => {
  const folderCommitChangesOrm = ormify(db, TableName.FolderCommitChanges);

  const findByCommitId = async (
    folderCommitId: string,
    projectId: string,
    tx?: Knex
  ): Promise<CommitChangeWithCommitInfo[]> => {
    try {
      const docs = await (tx || db.replicaNode())<TFolderCommitChanges>(TableName.FolderCommitChanges)
        .where(buildFindFilter({ folderCommitId }, TableName.FolderCommitChanges))
        .leftJoin<TFolderCommits>(
          TableName.FolderCommit,
          `${TableName.FolderCommitChanges}.folderCommitId`,
          `${TableName.FolderCommit}.id`
        )
        .leftJoin<TSecretVersionsV2>(
          TableName.SecretVersionV2,
          `${TableName.FolderCommitChanges}.secretVersionId`,
          `${TableName.SecretVersionV2}.id`
        )
        .leftJoin<TSecretFolderVersions>(
          TableName.SecretFolderVersion,
          `${TableName.FolderCommitChanges}.folderVersionId`,
          `${TableName.SecretFolderVersion}.id`
        )
        .leftJoin<TProjectEnvironments>(
          TableName.Environment,
          `${TableName.FolderCommit}.envId`,
          `${TableName.Environment}.id`
        )
        .where((qb) => {
          if (projectId) {
            void qb.where(`${TableName.Environment}.projectId`, "=", projectId);
          }
        })
        .select(selectAllTableCols(TableName.FolderCommitChanges))
        .select(
          db.ref("name").withSchema(TableName.SecretFolderVersion).as("folderName"),
          db.ref("folderId").withSchema(TableName.SecretFolderVersion).as("folderChangeId"),
          db.ref("version").withSchema(TableName.SecretFolderVersion).as("folderVersion"),
          db.ref("key").withSchema(TableName.SecretVersionV2).as("secretKey"),
          db.ref("version").withSchema(TableName.SecretVersionV2).as("secretVersion"),
          db.ref("secretId").withSchema(TableName.SecretVersionV2),
          db.ref("actorMetadata").withSchema(TableName.FolderCommit),
          db.ref("actorType").withSchema(TableName.FolderCommit),
          db.ref("message").withSchema(TableName.FolderCommit),
          db.ref("createdAt").withSchema(TableName.FolderCommit),
          db.ref("folderId").withSchema(TableName.FolderCommit)
        );

      return docs.map((doc) => {
        // Determine if this is a secret or folder change based on populated fields
        if (doc.secretKey && doc.secretVersion !== null && doc.secretId) {
          return {
            ...doc,
            resourceType: "secret",
            secretKey: doc.secretKey,
            secretVersion: doc.secretVersion.toString(),
            secretId: doc.secretId
          } as SecretCommitChange;
        }
        return {
          ...doc,
          resourceType: "folder",
          folderName: doc.folderName,
          folderVersion: doc.folderVersion.toString(),
          folderChangeId: doc.folderChangeId
        } as FolderCommitChange;
      });
    } catch (error) {
      throw new DatabaseError({ error, name: "FindByCommitId" });
    }
  };

  const findBySecretVersionId = async (secretVersionId: string, tx?: Knex): Promise<SecretCommitChange[]> => {
    try {
      const docs = await (tx || db.replicaNode())<
        TFolderCommitChanges &
          Pick<TFolderCommits, "actorMetadata" | "actorType" | "message" | "createdAt" | "folderId">
      >(TableName.FolderCommitChanges)
        .where(buildFindFilter({ secretVersionId }, TableName.FolderCommitChanges))
        .select(selectAllTableCols(TableName.FolderCommitChanges))
        .join(TableName.FolderCommit, `${TableName.FolderCommitChanges}.folderCommitId`, `${TableName.FolderCommit}.id`)
        .leftJoin<TSecretVersionsV2>(
          TableName.SecretVersionV2,
          `${TableName.FolderCommitChanges}.secretVersionId`,
          `${TableName.SecretVersionV2}.id`
        )
        .select(
          db.ref("actorMetadata").withSchema(TableName.FolderCommit),
          db.ref("actorType").withSchema(TableName.FolderCommit),
          db.ref("message").withSchema(TableName.FolderCommit),
          db.ref("createdAt").withSchema(TableName.FolderCommit),
          db.ref("folderId").withSchema(TableName.FolderCommit),
          db.ref("key").withSchema(TableName.SecretVersionV2).as("secretKey"),
          db.ref("version").withSchema(TableName.SecretVersionV2).as("secretVersion"),
          db.ref("secretId").withSchema(TableName.SecretVersionV2)
        );

      return docs
        .filter((doc) => doc.secretKey && doc.secretVersion !== null && doc.secretId)
        .map(
          (doc): SecretCommitChange => ({
            ...doc,
            resourceType: "secret",
            secretKey: doc.secretKey,
            secretVersion: doc.secretVersion.toString(),
            secretId: doc.secretId
          })
        );
    } catch (error) {
      throw new DatabaseError({ error, name: "FindBySecretVersionId" });
    }
  };

  const findByFolderVersionId = async (folderVersionId: string, tx?: Knex): Promise<FolderCommitChange[]> => {
    try {
      const docs = await (tx || db.replicaNode())<
        TFolderCommitChanges &
          Pick<TFolderCommits, "actorMetadata" | "actorType" | "message" | "createdAt" | "folderId">
      >(TableName.FolderCommitChanges)
        .where(buildFindFilter({ folderVersionId }, TableName.FolderCommitChanges))
        .select(selectAllTableCols(TableName.FolderCommitChanges))
        .join(TableName.FolderCommit, `${TableName.FolderCommitChanges}.folderCommitId`, `${TableName.FolderCommit}.id`)
        .leftJoin<TSecretFolderVersions>(
          TableName.SecretFolderVersion,
          `${TableName.FolderCommitChanges}.folderVersionId`,
          `${TableName.SecretFolderVersion}.id`
        )
        .select(
          db.ref("actorMetadata").withSchema(TableName.FolderCommit),
          db.ref("actorType").withSchema(TableName.FolderCommit),
          db.ref("message").withSchema(TableName.FolderCommit),
          db.ref("createdAt").withSchema(TableName.FolderCommit),
          db.ref("folderId").withSchema(TableName.FolderCommit),
          db.ref("name").withSchema(TableName.SecretFolderVersion).as("folderName"),
          db.ref("folderId").withSchema(TableName.SecretFolderVersion).as("folderChangeId"),
          db.ref("version").withSchema(TableName.SecretFolderVersion).as("folderVersion")
        );

      return docs
        .filter((doc) => doc.folderName && doc.folderVersion !== null && doc.folderChangeId)
        .map(
          (doc): FolderCommitChange => ({
            ...doc,
            resourceType: "folder",
            folderName: doc.folderName,
            folderVersion: doc.folderVersion!.toString(),
            folderChangeId: doc.folderChangeId
          })
        );
    } catch (error) {
      throw new DatabaseError({ error, name: "FindByFolderVersionId" });
    }
  };

  return {
    ...folderCommitChangesOrm,
    findByCommitId,
    findBySecretVersionId,
    findByFolderVersionId
  };
};
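A short usage sketch for the discriminated union defined above: the resourceType tag plus the exported type guards let callers narrow to SecretCommitChange or FolderCommitChange without casts. The import path is assumed from the file's exports.

import {
  CommitChangeWithCommitInfo,
  isFolderCommitChange,
  isSecretCommitChange
} from "./folder-commit-changes-dal"; // path is an assumption

const describeChange = (change: CommitChangeWithCommitInfo): string => {
  if (isSecretCommitChange(change)) {
    // Narrowed to SecretCommitChange: secretKey/secretVersion are in scope.
    return `secret ${change.secretKey} @ v${change.secretVersion}`;
  }
  if (isFolderCommitChange(change)) {
    return `folder ${change.folderName} @ v${change.folderVersion}`;
  }
  return "unknown change"; // unreachable given the union, kept for safety
};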
backend/src/services/folder-commit/folder-commit-dal.ts (new file, 513 lines)
@@ -0,0 +1,513 @@
import { Knex } from "knex";

import { TDbClient } from "@app/db";
import {
  TableName,
  TFolderCommitChanges,
  TFolderCommits,
  TProjectEnvironments,
  TSecretFolderVersions,
  TSecretVersionsV2
} from "@app/db/schemas";
import { DatabaseError, NotFoundError } from "@app/lib/errors";
import { buildFindFilter, ormify, selectAllTableCols } from "@app/lib/knex";

export type TFolderCommitDALFactory = ReturnType<typeof folderCommitDALFactory>;

export const folderCommitDALFactory = (db: TDbClient) => {
  const folderCommitOrm = ormify(db, TableName.FolderCommit);
  const { delete: deleteOp, deleteById, ...restOfOrm } = folderCommitOrm;

  const findByFolderId = async (folderId: string, tx?: Knex): Promise<TFolderCommits[]> => {
    try {
      const trx = tx || db.replicaNode();

      // First, get all folder commits
      const folderCommits = await trx(TableName.FolderCommit)
        .where({ folderId })
        .select("*")
        .orderBy("createdAt", "desc");

      if (folderCommits.length === 0) return [];

      // Get all commit IDs
      const commitIds = folderCommits.map((commit) => commit.id);

      // Then get all related changes
      const changes = await trx(TableName.FolderCommitChanges).whereIn("folderCommitId", commitIds).select("*");

      const changesMap = changes.reduce(
        (acc, change) => {
          const { folderCommitId } = change;
          if (!acc[folderCommitId]) acc[folderCommitId] = [];
          acc[folderCommitId].push(change);
          return acc;
        },
        {} as Record<string, TFolderCommitChanges[]>
      );

      return folderCommits.map((commit) => ({
        ...commit,
        changes: changesMap[commit.id] || []
      }));
    } catch (error) {
      throw new DatabaseError({ error, name: "FindByFolderId" });
    }
  };

  const findLatestCommit = async (
    folderId: string,
    projectId?: string,
    tx?: Knex
  ): Promise<TFolderCommits | undefined> => {
    try {
      const doc = await (tx || db.replicaNode())(TableName.FolderCommit)
        .where({ folderId })
        .leftJoin(TableName.Environment, `${TableName.FolderCommit}.envId`, `${TableName.Environment}.id`)
        .where((qb) => {
          if (projectId) {
            void qb.where(`${TableName.Environment}.projectId`, "=", projectId);
          }
        })
        .select(selectAllTableCols(TableName.FolderCommit))
        .orderBy("commitId", "desc")
        .first();
      return doc;
    } catch (error) {
      throw new DatabaseError({ error, name: "FindLatestCommit" });
    }
  };

  const findLatestCommitByFolderIds = async (folderIds: string[], tx?: Knex): Promise<TFolderCommits[] | undefined> => {
    try {
      // First get max commitId for each folderId
      const maxCommitIdSubquery = (tx || db.replicaNode())(TableName.FolderCommit)
        .select("folderId")
        .max("commitId as maxCommitId")
        .whereIn("folderId", folderIds)
        .groupBy("folderId");

      // Join with main table to get complete records for each max commitId
      const docs = await (tx || db.replicaNode())(TableName.FolderCommit)
        .select(selectAllTableCols(TableName.FolderCommit))
        // eslint-disable-next-line func-names
        .join<TFolderCommits>(maxCommitIdSubquery.as("latest"), function () {
          this.on(`${TableName.FolderCommit}.folderId`, "=", "latest.folderId").andOn(
            `${TableName.FolderCommit}.commitId`,
            "=",
            "latest.maxCommitId"
          );
        });

      return docs;
    } catch (error) {
      throw new DatabaseError({ error, name: "FindLatestCommitByFolderIds" });
    }
  };

  const findLatestEnvCommit = async (envId: string, tx?: Knex): Promise<TFolderCommits | undefined> => {
    try {
      const doc = await (tx || db.replicaNode())(TableName.FolderCommit)
        .where(`${TableName.FolderCommit}.envId`, "=", envId)
        .select(selectAllTableCols(TableName.FolderCommit))
        .orderBy("commitId", "desc")
        .first();
      return doc;
    } catch (error) {
      throw new DatabaseError({ error, name: "FindLatestEnvCommit" });
    }
  };

  const findMultipleLatestCommits = async (folderIds: string[], tx?: Knex): Promise<TFolderCommits[]> => {
    try {
      const knexInstance = tx || db.replicaNode();

      // Get the latest commitId for each folderId
      const subquery = knexInstance(TableName.FolderCommit)
        .whereIn("folderId", folderIds)
        .groupBy("folderId")
        .select("folderId")
        .max("commitId as maxCommitId");

      // Then fetch the complete rows matching those latest commits
      const docs = await knexInstance(TableName.FolderCommit)
        // eslint-disable-next-line func-names
        .innerJoin<TFolderCommits>(subquery.as("latest"), function () {
          this.on(`${TableName.FolderCommit}.folderId`, "=", "latest.folderId").andOn(
            `${TableName.FolderCommit}.commitId`,
            "=",
            "latest.maxCommitId"
          );
        })
        .select(selectAllTableCols(TableName.FolderCommit));

      return docs;
    } catch (error) {
      throw new DatabaseError({ error, name: "FindMultipleLatestCommits" });
    }
  };

  const getNumberOfCommitsSince = async (folderId: string, folderCommitId: string, tx?: Knex): Promise<number> => {
    try {
      const referencedCommit = await (tx || db.replicaNode())(TableName.FolderCommit)
        .where({ id: folderCommitId })
        .select("commitId")
        .first();

      if (referencedCommit?.commitId) {
        const doc = await (tx || db.replicaNode())(TableName.FolderCommit)
          .where({ folderId })
          .where("commitId", ">", referencedCommit.commitId)
          .count();
        return Number(doc?.[0].count);
      }
      return 0;
    } catch (error) {
      throw new DatabaseError({ error, name: "GetNumberOfCommitsSince" });
    }
  };

  const getEnvNumberOfCommitsSince = async (envId: string, folderCommitId: string, tx?: Knex): Promise<number> => {
    try {
      const referencedCommit = await (tx || db.replicaNode())(TableName.FolderCommit)
        .where({ id: folderCommitId })
        .select("commitId")
        .first();

      if (referencedCommit?.commitId) {
        const doc = await (tx || db.replicaNode())(TableName.FolderCommit)
          .where(`${TableName.FolderCommit}.envId`, "=", envId)
          .where("commitId", ">", referencedCommit.commitId)
          .count();
        return Number(doc?.[0].count);
      }
      return 0;
    } catch (error) {
      throw new DatabaseError({ error, name: "GetEnvNumberOfCommitsSince" });
    }
  };

  const findCommitsToRecreate = async (
    folderId: string,
    targetCommitNumber: bigint,
    checkpointCommitNumber: bigint,
    tx?: Knex
  ): Promise<
    (TFolderCommits & {
      changes: (TFolderCommitChanges & {
        referencedSecretId?: string;
        referencedFolderId?: string;
        folderName?: string;
        folderVersion?: string;
        secretKey?: string;
        secretVersion?: string;
      })[];
    })[]
  > => {
    try {
      // First get all the commits in the range
      const commits = await (tx || db.replicaNode())(TableName.FolderCommit)
        // eslint-disable-next-line @typescript-eslint/no-misused-promises
        .where(buildFindFilter({ folderId }, TableName.FolderCommit))
        .andWhere(`${TableName.FolderCommit}.commitId`, ">", checkpointCommitNumber.toString())
        .andWhere(`${TableName.FolderCommit}.commitId`, "<=", targetCommitNumber.toString())
        .select(selectAllTableCols(TableName.FolderCommit))
        .orderBy(`${TableName.FolderCommit}.commitId`, "asc");

      // If no commits found, return empty array
      if (!commits.length) {
        return [];
      }

      // Get all the commit IDs
      const commitIds = commits.map((commit) => commit.id);

      // Get all changes for these commits in a single query
      const allChanges = await (tx || db.replicaNode())(TableName.FolderCommitChanges)
        .whereIn(`${TableName.FolderCommitChanges}.folderCommitId`, commitIds)
        .leftJoin<TSecretVersionsV2>(
          TableName.SecretVersionV2,
          `${TableName.FolderCommitChanges}.secretVersionId`,
          `${TableName.SecretVersionV2}.id`
        )
        .leftJoin<TSecretFolderVersions>(
          TableName.SecretFolderVersion,
          `${TableName.FolderCommitChanges}.folderVersionId`,
          `${TableName.SecretFolderVersion}.id`
        )
        .select(selectAllTableCols(TableName.FolderCommitChanges))
        .select(
          db.ref("secretId").withSchema(TableName.SecretVersionV2).as("referencedSecretId"),
          db.ref("folderId").withSchema(TableName.SecretFolderVersion).as("referencedFolderId"),
          db.ref("name").withSchema(TableName.SecretFolderVersion).as("folderName"),
          db.ref("version").withSchema(TableName.SecretFolderVersion).as("folderVersion"),
          db.ref("key").withSchema(TableName.SecretVersionV2).as("secretKey"),
          db.ref("version").withSchema(TableName.SecretVersionV2).as("secretVersion")
        );

      // Organize changes by commit ID
      const changesByCommitId = allChanges.reduce(
        (acc, change) => {
          if (!acc[change.folderCommitId]) {
            acc[change.folderCommitId] = [];
          }
          acc[change.folderCommitId].push(change);
          return acc;
        },
        {} as Record<string, TFolderCommitChanges[]>
      );

      // Attach changes to each commit
      return commits.map((commit) => ({
        ...commit,
        changes: changesByCommitId[commit.id] || []
      }));
    } catch (error) {
      throw new DatabaseError({ error, name: "FindCommitsToRecreate" });
    }
  };

  const findLatestCommitBetween = async ({
    folderId,
    startCommitId,
    endCommitId,
    tx
  }: {
    folderId: string;
    startCommitId?: string;
    endCommitId: string;
    tx?: Knex;
  }): Promise<TFolderCommits | undefined> => {
    try {
      const doc = await (tx || db.replicaNode())(TableName.FolderCommit)
        .where("commitId", "<=", endCommitId)
        .where({ folderId })
        .where((qb) => {
          if (startCommitId) {
            void qb.where("commitId", ">=", startCommitId);
          }
        })
        .select(selectAllTableCols(TableName.FolderCommit))
        .orderBy("commitId", "desc")
        .first();
      return doc;
    } catch (error) {
      throw new DatabaseError({ error, name: "FindLatestCommitBetween" });
    }
  };

  const findAllCommitsBetween = async ({
    envId,
    startCommitId,
    endCommitId,
    tx
  }: {
    envId?: string;
    startCommitId?: string;
    endCommitId?: string;
    tx?: Knex;
  }): Promise<TFolderCommits[]> => {
    try {
      const docs = await (tx || db.replicaNode())(TableName.FolderCommit)
        .where((qb) => {
          if (envId) {
            void qb.where(`${TableName.FolderCommit}.envId`, "=", envId);
          }
          if (startCommitId) {
            void qb.where("commitId", ">=", startCommitId);
          }
          if (endCommitId) {
            void qb.where("commitId", "<=", endCommitId);
          }
        })
        .select(selectAllTableCols(TableName.FolderCommit))
        .orderBy("commitId", "desc");
      return docs;
    } catch (error) {
      throw new DatabaseError({ error, name: "FindAllCommitsBetween" });
    }
  };

  const findAllFolderCommitsAfter = async ({
    envId,
    startCommitId,
    tx
  }: {
    envId?: string;
    startCommitId?: string;
    tx?: Knex;
  }): Promise<TFolderCommits[]> => {
    try {
      const docs = await (tx || db.replicaNode())(TableName.FolderCommit)
        .where((qb) => {
          if (envId) {
            void qb.where(`${TableName.FolderCommit}.envId`, "=", envId);
          }
          if (startCommitId) {
            void qb.where("commitId", ">=", startCommitId);
          }
        })
        .select(selectAllTableCols(TableName.FolderCommit))
        .orderBy("commitId", "desc");
      return docs;
    } catch (error) {
      throw new DatabaseError({ error, name: "FindAllFolderCommitsAfter" });
    }
  };

  const findPreviousCommitTo = async (
    folderId: string,
    commitId: string,
    tx?: Knex
  ): Promise<TFolderCommits | undefined> => {
    try {
      const doc = await (tx || db.replicaNode())(TableName.FolderCommit)
        .where({ folderId })
        .where("commitId", "<=", commitId)
        .select(selectAllTableCols(TableName.FolderCommit))
        .orderBy("commitId", "desc")
        .first();
      return doc;
    } catch (error) {
      throw new DatabaseError({ error, name: "FindPreviousCommitTo" });
    }
  };

  const findById = async (id: string, tx?: Knex, projectId?: string): Promise<TFolderCommits> => {
    try {
      const doc = await (tx || db.replicaNode())(TableName.FolderCommit)
        // eslint-disable-next-line @typescript-eslint/no-misused-promises
        .where(buildFindFilter({ id }, TableName.FolderCommit))
        .leftJoin<TProjectEnvironments>(
          TableName.Environment,
          `${TableName.FolderCommit}.envId`,
          `${TableName.Environment}.id`
        )
        .where((qb) => {
          if (projectId) {
            void qb.where(`${TableName.Environment}.projectId`, "=", projectId);
          }
        })
        .select(selectAllTableCols(TableName.FolderCommit))
        .orderBy("commitId", "desc")
        .first();
      if (!doc) {
        throw new NotFoundError({
          message: `Folder commit not found for ID ${id}`
        });
      }
      return doc;
    } catch (error) {
      throw new DatabaseError({ error, name: "FindById" });
    }
  };

  const findByFolderIdPaginated = async (
    folderId: string,
    options: {
      offset?: number;
      limit?: number;
      search?: string;
      sort?: "asc" | "desc";
    } = {},
    tx?: Knex
  ): Promise<{
    commits: TFolderCommits[];
    total: number;
    hasMore: boolean;
  }> => {
    try {
      const { offset = 0, limit = 20, search, sort = "desc" } = options;
      const trx = tx || db.replicaNode();

      // Build base query
      let baseQuery = trx(TableName.FolderCommit).where({ folderId });

      // Add search functionality
      if (search) {
        baseQuery = baseQuery.where((qb) => {
          void qb.whereILike("message", `%${search}%`);
        });
      }

      // Get total count
      const totalResult = await baseQuery.clone().count("*", { as: "count" }).first();
      const total = Number(totalResult?.count || 0);

      // Get paginated commits
      const folderCommits = await baseQuery.select("*").orderBy("createdAt", sort).limit(limit).offset(offset);

      if (folderCommits.length === 0) {
        return { commits: [], total, hasMore: false };
      }

      // Get all commit IDs for changes
      const commitIds = folderCommits.map((commit) => commit.id);

      // Get all related changes
      const changes = await trx(TableName.FolderCommitChanges).whereIn("folderCommitId", commitIds).select("*");

      const changesMap = changes.reduce(
        (acc, change) => {
          const { folderCommitId } = change;
          if (!acc[folderCommitId]) acc[folderCommitId] = [];
          acc[folderCommitId].push(change);
          return acc;
        },
        {} as Record<string, TFolderCommitChanges[]>
      );

      const commitsWithChanges = folderCommits.map((commit) => ({
        ...commit,
        changes: changesMap[commit.id] || []
      }));

      const hasMore = offset + limit < total;

      return {
        commits: commitsWithChanges,
        total,
        hasMore
      };
    } catch (error) {
      throw new DatabaseError({ error, name: "FindByFolderIdPaginated" });
    }
  };

  const findCommitBefore = async (
    folderId: string,
    commitId: bigint,
    tx?: Knex
  ): Promise<TFolderCommits | undefined> => {
    try {
      const doc = await (tx || db.replicaNode())(TableName.FolderCommit)
        .where({ folderId })
        .where("commitId", "<", commitId.toString())
        .select(selectAllTableCols(TableName.FolderCommit))
        .orderBy("commitId", "desc")
        .first();
      return doc;
    } catch (error) {
      throw new DatabaseError({ error, name: "FindCommitBefore" });
    }
  };

  return {
    ...restOfOrm,
    findByFolderId,
    findLatestCommit,
    getNumberOfCommitsSince,
    findCommitsToRecreate,
    findMultipleLatestCommits,
    findAllCommitsBetween,
    findLatestCommitBetween,
    findLatestEnvCommit,
    getEnvNumberOfCommitsSince,
    findLatestCommitByFolderIds,
    findAllFolderCommitsAfter,
    findPreviousCommitTo,
    findById,
    findByFolderIdPaginated,
    findCommitBefore
  };
};
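findLatestCommitByFolderIds and findMultipleLatestCommits both implement the classic greatest-per-group pattern: a MAX(commitId) subquery grouped by folderId, joined back to the main table to recover the full rows. The same query distilled against a plain knex instance; the literal table name below is a placeholder for the TableName enum value used in the file.

// Greatest-per-group sketch; "folder_commits" is a placeholder table name.
import { Knex } from "knex";

async function latestCommitPerFolder(db: Knex, folderIds: string[]) {
  // Subquery: the highest commitId per folder in the given set.
  const latest = db("folder_commits")
    .select("folderId")
    .max("commitId as maxCommitId")
    .whereIn("folderId", folderIds)
    .groupBy("folderId");

  // Join back to recover complete rows for each per-folder maximum.
  return db("folder_commits")
    .innerJoin(latest.as("latest"), function joinLatest() {
      this.on("folder_commits.folderId", "=", "latest.folderId").andOn(
        "folder_commits.commitId",
        "=",
        "latest.maxCommitId"
      );
    })
    .select("folder_commits.*");
}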
backend/src/services/folder-commit/folder-commit-queue.ts (new file, 282 lines)
@@ -0,0 +1,282 @@
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { TSecretFolders } from "@app/db/schemas";
|
||||
import { KeyStorePrefixes, TKeyStoreFactory } from "@app/keystore/keystore";
|
||||
import { getConfig } from "@app/lib/config/env";
|
||||
import { logger } from "@app/lib/logger";
|
||||
import { QueueJobs, QueueName, TQueueServiceFactory } from "@app/queue";
|
||||
|
||||
import { TFolderTreeCheckpointDALFactory } from "../folder-tree-checkpoint/folder-tree-checkpoint-dal";
|
||||
import { TFolderTreeCheckpointResourcesDALFactory } from "../folder-tree-checkpoint-resources/folder-tree-checkpoint-resources-dal";
|
||||
import { TSecretFolderDALFactory } from "../secret-folder/secret-folder-dal";
|
||||
import { TFolderCommitDALFactory } from "./folder-commit-dal";
|
||||
|
||||
// Define types for job data
|
||||
type TCreateFolderTreeCheckpointDTO = {
|
||||
envId: string;
|
||||
failedToAcquireLockCount?: number;
|
||||
folderCommitId?: string;
|
||||
};
|
||||
|
||||
type TFolderCommitQueueServiceFactoryDep = {
|
||||
queueService: TQueueServiceFactory;
|
||||
keyStore: Pick<TKeyStoreFactory, "acquireLock" | "getItem" | "deleteItem">;
|
||||
folderTreeCheckpointDAL: Pick<
|
||||
TFolderTreeCheckpointDALFactory,
|
||||
"create" | "findLatestByEnvId" | "findNearestCheckpoint"
|
||||
>;
|
||||
folderTreeCheckpointResourcesDAL: Pick<
|
||||
TFolderTreeCheckpointResourcesDALFactory,
|
||||
"insertMany" | "findByTreeCheckpointId"
|
||||
>;
|
||||
folderCommitDAL: Pick<
|
||||
TFolderCommitDALFactory,
|
||||
"findLatestEnvCommit" | "getEnvNumberOfCommitsSince" | "findMultipleLatestCommits" | "findById"
|
||||
>;
|
||||
folderDAL: Pick<TSecretFolderDALFactory, "findByEnvId">;
|
||||
};
|
||||
|
||||
export type TFolderCommitQueueServiceFactory = ReturnType<typeof folderCommitQueueServiceFactory>;
|
||||
|
||||
export const folderCommitQueueServiceFactory = ({
|
||||
queueService,
|
||||
keyStore,
|
||||
folderTreeCheckpointDAL,
|
||||
folderTreeCheckpointResourcesDAL,
|
||||
folderCommitDAL,
|
||||
folderDAL
|
||||
}: TFolderCommitQueueServiceFactoryDep) => {
|
||||
const appCfg = getConfig();
|
||||
|
||||
// Helper function to calculate delay for requeuing
|
||||
const getRequeueDelay = (failureCount?: number) => {
|
||||
if (!failureCount) return 0;
|
||||
|
||||
const baseDelay = 5000;
|
||||
const maxDelay = 30000;
|
||||
|
||||
const delay = Math.min(baseDelay * 2 ** failureCount, maxDelay);
|
||||
const jitter = delay * (0.5 + Math.random() * 0.5);
|
||||
|
||||
return jitter;
|
||||
};
|
||||
|
||||
const scheduleTreeCheckpoint = async (payload: TCreateFolderTreeCheckpointDTO) => {
|
||||
const { envId, failedToAcquireLockCount = 0 } = payload;
|
||||
|
||||
// Create a unique jobId for each retry to prevent conflicts
|
||||
const jobId =
|
||||
failedToAcquireLockCount > 0 ? `${envId}-retry-${failedToAcquireLockCount}-${Date.now()}` : `${envId}`;
|
||||
|
||||
await queueService.queue(QueueName.FolderTreeCheckpoint, QueueJobs.CreateFolderTreeCheckpoint, payload, {
|
||||
jobId,
|
||||
delay: getRequeueDelay(failedToAcquireLockCount),
|
||||
backoff: {
|
||||
type: "exponential",
|
||||
delay: 3000
|
||||
},
|
||||
removeOnFail: {
|
||||
count: 3
|
||||
},
|
||||
removeOnComplete: true
|
||||
});
|
||||
};
|
||||
|
||||
// Sort folders by hierarchy (copied from the source code)
|
||||
const sortFoldersByHierarchy = (folders: TSecretFolders[]) => {
|
||||
const childrenMap = new Map<string, TSecretFolders[]>();
|
||||
const allFolderIds = new Set<string>();
|
||||
|
||||
folders.forEach((folder) => {
|
||||
if (folder.id) allFolderIds.add(folder.id);
|
||||
});
|
||||
|
||||
folders.forEach((folder) => {
|
||||
if (folder.parentId) {
|
||||
const children = childrenMap.get(folder.parentId) || [];
|
||||
children.push(folder);
|
||||
childrenMap.set(folder.parentId, children);
|
||||
}
|
||||
});
|
||||
|
||||
const rootFolders = folders.filter((folder) => !folder.parentId || !allFolderIds.has(folder.parentId));
|
||||
|
||||
const result = [];
|
||||
let currentLevel = rootFolders;
|
||||
|
||||
while (currentLevel.length > 0) {
|
||||
result.push(...currentLevel);
|
||||
|
||||
const nextLevel = [];
|
||||
for (const folder of currentLevel) {
|
||||
if (folder.id) {
|
||||
const children = childrenMap.get(folder.id) || [];
|
||||
nextLevel.push(...children);
|
||||
}
|
||||
}
|
||||
|
||||
currentLevel = nextLevel;
|
||||
}
|
||||
|
||||
return result;
|
||||
};
|
||||
|
||||
const createFolderTreeCheckpoint = async (jobData: TCreateFolderTreeCheckpointDTO, tx?: Knex) => {
|
||||
const { envId, folderCommitId, failedToAcquireLockCount = 0 } = jobData;
|
||||
|
||||
logger.info(`Folder tree checkpoint creation started [envId=${envId}] [attempt=${failedToAcquireLockCount + 1}]`);
|
||||
|
||||
// First, try to clear any stale locks before attempting to acquire
|
||||
if (failedToAcquireLockCount > 1) {
|
||||
try {
|
||||
        await keyStore.deleteItem(KeyStorePrefixes.FolderTreeCheckpoint(envId));
        logger.info(`Cleared potential stale lock for envId ${envId} before attempt ${failedToAcquireLockCount + 1}`);
      } catch (error) {
        // This is fine if it fails, we'll still try to acquire the lock
        logger.info(`No stale lock found for envId ${envId}`);
      }
    }

    let lock: Awaited<ReturnType<typeof keyStore.acquireLock>> | undefined;

    try {
      // Attempt to acquire the lock with a shorter timeout for first attempts
      const timeout = failedToAcquireLockCount > 3 ? 60 * 1000 : 15 * 1000;

      logger.info(`Attempting to acquire lock for envId=${envId} with timeout ${timeout}ms`);

      lock = await keyStore.acquireLock([KeyStorePrefixes.FolderTreeCheckpoint(envId)], timeout);

      logger.info(`Successfully acquired lock for envId=${envId}`);
    } catch (e) {
      logger.info(
        `Failed to acquire lock for folder tree checkpoint [envId=${envId}] [attempt=${failedToAcquireLockCount + 1}]`
      );

      // Requeue with incremented failure count if under max attempts
      if (failedToAcquireLockCount < 10) {
        // Force a delay between retries
        const nextRetryCount = failedToAcquireLockCount + 1;

        logger.info(`Scheduling retry #${nextRetryCount} for folder tree checkpoint [envId=${envId}]`);

        // Create a new job with incremented counter
        await scheduleTreeCheckpoint({
          envId,
          folderCommitId,
          failedToAcquireLockCount: nextRetryCount
        });
      } else {
        // Max retries reached
        logger.error(`Maximum lock acquisition attempts (10) reached for envId ${envId}. Giving up.`);
        // Try to force-clear the lock for next time
        try {
          await keyStore.deleteItem(KeyStorePrefixes.FolderTreeCheckpoint(envId));
        } catch (clearError) {
          logger.error(clearError, `Failed to clear lock after maximum retries for envId=${envId}`);
        }
      }
      return;
    }

    if (!lock) {
      logger.error(`Lock is undefined after acquisition for envId=${envId}. This should never happen.`);
      return;
    }

    try {
      logger.info(`Processing tree checkpoint data for envId=${envId}`);

      const latestTreeCheckpoint = await folderTreeCheckpointDAL.findLatestByEnvId(envId, tx);

      let latestCommit;
      if (folderCommitId) {
        latestCommit = await folderCommitDAL.findById(folderCommitId, tx);
      } else {
        latestCommit = await folderCommitDAL.findLatestEnvCommit(envId, tx);
      }
      if (!latestCommit) {
        logger.info(`Latest commit ID not found for envId ${envId}`);
        return;
      }
      const latestCommitId = latestCommit.id;

      if (latestTreeCheckpoint) {
        const commitsSinceLastCheckpoint = await folderCommitDAL.getEnvNumberOfCommitsSince(
          envId,
          latestTreeCheckpoint.folderCommitId,
          tx
        );
        if (commitsSinceLastCheckpoint < Number(appCfg.PIT_TREE_CHECKPOINT_WINDOW)) {
          logger.info(
            `Commits since last checkpoint ${commitsSinceLastCheckpoint} is less than ${appCfg.PIT_TREE_CHECKPOINT_WINDOW}`
          );
          return;
        }
      }

      const folders = await folderDAL.findByEnvId(envId, tx);
      const sortedFolders = sortFoldersByHierarchy(folders);
      const filteredFoldersIds = sortedFolders.filter((folder) => !folder.isReserved).map((folder) => folder.id);

      const folderCommits = await folderCommitDAL.findMultipleLatestCommits(filteredFoldersIds, tx);
      const folderTreeCheckpoint = await folderTreeCheckpointDAL.create(
        {
          folderCommitId: latestCommitId
        },
        tx
      );

      await folderTreeCheckpointResourcesDAL.insertMany(
        folderCommits.map((folderCommit) => ({
          folderTreeCheckpointId: folderTreeCheckpoint.id,
          folderId: folderCommit.folderId,
          folderCommitId: folderCommit.id
        })),
        tx
      );

      logger.info(`Folder tree checkpoint created successfully: ${folderTreeCheckpoint.id}`);
    } catch (error) {
      logger.error(error, `Error processing folder tree checkpoint [envId=${envId}]`);
      throw error;
    } finally {
      // Always release the lock
      try {
        if (lock) {
          await lock.release();
          logger.info(`Released lock for folder tree checkpoint [envId=${envId}]`);
        } else {
          logger.error(`No lock to release for envId=${envId}. This should never happen.`);
        }
      } catch (releaseError) {
        logger.error(releaseError, `Error releasing lock for folder tree checkpoint [envId=${envId}]`);
        // Try to force delete the lock if release fails
        try {
          await keyStore.deleteItem(KeyStorePrefixes.FolderTreeCheckpoint(envId));
          logger.info(`Force deleted lock after release failure for envId=${envId}`);
        } catch (deleteError) {
          logger.error(deleteError, `Failed to force delete lock after release failure for envId=${envId}`);
        }
      }
    }
  };

  queueService.start(QueueName.FolderTreeCheckpoint, async (job) => {
    try {
      if (job.name === QueueJobs.CreateFolderTreeCheckpoint) {
        const jobData = job.data as TCreateFolderTreeCheckpointDTO;
        await createFolderTreeCheckpoint(jobData);
      }
    } catch (error) {
      logger.error(error, "Error creating folder tree checkpoint:");
      throw error;
    }
  });

  return {
    scheduleTreeCheckpoint: (envId: string) => scheduleTreeCheckpoint({ envId }),
    createFolderTreeCheckpoint: (envId: string, folderCommitId?: string, tx?: Knex) =>
      createFolderTreeCheckpoint({ envId, folderCommitId }, tx)
  };
};
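A minimal sketch of the requeue-based lock retry pattern used above, assuming a generic lock/queue interface (the names RetryDeps and withRetriedLock are illustrative, not part of the codebase): acquisition failures are not retried in-process; the job re-enqueues itself with an incremented counter, escalates the lock timeout after a few attempts, and gives up after ten.

type RetryDeps = {
  acquireLock: (key: string, timeoutMs: number) => Promise<{ release: () => Promise<void> }>;
  requeue: (attempt: number) => Promise<void>;
};

const withRetriedLock = async (key: string, attempt: number, deps: RetryDeps, work: () => Promise<void>) => {
  // Escalate the lock timeout once a job has already failed a few times
  const timeoutMs = attempt > 3 ? 60_000 : 15_000;
  let lock: { release: () => Promise<void> } | undefined;
  try {
    lock = await deps.acquireLock(key, timeoutMs);
  } catch {
    // Hand the work back to the queue instead of blocking the worker
    if (attempt < 10) await deps.requeue(attempt + 1);
    return;
  }
  try {
    await work();
  } finally {
    // Release in finally so a failed job cannot wedge later checkpoints
    await lock.release();
  }
};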
143
backend/src/services/folder-commit/folder-commit-schemas.ts
Normal file
@@ -0,0 +1,143 @@
import { z } from "zod";

// Base schema shared by both secret and folder changes
const baseChangeSchema = z.object({
  id: z.string(),
  folderCommitId: z.string(),
  changeType: z.string(),
  isUpdate: z.boolean().optional(),
  createdAt: z.union([z.string(), z.date()]),
  updatedAt: z.union([z.string(), z.date()]),
  actorMetadata: z
    .union([
      z.object({
        id: z.string().optional(),
        name: z.string().optional()
      }),
      z.unknown()
    ])
    .optional(),
  actorType: z.string(),
  message: z.string().nullable().optional(),
  folderId: z.string()
});

// Secret-specific versions schema
const secretVersionSchema = z.object({
  secretKey: z.string(),
  secretComment: z.string(),
  skipMultilineEncoding: z.boolean().nullable().optional(),
  tags: z.array(z.string()).nullable().optional(),
  metadata: z.unknown().nullable().optional(),
  secretValue: z.string()
});

// Folder-specific versions schema
const folderVersionSchema = z.object({
  version: z.string().optional(),
  name: z.string().optional(),
  description: z.string().optional().nullable()
});

// Secret commit change schema
const secretCommitChangeSchema = baseChangeSchema.extend({
  resourceType: z.literal("secret"),
  secretVersionId: z.string().optional().nullable(),
  secretKey: z.string(),
  secretVersion: z.union([z.string(), z.number()]),
  secretId: z.string(),
  versions: z.array(secretVersionSchema).optional()
});

// Folder commit change schema
const folderCommitChangeSchema = baseChangeSchema.extend({
  resourceType: z.literal("folder"),
  folderVersionId: z.string().optional().nullable(),
  folderName: z.string(),
  folderChangeId: z.string(),
  folderVersion: z.union([z.string(), z.number()]),
  versions: z.array(folderVersionSchema).optional()
});

// Discriminated union for commit changes
export const commitChangeSchema = z.discriminatedUnion("resourceType", [
  secretCommitChangeSchema,
  folderCommitChangeSchema
]);

// Commit schema
const commitSchema = z.object({
  id: z.string(),
  commitId: z.string(),
  actorMetadata: z
    .union([
      z.object({
        id: z.string().optional(),
        name: z.string().optional()
      }),
      z.unknown()
    ])
    .optional(),
  actorType: z.string(),
  message: z.string().nullable().optional(),
  folderId: z.string(),
  envId: z.string(),
  createdAt: z.union([z.string(), z.date()]),
  updatedAt: z.union([z.string(), z.date()]),
  isLatest: z.boolean().default(false),
  changes: z.array(commitChangeSchema).optional()
});

// Response schema
export const commitChangesResponseSchema = z.object({
  changes: commitSchema
});

// Base resource change schema for comparison results
const baseResourceChangeSchema = z.object({
  id: z.string(),
  versionId: z.string(),
  oldVersionId: z.string().optional(),
  changeType: z.enum(["add", "delete", "update", "create"]),
  commitId: z.union([z.string(), z.bigint()]),
  createdAt: z.union([z.string(), z.date()]).optional(),
  parentId: z.string().optional(),
  isUpdate: z.boolean().optional(),
  fromVersion: z.union([z.string(), z.number()]).optional()
});

// Secret resource change schema
const secretResourceChangeSchema = baseResourceChangeSchema.extend({
  type: z.literal("secret"),
  secretKey: z.string(),
  secretVersion: z.union([z.string(), z.number()]),
  secretId: z.string(),
  versions: z
    .array(
      z.object({
        secretKey: z.string().optional(),
        secretComment: z.string().optional(),
        skipMultilineEncoding: z.boolean().nullable().optional(),
        secretReminderRepeatDays: z.number().nullable().optional(),
        tags: z.array(z.string()).nullable().optional(),
        metadata: z.unknown().nullable().optional(),
        secretReminderNote: z.string().nullable().optional(),
        secretValue: z.string().optional()
      })
    )
    .optional()
});

// Folder resource change schema
const folderResourceChangeSchema = baseResourceChangeSchema.extend({
  type: z.literal("folder"),
  folderName: z.string(),
  folderVersion: z.union([z.string(), z.number()]),
  versions: z.array(folderVersionSchema).optional()
});

// Discriminated union for resource changes
export const resourceChangeSchema = z.discriminatedUnion("type", [
  secretResourceChangeSchema,
  folderResourceChangeSchema
]);
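A hedged example of consuming these schemas — parsing a payload and narrowing on the `resourceType` discriminator (the sample values are made up):

import { commitChangeSchema } from "./folder-commit-schemas";

const change = commitChangeSchema.parse({
  id: "chg_1",
  folderCommitId: "cmt_1",
  changeType: "add",
  createdAt: new Date().toISOString(),
  updatedAt: new Date().toISOString(),
  actorType: "user",
  folderId: "fld_1",
  resourceType: "secret",
  secretKey: "DB_PASSWORD",
  secretVersion: 1,
  secretId: "sec_1"
});

// The "resourceType" literal narrows the union, so secret-only fields are typed here
if (change.resourceType === "secret") {
  console.log(change.secretKey);
}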
671
backend/src/services/folder-commit/folder-commit-service.test.ts
Normal file
@@ -0,0 +1,671 @@
/* eslint-disable @typescript-eslint/no-unsafe-call */
/* eslint-disable @typescript-eslint/return-await */
/* eslint-disable @typescript-eslint/no-unsafe-return */
import { Knex } from "knex";
import { afterEach, beforeEach, describe, expect, it, vi } from "vitest";

import { ProjectType, TSecretFolderVersions, TSecretVersionsV2 } from "@app/db/schemas";
import { BadRequestError, NotFoundError } from "@app/lib/errors";

import { ActorType } from "../auth/auth-type";
import {
  ChangeType,
  CommitType,
  folderCommitServiceFactory,
  ResourceChange,
  TFolderCommitServiceFactory
} from "./folder-commit-service";

// Mock config
vi.mock("@app/lib/config/env", () => ({
  getConfig: () => ({
    PIT_CHECKPOINT_WINDOW: 5,
    PIT_TREE_CHECKPOINT_WINDOW: 10
  })
}));

// Mock logger
vi.mock("@app/lib/logger", () => ({
  logger: {
    info: vi.fn(),
    error: vi.fn()
  }
}));

describe("folderCommitServiceFactory", () => {
  // Properly type the mock functions
  type TransactionCallback<T> = (trx: Knex) => Promise<T>;

  // Mock dependencies
  const mockFolderCommitDAL = {
    create: vi.fn().mockResolvedValue({}),
    findById: vi.fn().mockResolvedValue({}),
    findByFolderId: vi.fn().mockResolvedValue([]),
    findLatestCommit: vi.fn().mockResolvedValue({}),
    transaction: vi.fn().mockImplementation(<T>(callback: TransactionCallback<T>) => callback({} as Knex)),
    getNumberOfCommitsSince: vi.fn().mockResolvedValue(0),
    getEnvNumberOfCommitsSince: vi.fn().mockResolvedValue(0),
    findCommitsToRecreate: vi.fn().mockResolvedValue([]),
    findMultipleLatestCommits: vi.fn().mockResolvedValue([]),
    findLatestCommitBetween: vi.fn().mockResolvedValue({}),
    findAllCommitsBetween: vi.fn().mockResolvedValue([]),
    findLatestEnvCommit: vi.fn().mockResolvedValue({}),
    findLatestCommitByFolderIds: vi.fn().mockResolvedValue({})
  };

  const mockKmsService = {
    createCipherPairWithDataKey: vi.fn().mockResolvedValue({})
  };

  const mockFolderCommitChangesDAL = {
    create: vi.fn().mockResolvedValue({}),
    findByCommitId: vi.fn().mockResolvedValue([]),
    insertMany: vi.fn().mockResolvedValue([])
  };

  const mockFolderCheckpointDAL = {
    create: vi.fn().mockResolvedValue({}),
    findByFolderId: vi.fn().mockResolvedValue([]),
    findLatestByFolderId: vi.fn().mockResolvedValue(null),
    findNearestCheckpoint: vi.fn().mockResolvedValue({})
  };

  const mockFolderCheckpointResourcesDAL = {
    insertMany: vi.fn().mockResolvedValue([]),
    findByCheckpointId: vi.fn().mockResolvedValue([])
  };

  const mockFolderTreeCheckpointDAL = {
    create: vi.fn().mockResolvedValue({}),
    findByProjectId: vi.fn().mockResolvedValue([]),
    findLatestByProjectId: vi.fn().mockResolvedValue({}),
    findNearestCheckpoint: vi.fn().mockResolvedValue({}),
    findLatestByEnvId: vi.fn().mockResolvedValue({})
  };

  const mockFolderTreeCheckpointResourcesDAL = {
    insertMany: vi.fn().mockResolvedValue([]),
    findByTreeCheckpointId: vi.fn().mockResolvedValue([])
  };

  const mockUserDAL = {
    findById: vi.fn().mockResolvedValue({})
  };

  const mockIdentityDAL = {
    findById: vi.fn().mockResolvedValue({})
  };

  const mockFolderDAL = {
    findByParentId: vi.fn().mockResolvedValue([]),
    findByProjectId: vi.fn().mockResolvedValue([]),
    deleteById: vi.fn().mockResolvedValue({}),
    create: vi.fn().mockResolvedValue({}),
    updateById: vi.fn().mockResolvedValue({}),
    update: vi.fn().mockResolvedValue({}),
    find: vi.fn().mockResolvedValue([]),
    findById: vi.fn().mockResolvedValue({}),
    findByEnvId: vi.fn().mockResolvedValue([]),
    findFoldersByRootAndIds: vi.fn().mockResolvedValue([])
  };

  const mockFolderVersionDAL = {
    findLatestFolderVersions: vi.fn().mockResolvedValue({}),
    findById: vi.fn().mockResolvedValue({}),
    deleteById: vi.fn().mockResolvedValue({}),
    create: vi.fn().mockResolvedValue({}),
    updateById: vi.fn().mockResolvedValue({}),
    find: vi.fn().mockResolvedValue({}), // Changed from [] to {} to match Object.values() expectation
    findByIdsWithLatestVersion: vi.fn().mockResolvedValue({})
  };

  const mockSecretVersionV2BridgeDAL = {
    findLatestVersionByFolderId: vi.fn().mockResolvedValue([]),
    findById: vi.fn().mockResolvedValue({}),
    deleteById: vi.fn().mockResolvedValue({}),
    create: vi.fn().mockResolvedValue({}),
    updateById: vi.fn().mockResolvedValue({}),
    find: vi.fn().mockResolvedValue([]),
    findByIdsWithLatestVersion: vi.fn().mockResolvedValue({}),
    findLatestVersionMany: vi.fn().mockResolvedValue({})
  };

  const mockSecretV2BridgeDAL = {
    deleteById: vi.fn().mockResolvedValue({}),
    create: vi.fn().mockResolvedValue({}),
    updateById: vi.fn().mockResolvedValue({}),
    update: vi.fn().mockResolvedValue({}),
    insertMany: vi.fn().mockResolvedValue([]),
    invalidateSecretCacheByProjectId: vi.fn().mockResolvedValue({})
  };

  const mockProjectDAL = {
    findById: vi.fn().mockResolvedValue({}),
    findProjectByEnvId: vi.fn().mockResolvedValue({})
  };

  const mockFolderCommitQueueService = {
    scheduleTreeCheckpoint: vi.fn().mockResolvedValue({}),
    createFolderTreeCheckpoint: vi.fn().mockResolvedValue({})
  };

  const mockPermissionService = {
    getProjectPermission: vi.fn().mockResolvedValue({})
  };

  const mockSecretTagDAL = {
    findSecretTagsByVersionId: vi.fn().mockResolvedValue([]),
    saveTagsToSecretV2: vi.fn().mockResolvedValue([]),
    findSecretTagsBySecretId: vi.fn().mockResolvedValue([]),
    deleteTagsToSecretV2: vi.fn().mockResolvedValue([]),
    saveTagsToSecretVersionV2: vi.fn().mockResolvedValue([])
  };

  const mockResourceMetadataDAL = {
    find: vi.fn().mockResolvedValue([]),
    insertMany: vi.fn().mockResolvedValue([]),
    delete: vi.fn().mockResolvedValue([])
  };

  let folderCommitService: TFolderCommitServiceFactory;

  beforeEach(() => {
    vi.clearAllMocks();

    folderCommitService = folderCommitServiceFactory({
      // @ts-expect-error - Mock implementation doesn't need all interface methods for testing
      folderCommitDAL: mockFolderCommitDAL,
      // @ts-expect-error - Mock implementation doesn't need all interface methods for testing
      folderCommitChangesDAL: mockFolderCommitChangesDAL,
      // @ts-expect-error - Mock implementation doesn't need all interface methods for testing
      folderCheckpointDAL: mockFolderCheckpointDAL,
      // @ts-expect-error - Mock implementation doesn't need all interface methods for testing
      folderCheckpointResourcesDAL: mockFolderCheckpointResourcesDAL,
      // @ts-expect-error - Mock implementation doesn't need all interface methods for testing
      folderTreeCheckpointDAL: mockFolderTreeCheckpointDAL,
      // @ts-expect-error - Mock implementation doesn't need all interface methods for testing
      folderTreeCheckpointResourcesDAL: mockFolderTreeCheckpointResourcesDAL,
      // @ts-expect-error - Mock implementation doesn't need all interface methods for testing
      userDAL: mockUserDAL,
      // @ts-expect-error - Mock implementation doesn't need all interface methods for testing
      identityDAL: mockIdentityDAL,
      // @ts-expect-error - Mock implementation doesn't need all interface methods for testing
      folderDAL: mockFolderDAL,
      // @ts-expect-error - Mock implementation doesn't need all interface methods for testing
      folderVersionDAL: mockFolderVersionDAL,
      // @ts-expect-error - Mock implementation doesn't need all interface methods for testing
      secretVersionV2BridgeDAL: mockSecretVersionV2BridgeDAL,
      projectDAL: mockProjectDAL,
      // @ts-expect-error - Mock implementation doesn't need all interface methods for testing
      secretV2BridgeDAL: mockSecretV2BridgeDAL,
      folderCommitQueueService: mockFolderCommitQueueService,
      // @ts-expect-error - Mock implementation doesn't need all interface methods for testing
      permissionService: mockPermissionService,
      kmsService: mockKmsService,
      secretTagDAL: mockSecretTagDAL,
      resourceMetadataDAL: mockResourceMetadataDAL
    });
  });

  afterEach(() => {
    vi.resetAllMocks();
  });

  describe("createCommit", () => {
    it("should successfully create a commit with user actor", async () => {
      // Arrange
      const userData = { id: "user-id", username: "testuser" };
      const folderData = { id: "folder-id", envId: "env-id" };
      const commitData = { id: "commit-id", folderId: "folder-id" };

      mockUserDAL.findById.mockResolvedValue(userData);
      mockFolderDAL.findById.mockResolvedValue(folderData);
      mockFolderCommitDAL.create.mockResolvedValue(commitData);
      mockFolderCheckpointDAL.findLatestByFolderId.mockResolvedValue(null);
      mockFolderCommitDAL.findLatestCommit.mockResolvedValue({ id: "latest-commit-id" });
      mockFolderDAL.findByParentId.mockResolvedValue([]);
      mockSecretVersionV2BridgeDAL.findLatestVersionByFolderId.mockResolvedValue([]);

      const data = {
        actor: {
          type: ActorType.USER,
          metadata: { id: userData.id }
        },
        message: "Test commit",
        folderId: folderData.id,
        changes: [
          {
            type: CommitType.ADD,
            secretVersionId: "secret-version-1"
          }
        ]
      };

      // Act
      const result = await folderCommitService.createCommit(data);

      // Assert
      expect(mockUserDAL.findById).toHaveBeenCalledWith(userData.id, undefined);
      expect(mockFolderDAL.findById).toHaveBeenCalledWith(folderData.id, undefined);
      expect(mockFolderCommitDAL.create).toHaveBeenCalledWith(
        expect.objectContaining({
          actorType: ActorType.USER,
          // eslint-disable-next-line @typescript-eslint/no-unsafe-assignment
          actorMetadata: expect.objectContaining({ name: userData.username }),
          message: data.message,
          folderId: data.folderId,
          envId: folderData.envId
        }),
        undefined
      );
      expect(mockFolderCommitChangesDAL.insertMany).toHaveBeenCalledWith(
        expect.arrayContaining([
          expect.objectContaining({
            folderCommitId: commitData.id,
            changeType: data.changes[0].type,
            secretVersionId: data.changes[0].secretVersionId
          })
        ]),
        undefined
      );
      expect(mockFolderCommitQueueService.scheduleTreeCheckpoint).toHaveBeenCalledWith(folderData.envId);
      expect(result).toEqual(commitData);
    });

    it("should successfully create a commit with identity actor", async () => {
      // Arrange
      const identityData = { id: "identity-id", name: "testidentity" };
      const folderData = { id: "folder-id", envId: "env-id" };
      const commitData = { id: "commit-id", folderId: "folder-id" };

      mockIdentityDAL.findById.mockResolvedValue(identityData);
      mockFolderDAL.findById.mockResolvedValue(folderData);
      mockFolderCommitDAL.create.mockResolvedValue(commitData);
      mockFolderCheckpointDAL.findLatestByFolderId.mockResolvedValue(null);
      mockFolderCommitDAL.findLatestCommit.mockResolvedValue({ id: "latest-commit-id" });
      mockFolderDAL.findByParentId.mockResolvedValue([]);
      mockSecretVersionV2BridgeDAL.findLatestVersionByFolderId.mockResolvedValue([]);

      // Mock folderVersionDAL.find to return an object with folder version data
      mockFolderVersionDAL.find.mockResolvedValue({
        "folder-version-1": {
          id: "folder-version-1",
          folderId: "sub-folder-id",
          envId: "env-id",
          name: "Test Folder",
          version: 1
        }
      });

      const data = {
        actor: {
          type: ActorType.IDENTITY,
          metadata: { id: identityData.id }
        },
        message: "Test commit",
        folderId: folderData.id,
        changes: [
          {
            type: CommitType.ADD,
            folderVersionId: "folder-version-1"
          }
        ],
        omitIgnoreFilter: true
      };

      // Act
      const result = await folderCommitService.createCommit(data);

      // Assert
      expect(mockIdentityDAL.findById).toHaveBeenCalledWith(identityData.id, undefined);
      expect(mockFolderDAL.findById).toHaveBeenCalledWith(folderData.id, undefined);
      expect(mockFolderCommitDAL.create).toHaveBeenCalledWith(
        expect.objectContaining({
          actorType: ActorType.IDENTITY,
          // eslint-disable-next-line @typescript-eslint/no-unsafe-assignment
          actorMetadata: expect.objectContaining({ name: identityData.name }),
          message: data.message,
          folderId: data.folderId,
          envId: folderData.envId
        }),
        undefined
      );
      expect(mockFolderCommitChangesDAL.insertMany).toHaveBeenCalledWith(
        expect.arrayContaining([
          expect.objectContaining({
            folderCommitId: commitData.id,
            changeType: data.changes[0].type,
            folderVersionId: data.changes[0].folderVersionId
          })
        ]),
        undefined
      );
      expect(mockFolderCommitQueueService.scheduleTreeCheckpoint).toHaveBeenCalledWith(folderData.envId);
      expect(result).toEqual(commitData);
    });

    it("should throw NotFoundError when folder does not exist", async () => {
      // Arrange
      mockFolderDAL.findById.mockResolvedValue(null);

      const data = {
        actor: {
          type: ActorType.PLATFORM
        },
        message: "Test commit",
        folderId: "non-existent-folder",
        changes: []
      };

      // Act & Assert
      await expect(folderCommitService.createCommit(data)).rejects.toThrow(NotFoundError);
      expect(mockFolderDAL.findById).toHaveBeenCalledWith("non-existent-folder", undefined);
    });
  });

  describe("addCommitChange", () => {
    it("should successfully add a change to an existing commit", async () => {
      // Arrange
      const commitData = { id: "commit-id", folderId: "folder-id" };
      const changeData = { id: "change-id", folderCommitId: "commit-id" };

      mockFolderCommitDAL.findById.mockResolvedValue(commitData);
      mockFolderCommitChangesDAL.create.mockResolvedValue(changeData);

      const data = {
        folderCommitId: commitData.id,
        changeType: CommitType.ADD,
        secretVersionId: "secret-version-1"
      };

      // Act
      const result = await folderCommitService.addCommitChange(data);

      // Assert
      expect(mockFolderCommitDAL.findById).toHaveBeenCalledWith(commitData.id, undefined);
      expect(mockFolderCommitChangesDAL.create).toHaveBeenCalledWith(data, undefined);
      expect(result).toEqual(changeData);
    });

    it("should throw BadRequestError when neither secretVersionId nor folderVersionId is provided", async () => {
      // Arrange
      const data = {
        folderCommitId: "commit-id",
        changeType: CommitType.ADD
      };

      // Act & Assert
      await expect(folderCommitService.addCommitChange(data)).rejects.toThrow(BadRequestError);
    });

    it("should throw NotFoundError when commit does not exist", async () => {
      // Arrange
      mockFolderCommitDAL.findById.mockResolvedValue(null);

      const data = {
        folderCommitId: "non-existent-commit",
        changeType: CommitType.ADD,
        secretVersionId: "secret-version-1"
      };

      // Act & Assert
      await expect(folderCommitService.addCommitChange(data)).rejects.toThrow(NotFoundError);
      expect(mockFolderCommitDAL.findById).toHaveBeenCalledWith("non-existent-commit", undefined);
    });
  });

  // Note: reconstructFolderState is an internal function not exposed in the public API
  // We'll test it indirectly through compareFolderStates

  describe("compareFolderStates", () => {
    it("should mark all resources as creates when currentCommitId is not provided", async () => {
      // Arrange
      const targetCommitId = "target-commit-id";
      const targetCommit = { id: targetCommitId, commitId: 1, folderId: "folder-id" };

      mockFolderCommitDAL.findById.mockResolvedValue(targetCommit);
      // Mock how compareFolderStates would process the results internally
      mockFolderCheckpointDAL.findNearestCheckpoint.mockResolvedValue({ id: "checkpoint-id", commitId: "hash-0" });
      mockFolderCheckpointResourcesDAL.findByCheckpointId.mockResolvedValue([
        { secretVersionId: "secret-version-1", referencedSecretId: "secret-1" },
        { folderVersionId: "folder-version-1", referencedFolderId: "folder-1" }
      ]);
      mockFolderCommitDAL.findCommitsToRecreate.mockResolvedValue([]);
      mockProjectDAL.findProjectByEnvId.mockResolvedValue({
        id: "project-id",
        name: "test-project",
        type: ProjectType.SecretManager
      });

      // Act
      const result = await folderCommitService.compareFolderStates({
        targetCommitId
      });

      // Assert
      expect(mockFolderCommitDAL.findById).toHaveBeenCalledWith(targetCommitId, undefined);

      // Verify we get resources marked as create
      expect(result).toEqual(
        expect.arrayContaining([
          expect.objectContaining({
            changeType: "create",
            commitId: targetCommit.commitId
          })
        ])
      );
    });
  });

  describe("createFolderCheckpoint", () => {
    it("should successfully create a checkpoint when force is true", async () => {
      // Arrange
      const folderCommitId = "commit-id";
      const folderId = "folder-id";
      const checkpointData = { id: "checkpoint-id", folderCommitId };

      mockFolderDAL.findByParentId.mockResolvedValue([{ id: "subfolder-id" }]);
      mockFolderVersionDAL.findLatestFolderVersions.mockResolvedValue({ "subfolder-id": { id: "folder-version-1" } });
      mockSecretVersionV2BridgeDAL.findLatestVersionByFolderId.mockResolvedValue([{ id: "secret-version-1" }]);
      mockFolderCheckpointDAL.create.mockResolvedValue(checkpointData);

      // Act
      const result = await folderCommitService.createFolderCheckpoint({
        folderId,
        folderCommitId,
        force: true
      });

      // Assert
      expect(mockFolderCheckpointDAL.create).toHaveBeenCalledWith({ folderCommitId }, undefined);
      expect(mockFolderCheckpointResourcesDAL.insertMany).toHaveBeenCalled();
      expect(result).toBe(folderCommitId);
    });
  });

  describe("deepRollbackFolder", () => {
    it("should throw NotFoundError when commit doesn't exist", async () => {
      // Arrange
      const targetCommitId = "non-existent-commit";
      const envId = "env-id";
      const actorId = "user-id";
      const actorType = ActorType.USER;
      const projectId = "project-id";

      // Mock the transaction to properly handle the error
      mockFolderCommitDAL.transaction.mockImplementation(async (callback) => {
        return await callback({} as Knex);
      });

      // Mock findById to return null inside the transaction
      mockFolderCommitDAL.findById.mockResolvedValue(null);

      // Act & Assert
      await expect(
        folderCommitService.deepRollbackFolder(targetCommitId, envId, actorId, actorType, projectId)
      ).rejects.toThrow(NotFoundError);
    });
  });

  describe("createFolderTreeCheckpoint", () => {
    it("should create a tree checkpoint when checkpoint window is exceeded", async () => {
      // Arrange
      const envId = "env-id";
      const folderCommitId = "commit-id";
      const latestCommit = { id: folderCommitId };
      const latestTreeCheckpoint = { id: "tree-checkpoint-id", folderCommitId: "old-commit-id" };
      const folders = [
        { id: "folder-1", isReserved: false },
        { id: "folder-2", isReserved: false },
        { id: "folder-3", isReserved: true } // Reserved folders should be filtered out
      ];
      const folderCommits = [
        { folderId: "folder-1", id: "commit-1" },
        { folderId: "folder-2", id: "commit-2" }
      ];
      const treeCheckpoint = { id: "new-tree-checkpoint-id" };

      mockFolderCommitDAL.findLatestEnvCommit.mockResolvedValue(latestCommit);
      mockFolderTreeCheckpointDAL.findLatestByEnvId.mockResolvedValue(latestTreeCheckpoint);
      mockFolderCommitDAL.getEnvNumberOfCommitsSince.mockResolvedValue(15); // More than PIT_TREE_CHECKPOINT_WINDOW (10)
      mockFolderDAL.findByEnvId.mockResolvedValue(folders);
      mockFolderCommitDAL.findMultipleLatestCommits.mockResolvedValue(folderCommits);
      mockFolderTreeCheckpointDAL.create.mockResolvedValue(treeCheckpoint);

      // Act
      await folderCommitService.createFolderTreeCheckpoint(envId);

      // Assert
      expect(mockFolderCommitDAL.findLatestEnvCommit).toHaveBeenCalledWith(envId, undefined);
      expect(mockFolderTreeCheckpointDAL.create).toHaveBeenCalledWith({ folderCommitId }, undefined);
    });
  });

  describe("applyFolderStateDifferences", () => {
    it("should process changes correctly", async () => {
      // Arrange
      const folderId = "folder-id";
      const projectId = "project-id";
      const actorId = "user-id";
      const actorType = ActorType.USER;

      const differences = [
        {
          id: "secret-1",
          versionId: "v1",
          changeType: ChangeType.CREATE,
          commitId: BigInt(1)
        } as ResourceChange,
        {
          id: "folder-1",
          versionId: "v2",
          changeType: ChangeType.UPDATE,
          commitId: BigInt(1),
          folderName: "Test Folder",
          folderVersion: "v2"
        } as ResourceChange
      ];

      const secretVersions = {
        "secret-1": {
          id: "secret-version-1",
          createdAt: new Date(),
          updatedAt: new Date(),
          type: "shared",
          folderId: "folder-1",
          secretId: "secret-1",
          version: 1,
          key: "SECRET_KEY",
          encryptedValue: Buffer.from("encrypted"),
          encryptedComment: Buffer.from("comment"),
          skipMultilineEncoding: false,
          userId: "user-1",
          envId: "env-1",
          metadata: {}
        } as TSecretVersionsV2
      };

      const folderVersions = {
        "folder-1": {
          folderId: "folder-1",
          version: 1,
          name: "Test Folder",
          envId: "env-1"
        } as TSecretFolderVersions
      };

      // Mock folder lookup for the folder being processed
      mockFolderDAL.findById.mockImplementation((id) => {
        if (id === folderId) {
          return Promise.resolve({ id: folderId, envId: "env-1" });
        }
        return Promise.resolve(null);
      });

      // Mock latest commit lookup
      mockFolderCommitDAL.findLatestCommit.mockImplementation((id) => {
        if (id === folderId) {
          return Promise.resolve({ id: "latest-commit-id", folderId });
        }
        return Promise.resolve(null);
      });

      // Make sure findByParentId returns an array, not undefined
      mockFolderDAL.findByParentId.mockResolvedValue([]);

      // Make sure other required functions return appropriate values
      mockFolderCheckpointDAL.findLatestByFolderId.mockResolvedValue(null);
      mockSecretVersionV2BridgeDAL.findLatestVersionByFolderId.mockResolvedValue([]);

      // These mocks need to return objects with an id field
      mockSecretVersionV2BridgeDAL.findByIdsWithLatestVersion.mockResolvedValue(Object.values(secretVersions));
      mockFolderVersionDAL.findByIdsWithLatestVersion.mockResolvedValue(Object.values(folderVersions));
      mockSecretV2BridgeDAL.insertMany.mockResolvedValue([{ id: "new-secret-1" }]);
      mockSecretVersionV2BridgeDAL.create.mockResolvedValue({ id: "new-secret-version-1" });
      mockFolderDAL.updateById.mockResolvedValue({ id: "updated-folder-1" });
      mockFolderVersionDAL.create.mockResolvedValue({ id: "new-folder-version-1" });
      mockFolderCommitDAL.create.mockResolvedValue({ id: "new-commit-id" });
      mockSecretVersionV2BridgeDAL.findLatestVersionMany.mockResolvedValue([
        {
          id: "secret-version-1",
          createdAt: new Date(),
          updatedAt: new Date(),
          type: "shared",
          folderId: "folder-1",
          secretId: "secret-1",
          version: 1,
          key: "SECRET_KEY",
          encryptedValue: Buffer.from("encrypted"),
          encryptedComment: Buffer.from("comment"),
          skipMultilineEncoding: false,
          userId: "user-1",
          envId: "env-1",
          metadata: {}
        }
      ]);

      // Mock transaction
      mockFolderCommitDAL.transaction.mockImplementation(<T>(callback: TransactionCallback<T>) => callback({} as Knex));

      // Act
      const result = await folderCommitService.applyFolderStateDifferences({
        differences,
        actorInfo: {
          actorType,
          actorId,
          message: "Applying changes"
        },
        folderId,
        projectId,
        reconstructNewFolders: false
      });

      // Assert
      expect(mockFolderCommitDAL.create).toHaveBeenCalled();
      expect(mockSecretV2BridgeDAL.invalidateSecretCacheByProjectId).toHaveBeenCalledWith(projectId);

      // Check that we got the right counts
      expect(result.totalChanges).toEqual(2);
    });
  });
});
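One setup detail in this suite worth noting: `vi.clearAllMocks()` in `beforeEach` clears call history but keeps implementations, while `vi.resetAllMocks()` in `afterEach` also drops mock implementations, which is why per-test `mockResolvedValue` overrides are re-applied inside each `it` block. A minimal hedged illustration of the difference (behavior as documented for vitest's vi.fn):

import { expect, it, vi } from "vitest";

it("clear vs reset", () => {
  const fn = vi.fn().mockReturnValue(42);
  fn();
  vi.clearAllMocks(); // call history gone, implementation kept
  expect(fn()).toBe(42);
  vi.resetAllMocks(); // implementation dropped as well
  expect(fn()).toBeUndefined();
});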
2173
backend/src/services/folder-commit/folder-commit-service.ts
Normal file
File diff suppressed because it is too large.
@@ -0,0 +1,44 @@
import { Knex } from "knex";

import { TDbClient } from "@app/db";
import { TableName, TFolderTreeCheckpointResources } from "@app/db/schemas";
import { DatabaseError } from "@app/lib/errors";
import { buildFindFilter, ormify, selectAllTableCols } from "@app/lib/knex";

export type TFolderTreeCheckpointResourcesDALFactory = ReturnType<typeof folderTreeCheckpointResourcesDALFactory>;

type TFolderTreeCheckpointResourcesWithCommitId = TFolderTreeCheckpointResources & {
  commitId: bigint;
};

export const folderTreeCheckpointResourcesDALFactory = (db: TDbClient) => {
  const folderTreeCheckpointResourcesOrm = ormify(db, TableName.FolderTreeCheckpointResources);

  const findByTreeCheckpointId = async (
    folderTreeCheckpointId: string,
    tx?: Knex
  ): Promise<TFolderTreeCheckpointResourcesWithCommitId[]> => {
    try {
      const docs = await (tx || db.replicaNode())<TFolderTreeCheckpointResources>(
        TableName.FolderTreeCheckpointResources
      )
        .join(
          TableName.FolderCommit,
          `${TableName.FolderTreeCheckpointResources}.folderCommitId`,
          `${TableName.FolderCommit}.id`
        )
        // eslint-disable-next-line @typescript-eslint/no-misused-promises
        .where(buildFindFilter({ folderTreeCheckpointId }, TableName.FolderTreeCheckpointResources))
        .select(selectAllTableCols(TableName.FolderTreeCheckpointResources))
        .select(db.ref("commitId").withSchema(TableName.FolderCommit).as("commitId"));
      return docs;
    } catch (error) {
      throw new DatabaseError({ error, name: "FindByTreeCheckpointId" });
    }
  };

  return {
    ...folderTreeCheckpointResourcesOrm,
    findByTreeCheckpointId
  };
};
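A hedged usage sketch (the checkpoint id is illustrative): the factory is built once with the app's db client, and the join query returns checkpoint resources together with their joined numeric commit ids.

const treeCheckpointResourcesDAL = folderTreeCheckpointResourcesDALFactory(db);

const listCheckpointResources = async (checkpointId: string) => {
  // Each row carries the resource columns plus the joined bigint commitId
  const resources = await treeCheckpointResourcesDAL.findByTreeCheckpointId(checkpointId);
  return resources.map((r) => ({ folderId: r.folderId, commitId: r.commitId }));
};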
@@ -0,0 +1,79 @@
import { Knex } from "knex";

import { TDbClient } from "@app/db";
import { TableName, TFolderCommits, TFolderTreeCheckpoints } from "@app/db/schemas";
import { DatabaseError } from "@app/lib/errors";
import { buildFindFilter, ormify, selectAllTableCols } from "@app/lib/knex";

export type TFolderTreeCheckpointDALFactory = ReturnType<typeof folderTreeCheckpointDALFactory>;

type TreeCheckpointWithCommitInfo = TFolderTreeCheckpoints & {
  commitId: bigint;
};

export const folderTreeCheckpointDALFactory = (db: TDbClient) => {
  const folderTreeCheckpointOrm = ormify(db, TableName.FolderTreeCheckpoint);

  const findByCommitId = async (folderCommitId: string, tx?: Knex): Promise<TFolderTreeCheckpoints | undefined> => {
    try {
      const doc = await (tx || db.replicaNode())<TFolderTreeCheckpoints>(TableName.FolderTreeCheckpoint)
        // eslint-disable-next-line @typescript-eslint/no-misused-promises
        .where(buildFindFilter({ folderCommitId }, TableName.FolderTreeCheckpoint))
        .select(selectAllTableCols(TableName.FolderTreeCheckpoint))
        .first();
      return doc;
    } catch (error) {
      throw new DatabaseError({ error, name: "FindByCommitId" });
    }
  };

  const findNearestCheckpoint = async (
    folderCommitId: bigint,
    envId: string,
    tx?: Knex
  ): Promise<TreeCheckpointWithCommitInfo | undefined> => {
    try {
      const nearestCheckpoint = await (tx || db.replicaNode())(TableName.FolderTreeCheckpoint)
        .join<TFolderCommits>(
          TableName.FolderCommit,
          `${TableName.FolderTreeCheckpoint}.folderCommitId`,
          `${TableName.FolderCommit}.id`
        )
        // eslint-disable-next-line @typescript-eslint/no-misused-promises
        .where(`${TableName.FolderCommit}.envId`, "=", envId)
        .andWhere(`${TableName.FolderCommit}.commitId`, "<=", folderCommitId.toString())
        .select(selectAllTableCols(TableName.FolderTreeCheckpoint))
        .select(db.ref("commitId").withSchema(TableName.FolderCommit))
        .orderBy(`${TableName.FolderCommit}.commitId`, "desc")
        .first();

      return nearestCheckpoint;
    } catch (error) {
      throw new DatabaseError({ error, name: "FindNearestCheckpoint" });
    }
  };

  const findLatestByEnvId = async (envId: string, tx?: Knex): Promise<TFolderTreeCheckpoints | undefined> => {
    try {
      const doc = await (tx || db.replicaNode())<TFolderTreeCheckpoints>(TableName.FolderTreeCheckpoint)
        .join<TFolderCommits>(
          TableName.FolderCommit,
          `${TableName.FolderTreeCheckpoint}.folderCommitId`,
          `${TableName.FolderCommit}.id`
        )
        .where(`${TableName.FolderCommit}.envId`, "=", envId)
        .orderBy(`${TableName.FolderTreeCheckpoint}.createdAt`, "desc")
        .first();
      return doc;
    } catch (error) {
      throw new DatabaseError({ error, name: "FindLatestByEnvId" });
    }
  };

  return {
    ...folderTreeCheckpointOrm,
    findByCommitId,
    findNearestCheckpoint,
    findLatestByEnvId
  };
};
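A hedged sketch of how a point-in-time restore might consume this DAL: find the newest tree checkpoint at or before the target commit number, then replay the commits in between (replayCommitsSince is a hypothetical helper, not part of this diff):

const restoreTreeAt = async (envId: string, targetCommitNumber: bigint) => {
  // Newest checkpoint whose joined commitId is <= the target
  const checkpoint = await folderTreeCheckpointDAL.findNearestCheckpoint(targetCommitNumber, envId);
  if (!checkpoint) throw new Error(`no tree checkpoint at or before commit ${targetCommitNumber}`);

  // Apply the commits recorded after the checkpoint up to the target (hypothetical helper)
  return replayCommitsSince(checkpoint, targetCommitNumber);
};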
@@ -11,5 +11,9 @@ export type TIdentityAccessTokenJwtPayload = {
     oidc?: {
       claims: Record<string, string>;
     };
+    kubernetes?: {
+      namespace: string;
+      name: string;
+    };
   };
 };
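A hedged sketch of what the new optional claim carries once a Kubernetes token review succeeds: the reviewed service account's identity rides inside the minted JWT (jwt.sign is from jsonwebtoken; the values and the authTokenType string are illustrative):

import jwt from "jsonwebtoken";

const token = jwt.sign(
  {
    identityId: "identity-id",
    identityAccessTokenId: "access-token-id",
    authTokenType: "identityAccessToken", // AuthTokenType.IDENTITY_ACCESS_TOKEN in the codebase
    identityAuth: {
      // New in this change: the reviewed pod's service account identity
      kubernetes: { namespace: "default", name: "workload-sa" }
    }
  },
  "auth-secret" // appCfg.AUTH_SECRET in the service
);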
@@ -72,8 +72,8 @@ export const identityKubernetesAuthServiceFactory = ({
   const $gatewayProxyWrapper = async <T>(
     inputs: {
       gatewayId: string;
-      targetHost: string;
-      targetPort: number;
+      targetHost?: string;
+      targetPort?: number;
       caCert?: string;
       reviewTokenThroughGateway: boolean;
     },
@@ -104,11 +104,15 @@ export const identityKubernetesAuthServiceFactory = ({
           cert: relayDetails.certificate,
           key: relayDetails.privateKey.toString()
         },
-        // we always pass this, because its needed for both tcp and http protocol
-        httpsAgent: new https.Agent({
-          ca: inputs.caCert,
-          rejectUnauthorized: Boolean(inputs.caCert)
-        })
+        // only needed for TCP protocol, because the gateway as reviewer will use the pod's CA cert for auth directly
+        ...(!inputs.reviewTokenThroughGateway
+          ? {
+              httpsAgent: new https.Agent({
+                ca: inputs.caCert,
+                rejectUnauthorized: Boolean(inputs.caCert)
+              })
+            }
+          : {})
       }
     );

@@ -142,8 +146,15 @@ export const identityKubernetesAuthServiceFactory = ({
     caCert = decryptor({ cipherTextBlob: identityKubernetesAuth.encryptedKubernetesCaCertificate }).toString();
   }

-  const tokenReviewCallbackRaw = async (host: string = identityKubernetesAuth.kubernetesHost, port?: number) => {
+  const tokenReviewCallbackRaw = async (host = identityKubernetesAuth.kubernetesHost, port?: number) => {
+    logger.info({ host, port }, "tokenReviewCallbackRaw: Processing kubernetes token review using raw API");
+
+    if (!host || !identityKubernetesAuth.kubernetesHost) {
+      throw new BadRequestError({
+        message: "Kubernetes host is required when token review mode is set to API"
+      });
+    }

     let tokenReviewerJwt = "";
     if (identityKubernetesAuth.encryptedKubernetesTokenReviewerJwt) {
       tokenReviewerJwt = decryptor({
@@ -211,11 +222,7 @@ export const identityKubernetesAuthServiceFactory = ({
     return res.data;
   };

-  const tokenReviewCallbackThroughGateway = async (
-    host: string = identityKubernetesAuth.kubernetesHost,
-    port?: number,
-    httpsAgent?: https.Agent
-  ) => {
+  const tokenReviewCallbackThroughGateway = async (host: string, port?: number) => {
     logger.info(
       {
         host,
@@ -224,11 +231,9 @@ export const identityKubernetesAuthServiceFactory = ({
       "tokenReviewCallbackThroughGateway: Processing kubernetes token review using gateway"
     );

-    const baseUrl = port ? `${host}:${port}` : host;
-
     const res = await axios
       .post<TCreateTokenReviewResponse>(
-        `${baseUrl}/apis/authentication.k8s.io/v1/tokenreviews`,
+        `${host}:${port}/apis/authentication.k8s.io/v1/tokenreviews`,
         {
           apiVersion: "authentication.k8s.io/v1",
           kind: "TokenReview",
@@ -240,11 +245,10 @@ export const identityKubernetesAuthServiceFactory = ({
         {
           headers: {
             "Content-Type": "application/json",
-            "x-infisical-action": GatewayHttpProxyActions.InjectGatewayK8sServiceAccountToken
+            "x-infisical-action": GatewayHttpProxyActions.UseGatewayK8sServiceAccount
           },
           signal: AbortSignal.timeout(10000),
-          timeout: 10000,
-          ...(httpsAgent ? { httpsAgent } : {})
+          timeout: 10000
         }
       )
       .catch((err) => {
@@ -273,11 +277,6 @@ export const identityKubernetesAuthServiceFactory = ({
     let data: TCreateTokenReviewResponse | undefined;

     if (identityKubernetesAuth.tokenReviewMode === IdentityKubernetesAuthTokenReviewMode.Gateway) {
-      const { kubernetesHost } = identityKubernetesAuth;
-      const lastColonIndex = kubernetesHost.lastIndexOf(":");
-      const k8sHost = kubernetesHost.substring(0, lastColonIndex);
-      const k8sPort = kubernetesHost.substring(lastColonIndex + 1);
-
       if (!identityKubernetesAuth.gatewayId) {
         throw new BadRequestError({
           message: "Gateway ID is required when token review mode is set to Gateway"
@@ -287,14 +286,17 @@ export const identityKubernetesAuthServiceFactory = ({
       data = await $gatewayProxyWrapper(
         {
           gatewayId: identityKubernetesAuth.gatewayId,
-          targetHost: k8sHost, // note(daniel): must include the protocol (https|http)
-          targetPort: k8sPort ? Number(k8sPort) : 443,
           caCert,
           reviewTokenThroughGateway: true
         },
         tokenReviewCallbackThroughGateway
       );
     } else if (identityKubernetesAuth.tokenReviewMode === IdentityKubernetesAuthTokenReviewMode.Api) {
+      if (!identityKubernetesAuth.kubernetesHost) {
+        throw new BadRequestError({
+          message: "Kubernetes host is required when token review mode is set to API"
+        });
+      }
+
       let { kubernetesHost } = identityKubernetesAuth;
       if (kubernetesHost.startsWith("https://") || kubernetesHost.startsWith("http://")) {
         kubernetesHost = new RE2("^https?:\\/\\/").replace(kubernetesHost, "");
@@ -398,7 +400,13 @@ export const identityKubernetesAuthServiceFactory = ({
       {
         identityId: identityKubernetesAuth.identityId,
         identityAccessTokenId: identityAccessToken.id,
-        authTokenType: AuthTokenType.IDENTITY_ACCESS_TOKEN
+        authTokenType: AuthTokenType.IDENTITY_ACCESS_TOKEN,
+        identityAuth: {
+          kubernetes: {
+            namespace: targetNamespace,
+            name: targetName
+          }
+        }
       } as TIdentityAccessTokenJwtPayload,
       appCfg.AUTH_SECRET,
       // akhilmhdh: for non-expiry tokens you should not even set the value, including undefined. Even for undefined jsonwebtoken throws error
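One idiom from the gateway hunk above deserves a standalone note: spreading a conditional object so the httpsAgent key is omitted entirely (rather than set to undefined) when the gateway reviews the token itself. A minimal sketch of the same pattern:

import https from "https";

const buildAxiosConfig = (caCert: string | undefined, reviewTokenThroughGateway: boolean) => ({
  timeout: 10_000,
  // Only attach a TLS agent when the caller must verify the cluster itself
  ...(!reviewTokenThroughGateway
    ? { httpsAgent: new https.Agent({ ca: caCert, rejectUnauthorized: Boolean(caCert) }) }
    : {})
});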
@@ -12,7 +12,7 @@ export enum IdentityKubernetesAuthTokenReviewMode {

 export type TAttachKubernetesAuthDTO = {
   identityId: string;
-  kubernetesHost: string;
+  kubernetesHost: string | null;
   caCert: string;
   tokenReviewerJwt?: string;
   tokenReviewMode: IdentityKubernetesAuthTokenReviewMode;
@@ -29,7 +29,7 @@ export type TAttachKubernetesAuthDTO = {

 export type TUpdateKubernetesAuthDTO = {
   identityId: string;
-  kubernetesHost?: string;
+  kubernetesHost?: string | null;
   caCert?: string;
   tokenReviewerJwt?: string | null;
   tokenReviewMode?: IdentityKubernetesAuthTokenReviewMode;
@@ -122,9 +122,9 @@ export const identityUaServiceFactory = ({
         }
       : {
           accessTokenTTL: identityUa.accessTokenPeriod,
-          // Setting Max TTL to 2 × period ensures that clients can always renew their token
-          // at least once, and matches client logic that checks if renewing would exceed Max TTL.
-          accessTokenMaxTTL: 2 * identityUa.accessTokenPeriod
+          // We set a very large Max TTL for periodic tokens to ensure that clients (even outdated ones) can always renew their token
+          // without them having to update their SDKs, CLIs, etc. This workaround sets it to 30 years to emulate "forever"
+          accessTokenMaxTTL: 1000000000
         };

   const identityAccessToken = await identityUaDAL.transaction(async (tx) => {
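A quick sanity check on the "30 years" figure in the new comment, assuming the TTL is in seconds:

// 1e9 seconds expressed in 365-day years
const maxTtlYears = 1_000_000_000 / (60 * 60 * 24 * 365); // ≈ 31.7, i.e. roughly "forever" for a token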
@@ -212,7 +212,7 @@ export const orgDALFactory = (db: TDbClient) => {
   // special query
   const findAllOrgsByUserId = async (
     userId: string
-  ): Promise<(TOrganizations & { orgAuthMethod: string; userRole: string })[]> => {
+  ): Promise<(TOrganizations & { orgAuthMethod: string; userRole: string; userStatus: string })[]> => {
     try {
       const org = (await db
         .replicaNode()(TableName.OrgMembership)
@@ -234,6 +234,7 @@ export const orgDALFactory = (db: TDbClient) => {
         })
         .select(selectAllTableCols(TableName.Organization))
         .select(db.ref("role").withSchema(TableName.OrgMembership).as("userRole"))
+        .select(db.ref("status").withSchema(TableName.OrgMembership).as("userStatus"))
         .select(
           db.raw(`
             CASE
@@ -242,7 +243,7 @@ export const orgDALFactory = (db: TDbClient) => {
               ELSE ''
             END as "orgAuthMethod"
           `)
-        )) as (TOrganizations & { orgAuthMethod: string; userRole: string })[];
+        )) as (TOrganizations & { orgAuthMethod: string; userRole: string; userStatus: string })[];

       return org;
     } catch (error) {
@@ -183,7 +183,9 @@ export const orgServiceFactory = ({
   * */
  const findAllOrganizationOfUser = async (userId: string) => {
    const orgs = await orgDAL.findAllOrgsByUserId(userId);
-    return orgs;
+
+    // Filter out orgs where the membership object is an invitation
+    return orgs.filter((org) => org.userStatus !== "invited");
  };
  /*
   * Get all workspace members
@@ -835,16 +837,22 @@ export const orgServiceFactory = ({

      // if the user doesn't exist we create the user with the email
      if (!inviteeUser) {
-        inviteeUser = await userDAL.create(
-          {
-            isAccepted: false,
-            email: inviteeEmail,
-            username: inviteeEmail,
-            authMethods: [AuthMethod.EMAIL],
-            isGhost: false
-          },
-          tx
-        );
+        // TODO(carlos): will be removed once the function receives usernames instead of emails
+        const usersByEmail = await userDAL.findUserByEmail(inviteeEmail, tx);
+        if (usersByEmail?.length === 1) {
+          [inviteeUser] = usersByEmail;
+        } else {
+          inviteeUser = await userDAL.create(
+            {
+              isAccepted: false,
+              email: inviteeEmail,
+              username: inviteeEmail,
+              authMethods: [AuthMethod.EMAIL],
+              isGhost: false
+            },
+            tx
+          );
+        }
      }

      const inviteeUserId = inviteeUser?.id;
@@ -12,7 +12,7 @@ import {
   TProjectsUpdate
 } from "@app/db/schemas";
 import { BadRequestError, DatabaseError, NotFoundError, UnauthorizedError } from "@app/lib/errors";
-import { ormify, selectAllTableCols, sqlNestRelationships } from "@app/lib/knex";
+import { buildFindFilter, ormify, selectAllTableCols, sqlNestRelationships } from "@app/lib/knex";

 import { ActorType } from "../auth/auth-type";
 import { Filter, ProjectFilterType, SearchProjectSortBy } from "./project-types";
@@ -475,6 +475,16 @@ export const projectDALFactory = (db: TDbClient) => {
     return { docs, totalCount: Number(docs?.[0]?.count ?? 0) };
   };

+  const findProjectByEnvId = async (envId: string, tx?: Knex) => {
+    const project = await (tx || db.replicaNode())(TableName.Project)
+      .leftJoin(TableName.Environment, `${TableName.Environment}.projectId`, `${TableName.Project}.id`)
+      // eslint-disable-next-line @typescript-eslint/no-misused-promises
+      .where(buildFindFilter({ id: envId }, TableName.Environment))
+      .select(selectAllTableCols(TableName.Project))
+      .first();
+    return project;
+  };
+
   const countOfOrgProjects = async (orgId: string | null, tx?: Knex) => {
     try {
       const doc = await (tx || db.replicaNode())(TableName.Project)
@@ -504,6 +514,7 @@ export const projectDALFactory = (db: TDbClient) => {
     checkProjectUpgradeStatus,
     getProjectFromSplitId,
     searchProjects,
+    findProjectByEnvId,
     countOfOrgProjects
   };
 };
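A hedged usage sketch of the new lookup (identifiers illustrative): callers that only hold an environment id, such as the folder-commit services earlier in this diff, can resolve the owning project in one query.

const getProjectForEnv = async (envId: string) => {
  const project = await projectDAL.findProjectByEnvId(envId);
  if (!project) throw new Error(`no project found for env ${envId}`);
  return project;
};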
@@ -165,7 +165,7 @@ type TProjectServiceFactoryDep = {
   sshHostGroupDAL: Pick<TSshHostGroupDALFactory, "find" | "findSshHostGroupsWithLoginMappings">;
   permissionService: TPermissionServiceFactory;
   orgService: Pick<TOrgServiceFactory, "addGhostUser">;
-  licenseService: Pick<TLicenseServiceFactory, "getPlan">;
+  licenseService: Pick<TLicenseServiceFactory, "getPlan" | "invalidateGetPlan">;
   queueService: Pick<TQueueServiceFactory, "stopRepeatableJob">;
   smtpService: Pick<TSmtpService, "sendMail">;
   orgDAL: Pick<TOrgDALFactory, "findOne">;
@@ -494,6 +494,10 @@ export const projectServiceFactory = ({
       );
     }

+    // no need to invalidate if there was no limit
+    if (plan.workspaceLimit) {
+      await licenseService.invalidateGetPlan(organization.id);
+    }
     return {
       ...project,
       environments: envs,
@@ -667,7 +671,8 @@ export const projectServiceFactory = ({
       enforceCapitalization: update.autoCapitalization,
       hasDeleteProtection: update.hasDeleteProtection,
       slug: update.slug,
-      secretSharing: update.secretSharing
+      secretSharing: update.secretSharing,
+      showSnapshotsLegacy: update.showSnapshotsLegacy
     });

     return updatedProject;
@@ -94,6 +94,7 @@ export type TUpdateProjectDTO = {
     hasDeleteProtection?: boolean;
     slug?: string;
     secretSharing?: boolean;
+    showSnapshotsLegacy?: boolean;
   };
 } & Omit<TProjectPermission, "projectId">;

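The invalidation guard above follows a simple cache rule: only invalidate the cached plan when the value that was just consumed (the workspace limit) can now be stale. A self-contained sketch of the idea, with an in-memory cache standing in for the license service:

type Plan = { workspaceLimit?: number };
const planCache = new Map<string, Plan>();

const afterProjectCreate = (orgId: string, plan: Plan) => {
  // A new project only affects future limit checks when a limit exists
  if (plan.workspaceLimit) planCache.delete(orgId);
};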
@@ -488,6 +488,75 @@ export const secretFolderDALFactory = (db: TDbClient) => {
     }
   };

+  const findFoldersByRootAndIds = async ({ rootId, folderIds }: { rootId: string; folderIds: string[] }, tx?: Knex) => {
+    try {
+      // First, get all descendant folders of rootId
+      const descendants = await (tx || db.replicaNode())
+        .withRecursive("descendants", (qb) =>
+          qb
+            .select(
+              selectAllTableCols(TableName.SecretFolder),
+              db.raw("0 as depth"),
+              db.raw(`'/' as path`),
+              db.ref(`${TableName.Environment}.slug`).as("environment")
+            )
+            .from(TableName.SecretFolder)
+            .join(TableName.Environment, `${TableName.SecretFolder}.envId`, `${TableName.Environment}.id`)
+            .where(`${TableName.SecretFolder}.id`, rootId)
+            .union((un) => {
+              void un
+                .select(
+                  selectAllTableCols(TableName.SecretFolder),
+                  db.raw("descendants.depth + 1 as depth"),
+                  db.raw(
+                    `CONCAT(
+                      CASE WHEN descendants.path = '/' THEN '' ELSE descendants.path END,
+                      CASE WHEN ${TableName.SecretFolder}."parentId" is NULL THEN '' ELSE CONCAT('/', secret_folders.name) END
+                    )`
+                  ),
+                  db.ref("descendants.environment")
+                )
+                .from(TableName.SecretFolder)
+                .where(`${TableName.SecretFolder}.isReserved`, false)
+                .join("descendants", `${TableName.SecretFolder}.parentId`, "descendants.id");
+            })
+        )
+        .select<(TSecretFolders & { path: string; depth: number; environment: string })[]>("*")
+        .from("descendants")
+        .whereIn(`id`, folderIds)
+        .orderBy("depth")
+        .orderBy(`name`);
+
+      return descendants;
+    } catch (error) {
+      throw new DatabaseError({ error, name: "FindFoldersByRootAndIds" });
+    }
+  };
+
+  const findByParentId = async (parentId: string, tx?: Knex) => {
+    try {
+      const folders = await (tx || db.replicaNode())(TableName.SecretFolder)
+        .where({ parentId })
+        .andWhere({ isReserved: false })
+        .select(selectAllTableCols(TableName.SecretFolder));
+      return folders;
+    } catch (error) {
+      throw new DatabaseError({ error, name: "findByParentId" });
+    }
+  };
+
+  const findByEnvId = async (envId: string, tx?: Knex) => {
+    try {
+      const folders = await (tx || db.replicaNode())(TableName.SecretFolder)
+        .where({ envId })
+        .andWhere({ isReserved: false })
+        .select(selectAllTableCols(TableName.SecretFolder));
+      return folders;
+    } catch (error) {
+      throw new DatabaseError({ error, name: "findByEnvId" });
+    }
+  };
+
   return {
     ...secretFolderOrm,
     update,
@@ -499,6 +568,9 @@ export const secretFolderDALFactory = (db: TDbClient) => {
     findClosestFolder,
     findByProjectId,
     findByMultiEnv,
-    findByEnvsDeep
+    findByEnvsDeep,
+    findByParentId,
+    findByEnvId,
+    findFoldersByRootAndIds
   };
 };
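The withRecursive query above computes depth and path top-down: the anchor row is the root at depth 0 with path '/', and each recursive step joins children onto the rows found so far. The same traversal over an in-memory tree, as an illustrative sketch:

type Folder = { id: string; parentId: string | null; name: string };

// Breadth-first walk mirroring the recursive CTE: extend depth and path per level
const walkDescendants = (folders: Folder[], rootId: string) => {
  const out: { id: string; depth: number; path: string }[] = [];
  let frontier = [{ id: rootId, depth: 0, path: "/" }];
  while (frontier.length > 0) {
    out.push(...frontier);
    frontier = frontier.flatMap((node) =>
      folders
        .filter((child) => child.parentId === node.id)
        .map((child) => ({
          id: child.id,
          depth: node.depth + 1,
          path: node.path === "/" ? `/${child.name}` : `${node.path}/${child.name}`
        }))
    );
  }
  return out;
};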
Some files were not shown because too many files have changed in this diff.