Compare commits

...

100 Commits

Author SHA1 Message Date
Maidul Islam
26bed22b94 fix lint by adding void 2025-06-10 17:05:10 -04:00
Maidul Islam
2d3c63e8b9 fix lint 2025-06-10 03:10:16 -04:00
Maidul Islam
bdb36d6be4 disable caching for frontend assets
This aims to fix the issue where it says

```
TypeError
Cannot read properties of undefined (reading 'component')
```

by telling the browser to not cache any chunks
2025-06-10 02:59:31 -04:00
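A minimal sketch of the idea behind this fix, assuming a Fastify server fronting the built assets (the hook and header values here are illustrative, not the actual change):

```
import Fastify from "fastify";

const app = Fastify();

// Ask the browser to revalidate instead of caching: a stale index.html can
// reference hashed chunk files that no longer exist after a redeploy, which
// surfaces as "Cannot read properties of undefined (reading 'component')".
app.addHook("onRequest", async (_req, reply) => {
  reply.header("Cache-Control", "no-store, no-cache, must-revalidate");
});
```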
Maidul Islam
3ee8f7aa20 Merge pull request #3758 from Infisical/revert-3757-revert-3676-revert-3675-revert-3546-feat/point-in-time-revamp
feat(PIT): Point In Time Revamp
2025-06-10 00:46:07 -04:00
x032205
36a5291dc3 Merge pull request #3754 from Infisical/add-webhook-trigger-audit-log
improvement(project-webhooks): Add webhook triggered audit log
2025-06-09 15:39:42 -04:00
x032205
977fd7a057 Small tweaks 2025-06-09 15:34:32 -04:00
x032205
bf413c75bc Merge pull request #3693 from Infisical/check-non-re2-regex-workflow
Check non re2 regex workflow
2025-06-09 14:03:02 -04:00
x032205
3250a18050 Fix escaping quotes 2025-06-09 13:28:02 -04:00
x032205
2eb1451c56 Further optimized the regex (99% accuracy | 99/100 passing tests) 2025-06-09 13:10:42 -04:00
x032205
a24158b187 Remove false detection for relative paths ("../../path") and other minor
improvements
2025-06-09 12:28:11 -04:00
Sheen
4cc80e38f4 Merge pull request #3761 from Infisical/fix/re-added-merge-user-logic
fix: re-added merge user logic
2025-06-09 22:09:44 +08:00
Sheen Capadngan
d5ee74bb1a misc: simplified logic 2025-06-09 22:02:01 +08:00
Sheen Capadngan
ec776b94ae fix: re-added merge user logic 2025-06-09 21:57:01 +08:00
Maidul Islam
14be4eb601 Revert "Revert "Revert "Revert "feat(PIT): Point In Time Revamp"""" 2025-06-08 21:21:04 -04:00
Maidul Islam
d1faed5672 Merge pull request #3757 from Infisical/revert-3676-revert-3675-revert-3546-feat/point-in-time-revamp
Revert "Revert "Revert "feat(PIT): Point In Time Revamp"""
2025-06-08 21:20:57 -04:00
Maidul Islam
9c6b300ad4 Revert "Revert "Revert "feat(PIT): Point In Time Revamp""" 2025-06-08 21:20:37 -04:00
Maidul Islam
210ddf506a Merge pull request #3676 from Infisical/revert-3675-revert-3546-feat/point-in-time-revamp
Revert "Revert "feat(PIT): Point In Time Revamp""
2025-06-08 20:29:51 -04:00
Daniel Hougaard
33d740a4de Merge pull request #3753 from Infisical/daniel/gateway-docs
feat(gateway): multiple authentication methods
2025-06-09 00:14:14 +04:00
Sheen
86dee1ec5d Merge pull request #3746 from Infisical/feat/kubernetes-dynamic-secret-improvements
feat: added dynamic credential support and gateway auth to k8 dynamic secret
2025-06-09 03:17:20 +08:00
Sheen
6dfe2851e1 misc: doc improvements 2025-06-08 18:56:40 +00:00
Sheen Capadngan
95b843779b misc: addressed type comment 2025-06-09 02:41:19 +08:00
Scott Wilson
219aa3c641 improvement: add webhook triggered audit log 2025-06-06 16:06:29 -07:00
Daniel Hougaard
cf5391d6d4 Update overview.mdx 2025-06-07 03:06:01 +04:00
Daniel Hougaard
2ca476f21e Update gateway.mdx 2025-06-07 03:04:45 +04:00
Daniel Hougaard
bf81469341 Merge branch 'heads/main' into daniel/gateway-docs 2025-06-07 03:00:16 +04:00
Daniel Hougaard
8445127fad feat(gateway): multiple authentication methods 2025-06-07 02:58:07 +04:00
carlosmonastyrski
fb1cf3eb02 feat(PIT-revamp): minor UI improvements on snapshots deprecation messages 2025-06-06 18:30:53 -03:00
Scott Wilson
f8c822eda7 Merge pull request #3744 from Infisical/project-group-users-page
feature(group-projects): Add project group details page
2025-06-06 14:30:50 -07:00
Scott Wilson
ea5a5e0aa7 improvements: address feedback 2025-06-06 14:13:18 -07:00
Akhil Mohan
f20e4e189d Merge pull request #3722 from Infisical/feat/dynamicSecretIdentityName
Add identityName to Dynamic Secrets userName template
2025-06-07 02:23:41 +05:30
Scott Wilson
c7ec6236e1 Merge pull request #3738 from Infisical/gcp-sync-location
feature(gcp-sync): Add support for syncing to locations
2025-06-06 13:47:55 -07:00
carlosmonastyrski
c4dea2d51f Type fix 2025-06-06 17:34:29 -03:00
carlosmonastyrski
e89b0fdf3f Merge remote-tracking branch 'origin/main' into feat/dynamicSecretIdentityName 2025-06-06 17:27:48 -03:00
Scott Wilson
d57f76d230 improvements: address feedback 2025-06-06 13:22:45 -07:00
carlosmonastyrski
55efa00b8c Merge pull request #3749 from Infisical/feat/pit-snapshot-changes
feat(PIT-revamp): snapshot changes for PIT revamp and add docs for ne…
2025-06-06 16:38:12 -03:00
carlosmonastyrski
29ba92dadb feat(PIT-revamp): minor doc improvements 2025-06-06 16:32:12 -03:00
Maidul Islam
7ba79dec19 Merge pull request #3752 from akhilmhdh/feat/k8s-metadata-auth
feat: added k8s metadata in template policy
2025-06-06 15:30:33 -04:00
Akhil Mohan
6ea8bff224 Merge pull request #3750 from akhilmhdh/feat/dynamic-secret-aws
feat: assume role mode for aws dynamic secret iam
2025-06-07 00:59:22 +05:30
=
65f4e1bea1 feat: corrected typo 2025-06-07 00:56:03 +05:30
=
73ce3b8bb7 feat: review based update 2025-06-07 00:48:45 +05:30
Akhil Mohan
e63af81e60 Update docs/documentation/platform/access-controls/abac/managing-machine-identity-attributes.mdx
Co-authored-by: greptile-apps[bot] <165735046+greptile-apps[bot]@users.noreply.github.com>
2025-06-06 23:47:40 +05:30
=
6c2c2b319b feat: updated doc for k8s policy 2025-06-06 23:43:15 +05:30
=
82c2be64a1 feat: completed changes for backend to have k8s auth 2025-06-06 23:42:56 +05:30
Sheen
a064e31117 misc: image updates 2025-06-06 17:57:28 +00:00
x032205
051d0780a8 Merge pull request #3721 from Infisical/fix/user-stuck-on-invited
fix invite bug
2025-06-06 13:43:33 -04:00
Sheen Capadngan
5c9563f18b feat: docs 2025-06-07 01:42:01 +08:00
carlosmonastyrski
5406871c30 feat(dynamic-secret): Minor improvements on usernameTemplate 2025-06-06 14:34:32 -03:00
=
8b89edc277 feat: resolved ts fail in license 2025-06-06 22:46:51 +05:30
x032205
b394e191a8 Fix accepting invite while logged out 2025-06-06 13:02:23 -04:00
Daniel Hougaard
92030884ec Merge pull request #3751 from Infisical/daniel/gateway-http-handle-multple-requests
fix(gateway): allow multiple requests when using http proxy
2025-06-06 20:54:22 +04:00
=
4583eb1732 feat: removed console log 2025-06-06 22:13:06 +05:30
carlosmonastyrski
ae00e74c17 Merge pull request #3715 from Infisical/feat/addAzureDevopsDocsOIDC
feat(oidc): add azure docs for OIDC authentication
2025-06-06 13:11:25 -03:00
=
adfd5a1b59 feat: doc for assume aws iam 2025-06-06 21:35:40 +05:30
=
d6c321d34d feat: ui for aws dynamic secret 2025-06-06 21:35:25 +05:30
=
09a7346f32 feat: backend changes for assume permission in aws dynamic secret 2025-06-06 21:33:19 +05:30
carlosmonastyrski
1ae82dc460 feat(PIT-revamp): snapshot changes for PIT revamp and add docs for new logic 2025-06-06 12:52:37 -03:00
Sheen Capadngan
80fada6b55 misc: finalized httpsAgent usage 2025-06-06 23:51:39 +08:00
x032205
e4abac91b4 Merge branch 'main' into fix/user-stuck-on-invited 2025-06-06 11:50:03 -04:00
Maidul Islam
b4f37193ac Merge pull request #3748 from Infisical/akhilmhdh-patch-3
feat: updated dynamic secret,secret import to support glob in environment
2025-06-06 10:50:36 -04:00
Akhil Mohan
c8be5a637a feat: updated dynamic secret,secret import to support glob in environment 2025-06-06 20:08:21 +05:30
Akhil Mohan
45485f8bd3 Merge pull request #3739 from akhilmhdh/feat/limit-project-create
feat: added invalidate function to lock
2025-06-06 18:55:03 +05:30
Sheen Capadngan
545df3bf28 misc: added dynamic credential support and gateway auth 2025-06-06 21:03:46 +08:00
Daniel Hougaard
766254c4e3 Merge pull request #3742 from Infisical/daniel/gateway-fix
fix(gateway): handle malformed URL's
2025-06-06 16:20:48 +04:00
Scott Wilson
4c22024d13 feature: project group details page 2025-06-05 19:17:46 -07:00
carlosmonastyrski
6847e5bb89 Merge pull request #3741 from Infisical/fix/inviteUsersByUsernameFix
Fix for inviteUserToOrganization for usernames with no email formats
2025-06-05 21:04:15 -03:00
carlosmonastyrski
5d35ce6c6c Add isEmailVerified to findUserByEmail 2025-06-05 20:59:12 -03:00
carlosmonastyrski
635f027752 Fix for inviteUserToOrganization for usernames with no email formats 2025-06-05 20:47:29 -03:00
carlosmonastyrski
6334ad0d07 Merge branch 'main' into feat/point-in-time-revamp 2025-06-05 18:31:27 -03:00
x032205
89e8f200e9 Reverted test 2025-06-05 16:54:29 -04:00
x032205
e57935a7d3 Support for RegExp + workflow test 2025-06-05 16:53:19 -04:00
x032205
617d07177c Merge branch 'main' into check-non-re2-regex-workflow 2025-06-05 16:46:16 -04:00
carlosmonastyrski
ac5bfbb6c9 feat(dynamic-secret): Minor improvements on usernameTemplate 2025-06-05 17:18:56 -03:00
=
1f80ff040d feat: added invalidate function to lock 2025-06-06 01:45:01 +05:30
Scott Wilson
f8939835e1 feature(gcp-sync): add support for syncing to locations 2025-06-05 13:02:05 -07:00
x032205
d2b0ca94d8 Remove commented line 2025-06-05 11:59:10 -04:00
x032205
5255f0ac17 Fix select org 2025-06-05 11:30:05 -04:00
x032205
4f67834eaa Merge branch 'main' into fix/user-stuck-on-invited 2025-06-05 10:46:22 -04:00
x032205
952e60f08a Select organization checkpoint 2025-06-04 16:54:14 -04:00
carlosmonastyrski
5367d1ac2e feat(dynamic-secret): Added new options to username template 2025-06-04 16:43:17 -03:00
x032205
92b9abb52b Fix type issue 2025-06-03 21:48:59 -04:00
x032205
e2680d9aee Insert old code as comment 2025-06-03 21:48:42 -04:00
x032205
aa049dc43b Fix invite problem on backend 2025-06-03 21:06:48 -04:00
carlosmonastyrski
419e9ac755 Add identityName to Dynamic Secrets userName template 2025-06-03 21:21:36 -03:00
x032205
b7b36a475d fix invite bug 2025-06-03 20:12:29 -04:00
carlosmonastyrski
9159a9fa36 feat(oidc): add azure docs for OIDC authentication 2025-06-03 16:52:12 -03:00
carlosmonastyrski
d5f5abef8e PIT: add migration to fix secret versions 2025-06-02 14:54:40 -03:00
x032205
f711f8a35c Finishing touches + undo RE2 removal 2025-05-31 01:14:37 -04:00
x032205
9c8bb71878 Remove debug info and change wording 2025-05-31 01:05:57 -04:00
x032205
d0547c354a grep fix 2025-05-31 01:03:03 -04:00
x032205
88abdd9529 Debug info 2025-05-31 00:58:11 -04:00
x032205
f3a04f1a2f Fetch depth fix 2025-05-31 00:54:23 -04:00
x032205
082d6c44c4 Vulnerable regex test 2025-05-31 00:50:51 -04:00
x032205
a0aafcc1bf Workflow 2025-05-31 00:50:35 -04:00
carlosmonastyrski
b350841b86 PIT: fix migration for old projects with no versioning set 2025-05-30 19:14:22 -03:00
carlosmonastyrski
ad623f8753 PIT: fix migration 2025-05-30 16:37:34 -03:00
carlosmonastyrski
9cedae61a9 PIT: fix migration 2025-05-30 15:37:46 -03:00
carlosmonastyrski
f7a4731565 PIT: add batch lookup for secret/folder resource versions to migration 2025-05-29 22:16:26 -03:00
carlosmonastyrski
a70aff5f31 PIT: rework of init migration 2025-05-29 16:44:20 -03:00
carlosmonastyrski
d1d5dd29c6 PIT: fix checkpoint creation to do it in batches to avoid insert fails 2025-05-28 22:02:55 -03:00
Maidul Islam
41d7987a6e Revert "Revert "feat(PIT): Point In Time Revamp"" 2025-05-28 20:56:49 -04:00
239 changed files with 16031 additions and 1204 deletions

View File

@@ -0,0 +1,53 @@
name: Detect Non-RE2 Regex
on:
  pull_request:
    types: [opened, synchronize]

jobs:
  check-non-re2-regex:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout code
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Get diff of backend/*
        run: |
          git diff --unified=0 "origin/${{ github.base_ref }}"...HEAD -- backend/ > diff.txt

      - name: Scan backend diff for non-RE2 regex
        run: |
          # Extract only added lines (excluding file headers)
          grep '^+' diff.txt | grep -v '^+++' | sed 's/^\+//' > added_lines.txt

          if [ ! -s added_lines.txt ]; then
            echo "✅ No added lines in backend/ to check for regex usage."
            exit 0
          fi

          regex_usage_pattern='(^|[^A-Za-z0-9_"'"'"'`\.\/\\])(\/(?:\\.|[^\/\n\\])+\/[gimsuyv]*(?=\s*[\.\(;,)\]}:]|$)|new RegExp\()'

          # Find all added lines that contain regex patterns
          if grep -E "$regex_usage_pattern" added_lines.txt > potential_violations.txt 2>/dev/null; then
            # Filter out lines that contain 'new RE2' (allowing for whitespace variations)
            if grep -v -E 'new\s+RE2\s*\(' potential_violations.txt > actual_violations.txt 2>/dev/null && [ -s actual_violations.txt ]; then
              echo "🚨 ERROR: Found forbidden regex pattern in added/modified backend code."
              echo ""
              echo "The following lines use raw regex literals (/.../) or new RegExp(...):"
              echo "Please replace with 'new RE2(...)' for RE2 compatibility."
              echo ""
              echo "Offending lines:"
              cat actual_violations.txt
              exit 1
            else
              echo "✅ All identified regex usages are correctly using 'new RE2(...)'."
            fi
          else
            echo "✅ No regex patterns found in added/modified backend lines."
          fi

      - name: Cleanup temporary files
        if: always()
        run: |
          rm -f diff.txt added_lines.txt potential_violations.txt actual_violations.txt
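
For context on what the check enforces: raw regex literals and `new RegExp(...)` can backtrack catastrophically on untrusted input, while RE2 guarantees linear-time matching. A minimal sketch, assuming the node `re2` package (its API mirrors `RegExp`):

```
import RE2 from "re2";

// Allowed by the workflow: RE2 matches in linear time, so attacker-controlled
// input cannot trigger catastrophic backtracking.
const version = new RE2("^\\d+\\.\\d+\\.\\d+$");
console.log(version.test("1.2.3")); // true

// Flagged by the workflow: nested quantifiers like (a+)+ can take exponential
// time in plain RegExp on inputs such as "aaaaaaaaaaaaaaaaaaaaaaaaaaaa!".
const risky = new RegExp("(a+)+$");
console.log(risky.source);
```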

View File

@@ -84,6 +84,11 @@ const getZodDefaultValue = (type: unknown, value: string | number | boolean | Ob
  }
};

const bigIntegerColumns: Record<string, string[]> = {
  "folder_commits": ["commitId"]
};

const main = async () => {
  const tables = (
    await db("information_schema.tables")
@@ -108,6 +113,9 @@ const main = async () => {
      const columnName = columnNames[colNum];
      const colInfo = columns[columnName];
      let ztype = getZodPrimitiveType(colInfo.type);
      if (bigIntegerColumns[tableName]?.includes(columnName)) {
        ztype = "z.coerce.bigint()";
      }
      if (["zodBuffer"].includes(ztype)) {
        zodImportSet.add(ztype);
      }
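
The coercion matters because node-postgres returns `bigint` (int8) columns as strings to avoid precision loss; `z.coerce.bigint()` turns them back into native bigints. A quick standalone check:

```
import { z } from "zod";

const commitId = z.coerce.bigint();

// pg hands back int8 values as strings; coercion restores a native bigint.
console.log(commitId.parse("9007199254740993")); // 9007199254740993n
console.log(commitId.parse(42)); // 42n
```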

View File

@@ -26,6 +26,7 @@ import { TLdapConfigServiceFactory } from "@app/ee/services/ldap-config/ldap-con
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
import { TOidcConfigServiceFactory } from "@app/ee/services/oidc/oidc-config-service";
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
import { TPitServiceFactory } from "@app/ee/services/pit/pit-service";
import { TProjectTemplateServiceFactory } from "@app/ee/services/project-template/project-template-service";
import { TProjectUserAdditionalPrivilegeServiceFactory } from "@app/ee/services/project-user-additional-privilege/project-user-additional-privilege-service";
import { TRateLimitServiceFactory } from "@app/ee/services/rate-limit/rate-limit-service";
@@ -59,6 +60,7 @@ import { TCertificateTemplateServiceFactory } from "@app/services/certificate-te
import { TCmekServiceFactory } from "@app/services/cmek/cmek-service";
import { TExternalGroupOrgRoleMappingServiceFactory } from "@app/services/external-group-org-role-mapping/external-group-org-role-mapping-service";
import { TExternalMigrationServiceFactory } from "@app/services/external-migration/external-migration-service";
import { TFolderCommitServiceFactory } from "@app/services/folder-commit/folder-commit-service";
import { TGroupProjectServiceFactory } from "@app/services/group-project/group-project-service";
import { THsmServiceFactory } from "@app/services/hsm/hsm-service";
import { TIdentityServiceFactory } from "@app/services/identity/identity-service";
@@ -119,6 +121,10 @@ declare module "@fastify/request-context" {
      oidc?: {
        claims: Record<string, string>;
      };
      kubernetes?: {
        namespace: string;
        name: string;
      };
    };
    identityPermissionMetadata?: Record<string, unknown>; // filled by permission service
    assumedPrivilegeDetails?: { requesterId: string; actorId: string; actorType: ActorType; projectId: string };
@@ -272,6 +278,8 @@ declare module "fastify" {
    microsoftTeams: TMicrosoftTeamsServiceFactory;
    assumePrivileges: TAssumePrivilegeServiceFactory;
    githubOrgSync: TGithubOrgSyncServiceFactory;
    folderCommit: TFolderCommitServiceFactory;
    pit: TPitServiceFactory;
    secretScanningV2: TSecretScanningV2ServiceFactory;
    internalCertificateAuthority: TInternalCertificateAuthorityServiceFactory;
    pkiTemplate: TPkiTemplatesServiceFactory;

View File

@@ -80,6 +80,24 @@ import {
  TExternalKms,
  TExternalKmsInsert,
  TExternalKmsUpdate,
  TFolderCheckpointResources,
  TFolderCheckpointResourcesInsert,
  TFolderCheckpointResourcesUpdate,
  TFolderCheckpoints,
  TFolderCheckpointsInsert,
  TFolderCheckpointsUpdate,
  TFolderCommitChanges,
  TFolderCommitChangesInsert,
  TFolderCommitChangesUpdate,
  TFolderCommits,
  TFolderCommitsInsert,
  TFolderCommitsUpdate,
  TFolderTreeCheckpointResources,
  TFolderTreeCheckpointResourcesInsert,
  TFolderTreeCheckpointResourcesUpdate,
  TFolderTreeCheckpoints,
  TFolderTreeCheckpointsInsert,
  TFolderTreeCheckpointsUpdate,
  TGateways,
  TGatewaysInsert,
  TGatewaysUpdate,
@@ -1122,6 +1140,36 @@ declare module "knex/types/tables" {
      TGithubOrgSyncConfigsInsert,
      TGithubOrgSyncConfigsUpdate
    >;
    [TableName.FolderCommit]: KnexOriginal.CompositeTableType<
      TFolderCommits,
      TFolderCommitsInsert,
      TFolderCommitsUpdate
    >;
    [TableName.FolderCommitChanges]: KnexOriginal.CompositeTableType<
      TFolderCommitChanges,
      TFolderCommitChangesInsert,
      TFolderCommitChangesUpdate
    >;
    [TableName.FolderCheckpoint]: KnexOriginal.CompositeTableType<
      TFolderCheckpoints,
      TFolderCheckpointsInsert,
      TFolderCheckpointsUpdate
    >;
    [TableName.FolderCheckpointResources]: KnexOriginal.CompositeTableType<
      TFolderCheckpointResources,
      TFolderCheckpointResourcesInsert,
      TFolderCheckpointResourcesUpdate
    >;
    [TableName.FolderTreeCheckpoint]: KnexOriginal.CompositeTableType<
      TFolderTreeCheckpoints,
      TFolderTreeCheckpointsInsert,
      TFolderTreeCheckpointsUpdate
    >;
    [TableName.FolderTreeCheckpointResources]: KnexOriginal.CompositeTableType<
      TFolderTreeCheckpointResources,
      TFolderTreeCheckpointResourcesInsert,
      TFolderTreeCheckpointResourcesUpdate
    >;
    [TableName.SecretScanningDataSource]: KnexOriginal.CompositeTableType<
      TSecretScanningDataSources,
      TSecretScanningDataSourcesInsert,
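
`CompositeTableType` is what lets knex type each operation differently: the first parameter types `select` results, the second `insert` payloads, the third `update` payloads. A self-contained illustration (the table and row names here are hypothetical):

```
import { Knex as KnexOriginal } from "knex";

interface DemoRow {
  id: string;
  name: string;
  createdAt: Date;
}
type DemoInsert = Omit<DemoRow, "id" | "createdAt">;
type DemoUpdate = Partial<DemoInsert>;

declare module "knex/types/tables" {
  interface Tables {
    // select() returns DemoRow, insert() expects DemoInsert, update() DemoUpdate
    demo_table: KnexOriginal.CompositeTableType<DemoRow, DemoInsert, DemoUpdate>;
  }
}
```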

View File

@@ -0,0 +1,166 @@
import { Knex } from "knex";

import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";

export async function up(knex: Knex): Promise<void> {
  const hasFolderCommitTable = await knex.schema.hasTable(TableName.FolderCommit);
  if (!hasFolderCommitTable) {
    await knex.schema.createTable(TableName.FolderCommit, (t) => {
      t.uuid("id").primary().defaultTo(knex.fn.uuid());
      t.bigIncrements("commitId");
      t.jsonb("actorMetadata").notNullable();
      t.string("actorType").notNullable();
      t.string("message");
      t.uuid("folderId").notNullable();
      t.uuid("envId").notNullable();
      t.foreign("envId").references("id").inTable(TableName.Environment).onDelete("CASCADE");
      t.timestamps(true, true, true);

      t.index("folderId");
      t.index("envId");
    });
  }

  const hasFolderCommitChangesTable = await knex.schema.hasTable(TableName.FolderCommitChanges);
  if (!hasFolderCommitChangesTable) {
    await knex.schema.createTable(TableName.FolderCommitChanges, (t) => {
      t.uuid("id").primary().defaultTo(knex.fn.uuid());
      t.uuid("folderCommitId").notNullable();
      t.foreign("folderCommitId").references("id").inTable(TableName.FolderCommit).onDelete("CASCADE");
      t.string("changeType").notNullable();
      t.boolean("isUpdate").notNullable().defaultTo(false);
      t.uuid("secretVersionId");
      t.foreign("secretVersionId").references("id").inTable(TableName.SecretVersionV2).onDelete("CASCADE");
      t.uuid("folderVersionId");
      t.foreign("folderVersionId").references("id").inTable(TableName.SecretFolderVersion).onDelete("CASCADE");
      t.timestamps(true, true, true);

      t.index("folderCommitId");
      t.index("secretVersionId");
      t.index("folderVersionId");
    });
  }

  const hasFolderCheckpointTable = await knex.schema.hasTable(TableName.FolderCheckpoint);
  if (!hasFolderCheckpointTable) {
    await knex.schema.createTable(TableName.FolderCheckpoint, (t) => {
      t.uuid("id").primary().defaultTo(knex.fn.uuid());
      t.uuid("folderCommitId").notNullable();
      t.foreign("folderCommitId").references("id").inTable(TableName.FolderCommit).onDelete("CASCADE");
      t.timestamps(true, true, true);

      t.index("folderCommitId");
    });
  }

  const hasFolderCheckpointResourcesTable = await knex.schema.hasTable(TableName.FolderCheckpointResources);
  if (!hasFolderCheckpointResourcesTable) {
    await knex.schema.createTable(TableName.FolderCheckpointResources, (t) => {
      t.uuid("id").primary().defaultTo(knex.fn.uuid());
      t.uuid("folderCheckpointId").notNullable();
      t.foreign("folderCheckpointId").references("id").inTable(TableName.FolderCheckpoint).onDelete("CASCADE");
      t.uuid("secretVersionId");
      t.foreign("secretVersionId").references("id").inTable(TableName.SecretVersionV2).onDelete("CASCADE");
      t.uuid("folderVersionId");
      t.foreign("folderVersionId").references("id").inTable(TableName.SecretFolderVersion).onDelete("CASCADE");
      t.timestamps(true, true, true);

      t.index("folderCheckpointId");
      t.index("secretVersionId");
      t.index("folderVersionId");
    });
  }

  const hasFolderTreeCheckpointTable = await knex.schema.hasTable(TableName.FolderTreeCheckpoint);
  if (!hasFolderTreeCheckpointTable) {
    await knex.schema.createTable(TableName.FolderTreeCheckpoint, (t) => {
      t.uuid("id").primary().defaultTo(knex.fn.uuid());
      t.uuid("folderCommitId").notNullable();
      t.foreign("folderCommitId").references("id").inTable(TableName.FolderCommit).onDelete("CASCADE");
      t.timestamps(true, true, true);

      t.index("folderCommitId");
    });
  }

  const hasFolderTreeCheckpointResourcesTable = await knex.schema.hasTable(TableName.FolderTreeCheckpointResources);
  if (!hasFolderTreeCheckpointResourcesTable) {
    await knex.schema.createTable(TableName.FolderTreeCheckpointResources, (t) => {
      t.uuid("id").primary().defaultTo(knex.fn.uuid());
      t.uuid("folderTreeCheckpointId").notNullable();
      t.foreign("folderTreeCheckpointId").references("id").inTable(TableName.FolderTreeCheckpoint).onDelete("CASCADE");
      t.uuid("folderId").notNullable();
      t.uuid("folderCommitId").notNullable();
      t.foreign("folderCommitId").references("id").inTable(TableName.FolderCommit).onDelete("CASCADE");
      t.timestamps(true, true, true);

      t.index("folderTreeCheckpointId");
      t.index("folderId");
      t.index("folderCommitId");
    });
  }

  if (!hasFolderCommitTable) {
    await createOnUpdateTrigger(knex, TableName.FolderCommit);
  }
  if (!hasFolderCommitChangesTable) {
    await createOnUpdateTrigger(knex, TableName.FolderCommitChanges);
  }
  if (!hasFolderCheckpointTable) {
    await createOnUpdateTrigger(knex, TableName.FolderCheckpoint);
  }
  if (!hasFolderCheckpointResourcesTable) {
    await createOnUpdateTrigger(knex, TableName.FolderCheckpointResources);
  }
  if (!hasFolderTreeCheckpointTable) {
    await createOnUpdateTrigger(knex, TableName.FolderTreeCheckpoint);
  }
  if (!hasFolderTreeCheckpointResourcesTable) {
    await createOnUpdateTrigger(knex, TableName.FolderTreeCheckpointResources);
  }
}

export async function down(knex: Knex): Promise<void> {
  const hasFolderCheckpointResourcesTable = await knex.schema.hasTable(TableName.FolderCheckpointResources);
  const hasFolderTreeCheckpointResourcesTable = await knex.schema.hasTable(TableName.FolderTreeCheckpointResources);
  const hasFolderCommitTable = await knex.schema.hasTable(TableName.FolderCommit);
  const hasFolderCommitChangesTable = await knex.schema.hasTable(TableName.FolderCommitChanges);
  const hasFolderTreeCheckpointTable = await knex.schema.hasTable(TableName.FolderTreeCheckpoint);
  const hasFolderCheckpointTable = await knex.schema.hasTable(TableName.FolderCheckpoint);

  if (hasFolderTreeCheckpointResourcesTable) {
    await dropOnUpdateTrigger(knex, TableName.FolderTreeCheckpointResources);
    await knex.schema.dropTableIfExists(TableName.FolderTreeCheckpointResources);
  }

  if (hasFolderCheckpointResourcesTable) {
    await dropOnUpdateTrigger(knex, TableName.FolderCheckpointResources);
    await knex.schema.dropTableIfExists(TableName.FolderCheckpointResources);
  }

  if (hasFolderTreeCheckpointTable) {
    await dropOnUpdateTrigger(knex, TableName.FolderTreeCheckpoint);
    await knex.schema.dropTableIfExists(TableName.FolderTreeCheckpoint);
  }

  if (hasFolderCheckpointTable) {
    await dropOnUpdateTrigger(knex, TableName.FolderCheckpoint);
    await knex.schema.dropTableIfExists(TableName.FolderCheckpoint);
  }

  if (hasFolderCommitChangesTable) {
    await dropOnUpdateTrigger(knex, TableName.FolderCommitChanges);
    await knex.schema.dropTableIfExists(TableName.FolderCommitChanges);
  }

  if (hasFolderCommitTable) {
    await dropOnUpdateTrigger(knex, TableName.FolderCommit);
    await knex.schema.dropTableIfExists(TableName.FolderCommit);
  }
}
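
To make the relationships concrete, a hypothetical read path over these tables: find the newest checkpoint for a folder, then load the secret and folder versions it captured (a sketch, not code from this PR):

```
import { Knex } from "knex";

const getLatestCheckpointResources = async (knex: Knex, folderId: string) => {
  // Newest checkpoint = the one tied to the highest auto-incrementing commitId
  const checkpoint = await knex("folder_checkpoints")
    .join("folder_commits", "folder_commits.id", "folder_checkpoints.folderCommitId")
    .where("folder_commits.folderId", folderId)
    .orderBy("folder_commits.commitId", "desc")
    .select("folder_checkpoints.id")
    .first();

  if (!checkpoint) return [];

  // Each resource row points at either a secret version or a folder version
  return knex("folder_checkpoint_resources").where("folderCheckpointId", checkpoint.id);
};
```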

View File

@@ -0,0 +1,19 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  if (!(await knex.schema.hasColumn(TableName.SecretFolderVersion, "description"))) {
    await knex.schema.alterTable(TableName.SecretFolderVersion, (t) => {
      t.string("description").nullable();
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  if (await knex.schema.hasColumn(TableName.SecretFolderVersion, "description")) {
    await knex.schema.alterTable(TableName.SecretFolderVersion, (t) => {
      t.dropColumn("description");
    });
  }
}

View File

@@ -0,0 +1,139 @@
/* eslint-disable no-await-in-loop */
import { Knex } from "knex";

import { chunkArray } from "@app/lib/fn";
import { selectAllTableCols } from "@app/lib/knex";
import { logger } from "@app/lib/logger";

import { SecretType, TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  logger.info("Starting secret version fix migration");

  // Get all shared secret IDs first to optimize versions query
  const secretIds = await knex(TableName.SecretV2)
    .where("type", SecretType.Shared)
    .select("id")
    .then((rows) => rows.map((row) => row.id));

  logger.info(`Found ${secretIds.length} shared secrets to process`);

  if (secretIds.length === 0) {
    logger.info("No shared secrets found");
    return;
  }

  const secretIdChunks = chunkArray(secretIds, 5000);

  for (let chunkIndex = 0; chunkIndex < secretIdChunks.length; chunkIndex += 1) {
    const currentSecretIds = secretIdChunks[chunkIndex];
    logger.info(`Processing chunk ${chunkIndex + 1} of ${secretIdChunks.length}`);

    // Get secrets and versions for current chunk
    const [sharedSecrets, allVersions] = await Promise.all([
      knex(TableName.SecretV2).whereIn("id", currentSecretIds).select(selectAllTableCols(TableName.SecretV2)),
      knex(TableName.SecretVersionV2).whereIn("secretId", currentSecretIds).select("secretId", "version")
    ]);

    const versionsBySecretId = new Map<string, number[]>();

    allVersions.forEach((v) => {
      const versions = versionsBySecretId.get(v.secretId);
      if (versions) {
        versions.push(v.version);
      } else {
        versionsBySecretId.set(v.secretId, [v.version]);
      }
    });

    const versionsToAdd = [];
    const secretsToUpdate = [];

    // Process each shared secret
    for (const secret of sharedSecrets) {
      const existingVersions = versionsBySecretId.get(secret.id) || [];

      if (existingVersions.length === 0) {
        // No versions exist - add current version
        versionsToAdd.push({
          secretId: secret.id,
          version: secret.version,
          key: secret.key,
          encryptedValue: secret.encryptedValue,
          encryptedComment: secret.encryptedComment,
          reminderNote: secret.reminderNote,
          reminderRepeatDays: secret.reminderRepeatDays,
          skipMultilineEncoding: secret.skipMultilineEncoding,
          metadata: secret.metadata,
          folderId: secret.folderId,
          actorType: "platform"
        });
      } else {
        const latestVersion = Math.max(...existingVersions);

        if (latestVersion !== secret.version) {
          // Latest version doesn't match - create new version and update secret
          const nextVersion = latestVersion + 1;

          versionsToAdd.push({
            secretId: secret.id,
            version: nextVersion,
            key: secret.key,
            encryptedValue: secret.encryptedValue,
            encryptedComment: secret.encryptedComment,
            reminderNote: secret.reminderNote,
            reminderRepeatDays: secret.reminderRepeatDays,
            skipMultilineEncoding: secret.skipMultilineEncoding,
            metadata: secret.metadata,
            folderId: secret.folderId,
            actorType: "platform"
          });

          secretsToUpdate.push({
            id: secret.id,
            newVersion: nextVersion
          });
        }
      }
    }

    logger.info(
      `Chunk ${chunkIndex + 1}: Adding ${versionsToAdd.length} versions, updating ${secretsToUpdate.length} secrets`
    );

    // Batch insert new versions
    if (versionsToAdd.length > 0) {
      const insertBatches = chunkArray(versionsToAdd, 9000);
      for (let i = 0; i < insertBatches.length; i += 1) {
        await knex.batchInsert(TableName.SecretVersionV2, insertBatches[i]);
      }
    }

    if (secretsToUpdate.length > 0) {
      const updateBatches = chunkArray(secretsToUpdate, 1000);

      for (const updateBatch of updateBatches) {
        const ids = updateBatch.map((u) => u.id);
        const versionCases = updateBatch.map((u) => `WHEN '${u.id}' THEN ${u.newVersion}`).join(" ");

        await knex.raw(
          `
          UPDATE ${TableName.SecretV2}
          SET version = CASE id ${versionCases} END,
          "updatedAt" = NOW()
          WHERE id IN (${ids.map(() => "?").join(",")})
          `,
          ids
        );
      }
    }
  }

  logger.info("Secret version fix migration completed");
}

export async function down(): Promise<void> {
  logger.info("Rollback not implemented for secret version fix migration");
  // Note: Rolling back this migration would be complex and potentially destructive
  // as it would require tracking which version entries were added
}
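
The interesting bit above is the single-statement batch update: one `CASE` expression maps each id to its new version, so a 1000-row batch needs one round trip instead of 1000. An illustration with two hypothetical rows (the table name here is illustrative):

```
const updateBatch = [
  { id: "aaaa-1111", newVersion: 3 },
  { id: "bbbb-2222", newVersion: 7 }
];

// Versions are inlined (numbers we computed ourselves), ids stay parameterized.
const versionCases = updateBatch.map((u) => `WHEN '${u.id}' THEN ${u.newVersion}`).join(" ");
const placeholders = updateBatch.map(() => "?").join(",");

console.log(`UPDATE secrets_v2 SET version = CASE id ${versionCases} END WHERE id IN (${placeholders})`);
// UPDATE secrets_v2 SET version = CASE id WHEN 'aaaa-1111' THEN 3 WHEN 'bbbb-2222' THEN 7 END WHERE id IN (?,?)
```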

View File

@@ -0,0 +1,345 @@
import { Knex } from "knex";

import { chunkArray } from "@app/lib/fn";
import { selectAllTableCols } from "@app/lib/knex";
import { logger } from "@app/lib/logger";
import { ActorType } from "@app/services/auth/auth-type";
import { ChangeType } from "@app/services/folder-commit/folder-commit-service";

import {
  ProjectType,
  SecretType,
  TableName,
  TFolderCheckpoints,
  TFolderCommits,
  TFolderTreeCheckpoints,
  TSecretFolders
} from "../schemas";

const sortFoldersByHierarchy = (folders: TSecretFolders[]) => {
  // Create a map for quick lookup of children by parent ID
  const childrenMap = new Map<string, TSecretFolders[]>();

  // Set of all folder IDs
  const allFolderIds = new Set<string>();

  // Build the set of all folder IDs
  folders.forEach((folder) => {
    if (folder.id) {
      allFolderIds.add(folder.id);
    }
  });

  // Group folders by their parentId
  folders.forEach((folder) => {
    if (folder.parentId) {
      const children = childrenMap.get(folder.parentId) || [];
      children.push(folder);
      childrenMap.set(folder.parentId, children);
    }
  });

  // Find root folders - those with no parentId or with a parentId that doesn't exist
  const rootFolders = folders.filter((folder) => !folder.parentId || !allFolderIds.has(folder.parentId));

  // Process each level of the hierarchy
  const result = [];
  let currentLevel = rootFolders;

  while (currentLevel.length > 0) {
    result.push(...currentLevel);

    const nextLevel = [];
    for (const folder of currentLevel) {
      if (folder.id) {
        const children = childrenMap.get(folder.id) || [];
        nextLevel.push(...children);
      }
    }

    currentLevel = nextLevel;
  }

  return result.reverse();
};

const getSecretsByFolderIds = async (knex: Knex, folderIds: string[]): Promise<Record<string, string[]>> => {
  const secrets = await knex(TableName.SecretV2)
    .whereIn(`${TableName.SecretV2}.folderId`, folderIds)
    .where(`${TableName.SecretV2}.type`, SecretType.Shared)
    .join<TableName.SecretVersionV2>(TableName.SecretVersionV2, (queryBuilder) => {
      void queryBuilder
        .on(`${TableName.SecretVersionV2}.secretId`, `${TableName.SecretV2}.id`)
        .andOn(`${TableName.SecretVersionV2}.version`, `${TableName.SecretV2}.version`);
    })
    .select(selectAllTableCols(TableName.SecretV2))
    .select(knex.ref("id").withSchema(TableName.SecretVersionV2).as("secretVersionId"));

  const secretsMap: Record<string, string[]> = {};
  secrets.forEach((secret) => {
    if (!secretsMap[secret.folderId]) {
      secretsMap[secret.folderId] = [];
    }
    secretsMap[secret.folderId].push(secret.secretVersionId);
  });

  return secretsMap;
};

const getFoldersByParentIds = async (knex: Knex, parentIds: string[]): Promise<Record<string, string[]>> => {
  const folders = await knex(TableName.SecretFolder)
    .whereIn(`${TableName.SecretFolder}.parentId`, parentIds)
    .where(`${TableName.SecretFolder}.isReserved`, false)
    .join<TableName.SecretFolderVersion>(TableName.SecretFolderVersion, (queryBuilder) => {
      void queryBuilder
        .on(`${TableName.SecretFolderVersion}.folderId`, `${TableName.SecretFolder}.id`)
        .andOn(`${TableName.SecretFolderVersion}.version`, `${TableName.SecretFolder}.version`);
    })
    .select(selectAllTableCols(TableName.SecretFolder))
    .select(knex.ref("id").withSchema(TableName.SecretFolderVersion).as("folderVersionId"));

  const foldersMap: Record<string, string[]> = {};
  folders.forEach((folder) => {
    if (!folder.parentId) {
      return;
    }
    if (!foldersMap[folder.parentId]) {
      foldersMap[folder.parentId] = [];
    }
    foldersMap[folder.parentId].push(folder.folderVersionId);
  });

  return foldersMap;
};

export async function up(knex: Knex): Promise<void> {
  logger.info("Initializing folder commits");
  const hasFolderCommitTable = await knex.schema.hasTable(TableName.FolderCommit);
  if (hasFolderCommitTable) {
    // Get Projects to Initialize
    const projects = await knex(TableName.Project)
      .where(`${TableName.Project}.version`, 3)
      .where(`${TableName.Project}.type`, ProjectType.SecretManager)
      .select(selectAllTableCols(TableName.Project));
    logger.info(`Found ${projects.length} projects to initialize`);

    // Process Projects in batches of 100
    const batches = chunkArray(projects, 100);
    let i = 0;
    for (const batch of batches) {
      i += 1;
      logger.info(`Processing project batch ${i} of ${batches.length}`);
      let foldersCommitsList = [];
      const rootFoldersMap: Record<string, string> = {};
      const envRootFoldersMap: Record<string, string> = {};

      // Get All Folders for the Project
      // eslint-disable-next-line no-await-in-loop
      const folders = await knex(TableName.SecretFolder)
        .join(TableName.Environment, `${TableName.SecretFolder}.envId`, `${TableName.Environment}.id`)
        .whereIn(
          `${TableName.Environment}.projectId`,
          batch.map((project) => project.id)
        )
        .where(`${TableName.SecretFolder}.isReserved`, false)
        .select(selectAllTableCols(TableName.SecretFolder));
      logger.info(`Found ${folders.length} folders to initialize in project batch ${i} of ${batches.length}`);

      // Sort Folders by Hierarchy (parents before nested folders)
      const sortedFolders = sortFoldersByHierarchy(folders);

      // eslint-disable-next-line no-await-in-loop
      const folderSecretsMap = await getSecretsByFolderIds(
        knex,
        sortedFolders.map((folder) => folder.id)
      );
      // eslint-disable-next-line no-await-in-loop
      const folderFoldersMap = await getFoldersByParentIds(
        knex,
        sortedFolders.map((folder) => folder.id)
      );

      // Get folder commit changes
      for (const folder of sortedFolders) {
        const subFolderVersionIds = folderFoldersMap[folder.id];
        const secretVersionIds = folderSecretsMap[folder.id];
        const changes = [];
        if (subFolderVersionIds) {
          changes.push(
            ...subFolderVersionIds.map((folderVersionId) => ({
              folderId: folder.id,
              changeType: ChangeType.ADD,
              secretVersionId: undefined,
              folderVersionId,
              isUpdate: false
            }))
          );
        }
        if (secretVersionIds) {
          changes.push(
            ...secretVersionIds.map((secretVersionId) => ({
              folderId: folder.id,
              changeType: ChangeType.ADD,
              secretVersionId,
              folderVersionId: undefined,
              isUpdate: false
            }))
          );
        }
        if (changes.length > 0) {
          const folderCommit = {
            commit: {
              actorMetadata: {},
              actorType: ActorType.PLATFORM,
              message: "Initialized folder",
              folderId: folder.id,
              envId: folder.envId
            },
            changes
          };
          foldersCommitsList.push(folderCommit);
          if (!folder.parentId) {
            rootFoldersMap[folder.id] = folder.envId;
            envRootFoldersMap[folder.envId] = folder.id;
          }
        }
      }
      logger.info(`Retrieved folder changes for project batch ${i} of ${batches.length}`);

      const filteredBrokenProjectFolders: string[] = [];
      foldersCommitsList = foldersCommitsList.filter((folderCommit) => {
        if (!envRootFoldersMap[folderCommit.commit.envId]) {
          filteredBrokenProjectFolders.push(folderCommit.commit.folderId);
          return false;
        }
        return true;
      });
      logger.info(
        `Filtered ${filteredBrokenProjectFolders.length} broken project folders: ${JSON.stringify(filteredBrokenProjectFolders)}`
      );

      // Insert New Commits in batches of 9000
      const newCommits = foldersCommitsList.map((folderCommit) => folderCommit.commit);
      const commitBatches = chunkArray(newCommits, 9000);

      let j = 0;
      for (const commitBatch of commitBatches) {
        j += 1;
        logger.info(`Inserting folder commits - batch ${j} of ${commitBatches.length}`);
        // Create folder commit
        // eslint-disable-next-line no-await-in-loop
        const newCommitsInserted = (await knex
          .batchInsert(TableName.FolderCommit, commitBatch)
          .returning("*")) as TFolderCommits[];

        logger.info(`Finished inserting folder commits - batch ${j} of ${commitBatches.length}`);

        const newCommitsMap: Record<string, string> = {};
        const newCommitsMapInverted: Record<string, string> = {};
        const newCheckpointsMap: Record<string, string> = {};
        newCommitsInserted.forEach((commit) => {
          newCommitsMap[commit.folderId] = commit.id;
          newCommitsMapInverted[commit.id] = commit.folderId;
        });

        // Create folder checkpoints
        // eslint-disable-next-line no-await-in-loop
        const newCheckpoints = (await knex
          .batchInsert(
            TableName.FolderCheckpoint,
            Object.values(newCommitsMap).map((commitId) => ({
              folderCommitId: commitId
            }))
          )
          .returning("*")) as TFolderCheckpoints[];

        logger.info(`Finished inserting folder checkpoints - batch ${j} of ${commitBatches.length}`);

        newCheckpoints.forEach((checkpoint) => {
          newCheckpointsMap[newCommitsMapInverted[checkpoint.folderCommitId]] = checkpoint.id;
        });

        // Create folder commit changes
        // eslint-disable-next-line no-await-in-loop
        await knex.batchInsert(
          TableName.FolderCommitChanges,
          foldersCommitsList
            .map((folderCommit) => folderCommit.changes)
            .flat()
            .map((change) => ({
              folderCommitId: newCommitsMap[change.folderId],
              changeType: change.changeType,
              secretVersionId: change.secretVersionId,
              folderVersionId: change.folderVersionId,
              isUpdate: false
            }))
        );

        logger.info(`Finished inserting folder commit changes - batch ${j} of ${commitBatches.length}`);

        // Create folder checkpoint resources
        // eslint-disable-next-line no-await-in-loop
        await knex.batchInsert(
          TableName.FolderCheckpointResources,
          foldersCommitsList
            .map((folderCommit) => folderCommit.changes)
            .flat()
            .map((change) => ({
              folderCheckpointId: newCheckpointsMap[change.folderId],
              folderVersionId: change.folderVersionId,
              secretVersionId: change.secretVersionId
            }))
        );

        logger.info(`Finished inserting folder checkpoint resources - batch ${j} of ${commitBatches.length}`);

        // Create Folder Tree Checkpoint
        // eslint-disable-next-line no-await-in-loop
        const newTreeCheckpoints = (await knex
          .batchInsert(
            TableName.FolderTreeCheckpoint,
            Object.keys(rootFoldersMap).map((folderId) => ({
              folderCommitId: newCommitsMap[folderId]
            }))
          )
          .returning("*")) as TFolderTreeCheckpoints[];

        logger.info(`Finished inserting folder tree checkpoints - batch ${j} of ${commitBatches.length}`);

        const newTreeCheckpointsMap: Record<string, string> = {};
        newTreeCheckpoints.forEach((checkpoint) => {
          newTreeCheckpointsMap[rootFoldersMap[newCommitsMapInverted[checkpoint.folderCommitId]]] = checkpoint.id;
        });

        // Create Folder Tree Checkpoint Resources
        // eslint-disable-next-line no-await-in-loop
        await knex
          .batchInsert(
            TableName.FolderTreeCheckpointResources,
            newCommitsInserted.map((folderCommit) => ({
              folderTreeCheckpointId: newTreeCheckpointsMap[folderCommit.envId],
              folderId: folderCommit.folderId,
              folderCommitId: folderCommit.id
            }))
          )
          .returning("*");

        logger.info(`Finished inserting folder tree checkpoint resources - batch ${j} of ${commitBatches.length}`);
      }
    }
  }
  logger.info("Folder commits initialized");
}

export async function down(knex: Knex): Promise<void> {
  const hasFolderCommitTable = await knex.schema.hasTable(TableName.FolderCommit);
  if (hasFolderCommitTable) {
    // delete all existing entries
    await knex(TableName.FolderCommit).del();
  }
}
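
Both migrations above lean on `chunkArray` to keep each `batchInsert` well below Postgres's 65,535 bind-parameter ceiling. A minimal standalone equivalent (the real `@app/lib/fn` implementation may differ):

```
const chunkArray = <T>(items: T[], size: number): T[][] => {
  const chunks: T[][] = [];
  for (let i = 0; i < items.length; i += size) {
    chunks.push(items.slice(i, i + size));
  }
  return chunks;
};

console.log(chunkArray([1, 2, 3, 4, 5], 2)); // [[1, 2], [3, 4], [5]]
```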

View File

@@ -0,0 +1,21 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  const hasShowSnapshotsLegacyColumn = await knex.schema.hasColumn(TableName.Project, "showSnapshotsLegacy");
  if (!hasShowSnapshotsLegacyColumn) {
    await knex.schema.table(TableName.Project, (table) => {
      table.boolean("showSnapshotsLegacy").notNullable().defaultTo(false);
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  const hasShowSnapshotsLegacyColumn = await knex.schema.hasColumn(TableName.Project, "showSnapshotsLegacy");
  if (hasShowSnapshotsLegacyColumn) {
    await knex.schema.table(TableName.Project, (table) => {
      table.dropColumn("showSnapshotsLegacy");
    });
  }
}

View File

@@ -3,12 +3,27 @@ import { Knex } from "knex";
import { initializeHsmModule } from "@app/ee/services/hsm/hsm-fns";
import { hsmServiceFactory } from "@app/ee/services/hsm/hsm-service";
import { TKeyStoreFactory } from "@app/keystore/keystore";
import { folderCheckpointDALFactory } from "@app/services/folder-checkpoint/folder-checkpoint-dal";
import { folderCheckpointResourcesDALFactory } from "@app/services/folder-checkpoint-resources/folder-checkpoint-resources-dal";
import { folderCommitDALFactory } from "@app/services/folder-commit/folder-commit-dal";
import { folderCommitServiceFactory } from "@app/services/folder-commit/folder-commit-service";
import { folderCommitChangesDALFactory } from "@app/services/folder-commit-changes/folder-commit-changes-dal";
import { folderTreeCheckpointDALFactory } from "@app/services/folder-tree-checkpoint/folder-tree-checkpoint-dal";
import { folderTreeCheckpointResourcesDALFactory } from "@app/services/folder-tree-checkpoint-resources/folder-tree-checkpoint-resources-dal";
import { identityDALFactory } from "@app/services/identity/identity-dal";
import { internalKmsDALFactory } from "@app/services/kms/internal-kms-dal";
import { kmskeyDALFactory } from "@app/services/kms/kms-key-dal";
import { kmsRootConfigDALFactory } from "@app/services/kms/kms-root-config-dal";
import { kmsServiceFactory } from "@app/services/kms/kms-service";
import { orgDALFactory } from "@app/services/org/org-dal";
import { projectDALFactory } from "@app/services/project/project-dal";
import { resourceMetadataDALFactory } from "@app/services/resource-metadata/resource-metadata-dal";
import { secretFolderDALFactory } from "@app/services/secret-folder/secret-folder-dal";
import { secretFolderVersionDALFactory } from "@app/services/secret-folder/secret-folder-version-dal";
import { secretTagDALFactory } from "@app/services/secret-tag/secret-tag-dal";
import { secretV2BridgeDALFactory } from "@app/services/secret-v2-bridge/secret-v2-bridge-dal";
import { secretVersionV2BridgeDALFactory } from "@app/services/secret-v2-bridge/secret-version-dal";
import { userDALFactory } from "@app/services/user/user-dal";
import { TMigrationEnvConfig } from "./env-config";
@@ -50,3 +65,77 @@ export const getMigrationEncryptionServices = async ({ envConfig, db, keyStore }
  return { kmsService };
};

export const getMigrationPITServices = async ({
  db,
  keyStore,
  envConfig
}: {
  db: Knex;
  keyStore: TKeyStoreFactory;
  envConfig: TMigrationEnvConfig;
}) => {
  const projectDAL = projectDALFactory(db);
  const folderCommitDAL = folderCommitDALFactory(db);
  const folderCommitChangesDAL = folderCommitChangesDALFactory(db);
  const folderCheckpointDAL = folderCheckpointDALFactory(db);
  const folderTreeCheckpointDAL = folderTreeCheckpointDALFactory(db);
  const userDAL = userDALFactory(db);
  const identityDAL = identityDALFactory(db);
  const folderDAL = secretFolderDALFactory(db);
  const folderVersionDAL = secretFolderVersionDALFactory(db);
  const secretVersionV2BridgeDAL = secretVersionV2BridgeDALFactory(db);
  const folderCheckpointResourcesDAL = folderCheckpointResourcesDALFactory(db);
  const secretV2BridgeDAL = secretV2BridgeDALFactory({ db, keyStore });
  const folderTreeCheckpointResourcesDAL = folderTreeCheckpointResourcesDALFactory(db);
  const secretTagDAL = secretTagDALFactory(db);
  const orgDAL = orgDALFactory(db);
  const kmsRootConfigDAL = kmsRootConfigDALFactory(db);
  const kmsDAL = kmskeyDALFactory(db);
  const internalKmsDAL = internalKmsDALFactory(db);
  const resourceMetadataDAL = resourceMetadataDALFactory(db);

  const hsmModule = initializeHsmModule(envConfig);
  hsmModule.initialize();

  const hsmService = hsmServiceFactory({
    hsmModule: hsmModule.getModule(),
    envConfig
  });

  const kmsService = kmsServiceFactory({
    kmsRootConfigDAL,
    keyStore,
    kmsDAL,
    internalKmsDAL,
    orgDAL,
    projectDAL,
    hsmService,
    envConfig
  });

  await hsmService.startService();
  await kmsService.startService();

  const folderCommitService = folderCommitServiceFactory({
    folderCommitDAL,
    folderCommitChangesDAL,
    folderCheckpointDAL,
    folderTreeCheckpointDAL,
    userDAL,
    identityDAL,
    folderDAL,
    folderVersionDAL,
    secretVersionV2BridgeDAL,
    projectDAL,
    folderCheckpointResourcesDAL,
    secretV2BridgeDAL,
    folderTreeCheckpointResourcesDAL,
    kmsService,
    secretTagDAL,
    resourceMetadataDAL
  });

  return { folderCommitService };
};

View File

@@ -0,0 +1,23 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const FolderCheckpointResourcesSchema = z.object({
  id: z.string().uuid(),
  folderCheckpointId: z.string().uuid(),
  secretVersionId: z.string().uuid().nullable().optional(),
  folderVersionId: z.string().uuid().nullable().optional(),
  createdAt: z.date(),
  updatedAt: z.date()
});

export type TFolderCheckpointResources = z.infer<typeof FolderCheckpointResourcesSchema>;
export type TFolderCheckpointResourcesInsert = Omit<z.input<typeof FolderCheckpointResourcesSchema>, TImmutableDBKeys>;
export type TFolderCheckpointResourcesUpdate = Partial<
  Omit<z.input<typeof FolderCheckpointResourcesSchema>, TImmutableDBKeys>
>;

View File

@@ -0,0 +1,19 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const FolderCheckpointsSchema = z.object({
  id: z.string().uuid(),
  folderCommitId: z.string().uuid(),
  createdAt: z.date(),
  updatedAt: z.date()
});

export type TFolderCheckpoints = z.infer<typeof FolderCheckpointsSchema>;
export type TFolderCheckpointsInsert = Omit<z.input<typeof FolderCheckpointsSchema>, TImmutableDBKeys>;
export type TFolderCheckpointsUpdate = Partial<Omit<z.input<typeof FolderCheckpointsSchema>, TImmutableDBKeys>>;

View File

@@ -0,0 +1,23 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const FolderCommitChangesSchema = z.object({
  id: z.string().uuid(),
  folderCommitId: z.string().uuid(),
  changeType: z.string(),
  isUpdate: z.boolean().default(false),
  secretVersionId: z.string().uuid().nullable().optional(),
  folderVersionId: z.string().uuid().nullable().optional(),
  createdAt: z.date(),
  updatedAt: z.date()
});

export type TFolderCommitChanges = z.infer<typeof FolderCommitChangesSchema>;
export type TFolderCommitChangesInsert = Omit<z.input<typeof FolderCommitChangesSchema>, TImmutableDBKeys>;
export type TFolderCommitChangesUpdate = Partial<Omit<z.input<typeof FolderCommitChangesSchema>, TImmutableDBKeys>>;

View File

@@ -0,0 +1,24 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const FolderCommitsSchema = z.object({
  id: z.string().uuid(),
  commitId: z.coerce.bigint(),
  actorMetadata: z.unknown(),
  actorType: z.string(),
  message: z.string().nullable().optional(),
  folderId: z.string().uuid(),
  envId: z.string().uuid(),
  createdAt: z.date(),
  updatedAt: z.date()
});

export type TFolderCommits = z.infer<typeof FolderCommitsSchema>;
export type TFolderCommitsInsert = Omit<z.input<typeof FolderCommitsSchema>, TImmutableDBKeys>;
export type TFolderCommitsUpdate = Partial<Omit<z.input<typeof FolderCommitsSchema>, TImmutableDBKeys>>;

View File

@@ -0,0 +1,26 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const FolderTreeCheckpointResourcesSchema = z.object({
  id: z.string().uuid(),
  folderTreeCheckpointId: z.string().uuid(),
  folderId: z.string().uuid(),
  folderCommitId: z.string().uuid(),
  createdAt: z.date(),
  updatedAt: z.date()
});

export type TFolderTreeCheckpointResources = z.infer<typeof FolderTreeCheckpointResourcesSchema>;
export type TFolderTreeCheckpointResourcesInsert = Omit<
  z.input<typeof FolderTreeCheckpointResourcesSchema>,
  TImmutableDBKeys
>;
export type TFolderTreeCheckpointResourcesUpdate = Partial<
  Omit<z.input<typeof FolderTreeCheckpointResourcesSchema>, TImmutableDBKeys>
>;

View File

@@ -0,0 +1,19 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const FolderTreeCheckpointsSchema = z.object({
  id: z.string().uuid(),
  folderCommitId: z.string().uuid(),
  createdAt: z.date(),
  updatedAt: z.date()
});

export type TFolderTreeCheckpoints = z.infer<typeof FolderTreeCheckpointsSchema>;
export type TFolderTreeCheckpointsInsert = Omit<z.input<typeof FolderTreeCheckpointsSchema>, TImmutableDBKeys>;
export type TFolderTreeCheckpointsUpdate = Partial<Omit<z.input<typeof FolderTreeCheckpointsSchema>, TImmutableDBKeys>>;

View File

@@ -24,6 +24,12 @@ export * from "./dynamic-secrets";
export * from "./external-certificate-authorities";
export * from "./external-group-org-role-mappings";
export * from "./external-kms";
export * from "./folder-checkpoint-resources";
export * from "./folder-checkpoints";
export * from "./folder-commit-changes";
export * from "./folder-commits";
export * from "./folder-tree-checkpoint-resources";
export * from "./folder-tree-checkpoints";
export * from "./gateways";
export * from "./git-app-install-sessions";
export * from "./git-app-org";

View File

@@ -160,6 +160,12 @@ export enum TableName {
  ProjectMicrosoftTeamsConfigs = "project_microsoft_teams_configs",
  SecretReminderRecipients = "secret_reminder_recipients",
  GithubOrgSyncConfig = "github_org_sync_configs",
  FolderCommit = "folder_commits",
  FolderCommitChanges = "folder_commit_changes",
  FolderCheckpoint = "folder_checkpoints",
  FolderCheckpointResources = "folder_checkpoint_resources",
  FolderTreeCheckpoint = "folder_tree_checkpoints",
  FolderTreeCheckpointResources = "folder_tree_checkpoint_resources",
  SecretScanningDataSource = "secret_scanning_data_sources",
  SecretScanningResource = "secret_scanning_resources",
  SecretScanningScan = "secret_scanning_scans",
@@ -167,7 +173,7 @@ export enum TableName {
  SecretScanningConfig = "secret_scanning_configs"
}

-export type TImmutableDBKeys = "id" | "createdAt" | "updatedAt";
+export type TImmutableDBKeys = "id" | "createdAt" | "updatedAt" | "commitId";

export const UserDeviceSchema = z
  .object({
View File

@@ -28,7 +28,8 @@ export const ProjectsSchema = z.object({
  type: z.string(),
  enforceCapitalization: z.boolean().default(false),
  hasDeleteProtection: z.boolean().default(false).nullable().optional(),
-  secretSharing: z.boolean().default(true)
+  secretSharing: z.boolean().default(true),
+  showSnapshotsLegacy: z.boolean().default(false)
});

export type TProjects = z.infer<typeof ProjectsSchema>;

View File

@@ -14,7 +14,8 @@ export const SecretFolderVersionsSchema = z.object({
  createdAt: z.date(),
  updatedAt: z.date(),
  envId: z.string().uuid(),
-  folderId: z.string().uuid()
+  folderId: z.string().uuid(),
+  description: z.string().nullable().optional()
});

export type TSecretFolderVersions = z.infer<typeof SecretFolderVersionsSchema>;

View File

@@ -23,7 +23,10 @@ const validateUsernameTemplateCharacters = characterValidator([
  CharacterType.CloseBrace,
  CharacterType.CloseBracket,
  CharacterType.OpenBracket,
-  CharacterType.Fullstop
+  CharacterType.Fullstop,
+  CharacterType.SingleQuote,
+  CharacterType.Spaces,
+  CharacterType.Pipe
]);

const userTemplateSchema = z
@@ -33,7 +36,7 @@ const userTemplateSchema = z
  .refine((el) => validateUsernameTemplateCharacters(el))
  .refine((el) =>
    isValidHandleBarTemplate(el, {
-      allowedExpressions: (val) => ["randomUsername", "unixTimestamp"].includes(val)
+      allowedExpressions: (val) => ["randomUsername", "unixTimestamp", "identity.name"].includes(val)
    })
  );
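
With the widened character set and the new `identity.name` expression, a dynamic secret username template can now embed the requesting identity, e.g. (values illustrative):

```
// Accepted by the validator after this change: spaces, pipes, single quotes
// and the identity.name handlebars expression are all allowed.
const usernameTemplate = "{{identity.name}}-{{unixTimestamp}}";
// For an identity named "ci-runner" this could render as "ci-runner-1749470000"
```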

View File

@@ -18,6 +18,7 @@ import { registerLdapRouter } from "./ldap-router";
import { registerLicenseRouter } from "./license-router";
import { registerOidcRouter } from "./oidc-router";
import { registerOrgRoleRouter } from "./org-role-router";
import { registerPITRouter } from "./pit-router";
import { registerProjectRoleRouter } from "./project-role-router";
import { registerProjectRouter } from "./project-router";
import { registerRateLimitRouter } from "./rate-limit-router";
@@ -53,6 +54,7 @@ export const registerV1EERoutes = async (server: FastifyZodProvider) => {
    { prefix: "/workspace" }
  );

  await server.register(registerSnapshotRouter, { prefix: "/secret-snapshot" });
  await server.register(registerPITRouter, { prefix: "/pit" });
  await server.register(registerSecretApprovalPolicyRouter, { prefix: "/secret-approvals" });
  await server.register(registerSecretApprovalRequestRouter, {
    prefix: "/secret-approval-requests"

View File

@@ -0,0 +1,416 @@
/* eslint-disable @typescript-eslint/no-base-to-string */
import { z } from "zod";

import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import { removeTrailingSlash } from "@app/lib/fn";
import { readLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { booleanSchema } from "@app/server/routes/sanitizedSchemas";
import { AuthMode } from "@app/services/auth/auth-type";
import { commitChangesResponseSchema, resourceChangeSchema } from "@app/services/folder-commit/folder-commit-schemas";

const commitHistoryItemSchema = z.object({
  id: z.string(),
  folderId: z.string(),
  actorType: z.string(),
  actorMetadata: z.unknown().optional(),
  message: z.string().optional().nullable(),
  commitId: z.string(),
  createdAt: z.string().or(z.date()),
  envId: z.string()
});

const folderStateSchema = z.array(
  z.object({
    type: z.string(),
    id: z.string(),
    versionId: z.string(),
    secretKey: z.string().optional(),
    secretVersion: z.number().optional(),
    folderName: z.string().optional(),
    folderVersion: z.number().optional()
  })
);

export const registerPITRouter = async (server: FastifyZodProvider) => {
  // Get commits count for a folder
  server.route({
    method: "GET",
    url: "/commits/count",
    config: {
      rateLimit: readLimit
    },
    schema: {
      querystring: z.object({
        environment: z.string().trim(),
        path: z.string().trim().default("/").transform(removeTrailingSlash),
        projectId: z.string().trim()
      }),
      response: {
        200: z.object({
          count: z.number(),
          folderId: z.string()
        })
      }
    },
    onRequest: verifyAuth([AuthMode.JWT]),
    handler: async (req) => {
      const result = await server.services.pit.getCommitsCount({
        actor: req.permission?.type,
        actorId: req.permission?.id,
        actorOrgId: req.permission?.orgId,
        actorAuthMethod: req.permission?.authMethod,
        projectId: req.query.projectId,
        environment: req.query.environment,
        path: req.query.path
      });

      await server.services.auditLog.createAuditLog({
        ...req.auditLogInfo,
        projectId: req.query.projectId,
        event: {
          type: EventType.GET_PROJECT_PIT_COMMIT_COUNT,
          metadata: {
            environment: req.query.environment,
            path: req.query.path,
            commitCount: result.count.toString()
          }
        }
      });

      return result;
    }
  });

  // Get all commits for a folder
  server.route({
    method: "GET",
    url: "/commits",
    config: {
      rateLimit: readLimit
    },
    schema: {
      querystring: z.object({
        environment: z.string().trim(),
        path: z.string().trim().default("/").transform(removeTrailingSlash),
        projectId: z.string().trim(),
        offset: z.coerce.number().min(0).default(0),
        limit: z.coerce.number().min(1).max(100).default(20),
        search: z.string().trim().optional(),
        sort: z.enum(["asc", "desc"]).default("desc")
      }),
      response: {
        200: z.object({
          commits: commitHistoryItemSchema.array(),
          total: z.number(),
          hasMore: z.boolean()
        })
      }
    },
    onRequest: verifyAuth([AuthMode.JWT]),
    handler: async (req) => {
      const result = await server.services.pit.getCommitsForFolder({
        actor: req.permission?.type,
        actorId: req.permission?.id,
        actorOrgId: req.permission?.orgId,
        actorAuthMethod: req.permission?.authMethod,
        projectId: req.query.projectId,
        environment: req.query.environment,
        path: req.query.path,
        offset: req.query.offset,
        limit: req.query.limit,
        search: req.query.search,
        sort: req.query.sort
      });

      await server.services.auditLog.createAuditLog({
        ...req.auditLogInfo,
        projectId: req.query.projectId,
        event: {
          type: EventType.GET_PROJECT_PIT_COMMITS,
          metadata: {
            environment: req.query.environment,
            path: req.query.path,
            commitCount: result.commits.length.toString(),
            offset: req.query.offset.toString(),
            limit: req.query.limit.toString(),
            search: req.query.search,
            sort: req.query.sort
          }
        }
      });

      return result;
    }
  });

  // Get commit changes for a specific commit
  server.route({
    method: "GET",
    url: "/commits/:commitId/changes",
    config: {
      rateLimit: readLimit
    },
    schema: {
      params: z.object({
        commitId: z.string().trim()
      }),
      querystring: z.object({
        projectId: z.string().trim()
      }),
      response: {
        200: commitChangesResponseSchema
      }
    },
    onRequest: verifyAuth([AuthMode.JWT]),
    handler: async (req) => {
      const result = await server.services.pit.getCommitChanges({
        actor: req.permission?.type,
        actorId: req.permission?.id,
        actorOrgId: req.permission?.orgId,
        actorAuthMethod: req.permission?.authMethod,
        projectId: req.query.projectId,
        commitId: req.params.commitId
      });

      await server.services.auditLog.createAuditLog({
        ...req.auditLogInfo,
        projectId: req.query.projectId,
        event: {
          type: EventType.GET_PROJECT_PIT_COMMIT_CHANGES,
          metadata: {
            commitId: req.params.commitId,
            changesCount: (result.changes.changes?.length || 0).toString()
          }
        }
      });

      return result;
    }
  });

  // Retrieve rollback changes for a commit
  server.route({
    method: "GET",
    url: "/commits/:commitId/compare",
    config: {
      rateLimit: readLimit
    },
    schema: {
      params: z.object({
        commitId: z.string().trim()
      }),
      querystring: z.object({
        folderId: z.string().trim(),
        environment: z.string().trim(),
        deepRollback: booleanSchema.default(false),
        secretPath: z.string().trim().default("/").transform(removeTrailingSlash),
        projectId: z.string().trim()
      }),
      response: {
        200: z.array(
          z.object({
            folderId: z.string(),
            folderName: z.string(),
            folderPath: z.string().optional(),
            changes: z.array(resourceChangeSchema)
          })
        )
      }
    },
    onRequest: verifyAuth([AuthMode.JWT]),
    handler: async (req) => {
      const result = await server.services.pit.compareCommitChanges({
        actor: req.permission?.type,
        actorId: req.permission?.id,
        actorOrgId: req.permission?.orgId,
        actorAuthMethod: req.permission?.authMethod,
        projectId: req.query.projectId,
        commitId: req.params.commitId,
        folderId: req.query.folderId,
        environment: req.query.environment,
        deepRollback: req.query.deepRollback,
        secretPath: req.query.secretPath
      });

      await server.services.auditLog.createAuditLog({
        ...req.auditLogInfo,
        projectId: req.query.projectId,
        event: {
          type: EventType.PIT_COMPARE_FOLDER_STATES,
          metadata: {
            targetCommitId: req.params.commitId,
            folderId: req.query.folderId,
            deepRollback: req.query.deepRollback,
            diffsCount: result.length.toString(),
            environment: req.query.environment,
            folderPath: req.query.secretPath
          }
        }
      });

      return result;
    }
  });

  // Rollback to a previous commit
  server.route({
    method: "POST",
    url: "/commits/:commitId/rollback",
    config: {
      rateLimit: readLimit
    },
    schema: {
      params: z.object({
        commitId: z.string().trim()
      }),
      body: z.object({
        folderId: z.string().trim(),
        deepRollback: z.boolean().default(false),
        message: z.string().max(256).trim().optional(),
        environment: z.string().trim(),
        projectId: z.string().trim()
      }),
      response: {
        200: z.object({
          success: z.boolean(),
          secretChangesCount: z.number().optional(),
          folderChangesCount: z.number().optional(),
          totalChanges: z.number().optional()
        })
      }
    },
    onRequest: verifyAuth([AuthMode.JWT]),
    handler: async (req) => {
      const result = await server.services.pit.rollbackToCommit({
        actor: req.permission?.type,
        actorId: req.permission?.id,
        actorOrgId: req.permission?.orgId,
        actorAuthMethod: req.permission?.authMethod,
        projectId: req.body.projectId,
        commitId: req.params.commitId,
        folderId: req.body.folderId,
        deepRollback: req.body.deepRollback,
        message: req.body.message,
        environment: req.body.environment
      });

      await server.services.auditLog.createAuditLog({
        ...req.auditLogInfo,
        projectId: req.body.projectId,
        event: {
          type: EventType.PIT_ROLLBACK_COMMIT,
          metadata: {
            targetCommitId: req.params.commitId,
            environment: req.body.environment,
            folderId: req.body.folderId,
            deepRollback: req.body.deepRollback,
            message: req.body.message || "Rollback to previous commit",
            totalChanges: result.totalChanges?.toString() || "0"
          }
        }
});
return result;
}
});
// Revert commit
server.route({
method: "POST",
url: "/commits/:commitId/revert",
config: {
rateLimit: readLimit
},
schema: {
params: z.object({
commitId: z.string().trim()
}),
body: z.object({
projectId: z.string().trim()
}),
response: {
200: z.object({
success: z.boolean(),
message: z.string(),
originalCommitId: z.string(),
revertCommitId: z.string().optional(),
changesReverted: z.number().optional()
})
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const result = await server.services.pit.revertCommit({
actor: req.permission?.type,
actorId: req.permission?.id,
actorOrgId: req.permission?.orgId,
actorAuthMethod: req.permission?.authMethod,
projectId: req.body.projectId,
commitId: req.params.commitId
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId: req.body.projectId,
event: {
type: EventType.PIT_REVERT_COMMIT,
metadata: {
commitId: req.params.commitId,
revertCommitId: result.revertCommitId,
changesReverted: result.changesReverted?.toString()
}
}
});
return result;
}
});
// Folder state at commit
server.route({
method: "GET",
url: "/commits/:commitId",
config: {
rateLimit: readLimit
},
schema: {
params: z.object({
commitId: z.string().trim()
}),
querystring: z.object({
folderId: z.string().trim(),
projectId: z.string().trim()
}),
response: {
200: folderStateSchema
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const result = await server.services.pit.getFolderStateAtCommit({
actor: req.permission?.type,
actorId: req.permission?.id,
actorOrgId: req.permission?.orgId,
actorAuthMethod: req.permission?.authMethod,
projectId: req.query.projectId,
commitId: req.params.commitId
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId: req.query.projectId,
event: {
type: EventType.PIT_GET_FOLDER_STATE,
metadata: {
commitId: req.params.commitId,
folderId: req.query.folderId,
resourceCount: result.length.toString()
}
}
});
return result;
}
});
};
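For orientation, a client call against the new commit-history endpoint might look like the sketch below; the `/api/v1/pit` mount prefix, base URL, and bearer token are assumptions, since the router's registration prefix is not shown in this diff.

```ts
// Hypothetical consumer of the commits endpoint registered above.
const params = new URLSearchParams({
  projectId,
  environment: "dev",
  path: "/",
  limit: "20",
  sort: "desc"
});
const res = await fetch(`https://app.infisical.com/api/v1/pit/commits?${params}`, {
  headers: { Authorization: `Bearer ${jwtToken}` }
});
const { commits, total, hasMore } = (await res.json()) as {
  commits: unknown[];
  total: number;
  hasMore: boolean;
};
```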

View File

@@ -65,9 +65,10 @@ export const registerSnapshotRouter = async (server: FastifyZodProvider) => {
rateLimit: writeLimit
},
schema: {
-      hide: false,
+      hide: true,
+      deprecated: true,
       tags: [ApiDocsTags.Projects],
-      description: "Roll back project secrets to those captured in a secret snapshot version.",
+      description: "(Deprecated) Roll back project secrets to those captured in a secret snapshot version.",
security: [
{
bearerAuth: []
@@ -84,6 +85,10 @@ export const registerSnapshotRouter = async (server: FastifyZodProvider) => {
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.API_KEY, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
+      throw new Error(
+        "This endpoint is deprecated. Please use the new PIT recovery system. More information is available at: https://infisical.com/docs/documentation/platform/pit-recovery."
+      );
const secretSnapshot = await server.services.snapshot.rollbackSnapshot({
actor: req.permission.type,
actorId: req.permission.id,

View File

@@ -44,6 +44,7 @@ import {
TSecretSyncRaw,
TUpdateSecretSyncDTO
} from "@app/services/secret-sync/secret-sync-types";
import { TWebhookPayloads } from "@app/services/webhook/webhook-types";
import { WorkflowIntegration } from "@app/services/workflow-integration/workflow-integration-types";
import { KmipPermission } from "../kmip/kmip-enum";
@@ -206,6 +207,7 @@ export enum EventType {
CREATE_WEBHOOK = "create-webhook",
UPDATE_WEBHOOK_STATUS = "update-webhook-status",
DELETE_WEBHOOK = "delete-webhook",
WEBHOOK_TRIGGERED = "webhook-triggered",
GET_SECRET_IMPORTS = "get-secret-imports",
GET_SECRET_IMPORT = "get-secret-import",
CREATE_SECRET_IMPORT = "create-secret-import",
@@ -393,6 +395,13 @@ export enum EventType {
PROJECT_ASSUME_PRIVILEGE_SESSION_START = "project-assume-privileges-session-start",
PROJECT_ASSUME_PRIVILEGE_SESSION_END = "project-assume-privileges-session-end",
GET_PROJECT_PIT_COMMITS = "get-project-pit-commits",
GET_PROJECT_PIT_COMMIT_CHANGES = "get-project-pit-commit-changes",
GET_PROJECT_PIT_COMMIT_COUNT = "get-project-pit-commit-count",
PIT_ROLLBACK_COMMIT = "pit-rollback-commit",
PIT_REVERT_COMMIT = "pit-revert-commit",
PIT_GET_FOLDER_STATE = "pit-get-folder-state",
PIT_COMPARE_FOLDER_STATES = "pit-compare-folder-states",
SECRET_SCANNING_DATA_SOURCE_LIST = "secret-scanning-data-source-list",
SECRET_SCANNING_DATA_SOURCE_CREATE = "secret-scanning-data-source-create",
SECRET_SCANNING_DATA_SOURCE_UPDATE = "secret-scanning-data-source-update",
@@ -1440,6 +1449,14 @@ interface DeleteWebhookEvent {
};
}
export interface WebhookTriggeredEvent {
type: EventType.WEBHOOK_TRIGGERED;
metadata: {
webhookId: string;
status: string;
} & TWebhookPayloads;
}
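A minimal sketch of how a service could emit the new event; the spread payload is assumed to satisfy TWebhookPayloads, whose exact shape lives in webhook-types and is not shown here.

```ts
// Illustrative only: emitting a WEBHOOK_TRIGGERED audit log entry.
await auditLogService.createAuditLog({
  projectId: webhook.projectId,
  event: {
    type: EventType.WEBHOOK_TRIGGERED,
    metadata: {
      webhookId: webhook.id,
      status: "success",
      ...webhookPayload // assumed to satisfy TWebhookPayloads
    }
  }
});
```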
interface GetSecretImportsEvent {
type: EventType.GET_SECRET_IMPORTS;
metadata: {
@@ -2979,6 +2996,78 @@ interface MicrosoftTeamsWorkflowIntegrationUpdateEvent {
};
}
interface GetProjectPitCommitsEvent {
type: EventType.GET_PROJECT_PIT_COMMITS;
metadata: {
commitCount: string;
environment: string;
path: string;
offset: string;
limit: string;
search?: string;
sort: string;
};
}
interface GetProjectPitCommitChangesEvent {
type: EventType.GET_PROJECT_PIT_COMMIT_CHANGES;
metadata: {
changesCount: string;
commitId: string;
};
}
interface GetProjectPitCommitCountEvent {
type: EventType.GET_PROJECT_PIT_COMMIT_COUNT;
metadata: {
environment: string;
path: string;
commitCount: string;
};
}
interface PitRollbackCommitEvent {
type: EventType.PIT_ROLLBACK_COMMIT;
metadata: {
targetCommitId: string;
folderId: string;
deepRollback: boolean;
message: string;
totalChanges: string;
environment: string;
};
}
interface PitRevertCommitEvent {
type: EventType.PIT_REVERT_COMMIT;
metadata: {
commitId: string;
revertCommitId?: string;
changesReverted?: string;
};
}
interface PitGetFolderStateEvent {
type: EventType.PIT_GET_FOLDER_STATE;
metadata: {
commitId: string;
folderId: string;
resourceCount: string;
};
}
interface PitCompareFolderStatesEvent {
type: EventType.PIT_COMPARE_FOLDER_STATES;
metadata: {
targetCommitId: string;
folderId: string;
deepRollback: boolean;
diffsCount: string;
environment: string;
folderPath: string;
};
}
interface SecretScanningDataSourceListEvent {
type: EventType.SECRET_SCANNING_DATA_SOURCE_LIST;
metadata: {
@@ -3221,6 +3310,7 @@ export type Event =
| CreateWebhookEvent
| UpdateWebhookStatusEvent
| DeleteWebhookEvent
| WebhookTriggeredEvent
| GetSecretImportsEvent
| GetSecretImportEvent
| CreateSecretImportEvent
@@ -3397,6 +3487,13 @@ export type Event =
| MicrosoftTeamsWorkflowIntegrationGetEvent
| MicrosoftTeamsWorkflowIntegrationListEvent
| MicrosoftTeamsWorkflowIntegrationUpdateEvent
| GetProjectPitCommitsEvent
| GetProjectPitCommitChangesEvent
| PitRollbackCommitEvent
| GetProjectPitCommitCountEvent
| PitRevertCommitEvent
| PitCompareFolderStatesEvent
| PitGetFolderStateEvent
| SecretScanningDataSourceListEvent
| SecretScanningDataSourceGetEvent
| SecretScanningDataSourceCreateEvent

View File

@@ -99,7 +99,9 @@ export const dynamicSecretLeaseQueueServiceFactory = ({
secretManagerDecryptor({ cipherTextBlob: dynamicSecretCfg.encryptedInput }).toString()
) as object;
-        await selectedProvider.revoke(decryptedStoredInput, dynamicSecretLease.externalEntityId);
+        await selectedProvider.revoke(decryptedStoredInput, dynamicSecretLease.externalEntityId, {
+          projectId: folder.projectId
+        });
await dynamicSecretLeaseDAL.deleteById(dynamicSecretLease.id);
return;
}
@@ -133,7 +135,9 @@ export const dynamicSecretLeaseQueueServiceFactory = ({
await Promise.all(dynamicSecretLeases.map(({ id }) => unsetLeaseRevocation(id)));
await Promise.all(
dynamicSecretLeases.map(({ externalEntityId }) =>
-          selectedProvider.revoke(decryptedStoredInput, externalEntityId)
+          selectedProvider.revoke(decryptedStoredInput, externalEntityId, {
+            projectId: folder.projectId
+          })
)
);
}

View File

@@ -1,4 +1,5 @@
import { ForbiddenError, subject } from "@casl/ability";
import RE2 from "re2";
import { ActionProjectType } from "@app/db/schemas";
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
@@ -11,10 +12,13 @@ import { getConfig } from "@app/lib/config/env";
import { BadRequestError, NotFoundError } from "@app/lib/errors";
import { logger } from "@app/lib/logger";
import { ms } from "@app/lib/ms";
import { ActorType } from "@app/services/auth/auth-type";
import { TIdentityDALFactory } from "@app/services/identity/identity-dal";
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
import { KmsDataKey } from "@app/services/kms/kms-types";
import { TProjectDALFactory } from "@app/services/project/project-dal";
import { TSecretFolderDALFactory } from "@app/services/secret-folder/secret-folder-dal";
import { TUserDALFactory } from "@app/services/user/user-dal";
import { TDynamicSecretDALFactory } from "../dynamic-secret/dynamic-secret-dal";
import { DynamicSecretProviders, TDynamicProviderFns } from "../dynamic-secret/providers/models";
@@ -39,6 +43,8 @@ type TDynamicSecretLeaseServiceFactoryDep = {
permissionService: Pick<TPermissionServiceFactory, "getProjectPermission">;
projectDAL: Pick<TProjectDALFactory, "findProjectBySlug">;
kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">;
userDAL: Pick<TUserDALFactory, "findById">;
identityDAL: TIdentityDALFactory;
};
export type TDynamicSecretLeaseServiceFactory = ReturnType<typeof dynamicSecretLeaseServiceFactory>;
@@ -52,8 +58,16 @@ export const dynamicSecretLeaseServiceFactory = ({
dynamicSecretQueueService,
projectDAL,
licenseService,
-  kmsService
+  kmsService,
+  userDAL,
+  identityDAL
}: TDynamicSecretLeaseServiceFactoryDep) => {
const extractEmailUsername = (email: string) => {
const regex = new RE2(/^([^@]+)/);
const match = email.match(regex);
return match ? match[1] : email;
};
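The RE2 pattern captures everything before the first "@" and falls back to the raw input when nothing matches (for example, a string that starts with "@"):

```ts
extractEmailUsername("jane.doe@example.com"); // "jane.doe"
extractEmailUsername("@ops-team"); // "@ops-team" (no match, fallback returns the input)
```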
const create = async ({
environmentSlug,
path,
@@ -132,10 +146,24 @@ export const dynamicSecretLeaseServiceFactory = ({
let result;
try {
const identity: { name: string } = { name: "" };
if (actor === ActorType.USER) {
const user = await userDAL.findById(actorId);
if (user) {
identity.name = extractEmailUsername(user.username);
}
} else if (actor === ActorType.Machine) {
const machineIdentity = await identityDAL.findById(actorId);
if (machineIdentity) {
identity.name = machineIdentity.name;
}
}
result = await selectedProvider.create({
inputs: decryptedStoredInput,
expireAt: expireAt.getTime(),
-        usernameTemplate: dynamicSecretCfg.usernameTemplate
+        usernameTemplate: dynamicSecretCfg.usernameTemplate,
+        identity,
+        metadata: { projectId }
});
} catch (error: unknown) {
if (error && typeof error === "object" && error !== null && "sqlMessage" in error) {
@@ -237,7 +265,8 @@ export const dynamicSecretLeaseServiceFactory = ({
const { entityId } = await selectedProvider.renew(
decryptedStoredInput,
dynamicSecretLease.externalEntityId,
-        expireAt.getTime()
+        expireAt.getTime(),
+        { projectId }
);
await dynamicSecretQueueService.unsetLeaseRevocation(dynamicSecretLease.id);
@@ -313,7 +342,7 @@ export const dynamicSecretLeaseServiceFactory = ({
) as object;
const revokeResponse = await selectedProvider
-      .revoke(decryptedStoredInput, dynamicSecretLease.externalEntityId)
+      .revoke(decryptedStoredInput, dynamicSecretLease.externalEntityId, { projectId })
.catch(async (err) => {
        // only propagate this error if forced is false
if (!isForced) return { error: err as Error };

View File

@@ -116,7 +116,7 @@ export const dynamicSecretServiceFactory = ({
throw new BadRequestError({ message: "Provided dynamic secret already exist under the folder" });
const selectedProvider = dynamicSecretProviders[provider.type];
-    const inputs = await selectedProvider.validateProviderInputs(provider.inputs);
+    const inputs = await selectedProvider.validateProviderInputs(provider.inputs, { projectId });
let selectedGatewayId: string | null = null;
if (inputs && typeof inputs === "object" && "gatewayId" in inputs && inputs.gatewayId) {
@@ -146,7 +146,7 @@ export const dynamicSecretServiceFactory = ({
selectedGatewayId = gateway.id;
}
-    const isConnected = await selectedProvider.validateConnection(provider.inputs);
+    const isConnected = await selectedProvider.validateConnection(provider.inputs, { projectId });
if (!isConnected) throw new BadRequestError({ message: "Provider connection failed" });
const { encryptor: secretManagerEncryptor } = await kmsService.createCipherPairWithDataKey({
@@ -272,7 +272,7 @@ export const dynamicSecretServiceFactory = ({
secretManagerDecryptor({ cipherTextBlob: dynamicSecretCfg.encryptedInput }).toString()
) as object;
const newInput = { ...decryptedStoredInput, ...(inputs || {}) };
-    const updatedInput = await selectedProvider.validateProviderInputs(newInput);
+    const updatedInput = await selectedProvider.validateProviderInputs(newInput, { projectId });
let selectedGatewayId: string | null = null;
if (updatedInput && typeof updatedInput === "object" && "gatewayId" in updatedInput && updatedInput?.gatewayId) {
@@ -301,7 +301,7 @@ export const dynamicSecretServiceFactory = ({
selectedGatewayId = gateway.id;
}
-    const isConnected = await selectedProvider.validateConnection(newInput);
+    const isConnected = await selectedProvider.validateConnection(newInput, { projectId });
if (!isConnected) throw new BadRequestError({ message: "Provider connection failed" });
const updatedDynamicCfg = await dynamicSecretDAL.transaction(async (tx) => {
@@ -472,7 +472,9 @@ export const dynamicSecretServiceFactory = ({
secretManagerDecryptor({ cipherTextBlob: dynamicSecretCfg.encryptedInput }).toString()
) as object;
const selectedProvider = dynamicSecretProviders[dynamicSecretCfg.type as DynamicSecretProviders];
-    const providerInputs = (await selectedProvider.validateProviderInputs(decryptedStoredInput)) as object;
+    const providerInputs = (await selectedProvider.validateProviderInputs(decryptedStoredInput, {
+      projectId
+    })) as object;
return { ...dynamicSecretCfg, inputs: providerInputs };
};

View File

@@ -16,6 +16,7 @@ import { BadRequestError } from "@app/lib/errors";
import { validateHandlebarTemplate } from "@app/lib/template/validate-handlebars";
import { DynamicSecretAwsElastiCacheSchema, TDynamicProviderFns } from "./models";
import { compileUsernameTemplate } from "./templateUtils";
const CreateElastiCacheUserSchema = z.object({
UserId: z.string().trim().min(1),
@@ -132,14 +133,14 @@ const generatePassword = () => {
return customAlphabet(charset, 64)();
};
-const generateUsername = (usernameTemplate?: string | null) => {
+const generateUsername = (usernameTemplate?: string | null, identity?: { name: string }) => {
   const charset = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-";
   const randomUsername = `inf-${customAlphabet(charset, 32)()}`;
   if (!usernameTemplate) return randomUsername;
-  return handlebars.compile(usernameTemplate)({
+  return compileUsernameTemplate({
+    usernameTemplate,
     randomUsername,
-    unixTimestamp: Math.floor(Date.now() / 100)
+    identity
   });
});
};
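compileUsernameTemplate lives in ./templateUtils and its body is not part of this diff. As a rough sketch, assuming it wraps handlebars and keeps the previous template variables while adding the identity name, it might look like this (illustrative, not the actual implementation):

```ts
import handlebars from "handlebars";

// Hypothetical shape of the shared helper the providers now delegate to.
export const compileUsernameTemplate = ({
  usernameTemplate,
  randomUsername,
  identity
}: {
  usernameTemplate: string;
  randomUsername: string;
  identity?: { name: string };
}): string =>
  handlebars.compile(usernameTemplate)({
    randomUsername,
    unixTimestamp: Math.floor(Date.now() / 1000), // seconds since epoch
    identity
  });
```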
@@ -174,14 +175,21 @@ export const AwsElastiCacheDatabaseProvider = (): TDynamicProviderFns => {
return true;
};
-  const create = async (data: { inputs: unknown; expireAt: number; usernameTemplate?: string | null }) => {
-    const { inputs, expireAt, usernameTemplate } = data;
+  const create = async (data: {
+    inputs: unknown;
+    expireAt: number;
+    usernameTemplate?: string | null;
+    identity?: {
+      name: string;
+    };
+  }) => {
+    const { inputs, expireAt, usernameTemplate, identity } = data;
     const providerInputs = await validateProviderInputs(inputs);
     if (!(await validateConnection(providerInputs))) {
       throw new BadRequestError({ message: "Failed to establish connection" });
     }
-    const leaseUsername = generateUsername(usernameTemplate);
+    const leaseUsername = generateUsername(usernameTemplate, identity);
const leasePassword = generatePassword();
const leaseExpiration = new Date(expireAt).toISOString();

View File

@@ -16,21 +16,25 @@ import {
PutUserPolicyCommand,
RemoveUserFromGroupCommand
} from "@aws-sdk/client-iam";
-import handlebars from "handlebars";
+import { AssumeRoleCommand, STSClient } from "@aws-sdk/client-sts";
+import { randomUUID } from "crypto";
import { z } from "zod";
import { getConfig } from "@app/lib/config/env";
import { BadRequestError } from "@app/lib/errors";
import { alphaNumericNanoId } from "@app/lib/nanoid";
-import { DynamicSecretAwsIamSchema, TDynamicProviderFns } from "./models";
+import { AwsIamAuthType, DynamicSecretAwsIamSchema, TDynamicProviderFns } from "./models";
+import { compileUsernameTemplate } from "./templateUtils";
-const generateUsername = (usernameTemplate?: string | null) => {
+const generateUsername = (usernameTemplate?: string | null, identity?: { name: string }) => {
   const randomUsername = alphaNumericNanoId(32);
   if (!usernameTemplate) return randomUsername;
-  return handlebars.compile(usernameTemplate)({
+  return compileUsernameTemplate({
+    usernameTemplate,
     randomUsername,
-    unixTimestamp: Math.floor(Date.now() / 100)
+    identity
   });
});
};
@@ -40,7 +44,43 @@ export const AwsIamProvider = (): TDynamicProviderFns => {
return providerInputs;
};
-  const $getClient = async (providerInputs: z.infer<typeof DynamicSecretAwsIamSchema>) => {
+  const $getClient = async (providerInputs: z.infer<typeof DynamicSecretAwsIamSchema>, projectId: string) => {
const appCfg = getConfig();
if (providerInputs.method === AwsIamAuthType.AssumeRole) {
const stsClient = new STSClient({
region: providerInputs.region,
credentials:
appCfg.DYNAMIC_SECRET_AWS_ACCESS_KEY_ID && appCfg.DYNAMIC_SECRET_AWS_SECRET_ACCESS_KEY
? {
accessKeyId: appCfg.DYNAMIC_SECRET_AWS_ACCESS_KEY_ID,
secretAccessKey: appCfg.DYNAMIC_SECRET_AWS_SECRET_ACCESS_KEY
}
: undefined // if hosting on AWS
});
const command = new AssumeRoleCommand({
RoleArn: providerInputs.roleArn,
RoleSessionName: `infisical-dynamic-secret-${randomUUID()}`,
DurationSeconds: 900, // 15 mins
ExternalId: projectId
});
const assumeRes = await stsClient.send(command);
if (!assumeRes.Credentials?.AccessKeyId || !assumeRes.Credentials?.SecretAccessKey) {
throw new BadRequestError({ message: "Failed to assume role - verify credentials and role configuration" });
}
const client = new IAMClient({
region: providerInputs.region,
credentials: {
accessKeyId: assumeRes.Credentials?.AccessKeyId,
secretAccessKey: assumeRes.Credentials?.SecretAccessKey,
sessionToken: assumeRes.Credentials?.SessionToken
}
});
return client;
}
const client = new IAMClient({
region: providerInputs.region,
credentials: {
@@ -52,21 +92,41 @@ export const AwsIamProvider = (): TDynamicProviderFns => {
return client;
};
-  const validateConnection = async (inputs: unknown) => {
+  const validateConnection = async (inputs: unknown, { projectId }: { projectId: string }) => {
     const providerInputs = await validateProviderInputs(inputs);
-    const client = await $getClient(providerInputs);
-    const isConnected = await client.send(new GetUserCommand({})).then(() => true);
+    const client = await $getClient(providerInputs, projectId);
+    const isConnected = await client
+      .send(new GetUserCommand({}))
+      .then(() => true)
+      .catch((err) => {
+        const message = (err as Error)?.message;
+        if (
+          providerInputs.method === AwsIamAuthType.AssumeRole &&
+          // assume role will throw an error asking to provide a username, but if it does, the assumed role has valid IAM access
+          message.includes("Must specify userName when calling with non-User credentials")
+        ) {
+          return true;
+        }
+        throw err;
+      });
return isConnected;
};
-  const create = async (data: { inputs: unknown; expireAt: number; usernameTemplate?: string | null }) => {
-    const { inputs, usernameTemplate } = data;
+  const create = async (data: {
+    inputs: unknown;
+    expireAt: number;
+    usernameTemplate?: string | null;
+    identity?: {
+      name: string;
+    };
+    metadata: { projectId: string };
+  }) => {
+    const { inputs, usernameTemplate, metadata, identity } = data;
     const providerInputs = await validateProviderInputs(inputs);
-    const client = await $getClient(providerInputs);
+    const client = await $getClient(providerInputs, metadata.projectId);
-    const username = generateUsername(usernameTemplate);
+    const username = generateUsername(usernameTemplate, identity);
const { policyArns, userGroups, policyDocument, awsPath, permissionBoundaryPolicyArn } = providerInputs;
const createUserRes = await client.send(
new CreateUserCommand({
@@ -76,6 +136,7 @@ export const AwsIamProvider = (): TDynamicProviderFns => {
UserName: username
})
);
if (!createUserRes.User) throw new BadRequestError({ message: "Failed to create AWS IAM User" });
if (userGroups) {
await Promise.all(
@@ -125,9 +186,9 @@ export const AwsIamProvider = (): TDynamicProviderFns => {
};
};
-  const revoke = async (inputs: unknown, entityId: string) => {
+  const revoke = async (inputs: unknown, entityId: string, metadata: { projectId: string }) => {
     const providerInputs = await validateProviderInputs(inputs);
-    const client = await $getClient(providerInputs);
+    const client = await $getClient(providerInputs, metadata.projectId);
const username = entityId;
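Since the AssumeRole flow sends the project ID as ExternalId, the assumed role's trust policy must allow that external ID; this is the standard guard against the confused-deputy problem. A sketch of such a trust policy, with placeholder account and project IDs:

```ts
// Illustrative trust policy for the role named in providerInputs.roleArn.
const exampleTrustPolicy = {
  Version: "2012-10-17",
  Statement: [
    {
      Effect: "Allow",
      Principal: { AWS: "arn:aws:iam::<infisical-account-id>:root" },
      Action: "sts:AssumeRole",
      Condition: { StringEquals: { "sts:ExternalId": "<infisical-project-id>" } }
    }
  ]
};
```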

View File

@@ -8,19 +8,20 @@ import { validateHandlebarTemplate } from "@app/lib/template/validate-handlebars
import { verifyHostInputValidity } from "../dynamic-secret-fns";
import { DynamicSecretCassandraSchema, TDynamicProviderFns } from "./models";
import { compileUsernameTemplate } from "./templateUtils";
const generatePassword = (size = 48) => {
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*";
return customAlphabet(charset, 48)(size);
};
-const generateUsername = (usernameTemplate?: string | null) => {
+const generateUsername = (usernameTemplate?: string | null, identity?: { name: string }) => {
   const randomUsername = alphaNumericNanoId(32); // Username must start with an ascii letter, so we prepend the username with "inf-"
   if (!usernameTemplate) return randomUsername;
-  return handlebars.compile(usernameTemplate)({
+  return compileUsernameTemplate({
+    usernameTemplate,
     randomUsername,
-    unixTimestamp: Math.floor(Date.now() / 100)
+    identity
   });
});
};
@@ -75,12 +76,17 @@ export const CassandraProvider = (): TDynamicProviderFns => {
return isConnected;
};
-  const create = async (data: { inputs: unknown; expireAt: number; usernameTemplate?: string | null }) => {
-    const { inputs, expireAt, usernameTemplate } = data;
+  const create = async (data: {
+    inputs: unknown;
+    expireAt: number;
+    usernameTemplate?: string | null;
+    identity?: { name: string };
+  }) => {
+    const { inputs, expireAt, usernameTemplate, identity } = data;
     const providerInputs = await validateProviderInputs(inputs);
     const client = await $getClient(providerInputs);
-    const username = generateUsername(usernameTemplate);
+    const username = generateUsername(usernameTemplate, identity);
const password = generatePassword();
const { keyspace } = providerInputs;
const expiration = new Date(expireAt).toISOString();

View File

@@ -1,5 +1,4 @@
import { Client as ElasticSearchClient } from "@elastic/elasticsearch";
-import handlebars from "handlebars";
import { customAlphabet } from "nanoid";
import { z } from "zod";
@@ -7,19 +6,20 @@ import { alphaNumericNanoId } from "@app/lib/nanoid";
import { verifyHostInputValidity } from "../dynamic-secret-fns";
import { DynamicSecretElasticSearchSchema, ElasticSearchAuthTypes, TDynamicProviderFns } from "./models";
import { compileUsernameTemplate } from "./templateUtils";
const generatePassword = () => {
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*";
return customAlphabet(charset, 64)();
};
-const generateUsername = (usernameTemplate?: string | null) => {
+const generateUsername = (usernameTemplate?: string | null, identity?: { name: string }) => {
   const randomUsername = alphaNumericNanoId(32); // Username must start with an ascii letter, so we prepend the username with "inf-"
   if (!usernameTemplate) return randomUsername;
-  return handlebars.compile(usernameTemplate)({
+  return compileUsernameTemplate({
+    usernameTemplate,
     randomUsername,
-    unixTimestamp: Math.floor(Date.now() / 100)
+    identity
   });
});
};
@@ -71,12 +71,12 @@ export const ElasticSearchProvider = (): TDynamicProviderFns => {
return infoResponse;
};
-  const create = async (data: { inputs: unknown; usernameTemplate?: string | null }) => {
-    const { inputs, usernameTemplate } = data;
+  const create = async (data: { inputs: unknown; usernameTemplate?: string | null; identity?: { name: string } }) => {
+    const { inputs, usernameTemplate, identity } = data;
     const providerInputs = await validateProviderInputs(inputs);
     const connection = await $getClient(providerInputs);
-    const username = generateUsername(usernameTemplate);
+    const username = generateUsername(usernameTemplate, identity);
const password = generatePassword();
await connection.security.putUser({

View File

@@ -1,13 +1,21 @@
import axios from "axios";
import handlebars from "handlebars";
import https from "https";
import { InternalServerError } from "@app/lib/errors";
-import { GatewayProxyProtocol, withGatewayProxy } from "@app/lib/gateway";
+import { GatewayHttpProxyActions, GatewayProxyProtocol, withGatewayProxy } from "@app/lib/gateway";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { blockLocalAndPrivateIpAddresses } from "@app/lib/validator";
import { TKubernetesTokenRequest } from "@app/services/identity-kubernetes-auth/identity-kubernetes-auth-types";
import { TGatewayServiceFactory } from "../../gateway/gateway-service";
-import { DynamicSecretKubernetesSchema, TDynamicProviderFns } from "./models";
+import {
+  DynamicSecretKubernetesSchema,
+  KubernetesAuthMethod,
+  KubernetesCredentialType,
+  KubernetesRoleType,
+  TDynamicProviderFns
+} from "./models";
const EXTERNAL_REQUEST_TIMEOUT = 10 * 1000;
@@ -15,6 +23,16 @@ type TKubernetesProviderDTO = {
gatewayService: Pick<TGatewayServiceFactory, "fnGetGatewayClientTlsByGatewayId">;
};
const generateUsername = (usernameTemplate?: string | null) => {
const randomUsername = `dynamic-secret-sa-${alphaNumericNanoId(10).toLowerCase()}`;
if (!usernameTemplate) return randomUsername;
return handlebars.compile(usernameTemplate)({
randomUsername,
    unixTimestamp: Math.floor(Date.now() / 1000) // seconds since epoch
});
};
export const KubernetesProvider = ({ gatewayService }: TKubernetesProviderDTO): TDynamicProviderFns => {
const validateProviderInputs = async (inputs: unknown) => {
const providerInputs = await DynamicSecretKubernetesSchema.parseAsync(inputs);
@@ -30,20 +48,27 @@ export const KubernetesProvider = ({ gatewayService }: TKubernetesProviderDTO):
gatewayId: string;
targetHost: string;
targetPort: number;
+      caCert?: string;
+      reviewTokenThroughGateway: boolean;
+      enableSsl: boolean;
     },
-    gatewayCallback: (host: string, port: number) => Promise<T>
+    gatewayCallback: (host: string, port: number, httpsAgent?: https.Agent) => Promise<T>
): Promise<T> => {
const relayDetails = await gatewayService.fnGetGatewayClientTlsByGatewayId(inputs.gatewayId);
const [relayHost, relayPort] = relayDetails.relayAddress.split(":");
const callbackResult = await withGatewayProxy(
async (port) => {
async (port, httpsAgent) => {
// Needs to be https protocol or the kubernetes API server will fail with "Client sent an HTTP request to an HTTPS server"
const res = await gatewayCallback("https://localhost", port);
const res = await gatewayCallback(
inputs.reviewTokenThroughGateway ? "http://localhost" : "https://localhost",
port,
httpsAgent
);
return res;
},
{
-        protocol: GatewayProxyProtocol.Tcp,
+        protocol: inputs.reviewTokenThroughGateway ? GatewayProxyProtocol.Http : GatewayProxyProtocol.Tcp,
targetHost: inputs.targetHost,
targetPort: inputs.targetPort,
relayHost,
@@ -54,7 +79,12 @@ export const KubernetesProvider = ({ gatewayService }: TKubernetesProviderDTO):
ca: relayDetails.certChain,
cert: relayDetails.certificate,
key: relayDetails.privateKey.toString()
-        }
+        },
+        // we always pass this, because it's needed for both the tcp and http protocols
+        httpsAgent: new https.Agent({
+          ca: inputs.caCert,
+          rejectUnauthorized: inputs.enableSsl
+        })
}
);
@@ -64,7 +94,151 @@ export const KubernetesProvider = ({ gatewayService }: TKubernetesProviderDTO):
const validateConnection = async (inputs: unknown) => {
const providerInputs = await validateProviderInputs(inputs);
-    const serviceAccountGetCallback = async (host: string, port: number) => {
+    const serviceAccountDynamicCallback = async (host: string, port: number, httpsAgent?: https.Agent) => {
if (providerInputs.credentialType !== KubernetesCredentialType.Dynamic) {
throw new Error("invalid callback");
}
const baseUrl = port ? `${host}:${port}` : host;
const serviceAccountName = generateUsername();
const roleBindingName = `${serviceAccountName}-role-binding`;
// 1. Create a test service account
await axios.post(
`${baseUrl}/api/v1/namespaces/${providerInputs.namespace}/serviceaccounts`,
{
metadata: {
name: serviceAccountName,
namespace: providerInputs.namespace
}
},
{
headers: {
"Content-Type": "application/json",
...(providerInputs.authMethod === KubernetesAuthMethod.Gateway
? { "x-infisical-action": GatewayHttpProxyActions.InjectGatewayK8sServiceAccountToken }
: { Authorization: `Bearer ${providerInputs.clusterToken}` })
},
signal: AbortSignal.timeout(EXTERNAL_REQUEST_TIMEOUT),
timeout: EXTERNAL_REQUEST_TIMEOUT,
httpsAgent
}
);
// 2. Create a test role binding
const roleBindingUrl =
providerInputs.roleType === KubernetesRoleType.ClusterRole
? `${baseUrl}/apis/rbac.authorization.k8s.io/v1/clusterrolebindings`
: `${baseUrl}/apis/rbac.authorization.k8s.io/v1/namespaces/${providerInputs.namespace}/rolebindings`;
const roleBindingMetadata = {
name: roleBindingName,
...(providerInputs.roleType !== KubernetesRoleType.ClusterRole && { namespace: providerInputs.namespace })
};
await axios.post(
roleBindingUrl,
{
metadata: roleBindingMetadata,
roleRef: {
kind: providerInputs.roleType === KubernetesRoleType.ClusterRole ? "ClusterRole" : "Role",
name: providerInputs.role,
apiGroup: "rbac.authorization.k8s.io"
},
subjects: [
{
kind: "ServiceAccount",
name: serviceAccountName,
namespace: providerInputs.namespace
}
]
},
{
headers: {
"Content-Type": "application/json",
...(providerInputs.authMethod === KubernetesAuthMethod.Gateway
? { "x-infisical-action": GatewayHttpProxyActions.InjectGatewayK8sServiceAccountToken }
: { Authorization: `Bearer ${providerInputs.clusterToken}` })
},
signal: AbortSignal.timeout(EXTERNAL_REQUEST_TIMEOUT),
timeout: EXTERNAL_REQUEST_TIMEOUT,
httpsAgent
}
);
// 3. Request a token for the test service account
await axios.post(
`${baseUrl}/api/v1/namespaces/${providerInputs.namespace}/serviceaccounts/${serviceAccountName}/token`,
{
spec: {
expirationSeconds: 600, // 10 minutes
...(providerInputs.audiences?.length ? { audiences: providerInputs.audiences } : {})
}
},
{
headers: {
"Content-Type": "application/json",
...(providerInputs.authMethod === KubernetesAuthMethod.Gateway
? { "x-infisical-action": GatewayHttpProxyActions.InjectGatewayK8sServiceAccountToken }
: { Authorization: `Bearer ${providerInputs.clusterToken}` })
},
signal: AbortSignal.timeout(EXTERNAL_REQUEST_TIMEOUT),
timeout: EXTERNAL_REQUEST_TIMEOUT,
httpsAgent
}
);
// 4. Cleanup: delete role binding and service account
if (providerInputs.roleType === KubernetesRoleType.Role) {
await axios.delete(
`${baseUrl}/apis/rbac.authorization.k8s.io/v1/namespaces/${providerInputs.namespace}/rolebindings/${roleBindingName}`,
{
headers: {
"Content-Type": "application/json",
...(providerInputs.authMethod === KubernetesAuthMethod.Gateway
? { "x-infisical-action": GatewayHttpProxyActions.InjectGatewayK8sServiceAccountToken }
: { Authorization: `Bearer ${providerInputs.clusterToken}` })
},
signal: AbortSignal.timeout(EXTERNAL_REQUEST_TIMEOUT),
timeout: EXTERNAL_REQUEST_TIMEOUT,
httpsAgent
}
);
} else {
await axios.delete(`${baseUrl}/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/${roleBindingName}`, {
headers: {
"Content-Type": "application/json",
...(providerInputs.authMethod === KubernetesAuthMethod.Gateway
? { "x-infisical-action": GatewayHttpProxyActions.InjectGatewayK8sServiceAccountToken }
: { Authorization: `Bearer ${providerInputs.clusterToken}` })
},
signal: AbortSignal.timeout(EXTERNAL_REQUEST_TIMEOUT),
timeout: EXTERNAL_REQUEST_TIMEOUT,
httpsAgent
});
}
await axios.delete(
`${baseUrl}/api/v1/namespaces/${providerInputs.namespace}/serviceaccounts/${serviceAccountName}`,
{
headers: {
"Content-Type": "application/json",
...(providerInputs.authMethod === KubernetesAuthMethod.Gateway
? { "x-infisical-action": GatewayHttpProxyActions.InjectGatewayK8sServiceAccountToken }
: { Authorization: `Bearer ${providerInputs.clusterToken}` })
},
signal: AbortSignal.timeout(EXTERNAL_REQUEST_TIMEOUT),
timeout: EXTERNAL_REQUEST_TIMEOUT,
httpsAgent
}
);
};
const serviceAccountStaticCallback = async (host: string, port: number, httpsAgent?: https.Agent) => {
if (providerInputs.credentialType !== KubernetesCredentialType.Static) {
throw new Error("invalid callback");
}
const baseUrl = port ? `${host}:${port}` : host;
await axios.get(
@@ -72,36 +246,57 @@ export const KubernetesProvider = ({ gatewayService }: TKubernetesProviderDTO):
{
headers: {
"Content-Type": "application/json",
-            Authorization: `Bearer ${providerInputs.clusterToken}`
+            ...(providerInputs.authMethod === KubernetesAuthMethod.Gateway
+              ? { "x-infisical-action": GatewayHttpProxyActions.InjectGatewayK8sServiceAccountToken }
+              : { Authorization: `Bearer ${providerInputs.clusterToken}` })
           },
           signal: AbortSignal.timeout(EXTERNAL_REQUEST_TIMEOUT),
           timeout: EXTERNAL_REQUEST_TIMEOUT,
-          httpsAgent: new https.Agent({
-            ca: providerInputs.ca,
-            rejectUnauthorized: providerInputs.sslEnabled
-          })
+          httpsAgent
}
);
};
     const url = new URL(providerInputs.url);
+    const k8sGatewayHost = url.hostname;
     const k8sPort = url.port ? Number(url.port) : 443;
+    const k8sHost = `${url.protocol}//${url.hostname}`;

     try {
       if (providerInputs.gatewayId) {
-        const k8sHost = url.hostname;
-        await $gatewayProxyWrapper(
-          {
-            gatewayId: providerInputs.gatewayId,
-            targetHost: k8sHost,
-            targetPort: k8sPort
-          },
-          serviceAccountGetCallback
-        );
+        if (providerInputs.authMethod === KubernetesAuthMethod.Gateway) {
+          await $gatewayProxyWrapper(
+            {
+              gatewayId: providerInputs.gatewayId,
+              targetHost: k8sHost,
+              targetPort: k8sPort,
+              enableSsl: providerInputs.sslEnabled,
+              caCert: providerInputs.ca,
+              reviewTokenThroughGateway: true
+            },
+            providerInputs.credentialType === KubernetesCredentialType.Static
+              ? serviceAccountStaticCallback
+              : serviceAccountDynamicCallback
+          );
+        } else {
+          await $gatewayProxyWrapper(
+            {
+              gatewayId: providerInputs.gatewayId,
+              targetHost: k8sGatewayHost,
+              targetPort: k8sPort,
+              enableSsl: providerInputs.sslEnabled,
+              caCert: providerInputs.ca,
+              reviewTokenThroughGateway: false
+            },
+            providerInputs.credentialType === KubernetesCredentialType.Static
+              ? serviceAccountStaticCallback
+              : serviceAccountDynamicCallback
+          );
+        }
-      } else {
-        const k8sHost = `${url.protocol}//${url.hostname}`;
-        await serviceAccountGetCallback(k8sHost, k8sPort);
+      } else if (providerInputs.credentialType === KubernetesCredentialType.Static) {
+        await serviceAccountStaticCallback(k8sHost, k8sPort);
+      } else {
+        await serviceAccountDynamicCallback(k8sHost, k8sPort);
}
return true;
@@ -117,10 +312,119 @@ export const KubernetesProvider = ({ gatewayService }: TKubernetesProviderDTO):
}
};
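The Authorization-versus-gateway-header ternary recurs in every Kubernetes API request in this file. A small helper could factor it out; a sketch using only identifiers already present in the diff:

```ts
// Hypothetical refactor: build the headers used by each axios call above.
const buildK8sAuthHeaders = (providerInputs: {
  authMethod: KubernetesAuthMethod;
  clusterToken?: string;
}): Record<string, string> => ({
  "Content-Type": "application/json",
  ...(providerInputs.authMethod === KubernetesAuthMethod.Gateway
    ? { "x-infisical-action": GatewayHttpProxyActions.InjectGatewayK8sServiceAccountToken }
    : { Authorization: `Bearer ${providerInputs.clusterToken}` })
});
```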
-  const create = async ({ inputs, expireAt }: { inputs: unknown; expireAt: number }) => {
+  const create = async ({
+    inputs,
+    expireAt,
+    usernameTemplate
+  }: {
+    inputs: unknown;
+    expireAt: number;
+    usernameTemplate?: string | null;
+  }) => {
     const providerInputs = await validateProviderInputs(inputs);
-    const tokenRequestCallback = async (host: string, port: number) => {
+    const serviceAccountDynamicCallback = async (host: string, port: number, httpsAgent?: https.Agent) => {
if (providerInputs.credentialType !== KubernetesCredentialType.Dynamic) {
throw new Error("invalid callback");
}
const baseUrl = port ? `${host}:${port}` : host;
const serviceAccountName = generateUsername(usernameTemplate);
const roleBindingName = `${serviceAccountName}-role-binding`;
// 1. Create the service account
await axios.post(
`${baseUrl}/api/v1/namespaces/${providerInputs.namespace}/serviceaccounts`,
{
metadata: {
name: serviceAccountName,
namespace: providerInputs.namespace
}
},
{
headers: {
"Content-Type": "application/json",
...(providerInputs.authMethod === KubernetesAuthMethod.Gateway
? { "x-infisical-action": GatewayHttpProxyActions.InjectGatewayK8sServiceAccountToken }
: { Authorization: `Bearer ${providerInputs.clusterToken}` })
},
signal: AbortSignal.timeout(EXTERNAL_REQUEST_TIMEOUT),
timeout: EXTERNAL_REQUEST_TIMEOUT,
httpsAgent
}
);
// 2. Create the role binding
const roleBindingUrl =
providerInputs.roleType === KubernetesRoleType.ClusterRole
? `${baseUrl}/apis/rbac.authorization.k8s.io/v1/clusterrolebindings`
: `${baseUrl}/apis/rbac.authorization.k8s.io/v1/namespaces/${providerInputs.namespace}/rolebindings`;
const roleBindingMetadata = {
name: roleBindingName,
...(providerInputs.roleType !== KubernetesRoleType.ClusterRole && { namespace: providerInputs.namespace })
};
await axios.post(
roleBindingUrl,
{
metadata: roleBindingMetadata,
roleRef: {
kind: providerInputs.roleType === KubernetesRoleType.ClusterRole ? "ClusterRole" : "Role",
name: providerInputs.role,
apiGroup: "rbac.authorization.k8s.io"
},
subjects: [
{
kind: "ServiceAccount",
name: serviceAccountName,
namespace: providerInputs.namespace
}
]
},
{
headers: {
"Content-Type": "application/json",
...(providerInputs.authMethod === KubernetesAuthMethod.Gateway
? { "x-infisical-action": GatewayHttpProxyActions.InjectGatewayK8sServiceAccountToken }
: { Authorization: `Bearer ${providerInputs.clusterToken}` })
},
signal: AbortSignal.timeout(EXTERNAL_REQUEST_TIMEOUT),
timeout: EXTERNAL_REQUEST_TIMEOUT,
httpsAgent
}
);
// 3. Request a token for the service account
const res = await axios.post<TKubernetesTokenRequest>(
`${baseUrl}/api/v1/namespaces/${providerInputs.namespace}/serviceaccounts/${serviceAccountName}/token`,
{
spec: {
expirationSeconds: Math.floor((expireAt - Date.now()) / 1000),
...(providerInputs.audiences?.length ? { audiences: providerInputs.audiences } : {})
}
},
{
headers: {
"Content-Type": "application/json",
...(providerInputs.authMethod === KubernetesAuthMethod.Gateway
? { "x-infisical-action": GatewayHttpProxyActions.InjectGatewayK8sServiceAccountToken }
: { Authorization: `Bearer ${providerInputs.clusterToken}` })
},
signal: AbortSignal.timeout(EXTERNAL_REQUEST_TIMEOUT),
timeout: EXTERNAL_REQUEST_TIMEOUT,
httpsAgent
}
);
return { ...res.data, serviceAccountName };
};
const tokenRequestStaticCallback = async (host: string, port: number, httpsAgent?: https.Agent) => {
if (providerInputs.credentialType !== KubernetesCredentialType.Static) {
throw new Error("invalid callback");
}
const baseUrl = port ? `${host}:${port}` : host;
const res = await axios.post<TKubernetesTokenRequest>(
@@ -134,18 +438,17 @@ export const KubernetesProvider = ({ gatewayService }: TKubernetesProviderDTO):
{
headers: {
"Content-Type": "application/json",
-            Authorization: `Bearer ${providerInputs.clusterToken}`
+            ...(providerInputs.authMethod === KubernetesAuthMethod.Gateway
+              ? { "x-infisical-action": GatewayHttpProxyActions.InjectGatewayK8sServiceAccountToken }
+              : { Authorization: `Bearer ${providerInputs.clusterToken}` })
           },
           signal: AbortSignal.timeout(EXTERNAL_REQUEST_TIMEOUT),
           timeout: EXTERNAL_REQUEST_TIMEOUT,
-          httpsAgent: new https.Agent({
-            ca: providerInputs.ca,
-            rejectUnauthorized: providerInputs.sslEnabled
-          })
+          httpsAgent
         }
       );
-      return res.data;
+      return { ...res.data, serviceAccountName: providerInputs.serviceAccountName };
};
const url = new URL(providerInputs.url);
@@ -154,19 +457,46 @@ export const KubernetesProvider = ({ gatewayService }: TKubernetesProviderDTO):
const k8sPort = url.port ? Number(url.port) : 443;
try {
-      const tokenData = providerInputs.gatewayId
-        ? await $gatewayProxyWrapper(
-            {
-              gatewayId: providerInputs.gatewayId,
-              targetHost: k8sGatewayHost,
-              targetPort: k8sPort
-            },
-            tokenRequestCallback
-          )
-        : await tokenRequestCallback(k8sHost, k8sPort);
+      let tokenData;
+      if (providerInputs.gatewayId) {
+        if (providerInputs.authMethod === KubernetesAuthMethod.Gateway) {
+          tokenData = await $gatewayProxyWrapper(
+            {
+              gatewayId: providerInputs.gatewayId,
+              targetHost: k8sHost,
+              targetPort: k8sPort,
+              enableSsl: providerInputs.sslEnabled,
+              caCert: providerInputs.ca,
+              reviewTokenThroughGateway: true
+            },
+            providerInputs.credentialType === KubernetesCredentialType.Static
+              ? tokenRequestStaticCallback
+              : serviceAccountDynamicCallback
+          );
+        } else {
+          tokenData = await $gatewayProxyWrapper(
+            {
+              gatewayId: providerInputs.gatewayId,
+              targetHost: k8sGatewayHost,
+              targetPort: k8sPort,
+              enableSsl: providerInputs.sslEnabled,
+              caCert: providerInputs.ca,
+              reviewTokenThroughGateway: false
+            },
+            providerInputs.credentialType === KubernetesCredentialType.Static
+              ? tokenRequestStaticCallback
+              : serviceAccountDynamicCallback
+          );
+        }
+      } else {
+        tokenData =
+          providerInputs.credentialType === KubernetesCredentialType.Static
+            ? await tokenRequestStaticCallback(k8sHost, k8sPort)
+            : await serviceAccountDynamicCallback(k8sHost, k8sPort);
+      }

       return {
-        entityId: providerInputs.serviceAccountName,
+        entityId: tokenData.serviceAccountName,
data: { TOKEN: tokenData.status.token }
};
} catch (error) {
@@ -181,7 +511,97 @@ export const KubernetesProvider = ({ gatewayService }: TKubernetesProviderDTO):
}
};
-  const revoke = async (_inputs: unknown, entityId: string) => {
+  const revoke = async (inputs: unknown, entityId: string) => {
const providerInputs = await validateProviderInputs(inputs);
const serviceAccountDynamicCallback = async (host: string, port: number, httpsAgent?: https.Agent) => {
if (providerInputs.credentialType !== KubernetesCredentialType.Dynamic) {
throw new Error("invalid callback");
}
const baseUrl = port ? `${host}:${port}` : host;
const roleBindingName = `${entityId}-role-binding`;
if (providerInputs.roleType === KubernetesRoleType.Role) {
await axios.delete(
`${baseUrl}/apis/rbac.authorization.k8s.io/v1/namespaces/${providerInputs.namespace}/rolebindings/${roleBindingName}`,
{
headers: {
"Content-Type": "application/json",
...(providerInputs.authMethod === KubernetesAuthMethod.Gateway
? { "x-infisical-action": GatewayHttpProxyActions.InjectGatewayK8sServiceAccountToken }
: { Authorization: `Bearer ${providerInputs.clusterToken}` })
},
signal: AbortSignal.timeout(EXTERNAL_REQUEST_TIMEOUT),
timeout: EXTERNAL_REQUEST_TIMEOUT,
httpsAgent
}
);
} else {
await axios.delete(`${baseUrl}/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/${roleBindingName}`, {
headers: {
"Content-Type": "application/json",
...(providerInputs.authMethod === KubernetesAuthMethod.Gateway
? { "x-infisical-action": GatewayHttpProxyActions.InjectGatewayK8sServiceAccountToken }
: { Authorization: `Bearer ${providerInputs.clusterToken}` })
},
signal: AbortSignal.timeout(EXTERNAL_REQUEST_TIMEOUT),
timeout: EXTERNAL_REQUEST_TIMEOUT,
httpsAgent
});
}
// Delete the service account
await axios.delete(`${baseUrl}/api/v1/namespaces/${providerInputs.namespace}/serviceaccounts/${entityId}`, {
headers: {
"Content-Type": "application/json",
...(providerInputs.authMethod === KubernetesAuthMethod.Gateway
? { "x-infisical-action": GatewayHttpProxyActions.InjectGatewayK8sServiceAccountToken }
: { Authorization: `Bearer ${providerInputs.clusterToken}` })
},
signal: AbortSignal.timeout(EXTERNAL_REQUEST_TIMEOUT),
timeout: EXTERNAL_REQUEST_TIMEOUT,
httpsAgent
});
};
if (providerInputs.credentialType === KubernetesCredentialType.Dynamic) {
const url = new URL(providerInputs.url);
const k8sGatewayHost = url.hostname;
const k8sPort = url.port ? Number(url.port) : 443;
const k8sHost = `${url.protocol}//${url.hostname}`;
if (providerInputs.gatewayId) {
if (providerInputs.authMethod === KubernetesAuthMethod.Gateway) {
await $gatewayProxyWrapper(
{
gatewayId: providerInputs.gatewayId,
targetHost: k8sHost,
targetPort: k8sPort,
enableSsl: providerInputs.sslEnabled,
caCert: providerInputs.ca,
reviewTokenThroughGateway: true
},
serviceAccountDynamicCallback
);
} else {
await $gatewayProxyWrapper(
{
gatewayId: providerInputs.gatewayId,
targetHost: k8sGatewayHost,
targetPort: k8sPort,
enableSsl: providerInputs.sslEnabled,
caCert: providerInputs.ca,
reviewTokenThroughGateway: false
},
serviceAccountDynamicCallback
);
}
} else {
await serviceAccountDynamicCallback(k8sHost, k8sPort);
}
}
return { entityId };
};

View File

@@ -9,6 +9,7 @@ import { BadRequestError } from "@app/lib/errors";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { LdapCredentialType, LdapSchema, TDynamicProviderFns } from "./models";
import { compileUsernameTemplate } from "./templateUtils";
const generatePassword = () => {
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*$#";
@@ -22,13 +23,13 @@ const encodePassword = (password?: string) => {
return base64Password;
};
-const generateUsername = (usernameTemplate?: string | null) => {
+const generateUsername = (usernameTemplate?: string | null, identity?: { name: string }) => {
   const randomUsername = alphaNumericNanoId(32); // Username must start with an ascii letter, so we prepend the username with "inf-"
   if (!usernameTemplate) return randomUsername;
-  return handlebars.compile(usernameTemplate)({
+  return compileUsernameTemplate({
+    usernameTemplate,
     randomUsername,
-    unixTimestamp: Math.floor(Date.now() / 100)
+    identity
   });
});
};
@@ -196,8 +197,8 @@ export const LdapProvider = (): TDynamicProviderFns => {
return dnArray;
};
-  const create = async (data: { inputs: unknown; usernameTemplate?: string | null }) => {
-    const { inputs, usernameTemplate } = data;
+  const create = async (data: { inputs: unknown; usernameTemplate?: string | null; identity?: { name: string } }) => {
+    const { inputs, usernameTemplate, identity } = data;
const providerInputs = await validateProviderInputs(inputs);
const client = await $getClient(providerInputs);
@@ -224,7 +225,7 @@ export const LdapProvider = (): TDynamicProviderFns => {
});
}
} else {
-      const username = generateUsername(usernameTemplate);
+      const username = generateUsername(usernameTemplate, identity);
const password = generatePassword();
const generatedLdif = generateLDIF({ username, password, ldifTemplate: providerInputs.creationLdif });

View File

@@ -20,6 +20,11 @@ export enum SqlProviders {
Vertica = "vertica"
}
export enum AwsIamAuthType {
AssumeRole = "assume-role",
AccessKey = "access-key"
}
export enum ElasticSearchAuthTypes {
User = "user",
ApiKey = "api-key"
@@ -31,7 +36,18 @@ export enum LdapCredentialType {
}
export enum KubernetesCredentialType {
Static = "static"
Static = "static",
Dynamic = "dynamic"
}
export enum KubernetesRoleType {
ClusterRole = "cluster-role",
Role = "role"
}
export enum KubernetesAuthMethod {
Gateway = "gateway",
Api = "api"
}
export enum TotpConfigType {
@@ -168,16 +184,38 @@ export const DynamicSecretSapAseSchema = z.object({
revocationStatement: z.string().trim()
});
-export const DynamicSecretAwsIamSchema = z.object({
-  accessKey: z.string().trim().min(1),
-  secretAccessKey: z.string().trim().min(1),
-  region: z.string().trim().min(1),
-  awsPath: z.string().trim().optional(),
-  permissionBoundaryPolicyArn: z.string().trim().optional(),
-  policyDocument: z.string().trim().optional(),
-  userGroups: z.string().trim().optional(),
-  policyArns: z.string().trim().optional()
-});
+export const DynamicSecretAwsIamSchema = z.preprocess(
+  (val) => {
+    if (typeof val === "object" && val !== null && !Object.hasOwn(val, "method")) {
+      // eslint-disable-next-line no-param-reassign
+      (val as { method: string }).method = AwsIamAuthType.AccessKey;
+    }
+    return val;
+  },
+  z.discriminatedUnion("method", [
+    z.object({
+      method: z.literal(AwsIamAuthType.AccessKey),
+      accessKey: z.string().trim().min(1),
+      secretAccessKey: z.string().trim().min(1),
+      region: z.string().trim().min(1),
+      awsPath: z.string().trim().optional(),
+      permissionBoundaryPolicyArn: z.string().trim().optional(),
+      policyDocument: z.string().trim().optional(),
+      userGroups: z.string().trim().optional(),
+      policyArns: z.string().trim().optional()
+    }),
+    z.object({
+      method: z.literal(AwsIamAuthType.AssumeRole),
+      roleArn: z.string().trim().min(1, "Role ARN required"),
+      region: z.string().trim().min(1),
+      awsPath: z.string().trim().optional(),
+      permissionBoundaryPolicyArn: z.string().trim().optional(),
+      policyDocument: z.string().trim().optional(),
+      userGroups: z.string().trim().optional(),
+      policyArns: z.string().trim().optional()
+    })
+  ])
+);
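The preprocess step keeps older stored configs (which predate the method field) parsing as access-key credentials:

```ts
// A legacy input without "method" is coerced to the access-key variant.
const legacy = DynamicSecretAwsIamSchema.parse({
  accessKey: "AKIA...",
  secretAccessKey: "example-secret",
  region: "us-east-1"
});
// legacy.method === AwsIamAuthType.AccessKey
```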
export const DynamicSecretMongoAtlasSchema = z.object({
adminPublicKey: z.string().trim().min(1).describe("Admin user public api key"),
@@ -282,17 +320,50 @@ export const LdapSchema = z.union([
})
]);
-export const DynamicSecretKubernetesSchema = z.object({
-  url: z.string().url().trim().min(1),
-  gatewayId: z.string().nullable().optional(),
-  sslEnabled: z.boolean().default(true),
-  clusterToken: z.string().trim().min(1),
-  ca: z.string().optional(),
-  serviceAccountName: z.string().trim().min(1),
-  credentialType: z.literal(KubernetesCredentialType.Static),
-  namespace: z.string().trim().min(1),
-  audiences: z.array(z.string().trim().min(1))
-});
+export const DynamicSecretKubernetesSchema = z
+  .discriminatedUnion("credentialType", [
+    z.object({
+      url: z.string().url().trim().min(1),
+      clusterToken: z.string().trim().optional(),
+      ca: z.string().optional(),
+      sslEnabled: z.boolean().default(false),
+      credentialType: z.literal(KubernetesCredentialType.Static),
+      serviceAccountName: z.string().trim().min(1),
+      namespace: z.string().trim().min(1),
+      gatewayId: z.string().optional(),
+      audiences: z.array(z.string().trim().min(1)),
+      authMethod: z.nativeEnum(KubernetesAuthMethod).default(KubernetesAuthMethod.Api)
+    }),
+    z.object({
+      url: z.string().url().trim().min(1),
+      clusterToken: z.string().trim().optional(),
+      ca: z.string().optional(),
+      sslEnabled: z.boolean().default(false),
+      credentialType: z.literal(KubernetesCredentialType.Dynamic),
+      namespace: z.string().trim().min(1),
+      gatewayId: z.string().optional(),
+      audiences: z.array(z.string().trim().min(1)),
+      roleType: z.nativeEnum(KubernetesRoleType),
+      role: z.string().trim().min(1),
+      authMethod: z.nativeEnum(KubernetesAuthMethod).default(KubernetesAuthMethod.Api)
+    })
+  ])
+  .superRefine((data, ctx) => {
+    if (data.authMethod === KubernetesAuthMethod.Gateway && !data.gatewayId) {
+      ctx.addIssue({
+        path: ["gatewayId"],
+        code: z.ZodIssueCode.custom,
+        message: "When auth method is set to Gateway, a gateway must be selected"
+      });
+    }
+    if ((data.authMethod === KubernetesAuthMethod.Api || !data.authMethod) && !data.clusterToken) {
+      ctx.addIssue({
+        path: ["clusterToken"],
+        code: z.ZodIssueCode.custom,
+        message: "When auth method is set to Manual Token, a cluster token must be provided"
+      });
+    }
+  });
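The superRefine checks surface as ordinary Zod issues, so selecting gateway auth without picking a gateway fails validation:

```ts
const parsed = DynamicSecretKubernetesSchema.safeParse({
  url: "https://k8s.example.com",
  credentialType: KubernetesCredentialType.Static,
  serviceAccountName: "default",
  namespace: "default",
  audiences: [],
  authMethod: KubernetesAuthMethod.Gateway
  // gatewayId omitted
});
// parsed.success === false, with an issue at path ["gatewayId"]
```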
export const DynamicSecretVerticaSchema = z.object({
host: z.string().trim().toLowerCase(),
@@ -400,9 +471,18 @@ export type TDynamicProviderFns = {
inputs: unknown;
expireAt: number;
usernameTemplate?: string | null;
+    identity?: {
+      name: string;
+    };
+    metadata: { projectId: string };
   }) => Promise<{ entityId: string; data: unknown }>;
-  validateConnection: (inputs: unknown) => Promise<boolean>;
-  validateProviderInputs: (inputs: object) => Promise<unknown>;
-  revoke: (inputs: unknown, entityId: string) => Promise<{ entityId: string }>;
-  renew: (inputs: unknown, entityId: string, expireAt: number) => Promise<{ entityId: string }>;
+  validateConnection: (inputs: unknown, metadata: { projectId: string }) => Promise<boolean>;
+  validateProviderInputs: (inputs: object, metadata: { projectId: string }) => Promise<unknown>;
+  revoke: (inputs: unknown, entityId: string, metadata: { projectId: string }) => Promise<{ entityId: string }>;
+  renew: (
+    inputs: unknown,
+    entityId: string,
+    expireAt: number,
+    metadata: { projectId: string }
+  ) => Promise<{ entityId: string }>;
};
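After this change every provider receives metadata (currently just projectId) on each lifecycle call and an optional identity on create. A no-op stub conforming to the updated interface, for illustration only:

```ts
// Hypothetical provider skeleton showing the new signatures.
export const ExampleProvider = (): TDynamicProviderFns => ({
  validateProviderInputs: async (inputs, _metadata) => inputs,
  validateConnection: async (_inputs, _metadata) => true,
  create: async ({ expireAt, identity, metadata }) => ({
    entityId: identity?.name ?? "generated-id",
    data: { projectId: metadata.projectId, expiresAt: expireAt }
  }),
  revoke: async (_inputs, entityId, _metadata) => ({ entityId }),
  renew: async (_inputs, entityId, _expireAt, _metadata) => ({ entityId })
});
```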

View File

@@ -1,5 +1,4 @@
import axios, { AxiosError } from "axios";
-import handlebars from "handlebars";
import { customAlphabet } from "nanoid";
import { z } from "zod";
@@ -7,19 +6,20 @@ import { createDigestAuthRequestInterceptor } from "@app/lib/axios/digest-auth";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { DynamicSecretMongoAtlasSchema, TDynamicProviderFns } from "./models";
import { compileUsernameTemplate } from "./templateUtils";
const generatePassword = (size = 48) => {
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*";
return customAlphabet(charset, 48)(size);
};
-const generateUsername = (usernameTemplate?: string | null) => {
+const generateUsername = (usernameTemplate?: string | null, identity?: { name: string }) => {
   const randomUsername = alphaNumericNanoId(32);
   if (!usernameTemplate) return randomUsername;
-  return handlebars.compile(usernameTemplate)({
+  return compileUsernameTemplate({
+    usernameTemplate,
     randomUsername,
-    unixTimestamp: Math.floor(Date.now() / 100)
+    identity
   });
});
};
@@ -64,12 +64,17 @@ export const MongoAtlasProvider = (): TDynamicProviderFns => {
return isConnected;
};
-  const create = async (data: { inputs: unknown; expireAt: number; usernameTemplate?: string | null }) => {
-    const { inputs, expireAt, usernameTemplate } = data;
+  const create = async (data: {
+    inputs: unknown;
+    expireAt: number;
+    usernameTemplate?: string | null;
+    identity?: { name: string };
+  }) => {
+    const { inputs, expireAt, usernameTemplate, identity } = data;
     const providerInputs = await validateProviderInputs(inputs);
     const client = await $getClient(providerInputs);
-    const username = generateUsername(usernameTemplate);
+    const username = generateUsername(usernameTemplate, identity);
const password = generatePassword();
const expiration = new Date(expireAt).toISOString();
await client({

View File

@@ -1,4 +1,3 @@
-import handlebars from "handlebars";
import { MongoClient } from "mongodb";
import { customAlphabet } from "nanoid";
import { z } from "zod";
@@ -7,19 +6,20 @@ import { alphaNumericNanoId } from "@app/lib/nanoid";
import { verifyHostInputValidity } from "../dynamic-secret-fns";
import { DynamicSecretMongoDBSchema, TDynamicProviderFns } from "./models";
import { compileUsernameTemplate } from "./templateUtils";
const generatePassword = (size = 48) => {
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*";
return customAlphabet(charset, 48)(size);
};
const generateUsername = (usernameTemplate?: string | null) => {
const generateUsername = (usernameTemplate?: string | null, identity?: { name: string }) => {
const randomUsername = alphaNumericNanoId(32);
if (!usernameTemplate) return randomUsername;
return handlebars.compile(usernameTemplate)({
return compileUsernameTemplate({
usernameTemplate,
randomUsername,
unixTimestamp: Math.floor(Date.now() / 100)
identity
});
};
@@ -60,12 +60,12 @@ export const MongoDBProvider = (): TDynamicProviderFns => {
return isConnected;
};
const create = async (data: { inputs: unknown; usernameTemplate?: string | null }) => {
const { inputs, usernameTemplate } = data;
const create = async (data: { inputs: unknown; usernameTemplate?: string | null; identity?: { name: string } }) => {
const { inputs, usernameTemplate, identity } = data;
const providerInputs = await validateProviderInputs(inputs);
const client = await $getClient(providerInputs);
const username = generateUsername(usernameTemplate);
const username = generateUsername(usernameTemplate, identity);
const password = generatePassword();
const db = client.db(providerInputs.database);

View File

@@ -1,5 +1,4 @@
import axios, { Axios } from "axios";
import handlebars from "handlebars";
import https from "https";
import { customAlphabet } from "nanoid";
import { z } from "zod";
@@ -9,19 +8,20 @@ import { alphaNumericNanoId } from "@app/lib/nanoid";
import { verifyHostInputValidity } from "../dynamic-secret-fns";
import { DynamicSecretRabbitMqSchema, TDynamicProviderFns } from "./models";
import { compileUsernameTemplate } from "./templateUtils";
const generatePassword = () => {
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*";
return customAlphabet(charset, 64)();
};
const generateUsername = (usernameTemplate?: string | null) => {
const generateUsername = (usernameTemplate?: string | null, identity?: { name: string }) => {
const randomUsername = alphaNumericNanoId(32); // Username must start with an ascii letter
if (!usernameTemplate) return randomUsername;
return handlebars.compile(usernameTemplate)({
return compileUsernameTemplate({
usernameTemplate,
randomUsername,
unixTimestamp: Math.floor(Date.now() / 100)
identity
});
};
@@ -117,12 +117,12 @@ export const RabbitMqProvider = (): TDynamicProviderFns => {
return infoResponse;
};
const create = async (data: { inputs: unknown; usernameTemplate?: string | null }) => {
const { inputs, usernameTemplate } = data;
const create = async (data: { inputs: unknown; usernameTemplate?: string | null; identity?: { name: string } }) => {
const { inputs, usernameTemplate, identity } = data;
const providerInputs = await validateProviderInputs(inputs);
const connection = await $getClient(providerInputs);
const username = generateUsername(usernameTemplate);
const username = generateUsername(usernameTemplate, identity);
const password = generatePassword();
await createRabbitMqUser({

View File

@@ -9,19 +9,20 @@ import { validateHandlebarTemplate } from "@app/lib/template/validate-handlebars
import { verifyHostInputValidity } from "../dynamic-secret-fns";
import { DynamicSecretRedisDBSchema, TDynamicProviderFns } from "./models";
import { compileUsernameTemplate } from "./templateUtils";
const generatePassword = () => {
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*";
return customAlphabet(charset, 64)();
};
const generateUsername = (usernameTemplate?: string | null) => {
const generateUsername = (usernameTemplate?: string | null, identity?: { name: string }) => {
const randomUsername = alphaNumericNanoId(32); // Username must start with an ascii letter
if (!usernameTemplate) return randomUsername;
return handlebars.compile(usernameTemplate)({
return compileUsernameTemplate({
usernameTemplate,
randomUsername,
unixTimestamp: Math.floor(Date.now() / 100)
identity
});
};
@@ -121,12 +122,17 @@ export const RedisDatabaseProvider = (): TDynamicProviderFns => {
return pingResponse;
};
const create = async (data: { inputs: unknown; expireAt: number; usernameTemplate?: string | null }) => {
const { inputs, expireAt, usernameTemplate } = data;
const create = async (data: {
inputs: unknown;
expireAt: number;
usernameTemplate?: string | null;
identity?: { name: string };
}) => {
const { inputs, expireAt, usernameTemplate, identity } = data;
const providerInputs = await validateProviderInputs(inputs);
const connection = await $getClient(providerInputs);
const username = generateUsername(usernameTemplate);
const username = generateUsername(usernameTemplate, identity);
const password = generatePassword();
const expiration = new Date(expireAt).toISOString();

View File

@@ -9,19 +9,20 @@ import { validateHandlebarTemplate } from "@app/lib/template/validate-handlebars
import { verifyHostInputValidity } from "../dynamic-secret-fns";
import { DynamicSecretSapAseSchema, TDynamicProviderFns } from "./models";
import { compileUsernameTemplate } from "./templateUtils";
const generatePassword = (size = 48) => {
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789";
return customAlphabet(charset, 48)(size);
};
const generateUsername = (usernameTemplate?: string | null) => {
const generateUsername = (usernameTemplate?: string | null, identity?: { name: string }) => {
const randomUsername = `inf_${alphaNumericNanoId(25)}`; // Username must start with an ascii letter, so we prepend the username with "inf_"
if (!usernameTemplate) return randomUsername;
return handlebars.compile(usernameTemplate)({
return compileUsernameTemplate({
usernameTemplate,
randomUsername,
unixTimestamp: Math.floor(Date.now() / 100)
identity
});
};
@@ -87,11 +88,11 @@ export const SapAseProvider = (): TDynamicProviderFns => {
return true;
};
const create = async (data: { inputs: unknown; usernameTemplate?: string | null }) => {
const { inputs, usernameTemplate } = data;
const create = async (data: { inputs: unknown; usernameTemplate?: string | null; identity?: { name: string } }) => {
const { inputs, usernameTemplate, identity } = data;
const providerInputs = await validateProviderInputs(inputs);
const username = generateUsername(usernameTemplate);
const username = generateUsername(usernameTemplate, identity);
const password = generatePassword();
const client = await $getClient(providerInputs);

View File

@@ -15,19 +15,20 @@ import { validateHandlebarTemplate } from "@app/lib/template/validate-handlebars
import { verifyHostInputValidity } from "../dynamic-secret-fns";
import { DynamicSecretSapHanaSchema, TDynamicProviderFns } from "./models";
import { compileUsernameTemplate } from "./templateUtils";
const generatePassword = (size = 48) => {
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789";
return customAlphabet(charset, 48)(size);
};
const generateUsername = (usernameTemplate?: string | null) => {
const generateUsername = (usernameTemplate?: string | null, identity?: { name: string }) => {
const randomUsername = alphaNumericNanoId(32); // Username must start with an ascii letter
if (!usernameTemplate) return randomUsername;
return handlebars.compile(usernameTemplate)({
return compileUsernameTemplate({
usernameTemplate,
randomUsername,
unixTimestamp: Math.floor(Date.now() / 100)
identity
});
};
@@ -97,11 +98,16 @@ export const SapHanaProvider = (): TDynamicProviderFns => {
return testResult;
};
const create = async (data: { inputs: unknown; expireAt: number; usernameTemplate?: string | null }) => {
const { inputs, expireAt, usernameTemplate } = data;
const create = async (data: {
inputs: unknown;
expireAt: number;
usernameTemplate?: string | null;
identity?: { name: string };
}) => {
const { inputs, expireAt, usernameTemplate, identity } = data;
const providerInputs = await validateProviderInputs(inputs);
const username = generateUsername(usernameTemplate);
const username = generateUsername(usernameTemplate, identity);
const password = generatePassword();
const expiration = new Date(expireAt).toISOString();

View File

@@ -8,6 +8,7 @@ import { alphaNumericNanoId } from "@app/lib/nanoid";
import { validateHandlebarTemplate } from "@app/lib/template/validate-handlebars";
import { DynamicSecretSnowflakeSchema, TDynamicProviderFns } from "./models";
import { compileUsernameTemplate } from "./templateUtils";
// destroy client requires callback...
const noop = () => {};
@@ -17,13 +18,13 @@ const generatePassword = (size = 48) => {
return customAlphabet(charset, 48)(size);
};
const generateUsername = (usernameTemplate?: string | null) => {
const generateUsername = (usernameTemplate?: string | null, identity?: { name: string }) => {
const randomUsername = `infisical_${alphaNumericNanoId(32)}`; // Username must start with an ascii letter, so we prepend the username with "infisical_"
if (!usernameTemplate) return randomUsername;
return handlebars.compile(usernameTemplate)({
return compileUsernameTemplate({
usernameTemplate,
randomUsername,
unixTimestamp: Math.floor(Date.now() / 100)
identity
});
};
@@ -88,13 +89,18 @@ export const SnowflakeProvider = (): TDynamicProviderFns => {
return isValidConnection;
};
const create = async (data: { inputs: unknown; expireAt: number; usernameTemplate?: string | null }) => {
const { inputs, expireAt, usernameTemplate } = data;
const create = async (data: {
inputs: unknown;
expireAt: number;
usernameTemplate?: string | null;
identity?: { name: string };
}) => {
const { inputs, expireAt, usernameTemplate, identity } = data;
const providerInputs = await validateProviderInputs(inputs);
const client = await $getClient(providerInputs);
const username = generateUsername(usernameTemplate);
const username = generateUsername(usernameTemplate, identity);
const password = generatePassword();
try {

View File

@@ -10,6 +10,7 @@ import { validateHandlebarTemplate } from "@app/lib/template/validate-handlebars
import { TGatewayServiceFactory } from "../../gateway/gateway-service";
import { verifyHostInputValidity } from "../dynamic-secret-fns";
import { DynamicSecretSqlDBSchema, PasswordRequirements, SqlProviders, TDynamicProviderFns } from "./models";
import { compileUsernameTemplate } from "./templateUtils";
const EXTERNAL_REQUEST_TIMEOUT = 10 * 1000;
@@ -104,9 +105,8 @@ const generatePassword = (provider: SqlProviders, requirements?: PasswordRequire
}
};
const generateUsername = (provider: SqlProviders, usernameTemplate?: string | null) => {
const generateUsername = (provider: SqlProviders, usernameTemplate?: string | null, identity?: { name: string }) => {
let randomUsername = "";
// For oracle, the client assumes everything is upper case when not using quotes around the username
if (provider === SqlProviders.Oracle) {
randomUsername = alphaNumericNanoId(32).toUpperCase();
@@ -114,10 +114,13 @@ const generateUsername = (provider: SqlProviders, usernameTemplate?: string | nu
randomUsername = alphaNumericNanoId(32);
}
if (!usernameTemplate) return randomUsername;
return handlebars.compile(usernameTemplate)({
return compileUsernameTemplate({
usernameTemplate,
randomUsername,
unixTimestamp: Math.floor(Date.now() / 100)
identity,
options: {
toUpperCase: provider === SqlProviders.Oracle
}
});
};
@@ -221,11 +224,16 @@ export const SqlDatabaseProvider = ({ gatewayService }: TSqlDatabaseProviderDTO)
return isConnected;
};
const create = async (data: { inputs: unknown; expireAt: number; usernameTemplate?: string | null }) => {
const { inputs, expireAt, usernameTemplate } = data;
const create = async (data: {
inputs: unknown;
expireAt: number;
usernameTemplate?: string | null;
identity?: { name: string };
}) => {
const { inputs, expireAt, usernameTemplate, identity } = data;
const providerInputs = await validateProviderInputs(inputs);
const username = generateUsername(providerInputs.client, usernameTemplate);
const username = generateUsername(providerInputs.client, usernameTemplate, identity);
const password = generatePassword(providerInputs.client, providerInputs.passwordRequirements);
const gatewayCallback = async (host = providerInputs.host, port = providerInputs.port) => {

View File

@@ -0,0 +1,80 @@
/* eslint-disable func-names */
import handlebars from "handlebars";
import RE2 from "re2";
import { logger } from "@app/lib/logger";
import { alphaNumericNanoId } from "@app/lib/nanoid";
export const compileUsernameTemplate = ({
usernameTemplate,
randomUsername,
identity,
unixTimestamp,
options
}: {
usernameTemplate: string;
randomUsername: string;
identity?: { name: string };
unixTimestamp?: number;
options?: {
toUpperCase?: boolean;
};
}): string => {
// Create isolated handlebars instance
const hbs = handlebars.create();
// Register random helper on local instance
hbs.registerHelper("random", function (length: number) {
if (typeof length !== "number" || length <= 0 || length > 100) {
return "";
}
return alphaNumericNanoId(length);
});
// Register replace helper on local instance
hbs.registerHelper("replace", function (text: string, searchValue: string, replaceValue: string) {
// Convert to string if it's not already
const textStr = String(text || "");
if (!textStr) {
return textStr;
}
try {
const re2Pattern = new RE2(searchValue, "g");
// Replace all occurrences
return re2Pattern.replace(textStr, replaceValue);
} catch (error) {
logger.error(error, "RE2 pattern failed, using original template");
return textStr;
}
});
// Register truncate helper on local instance
hbs.registerHelper("truncate", function (text: string, length: number) {
// Convert to string if it's not already
const textStr = String(text || "");
if (!textStr) {
return textStr;
}
if (typeof length !== "number" || length <= 0) return textStr;
return textStr.substring(0, length);
});
// Compile template with context using local instance
const context = {
randomUsername,
unixTimestamp: unixTimestamp || Math.floor(Date.now() / 100),
identity: {
name: identity?.name
}
};
const result = hbs.compile(usernameTemplate)(context);
if (options?.toUpperCase) {
return result.toUpperCase();
}
return result;
};

View File
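The new `templateUtils` file above registers three helpers (`random`, RE2-backed `replace`, and `truncate`) on an isolated handlebars instance and exposes `randomUsername`, `identity.name`, and `unixTimestamp` to username templates; note the fallback timestamp divides `Date.now()` by 100, so it is expressed in 100 ms units rather than seconds. A usage sketch, with a purely illustrative template and inputs:

```
import { compileUsernameTemplate } from "./templateUtils";

// Illustrative template and inputs; the helper names come from the file above.
const username = compileUsernameTemplate({
  usernameTemplate: "{{truncate identity.name 8}}-{{replace randomUsername '[aeiou]' ''}}-{{random 6}}",
  randomUsername: "k3x9mqa2w7tplnd4", // normally alphaNumericNanoId(32)
  identity: { name: "ci-runner" },
  options: { toUpperCase: false } // set true to mimic the Oracle path
});
// e.g. "ci-runne-k3x9mq2w7tplnd4-Ab12Cd"
```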

@@ -42,6 +42,10 @@ export type TListGroupUsersDTO = {
filter?: EFilterReturnedUsers;
} & TGenericPermission;
export type TListProjectGroupUsersDTO = TListGroupUsersDTO & {
projectId: string;
};
export type TAddUserToGroupDTO = {
id: string;
username: string;

View File

@@ -709,6 +709,10 @@ export const licenseServiceFactory = ({
return licenses;
};
const invalidateGetPlan = async (orgId: string) => {
await keyStore.deleteItem(FEATURE_CACHE_KEY(orgId));
};
return {
generateOrgCustomerId,
removeOrgCustomer,
@@ -723,6 +727,7 @@ export const licenseServiceFactory = ({
return onPremFeatures;
},
getPlan,
invalidateGetPlan,
updateSubscriptionOrgMemberCount,
refreshPlan,
getOrgPlan,

View File

@@ -4,6 +4,7 @@ import {
ProjectPermissionActions,
ProjectPermissionCertificateActions,
ProjectPermissionCmekActions,
ProjectPermissionCommitsActions,
ProjectPermissionDynamicSecretActions,
ProjectPermissionGroupActions,
ProjectPermissionIdentityActions,
@@ -90,6 +91,11 @@ const buildAdminPermissionRules = () => {
ProjectPermissionSub.Certificates
);
can(
[ProjectPermissionCommitsActions.Read, ProjectPermissionCommitsActions.PerformRollback],
ProjectPermissionSub.Commits
);
can(
[
ProjectPermissionSshHostActions.Edit,
@@ -292,6 +298,11 @@ const buildMemberPermissionRules = () => {
ProjectPermissionSub.SecretImports
);
can(
[ProjectPermissionCommitsActions.Read, ProjectPermissionCommitsActions.PerformRollback],
ProjectPermissionSub.Commits
);
can([ProjectPermissionActions.Read], ProjectPermissionSub.SecretApproval);
can([ProjectPermissionSecretRotationActions.Read], ProjectPermissionSub.SecretRotation);
@@ -479,6 +490,7 @@ const buildViewerPermissionRules = () => {
can(ProjectPermissionActions.Read, ProjectPermissionSub.SshCertificates);
can(ProjectPermissionActions.Read, ProjectPermissionSub.SshCertificateTemplates);
can(ProjectPermissionSecretSyncActions.Read, ProjectPermissionSub.SecretSyncs);
can(ProjectPermissionCommitsActions.Read, ProjectPermissionSub.Commits);
can(
[

View File

@@ -17,6 +17,11 @@ export enum ProjectPermissionActions {
Delete = "delete"
}
export enum ProjectPermissionCommitsActions {
Read = "read",
PerformRollback = "perform-rollback"
}
export enum ProjectPermissionCertificateActions {
Read = "read",
Create = "create",
@@ -172,6 +177,7 @@ export enum ProjectPermissionSub {
SecretRollback = "secret-rollback",
SecretApproval = "secret-approval",
SecretRotation = "secret-rotation",
Commits = "commits",
Identity = "identity",
CertificateAuthorities = "certificate-authorities",
Certificates = "certificates",
@@ -325,6 +331,7 @@ export type ProjectPermissionSet =
| [ProjectPermissionActions.Read, ProjectPermissionSub.SecretRollback]
| [ProjectPermissionActions.Create, ProjectPermissionSub.SecretRollback]
| [ProjectPermissionActions.Edit, ProjectPermissionSub.Kms]
| [ProjectPermissionCommitsActions, ProjectPermissionSub.Commits]
| [ProjectPermissionSecretScanningDataSourceActions, ProjectPermissionSub.SecretScanningDataSources]
| [ProjectPermissionSecretScanningFindingActions, ProjectPermissionSub.SecretScanningFindings]
| [ProjectPermissionSecretScanningConfigActions, ProjectPermissionSub.SecretScanningConfigs];
@@ -376,7 +383,8 @@ const DynamicSecretConditionV2Schema = z
.object({
[PermissionConditionOperators.$EQ]: PermissionConditionSchema[PermissionConditionOperators.$EQ],
[PermissionConditionOperators.$NEQ]: PermissionConditionSchema[PermissionConditionOperators.$NEQ],
[PermissionConditionOperators.$IN]: PermissionConditionSchema[PermissionConditionOperators.$IN]
[PermissionConditionOperators.$IN]: PermissionConditionSchema[PermissionConditionOperators.$IN],
[PermissionConditionOperators.$GLOB]: PermissionConditionSchema[PermissionConditionOperators.$GLOB]
})
.partial()
]),
@@ -404,6 +412,23 @@ const DynamicSecretConditionV2Schema = z
})
.partial();
const SecretImportConditionSchema = z
.object({
environment: z.union([
z.string(),
z
.object({
[PermissionConditionOperators.$EQ]: PermissionConditionSchema[PermissionConditionOperators.$EQ],
[PermissionConditionOperators.$NEQ]: PermissionConditionSchema[PermissionConditionOperators.$NEQ],
[PermissionConditionOperators.$IN]: PermissionConditionSchema[PermissionConditionOperators.$IN],
[PermissionConditionOperators.$GLOB]: PermissionConditionSchema[PermissionConditionOperators.$GLOB]
})
.partial()
]),
secretPath: SECRET_PATH_PERMISSION_OPERATOR_SCHEMA
})
.partial();
const SecretConditionV2Schema = z
.object({
environment: z.union([
@@ -658,6 +683,12 @@ const GeneralPermissionSchema = [
"Describe what action an entity can take."
)
}),
z.object({
subject: z.literal(ProjectPermissionSub.Commits).describe("The entity this permission pertains to."),
action: CASL_ACTION_SCHEMA_NATIVE_ENUM(ProjectPermissionCommitsActions).describe(
"Describe what action an entity can take."
)
}),
z.object({
subject: z
.literal(ProjectPermissionSub.SecretScanningDataSources)
@@ -741,7 +772,7 @@ export const ProjectPermissionV2Schema = z.discriminatedUnion("subject", [
action: CASL_ACTION_SCHEMA_NATIVE_ENUM(ProjectPermissionActions).describe(
"Describe what action an entity can take."
),
conditions: SecretConditionV1Schema.describe(
conditions: SecretImportConditionSchema.describe(
"When specified, only matching conditions will be allowed to access given resource."
).optional()
}),

View File
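Taken together, the permission changes above introduce a `Commits` subject with `read` and `perform-rollback` actions, allow `$GLOB` conditions on dynamic secrets and secret imports, and switch secret imports to the new `SecretImportConditionSchema`. A hedged sketch of role permission entries exercising these additions; the concrete values, the array form for actions, and the lowercase `$glob` operator key are assumptions:

```
import {
  ProjectPermissionActions,
  ProjectPermissionCommitsActions,
  ProjectPermissionSub
} from "@app/ee/services/permission/project-permission";

// Illustrative entries matching ProjectPermissionV2Schema; values are examples.
const permissions = [
  {
    subject: ProjectPermissionSub.Commits,
    action: [ProjectPermissionCommitsActions.Read, ProjectPermissionCommitsActions.PerformRollback]
  },
  {
    subject: ProjectPermissionSub.SecretImports,
    action: [ProjectPermissionActions.Read],
    conditions: {
      environment: { $glob: "dev-*" }, // $GLOB is newly allowed for secret imports
      secretPath: { $glob: "/teams/**" }
    }
  }
];
```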

@@ -0,0 +1,485 @@
/* eslint-disable no-await-in-loop */
import { ForbiddenError } from "@casl/ability";
import { ActionProjectType } from "@app/db/schemas";
import { ProjectPermissionCommitsActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission";
import { NotFoundError } from "@app/lib/errors";
import { logger } from "@app/lib/logger";
import { ActorAuthMethod, ActorType } from "@app/services/auth/auth-type";
import { ResourceType, TFolderCommitServiceFactory } from "@app/services/folder-commit/folder-commit-service";
import {
isFolderCommitChange,
isSecretCommitChange
} from "@app/services/folder-commit-changes/folder-commit-changes-dal";
import { TProjectEnvDALFactory } from "@app/services/project-env/project-env-dal";
import { TSecretServiceFactory } from "@app/services/secret/secret-service";
import { TSecretFolderDALFactory } from "@app/services/secret-folder/secret-folder-dal";
import { TSecretFolderServiceFactory } from "@app/services/secret-folder/secret-folder-service";
import { TPermissionServiceFactory } from "../permission/permission-service";
type TPitServiceFactoryDep = {
folderCommitService: TFolderCommitServiceFactory;
secretService: Pick<TSecretServiceFactory, "getSecretVersionsV2ByIds" | "getChangeVersions">;
folderService: Pick<TSecretFolderServiceFactory, "getFolderById" | "getFolderVersions">;
permissionService: Pick<TPermissionServiceFactory, "getProjectPermission">;
folderDAL: Pick<TSecretFolderDALFactory, "findSecretPathByFolderIds">;
projectEnvDAL: Pick<TProjectEnvDALFactory, "findOne">;
};
export type TPitServiceFactory = ReturnType<typeof pitServiceFactory>;
export const pitServiceFactory = ({
folderCommitService,
secretService,
folderService,
permissionService,
folderDAL,
projectEnvDAL
}: TPitServiceFactoryDep) => {
const getCommitsCount = async ({
actor,
actorId,
actorOrgId,
actorAuthMethod,
projectId,
environment,
path
}: {
actor: ActorType;
actorId: string;
actorOrgId: string;
actorAuthMethod: ActorAuthMethod;
projectId: string;
environment: string;
path: string;
}) => {
const result = await folderCommitService.getCommitsCount({
actor,
actorId,
actorOrgId,
actorAuthMethod,
projectId,
environment,
path
});
return result;
};
const getCommitsForFolder = async ({
actor,
actorId,
actorOrgId,
actorAuthMethod,
projectId,
environment,
path,
offset,
limit,
search,
sort
}: {
actor: ActorType;
actorId: string;
actorOrgId: string;
actorAuthMethod: ActorAuthMethod;
projectId: string;
environment: string;
path: string;
offset: number;
limit: number;
search?: string;
sort: "asc" | "desc";
}) => {
const result = await folderCommitService.getCommitsForFolder({
actor,
actorId,
actorOrgId,
actorAuthMethod,
projectId,
environment,
path,
offset,
limit,
search,
sort
});
return {
commits: result.commits.map((commit) => ({
...commit,
commitId: commit.commitId.toString()
})),
total: result.total,
hasMore: result.hasMore
};
};
const getCommitChanges = async ({
actor,
actorId,
actorOrgId,
actorAuthMethod,
projectId,
commitId
}: {
actor: ActorType;
actorId: string;
actorOrgId: string;
actorAuthMethod: ActorAuthMethod;
projectId: string;
commitId: string;
}) => {
const changes = await folderCommitService.getCommitChanges({
actor,
actorId,
actorOrgId,
actorAuthMethod,
projectId,
commitId
});
const [folderWithPath] = await folderDAL.findSecretPathByFolderIds(projectId, [changes.folderId]);
for (const change of changes.changes) {
if (isSecretCommitChange(change)) {
change.versions = await secretService.getChangeVersions(
{
secretVersion: change.secretVersion,
secretId: change.secretId,
id: change.id,
isUpdate: change.isUpdate,
changeType: change.changeType
},
(Number.parseInt(change.secretVersion, 10) - 1).toString(),
actorId,
actor,
actorOrgId,
actorAuthMethod,
changes.envId,
projectId,
folderWithPath?.path || ""
);
} else if (isFolderCommitChange(change)) {
change.versions = await folderService.getFolderVersions(
change,
(Number.parseInt(change.folderVersion, 10) - 1).toString(),
change.folderChangeId
);
}
}
return {
changes: {
...changes,
commitId: changes.commitId.toString()
}
};
};
const compareCommitChanges = async ({
actor,
actorId,
actorOrgId,
actorAuthMethod,
projectId,
commitId,
folderId,
environment,
deepRollback,
secretPath
}: {
actor: ActorType;
actorId: string;
actorOrgId: string;
actorAuthMethod: ActorAuthMethod;
projectId: string;
commitId: string;
folderId: string;
environment: string;
deepRollback: boolean;
secretPath: string;
}) => {
const latestCommit = await folderCommitService.getLatestCommit({
folderId,
actor,
actorId,
actorOrgId,
actorAuthMethod,
projectId
});
const targetCommit = await folderCommitService.getCommitById({
commitId,
actor,
actorId,
actorOrgId,
actorAuthMethod,
projectId
});
const env = await projectEnvDAL.findOne({
projectId,
slug: environment
});
if (!latestCommit) {
throw new NotFoundError({ message: "Latest commit not found" });
}
let diffs;
if (deepRollback) {
diffs = await folderCommitService.deepCompareFolder({
targetCommitId: targetCommit.id,
envId: env.id,
projectId
});
} else {
const folderData = await folderService.getFolderById({
actor,
actorId,
actorOrgId,
actorAuthMethod,
id: folderId
});
diffs = [
{
folderId: folderData.id,
folderName: folderData.name,
folderPath: secretPath,
changes: await folderCommitService.compareFolderStates({
targetCommitId: commitId,
currentCommitId: latestCommit.id
})
}
];
}
for (const diff of diffs) {
for (const change of diff.changes) {
// Use discriminated union type checking
if (change.type === ResourceType.SECRET) {
// TypeScript now knows this is a SecretChange
if (change.secretKey && change.secretVersion && change.secretId) {
change.versions = await secretService.getChangeVersions(
{
secretVersion: change.secretVersion,
secretId: change.secretId,
id: change.id,
isUpdate: change.isUpdate,
changeType: change.changeType
},
change.fromVersion || "1",
actorId,
actor,
actorOrgId,
actorAuthMethod,
env.id,
projectId,
diff.folderPath || ""
);
}
} else if (change.type === ResourceType.FOLDER) {
// TypeScript now knows this is a FolderChange
if (change.folderVersion) {
change.versions = await folderService.getFolderVersions(change, change.fromVersion || "1", change.id);
}
}
}
}
return diffs;
};
const rollbackToCommit = async ({
actor,
actorId,
actorOrgId,
actorAuthMethod,
projectId,
commitId,
folderId,
deepRollback,
message,
environment
}: {
actor: ActorType;
actorId: string;
actorOrgId: string;
actorAuthMethod: ActorAuthMethod;
projectId: string;
commitId: string;
folderId: string;
deepRollback: boolean;
message?: string;
environment: string;
}) => {
const { permission: userPermission } = await permissionService.getProjectPermission({
actor,
actorId,
projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.SecretManager
});
ForbiddenError.from(userPermission).throwUnlessCan(
ProjectPermissionCommitsActions.PerformRollback,
ProjectPermissionSub.Commits
);
const latestCommit = await folderCommitService.getLatestCommit({
folderId,
actor,
actorId,
actorOrgId,
actorAuthMethod,
projectId
});
if (!latestCommit) {
throw new NotFoundError({ message: "Latest commit not found" });
}
logger.info(`PIT - Attempting to rollback folder ${folderId} from commit ${latestCommit.id} to commit ${commitId}`);
const targetCommit = await folderCommitService.getCommitById({
commitId,
actor,
actorId,
actorAuthMethod,
actorOrgId,
projectId
});
const env = await projectEnvDAL.findOne({
projectId,
slug: environment
});
if (!targetCommit || targetCommit.folderId !== folderId || targetCommit.envId !== env.id) {
throw new NotFoundError({ message: "Target commit not found" });
}
if (!latestCommit || latestCommit.envId !== env.id) {
throw new NotFoundError({ message: "Latest commit not found" });
}
if (deepRollback) {
await folderCommitService.deepRollbackFolder(commitId, env.id, actorId, actor, projectId, message);
return { success: true };
}
const diff = await folderCommitService.compareFolderStates({
currentCommitId: latestCommit.id,
targetCommitId: commitId
});
const response = await folderCommitService.applyFolderStateDifferences({
differences: diff,
actorInfo: {
actorType: actor,
actorId,
message: message || "Rollback to previous commit"
},
folderId,
projectId,
reconstructNewFolders: deepRollback
});
return {
success: true,
secretChangesCount: response.secretChangesCount,
folderChangesCount: response.folderChangesCount,
totalChanges: response.totalChanges
};
};
const revertCommit = async ({
actor,
actorId,
actorOrgId,
actorAuthMethod,
projectId,
commitId
}: {
actor: ActorType;
actorId: string;
actorOrgId: string;
actorAuthMethod: ActorAuthMethod;
projectId: string;
commitId: string;
}) => {
const response = await folderCommitService.revertCommitChanges({
commitId,
actor,
actorId,
actorAuthMethod,
actorOrgId,
projectId
});
return response;
};
const getFolderStateAtCommit = async ({
actor,
actorId,
actorOrgId,
actorAuthMethod,
projectId,
commitId
}: {
actor: ActorType;
actorId: string;
actorOrgId: string;
actorAuthMethod: ActorAuthMethod;
projectId: string;
commitId: string;
}) => {
const commit = await folderCommitService.getCommitById({
commitId,
actor,
actorId,
actorOrgId,
actorAuthMethod,
projectId
});
if (!commit) {
throw new NotFoundError({ message: `Commit with ID ${commitId} not found` });
}
const response = await folderCommitService.reconstructFolderState(commitId);
return response.map((item) => {
if (item.type === ResourceType.SECRET) {
return {
...item,
secretVersion: Number(item.secretVersion)
};
}
if (item.type === ResourceType.FOLDER) {
return {
...item,
folderVersion: Number(item.folderVersion)
};
}
return item;
});
};
return {
getCommitsCount,
getCommitsForFolder,
getCommitChanges,
compareCommitChanges,
rollbackToCommit,
revertCommit,
getFolderStateAtCommit
};
};

View File
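The new PIT service above gates folder-commit operations behind project permissions: `rollbackToCommit` requires the `perform-rollback` action on the `Commits` subject, verifies the target commit belongs to the requested folder and environment, then either performs a deep rollback or applies the diff between the latest and target folder states. A hedged call sketch; the IDs and actor fields are placeholders:

```
import { pitServiceFactory } from "@app/ee/services/pit/pit-service";
import { ActorAuthMethod, ActorType } from "@app/services/auth/auth-type";

declare const actorAuthMethod: ActorAuthMethod; // taken from the authenticated request
declare const pitService: ReturnType<typeof pitServiceFactory>;

const result = await pitService.rollbackToCommit({
  actor: ActorType.USER,
  actorId: "user-id",
  actorOrgId: "org-id",
  actorAuthMethod,
  projectId: "project-id",
  commitId: "commit-id",
  folderId: "folder-id",
  environment: "dev",
  deepRollback: false,
  message: "Roll back accidental secret deletion"
});
// => { success: true, secretChangesCount, folderChangesCount, totalChanges }
```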

@@ -20,6 +20,7 @@ import { EnforcementLevel } from "@app/lib/types";
import { triggerWorkflowIntegrationNotification } from "@app/lib/workflow-integrations/trigger-notification";
import { TriggerFeature } from "@app/lib/workflow-integrations/types";
import { ActorType } from "@app/services/auth/auth-type";
import { TFolderCommitServiceFactory } from "@app/services/folder-commit/folder-commit-service";
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
import { KmsDataKey } from "@app/services/kms/kms-types";
import { TMicrosoftTeamsServiceFactory } from "@app/services/microsoft-teams/microsoft-teams-service";
@@ -130,6 +131,7 @@ type TSecretApprovalRequestServiceFactoryDep = {
licenseService: Pick<TLicenseServiceFactory, "getPlan">;
projectMicrosoftTeamsConfigDAL: Pick<TProjectMicrosoftTeamsConfigDALFactory, "getIntegrationDetailsByProject">;
microsoftTeamsService: Pick<TMicrosoftTeamsServiceFactory, "sendNotification">;
folderCommitService: Pick<TFolderCommitServiceFactory, "createCommit">;
};
export type TSecretApprovalRequestServiceFactory = ReturnType<typeof secretApprovalRequestServiceFactory>;
@@ -161,7 +163,8 @@ export const secretApprovalRequestServiceFactory = ({
projectSlackConfigDAL,
resourceMetadataDAL,
projectMicrosoftTeamsConfigDAL,
microsoftTeamsService
microsoftTeamsService,
folderCommitService
}: TSecretApprovalRequestServiceFactoryDep) => {
const requestCount = async ({ projectId, actor, actorId, actorOrgId, actorAuthMethod }: TApprovalRequestCountDTO) => {
if (actor === ActorType.SERVICE) throw new BadRequestError({ message: "Cannot use service token" });
@@ -597,6 +600,10 @@ export const secretApprovalRequestServiceFactory = ({
? await fnSecretV2BridgeBulkInsert({
tx,
folderId,
actor: {
actorId,
type: actor
},
orgId: actorOrgId,
inputSecrets: secretCreationCommits.map((el) => ({
tagIds: el?.tags.map(({ id }) => id),
@@ -619,13 +626,18 @@ export const secretApprovalRequestServiceFactory = ({
secretDAL: secretV2BridgeDAL,
secretVersionDAL: secretVersionV2BridgeDAL,
secretTagDAL,
secretVersionTagDAL: secretVersionTagV2BridgeDAL
secretVersionTagDAL: secretVersionTagV2BridgeDAL,
folderCommitService
})
: [];
const updatedSecrets = secretUpdationCommits.length
? await fnSecretV2BridgeBulkUpdate({
folderId,
orgId: actorOrgId,
actor: {
actorId,
type: actor
},
tx,
inputSecrets: secretUpdationCommits.map((el) => {
const encryptedValue =
@@ -659,7 +671,8 @@ export const secretApprovalRequestServiceFactory = ({
secretVersionDAL: secretVersionV2BridgeDAL,
secretTagDAL,
secretVersionTagDAL: secretVersionTagV2BridgeDAL,
resourceMetadataDAL
resourceMetadataDAL,
folderCommitService
})
: [];
const deletedSecret = secretDeletionCommits.length
@@ -667,10 +680,13 @@ export const secretApprovalRequestServiceFactory = ({
projectId,
folderId,
tx,
actorId: "",
actorId,
actorType: actor,
secretDAL: secretV2BridgeDAL,
secretQueueService,
inputSecrets: secretDeletionCommits.map(({ key }) => ({ secretKey: key, type: SecretType.Shared }))
inputSecrets: secretDeletionCommits.map(({ key }) => ({ secretKey: key, type: SecretType.Shared })),
folderCommitService,
secretVersionDAL: secretVersionV2BridgeDAL
})
: [];
const updatedSecretApproval = await secretApprovalRequestDAL.updateById(

View File

@@ -10,6 +10,7 @@ import { logger } from "@app/lib/logger";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { QueueName, TQueueServiceFactory } from "@app/queue";
import { ActorType } from "@app/services/auth/auth-type";
import { TFolderCommitServiceFactory } from "@app/services/folder-commit/folder-commit-service";
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
import { KmsDataKey } from "@app/services/kms/kms-types";
import { TProjectBotServiceFactory } from "@app/services/project-bot/project-bot-service";
@@ -87,6 +88,7 @@ type TSecretReplicationServiceFactoryDep = {
projectBotService: Pick<TProjectBotServiceFactory, "getBotKey">;
kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">;
folderCommitService: Pick<TFolderCommitServiceFactory, "createCommit">;
};
export type TSecretReplicationServiceFactory = ReturnType<typeof secretReplicationServiceFactory>;
@@ -132,6 +134,7 @@ export const secretReplicationServiceFactory = ({
secretVersionV2BridgeDAL,
secretV2BridgeDAL,
kmsService,
folderCommitService,
resourceMetadataDAL
}: TSecretReplicationServiceFactoryDep) => {
const $getReplicatedSecrets = (
@@ -419,7 +422,7 @@ export const secretReplicationServiceFactory = ({
return {
op: operation,
requestId: approvalRequestDoc.id,
metadata: doc.metadata,
metadata: doc.metadata ? JSON.stringify(doc.metadata) : [],
secretMetadata: JSON.stringify(doc.secretMetadata),
key: doc.key,
encryptedValue: doc.encryptedValue,
@@ -446,11 +449,12 @@ export const secretReplicationServiceFactory = ({
tx,
secretTagDAL,
resourceMetadataDAL,
folderCommitService,
secretVersionTagDAL: secretVersionV2TagBridgeDAL,
inputSecrets: locallyCreatedSecrets.map((doc) => {
return {
type: doc.type,
metadata: doc.metadata,
metadata: doc.metadata ? JSON.stringify(doc.metadata) : [],
key: doc.key,
encryptedValue: doc.encryptedValue,
encryptedComment: doc.encryptedComment,
@@ -466,6 +470,7 @@ export const secretReplicationServiceFactory = ({
orgId,
folderId: destinationReplicationFolderId,
secretVersionDAL: secretVersionV2BridgeDAL,
folderCommitService,
secretDAL: secretV2BridgeDAL,
tx,
resourceMetadataDAL,
@@ -479,7 +484,7 @@ export const secretReplicationServiceFactory = ({
},
data: {
type: doc.type,
metadata: doc.metadata,
metadata: doc.metadata ? JSON.stringify(doc.metadata) : [],
key: doc.key,
encryptedValue: doc.encryptedValue as Buffer,
encryptedComment: doc.encryptedComment,

View File

@@ -63,6 +63,7 @@ import { TAppConnectionDALFactory } from "@app/services/app-connection/app-conne
import { decryptAppConnection } from "@app/services/app-connection/app-connection-fns";
import { TAppConnectionServiceFactory } from "@app/services/app-connection/app-connection-service";
import { ActorType } from "@app/services/auth/auth-type";
import { TFolderCommitServiceFactory } from "@app/services/folder-commit/folder-commit-service";
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
import { KmsDataKey } from "@app/services/kms/kms-types";
import { TProjectBotServiceFactory } from "@app/services/project-bot/project-bot-service";
@@ -98,7 +99,7 @@ export type TSecretRotationV2ServiceFactoryDep = {
TSecretV2BridgeDALFactory,
"bulkUpdate" | "insertMany" | "deleteMany" | "upsertSecretReferences" | "find" | "invalidateSecretCacheByProjectId"
>;
secretVersionV2BridgeDAL: Pick<TSecretVersionV2DALFactory, "insertMany">;
secretVersionV2BridgeDAL: Pick<TSecretVersionV2DALFactory, "insertMany" | "findLatestVersionMany">;
secretVersionTagV2BridgeDAL: Pick<TSecretVersionV2TagDALFactory, "insertMany">;
resourceMetadataDAL: Pick<TResourceMetadataDALFactory, "insertMany" | "delete">;
secretTagDAL: Pick<TSecretTagDALFactory, "saveTagsToSecretV2" | "deleteTagsToSecretV2" | "find">;
@@ -106,6 +107,7 @@ export type TSecretRotationV2ServiceFactoryDep = {
snapshotService: Pick<TSecretSnapshotServiceFactory, "performSnapshot">;
queueService: Pick<TQueueServiceFactory, "queuePg">;
appConnectionDAL: Pick<TAppConnectionDALFactory, "findById" | "update" | "updateById">;
folderCommitService: Pick<TFolderCommitServiceFactory, "createCommit">;
};
export type TSecretRotationV2ServiceFactory = ReturnType<typeof secretRotationV2ServiceFactory>;
@@ -145,6 +147,7 @@ export const secretRotationV2ServiceFactory = ({
snapshotService,
keyStore,
queueService,
folderCommitService,
appConnectionDAL
}: TSecretRotationV2ServiceFactoryDep) => {
const $queueSendSecretRotationStatusNotification = async (secretRotation: TSecretRotationV2Raw) => {
@@ -538,7 +541,12 @@ export const secretRotationV2ServiceFactory = ({
secretVersionDAL: secretVersionV2BridgeDAL,
secretVersionTagDAL: secretVersionTagV2BridgeDAL,
secretTagDAL,
resourceMetadataDAL
folderCommitService,
resourceMetadataDAL,
actor: {
type: actor.type,
actorId: actor.id
}
});
await secretRotationV2DAL.insertSecretMappings(
@@ -674,7 +682,12 @@ export const secretRotationV2ServiceFactory = ({
secretVersionDAL: secretVersionV2BridgeDAL,
secretVersionTagDAL: secretVersionTagV2BridgeDAL,
secretTagDAL,
resourceMetadataDAL
folderCommitService,
resourceMetadataDAL,
actor: {
type: actor.type,
actorId: actor.id
}
});
secretsMappingUpdated = true;
@@ -792,6 +805,9 @@ export const secretRotationV2ServiceFactory = ({
projectId,
folderId,
actorId: actor.id, // not actually used since rotated secrets are shared
actorType: actor.type,
folderCommitService,
secretVersionDAL: secretVersionV2BridgeDAL,
tx
});
}
@@ -935,6 +951,10 @@ export const secretRotationV2ServiceFactory = ({
secretDAL: secretV2BridgeDAL,
secretVersionDAL: secretVersionV2BridgeDAL,
secretVersionTagDAL: secretVersionTagV2BridgeDAL,
folderCommitService,
actor: {
type: ActorType.PLATFORM
},
secretTagDAL,
resourceMetadataDAL
});

View File

@@ -14,6 +14,7 @@ import { logger } from "@app/lib/logger";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { QueueJobs, QueueName, TQueueServiceFactory } from "@app/queue";
import { ActorType } from "@app/services/auth/auth-type";
import { CommitType, TFolderCommitServiceFactory } from "@app/services/folder-commit/folder-commit-service";
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
import { KmsDataKey } from "@app/services/kms/kms-types";
import { TProjectBotServiceFactory } from "@app/services/project-bot/project-bot-service";
@@ -53,6 +54,7 @@ type TSecretRotationQueueFactoryDep = {
secretVersionV2BridgeDAL: Pick<TSecretVersionV2DALFactory, "insertMany" | "findLatestVersionMany">;
telemetryService: Pick<TTelemetryServiceFactory, "sendPostHogEvents">;
kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">;
folderCommitService: Pick<TFolderCommitServiceFactory, "createCommit">;
};
// These errors should stop the repeatable job and ask the user to reconfigure rotation
@@ -77,6 +79,7 @@ export const secretRotationQueueFactory = ({
telemetryService,
secretV2BridgeDAL,
secretVersionV2BridgeDAL,
folderCommitService,
kmsService
}: TSecretRotationQueueFactoryDep) => {
const addToQueue = async (rotationId: string, interval: number) => {
@@ -330,7 +333,7 @@ export const secretRotationQueueFactory = ({
})),
tx
);
await secretVersionV2BridgeDAL.insertMany(
const secretVersions = await secretVersionV2BridgeDAL.insertMany(
updatedSecrets.map(({ id, updatedAt, createdAt, ...el }) => ({
...el,
actorType: ActorType.PLATFORM,
@@ -338,6 +341,22 @@ export const secretRotationQueueFactory = ({
})),
tx
);
await folderCommitService.createCommit(
{
actor: {
type: ActorType.PLATFORM
},
message: "Changed by Secret rotation",
folderId: secretVersions[0].folderId,
changes: secretVersions.map((sv) => ({
type: CommitType.ADD,
isUpdate: true,
secretVersionId: sv.id
}))
},
tx
);
});
await secretV2BridgeDAL.invalidateSecretCacheByProjectId(secretRotation.projectId);

View File

@@ -8,6 +8,7 @@ import { InternalServerError, NotFoundError } from "@app/lib/errors";
import { groupBy } from "@app/lib/fn";
import { logger } from "@app/lib/logger";
import { ActorType } from "@app/services/auth/auth-type";
import { CommitType, TFolderCommitServiceFactory } from "@app/services/folder-commit/folder-commit-service";
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
import { KmsDataKey } from "@app/services/kms/kms-types";
import { TProjectBotServiceFactory } from "@app/services/project-bot/project-bot-service";
@@ -51,8 +52,8 @@ type TSecretSnapshotServiceFactoryDep = {
snapshotSecretV2BridgeDAL: TSnapshotSecretV2DALFactory;
snapshotFolderDAL: TSnapshotFolderDALFactory;
secretVersionDAL: Pick<TSecretVersionDALFactory, "insertMany" | "findLatestVersionByFolderId">;
secretVersionV2BridgeDAL: Pick<TSecretVersionV2DALFactory, "insertMany" | "findLatestVersionByFolderId">;
folderVersionDAL: Pick<TSecretFolderVersionDALFactory, "findLatestVersionByFolderId" | "insertMany">;
secretVersionV2BridgeDAL: Pick<TSecretVersionV2DALFactory, "insertMany" | "findLatestVersionByFolderId" | "findOne">;
folderVersionDAL: Pick<TSecretFolderVersionDALFactory, "findLatestVersionByFolderId" | "insertMany" | "findOne">;
secretDAL: Pick<TSecretDALFactory, "delete" | "insertMany">;
secretV2BridgeDAL: Pick<TSecretV2BridgeDALFactory, "delete" | "insertMany">;
secretTagDAL: Pick<TSecretTagDALFactory, "saveTagsToSecret" | "saveTagsToSecretV2">;
@@ -63,6 +64,7 @@ type TSecretSnapshotServiceFactoryDep = {
licenseService: Pick<TLicenseServiceFactory, "isValidLicense">;
kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">;
projectBotService: Pick<TProjectBotServiceFactory, "getBotKey">;
folderCommitService: Pick<TFolderCommitServiceFactory, "createCommit">;
};
export type TSecretSnapshotServiceFactory = ReturnType<typeof secretSnapshotServiceFactory>;
@@ -84,7 +86,8 @@ export const secretSnapshotServiceFactory = ({
snapshotSecretV2BridgeDAL,
secretVersionV2TagBridgeDAL,
kmsService,
projectBotService
projectBotService,
folderCommitService
}: TSecretSnapshotServiceFactoryDep) => {
const projectSecretSnapshotCount = async ({
environment,
@@ -403,6 +406,18 @@ export const secretSnapshotServiceFactory = ({
.filter((el) => el.isRotatedSecret)
.map((el) => el.secretId);
const deletedSecretsChanges = new Map(); // secretId -> version info
const deletedFoldersChanges = new Map(); // folderId -> version info
const addedSecretsChanges = new Map(); // secretId -> version info
const addedFoldersChanges = new Map(); // folderId -> version info
const commitChanges: {
type: string;
secretVersionId?: string;
folderVersionId?: string;
isUpdate?: boolean;
folderId?: string;
}[] = [];
// this will remove all secrets in current folder except rotated secrets which we ignore
const deletedTopLevelSecs = await secretV2BridgeDAL.delete(
{
@@ -424,7 +439,35 @@ export const secretSnapshotServiceFactory = ({
},
tx
);
await Promise.all(
deletedTopLevelSecs.map(async (sec) => {
const version = await secretVersionV2BridgeDAL.findOne({ secretId: sec.id, version: sec.version }, tx);
deletedSecretsChanges.set(sec.id, {
id: sec.id,
version: sec.version,
// Store the version ID if available from the snapshot
versionId: version?.id
});
})
);
const deletedTopLevelSecsGroupById = groupBy(deletedTopLevelSecs, (item) => item.id);
const deletedFoldersData = await folderDAL.delete({ parentId: snapshot.folderId, isReserved: false }, tx);
await Promise.all(
deletedFoldersData.map(async (folder) => {
const version = await folderVersionDAL.findOne({ folderId: folder.id, version: folder.version }, tx);
deletedFoldersChanges.set(folder.id, {
id: folder.id,
version: folder.version,
// Store the version ID if available
versionId: version?.id
});
})
);
// this will remove all secrets and folders in the child folders
// due to SQL foreign keys and the linked-list connections, removing the folders removes everything below them too
const deletedFolders = await folderDAL.delete({ parentId: snapshot.folderId, isReserved: false }, tx);
@@ -489,14 +532,21 @@ export const secretSnapshotServiceFactory = ({
});
await secretTagDAL.saveTagsToSecretV2(secretTagsToBeInsert, tx);
const folderVersions = await folderVersionDAL.insertMany(
folders.map(({ version, name, id, envId }) => ({
folders.map(({ version, name, id, envId, description }) => ({
name,
version,
folderId: id,
envId
envId,
description
})),
tx
);
// Track added folders
folderVersions.forEach((fv) => {
addedFoldersChanges.set(fv.folderId, fv);
});
const userActorId = actor === ActorType.USER ? actorId : undefined;
const identityActorId = actor !== ActorType.USER ? actorId : undefined;
const actorType = actor || ActorType.PLATFORM;
@@ -511,6 +561,11 @@ export const secretSnapshotServiceFactory = ({
})),
tx
);
secretVersions.forEach((sv) => {
addedSecretsChanges.set(sv.secretId, sv);
});
await secretVersionV2TagBridgeDAL.insertMany(
secretVersions.flatMap(({ secretId, id }) =>
secretVerTagToBeInsert?.[secretId]?.length
@@ -522,6 +577,70 @@ export const secretSnapshotServiceFactory = ({
),
tx
);
// Compute commit changes
// Handle secrets
deletedSecretsChanges.forEach((deletedInfo, secretId) => {
const addedSecret = addedSecretsChanges.get(secretId);
if (addedSecret) {
// Secret was deleted and re-added - this is an update only if versions are different
if (deletedInfo.versionId !== addedSecret.id) {
commitChanges.push({
type: CommitType.ADD, // In the commit system, updates are tracked as "add" with isUpdate=true
secretVersionId: addedSecret.id,
isUpdate: true
});
}
// Remove from addedSecrets since we've handled it
addedSecretsChanges.delete(secretId);
} else if (deletedInfo.versionId) {
// Secret was only deleted
commitChanges.push({
type: CommitType.DELETE,
secretVersionId: deletedInfo.versionId
});
}
});
// Add remaining new secrets (not updates)
addedSecretsChanges.forEach((addedSecret) => {
commitChanges.push({
type: CommitType.ADD,
secretVersionId: addedSecret.id
});
});
// Handle folders
deletedFoldersChanges.forEach((deletedInfo, folderId) => {
const addedFolder = addedFoldersChanges.get(folderId);
if (addedFolder) {
// Folder was deleted and re-added - this is an update only if versions are different
if (deletedInfo.versionId !== addedFolder.id) {
commitChanges.push({
type: CommitType.ADD,
folderVersionId: addedFolder.id,
isUpdate: true
});
}
// Remove from addedFolders since we've handled it
addedFoldersChanges.delete(folderId);
} else if (deletedInfo.versionId) {
// Folder was only deleted
commitChanges.push({
type: CommitType.DELETE,
folderVersionId: deletedInfo.versionId,
folderId: deletedInfo.id
});
}
});
// Add remaining new folders (not updates)
addedFoldersChanges.forEach((addedFolder) => {
commitChanges.push({
type: CommitType.ADD,
folderVersionId: addedFolder.id
});
});
const newSnapshot = await snapshotDAL.create(
{
folderId: snapshot.folderId,
@@ -550,6 +669,22 @@ export const secretSnapshotServiceFactory = ({
})),
tx
);
if (commitChanges.length > 0) {
await folderCommitService.createCommit(
{
actor: {
type: actorType,
metadata: {
id: userActorId || identityActorId
}
},
message: "Rollback to snapshot",
folderId: snapshot.folderId,
changes: commitChanges
},
tx
);
}
return { ...newSnapshot, snapshotSecrets, snapshotFolders };
});
@@ -609,11 +744,12 @@ export const secretSnapshotServiceFactory = ({
});
await secretTagDAL.saveTagsToSecret(secretTagsToBeInsert, tx);
const folderVersions = await folderVersionDAL.insertMany(
folders.map(({ version, name, id, envId }) => ({
folders.map(({ version, name, id, envId, description }) => ({
name,
version,
folderId: id,
envId
envId,
description
})),
tx
);

View File
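The rollback path above now records every secret and folder version deleted and re-created during snapshot restoration, then reconciles the two maps into folder-commit changes: delete-then-re-add with a changed version id becomes an ADD flagged `isUpdate`, a bare delete becomes a DELETE, and whatever remains in the added map becomes a plain ADD. A distilled sketch of that reconciliation, with simplified types but the same branching as the diff:

```
type VersionInfo = { id: string; version: number; versionId?: string };
type CommitChange = { type: "add" | "delete"; versionId?: string; isUpdate?: boolean };

// Mirrors the deleted/added Map reconciliation in the hunk above.
const reconcileChanges = (
  deleted: Map<string, VersionInfo>,
  added: Map<string, { id: string }>
): CommitChange[] => {
  const changes: CommitChange[] = [];
  deleted.forEach((del, resourceId) => {
    const readded = added.get(resourceId);
    if (readded) {
      // deleted then re-added: only an update if the version actually changed
      if (del.versionId !== readded.id) changes.push({ type: "add", versionId: readded.id, isUpdate: true });
      added.delete(resourceId); // handled; don't emit a second change
    } else if (del.versionId) {
      changes.push({ type: "delete", versionId: del.versionId });
    }
  });
  // anything left in `added` is a brand-new resource
  added.forEach((entry) => changes.push({ type: "add", versionId: entry.id }));
  return changes;
};
```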

@@ -27,6 +27,7 @@ export const KeyStorePrefixes = {
KmsOrgDataKeyCreation: "kms-org-data-key-creation-lock",
WaitUntilReadyKmsOrgKeyCreation: "wait-until-ready-kms-org-key-creation-",
WaitUntilReadyKmsOrgDataKeyCreation: "wait-until-ready-kms-org-data-key-creation-",
FolderTreeCheckpoint: (envId: string) => `folder-tree-checkpoint-${envId}`,
WaitUntilReadyProjectEnvironmentOperation: (projectId: string) =>
`wait-until-ready-project-environments-operation-${projectId}`,

View File

@@ -89,6 +89,7 @@ export const GROUPS = {
limit: "The number of users to return.",
username: "The username to search for.",
search: "The text string that user email or name will be filtered by.",
projectId: "The ID of the project the group belongs to.",
filterUsers:
"Whether to filter the list of returned users. 'existingMembers' will only return existing users in the group, 'nonMembers' will only return users not in the group, undefined will return all users in the organization."
},
@@ -625,7 +626,8 @@ export const PROJECTS = {
autoCapitalization: "Disable or enable auto-capitalization for the project.",
slug: "An optional slug for the project. (must be unique within the organization)",
hasDeleteProtection: "Enable or disable delete protection for the project.",
secretSharing: "Enable or disable secret sharing for the project."
secretSharing: "Enable or disable secret sharing for the project.",
showSnapshotsLegacy: "Enable or disable legacy snapshots for the project."
},
GET_KEY: {
workspaceId: "The ID of the project to get the key from."
@@ -2276,7 +2278,8 @@ export const SecretSyncs = {
},
GCP: {
scope: "The Google project scope that secrets should be synced to.",
projectId: "The ID of the Google project secrets should be synced to."
projectId: "The ID of the Google project secrets should be synced to.",
locationId: 'The ID of the Google project location secrets should be synced to (e.g. "us-west4").'
},
DATABRICKS: {
scope: "The Databricks secret scope that secrets should be synced to."

View File

@@ -213,6 +213,12 @@ const envSchema = z
GATEWAY_RELAY_AUTH_SECRET: zpStr(z.string().optional()),
DYNAMIC_SECRET_ALLOW_INTERNAL_IP: zodStrBool.default("false"),
DYNAMIC_SECRET_AWS_ACCESS_KEY_ID: zpStr(z.string().optional()).default(
process.env.INF_APP_CONNECTION_AWS_ACCESS_KEY_ID
),
DYNAMIC_SECRET_AWS_SECRET_ACCESS_KEY: zpStr(z.string().optional()).default(
process.env.INF_APP_CONNECTION_AWS_SECRET_ACCESS_KEY
),
/* ----------------------------------------------------------------------------- */
/* App Connections ----------------------------------------------------------------------------- */
@@ -255,6 +261,10 @@ const envSchema = z
DATADOG_SERVICE: zpStr(z.string().optional().default("infisical-core")),
DATADOG_HOSTNAME: zpStr(z.string().optional()),
// PIT
PIT_CHECKPOINT_WINDOW: zpStr(z.string().optional().default("2")),
PIT_TREE_CHECKPOINT_WINDOW: zpStr(z.string().optional().default("30")),
/* CORS ----------------------------------------------------------------------------- */
CORS_ALLOWED_ORIGINS: zpStr(
z

View File

@@ -7,13 +7,24 @@ type SanitizationArg = {
allowedExpressions?: (arg: string) => boolean;
};
const isValidExpression = (expression: string, dto: SanitizationArg): boolean => {
// Allow helper functions (replace, truncate)
const allowedHelpers = ["replace", "truncate", "random"];
if (allowedHelpers.includes(expression)) {
return true;
}
// Check regular allowed expressions
return dto?.allowedExpressions?.(expression) || false;
};
export const validateHandlebarTemplate = (templateName: string, template: string, dto: SanitizationArg) => {
const parsedAst = handlebars.parse(template);
parsedAst.body.forEach((el) => {
if (el.type === "ContentStatement") return;
if (el.type === "MustacheStatement" && "path" in el) {
const { path } = el as { type: "MustacheStatement"; path: { type: "PathExpression"; original: string } };
if (path.type === "PathExpression" && dto?.allowedExpressions?.(path.original)) return;
if (path.type === "PathExpression" && isValidExpression(path.original, dto)) return;
}
logger.error(el, "Template sanitization failed");
throw new BadRequestError({ message: `Template sanitization failed: ${templateName}` });
@@ -26,7 +37,7 @@ export const isValidHandleBarTemplate = (template: string, dto: SanitizationArg)
if (el.type === "ContentStatement") return true;
if (el.type === "MustacheStatement" && "path" in el) {
const { path } = el as { type: "MustacheStatement"; path: { type: "PathExpression"; original: string } };
if (path.type === "PathExpression" && dto?.allowedExpressions?.(path.original)) return true;
if (path.type === "PathExpression" && isValidExpression(path.original, dto)) return true;
}
return false;
});

View File

@@ -60,6 +60,7 @@ export enum QueueName {
ImportSecretsFromExternalSource = "import-secrets-from-external-source",
AppConnectionSecretSync = "app-connection-secret-sync",
SecretRotationV2 = "secret-rotation-v2",
FolderTreeCheckpoint = "folder-tree-checkpoint",
InvalidateCache = "invalidate-cache",
SecretScanningV2 = "secret-scanning-v2"
}
@@ -94,6 +95,7 @@ export enum QueueJobs {
SecretRotationV2QueueRotations = "secret-rotation-v2-queue-rotations",
SecretRotationV2RotateSecrets = "secret-rotation-v2-rotate-secrets",
SecretRotationV2SendNotification = "secret-rotation-v2-send-notification",
CreateFolderTreeCheckpoint = "create-folder-tree-checkpoint",
InvalidateCache = "invalidate-cache",
SecretScanningV2FullScan = "secret-scanning-v2-full-scan",
SecretScanningV2DiffScan = "secret-scanning-v2-diff-scan",
@@ -209,6 +211,12 @@ export type TQueueJobTypes = {
name: QueueJobs.ProjectV3Migration;
payload: { projectId: string };
};
[QueueName.FolderTreeCheckpoint]: {
name: QueueJobs.CreateFolderTreeCheckpoint;
payload: {
envId: string;
};
};
[QueueName.ImportSecretsFromExternalSource]: {
name: QueueJobs.ImportSecretsFromExternalSource;
payload: {

View File

@@ -155,6 +155,12 @@ export const injectIdentity = fp(async (server: FastifyZodProvider) => {
oidc: token?.identityAuth?.oidc
});
}
if (token?.identityAuth?.kubernetes) {
requestContext.set("identityAuthInfo", {
identityId: identity.identityId,
kubernetes: token?.identityAuth?.kubernetes
});
}
break;
}
case AuthMode.SERVICE_TOKEN: {

View File

@@ -57,9 +57,12 @@ export const registerServeUI = async (
reply.callNotFound();
return;
}
// reference: https://github.com/fastify/fastify-static?tab=readme-ov-file#managing-cache-control-headers
// to avoid ui bundle skew on new deployment
return reply.sendFile("index.html", { maxAge: 0, immutable: false });
// This should help avoid caching any chunks (temp fix)
void reply.header("Cache-Control", "no-cache, no-store, must-revalidate, private, max-age=0");
void reply.header("Pragma", "no-cache");
void reply.header("Expires", "0");
return reply.sendFile("index.html");
}
});
}

View File
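Serving `index.html` with `Cache-Control: no-cache, no-store, must-revalidate` (plus the legacy `Pragma` and `Expires` headers) forces browsers to re-fetch the HTML shell on every load, so a fresh deployment's hashed chunk URLs replace stale ones instead of pointing at chunks that no longer exist. A quick client-side check, with a placeholder host:

```
// Placeholder URL; verifies the UI shell is served uncacheable.
const res = await fetch("https://your-infisical-host/");
console.log(res.headers.get("cache-control")); // "no-cache, no-store, must-revalidate, private, max-age=0"
console.log(res.headers.get("pragma")); // "no-cache"
```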

@@ -60,6 +60,7 @@ import { oidcConfigDALFactory } from "@app/ee/services/oidc/oidc-config-dal";
import { oidcConfigServiceFactory } from "@app/ee/services/oidc/oidc-config-service";
import { permissionDALFactory } from "@app/ee/services/permission/permission-dal";
import { permissionServiceFactory } from "@app/ee/services/permission/permission-service";
import { pitServiceFactory } from "@app/ee/services/pit/pit-service";
import { projectTemplateDALFactory } from "@app/ee/services/project-template/project-template-dal";
import { projectTemplateServiceFactory } from "@app/ee/services/project-template/project-template-service";
import { projectUserAdditionalPrivilegeDALFactory } from "@app/ee/services/project-user-additional-privilege/project-user-additional-privilege-dal";
@@ -154,6 +155,14 @@ import { externalGroupOrgRoleMappingDALFactory } from "@app/services/external-gr
import { externalGroupOrgRoleMappingServiceFactory } from "@app/services/external-group-org-role-mapping/external-group-org-role-mapping-service";
import { externalMigrationQueueFactory } from "@app/services/external-migration/external-migration-queue";
import { externalMigrationServiceFactory } from "@app/services/external-migration/external-migration-service";
import { folderCheckpointDALFactory } from "@app/services/folder-checkpoint/folder-checkpoint-dal";
import { folderCheckpointResourcesDALFactory } from "@app/services/folder-checkpoint-resources/folder-checkpoint-resources-dal";
import { folderCommitDALFactory } from "@app/services/folder-commit/folder-commit-dal";
import { folderCommitQueueServiceFactory } from "@app/services/folder-commit/folder-commit-queue";
import { folderCommitServiceFactory } from "@app/services/folder-commit/folder-commit-service";
import { folderCommitChangesDALFactory } from "@app/services/folder-commit-changes/folder-commit-changes-dal";
import { folderTreeCheckpointDALFactory } from "@app/services/folder-tree-checkpoint/folder-tree-checkpoint-dal";
import { folderTreeCheckpointResourcesDALFactory } from "@app/services/folder-tree-checkpoint-resources/folder-tree-checkpoint-resources-dal";
import { groupProjectDALFactory } from "@app/services/group-project/group-project-dal";
import { groupProjectMembershipRoleDALFactory } from "@app/services/group-project/group-project-membership-role-dal";
import { groupProjectServiceFactory } from "@app/services/group-project/group-project-service";
@@ -583,6 +592,41 @@ export const registerRoutes = async (
projectRoleDAL,
permissionService
});
const folderCommitChangesDAL = folderCommitChangesDALFactory(db);
const folderCheckpointDAL = folderCheckpointDALFactory(db);
const folderCheckpointResourcesDAL = folderCheckpointResourcesDALFactory(db);
const folderTreeCheckpointDAL = folderTreeCheckpointDALFactory(db);
const folderCommitDAL = folderCommitDALFactory(db);
const folderTreeCheckpointResourcesDAL = folderTreeCheckpointResourcesDALFactory(db);
const folderCommitQueueService = folderCommitQueueServiceFactory({
queueService,
folderTreeCheckpointDAL,
keyStore,
folderTreeCheckpointResourcesDAL,
folderCommitDAL,
folderDAL
});
const folderCommitService = folderCommitServiceFactory({
folderCommitDAL,
folderCommitChangesDAL,
folderCheckpointDAL,
folderTreeCheckpointDAL,
userDAL,
identityDAL,
folderDAL,
folderVersionDAL,
secretVersionV2BridgeDAL,
projectDAL,
folderCheckpointResourcesDAL,
secretV2BridgeDAL,
folderTreeCheckpointResourcesDAL,
folderCommitQueueService,
permissionService,
kmsService,
secretTagDAL,
resourceMetadataDAL
});
const scimService = scimServiceFactory({
licenseService,
scimDAL,
@@ -987,6 +1031,7 @@ export const registerRoutes = async (
projectMembershipDAL,
projectBotDAL,
secretDAL,
folderCommitService,
secretBlindIndexDAL,
secretVersionDAL,
secretTagDAL,
@@ -1034,6 +1079,7 @@ export const registerRoutes = async (
secretReminderRecipientsDAL,
orgService,
resourceMetadataDAL,
folderCommitService,
secretSyncQueue
});
@@ -1110,6 +1156,7 @@ export const registerRoutes = async (
snapshotDAL,
snapshotFolderDAL,
snapshotSecretDAL,
folderCommitService,
secretVersionDAL,
folderVersionDAL,
secretTagDAL,
@@ -1136,7 +1183,8 @@ export const registerRoutes = async (
folderVersionDAL,
projectEnvDAL,
snapshotService,
projectDAL
projectDAL,
folderCommitService
});
const secretImportService = secretImportServiceFactory({
@@ -1161,6 +1209,7 @@ export const registerRoutes = async (
const secretV2BridgeService = secretV2BridgeServiceFactory({
folderDAL,
secretVersionDAL: secretVersionV2BridgeDAL,
folderCommitService,
secretQueueService,
secretDAL: secretV2BridgeDAL,
permissionService,
@@ -1204,7 +1253,8 @@ export const registerRoutes = async (
projectSlackConfigDAL,
resourceMetadataDAL,
projectMicrosoftTeamsConfigDAL,
microsoftTeamsService
microsoftTeamsService,
folderCommitService
});
const secretService = secretServiceFactory({
@@ -1291,7 +1341,8 @@ export const registerRoutes = async (
secretV2BridgeDAL,
secretVersionV2TagBridgeDAL: secretVersionTagV2BridgeDAL,
secretVersionV2BridgeDAL,
resourceMetadataDAL
resourceMetadataDAL,
folderCommitService
});
const secretRotationQueue = secretRotationQueueFactory({
@@ -1303,6 +1354,7 @@ export const registerRoutes = async (
projectBotService,
secretVersionV2BridgeDAL,
secretV2BridgeDAL,
folderCommitService,
kmsService
});
@@ -1454,6 +1506,15 @@ export const registerRoutes = async (
permissionService
});
const pitService = pitServiceFactory({
folderCommitService,
secretService,
folderService,
permissionService,
folderDAL,
projectEnvDAL
});
const identityOidcAuthService = identityOidcAuthServiceFactory({
identityOidcAuthDAL,
identityOrgMembershipDAL,
@@ -1516,7 +1577,9 @@ export const registerRoutes = async (
dynamicSecretProviders,
folderDAL,
licenseService,
kmsService
kmsService,
userDAL,
identityDAL
});
const dailyResourceCleanUp = dailyResourceCleanUpQueueServiceFactory({
auditLogDAL,
@@ -1595,7 +1658,9 @@ export const registerRoutes = async (
secretDAL: secretV2BridgeDAL,
queueService,
secretV2BridgeService,
resourceMetadataDAL
resourceMetadataDAL,
folderCommitService,
folderVersionDAL
});
const migrationService = externalMigrationServiceFactory({
@@ -1705,6 +1770,7 @@ export const registerRoutes = async (
auditLogService,
secretV2BridgeDAL,
secretTagDAL,
folderCommitService,
secretVersionTagV2BridgeDAL,
secretVersionV2BridgeDAL,
keyStore,
@@ -1893,6 +1959,7 @@ export const registerRoutes = async (
certificateTemplate: certificateTemplateService,
certificateAuthorityCrl: certificateAuthorityCrlService,
certificateEst: certificateEstService,
pit: pitService,
pkiAlert: pkiAlertService,
pkiCollection: pkiCollectionService,
pkiSubscriber: pkiSubscriberService,
@@ -1927,6 +1994,7 @@ export const registerRoutes = async (
microsoftTeams: microsoftTeamsService,
assumePrivileges: assumePrivilegeService,
githubOrgSync: githubOrgSyncConfigService,
folderCommit: folderCommitService,
secretScanningV2: secretScanningV2Service
});

View File

@@ -262,7 +262,8 @@ export const SanitizedProjectSchema = ProjectsSchema.pick({
kmsCertificateKeyId: true,
auditLogsRetentionDays: true,
hasDeleteProtection: true,
secretSharing: true
secretSharing: true,
showSnapshotsLegacy: true
});
export const SanitizedTagSchema = SecretTagsSchema.pick({

View File

@@ -45,4 +45,37 @@ export const registerGcpConnectionRouter = async (server: FastifyZodProvider) =>
return projects;
}
});
server.route({
method: "GET",
url: "/:connectionId/secret-manager-project-locations",
config: {
rateLimit: readLimit
},
schema: {
params: z.object({
connectionId: z.string().uuid()
}),
querystring: z.object({
projectId: z.string()
}),
response: {
200: z.object({ displayName: z.string(), locationId: z.string() }).array()
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const {
params: { connectionId },
query: { projectId }
} = req;
const locations = await server.services.appConnection.gcp.listSecretManagerProjectLocations(
{ connectionId, projectId },
req.permission
);
return locations;
}
});
};
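
The route registered above lists GCP Secret Manager replication locations for a project behind an app connection. A hedged consumer sketch; the `/api/v1/app-connections/gcp` prefix, host, and token are assumptions for illustration, not taken from this diff:

```typescript
const connectionId = "<connection-uuid>"; // placeholder
const projectId = "my-gcp-project"; // placeholder
const accessToken = "<jwt>"; // placeholder

const res = await fetch(
  `https://app.infisical.com/api/v1/app-connections/gcp/${connectionId}/secret-manager-project-locations?projectId=${encodeURIComponent(projectId)}`,
  { headers: { Authorization: `Bearer ${accessToken}` } }
);
// Response shape per the route's 200 schema above
const locations: { displayName: string; locationId: string }[] = await res.json();
```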

View File

@@ -376,7 +376,8 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
})
.optional()
.describe(PROJECTS.UPDATE.slug),
secretSharing: z.boolean().optional().describe(PROJECTS.UPDATE.secretSharing)
secretSharing: z.boolean().optional().describe(PROJECTS.UPDATE.secretSharing),
showSnapshotsLegacy: z.boolean().optional().describe(PROJECTS.UPDATE.showSnapshotsLegacy)
}),
response: {
200: z.object({
@@ -397,7 +398,8 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
autoCapitalization: req.body.autoCapitalization,
hasDeleteProtection: req.body.hasDeleteProtection,
slug: req.body.slug,
secretSharing: req.body.secretSharing
secretSharing: req.body.secretSharing,
showSnapshotsLegacy: req.body.showSnapshotsLegacy
},
actorAuthMethod: req.permission.authMethod,
actorId: req.permission.id,

View File

@@ -4,9 +4,11 @@ import {
GroupProjectMembershipsSchema,
GroupsSchema,
ProjectMembershipRole,
ProjectUserMembershipRolesSchema
ProjectUserMembershipRolesSchema,
UsersSchema
} from "@app/db/schemas";
import { ApiDocsTags, PROJECTS } from "@app/lib/api-docs";
import { EFilterReturnedUsers } from "@app/ee/services/group/group-types";
import { ApiDocsTags, GROUPS, PROJECTS } from "@app/lib/api-docs";
import { ms } from "@app/lib/ms";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
@@ -301,4 +303,61 @@ export const registerGroupProjectRouter = async (server: FastifyZodProvider) =>
return { groupMembership };
}
});
server.route({
method: "GET",
url: "/:projectId/groups/:groupId/users",
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
config: {
rateLimit: readLimit
},
schema: {
hide: false,
tags: [ApiDocsTags.ProjectGroups],
description: "Return project group users",
params: z.object({
projectId: z.string().trim().describe(GROUPS.LIST_USERS.projectId),
groupId: z.string().trim().describe(GROUPS.LIST_USERS.id)
}),
querystring: z.object({
offset: z.coerce.number().min(0).max(100).default(0).describe(GROUPS.LIST_USERS.offset),
limit: z.coerce.number().min(1).max(100).default(10).describe(GROUPS.LIST_USERS.limit),
username: z.string().trim().optional().describe(GROUPS.LIST_USERS.username),
search: z.string().trim().optional().describe(GROUPS.LIST_USERS.search),
filter: z.nativeEnum(EFilterReturnedUsers).optional().describe(GROUPS.LIST_USERS.filterUsers)
}),
response: {
200: z.object({
users: UsersSchema.pick({
email: true,
username: true,
firstName: true,
lastName: true,
id: true
})
.merge(
z.object({
isPartOfGroup: z.boolean(),
joinedGroupAt: z.date().nullable()
})
)
.array(),
totalCount: z.number()
})
}
},
handler: async (req) => {
const { users, totalCount } = await server.services.groupProject.listProjectGroupUsers({
id: req.params.groupId,
projectId: req.params.projectId,
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
...req.query
});
return { users, totalCount };
}
});
};
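
The new endpoint above returns a group's members in the context of a project, flagging for each user whether they already belong to the group. A hedged request sketch; the path prefix and credentials are assumptions, while the response fields come from the route's schema:

```typescript
const projectId = "<project-id>"; // placeholder
const groupId = "<group-id>"; // placeholder
const accessToken = "<jwt>"; // placeholder
const query = new URLSearchParams({ offset: "0", limit: "10", search: "jane" });

const res = await fetch(
  `https://app.infisical.com/api/v2/workspace/${projectId}/groups/${groupId}/users?${query}`, // prefix assumed
  { headers: { Authorization: `Bearer ${accessToken}` } }
);
const { users, totalCount } = (await res.json()) as {
  users: { id: string; username: string; isPartOfGroup: boolean; joinedGroupAt: string | null }[];
  totalCount: number;
};
```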

View File

@@ -11,8 +11,10 @@ import { AppConnection } from "../app-connection-enums";
import { GcpConnectionMethod } from "./gcp-connection-enums";
import {
GCPApp,
GCPGetProjectLocationsRes,
GCPGetProjectsRes,
GCPGetServiceRes,
GCPLocation,
TGcpConnection,
TGcpConnectionConfig
} from "./gcp-connection-types";
@@ -145,6 +147,45 @@ export const getGcpSecretManagerProjects = async (appConnection: TGcpConnection)
return projects;
};
export const getGcpSecretManagerProjectLocations = async (projectId: string, appConnection: TGcpConnection) => {
const accessToken = await getGcpConnectionAuthToken(appConnection);
let gcpLocations: GCPLocation[] = [];
const pageSize = 100;
let pageToken: string | undefined;
let hasMorePages = true;
while (hasMorePages) {
const params = new URLSearchParams({
pageSize: String(pageSize),
...(pageToken ? { pageToken } : {})
});
// eslint-disable-next-line no-await-in-loop
const { data } = await request.get<GCPGetProjectLocationsRes>(
`${IntegrationUrls.GCP_SECRET_MANAGER_URL}/v1/projects/${projectId}/locations`,
{
params,
headers: {
Authorization: `Bearer ${accessToken}`,
"Accept-Encoding": "application/json"
}
}
);
gcpLocations = gcpLocations.concat(data.locations);
if (!data.nextPageToken) {
hasMorePages = false;
}
pageToken = data.nextPageToken;
}
return gcpLocations.sort((a, b) => a.displayName.localeCompare(b.displayName));
};
export const validateGcpConnectionCredentials = async (appConnection: TGcpConnectionConfig) => {
// Check if provided service account email suffix matches organization ID.
// We do this to mitigate confused deputy attacks in multi-tenant instances

View File

@@ -1,8 +1,8 @@
import { OrgServiceActor } from "@app/lib/types";
import { AppConnection } from "../app-connection-enums";
import { getGcpSecretManagerProjects } from "./gcp-connection-fns";
import { TGcpConnection } from "./gcp-connection-types";
import { getGcpSecretManagerProjectLocations, getGcpSecretManagerProjects } from "./gcp-connection-fns";
import { TGcpConnection, TGetGCPProjectLocationsDTO } from "./gcp-connection-types";
type TGetAppConnectionFunc = (
app: AppConnection,
@@ -23,7 +23,23 @@ export const gcpConnectionService = (getAppConnection: TGetAppConnectionFunc) =>
}
};
const listSecretManagerProjectLocations = async (
{ connectionId, projectId }: TGetGCPProjectLocationsDTO,
actor: OrgServiceActor
) => {
const appConnection = await getAppConnection(AppConnection.GCP, connectionId, actor);
try {
const locations = await getGcpSecretManagerProjectLocations(projectId, appConnection);
return locations;
} catch (error) {
// Best-effort listing: fall back to an empty list if the GCP API call fails
// (e.g. the service account lacks the required location-listing permission).
return [];
}
};
return {
listSecretManagerProjects
listSecretManagerProjects,
listSecretManagerProjectLocations
};
};

View File

@@ -38,6 +38,22 @@ export type GCPGetProjectsRes = {
nextPageToken?: string;
};
export type GCPLocation = {
name: string;
locationId: string;
displayName: string;
};
export type GCPGetProjectLocationsRes = {
locations: GCPLocation[];
nextPageToken?: string;
};
export type TGetGCPProjectLocationsDTO = {
projectId: string;
connectionId: string;
};
export type GCPGetServiceRes = {
name: string;
parent: string;

View File

@@ -397,7 +397,7 @@ export const authLoginServiceFactory = ({
// Check if the user actually has access to the specified organization.
const userOrgs = await orgDAL.findAllOrgsByUserId(user.id);
const hasOrganizationMembership = userOrgs.some((org) => org.id === organizationId);
const hasOrganizationMembership = userOrgs.some((org) => org.id === organizationId && org.userStatus !== "invited");
const selectedOrg = await orgDAL.findById(organizationId);
if (!hasOrganizationMembership) {

View File

@@ -10,6 +10,7 @@ import { chunkArray } from "@app/lib/fn";
import { logger } from "@app/lib/logger";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { CommitType, TFolderCommitServiceFactory } from "../folder-commit/folder-commit-service";
import { TKmsServiceFactory } from "../kms/kms-service";
import { KmsDataKey } from "../kms/kms-types";
import { TProjectDALFactory } from "../project/project-dal";
@@ -18,6 +19,7 @@ import { TProjectEnvDALFactory } from "../project-env/project-env-dal";
import { TProjectEnvServiceFactory } from "../project-env/project-env-service";
import { TResourceMetadataDALFactory } from "../resource-metadata/resource-metadata-dal";
import { TSecretFolderDALFactory } from "../secret-folder/secret-folder-dal";
import { TSecretFolderVersionDALFactory } from "../secret-folder/secret-folder-version-dal";
import { TSecretTagDALFactory } from "../secret-tag/secret-tag-dal";
import { TSecretV2BridgeDALFactory } from "../secret-v2-bridge/secret-v2-bridge-dal";
import { fnSecretBulkInsert, getAllSecretReferences } from "../secret-v2-bridge/secret-v2-bridge-fns";
@@ -42,6 +44,8 @@ export type TImportDataIntoInfisicalDTO = {
projectService: Pick<TProjectServiceFactory, "createProject">;
projectEnvService: Pick<TProjectEnvServiceFactory, "createEnvironment">;
secretV2BridgeService: Pick<TSecretV2BridgeServiceFactory, "createManySecret">;
folderCommitService: Pick<TFolderCommitServiceFactory, "createCommit">;
folderVersionDAL: Pick<TSecretFolderVersionDALFactory, "create">;
input: TImportInfisicalDataCreate;
};
@@ -507,6 +511,8 @@ export const importDataIntoInfisicalFn = async ({
secretVersionTagDAL,
folderDAL,
resourceMetadataDAL,
folderVersionDAL,
folderCommitService,
input: { data, actor, actorId, actorOrgId, actorAuthMethod }
}: TImportDataIntoInfisicalDTO) => {
// Import data to infisical
@@ -599,6 +605,36 @@ export const importDataIntoInfisicalFn = async ({
tx
);
const newFolderVersion = await folderVersionDAL.create(
{
name: newFolder.name,
envId: newFolder.envId,
version: newFolder.version,
folderId: newFolder.id
},
tx
);
await folderCommitService.createCommit(
{
actor: {
type: actor,
metadata: {
id: actorId
}
},
message: "Changed by external migration",
folderId: parentEnv.rootFolderId,
changes: [
{
type: CommitType.ADD,
folderVersionId: newFolderVersion.id
}
]
},
tx
);
originalToNewFolderId.set(folder.id, {
folderId: newFolder.id,
projectId: parentEnv.projectId
@@ -772,6 +808,7 @@ export const importDataIntoInfisicalFn = async ({
secretVersionDAL,
secretTagDAL,
secretVersionTagDAL,
folderCommitService,
actor: {
type: actor,
actorId

View File

@@ -3,6 +3,7 @@ import { infisicalSymmetricDecrypt } from "@app/lib/crypto/encryption";
import { logger } from "@app/lib/logger";
import { QueueJobs, QueueName, TQueueServiceFactory } from "@app/queue";
import { TFolderCommitServiceFactory } from "../folder-commit/folder-commit-service";
import { TKmsServiceFactory } from "../kms/kms-service";
import { TProjectDALFactory } from "../project/project-dal";
import { TProjectServiceFactory } from "../project/project-service";
@@ -10,6 +11,7 @@ import { TProjectEnvDALFactory } from "../project-env/project-env-dal";
import { TProjectEnvServiceFactory } from "../project-env/project-env-service";
import { TResourceMetadataDALFactory } from "../resource-metadata/resource-metadata-dal";
import { TSecretFolderDALFactory } from "../secret-folder/secret-folder-dal";
import { TSecretFolderVersionDALFactory } from "../secret-folder/secret-folder-version-dal";
import { TSecretTagDALFactory } from "../secret-tag/secret-tag-dal";
import { TSecretV2BridgeDALFactory } from "../secret-v2-bridge/secret-v2-bridge-dal";
import { TSecretV2BridgeServiceFactory } from "../secret-v2-bridge/secret-v2-bridge-service";
@@ -36,6 +38,8 @@ export type TExternalMigrationQueueFactoryDep = {
projectService: Pick<TProjectServiceFactory, "createProject">;
projectEnvService: Pick<TProjectEnvServiceFactory, "createEnvironment">;
secretV2BridgeService: Pick<TSecretV2BridgeServiceFactory, "createManySecret">;
folderCommitService: Pick<TFolderCommitServiceFactory, "createCommit">;
folderVersionDAL: Pick<TSecretFolderVersionDALFactory, "create">;
resourceMetadataDAL: Pick<TResourceMetadataDALFactory, "insertMany" | "delete">;
};
@@ -56,6 +60,8 @@ export const externalMigrationQueueFactory = ({
secretTagDAL,
secretVersionTagDAL,
folderDAL,
folderCommitService,
folderVersionDAL,
resourceMetadataDAL
}: TExternalMigrationQueueFactoryDep) => {
const startImport = async (dto: {
@@ -114,6 +120,8 @@ export const externalMigrationQueueFactory = ({
projectService,
projectEnvService,
secretV2BridgeService,
folderCommitService,
folderVersionDAL,
resourceMetadataDAL
});

View File

@@ -0,0 +1,118 @@
import { Knex } from "knex";
import { TDbClient } from "@app/db";
import {
TableName,
TFolderCheckpointResources,
TFolderCheckpoints,
TSecretFolderVersions,
TSecretVersionsV2
} from "@app/db/schemas";
import { DatabaseError } from "@app/lib/errors";
import { ormify, selectAllTableCols } from "@app/lib/knex";
export type TFolderCheckpointResourcesDALFactory = ReturnType<typeof folderCheckpointResourcesDALFactory>;
export type ResourceWithCheckpointInfo = TFolderCheckpointResources & {
folderCommitId: string;
};
export const folderCheckpointResourcesDALFactory = (db: TDbClient) => {
const folderCheckpointResourcesOrm = ormify(db, TableName.FolderCheckpointResources);
const findByCheckpointId = async (
folderCheckpointId: string,
tx?: Knex
): Promise<
(TFolderCheckpointResources & {
referencedSecretId?: string;
referencedFolderId?: string;
folderName?: string;
folderVersion?: string;
secretKey?: string;
secretVersion?: string;
})[]
> => {
try {
const docs = await (tx || db.replicaNode())<TFolderCheckpointResources>(TableName.FolderCheckpointResources)
.where({ folderCheckpointId })
.leftJoin<TSecretVersionsV2>(
TableName.SecretVersionV2,
`${TableName.FolderCheckpointResources}.secretVersionId`,
`${TableName.SecretVersionV2}.id`
)
.leftJoin<TSecretFolderVersions>(
TableName.SecretFolderVersion,
`${TableName.FolderCheckpointResources}.folderVersionId`,
`${TableName.SecretFolderVersion}.id`
)
.select(selectAllTableCols(TableName.FolderCheckpointResources))
.select(
db.ref("secretId").withSchema(TableName.SecretVersionV2).as("referencedSecretId"),
db.ref("folderId").withSchema(TableName.SecretFolderVersion).as("referencedFolderId"),
db.ref("name").withSchema(TableName.SecretFolderVersion).as("folderName"),
db.ref("version").withSchema(TableName.SecretFolderVersion).as("folderVersion"),
db.ref("key").withSchema(TableName.SecretVersionV2).as("secretKey"),
db.ref("version").withSchema(TableName.SecretVersionV2).as("secretVersion")
);
return docs.map((doc) => ({
...doc,
folderVersion: doc.folderVersion?.toString(),
secretVersion: doc.secretVersion?.toString()
}));
} catch (error) {
throw new DatabaseError({ error, name: "FindByCheckpointId" });
}
};
const findBySecretVersionId = async (secretVersionId: string, tx?: Knex): Promise<ResourceWithCheckpointInfo[]> => {
try {
const docs = await (tx || db.replicaNode())<
TFolderCheckpointResources & Pick<TFolderCheckpoints, "folderCommitId" | "createdAt">
>(TableName.FolderCheckpointResources)
.where({ secretVersionId })
.select(selectAllTableCols(TableName.FolderCheckpointResources))
.join(
TableName.FolderCheckpoint,
`${TableName.FolderCheckpointResources}.folderCheckpointId`,
`${TableName.FolderCheckpoint}.id`
)
.select(
db.ref("folderCommitId").withSchema(TableName.FolderCheckpoint),
db.ref("createdAt").withSchema(TableName.FolderCheckpoint)
);
return docs;
} catch (error) {
throw new DatabaseError({ error, name: "FindBySecretVersionId" });
}
};
const findByFolderVersionId = async (folderVersionId: string, tx?: Knex): Promise<ResourceWithCheckpointInfo[]> => {
try {
const docs = await (tx || db.replicaNode())<
TFolderCheckpointResources & Pick<TFolderCheckpoints, "folderCommitId" | "createdAt">
>(TableName.FolderCheckpointResources)
.where({ folderVersionId })
.select(selectAllTableCols(TableName.FolderCheckpointResources))
.join(
TableName.FolderCheckpoint,
`${TableName.FolderCheckpointResources}.folderCheckpointId`,
`${TableName.FolderCheckpoint}.id`
)
.select(
db.ref("folderCommitId").withSchema(TableName.FolderCheckpoint),
db.ref("createdAt").withSchema(TableName.FolderCheckpoint)
);
return docs;
} catch (error) {
throw new DatabaseError({ error, name: "FindByFolderVersionId" });
}
};
return {
...folderCheckpointResourcesOrm,
findByCheckpointId,
findBySecretVersionId,
findByFolderVersionId
};
};

View File

@@ -0,0 +1,129 @@
import { Knex } from "knex";
import { TDbClient } from "@app/db";
import { TableName, TFolderCheckpoints, TFolderCommits } from "@app/db/schemas";
import { DatabaseError } from "@app/lib/errors";
import { buildFindFilter, ormify, selectAllTableCols } from "@app/lib/knex";
export type TFolderCheckpointDALFactory = ReturnType<typeof folderCheckpointDALFactory>;
type CheckpointWithCommitInfo = TFolderCheckpoints & {
actorMetadata: unknown;
actorType: string;
message?: string | null;
commitDate: Date;
folderId: string;
};
export const folderCheckpointDALFactory = (db: TDbClient) => {
const folderCheckpointOrm = ormify(db, TableName.FolderCheckpoint);
const findByCommitId = async (folderCommitId: string, tx?: Knex): Promise<TFolderCheckpoints | undefined> => {
try {
const doc = await (tx || db.replicaNode())<TFolderCheckpoints>(TableName.FolderCheckpoint)
// eslint-disable-next-line @typescript-eslint/no-misused-promises
.where(buildFindFilter({ folderCommitId }, TableName.FolderCheckpoint))
.select(selectAllTableCols(TableName.FolderCheckpoint))
.first();
return doc;
} catch (error) {
throw new DatabaseError({ error, name: "FindByCommitId" });
}
};
const findByFolderId = async (folderId: string, limit?: number, tx?: Knex): Promise<CheckpointWithCommitInfo[]> => {
try {
let query = (tx || db.replicaNode())(TableName.FolderCheckpoint)
.join<TFolderCommits>(
TableName.FolderCommit,
`${TableName.FolderCheckpoint}.folderCommitId`,
`${TableName.FolderCommit}.id`
)
// eslint-disable-next-line @typescript-eslint/no-misused-promises
.where(buildFindFilter({ folderId }, TableName.FolderCommit))
.select(selectAllTableCols(TableName.FolderCheckpoint))
.select(
db.ref("actorMetadata").withSchema(TableName.FolderCommit),
db.ref("actorType").withSchema(TableName.FolderCommit),
db.ref("message").withSchema(TableName.FolderCommit),
db.ref("createdAt").withSchema(TableName.FolderCommit).as("commitDate"),
db.ref("folderId").withSchema(TableName.FolderCommit)
)
.orderBy(`${TableName.FolderCheckpoint}.createdAt`, "desc");
if (limit !== undefined) {
query = query.limit(limit);
}
return await query;
} catch (error) {
throw new DatabaseError({ error, name: "FindByFolderId" });
}
};
const findLatestByFolderId = async (folderId: string, tx?: Knex): Promise<CheckpointWithCommitInfo | undefined> => {
try {
const doc = await (tx || db.replicaNode())(TableName.FolderCheckpoint)
.join<TFolderCommits>(
TableName.FolderCommit,
`${TableName.FolderCheckpoint}.folderCommitId`,
`${TableName.FolderCommit}.id`
)
// eslint-disable-next-line @typescript-eslint/no-misused-promises
.where(buildFindFilter({ folderId }, TableName.FolderCommit))
.select(selectAllTableCols(TableName.FolderCheckpoint))
.select(
db.ref("actorMetadata").withSchema(TableName.FolderCommit),
db.ref("actorType").withSchema(TableName.FolderCommit),
db.ref("message").withSchema(TableName.FolderCommit),
db.ref("createdAt").withSchema(TableName.FolderCommit).as("commitDate"),
db.ref("folderId").withSchema(TableName.FolderCommit)
)
.orderBy(`${TableName.FolderCheckpoint}.createdAt`, "desc")
.first();
return doc;
} catch (error) {
throw new DatabaseError({ error, name: "FindLatestByFolderId" });
}
};
const findNearestCheckpoint = async (
folderCommitId: bigint,
folderId: string,
tx?: Knex
): Promise<(CheckpointWithCommitInfo & { commitId: bigint }) | undefined> => {
try {
// Get the checkpoint with the highest commitId that's still less than or equal to our commit
const nearestCheckpoint = await (tx || db.replicaNode())(TableName.FolderCheckpoint)
.join<TFolderCommits>(
TableName.FolderCommit,
`${TableName.FolderCheckpoint}.folderCommitId`,
`${TableName.FolderCommit}.id`
)
.where(`${TableName.FolderCommit}.folderId`, "=", folderId)
.where(`${TableName.FolderCommit}.commitId`, "<=", folderCommitId.toString())
.select(selectAllTableCols(TableName.FolderCheckpoint))
.select(
db.ref("actorMetadata").withSchema(TableName.FolderCommit),
db.ref("actorType").withSchema(TableName.FolderCommit),
db.ref("message").withSchema(TableName.FolderCommit),
db.ref("commitId").withSchema(TableName.FolderCommit),
db.ref("createdAt").withSchema(TableName.FolderCommit).as("commitDate"),
db.ref("folderId").withSchema(TableName.FolderCommit)
)
.orderBy(`${TableName.FolderCommit}.commitId`, "desc")
.first();
return nearestCheckpoint;
} catch (error) {
throw new DatabaseError({ error, name: "FindNearestCheckpoint" });
}
};
return {
...folderCheckpointOrm,
findByCommitId,
findByFolderId,
findLatestByFolderId,
findNearestCheckpoint
};
};

View File

@@ -0,0 +1,233 @@
/* eslint-disable @typescript-eslint/no-misused-promises */
import { Knex } from "knex";
import { TDbClient } from "@app/db";
import {
TableName,
TFolderCommitChanges,
TFolderCommits,
TProjectEnvironments,
TSecretFolderVersions,
TSecretVersionsV2
} from "@app/db/schemas";
import { DatabaseError } from "@app/lib/errors";
import { buildFindFilter, ormify, selectAllTableCols } from "@app/lib/knex";
export type TFolderCommitChangesDALFactory = ReturnType<typeof folderCommitChangesDALFactory>;
// Base type with common fields
type BaseCommitChangeInfo = TFolderCommitChanges & {
actorMetadata: unknown;
actorType: string;
message?: string | null;
folderId: string;
createdAt: Date;
};
// Secret-specific change
export type SecretCommitChange = BaseCommitChangeInfo & {
resourceType: "secret";
secretKey: string;
changeType: string;
secretVersionId?: string | null;
secretVersion: string;
secretId: string;
versions?: {
secretKey: string;
secretComment: string;
skipMultilineEncoding?: boolean | null;
secretReminderRepeatDays?: number | null;
secretReminderNote?: string | null;
metadata?: unknown;
tags?: string[] | null;
secretReminderRecipients?: string[] | null;
secretValue: string;
}[];
};
// Folder-specific change
export type FolderCommitChange = BaseCommitChangeInfo & {
resourceType: "folder";
folderName: string;
folderVersion: string;
folderChangeId: string;
versions?: {
version: string;
name?: string;
}[];
};
// Discriminated union
export type CommitChangeWithCommitInfo = SecretCommitChange | FolderCommitChange;
// Type guards
export const isSecretCommitChange = (change: CommitChangeWithCommitInfo): change is SecretCommitChange =>
change.resourceType === "secret";
export const isFolderCommitChange = (change: CommitChangeWithCommitInfo): change is FolderCommitChange =>
change.resourceType === "folder";
export const folderCommitChangesDALFactory = (db: TDbClient) => {
const folderCommitChangesOrm = ormify(db, TableName.FolderCommitChanges);
const findByCommitId = async (
folderCommitId: string,
projectId: string,
tx?: Knex
): Promise<CommitChangeWithCommitInfo[]> => {
try {
const docs = await (tx || db.replicaNode())<TFolderCommitChanges>(TableName.FolderCommitChanges)
.where(buildFindFilter({ folderCommitId }, TableName.FolderCommitChanges))
.leftJoin<TFolderCommits>(
TableName.FolderCommit,
`${TableName.FolderCommitChanges}.folderCommitId`,
`${TableName.FolderCommit}.id`
)
.leftJoin<TSecretVersionsV2>(
TableName.SecretVersionV2,
`${TableName.FolderCommitChanges}.secretVersionId`,
`${TableName.SecretVersionV2}.id`
)
.leftJoin<TSecretFolderVersions>(
TableName.SecretFolderVersion,
`${TableName.FolderCommitChanges}.folderVersionId`,
`${TableName.SecretFolderVersion}.id`
)
.leftJoin<TProjectEnvironments>(
TableName.Environment,
`${TableName.FolderCommit}.envId`,
`${TableName.Environment}.id`
)
.where((qb) => {
if (projectId) {
void qb.where(`${TableName.Environment}.projectId`, "=", projectId);
}
})
.select(selectAllTableCols(TableName.FolderCommitChanges))
.select(
db.ref("name").withSchema(TableName.SecretFolderVersion).as("folderName"),
db.ref("folderId").withSchema(TableName.SecretFolderVersion).as("folderChangeId"),
db.ref("version").withSchema(TableName.SecretFolderVersion).as("folderVersion"),
db.ref("key").withSchema(TableName.SecretVersionV2).as("secretKey"),
db.ref("version").withSchema(TableName.SecretVersionV2).as("secretVersion"),
db.ref("secretId").withSchema(TableName.SecretVersionV2),
db.ref("actorMetadata").withSchema(TableName.FolderCommit),
db.ref("actorType").withSchema(TableName.FolderCommit),
db.ref("message").withSchema(TableName.FolderCommit),
db.ref("createdAt").withSchema(TableName.FolderCommit),
db.ref("folderId").withSchema(TableName.FolderCommit)
);
return docs.map((doc) => {
// Determine if this is a secret or folder change based on populated fields
if (doc.secretKey && doc.secretVersion && doc.secretId) {
return {
...doc,
resourceType: "secret",
secretKey: doc.secretKey,
secretVersion: doc.secretVersion.toString(),
secretId: doc.secretId
} as SecretCommitChange;
}
return {
...doc,
resourceType: "folder",
folderName: doc.folderName,
folderVersion: doc.folderVersion.toString(),
folderChangeId: doc.folderChangeId
} as FolderCommitChange;
});
} catch (error) {
throw new DatabaseError({ error, name: "FindByCommitId" });
}
};
const findBySecretVersionId = async (secretVersionId: string, tx?: Knex): Promise<SecretCommitChange[]> => {
try {
const docs = await (tx || db.replicaNode())<
TFolderCommitChanges &
Pick<TFolderCommits, "actorMetadata" | "actorType" | "message" | "createdAt" | "folderId">
>(TableName.FolderCommitChanges)
.where(buildFindFilter({ secretVersionId }, TableName.FolderCommitChanges))
.select(selectAllTableCols(TableName.FolderCommitChanges))
.join(TableName.FolderCommit, `${TableName.FolderCommitChanges}.folderCommitId`, `${TableName.FolderCommit}.id`)
.leftJoin<TSecretVersionsV2>(
TableName.SecretVersionV2,
`${TableName.FolderCommitChanges}.secretVersionId`,
`${TableName.SecretVersionV2}.id`
)
.select(
db.ref("actorMetadata").withSchema(TableName.FolderCommit),
db.ref("actorType").withSchema(TableName.FolderCommit),
db.ref("message").withSchema(TableName.FolderCommit),
db.ref("createdAt").withSchema(TableName.FolderCommit),
db.ref("folderId").withSchema(TableName.FolderCommit),
db.ref("key").withSchema(TableName.SecretVersionV2).as("secretKey"),
db.ref("version").withSchema(TableName.SecretVersionV2).as("secretVersion"),
db.ref("secretId").withSchema(TableName.SecretVersionV2)
);
return docs
.filter((doc) => doc.secretKey && doc.secretVersion && doc.secretId)
.map(
(doc): SecretCommitChange => ({
...doc,
resourceType: "secret",
secretKey: doc.secretKey,
secretVersion: doc.secretVersion.toString(),
secretId: doc.secretId
})
);
} catch (error) {
throw new DatabaseError({ error, name: "FindBySecretVersionId" });
}
};
const findByFolderVersionId = async (folderVersionId: string, tx?: Knex): Promise<FolderCommitChange[]> => {
try {
const docs = await (tx || db.replicaNode())<
TFolderCommitChanges &
Pick<TFolderCommits, "actorMetadata" | "actorType" | "message" | "createdAt" | "folderId">
>(TableName.FolderCommitChanges)
.where(buildFindFilter({ folderVersionId }, TableName.FolderCommitChanges))
.select(selectAllTableCols(TableName.FolderCommitChanges))
.join(TableName.FolderCommit, `${TableName.FolderCommitChanges}.folderCommitId`, `${TableName.FolderCommit}.id`)
.leftJoin<TSecretFolderVersions>(
TableName.SecretFolderVersion,
`${TableName.FolderCommitChanges}.folderVersionId`,
`${TableName.SecretFolderVersion}.id`
)
.select(
db.ref("actorMetadata").withSchema(TableName.FolderCommit),
db.ref("actorType").withSchema(TableName.FolderCommit),
db.ref("message").withSchema(TableName.FolderCommit),
db.ref("createdAt").withSchema(TableName.FolderCommit),
db.ref("folderId").withSchema(TableName.FolderCommit),
db.ref("name").withSchema(TableName.SecretFolderVersion).as("folderName"),
db.ref("folderId").withSchema(TableName.SecretFolderVersion).as("folderChangeId"),
db.ref("version").withSchema(TableName.SecretFolderVersion).as("folderVersion")
);
return docs
.filter((doc) => doc.folderName && doc.folderVersion && doc.folderChangeId)
.map(
(doc): FolderCommitChange => ({
...doc,
resourceType: "folder",
folderName: doc.folderName,
folderVersion: doc.folderVersion!.toString(),
folderChangeId: doc.folderChangeId
})
);
} catch (error) {
throw new DatabaseError({ error, name: "FindByFolderVersionId" });
}
};
return {
...folderCommitChangesOrm,
findByCommitId,
findBySecretVersionId,
findByFolderVersionId
};
};
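
Since `findByCommitId` returns a discriminated union, callers can use the exported type guards to narrow each change before touching resource-specific fields. A small sketch (the import path mirrors the service layout seen elsewhere in this diff):

```typescript
import {
  CommitChangeWithCommitInfo,
  isSecretCommitChange
} from "@app/services/folder-commit-changes/folder-commit-changes-dal";

const describeChange = (change: CommitChangeWithCommitInfo): string =>
  isSecretCommitChange(change)
    ? `secret ${change.secretKey} @ v${change.secretVersion}` // narrowed to SecretCommitChange
    : `folder ${change.folderName} @ v${change.folderVersion}`; // otherwise FolderCommitChange
```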

View File

@@ -0,0 +1,513 @@
import { Knex } from "knex";
import { TDbClient } from "@app/db";
import {
TableName,
TFolderCommitChanges,
TFolderCommits,
TProjectEnvironments,
TSecretFolderVersions,
TSecretVersionsV2
} from "@app/db/schemas";
import { DatabaseError, NotFoundError } from "@app/lib/errors";
import { buildFindFilter, ormify, selectAllTableCols } from "@app/lib/knex";
export type TFolderCommitDALFactory = ReturnType<typeof folderCommitDALFactory>;
export const folderCommitDALFactory = (db: TDbClient) => {
const folderCommitOrm = ormify(db, TableName.FolderCommit);
const { delete: deleteOp, deleteById, ...restOfOrm } = folderCommitOrm;
const findByFolderId = async (folderId: string, tx?: Knex): Promise<TFolderCommits[]> => {
try {
const trx = tx || db.replicaNode();
// First, get all folder commits
const folderCommits = await trx(TableName.FolderCommit)
.where({ folderId })
.select("*")
.orderBy("createdAt", "desc");
if (folderCommits.length === 0) return [];
// Get all commit IDs
const commitIds = folderCommits.map((commit) => commit.id);
// Then get all related changes
const changes = await trx(TableName.FolderCommitChanges).whereIn("folderCommitId", commitIds).select("*");
const changesMap = changes.reduce(
(acc, change) => {
const { folderCommitId } = change;
if (!acc[folderCommitId]) acc[folderCommitId] = [];
acc[folderCommitId].push(change);
return acc;
},
{} as Record<string, TFolderCommitChanges[]>
);
return folderCommits.map((commit) => ({
...commit,
changes: changesMap[commit.id] || []
}));
} catch (error) {
throw new DatabaseError({ error, name: "FindByFolderId" });
}
};
const findLatestCommit = async (
folderId: string,
projectId?: string,
tx?: Knex
): Promise<TFolderCommits | undefined> => {
try {
const doc = await (tx || db.replicaNode())(TableName.FolderCommit)
.where({ folderId })
.leftJoin(TableName.Environment, `${TableName.FolderCommit}.envId`, `${TableName.Environment}.id`)
.where((qb) => {
if (projectId) {
void qb.where(`${TableName.Environment}.projectId`, "=", projectId);
}
})
.select(selectAllTableCols(TableName.FolderCommit))
.orderBy("commitId", "desc")
.first();
return doc;
} catch (error) {
throw new DatabaseError({ error, name: "FindLatestCommit" });
}
};
const findLatestCommitByFolderIds = async (folderIds: string[], tx?: Knex): Promise<TFolderCommits[] | undefined> => {
try {
// First get max commitId for each folderId
const maxCommitIdSubquery = (tx || db.replicaNode())(TableName.FolderCommit)
.select("folderId")
.max("commitId as maxCommitId")
.whereIn("folderId", folderIds)
.groupBy("folderId");
// Join with main table to get complete records for each max commitId
const docs = await (tx || db.replicaNode())(TableName.FolderCommit)
.select(selectAllTableCols(TableName.FolderCommit))
// eslint-disable-next-line func-names
.join<TFolderCommits>(maxCommitIdSubquery.as("latest"), function () {
this.on(`${TableName.FolderCommit}.folderId`, "=", "latest.folderId").andOn(
`${TableName.FolderCommit}.commitId`,
"=",
"latest.maxCommitId"
);
});
return docs;
} catch (error) {
throw new DatabaseError({ error, name: "FindLatestCommitByFolderIds" });
}
};
const findLatestEnvCommit = async (envId: string, tx?: Knex): Promise<TFolderCommits | undefined> => {
try {
const doc = await (tx || db.replicaNode())(TableName.FolderCommit)
.where(`${TableName.FolderCommit}.envId`, "=", envId)
.select(selectAllTableCols(TableName.FolderCommit))
.orderBy("commitId", "desc")
.first();
return doc;
} catch (error) {
throw new DatabaseError({ error, name: "FindLatestCommit" });
}
};
const findMultipleLatestCommits = async (folderIds: string[], tx?: Knex): Promise<TFolderCommits[]> => {
try {
const knexInstance = tx || db.replicaNode();
// Get the latest commitId for each folderId
const subquery = knexInstance(TableName.FolderCommit)
.whereIn("folderId", folderIds)
.groupBy("folderId")
.select("folderId")
.max("commitId as maxCommitId");
// Then fetch the complete rows matching those latest commits
const docs = await knexInstance(TableName.FolderCommit)
// eslint-disable-next-line func-names
.innerJoin<TFolderCommits>(subquery.as("latest"), function () {
this.on(`${TableName.FolderCommit}.folderId`, "=", "latest.folderId").andOn(
`${TableName.FolderCommit}.commitId`,
"=",
"latest.maxCommitId"
);
})
.select(selectAllTableCols(TableName.FolderCommit));
return docs;
} catch (error) {
throw new DatabaseError({ error, name: "FindMultipleLatestCommits" });
}
};
const getNumberOfCommitsSince = async (folderId: string, folderCommitId: string, tx?: Knex): Promise<number> => {
try {
const referencedCommit = await (tx || db.replicaNode())(TableName.FolderCommit)
.where({ id: folderCommitId })
.select("commitId")
.first();
if (referencedCommit?.commitId) {
const doc = await (tx || db.replicaNode())(TableName.FolderCommit)
.where({ folderId })
.where("commitId", ">", referencedCommit.commitId)
.count();
return Number(doc?.[0].count);
}
return 0;
} catch (error) {
throw new DatabaseError({ error, name: "getNumberOfCommitsSince" });
}
};
const getEnvNumberOfCommitsSince = async (envId: string, folderCommitId: string, tx?: Knex): Promise<number> => {
try {
const referencedCommit = await (tx || db.replicaNode())(TableName.FolderCommit)
.where({ id: folderCommitId })
.select("commitId")
.first();
if (referencedCommit?.commitId) {
const doc = await (tx || db.replicaNode())(TableName.FolderCommit)
.where(`${TableName.FolderCommit}.envId`, "=", envId)
.where("commitId", ">", referencedCommit.commitId)
.count();
return Number(doc?.[0].count);
}
return 0;
} catch (error) {
throw new DatabaseError({ error, name: "getNumberOfCommitsSince" });
}
};
const findCommitsToRecreate = async (
folderId: string,
targetCommitNumber: bigint,
checkpointCommitNumber: bigint,
tx?: Knex
): Promise<
(TFolderCommits & {
changes: (TFolderCommitChanges & {
referencedSecretId?: string;
referencedFolderId?: string;
folderName?: string;
folderVersion?: string;
secretKey?: string;
secretVersion?: string;
})[];
})[]
> => {
try {
// First get all the commits in the range
const commits = await (tx || db.replicaNode())(TableName.FolderCommit)
// eslint-disable-next-line @typescript-eslint/no-misused-promises
.where(buildFindFilter({ folderId }, TableName.FolderCommit))
.andWhere(`${TableName.FolderCommit}.commitId`, ">", checkpointCommitNumber.toString())
.andWhere(`${TableName.FolderCommit}.commitId`, "<=", targetCommitNumber.toString())
.select(selectAllTableCols(TableName.FolderCommit))
.orderBy(`${TableName.FolderCommit}.commitId`, "asc");
// If no commits found, return empty array
if (!commits.length) {
return [];
}
// Get all the commit IDs
const commitIds = commits.map((commit) => commit.id);
// Get all changes for these commits in a single query
const allChanges = await (tx || db.replicaNode())(TableName.FolderCommitChanges)
.whereIn(`${TableName.FolderCommitChanges}.folderCommitId`, commitIds)
.leftJoin<TSecretVersionsV2>(
TableName.SecretVersionV2,
`${TableName.FolderCommitChanges}.secretVersionId`,
`${TableName.SecretVersionV2}.id`
)
.leftJoin<TSecretFolderVersions>(
TableName.SecretFolderVersion,
`${TableName.FolderCommitChanges}.folderVersionId`,
`${TableName.SecretFolderVersion}.id`
)
.select(selectAllTableCols(TableName.FolderCommitChanges))
.select(
db.ref("secretId").withSchema(TableName.SecretVersionV2).as("referencedSecretId"),
db.ref("folderId").withSchema(TableName.SecretFolderVersion).as("referencedFolderId"),
db.ref("name").withSchema(TableName.SecretFolderVersion).as("folderName"),
db.ref("version").withSchema(TableName.SecretFolderVersion).as("folderVersion"),
db.ref("key").withSchema(TableName.SecretVersionV2).as("secretKey"),
db.ref("version").withSchema(TableName.SecretVersionV2).as("secretVersion")
);
// Organize changes by commit ID
const changesByCommitId = allChanges.reduce(
(acc, change) => {
if (!acc[change.folderCommitId]) {
acc[change.folderCommitId] = [];
}
acc[change.folderCommitId].push(change);
return acc;
},
{} as Record<string, TFolderCommitChanges[]>
);
// Attach changes to each commit
return commits.map((commit) => ({
...commit,
changes: changesByCommitId[commit.id] || []
}));
} catch (error) {
throw new DatabaseError({ error, name: "FindCommitsToRecreate" });
}
};
const findLatestCommitBetween = async ({
folderId,
startCommitId,
endCommitId,
tx
}: {
folderId: string;
startCommitId?: string;
endCommitId: string;
tx?: Knex;
}): Promise<TFolderCommits | undefined> => {
try {
const doc = await (tx || db.replicaNode())(TableName.FolderCommit)
.where("commitId", "<=", endCommitId)
.where({ folderId })
.where((qb) => {
if (startCommitId) {
void qb.where("commitId", ">=", startCommitId);
}
})
.select(selectAllTableCols(TableName.FolderCommit))
.orderBy("commitId", "desc")
.first();
return doc;
} catch (error) {
throw new DatabaseError({ error, name: "FindLatestCommitBetween" });
}
};
const findAllCommitsBetween = async ({
envId,
startCommitId,
endCommitId,
tx
}: {
envId?: string;
startCommitId?: string;
endCommitId?: string;
tx?: Knex;
}): Promise<TFolderCommits[]> => {
try {
const docs = await (tx || db.replicaNode())(TableName.FolderCommit)
.where((qb) => {
if (envId) {
void qb.where(`${TableName.FolderCommit}.envId`, "=", envId);
}
if (startCommitId) {
void qb.where("commitId", ">=", startCommitId);
}
if (endCommitId) {
void qb.where("commitId", "<=", endCommitId);
}
})
.select(selectAllTableCols(TableName.FolderCommit))
.orderBy("commitId", "desc");
return docs;
} catch (error) {
throw new DatabaseError({ error, name: "FindLatestCommitBetween" });
}
};
const findAllFolderCommitsAfter = async ({
envId,
startCommitId,
tx
}: {
envId?: string;
startCommitId?: string;
tx?: Knex;
}): Promise<TFolderCommits[]> => {
try {
const docs = await (tx || db.replicaNode())(TableName.FolderCommit)
.where((qb) => {
if (envId) {
void qb.where(`${TableName.FolderCommit}.envId`, "=", envId);
}
if (startCommitId) {
void qb.where("commitId", ">=", startCommitId);
}
})
.select(selectAllTableCols(TableName.FolderCommit))
.orderBy("commitId", "desc");
return docs;
} catch (error) {
throw new DatabaseError({ error, name: "FindLatestCommitBetween" });
}
};
const findPreviousCommitTo = async (
folderId: string,
commitId: string,
tx?: Knex
): Promise<TFolderCommits | undefined> => {
try {
const doc = await (tx || db.replicaNode())(TableName.FolderCommit)
.where({ folderId })
.where("commitId", "<=", commitId)
.select(selectAllTableCols(TableName.FolderCommit))
.orderBy("commitId", "desc")
.first();
return doc;
} catch (error) {
throw new DatabaseError({ error, name: "FindPreviousCommitTo" });
}
};
const findById = async (id: string, tx?: Knex, projectId?: string): Promise<TFolderCommits> => {
try {
const doc = await (tx || db.replicaNode())(TableName.FolderCommit)
// eslint-disable-next-line @typescript-eslint/no-misused-promises
.where(buildFindFilter({ id }, TableName.FolderCommit))
.leftJoin<TProjectEnvironments>(
TableName.Environment,
`${TableName.FolderCommit}.envId`,
`${TableName.Environment}.id`
)
.where((qb) => {
if (projectId) {
void qb.where(`${TableName.Environment}.projectId`, "=", projectId);
}
})
.select(selectAllTableCols(TableName.FolderCommit))
.orderBy("commitId", "desc")
.first();
if (!doc) {
throw new NotFoundError({
message: `Folder commit not found for ID ${id}`
});
}
return doc;
} catch (error) {
// Preserve the NotFoundError thrown above instead of wrapping it
if (error instanceof NotFoundError) throw error;
throw new DatabaseError({ error, name: "FindById" });
}
};
const findByFolderIdPaginated = async (
folderId: string,
options: {
offset?: number;
limit?: number;
search?: string;
sort?: "asc" | "desc";
} = {},
tx?: Knex
): Promise<{
commits: TFolderCommits[];
total: number;
hasMore: boolean;
}> => {
try {
const { offset = 0, limit = 20, search, sort = "desc" } = options;
const trx = tx || db.replicaNode();
// Build base query
let baseQuery = trx(TableName.FolderCommit).where({ folderId });
// Add search functionality
if (search) {
baseQuery = baseQuery.where((qb) => {
void qb.whereILike("message", `%${search}%`);
});
}
// Get total count
const totalResult = await baseQuery.clone().count("*", { as: "count" }).first();
const total = Number(totalResult?.count || 0);
// Get paginated commits
const folderCommits = await baseQuery.select("*").orderBy("createdAt", sort).limit(limit).offset(offset);
if (folderCommits.length === 0) {
return { commits: [], total, hasMore: false };
}
// Get all commit IDs for changes
const commitIds = folderCommits.map((commit) => commit.id);
// Get all related changes
const changes = await trx(TableName.FolderCommitChanges).whereIn("folderCommitId", commitIds).select("*");
const changesMap = changes.reduce(
(acc, change) => {
const { folderCommitId } = change;
if (!acc[folderCommitId]) acc[folderCommitId] = [];
acc[folderCommitId].push(change);
return acc;
},
{} as Record<string, TFolderCommitChanges[]>
);
const commitsWithChanges = folderCommits.map((commit) => ({
...commit,
changes: changesMap[commit.id] || []
}));
const hasMore = offset + limit < total;
return {
commits: commitsWithChanges,
total,
hasMore
};
} catch (error) {
throw new DatabaseError({ error, name: "FindByFolderIdPaginated" });
}
};
const findCommitBefore = async (
folderId: string,
commitId: bigint,
tx?: Knex
): Promise<TFolderCommits | undefined> => {
try {
const doc = await (tx || db.replicaNode())(TableName.FolderCommit)
.where({ folderId })
.where("commitId", "<", commitId.toString())
.select(selectAllTableCols(TableName.FolderCommit))
.orderBy("commitId", "desc")
.first();
return doc;
} catch (error) {
throw new DatabaseError({ error, name: "FindCommitBefore" });
}
};
return {
...restOfOrm,
findByFolderId,
findLatestCommit,
getNumberOfCommitsSince,
findCommitsToRecreate,
findMultipleLatestCommits,
findAllCommitsBetween,
findLatestCommitBetween,
findLatestEnvCommit,
getEnvNumberOfCommitsSince,
findLatestCommitByFolderIds,
findAllFolderCommitsAfter,
findPreviousCommitTo,
findById,
findByFolderIdPaginated,
findCommitBefore
};
};
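
Together with the checkpoint DALs above, this provides the point-in-time primitives: find the nearest checkpoint at or below a target commit, load its full resource snapshot, then replay only the commits in between. A hedged sketch of that flow, assuming the three DAL instances are in scope (the real reconstruction lives in the PIT/folder-commit services):

```typescript
// Sketch only: gather the inputs needed to materialize a folder at `target`.
const loadStateInputs = async (folderId: string, target: bigint) => {
  // 1. Closest checkpoint whose commitId <= target provides the base snapshot.
  const checkpoint = await folderCheckpointDAL.findNearestCheckpoint(target, folderId);
  if (!checkpoint) throw new Error(`No checkpoint at or before commit ${target}`);

  // 2. Resources (secret/folder versions) captured at that checkpoint.
  const base = await folderCheckpointResourcesDAL.findByCheckpointId(checkpoint.id);

  // 3. Commits strictly after the checkpoint, up to and including the target.
  const replay = await folderCommitDAL.findCommitsToRecreate(folderId, target, checkpoint.commitId);

  return { base, replay }; // applying `replay` over `base` is service-level logic
};
```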

View File

@@ -0,0 +1,282 @@
import { Knex } from "knex";
import { TSecretFolders } from "@app/db/schemas";
import { KeyStorePrefixes, TKeyStoreFactory } from "@app/keystore/keystore";
import { getConfig } from "@app/lib/config/env";
import { logger } from "@app/lib/logger";
import { QueueJobs, QueueName, TQueueServiceFactory } from "@app/queue";
import { TFolderTreeCheckpointDALFactory } from "../folder-tree-checkpoint/folder-tree-checkpoint-dal";
import { TFolderTreeCheckpointResourcesDALFactory } from "../folder-tree-checkpoint-resources/folder-tree-checkpoint-resources-dal";
import { TSecretFolderDALFactory } from "../secret-folder/secret-folder-dal";
import { TFolderCommitDALFactory } from "./folder-commit-dal";
// Define types for job data
type TCreateFolderTreeCheckpointDTO = {
envId: string;
failedToAcquireLockCount?: number;
folderCommitId?: string;
};
type TFolderCommitQueueServiceFactoryDep = {
queueService: TQueueServiceFactory;
keyStore: Pick<TKeyStoreFactory, "acquireLock" | "getItem" | "deleteItem">;
folderTreeCheckpointDAL: Pick<
TFolderTreeCheckpointDALFactory,
"create" | "findLatestByEnvId" | "findNearestCheckpoint"
>;
folderTreeCheckpointResourcesDAL: Pick<
TFolderTreeCheckpointResourcesDALFactory,
"insertMany" | "findByTreeCheckpointId"
>;
folderCommitDAL: Pick<
TFolderCommitDALFactory,
"findLatestEnvCommit" | "getEnvNumberOfCommitsSince" | "findMultipleLatestCommits" | "findById"
>;
folderDAL: Pick<TSecretFolderDALFactory, "findByEnvId">;
};
export type TFolderCommitQueueServiceFactory = ReturnType<typeof folderCommitQueueServiceFactory>;
export const folderCommitQueueServiceFactory = ({
queueService,
keyStore,
folderTreeCheckpointDAL,
folderTreeCheckpointResourcesDAL,
folderCommitDAL,
folderDAL
}: TFolderCommitQueueServiceFactoryDep) => {
const appCfg = getConfig();
// Helper function to calculate delay for requeuing
const getRequeueDelay = (failureCount?: number) => {
if (!failureCount) return 0;
const baseDelay = 5000;
const maxDelay = 30000;
const delay = Math.min(baseDelay * 2 ** failureCount, maxDelay);
const jitter = delay * (0.5 + Math.random() * 0.5);
return jitter;
};
const scheduleTreeCheckpoint = async (payload: TCreateFolderTreeCheckpointDTO) => {
const { envId, failedToAcquireLockCount = 0 } = payload;
// Create a unique jobId for each retry to prevent conflicts
const jobId =
failedToAcquireLockCount > 0 ? `${envId}-retry-${failedToAcquireLockCount}-${Date.now()}` : `${envId}`;
await queueService.queue(QueueName.FolderTreeCheckpoint, QueueJobs.CreateFolderTreeCheckpoint, payload, {
jobId,
delay: getRequeueDelay(failedToAcquireLockCount),
backoff: {
type: "exponential",
delay: 3000
},
removeOnFail: {
count: 3
},
removeOnComplete: true
});
};
// Sort folders by hierarchy (breadth-first: parents always precede their children)
const sortFoldersByHierarchy = (folders: TSecretFolders[]) => {
const childrenMap = new Map<string, TSecretFolders[]>();
const allFolderIds = new Set<string>();
folders.forEach((folder) => {
if (folder.id) allFolderIds.add(folder.id);
});
folders.forEach((folder) => {
if (folder.parentId) {
const children = childrenMap.get(folder.parentId) || [];
children.push(folder);
childrenMap.set(folder.parentId, children);
}
});
const rootFolders = folders.filter((folder) => !folder.parentId || !allFolderIds.has(folder.parentId));
const result = [];
let currentLevel = rootFolders;
while (currentLevel.length > 0) {
result.push(...currentLevel);
const nextLevel = [];
for (const folder of currentLevel) {
if (folder.id) {
const children = childrenMap.get(folder.id) || [];
nextLevel.push(...children);
}
}
currentLevel = nextLevel;
}
return result;
};
const createFolderTreeCheckpoint = async (jobData: TCreateFolderTreeCheckpointDTO, tx?: Knex) => {
const { envId, folderCommitId, failedToAcquireLockCount = 0 } = jobData;
logger.info(`Folder tree checkpoint creation started [envId=${envId}] [attempt=${failedToAcquireLockCount + 1}]`);
// First, try to clear any stale locks before attempting to acquire
if (failedToAcquireLockCount > 1) {
try {
await keyStore.deleteItem(KeyStorePrefixes.FolderTreeCheckpoint(envId));
logger.info(`Cleared potential stale lock for envId ${envId} before attempt ${failedToAcquireLockCount + 1}`);
} catch (error) {
// It's fine if this fails; we'll still try to acquire the lock
logger.info(`No stale lock found for envId ${envId}`);
}
}
let lock: Awaited<ReturnType<typeof keyStore.acquireLock>> | undefined;
try {
// Attempt to acquire the lock with a shorter timeout for first attempts
const timeout = failedToAcquireLockCount > 3 ? 60 * 1000 : 15 * 1000;
logger.info(`Attempting to acquire lock for envId=${envId} with timeout ${timeout}ms`);
lock = await keyStore.acquireLock([KeyStorePrefixes.FolderTreeCheckpoint(envId)], timeout);
logger.info(`Successfully acquired lock for envId=${envId}`);
} catch (e) {
logger.info(
`Failed to acquire lock for folder tree checkpoint [envId=${envId}] [attempt=${failedToAcquireLockCount + 1}]`
);
// Requeue with incremented failure count if under max attempts
if (failedToAcquireLockCount < 10) {
// Force a delay between retries
const nextRetryCount = failedToAcquireLockCount + 1;
logger.info(`Scheduling retry #${nextRetryCount} for folder tree checkpoint [envId=${envId}]`);
// Create a new job with incremented counter
await scheduleTreeCheckpoint({
envId,
folderCommitId,
failedToAcquireLockCount: nextRetryCount
});
} else {
// Max retries reached
logger.error(`Maximum lock acquisition attempts (10) reached for envId ${envId}. Giving up.`);
// Try to force-clear the lock for next time
try {
await keyStore.deleteItem(KeyStorePrefixes.FolderTreeCheckpoint(envId));
} catch (clearError) {
logger.error(clearError, `Failed to clear lock after maximum retries for envId=${envId}`);
}
}
return;
}
if (!lock) {
logger.error(`Lock is undefined after acquisition for envId=${envId}. This should never happen.`);
return;
}
try {
logger.info(`Processing tree checkpoint data for envId=${envId}`);
const latestTreeCheckpoint = await folderTreeCheckpointDAL.findLatestByEnvId(envId, tx);
let latestCommit;
if (folderCommitId) {
latestCommit = await folderCommitDAL.findById(folderCommitId, tx);
} else {
latestCommit = await folderCommitDAL.findLatestEnvCommit(envId, tx);
}
if (!latestCommit) {
logger.info(`Latest commit not found for envId ${envId}`);
return;
}
const latestCommitId = latestCommit.id;
if (latestTreeCheckpoint) {
const commitsSinceLastCheckpoint = await folderCommitDAL.getEnvNumberOfCommitsSince(
envId,
latestTreeCheckpoint.folderCommitId,
tx
);
if (commitsSinceLastCheckpoint < Number(appCfg.PIT_TREE_CHECKPOINT_WINDOW)) {
logger.info(
`Commits since last checkpoint ${commitsSinceLastCheckpoint} is less than ${appCfg.PIT_TREE_CHECKPOINT_WINDOW}`
);
return;
}
}
const folders = await folderDAL.findByEnvId(envId, tx);
const sortedFolders = sortFoldersByHierarchy(folders);
const filteredFolderIds = sortedFolders.filter((folder) => !folder.isReserved).map((folder) => folder.id);
const folderCommits = await folderCommitDAL.findMultipleLatestCommits(filteredFolderIds, tx);
const folderTreeCheckpoint = await folderTreeCheckpointDAL.create(
{
folderCommitId: latestCommitId
},
tx
);
await folderTreeCheckpointResourcesDAL.insertMany(
folderCommits.map((folderCommit) => ({
folderTreeCheckpointId: folderTreeCheckpoint.id,
folderId: folderCommit.folderId,
folderCommitId: folderCommit.id
})),
tx
);
logger.info(`Folder tree checkpoint created successfully: ${folderTreeCheckpoint.id}`);
} catch (error) {
logger.error(error, `Error processing folder tree checkpoint [envId=${envId}]`);
throw error;
} finally {
// Always release the lock
try {
if (lock) {
await lock.release();
logger.info(`Released lock for folder tree checkpoint [envId=${envId}]`);
} else {
logger.error(`No lock to release for envId=${envId}. This should never happen.`);
}
} catch (releaseError) {
logger.error(releaseError, `Error releasing lock for folder tree checkpoint [envId=${envId}]`);
// Try to force delete the lock if release fails
try {
await keyStore.deleteItem(KeyStorePrefixes.FolderTreeCheckpoint(envId));
logger.info(`Force deleted lock after release failure for envId=${envId}`);
} catch (deleteError) {
logger.error(deleteError, `Failed to force delete lock after release failure for envId=${envId}`);
}
}
}
};
queueService.start(QueueName.FolderTreeCheckpoint, async (job) => {
try {
if (job.name === QueueJobs.CreateFolderTreeCheckpoint) {
const jobData = job.data as TCreateFolderTreeCheckpointDTO;
await createFolderTreeCheckpoint(jobData);
}
} catch (error) {
logger.error(error, "Error creating folder tree checkpoint:");
throw error;
}
});
return {
scheduleTreeCheckpoint: (envId: string) => scheduleTreeCheckpoint({ envId }),
createFolderTreeCheckpoint: (envId: string, folderCommitId?: string, tx?: Knex) =>
createFolderTreeCheckpoint({ envId, folderCommitId }, tx)
};
};
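
For reference, `getRequeueDelay` above implements capped exponential backoff with 50–100% jitter: attempt 1 retries within 5–10 s, attempt 2 within 10–20 s, and attempt 3 onward is capped at 15–30 s. The pre-jitter schedule can be reproduced with the same math:

```typescript
// Same formula as getRequeueDelay, without the random jitter factor.
const preJitterDelayMs = (failureCount: number) => Math.min(5000 * 2 ** failureCount, 30000);

// preJitterDelayMs(1) === 10000, preJitterDelayMs(2) === 20000, preJitterDelayMs(3) === 30000
```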

View File

@@ -0,0 +1,143 @@
import { z } from "zod";
// Base schema shared by both secret and folder changes
const baseChangeSchema = z.object({
id: z.string(),
folderCommitId: z.string(),
changeType: z.string(),
isUpdate: z.boolean().optional(),
createdAt: z.union([z.string(), z.date()]),
updatedAt: z.union([z.string(), z.date()]),
actorMetadata: z
.union([
z.object({
id: z.string().optional(),
name: z.string().optional()
}),
z.unknown()
])
.optional(),
actorType: z.string(),
message: z.string().nullable().optional(),
folderId: z.string()
});
// Secret-specific versions schema
const secretVersionSchema = z.object({
secretKey: z.string(),
secretComment: z.string(),
skipMultilineEncoding: z.boolean().nullable().optional(),
tags: z.array(z.string()).nullable().optional(),
metadata: z.unknown().nullable().optional(),
secretValue: z.string()
});
// Folder-specific versions schema
const folderVersionSchema = z.object({
version: z.string().optional(),
name: z.string().optional(),
description: z.string().optional().nullable()
});
// Secret commit change schema
const secretCommitChangeSchema = baseChangeSchema.extend({
resourceType: z.literal("secret"),
secretVersionId: z.string().optional().nullable(),
secretKey: z.string(),
secretVersion: z.union([z.string(), z.number()]),
secretId: z.string(),
versions: z.array(secretVersionSchema).optional()
});
// Folder commit change schema
const folderCommitChangeSchema = baseChangeSchema.extend({
resourceType: z.literal("folder"),
folderVersionId: z.string().optional().nullable(),
folderName: z.string(),
folderChangeId: z.string(),
folderVersion: z.union([z.string(), z.number()]),
versions: z.array(folderVersionSchema).optional()
});
// Discriminated union for commit changes
export const commitChangeSchema = z.discriminatedUnion("resourceType", [
secretCommitChangeSchema,
folderCommitChangeSchema
]);
// Commit schema
const commitSchema = z.object({
id: z.string(),
commitId: z.string(),
actorMetadata: z
.union([
z.object({
id: z.string().optional(),
name: z.string().optional()
}),
z.unknown()
])
.optional(),
actorType: z.string(),
message: z.string().nullable().optional(),
folderId: z.string(),
envId: z.string(),
createdAt: z.union([z.string(), z.date()]),
updatedAt: z.union([z.string(), z.date()]),
isLatest: z.boolean().default(false),
changes: z.array(commitChangeSchema).optional()
});
// Response schema
export const commitChangesResponseSchema = z.object({
changes: commitSchema
});
// Base resource change schema for comparison results
const baseResourceChangeSchema = z.object({
id: z.string(),
versionId: z.string(),
oldVersionId: z.string().optional(),
changeType: z.enum(["add", "delete", "update", "create"]),
commitId: z.union([z.string(), z.bigint()]),
createdAt: z.union([z.string(), z.date()]).optional(),
parentId: z.string().optional(),
isUpdate: z.boolean().optional(),
fromVersion: z.union([z.string(), z.number()]).optional()
});
// Secret resource change schema
const secretResourceChangeSchema = baseResourceChangeSchema.extend({
type: z.literal("secret"),
secretKey: z.string(),
secretVersion: z.union([z.string(), z.number()]),
secretId: z.string(),
versions: z
.array(
z.object({
secretKey: z.string().optional(),
secretComment: z.string().optional(),
skipMultilineEncoding: z.boolean().nullable().optional(),
secretReminderRepeatDays: z.number().nullable().optional(),
tags: z.array(z.string()).nullable().optional(),
metadata: z.unknown().nullable().optional(),
secretReminderNote: z.string().nullable().optional(),
secretValue: z.string().optional()
})
)
.optional()
});
// Folder resource change schema
const folderResourceChangeSchema = baseResourceChangeSchema.extend({
type: z.literal("folder"),
folderName: z.string(),
folderVersion: z.union([z.string(), z.number()]),
versions: z.array(folderVersionSchema).optional()
});
// Discriminated union for resource changes
export const resourceChangeSchema = z.discriminatedUnion("type", [
secretResourceChangeSchema,
folderResourceChangeSchema
]);
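
Since `commitChangeSchema` is a discriminated union on `resourceType`, parsing first dispatches to the secret or folder branch and only then validates the branch-specific fields. A quick usage example with invented values:

```typescript
const parsed = commitChangeSchema.safeParse({
  id: "change-1",
  folderCommitId: "commit-1",
  changeType: "add",
  createdAt: new Date().toISOString(),
  updatedAt: new Date().toISOString(),
  actorType: "user",
  folderId: "folder-1",
  resourceType: "secret", // selects the secret branch of the union
  secretKey: "DB_PASSWORD",
  secretVersion: 1,
  secretId: "secret-1"
});
// parsed.success === true; with resourceType "folder" the same object would
// fail until folderName, folderChangeId and folderVersion were supplied.
```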


@@ -0,0 +1,671 @@
/* eslint-disable @typescript-eslint/no-unsafe-call */
/* eslint-disable @typescript-eslint/return-await */
/* eslint-disable @typescript-eslint/no-unsafe-return */
import { Knex } from "knex";
import { afterEach, beforeEach, describe, expect, it, vi } from "vitest";
import { ProjectType, TSecretFolderVersions, TSecretVersionsV2 } from "@app/db/schemas";
import { BadRequestError, NotFoundError } from "@app/lib/errors";
import { ActorType } from "../auth/auth-type";
import {
ChangeType,
CommitType,
folderCommitServiceFactory,
ResourceChange,
TFolderCommitServiceFactory
} from "./folder-commit-service";
// Mock config
vi.mock("@app/lib/config/env", () => ({
getConfig: () => ({
PIT_CHECKPOINT_WINDOW: 5,
PIT_TREE_CHECKPOINT_WINDOW: 10
})
}));
// Mock logger
vi.mock("@app/lib/logger", () => ({
logger: {
info: vi.fn(),
error: vi.fn()
}
}));
describe("folderCommitServiceFactory", () => {
// Properly type the mock functions
type TransactionCallback<T> = (trx: Knex) => Promise<T>;
// Mock dependencies
const mockFolderCommitDAL = {
create: vi.fn().mockResolvedValue({}),
findById: vi.fn().mockResolvedValue({}),
findByFolderId: vi.fn().mockResolvedValue([]),
findLatestCommit: vi.fn().mockResolvedValue({}),
transaction: vi.fn().mockImplementation(<T>(callback: TransactionCallback<T>) => callback({} as Knex)),
getNumberOfCommitsSince: vi.fn().mockResolvedValue(0),
getEnvNumberOfCommitsSince: vi.fn().mockResolvedValue(0),
findCommitsToRecreate: vi.fn().mockResolvedValue([]),
findMultipleLatestCommits: vi.fn().mockResolvedValue([]),
findLatestCommitBetween: vi.fn().mockResolvedValue({}),
findAllCommitsBetween: vi.fn().mockResolvedValue([]),
findLatestEnvCommit: vi.fn().mockResolvedValue({}),
findLatestCommitByFolderIds: vi.fn().mockResolvedValue({})
};
const mockKmsService = {
createCipherPairWithDataKey: vi.fn().mockResolvedValue({})
};
const mockFolderCommitChangesDAL = {
create: vi.fn().mockResolvedValue({}),
findByCommitId: vi.fn().mockResolvedValue([]),
insertMany: vi.fn().mockResolvedValue([])
};
const mockFolderCheckpointDAL = {
create: vi.fn().mockResolvedValue({}),
findByFolderId: vi.fn().mockResolvedValue([]),
findLatestByFolderId: vi.fn().mockResolvedValue(null),
findNearestCheckpoint: vi.fn().mockResolvedValue({})
};
const mockFolderCheckpointResourcesDAL = {
insertMany: vi.fn().mockResolvedValue([]),
findByCheckpointId: vi.fn().mockResolvedValue([])
};
const mockFolderTreeCheckpointDAL = {
create: vi.fn().mockResolvedValue({}),
findByProjectId: vi.fn().mockResolvedValue([]),
findLatestByProjectId: vi.fn().mockResolvedValue({}),
findNearestCheckpoint: vi.fn().mockResolvedValue({}),
findLatestByEnvId: vi.fn().mockResolvedValue({})
};
const mockFolderTreeCheckpointResourcesDAL = {
insertMany: vi.fn().mockResolvedValue([]),
findByTreeCheckpointId: vi.fn().mockResolvedValue([])
};
const mockUserDAL = {
findById: vi.fn().mockResolvedValue({})
};
const mockIdentityDAL = {
findById: vi.fn().mockResolvedValue({})
};
const mockFolderDAL = {
findByParentId: vi.fn().mockResolvedValue([]),
findByProjectId: vi.fn().mockResolvedValue([]),
deleteById: vi.fn().mockResolvedValue({}),
create: vi.fn().mockResolvedValue({}),
updateById: vi.fn().mockResolvedValue({}),
update: vi.fn().mockResolvedValue({}),
find: vi.fn().mockResolvedValue([]),
findById: vi.fn().mockResolvedValue({}),
findByEnvId: vi.fn().mockResolvedValue([]),
findFoldersByRootAndIds: vi.fn().mockResolvedValue([])
};
const mockFolderVersionDAL = {
findLatestFolderVersions: vi.fn().mockResolvedValue({}),
findById: vi.fn().mockResolvedValue({}),
deleteById: vi.fn().mockResolvedValue({}),
create: vi.fn().mockResolvedValue({}),
updateById: vi.fn().mockResolvedValue({}),
find: vi.fn().mockResolvedValue({}), // Changed from [] to {} to match Object.values() expectation
findByIdsWithLatestVersion: vi.fn().mockResolvedValue({})
};
const mockSecretVersionV2BridgeDAL = {
findLatestVersionByFolderId: vi.fn().mockResolvedValue([]),
findById: vi.fn().mockResolvedValue({}),
deleteById: vi.fn().mockResolvedValue({}),
create: vi.fn().mockResolvedValue({}),
updateById: vi.fn().mockResolvedValue({}),
find: vi.fn().mockResolvedValue([]),
findByIdsWithLatestVersion: vi.fn().mockResolvedValue({}),
findLatestVersionMany: vi.fn().mockResolvedValue({})
};
const mockSecretV2BridgeDAL = {
deleteById: vi.fn().mockResolvedValue({}),
create: vi.fn().mockResolvedValue({}),
updateById: vi.fn().mockResolvedValue({}),
update: vi.fn().mockResolvedValue({}),
insertMany: vi.fn().mockResolvedValue([]),
invalidateSecretCacheByProjectId: vi.fn().mockResolvedValue({})
};
const mockProjectDAL = {
findById: vi.fn().mockResolvedValue({}),
findProjectByEnvId: vi.fn().mockResolvedValue({})
};
const mockFolderCommitQueueService = {
scheduleTreeCheckpoint: vi.fn().mockResolvedValue({}),
createFolderTreeCheckpoint: vi.fn().mockResolvedValue({})
};
const mockPermissionService = {
getProjectPermission: vi.fn().mockResolvedValue({})
};
const mockSecretTagDAL = {
findSecretTagsByVersionId: vi.fn().mockResolvedValue([]),
saveTagsToSecretV2: vi.fn().mockResolvedValue([]),
findSecretTagsBySecretId: vi.fn().mockResolvedValue([]),
deleteTagsToSecretV2: vi.fn().mockResolvedValue([]),
saveTagsToSecretVersionV2: vi.fn().mockResolvedValue([])
};
const mockResourceMetadataDAL = {
find: vi.fn().mockResolvedValue([]),
insertMany: vi.fn().mockResolvedValue([]),
delete: vi.fn().mockResolvedValue([])
};
let folderCommitService: TFolderCommitServiceFactory;
beforeEach(() => {
vi.clearAllMocks();
folderCommitService = folderCommitServiceFactory({
// @ts-expect-error - Mock implementation doesn't need all interface methods for testing
folderCommitDAL: mockFolderCommitDAL,
// @ts-expect-error - Mock implementation doesn't need all interface methods for testing
folderCommitChangesDAL: mockFolderCommitChangesDAL,
// @ts-expect-error - Mock implementation doesn't need all interface methods for testing
folderCheckpointDAL: mockFolderCheckpointDAL,
// @ts-expect-error - Mock implementation doesn't need all interface methods for testing
folderCheckpointResourcesDAL: mockFolderCheckpointResourcesDAL,
// @ts-expect-error - Mock implementation doesn't need all interface methods for testing
folderTreeCheckpointDAL: mockFolderTreeCheckpointDAL,
// @ts-expect-error - Mock implementation doesn't need all interface methods for testing
folderTreeCheckpointResourcesDAL: mockFolderTreeCheckpointResourcesDAL,
// @ts-expect-error - Mock implementation doesn't need all interface methods for testing
userDAL: mockUserDAL,
// @ts-expect-error - Mock implementation doesn't need all interface methods for testing
identityDAL: mockIdentityDAL,
// @ts-expect-error - Mock implementation doesn't need all interface methods for testing
folderDAL: mockFolderDAL,
// @ts-expect-error - Mock implementation doesn't need all interface methods for testing
folderVersionDAL: mockFolderVersionDAL,
// @ts-expect-error - Mock implementation doesn't need all interface methods for testing
secretVersionV2BridgeDAL: mockSecretVersionV2BridgeDAL,
projectDAL: mockProjectDAL,
// @ts-expect-error - Mock implementation doesn't need all interface methods for testing
secretV2BridgeDAL: mockSecretV2BridgeDAL,
folderCommitQueueService: mockFolderCommitQueueService,
// @ts-expect-error - Mock implementation doesn't need all interface methods for testing
permissionService: mockPermissionService,
kmsService: mockKmsService,
secretTagDAL: mockSecretTagDAL,
resourceMetadataDAL: mockResourceMetadataDAL
});
});
afterEach(() => {
vi.resetAllMocks();
});
describe("createCommit", () => {
it("should successfully create a commit with user actor", async () => {
// Arrange
const userData = { id: "user-id", username: "testuser" };
const folderData = { id: "folder-id", envId: "env-id" };
const commitData = { id: "commit-id", folderId: "folder-id" };
mockUserDAL.findById.mockResolvedValue(userData);
mockFolderDAL.findById.mockResolvedValue(folderData);
mockFolderCommitDAL.create.mockResolvedValue(commitData);
mockFolderCheckpointDAL.findLatestByFolderId.mockResolvedValue(null);
mockFolderCommitDAL.findLatestCommit.mockResolvedValue({ id: "latest-commit-id" });
mockFolderDAL.findByParentId.mockResolvedValue([]);
mockSecretVersionV2BridgeDAL.findLatestVersionByFolderId.mockResolvedValue([]);
const data = {
actor: {
type: ActorType.USER,
metadata: { id: userData.id }
},
message: "Test commit",
folderId: folderData.id,
changes: [
{
type: CommitType.ADD,
secretVersionId: "secret-version-1"
}
]
};
// Act
const result = await folderCommitService.createCommit(data);
// Assert
expect(mockUserDAL.findById).toHaveBeenCalledWith(userData.id, undefined);
expect(mockFolderDAL.findById).toHaveBeenCalledWith(folderData.id, undefined);
expect(mockFolderCommitDAL.create).toHaveBeenCalledWith(
expect.objectContaining({
actorType: ActorType.USER,
// eslint-disable-next-line @typescript-eslint/no-unsafe-assignment
actorMetadata: expect.objectContaining({ name: userData.username }),
message: data.message,
folderId: data.folderId,
envId: folderData.envId
}),
undefined
);
expect(mockFolderCommitChangesDAL.insertMany).toHaveBeenCalledWith(
expect.arrayContaining([
expect.objectContaining({
folderCommitId: commitData.id,
changeType: data.changes[0].type,
secretVersionId: data.changes[0].secretVersionId
})
]),
undefined
);
expect(mockFolderCommitQueueService.scheduleTreeCheckpoint).toHaveBeenCalledWith(folderData.envId);
expect(result).toEqual(commitData);
});
it("should successfully create a commit with identity actor", async () => {
// Arrange
const identityData = { id: "identity-id", name: "testidentity" };
const folderData = { id: "folder-id", envId: "env-id" };
const commitData = { id: "commit-id", folderId: "folder-id" };
mockIdentityDAL.findById.mockResolvedValue(identityData);
mockFolderDAL.findById.mockResolvedValue(folderData);
mockFolderCommitDAL.create.mockResolvedValue(commitData);
mockFolderCheckpointDAL.findLatestByFolderId.mockResolvedValue(null);
mockFolderCommitDAL.findLatestCommit.mockResolvedValue({ id: "latest-commit-id" });
mockFolderDAL.findByParentId.mockResolvedValue([]);
mockSecretVersionV2BridgeDAL.findLatestVersionByFolderId.mockResolvedValue([]);
// Mock folderVersionDAL.find to return an object with folder version data
mockFolderVersionDAL.find.mockResolvedValue({
"folder-version-1": {
id: "folder-version-1",
folderId: "sub-folder-id",
envId: "env-id",
name: "Test Folder",
version: 1
}
});
const data = {
actor: {
type: ActorType.IDENTITY,
metadata: { id: identityData.id }
},
message: "Test commit",
folderId: folderData.id,
changes: [
{
type: CommitType.ADD,
folderVersionId: "folder-version-1"
}
],
omitIgnoreFilter: true
};
// Act
const result = await folderCommitService.createCommit(data);
// Assert
expect(mockIdentityDAL.findById).toHaveBeenCalledWith(identityData.id, undefined);
expect(mockFolderDAL.findById).toHaveBeenCalledWith(folderData.id, undefined);
expect(mockFolderCommitDAL.create).toHaveBeenCalledWith(
expect.objectContaining({
actorType: ActorType.IDENTITY,
// eslint-disable-next-line @typescript-eslint/no-unsafe-assignment
actorMetadata: expect.objectContaining({ name: identityData.name }),
message: data.message,
folderId: data.folderId,
envId: folderData.envId
}),
undefined
);
expect(mockFolderCommitChangesDAL.insertMany).toHaveBeenCalledWith(
expect.arrayContaining([
expect.objectContaining({
folderCommitId: commitData.id,
changeType: data.changes[0].type,
folderVersionId: data.changes[0].folderVersionId
})
]),
undefined
);
expect(mockFolderCommitQueueService.scheduleTreeCheckpoint).toHaveBeenCalledWith(folderData.envId);
expect(result).toEqual(commitData);
});
it("should throw NotFoundError when folder does not exist", async () => {
// Arrange
mockFolderDAL.findById.mockResolvedValue(null);
const data = {
actor: {
type: ActorType.PLATFORM
},
message: "Test commit",
folderId: "non-existent-folder",
changes: []
};
// Act & Assert
await expect(folderCommitService.createCommit(data)).rejects.toThrow(NotFoundError);
expect(mockFolderDAL.findById).toHaveBeenCalledWith("non-existent-folder", undefined);
});
});
describe("addCommitChange", () => {
it("should successfully add a change to an existing commit", async () => {
// Arrange
const commitData = { id: "commit-id", folderId: "folder-id" };
const changeData = { id: "change-id", folderCommitId: "commit-id" };
mockFolderCommitDAL.findById.mockResolvedValue(commitData);
mockFolderCommitChangesDAL.create.mockResolvedValue(changeData);
const data = {
folderCommitId: commitData.id,
changeType: CommitType.ADD,
secretVersionId: "secret-version-1"
};
// Act
const result = await folderCommitService.addCommitChange(data);
// Assert
expect(mockFolderCommitDAL.findById).toHaveBeenCalledWith(commitData.id, undefined);
expect(mockFolderCommitChangesDAL.create).toHaveBeenCalledWith(data, undefined);
expect(result).toEqual(changeData);
});
it("should throw BadRequestError when neither secretVersionId nor folderVersionId is provided", async () => {
// Arrange
const data = {
folderCommitId: "commit-id",
changeType: CommitType.ADD
};
// Act & Assert
await expect(folderCommitService.addCommitChange(data)).rejects.toThrow(BadRequestError);
});
it("should throw NotFoundError when commit does not exist", async () => {
// Arrange
mockFolderCommitDAL.findById.mockResolvedValue(null);
const data = {
folderCommitId: "non-existent-commit",
changeType: CommitType.ADD,
secretVersionId: "secret-version-1"
};
// Act & Assert
await expect(folderCommitService.addCommitChange(data)).rejects.toThrow(NotFoundError);
expect(mockFolderCommitDAL.findById).toHaveBeenCalledWith("non-existent-commit", undefined);
});
});
// Note: reconstructFolderState is an internal function not exposed in the public API
// We'll test it indirectly through compareFolderStates
describe("compareFolderStates", () => {
it("should mark all resources as creates when currentCommitId is not provided", async () => {
// Arrange
const targetCommitId = "target-commit-id";
const targetCommit = { id: targetCommitId, commitId: 1, folderId: "folder-id" };
mockFolderCommitDAL.findById.mockResolvedValue(targetCommit);
// Mock how compareFolderStates would process the results internally
mockFolderCheckpointDAL.findNearestCheckpoint.mockResolvedValue({ id: "checkpoint-id", commitId: "hash-0" });
mockFolderCheckpointResourcesDAL.findByCheckpointId.mockResolvedValue([
{ secretVersionId: "secret-version-1", referencedSecretId: "secret-1" },
{ folderVersionId: "folder-version-1", referencedFolderId: "folder-1" }
]);
mockFolderCommitDAL.findCommitsToRecreate.mockResolvedValue([]);
mockProjectDAL.findProjectByEnvId.mockResolvedValue({
id: "project-id",
name: "test-project",
type: ProjectType.SecretManager
});
// Act
const result = await folderCommitService.compareFolderStates({
targetCommitId
});
// Assert
expect(mockFolderCommitDAL.findById).toHaveBeenCalledWith(targetCommitId, undefined);
// Verify we get resources marked as create
expect(result).toEqual(
expect.arrayContaining([
expect.objectContaining({
changeType: "create",
commitId: targetCommit.commitId
})
])
);
});
});
describe("createFolderCheckpoint", () => {
it("should successfully create a checkpoint when force is true", async () => {
// Arrange
const folderCommitId = "commit-id";
const folderId = "folder-id";
const checkpointData = { id: "checkpoint-id", folderCommitId };
mockFolderDAL.findByParentId.mockResolvedValue([{ id: "subfolder-id" }]);
mockFolderVersionDAL.findLatestFolderVersions.mockResolvedValue({ "subfolder-id": { id: "folder-version-1" } });
mockSecretVersionV2BridgeDAL.findLatestVersionByFolderId.mockResolvedValue([{ id: "secret-version-1" }]);
mockFolderCheckpointDAL.create.mockResolvedValue(checkpointData);
// Act
const result = await folderCommitService.createFolderCheckpoint({
folderId,
folderCommitId,
force: true
});
// Assert
expect(mockFolderCheckpointDAL.create).toHaveBeenCalledWith({ folderCommitId }, undefined);
expect(mockFolderCheckpointResourcesDAL.insertMany).toHaveBeenCalled();
expect(result).toBe(folderCommitId);
});
});
describe("deepRollbackFolder", () => {
it("should throw NotFoundError when commit doesn't exist", async () => {
// Arrange
const targetCommitId = "non-existent-commit";
const envId = "env-id";
const actorId = "user-id";
const actorType = ActorType.USER;
const projectId = "project-id";
// Mock the transaction to properly handle the error
mockFolderCommitDAL.transaction.mockImplementation(async (callback) => {
return await callback({} as Knex);
});
// Mock findById to return null inside the transaction
mockFolderCommitDAL.findById.mockResolvedValue(null);
// Act & Assert
await expect(
folderCommitService.deepRollbackFolder(targetCommitId, envId, actorId, actorType, projectId)
).rejects.toThrow(NotFoundError);
});
});
describe("createFolderTreeCheckpoint", () => {
it("should create a tree checkpoint when checkpoint window is exceeded", async () => {
// Arrange
const envId = "env-id";
const folderCommitId = "commit-id";
const latestCommit = { id: folderCommitId };
const latestTreeCheckpoint = { id: "tree-checkpoint-id", folderCommitId: "old-commit-id" };
const folders = [
{ id: "folder-1", isReserved: false },
{ id: "folder-2", isReserved: false },
{ id: "folder-3", isReserved: true } // Reserved folders should be filtered out
];
const folderCommits = [
{ folderId: "folder-1", id: "commit-1" },
{ folderId: "folder-2", id: "commit-2" }
];
const treeCheckpoint = { id: "new-tree-checkpoint-id" };
mockFolderCommitDAL.findLatestEnvCommit.mockResolvedValue(latestCommit);
mockFolderTreeCheckpointDAL.findLatestByEnvId.mockResolvedValue(latestTreeCheckpoint);
mockFolderCommitDAL.getEnvNumberOfCommitsSince.mockResolvedValue(15); // More than PIT_TREE_CHECKPOINT_WINDOW (10)
mockFolderDAL.findByEnvId.mockResolvedValue(folders);
mockFolderCommitDAL.findMultipleLatestCommits.mockResolvedValue(folderCommits);
mockFolderTreeCheckpointDAL.create.mockResolvedValue(treeCheckpoint);
// Act
await folderCommitService.createFolderTreeCheckpoint(envId);
// Assert
expect(mockFolderCommitDAL.findLatestEnvCommit).toHaveBeenCalledWith(envId, undefined);
expect(mockFolderTreeCheckpointDAL.create).toHaveBeenCalledWith({ folderCommitId }, undefined);
});
});
describe("applyFolderStateDifferences", () => {
it("should process changes correctly", async () => {
// Arrange
const folderId = "folder-id";
const projectId = "project-id";
const actorId = "user-id";
const actorType = ActorType.USER;
const differences = [
{
id: "secret-1",
versionId: "v1",
changeType: ChangeType.CREATE,
commitId: BigInt(1)
} as ResourceChange,
{
id: "folder-1",
versionId: "v2",
changeType: ChangeType.UPDATE,
commitId: BigInt(1),
folderName: "Test Folder",
folderVersion: "v2"
} as ResourceChange
];
const secretVersions = {
"secret-1": {
id: "secret-version-1",
createdAt: new Date(),
updatedAt: new Date(),
type: "shared",
folderId: "folder-1",
secretId: "secret-1",
version: 1,
key: "SECRET_KEY",
encryptedValue: Buffer.from("encrypted"),
encryptedComment: Buffer.from("comment"),
skipMultilineEncoding: false,
userId: "user-1",
envId: "env-1",
metadata: {}
} as TSecretVersionsV2
};
const folderVersions = {
"folder-1": {
folderId: "folder-1",
version: 1,
name: "Test Folder",
envId: "env-1"
} as TSecretFolderVersions
};
// Mock folder lookup for the folder being processed
mockFolderDAL.findById.mockImplementation((id) => {
if (id === folderId) {
return Promise.resolve({ id: folderId, envId: "env-1" });
}
return Promise.resolve(null);
});
// Mock latest commit lookup
mockFolderCommitDAL.findLatestCommit.mockImplementation((id) => {
if (id === folderId) {
return Promise.resolve({ id: "latest-commit-id", folderId });
}
return Promise.resolve(null);
});
// Make sure findByParentId returns an array, not undefined
mockFolderDAL.findByParentId.mockResolvedValue([]);
// Make sure other required functions return appropriate values
mockFolderCheckpointDAL.findLatestByFolderId.mockResolvedValue(null);
mockSecretVersionV2BridgeDAL.findLatestVersionByFolderId.mockResolvedValue([]);
// These mocks need to return objects with an id field
mockSecretVersionV2BridgeDAL.findByIdsWithLatestVersion.mockResolvedValue(Object.values(secretVersions));
mockFolderVersionDAL.findByIdsWithLatestVersion.mockResolvedValue(Object.values(folderVersions));
mockSecretV2BridgeDAL.insertMany.mockResolvedValue([{ id: "new-secret-1" }]);
mockSecretVersionV2BridgeDAL.create.mockResolvedValue({ id: "new-secret-version-1" });
mockFolderDAL.updateById.mockResolvedValue({ id: "updated-folder-1" });
mockFolderVersionDAL.create.mockResolvedValue({ id: "new-folder-version-1" });
mockFolderCommitDAL.create.mockResolvedValue({ id: "new-commit-id" });
mockSecretVersionV2BridgeDAL.findLatestVersionMany.mockResolvedValue([
{
id: "secret-version-1",
createdAt: new Date(),
updatedAt: new Date(),
type: "shared",
folderId: "folder-1",
secretId: "secret-1",
version: 1,
key: "SECRET_KEY",
encryptedValue: Buffer.from("encrypted"),
encryptedComment: Buffer.from("comment"),
skipMultilineEncoding: false,
userId: "user-1",
envId: "env-1",
metadata: {}
}
]);
// Mock transaction
mockFolderCommitDAL.transaction.mockImplementation(<T>(callback: TransactionCallback<T>) => callback({} as Knex));
// Act
const result = await folderCommitService.applyFolderStateDifferences({
differences,
actorInfo: {
actorType,
actorId,
message: "Applying changes"
},
folderId,
projectId,
reconstructNewFolders: false
});
// Assert
expect(mockFolderCommitDAL.create).toHaveBeenCalled();
expect(mockSecretV2BridgeDAL.invalidateSecretCacheByProjectId).toHaveBeenCalledWith(projectId);
// Check that we got the right counts
expect(result.totalChanges).toEqual(2);
});
});
});
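
One pattern worth calling out in these tests: `transaction` is stubbed to invoke its callback with an empty object cast to `Knex`, so the service's transactional control flow runs end-to-end while every DB call resolves from a mock. Reduced to a standalone sketch:

```typescript
import type { Knex } from "knex";
import { vi } from "vitest";

const dal = {
  // The fake transaction just runs the callback with a dummy trx handle.
  transaction: vi.fn(<T>(cb: (trx: Knex) => Promise<T>) => cb({} as Knex)),
  create: vi.fn().mockResolvedValue({ id: "row-1" })
};

// Service code shaped like `dal.transaction((tx) => dal.create(data, tx))`
// then exercises its real branching with no database attached.
```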

File diff suppressed because it is too large


@@ -0,0 +1,44 @@
import { Knex } from "knex";
import { TDbClient } from "@app/db";
import { TableName, TFolderTreeCheckpointResources } from "@app/db/schemas";
import { DatabaseError } from "@app/lib/errors";
import { buildFindFilter, ormify, selectAllTableCols } from "@app/lib/knex";
export type TFolderTreeCheckpointResourcesDALFactory = ReturnType<typeof folderTreeCheckpointResourcesDALFactory>;
type TFolderTreeCheckpointResourcesWithCommitId = TFolderTreeCheckpointResources & {
commitId: bigint;
};
export const folderTreeCheckpointResourcesDALFactory = (db: TDbClient) => {
const folderTreeCheckpointResourcesOrm = ormify(db, TableName.FolderTreeCheckpointResources);
const findByTreeCheckpointId = async (
folderTreeCheckpointId: string,
tx?: Knex
): Promise<TFolderTreeCheckpointResourcesWithCommitId[]> => {
try {
const docs = await (tx || db.replicaNode())<TFolderTreeCheckpointResources>(
TableName.FolderTreeCheckpointResources
)
.join(
TableName.FolderCommit,
`${TableName.FolderTreeCheckpointResources}.folderCommitId`,
`${TableName.FolderCommit}.id`
)
// eslint-disable-next-line @typescript-eslint/no-misused-promises
.where(buildFindFilter({ folderTreeCheckpointId }, TableName.FolderTreeCheckpointResources))
.select(selectAllTableCols(TableName.FolderTreeCheckpointResources))
.select(db.ref("commitId").withSchema(TableName.FolderCommit).as("commitId"));
return docs;
} catch (error) {
throw new DatabaseError({ error, name: "FindByTreeCheckpointId" });
}
};
return {
...folderTreeCheckpointResourcesOrm,
findByTreeCheckpointId
};
};


@@ -0,0 +1,79 @@
import { Knex } from "knex";
import { TDbClient } from "@app/db";
import { TableName, TFolderCommits, TFolderTreeCheckpoints } from "@app/db/schemas";
import { DatabaseError } from "@app/lib/errors";
import { buildFindFilter, ormify, selectAllTableCols } from "@app/lib/knex";
export type TFolderTreeCheckpointDALFactory = ReturnType<typeof folderTreeCheckpointDALFactory>;
type TreeCheckpointWithCommitInfo = TFolderTreeCheckpoints & {
commitId: bigint;
};
export const folderTreeCheckpointDALFactory = (db: TDbClient) => {
const folderTreeCheckpointOrm = ormify(db, TableName.FolderTreeCheckpoint);
const findByCommitId = async (folderCommitId: string, tx?: Knex): Promise<TFolderTreeCheckpoints | undefined> => {
try {
const doc = await (tx || db.replicaNode())<TFolderTreeCheckpoints>(TableName.FolderTreeCheckpoint)
// eslint-disable-next-line @typescript-eslint/no-misused-promises
.where(buildFindFilter({ folderCommitId }, TableName.FolderTreeCheckpoint))
.select(selectAllTableCols(TableName.FolderTreeCheckpoint))
.first();
return doc;
} catch (error) {
throw new DatabaseError({ error, name: "FindByCommitId" });
}
};
const findNearestCheckpoint = async (
folderCommitId: bigint,
envId: string,
tx?: Knex
): Promise<TreeCheckpointWithCommitInfo | undefined> => {
try {
const nearestCheckpoint = await (tx || db.replicaNode())(TableName.FolderTreeCheckpoint)
.join<TFolderCommits>(
TableName.FolderCommit,
`${TableName.FolderTreeCheckpoint}.folderCommitId`,
`${TableName.FolderCommit}.id`
)
// eslint-disable-next-line @typescript-eslint/no-misused-promises
.where(`${TableName.FolderCommit}.envId`, "=", envId)
.andWhere(`${TableName.FolderCommit}.commitId`, "<=", folderCommitId.toString())
.select(selectAllTableCols(TableName.FolderTreeCheckpoint))
.select(db.ref("commitId").withSchema(TableName.FolderCommit))
.orderBy(`${TableName.FolderCommit}.commitId`, "desc")
.first();
return nearestCheckpoint;
} catch (error) {
throw new DatabaseError({ error, name: "FindNearestCheckpoint" });
}
};
const findLatestByEnvId = async (envId: string, tx?: Knex): Promise<TFolderTreeCheckpoints | undefined> => {
try {
const doc = await (tx || db.replicaNode())<TFolderTreeCheckpoints>(TableName.FolderTreeCheckpoint)
.join<TFolderCommits>(
TableName.FolderCommit,
`${TableName.FolderTreeCheckpoint}.folderCommitId`,
`${TableName.FolderCommit}.id`
)
.where(`${TableName.FolderCommit}.envId`, "=", envId)
.orderBy(`${TableName.FolderTreeCheckpoint}.createdAt`, "desc")
.first();
return doc;
} catch (error) {
throw new DatabaseError({ error, name: "FindLatestByEnvId" });
}
};
return {
...folderTreeCheckpointOrm,
findByCommitId,
findNearestCheckpoint,
findLatestByEnvId
};
};
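
`findNearestCheckpoint` is the anchor for point-in-time reconstruction: given a target commit number it returns the newest checkpoint at or before that commit, so only the commits in between need to be replayed. A usage sketch with invented ids:

```typescript
const target = BigInt(1042);
const checkpoint = await folderTreeCheckpointDAL.findNearestCheckpoint(target, "env-id");
if (checkpoint) {
  // checkpoint.commitId <= 1042n; replaying the commits in
  // (checkpoint.commitId, 1042] on top of the checkpointed tree
  // yields the requested historical state.
}
```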


@@ -1,6 +1,7 @@
import { ForbiddenError } from "@casl/ability";
import { ActionProjectType, ProjectMembershipRole, SecretKeyEncoding, TGroups } from "@app/db/schemas";
import { TListProjectGroupUsersDTO } from "@app/ee/services/group/group-types";
import {
constructPermissionErrorMessage,
validatePrivilegeChangeOperation
@@ -42,7 +43,7 @@ type TGroupProjectServiceFactoryDep = {
projectKeyDAL: Pick<TProjectKeyDALFactory, "findLatestProjectKey" | "delete" | "insertMany" | "transaction">;
projectRoleDAL: Pick<TProjectRoleDALFactory, "find">;
projectBotDAL: TProjectBotDALFactory;
groupDAL: Pick<TGroupDALFactory, "findOne">;
groupDAL: Pick<TGroupDALFactory, "findOne" | "findAllGroupPossibleMembers">;
permissionService: Pick<TPermissionServiceFactory, "getProjectPermission" | "getProjectPermissionByRole">;
};
@@ -471,11 +472,54 @@ export const groupProjectServiceFactory = ({
return groupMembership;
};
const listProjectGroupUsers = async ({
id,
projectId,
offset,
limit,
username,
actor,
actorId,
actorAuthMethod,
actorOrgId,
search,
filter
}: TListProjectGroupUsersDTO) => {
const project = await projectDAL.findById(projectId);
if (!project) {
throw new NotFoundError({ message: `Failed to find project with ID ${projectId}` });
}
const { permission } = await permissionService.getProjectPermission({
actor,
actorId,
projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.Any
});
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionGroupActions.Read, ProjectPermissionSub.Groups);
const { members, totalCount } = await groupDAL.findAllGroupPossibleMembers({
orgId: project.orgId,
groupId: id,
offset,
limit,
username,
search,
filter
});
return { users: members, totalCount };
};
return {
addGroupToProject,
updateGroupInProject,
removeGroupFromProject,
listGroupsInProject,
getGroupInProject
getGroupInProject,
listProjectGroupUsers
};
};


@@ -11,5 +11,9 @@ export type TIdentityAccessTokenJwtPayload = {
oidc?: {
claims: Record<string, string>;
};
kubernetes?: {
namespace: string;
name: string;
};
};
};


@@ -416,7 +416,13 @@ export const identityKubernetesAuthServiceFactory = ({
{
identityId: identityKubernetesAuth.identityId,
identityAccessTokenId: identityAccessToken.id,
authTokenType: AuthTokenType.IDENTITY_ACCESS_TOKEN
authTokenType: AuthTokenType.IDENTITY_ACCESS_TOKEN,
identityAuth: {
kubernetes: {
namespace: targetNamespace,
name: targetName
}
}
} as TIdentityAccessTokenJwtPayload,
appCfg.AUTH_SECRET,
// akhilmhdh: for non-expiry tokens, do not set the expiry value at all (not even undefined); jsonwebtoken throws an error even when the value is undefined
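
With this change the pod identity resolved during Kubernetes login is embedded in the signed access token itself. An illustrative decoded payload (all values invented; only the field names shown in the hunk are grounded):

```typescript
const decodedPayload = {
  identityId: "identity-uuid",
  identityAccessTokenId: "access-token-uuid",
  authTokenType: AuthTokenType.IDENTITY_ACCESS_TOKEN,
  identityAuth: {
    kubernetes: { namespace: "payments", name: "api-server-sa" }
  }
};
```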


@@ -212,7 +212,7 @@ export const orgDALFactory = (db: TDbClient) => {
// special query
const findAllOrgsByUserId = async (
userId: string
): Promise<(TOrganizations & { orgAuthMethod: string; userRole: string })[]> => {
): Promise<(TOrganizations & { orgAuthMethod: string; userRole: string; userStatus: string })[]> => {
try {
const org = (await db
.replicaNode()(TableName.OrgMembership)
@@ -234,6 +234,7 @@ export const orgDALFactory = (db: TDbClient) => {
})
.select(selectAllTableCols(TableName.Organization))
.select(db.ref("role").withSchema(TableName.OrgMembership).as("userRole"))
.select(db.ref("status").withSchema(TableName.OrgMembership).as("userStatus"))
.select(
db.raw(`
CASE
@@ -242,7 +243,7 @@ export const orgDALFactory = (db: TDbClient) => {
ELSE ''
END as "orgAuthMethod"
`)
)) as (TOrganizations & { orgAuthMethod: string; userRole: string })[];
)) as (TOrganizations & { orgAuthMethod: string; userRole: string; userStatus: string })[];
return org;
} catch (error) {


@@ -183,7 +183,9 @@ export const orgServiceFactory = ({
* */
const findAllOrganizationOfUser = async (userId: string) => {
const orgs = await orgDAL.findAllOrgsByUserId(userId);
return orgs;
// Filter out orgs where the membership object is an invitation
return orgs.filter((org) => org.userStatus !== "invited");
};
/*
* Get all workspace members
@@ -835,16 +837,22 @@ export const orgServiceFactory = ({
// if the user doesn't exist we create the user with the email
if (!inviteeUser) {
inviteeUser = await userDAL.create(
{
isAccepted: false,
email: inviteeEmail,
username: inviteeEmail,
authMethods: [AuthMethod.EMAIL],
isGhost: false
},
tx
);
// TODO(carlos): will be removed once the function receives usernames instead of emails
const usersByEmail = await userDAL.findUserByEmail(inviteeEmail, tx);
if (usersByEmail?.length === 1) {
[inviteeUser] = usersByEmail;
} else {
inviteeUser = await userDAL.create(
{
isAccepted: false,
email: inviteeEmail,
username: inviteeEmail,
authMethods: [AuthMethod.EMAIL],
isGhost: false
},
tx
);
}
}
const inviteeUserId = inviteeUser?.id;


@@ -12,7 +12,7 @@ import {
TProjectsUpdate
} from "@app/db/schemas";
import { BadRequestError, DatabaseError, NotFoundError, UnauthorizedError } from "@app/lib/errors";
import { ormify, selectAllTableCols, sqlNestRelationships } from "@app/lib/knex";
import { buildFindFilter, ormify, selectAllTableCols, sqlNestRelationships } from "@app/lib/knex";
import { ActorType } from "../auth/auth-type";
import { Filter, ProjectFilterType, SearchProjectSortBy } from "./project-types";
@@ -475,6 +475,16 @@ export const projectDALFactory = (db: TDbClient) => {
return { docs, totalCount: Number(docs?.[0]?.count ?? 0) };
};
const findProjectByEnvId = async (envId: string, tx?: Knex) => {
const project = await (tx || db.replicaNode())(TableName.Project)
.leftJoin(TableName.Environment, `${TableName.Environment}.projectId`, `${TableName.Project}.id`)
// eslint-disable-next-line @typescript-eslint/no-misused-promises
.where(buildFindFilter({ id: envId }, TableName.Environment))
.select(selectAllTableCols(TableName.Project))
.first();
return project;
};
const countOfOrgProjects = async (orgId: string | null, tx?: Knex) => {
try {
const doc = await (tx || db.replicaNode())(TableName.Project)
@@ -504,6 +514,7 @@ export const projectDALFactory = (db: TDbClient) => {
checkProjectUpgradeStatus,
getProjectFromSplitId,
searchProjects,
findProjectByEnvId,
countOfOrgProjects
};
};


@@ -165,7 +165,7 @@ type TProjectServiceFactoryDep = {
sshHostGroupDAL: Pick<TSshHostGroupDALFactory, "find" | "findSshHostGroupsWithLoginMappings">;
permissionService: TPermissionServiceFactory;
orgService: Pick<TOrgServiceFactory, "addGhostUser">;
licenseService: Pick<TLicenseServiceFactory, "getPlan">;
licenseService: Pick<TLicenseServiceFactory, "getPlan" | "invalidateGetPlan">;
queueService: Pick<TQueueServiceFactory, "stopRepeatableJob">;
smtpService: Pick<TSmtpService, "sendMail">;
orgDAL: Pick<TOrgDALFactory, "findOne">;
@@ -494,6 +494,10 @@ export const projectServiceFactory = ({
);
}
// no need to invalidate if there was no limit
if (plan.workspaceLimit) {
await licenseService.invalidateGetPlan(organization.id);
}
return {
...project,
environments: envs,
@@ -667,7 +671,8 @@ export const projectServiceFactory = ({
enforceCapitalization: update.autoCapitalization,
hasDeleteProtection: update.hasDeleteProtection,
slug: update.slug,
secretSharing: update.secretSharing
secretSharing: update.secretSharing,
showSnapshotsLegacy: update.showSnapshotsLegacy
});
return updatedProject;


@@ -94,6 +94,7 @@ export type TUpdateProjectDTO = {
hasDeleteProtection?: boolean;
slug?: string;
secretSharing?: boolean;
showSnapshotsLegacy?: boolean;
};
} & Omit<TProjectPermission, "projectId">;


@@ -488,6 +488,75 @@ export const secretFolderDALFactory = (db: TDbClient) => {
}
};
const findFoldersByRootAndIds = async ({ rootId, folderIds }: { rootId: string; folderIds: string[] }, tx?: Knex) => {
try {
// First, get all descendant folders of rootId
const descendants = await (tx || db.replicaNode())
.withRecursive("descendants", (qb) =>
qb
.select(
selectAllTableCols(TableName.SecretFolder),
db.raw("0 as depth"),
db.raw(`'/' as path`),
db.ref(`${TableName.Environment}.slug`).as("environment")
)
.from(TableName.SecretFolder)
.join(TableName.Environment, `${TableName.SecretFolder}.envId`, `${TableName.Environment}.id`)
.where(`${TableName.SecretFolder}.id`, rootId)
.union((un) => {
void un
.select(
selectAllTableCols(TableName.SecretFolder),
db.raw("descendants.depth + 1 as depth"),
db.raw(
`CONCAT(
CASE WHEN descendants.path = '/' THEN '' ELSE descendants.path END,
CASE WHEN ${TableName.SecretFolder}."parentId" is NULL THEN '' ELSE CONCAT('/', secret_folders.name) END
)`
),
db.ref("descendants.environment")
)
.from(TableName.SecretFolder)
.where(`${TableName.SecretFolder}.isReserved`, false)
.join("descendants", `${TableName.SecretFolder}.parentId`, "descendants.id");
})
)
.select<(TSecretFolders & { path: string; depth: number; environment: string })[]>("*")
.from("descendants")
.whereIn(`id`, folderIds)
.orderBy("depth")
.orderBy(`name`);
return descendants;
} catch (error) {
throw new DatabaseError({ error, name: "FindFoldersByRootAndIds" });
}
};
const findByParentId = async (parentId: string, tx?: Knex) => {
try {
const folders = await (tx || db.replicaNode())(TableName.SecretFolder)
.where({ parentId })
.andWhere({ isReserved: false })
.select(selectAllTableCols(TableName.SecretFolder));
return folders;
} catch (error) {
throw new DatabaseError({ error, name: "findByParentId" });
}
};
const findByEnvId = async (envId: string, tx?: Knex) => {
try {
const folders = await (tx || db.replicaNode())(TableName.SecretFolder)
.where({ envId })
.andWhere({ isReserved: false })
.select(selectAllTableCols(TableName.SecretFolder));
return folders;
} catch (error) {
throw new DatabaseError({ error, name: "findByEnvId" });
}
};
return {
...secretFolderOrm,
update,
@@ -499,6 +568,9 @@ export const secretFolderDALFactory = (db: TDbClient) => {
findClosestFolder,
findByProjectId,
findByMultiEnv,
findByEnvsDeep
findByEnvsDeep,
findByParentId,
findByEnvId,
findFoldersByRootAndIds
};
};
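
`findFoldersByRootAndIds` accumulates each folder's path while walking the tree in a single recursive CTE, so callers get display-ready paths for an arbitrary set of folder ids without per-folder lookups. Illustrative call and output (rows invented):

```typescript
const rows = await folderDAL.findFoldersByRootAndIds({
  rootId: "root-folder-id",
  folderIds: ["app-folder-id", "db-folder-id"]
});
// [
//   { id: "app-folder-id", depth: 1, path: "/app",    environment: "dev", ... },
//   { id: "db-folder-id",  depth: 2, path: "/app/db", environment: "dev", ... }
// ]
```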


@@ -10,6 +10,7 @@ import { BadRequestError, NotFoundError } from "@app/lib/errors";
import { OrderByDirection, OrgServiceActor } from "@app/lib/types";
import { buildFolderPath } from "@app/services/secret-folder/secret-folder-fns";
import { ChangeType, CommitType, TFolderCommitServiceFactory } from "../folder-commit/folder-commit-service";
import { TProjectDALFactory } from "../project/project-dal";
import { TProjectEnvDALFactory } from "../project-env/project-env-dal";
import { TSecretFolderDALFactory } from "./secret-folder-dal";
@@ -29,7 +30,8 @@ type TSecretFolderServiceFactoryDep = {
snapshotService: Pick<TSecretSnapshotServiceFactory, "performSnapshot">;
folderDAL: TSecretFolderDALFactory;
projectEnvDAL: Pick<TProjectEnvDALFactory, "findOne" | "findBySlugs" | "find">;
folderVersionDAL: TSecretFolderVersionDALFactory;
folderVersionDAL: Pick<TSecretFolderVersionDALFactory, "findLatestFolderVersions" | "create" | "insertMany" | "find">;
folderCommitService: Pick<TFolderCommitServiceFactory, "createCommit">;
projectDAL: Pick<TProjectDALFactory, "findProjectBySlug">;
};
@@ -41,6 +43,7 @@ export const secretFolderServiceFactory = ({
permissionService,
projectEnvDAL,
folderVersionDAL,
folderCommitService,
projectDAL
}: TSecretFolderServiceFactoryDep) => {
const createFolder = async ({
@@ -111,15 +114,33 @@ export const secretFolderServiceFactory = ({
});
parentFolderId = newFolders.at(-1)?.id as string;
const docs = await folderDAL.insertMany(newFolders, tx);
await folderVersionDAL.insertMany(
const folderVersions = await folderVersionDAL.insertMany(
docs.map((doc) => ({
name: doc.name,
envId: doc.envId,
version: doc.version,
folderId: doc.id
folderId: doc.id,
description: doc.description
})),
tx
);
await folderCommitService.createCommit(
{
actor: {
type: actor,
metadata: {
id: actorId
}
},
message: "Folder created",
folderId: parentFolderId,
changes: folderVersions.map((fv) => ({
type: CommitType.ADD,
folderVersionId: fv.id
}))
},
tx
);
}
}
@@ -127,12 +148,32 @@ export const secretFolderServiceFactory = ({
{ name, envId: env.id, version: 1, parentId: parentFolderId, description },
tx
);
await folderVersionDAL.create(
const folderVersion = await folderVersionDAL.create(
{
name: doc.name,
envId: doc.envId,
version: doc.version,
folderId: doc.id
folderId: doc.id,
description: doc.description
},
tx
);
await folderCommitService.createCommit(
{
actor: {
type: actor,
metadata: {
id: actorId
}
},
message: "Folder created",
folderId: parentFolderId,
changes: [
{
type: CommitType.ADD,
folderVersionId: folderVersion.id
}
]
},
tx
);
@@ -225,12 +266,33 @@ export const secretFolderServiceFactory = ({
{ name, description },
tx
);
await folderVersionDAL.create(
const folderVersion = await folderVersionDAL.create(
{
name: doc.name,
envId: doc.envId,
version: doc.version,
folderId: doc.id
folderId: doc.id,
description: doc.description
},
tx
);
await folderCommitService.createCommit(
{
actor: {
type: actor,
metadata: {
id: actorId
}
},
message: "Folder updated",
folderId: parentFolder.id,
changes: [
{
type: CommitType.ADD,
isUpdate: true,
folderVersionId: folderVersion.id
}
]
},
tx
);
@@ -321,12 +383,33 @@ export const secretFolderServiceFactory = ({
{ name, description },
tx
);
await folderVersionDAL.create(
const folderVersion = await folderVersionDAL.create(
{
name: doc.name,
envId: doc.envId,
version: doc.version,
folderId: doc.id
folderId: doc.id,
description: doc.description
},
tx
);
await folderCommitService.createCommit(
{
actor: {
type: actor,
metadata: {
id: actorId
}
},
message: "Folder updated",
folderId: parentFolder.id,
changes: [
{
type: CommitType.ADD,
isUpdate: true,
folderVersionId: folderVersion.id
}
]
},
tx
);
@@ -381,7 +464,31 @@ export const secretFolderServiceFactory = ({
},
tx
);
if (!doc) throw new NotFoundError({ message: `Failed to delete folder with ID '${idOrName}', not found` });
const folderVersions = await folderVersionDAL.findLatestFolderVersions([doc.id], tx);
await folderCommitService.createCommit(
{
actor: {
type: actor,
metadata: {
id: actorId
}
},
message: "Folder deleted",
folderId: parentFolder.id,
changes: [
{
type: CommitType.DELETE,
folderVersionId: folderVersions[doc.id].id,
folderId: doc.id
}
]
},
tx
);
return doc;
});
@@ -665,6 +772,45 @@ export const secretFolderServiceFactory = ({
return environmentFolders;
};
const getFolderVersionsByIds = async ({
folderId,
folderVersions
}: {
folderId: string;
folderVersions: string[];
}) => {
const versions = await folderVersionDAL.find({
folderId,
$in: {
version: folderVersions.map((v) => Number.parseInt(v, 10))
}
});
return versions;
};
const getFolderVersions = async (
change: {
folderVersion?: string;
isUpdate?: boolean;
changeType?: string;
},
fromVersion: string,
folderId: string
) => {
const currentVersion = change.folderVersion || "1";
// eslint-disable-next-line no-await-in-loop
const versions = await getFolderVersionsByIds({
folderId,
folderVersions:
change.isUpdate || change.changeType === ChangeType.UPDATE ? [currentVersion, fromVersion] : [currentVersion]
});
return versions.map((v) => ({
version: v.version?.toString() || "1",
name: v.name,
description: v.description
}));
};
return {
createFolder,
updateFolder,
@@ -675,6 +821,8 @@ export const secretFolderServiceFactory = ({
getProjectFolderCount,
getFoldersMultiEnv,
getFoldersDeepByEnvs,
getProjectEnvironmentsFolders
getProjectEnvironmentsFolders,
getFolderVersionsByIds,
getFolderVersions
};
};
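
`getFolderVersions` is what lets the commit-changes view render a before/after pair for folder updates: for an update it fetches both the changed version and `fromVersion`, otherwise only the changed version. Usage sketch (ids and names invented):

```typescript
const pair = await secretFolderService.getFolderVersions(
  { folderVersion: "3", isUpdate: true },
  "2",            // fromVersion: the version the change moved away from
  "folder-uuid"
);
// e.g. [{ version: "3", name: "new-name", description: null },
//       { version: "2", name: "old-name", description: null }]
```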


@@ -43,7 +43,7 @@ export const secretFolderVersionDALFactory = (db: TDbClient) => {
const docs: Array<TSecretFolderVersions & { max: number }> = await (tx || db.replicaNode())(
TableName.SecretFolderVersion
)
.whereIn("folderId", folderIds)
.whereIn(`${TableName.SecretFolderVersion}.folderId`, folderIds)
.join(
(tx || db)(TableName.SecretFolderVersion)
.groupBy("folderId")
@@ -85,6 +85,8 @@ export const secretFolderVersionDALFactory = (db: TDbClient) => {
.join(TableName.Project, `${TableName.Project}.id`, `${TableName.Environment}.projectId`)
.join("folder_cte", "folder_cte.id", `${TableName.SecretFolderVersion}.id`)
.whereRaw(`folder_cte.row_num > ${TableName.Project}."pitVersionLimit"`)
// Projects with version >= 3 must retain all folder versions for PIT
.andWhere(`${TableName.Project}.version`, "<", 3)
.delete();
} catch (error) {
throw new DatabaseError({
@@ -95,5 +97,107 @@ export const secretFolderVersionDALFactory = (db: TDbClient) => {
logger.info(`${QueueName.DailyResourceCleanUp}: pruning secret folder versions completed`);
};
return { ...secretFolderVerOrm, findLatestFolderVersions, findLatestVersionByFolderId, pruneExcessVersions };
// Get latest versions by folderIds
const getLatestFolderVersions = async (folderIds: string[], tx?: Knex): Promise<Array<TSecretFolderVersions>> => {
if (!folderIds.length) return [];
const knexInstance = tx || db.replicaNode();
return knexInstance(TableName.SecretFolderVersion)
.whereIn(`${TableName.SecretFolderVersion}.folderId`, folderIds)
.join(
knexInstance(TableName.SecretFolderVersion)
.groupBy("folderId")
.max("version")
.select("folderId")
.as("latestVersion"),
(bd) => {
bd.on(`${TableName.SecretFolderVersion}.folderId`, "latestVersion.folderId").andOn(
`${TableName.SecretFolderVersion}.version`,
"latestVersion.max"
);
}
);
};
// Get specific versions and update with max version
const getSpecificFolderVersionsWithLatest = async (
versionIds: string[],
tx?: Knex
): Promise<Array<TSecretFolderVersions>> => {
if (!versionIds.length) return [];
const knexInstance = tx || db.replicaNode();
// Get specific versions
const specificVersions = await knexInstance(TableName.SecretFolderVersion).whereIn("id", versionIds);
// Get folderIds from these versions
const specificFolderIds = [...new Set(specificVersions.map((v) => v.folderId).filter(Boolean))];
if (!specificFolderIds.length) return specificVersions;
// Get max versions for these folderIds
const maxVersionsQuery = await knexInstance(TableName.SecretFolderVersion)
.whereIn("folderId", specificFolderIds)
.groupBy("folderId")
.select("folderId")
.max("version", { as: "maxVersion" });
// Create lookup map for max versions
const maxVersionMap = maxVersionsQuery.reduce<Record<string, number>>((acc, item) => {
if (item.maxVersion) {
acc[item.folderId] = item.maxVersion;
}
return acc;
}, {});
// Replace version with max version
return specificVersions.map((version) => ({
...version,
version: maxVersionMap[version.folderId] || version.version
}));
};
const findByIdsWithLatestVersion = async (folderIds: string[], versionIds?: string[], tx?: Knex) => {
try {
if (!folderIds.length && (!versionIds || !versionIds.length)) return {};
// Run both queries in parallel
const [latestVersions, specificVersionsWithLatest] = await Promise.all([
folderIds.length ? getLatestFolderVersions(folderIds, tx) : [],
versionIds?.length ? getSpecificFolderVersionsWithLatest(versionIds, tx) : []
]);
const allDocs = [...latestVersions, ...specificVersionsWithLatest];
// Convert array to record with folderId as key
return allDocs.reduce<Record<string, TSecretFolderVersions>>(
(prev, curr) => ({ ...prev, [curr.folderId || ""]: curr }),
{}
);
} catch (error) {
throw new DatabaseError({ error, name: "FindByIdsWithLatestVersion" });
}
};
const findLatestVersion = async (folderId: string, tx?: Knex) => {
try {
const doc = await (tx || db.replicaNode())(TableName.SecretFolderVersion)
.where(`${TableName.SecretFolderVersion}.folderId`, folderId)
.select(selectAllTableCols(TableName.SecretFolderVersion))
.first();
return doc;
} catch (error) {
throw new DatabaseError({ error, name: "findLatestVersion" });
}
};
return {
...secretFolderVerOrm,
findLatestFolderVersions,
findLatestVersionByFolderId,
pruneExcessVersions,
findByIdsWithLatestVersion,
findLatestVersion
};
};
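
`findByIdsWithLatestVersion` merges two parallel lookups into one record keyed by folderId: the latest version of each requested folder, plus any explicitly requested version rows, the latter rewritten to carry their folder's maximum version number. Usage sketch (ids invented):

```typescript
const byFolder = await folderVersionDAL.findByIdsWithLatestVersion(
  ["folder-a", "folder-b"],      // resolve the latest version of each folder
  ["specific-version-uuid"]      // plus this exact row, bumped to its folder's max version
);
// byFolder["folder-a"] -> the TSecretFolderVersions row with the highest version
```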


@@ -1,3 +1,63 @@
export enum GcpSyncScope {
Global = "global"
Global = "global",
Region = "region"
}
export enum GCPSecretManagerLocation {
// Asia Pacific
ASIA_SOUTHEAST3 = "asia-southeast3", // Bangkok
ASIA_SOUTH2 = "asia-south2", // Delhi
ASIA_EAST2 = "asia-east2", // Hong Kong
ASIA_SOUTHEAST2 = "asia-southeast2", // Jakarta
AUSTRALIA_SOUTHEAST2 = "australia-southeast2", // Melbourne
ASIA_SOUTH1 = "asia-south1", // Mumbai
ASIA_NORTHEAST2 = "asia-northeast2", // Osaka
ASIA_NORTHEAST3 = "asia-northeast3", // Seoul
ASIA_SOUTHEAST1 = "asia-southeast1", // Singapore
AUSTRALIA_SOUTHEAST1 = "australia-southeast1", // Sydney
ASIA_EAST1 = "asia-east1", // Taiwan
ASIA_NORTHEAST1 = "asia-northeast1", // Tokyo
// Europe
EUROPE_WEST1 = "europe-west1", // Belgium
EUROPE_WEST10 = "europe-west10", // Berlin
EUROPE_NORTH1 = "europe-north1", // Finland
EUROPE_NORTH2 = "europe-north2", // Stockholm
EUROPE_WEST3 = "europe-west3", // Frankfurt
EUROPE_WEST2 = "europe-west2", // London
EUROPE_SOUTHWEST1 = "europe-southwest1", // Madrid
EUROPE_WEST8 = "europe-west8", // Milan
EUROPE_WEST4 = "europe-west4", // Netherlands
EUROPE_WEST12 = "europe-west12", // Turin
EUROPE_WEST9 = "europe-west9", // Paris
EUROPE_CENTRAL2 = "europe-central2", // Warsaw
EUROPE_WEST6 = "europe-west6", // Zurich
// North America
US_CENTRAL1 = "us-central1", // Iowa
US_WEST4 = "us-west4", // Las Vegas
US_WEST2 = "us-west2", // Los Angeles
NORTHAMERICA_SOUTH1 = "northamerica-south1", // Mexico
NORTHAMERICA_NORTHEAST1 = "northamerica-northeast1", // Montréal
US_EAST4 = "us-east4", // Northern Virginia
US_CENTRAL2 = "us-central2", // Oklahoma
US_WEST1 = "us-west1", // Oregon
US_WEST3 = "us-west3", // Salt Lake City
US_EAST1 = "us-east1", // South Carolina
NORTHAMERICA_NORTHEAST2 = "northamerica-northeast2", // Toronto
US_EAST5 = "us-east5", // Columbus
US_SOUTH1 = "us-south1", // Dallas
US_WEST8 = "us-west8", // Phoenix
// South America
SOUTHAMERICA_EAST1 = "southamerica-east1", // São Paulo
SOUTHAMERICA_WEST1 = "southamerica-west1", // Santiago
// Middle East
ME_CENTRAL2 = "me-central2", // Dammam
ME_CENTRAL1 = "me-central1", // Doha
ME_WEST1 = "me-west1", // Tel Aviv
// Africa
AFRICA_SOUTH1 = "africa-south1" // Johannesburg
}


@@ -4,6 +4,7 @@ import { request } from "@app/lib/config/request";
import { logger } from "@app/lib/logger";
import { getGcpConnectionAuthToken } from "@app/services/app-connection/gcp";
import { IntegrationUrls } from "@app/services/integration-auth/integration-list";
import { GcpSyncScope } from "@app/services/secret-sync/gcp/gcp-sync-enums";
import { matchesSchema } from "@app/services/secret-sync/secret-sync-fns";
import { SecretSyncError } from "../secret-sync-errors";
@@ -15,9 +16,17 @@ import {
TGcpSyncWithCredentials
} from "./gcp-sync-types";
const getGcpSecrets = async (accessToken: string, secretSync: TGcpSyncWithCredentials) => {
const getProjectUrl = (secretSync: TGcpSyncWithCredentials) => {
const { destinationConfig } = secretSync;
if (destinationConfig.scope === GcpSyncScope.Global) {
return `${IntegrationUrls.GCP_SECRET_MANAGER_URL}/v1/projects/${destinationConfig.projectId}`;
}
return `https://secretmanager.${destinationConfig.locationId}.rep.googleapis.com/v1/projects/${destinationConfig.projectId}/locations/${destinationConfig.locationId}`;
};
const getGcpSecrets = async (accessToken: string, secretSync: TGcpSyncWithCredentials) => {
let gcpSecrets: GCPSecret[] = [];
const pageSize = 100;
@@ -31,16 +40,13 @@ const getGcpSecrets = async (accessToken: string, secretSync: TGcpSyncWithCreden
});
// eslint-disable-next-line no-await-in-loop
const { data: secretsRes } = await request.get<GCPSMListSecretsRes>(
`${IntegrationUrls.GCP_SECRET_MANAGER_URL}/v1/projects/${secretSync.destinationConfig.projectId}/secrets`,
{
params,
headers: {
Authorization: `Bearer ${accessToken}`,
"Accept-Encoding": "application/json"
}
const { data: secretsRes } = await request.get<GCPSMListSecretsRes>(`${getProjectUrl(secretSync)}/secrets`, {
params,
headers: {
Authorization: `Bearer ${accessToken}`,
"Accept-Encoding": "application/json"
}
);
});
if (secretsRes.secrets) {
gcpSecrets = gcpSecrets.concat(secretsRes.secrets);
@@ -61,7 +67,7 @@ const getGcpSecrets = async (accessToken: string, secretSync: TGcpSyncWithCreden
try {
const { data: secretLatest } = await request.get<GCPLatestSecretVersionAccess>(
`${IntegrationUrls.GCP_SECRET_MANAGER_URL}/v1/projects/${destinationConfig.projectId}/secrets/${key}/versions/latest:access`,
`${getProjectUrl(secretSync)}/secrets/${key}/versions/latest:access`,
{
headers: {
Authorization: `Bearer ${accessToken}`,
@@ -113,11 +119,14 @@ export const GcpSyncFns = {
if (!(key in gcpSecrets)) {
// case: create secret
await request.post(
`${IntegrationUrls.GCP_SECRET_MANAGER_URL}/v1/projects/${destinationConfig.projectId}/secrets`,
`${getProjectUrl(secretSync)}/secrets`,
{
replication: {
automatic: {}
}
replication:
destinationConfig.scope === GcpSyncScope.Global
? {
automatic: {}
}
: undefined
},
{
params: {
@@ -131,7 +140,7 @@ export const GcpSyncFns = {
);
await request.post(
`${IntegrationUrls.GCP_SECRET_MANAGER_URL}/v1/projects/${destinationConfig.projectId}/secrets/${key}:addVersion`,
`${getProjectUrl(secretSync)}/secrets/${key}:addVersion`,
{
payload: {
data: Buffer.from(secretMap[key].value).toString("base64")
@@ -163,15 +172,12 @@ export const GcpSyncFns = {
if (secretSync.syncOptions.disableSecretDeletion) continue;
// case: delete secret
await request.delete(
`${IntegrationUrls.GCP_SECRET_MANAGER_URL}/v1/projects/${destinationConfig.projectId}/secrets/${key}`,
{
headers: {
Authorization: `Bearer ${accessToken}`,
"Accept-Encoding": "application/json"
}
await request.delete(`${getProjectUrl(secretSync)}/secrets/${key}`, {
headers: {
Authorization: `Bearer ${accessToken}`,
"Accept-Encoding": "application/json"
}
);
});
} else if (secretMap[key].value !== gcpSecrets[key]) {
if (!secretMap[key].value) {
logger.warn(
@@ -180,7 +186,7 @@ export const GcpSyncFns = {
}
await request.post(
`${IntegrationUrls.GCP_SECRET_MANAGER_URL}/v1/projects/${destinationConfig.projectId}/secrets/${key}:addVersion`,
`${getProjectUrl(secretSync)}/secrets/${key}:addVersion`,
{
payload: {
data: Buffer.from(secretMap[key].value).toString("base64")
@@ -212,21 +218,18 @@ export const GcpSyncFns = {
},
removeSecrets: async (secretSync: TGcpSyncWithCredentials, secretMap: TSecretMap) => {
const { destinationConfig, connection } = secretSync;
const { connection } = secretSync;
const accessToken = await getGcpConnectionAuthToken(connection);
const gcpSecrets = await getGcpSecrets(accessToken, secretSync);
for await (const [key] of Object.entries(gcpSecrets)) {
if (key in secretMap) {
await request.delete(
`${IntegrationUrls.GCP_SECRET_MANAGER_URL}/v1/projects/${destinationConfig.projectId}/secrets/${key}`,
{
headers: {
Authorization: `Bearer ${accessToken}`,
"Accept-Encoding": "application/json"
}
await request.delete(`${getProjectUrl(secretSync)}/secrets/${key}`, {
headers: {
Authorization: `Bearer ${accessToken}`,
"Accept-Encoding": "application/json"
}
);
});
}
}
}
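
Every GCP Secret Manager call above now routes through `getProjectUrl`, which swaps the global host for a regional `rep.googleapis.com` endpoint when the sync is region-scoped. The resulting endpoint shapes, assuming `IntegrationUrls.GCP_SECRET_MANAGER_URL` is the global `https://secretmanager.googleapis.com` host (project and location invented):

```typescript
const globalBase = "https://secretmanager.googleapis.com/v1/projects/my-proj";
const regionalBase =
  "https://secretmanager.us-central1.rep.googleapis.com/v1/projects/my-proj/locations/us-central1";
// getProjectUrl(secretSync) returns one of these bases; list/create/delete
// calls then append paths such as `/secrets` or `/secrets/<key>:addVersion`.
```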


```diff
@@ -10,14 +10,33 @@ import {
 import { TSyncOptionsConfig } from "@app/services/secret-sync/secret-sync-types";

 import { SecretSync } from "../secret-sync-enums";
-import { GcpSyncScope } from "./gcp-sync-enums";
+import { GCPSecretManagerLocation, GcpSyncScope } from "./gcp-sync-enums";

 const GcpSyncOptionsConfig: TSyncOptionsConfig = { canImportSecrets: true };

-const GcpSyncDestinationConfigSchema = z.object({
-  scope: z.literal(GcpSyncScope.Global).describe(SecretSyncs.DESTINATION_CONFIG.GCP.scope),
-  projectId: z.string().min(1, "Project ID is required").describe(SecretSyncs.DESTINATION_CONFIG.GCP.projectId)
-});
+const GcpSyncDestinationConfigSchema = z.discriminatedUnion("scope", [
+  z
+    .object({
+      scope: z.literal(GcpSyncScope.Global).describe(SecretSyncs.DESTINATION_CONFIG.GCP.scope),
+      projectId: z.string().min(1, "Project ID is required").describe(SecretSyncs.DESTINATION_CONFIG.GCP.projectId)
+    })
+    .describe(
+      JSON.stringify({
+        title: "Global"
+      })
+    ),
+  z
+    .object({
+      scope: z.literal(GcpSyncScope.Region).describe(SecretSyncs.DESTINATION_CONFIG.GCP.scope),
+      projectId: z.string().min(1, "Project ID is required").describe(SecretSyncs.DESTINATION_CONFIG.GCP.projectId),
+      locationId: z.nativeEnum(GCPSecretManagerLocation).describe(SecretSyncs.DESTINATION_CONFIG.GCP.locationId)
+    })
+    .describe(
+      JSON.stringify({
+        title: "Region"
+      })
+    )
+]);

 export const GcpSyncSchema = BaseSecretSyncSchema(SecretSync.GCPSecretManager, GcpSyncOptionsConfig).extend({
   destination: z.literal(SecretSync.GCPSecretManager),
```
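The schema change replaces a single object with a `z.discriminatedUnion` keyed on `scope`, so a region config must carry a `locationId` while a global config is never asked for one. A stripped-down sketch of the same pattern, with Infisical's `describe()` metadata and location enum omitted; the string enum values are assumptions:

```typescript
import { z } from "zod";

// Assumed enum values; the real GcpSyncScope lives in ./gcp-sync-enums.
enum GcpSyncScope {
  Global = "global",
  Region = "region"
}

const DestinationConfigSchema = z.discriminatedUnion("scope", [
  z.object({
    scope: z.literal(GcpSyncScope.Global),
    projectId: z.string().min(1, "Project ID is required")
  }),
  z.object({
    scope: z.literal(GcpSyncScope.Region),
    projectId: z.string().min(1, "Project ID is required"),
    // The real schema narrows this with z.nativeEnum(GCPSecretManagerLocation).
    locationId: z.string().min(1)
  })
]);

// zod selects the branch from the literal "scope" value, so errors point at
// the right variant instead of a generic union failure.
DestinationConfigSchema.parse({ scope: GcpSyncScope.Global, projectId: "my-project" }); // ok

const bad = DestinationConfigSchema.safeParse({ scope: GcpSyncScope.Region, projectId: "my-project" });
// bad.success === false: the Region branch requires locationId
```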


```diff
@@ -59,6 +59,7 @@ import { TSecretVersionV2TagDALFactory } from "@app/services/secret-v2-bridge/se
 import { SmtpTemplates, TSmtpService } from "@app/services/smtp/smtp-service";

 import { TAppConnectionDALFactory } from "../app-connection/app-connection-dal";
+import { TFolderCommitServiceFactory } from "../folder-commit/folder-commit-service";

 export type TSecretSyncQueueFactory = ReturnType<typeof secretSyncQueueFactory>;
@@ -94,6 +95,7 @@ type TSecretSyncQueueFactoryDep = {
   secretVersionV2BridgeDAL: Pick<TSecretVersionV2DALFactory, "insertMany" | "findLatestVersionMany">;
   secretVersionTagV2BridgeDAL: Pick<TSecretVersionV2TagDALFactory, "insertMany">;
   resourceMetadataDAL: Pick<TResourceMetadataDALFactory, "insertMany" | "delete">;
+  folderCommitService: Pick<TFolderCommitServiceFactory, "createCommit">;
   licenseService: Pick<TLicenseServiceFactory, "getPlan">;
 };
@@ -136,6 +138,7 @@ export const secretSyncQueueFactory = ({
   secretVersionV2BridgeDAL,
   secretVersionTagV2BridgeDAL,
   resourceMetadataDAL,
+  folderCommitService,
   licenseService
 }: TSecretSyncQueueFactoryDep) => {
   const appCfg = getConfig();
@@ -167,7 +170,8 @@ export const secretSyncQueueFactory = ({
     secretVersionV2BridgeDAL,
     secretV2BridgeDAL,
     secretVersionTagV2BridgeDAL,
-    resourceMetadataDAL
+    resourceMetadataDAL,
+    folderCommitService
   });

   const $updateManySecretsRawFn = updateManySecretsRawFnFactory({
@@ -183,7 +187,8 @@ export const secretSyncQueueFactory = ({
     secretVersionV2BridgeDAL,
     secretV2BridgeDAL,
     secretVersionTagV2BridgeDAL,
-    resourceMetadataDAL
+    resourceMetadataDAL,
+    folderCommitService
   });

   const $getInfisicalSecrets = async (
@@ -373,7 +378,7 @@ export const secretSyncQueueFactory = ({
         if (Object.hasOwn(secretMap, key)) {
           // Only update secrets if the source value is not empty
-          if (value) {
+          if (value && value !== secretMap[key].value) {
             secretsToUpdate.push(secret);

             if (importBehavior === SecretSyncImportBehavior.PrioritizeDestination) importedSecretMap[key] = secretData;
           }
```
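The one-line guard change at the end is easy to miss: imports previously re-wrote a destination secret whenever it had any value, and now skip it when the value already matches what Infisical holds. A toy illustration of the effect, with names simplified:

```typescript
// Simplified model of the import loop's update guard; types are illustrative.
type SecretMap = Record<string, { value: string }>;

const planUpdates = (infisicalSecrets: SecretMap, destinationSecrets: Record<string, string>): string[] => {
  const secretsToUpdate: string[] = [];

  for (const [key, value] of Object.entries(destinationSecrets)) {
    if (!Object.hasOwn(infisicalSecrets, key)) continue;

    // Old guard: `if (value)` queued an update even when nothing changed.
    // New guard: also require the destination value to differ from ours.
    if (value && value !== infisicalSecrets[key].value) {
      secretsToUpdate.push(key);
    }
  }

  return secretsToUpdate;
};

// planUpdates({ FOO: { value: "a" } }, { FOO: "a" }) -> []       (identical: skipped)
// planUpdates({ FOO: { value: "a" } }, { FOO: "b" }) -> ["FOO"]  (changed: updated)
```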


```diff
@@ -11,6 +11,7 @@ export const secretTagDALFactory = (db: TDbClient) => {
   const secretTagOrm = ormify(db, TableName.SecretTag);
   const secretJnTagOrm = ormify(db, TableName.JnSecretTag);
   const secretV2JnTagOrm = ormify(db, TableName.SecretV2JnTag);
+  const secretVersionV2TagOrm = ormify(db, TableName.SecretVersionV2Tag);

   const findManyTagsById = async (projectId: string, ids: string[], tx?: Knex) => {
     try {
@@ -48,14 +49,39 @@ export const secretTagDALFactory = (db: TDbClient) => {
     }
   };

+  const findSecretTagsByVersionId = async (versionId: string, tx?: Knex) => {
+    try {
+      const tags = await (tx || db.replicaNode())(TableName.SecretVersionV2Tag)
+        .where(`${TableName.SecretVersionV2Tag}.${TableName.SecretVersionV2}Id`, versionId)
+        .select(selectAllTableCols(TableName.SecretVersionV2Tag));
+      return tags;
+    } catch (error) {
+      throw new DatabaseError({ error, name: "Find all by version id" });
+    }
+  };
+
+  const findSecretTagsBySecretId = async (secretId: string, tx?: Knex) => {
+    try {
+      const tags = await (tx || db.replicaNode())(TableName.SecretV2JnTag)
+        .where(`${TableName.SecretV2JnTag}.${TableName.SecretV2}Id`, secretId)
+        .select(selectAllTableCols(TableName.SecretV2JnTag));
+      return tags;
+    } catch (error) {
+      throw new DatabaseError({ error, name: "Find all by secret id" });
+    }
+  };
+
   return {
     ...secretTagOrm,
     saveTagsToSecret: secretJnTagOrm.insertMany,
     deleteTagsToSecret: secretJnTagOrm.delete,
     saveTagsToSecretV2: secretV2JnTagOrm.batchInsert,
     deleteTagsToSecretV2: secretV2JnTagOrm.delete,
+    saveTagsToSecretVersionV2: secretVersionV2TagOrm.insertMany,
     findSecretTagsByProjectId,
     deleteTagsManySecret,
-    findManyTagsById
+    findManyTagsById,
+    findSecretTagsByVersionId,
+    findSecretTagsBySecretId
   };
 };
```
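The DAL gains a version-level tag ORM plus two read helpers: one resolves tags pinned to an immutable secret version, the other the live junction rows for a secret. A hedged usage sketch; the factory surface and row shapes below are placeholders:

```typescript
import { Knex } from "knex";

// Placeholder surface of the DAL; the real factory exposes many more members.
type SecretTagDAL = {
  findSecretTagsByVersionId: (versionId: string, tx?: Knex) => Promise<unknown[]>;
  findSecretTagsBySecretId: (secretId: string, tx?: Knex) => Promise<unknown[]>;
};

const compareTagHistory = async (secretTagDAL: SecretTagDAL, secretId: string, versionId: string) => {
  // Tags as they were recorded against one specific secret version...
  const versionTags = await secretTagDAL.findSecretTagsByVersionId(versionId);

  // ...versus the junction rows attached to the current secret.
  const currentTags = await secretTagDAL.findSecretTagsBySecretId(secretId);

  return { versionTags, currentTags };
};
```

Both helpers follow the rest of the factory's convention of reading from `db.replicaNode()` unless a transaction is passed in.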


```diff
@@ -8,6 +8,7 @@ import { groupBy } from "@app/lib/fn";
 import { logger } from "@app/lib/logger";

 import { ActorType } from "../auth/auth-type";
+import { CommitType } from "../folder-commit/folder-commit-service";
 import { TProjectEnvDALFactory } from "../project-env/project-env-dal";
 import { ResourceMetadataDTO } from "../resource-metadata/resource-metadata-schema";
 import { INFISICAL_SECRET_VALUE_HIDDEN_MASK } from "../secret/secret-fns";
@@ -73,6 +74,7 @@ export const fnSecretBulkInsert = async ({
   resourceMetadataDAL,
   secretTagDAL,
   secretVersionTagDAL,
+  folderCommitService,
   actor,
   tx
 }: TFnSecretBulkInsert) => {
@@ -126,11 +128,36 @@ export const fnSecretBulkInsert = async ({
       userActorId,
       identityActorId,
       actorType,
+      metadata: el.metadata ? JSON.stringify(el.metadata) : [],
       secretId: newSecretGroupedByKeyName[el.key][0].id
     })),
     tx
   );

+  const commitChanges = secretVersions
+    .filter(({ type }) => type === SecretType.Shared)
+    .map((sv) => ({
+      type: CommitType.ADD,
+      secretVersionId: sv.id
+    }));
+
+  if (commitChanges.length > 0) {
+    await folderCommitService.createCommit(
+      {
+        actor: {
+          type: actorType || ActorType.PLATFORM,
+          metadata: {
+            id: actor?.actorId
+          }
+        },
+        message: "Secret Created",
+        folderId,
+        changes: commitChanges
+      },
+      tx
+    );
+  }
+
   await secretDAL.upsertSecretReferences(
     inputSecrets.map(({ references = [], key }) => ({
       secretId: newSecretGroupedByKeyName[key][0].id,
@@ -185,6 +212,7 @@ export const fnSecretBulkUpdate = async ({
   orgId,
   secretDAL,
   secretVersionDAL,
+  folderCommitService,
   secretTagDAL,
   secretVersionTagDAL,
   resourceMetadataDAL,
@@ -246,7 +274,7 @@ export const fnSecretBulkUpdate = async ({
             userId,
             encryptedComment,
             version,
-            metadata,
+            metadata: metadata ? JSON.stringify(metadata) : [],
             reminderNote,
             encryptedValue,
             reminderRepeatDays,
@@ -259,6 +287,7 @@ export const fnSecretBulkUpdate = async ({
       ),
       tx
     );
+
   await secretDAL.upsertSecretReferences(
     inputSecrets
       .filter(({ data: { references } }) => Boolean(references))
@@ -329,6 +358,31 @@ export const fnSecretBulkUpdate = async ({
     },
     { tx }
   );

+  const commitChanges = secretVersions
+    .filter(({ type }) => type === SecretType.Shared)
+    .map((sv) => ({
+      type: CommitType.ADD,
+      isUpdate: true,
+      secretVersionId: sv.id
+    }));
+
+  if (commitChanges.length > 0) {
+    await folderCommitService.createCommit(
+      {
+        actor: {
+          type: actorType || ActorType.PLATFORM,
+          metadata: {
+            id: actor?.actorId
+          }
+        },
+        message: "Secret Updated",
+        folderId,
+        changes: commitChanges
+      },
+      tx
+    );
+  }
+
   return secretsWithTags.map((secret) => ({ ...secret, _id: secret.id }));
 };
@@ -337,8 +391,11 @@ export const fnSecretBulkDelete = async ({
   inputSecrets,
   tx,
   actorId,
+  actorType,
   secretDAL,
-  secretQueueService
+  secretQueueService,
+  folderCommitService,
+  secretVersionDAL
 }: TFnSecretBulkDelete) => {
   const deletedSecrets = await secretDAL.deleteMany(
     inputSecrets.map(({ type, secretKey }) => ({
@@ -358,6 +415,35 @@ export const fnSecretBulkDelete = async ({
     )
   );

+  const secretVersions = await secretVersionDAL.findLatestVersionMany(
+    folderId,
+    deletedSecrets.map(({ id }) => id),
+    tx
+  );
+
+  const commitChanges = deletedSecrets
+    .filter(({ type }) => type === SecretType.Shared)
+    .map(({ id }) => ({
+      type: CommitType.DELETE,
+      secretVersionId: secretVersions[id].id
+    }));
+
+  if (commitChanges.length > 0) {
+    await folderCommitService.createCommit(
+      {
+        actor: {
+          type: actorType || ActorType.PLATFORM,
+          metadata: {
+            id: actorId
+          }
+        },
+        message: "Secret Deleted",
+        folderId,
+        changes: commitChanges
+      },
+      tx
+    );
+  }
+
   return deletedSecrets;
 };
```
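All three bulk functions now follow the same bookkeeping recipe: keep only shared secrets, map each affected version to a change record, and call `folderCommitService.createCommit` inside the same transaction only when there is something to record. Distilled into one helper; the types and enum string values here are simplified assumptions:

```typescript
// Simplified stand-ins for the real app types.
enum SecretType {
  Shared = "shared",
  Personal = "personal"
}

enum CommitType {
  ADD = "add",
  DELETE = "delete"
}

type SecretVersion = { id: string; type: SecretType };
type CommitChange = { type: CommitType; secretVersionId: string; isUpdate?: boolean };

const buildCommitChanges = (
  versions: SecretVersion[],
  commitType: CommitType,
  isUpdate = false
): CommitChange[] =>
  versions
    // Personal secrets never enter folder history; only shared ones are committed.
    .filter(({ type }) => type === SecretType.Shared)
    .map((sv) => ({
      type: commitType,
      // Updates are recorded as ADDs of a new version, flagged with isUpdate.
      ...(isUpdate ? { isUpdate: true } : {}),
      secretVersionId: sv.id
    }));

// buildCommitChanges([{ id: "v2", type: SecretType.Shared }], CommitType.ADD, true)
//   -> [{ type: "add", isUpdate: true, secretVersionId: "v2" }]
```

Guarding on `commitChanges.length > 0` keeps all-personal batches from producing empty commits.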


```diff
@@ -17,6 +17,7 @@ import {
 import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
 import {
   ProjectPermissionActions,
+  ProjectPermissionCommitsActions,
   ProjectPermissionSecretActions,
   ProjectPermissionSet,
   ProjectPermissionSub
@@ -34,6 +35,7 @@ import { logger } from "@app/lib/logger";
 import { alphaNumericNanoId } from "@app/lib/nanoid";

 import { ActorType } from "../auth/auth-type";
+import { TFolderCommitServiceFactory } from "../folder-commit/folder-commit-service";
 import { TKmsServiceFactory } from "../kms/kms-service";
 import { KmsDataKey } from "../kms/kms-types";
 import { TProjectEnvDALFactory } from "../project-env/project-env-dal";
@@ -90,6 +92,7 @@ type TSecretV2BridgeServiceFactoryDep = {
   secretVersionTagDAL: Pick<TSecretVersionV2TagDALFactory, "insertMany">;
   secretTagDAL: TSecretTagDALFactory;
   permissionService: Pick<TPermissionServiceFactory, "getProjectPermission">;
+  folderCommitService: Pick<TFolderCommitServiceFactory, "createCommit">;
   projectEnvDAL: Pick<TProjectEnvDALFactory, "findOne" | "findBySlugs">;
   folderDAL: Pick<
     TSecretFolderDALFactory,
@@ -124,6 +127,7 @@ export const secretV2BridgeServiceFactory = ({
   projectEnvDAL,
   secretTagDAL,
   secretVersionDAL,
+  folderCommitService,
   folderDAL,
   permissionService,
   snapshotService,
@@ -321,12 +325,14 @@ export const secretV2BridgeServiceFactory = ({
           userId: inputSecret.type === SecretType.Personal ? actorId : null,
           tagIds: inputSecret.tagIds,
           references: nestedReferences,
+          metadata: secretMetadata ? JSON.stringify(secretMetadata) : [],
           secretMetadata
         }
       ],
       resourceMetadataDAL,
       secretDAL,
       secretVersionDAL,
+      folderCommitService,
       secretTagDAL,
       secretVersionTagDAL,
       actor: {
@@ -510,6 +516,7 @@ export const secretV2BridgeServiceFactory = ({
       folderId,
       orgId: actorOrgId,
       resourceMetadataDAL,
+      folderCommitService,
       inputSecrets: [
         {
           filter: { id: secretId },
@@ -523,6 +530,7 @@ export const secretV2BridgeServiceFactory = ({
           skipMultilineEncoding: inputSecret.skipMultilineEncoding,
           key: inputSecret.newSecretName || secretName,
           tags: inputSecret.tagIds,
+          metadata: secretMetadata ? JSON.stringify(secretMetadata) : [],
           secretMetadata,
           ...encryptedValue
         }
@@ -650,6 +658,9 @@ export const secretV2BridgeServiceFactory = ({
       projectId,
       folderId,
       actorId,
+      actorType: actor,
+      folderCommitService,
+      secretVersionDAL,
       secretDAL,
       secretQueueService,
       inputSecrets: [
@@ -1590,6 +1601,7 @@ export const secretV2BridgeServiceFactory = ({
       orgId: actorOrgId,
       secretDAL,
       resourceMetadataDAL,
+      folderCommitService,
       secretVersionDAL,
       secretTagDAL,
       secretVersionTagDAL,
@@ -1859,6 +1871,7 @@ export const secretV2BridgeServiceFactory = ({
       const bulkUpdatedSecrets = await fnSecretBulkUpdate({
         folderId,
         orgId: actorOrgId,
+        folderCommitService,
         tx,
         inputSecrets: secretsToUpdate.map((el) => {
           const originalSecret = secretsToUpdateInDBGroupedByKey[el.secretKey][0];
@@ -1928,6 +1941,7 @@ export const secretV2BridgeServiceFactory = ({
       secretVersionDAL,
       secretTagDAL,
       secretVersionTagDAL,
+      folderCommitService,
       actor: {
         type: actor,
         actorId
@@ -2061,6 +2075,8 @@ export const secretV2BridgeServiceFactory = ({
       fnSecretBulkDelete({
         secretDAL,
         secretQueueService,
+        folderCommitService,
+        secretVersionDAL,
        inputSecrets: inputSecrets.map(({ type, secretKey }) => ({
           secretKey,
           type: type || SecretType.Shared
@@ -2068,6 +2084,7 @@ export const secretV2BridgeServiceFactory = ({
         projectId,
         folderId,
         actorId,
+        actorType: actor,
         tx
       })
     );
@@ -2159,15 +2176,25 @@ export const secretV2BridgeServiceFactory = ({
       actorOrgId,
       actionProjectType: ActionProjectType.SecretManager
     });

-    ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.SecretRollback);
+    const canRead =
+      permission.can(ProjectPermissionActions.Read, ProjectPermissionSub.SecretRollback) ||
+      permission.can(ProjectPermissionCommitsActions.Read, ProjectPermissionSub.Commits);
+
+    if (!canRead) throw new ForbiddenRequestError({ message: "You do not have permission to read secret versions" });

     const { decryptor: secretManagerDecryptor } = await kmsService.createCipherPairWithDataKey({
       type: KmsDataKey.SecretManager,
       projectId: folder.projectId
     });

-    const secretVersions = await secretVersionDAL.findVersionsBySecretIdWithActors(secretId, folder.projectId, {
-      offset,
-      limit,
-      sort: [["createdAt", "desc"]]
+    const secretVersions = await secretVersionDAL.findVersionsBySecretIdWithActors({
+      secretId,
+      projectId: folder.projectId,
+      findOpt: {
+        offset,
+        limit,
+        sort: [["createdAt", "desc"]]
+      }
     });

     return secretVersions.map((el) => {
       const secretValueHidden = !hasSecretReadValueOrDescribePermission(
@@ -2469,6 +2496,7 @@ export const secretV2BridgeServiceFactory = ({
           tx,
           secretTagDAL,
           resourceMetadataDAL,
+          folderCommitService,
           secretVersionTagDAL,
           actor: {
             type: actor,
@@ -2495,6 +2523,7 @@ export const secretV2BridgeServiceFactory = ({
           folderId: destinationFolder.id,
           orgId: actorOrgId,
           resourceMetadataDAL,
+          folderCommitService,
           secretVersionDAL,
           secretDAL,
           tx,
@@ -2840,6 +2869,76 @@ export const secretV2BridgeServiceFactory = ({
     };
   };

+  const getSecretVersionsByIds = async ({
+    actorId,
+    actor,
+    actorOrgId,
+    actorAuthMethod,
+    secretId,
+    secretVersionNumbers,
+    secretPath,
+    envId,
+    projectId
+  }: TGetSecretVersionsDTO & {
+    secretVersionNumbers: string[];
+    secretPath: string;
+    envId: string;
+    projectId: string;
+  }) => {
+    const environment = await projectEnvDAL.findOne({ id: envId, projectId });
+
+    const { permission } = await permissionService.getProjectPermission({
+      actor,
+      actorId,
+      projectId,
+      actorAuthMethod,
+      actorOrgId,
+      actionProjectType: ActionProjectType.SecretManager
+    });
+
+    const canRead =
+      permission.can(ProjectPermissionActions.Read, ProjectPermissionSub.SecretRollback) ||
+      permission.can(ProjectPermissionCommitsActions.Read, ProjectPermissionSub.Commits);
+
+    if (!canRead) throw new ForbiddenRequestError({ message: "You do not have permission to read secret versions" });
+
+    const { decryptor: secretManagerDecryptor } = await kmsService.createCipherPairWithDataKey({
+      type: KmsDataKey.SecretManager,
+      projectId
+    });
+
+    const secretVersions = await secretVersionDAL.findVersionsBySecretIdWithActors({
+      secretId,
+      projectId,
+      secretVersions: secretVersionNumbers
+    });
+
+    return secretVersions.map((el) => {
+      const secretValueHidden = !hasSecretReadValueOrDescribePermission(
+        permission,
+        ProjectPermissionSecretActions.ReadValue,
+        {
+          environment: environment.slug,
+          secretPath,
+          secretName: el.key,
+          ...(el.tags?.length && {
+            secretTags: el.tags.map((tag) => tag.slug)
+          })
+        }
+      );
+
+      return reshapeBridgeSecret(
+        projectId,
+        environment.slug,
+        secretPath,
+        {
+          ...el,
+          value: el.encryptedValue ? secretManagerDecryptor({ cipherTextBlob: el.encryptedValue }).toString() : "",
+          comment: el.encryptedComment ? secretManagerDecryptor({ cipherTextBlob: el.encryptedComment }).toString() : ""
+        },
+        secretValueHidden
+      );
+    });
+  };
+
   return {
     createSecret,
     deleteSecret,
@@ -2858,6 +2957,7 @@ export const secretV2BridgeServiceFactory = ({
     getSecretReferenceTree,
     getSecretsByFolderMappings,
     getSecretById,
-    getAccessibleSecrets
+    getAccessibleSecrets,
+    getSecretVersionsByIds
   };
 };
```
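Both version-reading paths replace the single `throwUnlessCan` assertion with an explicit OR over two permissions, since either the legacy `SecretRollback` read or the new `Commits` read should unlock version history. A minimal CASL-style sketch of the pattern; the action and subject strings are illustrative, not the app's real enums:

```typescript
// Minimal stand-ins; the real code uses CASL abilities and app-specific enums.
type Permission = { can: (action: string, subject: string) => boolean };

class ForbiddenRequestError extends Error {
  constructor(message: string) {
    super(message);
    this.name = "ForbiddenRequestError";
  }
}

const assertCanReadSecretVersions = (permission: Permission): void => {
  // throwUnlessCan can only assert one action/subject pair; checking manually
  // lets either of two independent permissions satisfy the endpoint.
  const canRead =
    permission.can("read", "secret-rollback") || permission.can("read", "commits");

  if (!canRead) {
    throw new ForbiddenRequestError("You do not have permission to read secret versions");
  }
};

// assertCanReadSecretVersions({ can: () => false })            -> throws
// assertCanReadSecretVersions({ can: (a, s) => s === "commits" }) -> passes
```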

Some files were not shown because too many files have changed in this diff.