Compare commits

...

89 Commits

Author SHA1 Message Date
Maidul Islam
7949142ea7 update text for secret params 2025-07-28 23:32:05 -04:00
Sheen Capadngan
57fcfdaf21 Merge remote-tracking branch 'origin/main' into feature/secrets-detection-in-secrets-manager 2025-07-29 04:57:54 +08:00
Sheen Capadngan
e430abfc9e misc: addressed comments 2025-07-29 04:56:50 +08:00
Scott Wilson
7d1bc86702 Merge pull request #4236 from Infisical/improve-access-denied-banner-design
improvement(frontend): revise access restricted banner and refactor/update relevant locations
2025-07-28 10:31:14 -07:00
Scott Wilson
975b621bc8 fix: remove passthrough on banner guard for kms pages 2025-07-28 10:26:22 -07:00
Daniel Hougaard
ba9da3e6ec Merge pull request #4254 from Infisical/allow-click-outside-close-rotation-modal
improvement(frontend): remove click outside modal close disabling on various modals
2025-07-28 21:06:33 +04:00
carlosmonastyrski
d2274a622a Merge pull request #4251 from Infisical/fix/azureOAuthSeparateEnvVars
Separate Azure OAuth env vars to different env variables for each app connection
2025-07-28 14:06:01 -03:00
Scott Wilson
41ba7edba2 improvement: remove click outside modal close disabling on sync/data source/rotation modals 2025-07-28 09:50:18 -07:00
carlosmonastyrski
7acefbca29 Merge pull request #4220 from Infisical/feat/multipleApprovalEnvs
Allow multiple environments on secret and access policies
2025-07-28 12:22:40 -03:00
Daniel Hougaard
e246f6bbfe Merge pull request #4252 from Infisical/daniel/form-data-cve
Daniel/form data CVE
2025-07-28 19:01:27 +04:00
Carlos Monastyrski
f265fa6d37 Minor improvements to azure multi env variables 2025-07-28 10:14:21 -03:00
Daniel Hougaard
8eebd7228f Update package.json 2025-07-28 16:43:13 +04:00
Daniel Hougaard
2a5593ea30 update axios in oidc sink server 2025-07-28 16:42:21 +04:00
Daniel Hougaard
17af33372c uninstall axios in root 2025-07-28 16:40:58 +04:00
Daniel Hougaard
27da14df9d Fix CVE's 2025-07-28 16:40:20 +04:00
Carlos Monastyrski
cd4b9cd03a Improve azure client secrets env var name 2025-07-28 09:30:37 -03:00
Carlos Monastyrski
0779091d1f Separate Azure OAuth env vars to different env variables for each app connection 2025-07-28 09:14:43 -03:00
Maidul Islam
c421057cf1 Merge pull request #4250 from Infisical/fix/oracle-db-rotation-failing
fix: potential fix for oracle db rotation failing
2025-07-27 14:47:08 -04:00
Akhil Mohan
8df4616265 Update backend/src/ee/services/secret-rotation-v2/shared/sql-credentials/sql-credentials-rotation-fns.ts
Co-authored-by: greptile-apps[bot] <165735046+greptile-apps[bot]@users.noreply.github.com>
2025-07-28 00:09:30 +05:30
=
484f34a257 fix: potential fix for oracle db rotation failing 2025-07-28 00:03:01 +05:30
carlosmonastyrski
32851565a7 Merge pull request #4247 from Infisical/fix/azureClientSecretsPermissions
Fix/azure client secrets permissions
2025-07-25 20:52:04 -03:00
Carlos Monastyrski
68401a799e Fix env variables name on doc 2025-07-25 20:48:18 -03:00
Carlos Monastyrski
0adf2c830d Fix azure client secrets OAuth URL to use graph instead of vault 2025-07-25 20:47:17 -03:00
Carlos Monastyrski
3400a8f911 Small UI fix for environments label 2025-07-25 17:24:15 -03:00
Carlos Monastyrski
e6588b5d0e Set correct environmentName on listApprovalRequests 2025-07-25 17:00:11 -03:00
Daniel Hougaard
c68138ac21 Merge pull request #4245 from Infisical/daniel/fips-improvements
fix(fips): increased image size and migrations
2025-07-25 23:40:27 +04:00
Carlos Monastyrski
608979efa7 Merge branch 'main' into feat/multipleApprovalEnvs 2025-07-25 16:29:04 -03:00
Sheen Capadngan
585cb1b30c misc: used promise all 2025-07-26 03:26:24 +08:00
Sheen Capadngan
7fdee073d8 misc: add secret checker in change policy branch 2025-07-26 03:16:39 +08:00
Daniel Hougaard
d4f0301104 Update Dockerfile.fips.standalone-infisical 2025-07-25 23:13:26 +04:00
Daniel Hougaard
253c46f21d fips improvements 2025-07-25 23:09:23 +04:00
Maidul Islam
d8e39aed16 Merge pull request #4243 from Infisical/fix/secretReminderMigration
Add manual migration to secret imports rework
2025-07-25 15:01:04 -04:00
Sheen Capadngan
c368178cb1 feat: secrets detection in secret manager 2025-07-26 03:00:44 +08:00
Carlos Monastyrski
72ee468208 Remove previous queue running the migration 2025-07-25 15:20:23 -03:00
carlosmonastyrski
18238b46a7 Merge pull request #4229 from Infisical/feat/azureClientSecretsNewAuth
Add client secrets authentication on Azure CS app connection
2025-07-25 15:00:49 -03:00
Carlos Monastyrski
d0ffae2c10 Add uuid validation to Azure client secrets 2025-07-25 14:53:46 -03:00
Carlos Monastyrski
7ce11cde95 Add cycle logic to next reminder migration 2025-07-25 14:47:57 -03:00
Carlos Monastyrski
af32948a05 Minor improvements on reminders migration 2025-07-25 13:35:06 -03:00
Daniel Hougaard
25753fc995 Merge pull request #4242 from Infisical/daniel/render-sync-auto-redeploy
feat(secret-sync/render): auto redeploy on sync
2025-07-25 20:31:47 +04:00
Carlos Monastyrski
cd71848800 Avoid migrating existing reminders 2025-07-25 13:10:54 -03:00
Carlos Monastyrski
4afc7a1981 Add manual migration to secret imports rework 2025-07-25 13:06:29 -03:00
Daniel Hougaard
11ca76ccca fix: restructure and requested changes 2025-07-25 20:05:20 +04:00
Daniel Hougaard
418aca8af0 feat(secret-sync/render): auto redeploy on sync 2025-07-25 19:50:28 +04:00
Carlos Monastyrski
99e8bdef58 Minor fixes on policies multi env migration 2025-07-25 01:37:25 -03:00
Carlos Monastyrski
7365f60835 Small code improvements 2025-07-25 01:23:01 -03:00
Scott Wilson
929822514e Merge pull request #4230 from Infisical/secret-dashboard-sing-env-col-resize
improvement(frontend): add col resize to secret dashboard env view
2025-07-24 20:08:18 -07:00
Daniel Hougaard
616ccb97f2 Merge pull request #4238 from Infisical/daniel/docs-fix
Update docs.json
2025-07-25 04:59:32 +04:00
Daniel Hougaard
7917a767e6 Update docs.json 2025-07-25 04:57:15 +04:00
carlosmonastyrski
ccff675e0d Merge pull request #4237 from Infisical/fix/remindersMigrationFix
Fix secret reminders migration job
2025-07-24 21:25:47 -03:00
Carlos Monastyrski
ad905b2ff7 Fix secret reminders migration job 2025-07-24 20:42:39 -03:00
Scott Wilson
4e960445a4 chore: remove unused tw css 2025-07-24 15:56:14 -07:00
Scott Wilson
7af5a4ad8d improvement: revise access restricted banner and refactor/update relevant locations 2025-07-24 15:52:29 -07:00
carlosmonastyrski
2ada753527 Merge pull request #4235 from Infisical/fix/renderRateLimit
Improve render retries and rate limits
2025-07-24 19:07:17 -03:00
Carlos Monastyrski
c031736701 Improve render api usage 2025-07-24 18:51:44 -03:00
Daniel Hougaard
91a1c34637 Merge pull request #4211 from Infisical/daniel/vault-import
feat(external-migrations): vault migrations
2025-07-25 01:16:50 +04:00
Carlos Monastyrski
eadb1a63fa Improve render retries and rate limits 2025-07-24 17:49:28 -03:00
Scott Wilson
f70a1e3db6 Merge pull request #4233 from Infisical/fix-identity-role-invalidation
fix(frontend): correct org identity mutation table invalidation
2025-07-24 12:17:03 -07:00
Scott Wilson
fc6ab94a06 fix: correct org identity mutation table invalidation 2025-07-24 12:08:41 -07:00
Scott Wilson
4feb3314e7 Merge pull request #4232 from Infisical/create-project-modal-dropdown
improvement(frontend): Adjust select dropdown styling in add project modal
2025-07-24 11:57:23 -07:00
Scott Wilson
d9a57d1391 fix: make side prop optional 2025-07-24 11:50:05 -07:00
Scott Wilson
2c99d41592 improvement: adjust select dropdown styling in add project modal 2025-07-24 11:42:04 -07:00
Scott Wilson
2535d1bc4b Merge pull request #4228 from Infisical/project-audit-logs-page
feature(project-audit-logs): add project audit logs pages
2025-07-24 10:49:02 -07:00
Scott Wilson
83e59ae160 feature: add col resize to secret dashboard env view 2025-07-24 10:18:57 -07:00
x032205
a8a1bc5f4a Merge pull request #4227 from Infisical/ENG-3345
feat(machine-identity): Add AWS attributes for ABAC
2025-07-24 11:59:17 -04:00
Daniel Hougaard
d2a4f265de Update ExternalMigrationsTab.tsx 2025-07-24 19:58:29 +04:00
x032205
3483f185a8 Doc tweaks 2025-07-24 11:44:10 -04:00
Scott Wilson
9bc24487b3 Merge pull request #4216 from Infisical/dashboard-filter-improvements
improvement(frontend): improve dashboard filter behavior and design
2025-07-24 08:33:24 -07:00
Daniel Hougaard
4af872e504 fix: ui state 2025-07-24 19:14:50 +04:00
Daniel Hougaard
716b88fa49 requested changes and docs 2025-07-24 19:09:24 +04:00
Carlos Monastyrski
b05ea8a69a Fix migration 2025-07-24 12:07:01 -03:00
Carlos Monastyrski
0d97bb4c8c Merge branch 'main' into feat/multipleApprovalEnvs 2025-07-24 12:03:07 -03:00
Maidul Islam
cb700c5124 Merge pull request #4183 from Infisical/fix/oracle-app-connection
fix: resolved oracle failing in app connection
2025-07-24 09:57:10 -04:00
=
8e829bdf85 fix: resolved oracle failing in app connection 2025-07-24 19:23:52 +05:30
Daniel Hougaard
716f061c01 Merge branch 'heads/main' into daniel/vault-import 2025-07-24 17:29:55 +04:00
Carlos Monastyrski
5af939992c Update docs 2025-07-24 10:04:25 -03:00
Carlos Monastyrski
aec4ee905e Add client secrets authentication on Azure CS app connection 2025-07-24 09:40:54 -03:00
Scott Wilson
dd008724fb fix type error 2025-07-23 18:26:01 -07:00
Scott Wilson
dd0c07fb95 improvements: remove fixed css 2025-07-23 18:18:59 -07:00
Scott Wilson
d935b28925 feature: add project audit logs 2025-07-23 16:48:54 -07:00
x032205
60620840f2 Tweaks 2025-07-23 16:48:06 -04:00
x032205
e798eb2a4e feat(machine-identity): Add AWS attributes for ABAC 2025-07-23 16:30:55 -04:00
Scott Wilson
e96e7b835d improvements: address feedback 2025-07-23 12:43:48 -07:00
Carlos Monastyrski
60657f0bc6 Addressed PR suggestions 2025-07-23 10:37:23 -03:00
Carlos Monastyrski
05408bc151 Allow multiple environments on secret and access policies 2025-07-23 09:54:41 -03:00
Scott Wilson
e76e0f7bcc improvement: improve dashboard filter behavior and design 2025-07-22 17:14:45 -07:00
Daniel Hougaard
464e32b0e9 Update VaultPlatformModal.tsx 2025-07-22 13:04:00 +04:00
Daniel Hougaard
bfd8b64871 requested changes 2025-07-22 02:15:21 +04:00
Daniel Hougaard
185cc4efba Update VaultPlatformModal copy.tsx 2025-07-22 01:50:28 +04:00
Daniel Hougaard
7150b9314d feat(external-migrations): vault migrations 2025-07-22 01:35:02 +04:00
178 changed files with 4594 additions and 1391 deletions

View File

@@ -123,8 +123,17 @@ INF_APP_CONNECTION_GITHUB_RADAR_APP_WEBHOOK_SECRET=
INF_APP_CONNECTION_GCP_SERVICE_ACCOUNT_CREDENTIAL=
# azure app connection
INF_APP_CONNECTION_AZURE_CLIENT_ID=
INF_APP_CONNECTION_AZURE_CLIENT_SECRET=
INF_APP_CONNECTION_AZURE_APP_CONFIGURATION_CLIENT_ID=
INF_APP_CONNECTION_AZURE_APP_CONFIGURATION_CLIENT_SECRET=
INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_ID=
INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_SECRET=
INF_APP_CONNECTION_AZURE_CLIENT_SECRETS_CLIENT_ID=
INF_APP_CONNECTION_AZURE_CLIENT_SECRETS_CLIENT_SECRET=
INF_APP_CONNECTION_AZURE_DEVOPS_CLIENT_ID=
INF_APP_CONNECTION_AZURE_DEVOPS_CLIENT_SECRET=
# datadog
SHOULD_USE_DATADOG_TRACER=
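
The diff above replaces the single shared Azure OAuth credential pair with one pair per app connection. As a rough sketch (not the actual Infisical code; the helper name and error handling are illustrative), a backend could resolve the right pair from the new variable names like this:

type AzureAppConnection = "APP_CONFIGURATION" | "KEY_VAULT" | "CLIENT_SECRETS" | "DEVOPS";

// Resolve the per-connection client ID/secret introduced above; throw if the pair is missing.
const getAzureOAuthCredentials = (connection: AzureAppConnection) => {
  const clientId = process.env[`INF_APP_CONNECTION_AZURE_${connection}_CLIENT_ID`];
  const clientSecret = process.env[`INF_APP_CONNECTION_AZURE_${connection}_CLIENT_SECRET`];
  if (!clientId || !clientSecret) {
    throw new Error(`Azure OAuth credentials are not configured for connection '${connection}'`);
  }
  return { clientId, clientSecret };
};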

View File

@@ -145,7 +145,11 @@ RUN wget https://www.openssl.org/source/openssl-3.1.2.tar.gz \
&& cd openssl-3.1.2 \
&& ./Configure enable-fips \
&& make \
&& make install_fips
&& make install_fips \
&& cd / \
&& rm -rf /openssl-build \
&& apt-get clean \
&& rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*
# Install Infisical CLI
RUN curl -1sLf 'https://artifacts-cli.infisical.com/setup.deb.sh' | bash \
@@ -186,12 +190,11 @@ ENV NODE_ENV production
ENV STANDALONE_BUILD true
ENV STANDALONE_MODE true
ENV ChrystokiConfigurationPath=/usr/safenet/lunaclient/
ENV NODE_OPTIONS="--max-old-space-size=1024"
ENV NODE_OPTIONS="--max-old-space-size=8192 --force-fips"
# FIPS mode of operation:
ENV OPENSSL_CONF=/backend/nodejs.fips.cnf
ENV OPENSSL_MODULES=/usr/local/lib/ossl-modules
ENV NODE_OPTIONS=--force-fips
ENV FIPS_ENABLED=true
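
With NODE_OPTIONS now carrying --force-fips and the OpenSSL FIPS module configured via OPENSSL_CONF and OPENSSL_MODULES, a quick runtime sanity check (illustrative only, not part of the image) is Node's built-in crypto.getFips():

import crypto from "node:crypto";

// getFips() returns 1 when a FIPS-compliant provider is active, 0 otherwise.
if (crypto.getFips() !== 1) {
  throw new Error("FIPS mode is not active");
}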

View File

@@ -59,7 +59,11 @@ RUN wget https://www.openssl.org/source/openssl-3.1.2.tar.gz \
&& cd openssl-3.1.2 \
&& ./Configure enable-fips \
&& make \
&& make install_fips
&& make install_fips \
&& cd / \
&& rm -rf /openssl-build \
&& apt-get clean \
&& rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*
# ? App setup

View File

@@ -7,6 +7,7 @@
"": {
"name": "backend",
"version": "1.0.0",
"hasInstallScript": true,
"license": "ISC",
"dependencies": {
"@aws-sdk/client-elasticache": "^3.637.0",
@@ -61,7 +62,7 @@
"ajv": "^8.12.0",
"argon2": "^0.31.2",
"aws-sdk": "^2.1553.0",
"axios": "^1.6.7",
"axios": "^1.11.0",
"axios-retry": "^4.0.0",
"bcrypt": "^5.1.1",
"botbuilder": "^4.23.2",
@@ -13699,14 +13700,16 @@
}
},
"node_modules/@types/request/node_modules/form-data": {
"version": "2.5.2",
"resolved": "https://registry.npmjs.org/form-data/-/form-data-2.5.2.tgz",
"integrity": "sha512-GgwY0PS7DbXqajuGf4OYlsrIu3zgxD6Vvql43IBhm6MahqA5SK/7mwhtNj2AdH2z35YR34ujJ7BN+3fFC3jP5Q==",
"version": "2.5.5",
"resolved": "https://registry.npmjs.org/form-data/-/form-data-2.5.5.tgz",
"integrity": "sha512-jqdObeR2rxZZbPSGL+3VckHMYtu+f9//KXBsVny6JSX/pa38Fy+bGjuG8eW/H6USNQWhLi8Num++cU2yOCNz4A==",
"license": "MIT",
"dependencies": {
"asynckit": "^0.4.0",
"combined-stream": "^1.0.6",
"mime-types": "^2.1.12",
"combined-stream": "^1.0.8",
"es-set-tostringtag": "^2.1.0",
"hasown": "^2.0.2",
"mime-types": "^2.1.35",
"safe-buffer": "^5.2.1"
},
"engines": {
@@ -15230,13 +15233,13 @@
}
},
"node_modules/axios": {
"version": "1.7.9",
"resolved": "https://registry.npmjs.org/axios/-/axios-1.7.9.tgz",
"integrity": "sha512-LhLcE7Hbiryz8oMDdDptSrWowmB4Bl6RCt6sIJKpRB4XtVf0iEgewX3au/pJqm+Py1kCASkb/FFKjxQaLtxJvw==",
"version": "1.11.0",
"resolved": "https://registry.npmjs.org/axios/-/axios-1.11.0.tgz",
"integrity": "sha512-1Lx3WLFQWm3ooKDYZD1eXmoGO9fxYQjrycfHFC8P0sCfQVXyROp0p9PFWBehewBOdCwHc+f/b8I0fMto5eSfwA==",
"license": "MIT",
"dependencies": {
"follow-redirects": "^1.15.6",
"form-data": "^4.0.0",
"form-data": "^4.0.4",
"proxy-from-env": "^1.1.0"
}
},
@@ -18761,13 +18764,15 @@
}
},
"node_modules/form-data": {
"version": "4.0.2",
"resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.2.tgz",
"integrity": "sha512-hGfm/slu0ZabnNt4oaRZ6uREyfCj6P4fT/n6A1rGV+Z0VdGXjfOhVUpkn6qVQONHGIFwmveGXyDs75+nr6FM8w==",
"version": "4.0.4",
"resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.4.tgz",
"integrity": "sha512-KrGhL9Q4zjj0kiUt5OO4Mr/A/jlI2jDYs5eHBpYHPcBEVSiipAvn2Ko2HnPe20rmcuuvMHNdZFp+4IlGTMF0Ow==",
"license": "MIT",
"dependencies": {
"asynckit": "^0.4.0",
"combined-stream": "^1.0.8",
"es-set-tostringtag": "^2.1.0",
"hasown": "^2.0.2",
"mime-types": "^2.1.12"
},
"engines": {

View File

@@ -181,7 +181,7 @@
"ajv": "^8.12.0",
"argon2": "^0.31.2",
"aws-sdk": "^2.1553.0",
"axios": "^1.6.7",
"axios": "^1.11.0",
"axios-retry": "^4.0.0",
"bcrypt": "^5.1.1",
"botbuilder": "^4.23.2",

View File

@@ -126,6 +126,15 @@ declare module "@fastify/request-context" {
namespace: string;
name: string;
};
aws?: {
accountId: string;
arn: string;
userId: string;
partition: string;
service: string;
resourceType: string;
resourceName: string;
};
};
identityPermissionMetadata?: Record<string, unknown>; // filled by permission service
assumedPrivilegeDetails?: { requesterId: string; actorId: string; actorType: ActorType; projectId: string };
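
The aws block above attaches AWS caller attributes to the request context for ABAC. A standalone sketch of that shape and a small formatting helper (the helper is illustrative and not the actual request-context lookup):

type AwsAuthAttributes = {
  accountId: string;
  arn: string;
  userId: string;
  partition: string;
  service: string;
  resourceType: string;
  resourceName: string;
};

// Summarize the AWS caller identity, e.g. for logging or permission metadata.
const describeAwsIdentity = (aws?: AwsAuthAttributes): string =>
  aws
    ? `${aws.partition}:${aws.service} ${aws.resourceType}/${aws.resourceName} in account ${aws.accountId}`
    : "no AWS attributes";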

View File

@@ -489,6 +489,11 @@ import {
TWorkflowIntegrationsInsert,
TWorkflowIntegrationsUpdate
} from "@app/db/schemas";
import {
TAccessApprovalPoliciesEnvironments,
TAccessApprovalPoliciesEnvironmentsInsert,
TAccessApprovalPoliciesEnvironmentsUpdate
} from "@app/db/schemas/access-approval-policies-environments";
import {
TIdentityLdapAuths,
TIdentityLdapAuthsInsert,
@@ -510,6 +515,11 @@ import {
TRemindersRecipientsInsert,
TRemindersRecipientsUpdate
} from "@app/db/schemas/reminders-recipients";
import {
TSecretApprovalPoliciesEnvironments,
TSecretApprovalPoliciesEnvironmentsInsert,
TSecretApprovalPoliciesEnvironmentsUpdate
} from "@app/db/schemas/secret-approval-policies-environments";
import {
TSecretReminderRecipients,
TSecretReminderRecipientsInsert,
@@ -887,6 +897,12 @@ declare module "knex/types/tables" {
TAccessApprovalPoliciesBypassersUpdate
>;
[TableName.AccessApprovalPolicyEnvironment]: KnexOriginal.CompositeTableType<
TAccessApprovalPoliciesEnvironments,
TAccessApprovalPoliciesEnvironmentsInsert,
TAccessApprovalPoliciesEnvironmentsUpdate
>;
[TableName.AccessApprovalRequest]: KnexOriginal.CompositeTableType<
TAccessApprovalRequests,
TAccessApprovalRequestsInsert,
@@ -935,6 +951,11 @@ declare module "knex/types/tables" {
TSecretApprovalRequestSecretTagsInsert,
TSecretApprovalRequestSecretTagsUpdate
>;
[TableName.SecretApprovalPolicyEnvironment]: KnexOriginal.CompositeTableType<
TSecretApprovalPoliciesEnvironments,
TSecretApprovalPoliciesEnvironmentsInsert,
TSecretApprovalPoliciesEnvironmentsUpdate
>;
[TableName.SecretRotation]: KnexOriginal.CompositeTableType<
TSecretRotations,
TSecretRotationsInsert,

View File

@@ -0,0 +1,96 @@
import { Knex } from "knex";
import { selectAllTableCols } from "@app/lib/knex";
import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasTable(TableName.AccessApprovalPolicyEnvironment))) {
await knex.schema.createTable(TableName.AccessApprovalPolicyEnvironment, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.uuid("policyId").notNullable();
t.foreign("policyId").references("id").inTable(TableName.AccessApprovalPolicy).onDelete("CASCADE");
t.uuid("envId").notNullable();
t.foreign("envId").references("id").inTable(TableName.Environment);
t.timestamps(true, true, true);
t.unique(["policyId", "envId"]);
});
await createOnUpdateTrigger(knex, TableName.AccessApprovalPolicyEnvironment);
const existingAccessApprovalPolicies = await knex(TableName.AccessApprovalPolicy)
.select(selectAllTableCols(TableName.AccessApprovalPolicy))
.whereNotNull(`${TableName.AccessApprovalPolicy}.envId`);
const accessApprovalPolicies = existingAccessApprovalPolicies.map(async (policy) => {
await knex(TableName.AccessApprovalPolicyEnvironment).insert({
policyId: policy.id,
envId: policy.envId
});
});
await Promise.all(accessApprovalPolicies);
}
if (!(await knex.schema.hasTable(TableName.SecretApprovalPolicyEnvironment))) {
await knex.schema.createTable(TableName.SecretApprovalPolicyEnvironment, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.uuid("policyId").notNullable();
t.foreign("policyId").references("id").inTable(TableName.SecretApprovalPolicy).onDelete("CASCADE");
t.uuid("envId").notNullable();
t.foreign("envId").references("id").inTable(TableName.Environment);
t.timestamps(true, true, true);
t.unique(["policyId", "envId"]);
});
await createOnUpdateTrigger(knex, TableName.SecretApprovalPolicyEnvironment);
const existingSecretApprovalPolicies = await knex(TableName.SecretApprovalPolicy)
.select(selectAllTableCols(TableName.SecretApprovalPolicy))
.whereNotNull(`${TableName.SecretApprovalPolicy}.envId`);
const secretApprovalPolicies = existingSecretApprovalPolicies.map(async (policy) => {
await knex(TableName.SecretApprovalPolicyEnvironment).insert({
policyId: policy.id,
envId: policy.envId
});
});
await Promise.all(secretApprovalPolicies);
}
await knex.schema.alterTable(TableName.AccessApprovalPolicy, (t) => {
t.dropForeign(["envId"]);
// Add the new foreign key constraint with ON DELETE SET NULL
t.foreign("envId").references("id").inTable(TableName.Environment).onDelete("SET NULL");
});
await knex.schema.alterTable(TableName.SecretApprovalPolicy, (t) => {
t.dropForeign(["envId"]);
// Add the new foreign key constraint with ON DELETE SET NULL
t.foreign("envId").references("id").inTable(TableName.Environment).onDelete("SET NULL");
});
}
export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasTable(TableName.AccessApprovalPolicyEnvironment)) {
await knex.schema.dropTableIfExists(TableName.AccessApprovalPolicyEnvironment);
await dropOnUpdateTrigger(knex, TableName.AccessApprovalPolicyEnvironment);
}
if (await knex.schema.hasTable(TableName.SecretApprovalPolicyEnvironment)) {
await knex.schema.dropTableIfExists(TableName.SecretApprovalPolicyEnvironment);
await dropOnUpdateTrigger(knex, TableName.SecretApprovalPolicyEnvironment);
}
await knex.schema.alterTable(TableName.AccessApprovalPolicy, (t) => {
t.dropForeign(["envId"]);
t.foreign("envId").references("id").inTable(TableName.Environment).onDelete("CASCADE");
});
await knex.schema.alterTable(TableName.SecretApprovalPolicy, (t) => {
t.dropForeign(["envId"]);
t.foreign("envId").references("id").inTable(TableName.Environment).onDelete("CASCADE");
});
}
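
Once this migration runs, each policy's environments live in the new join tables. A minimal knex sketch (assuming a Knex instance named db; the table and column names come from the migration and the TableName additions below) for reading a policy's environment IDs:

import { Knex } from "knex";

// List environment IDs attached to one access approval policy via the new join table.
const getPolicyEnvIds = async (db: Knex, policyId: string): Promise<string[]> => {
  const rows = await db("access_approval_policies_environments").where({ policyId }).select("envId");
  return rows.map((row) => row.envId as string);
};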

View File

@@ -0,0 +1,111 @@
/* eslint-disable no-await-in-loop */
import { Knex } from "knex";
import { chunkArray } from "@app/lib/fn";
import { logger } from "@app/lib/logger";
import { TableName } from "../schemas";
import { TReminders, TRemindersInsert } from "../schemas/reminders";
export async function up(knex: Knex): Promise<void> {
logger.info("Initializing secret reminders migration");
const hasReminderTable = await knex.schema.hasTable(TableName.Reminder);
if (hasReminderTable) {
const secretsWithLatestVersions = await knex(TableName.SecretV2)
.whereNotNull(`${TableName.SecretV2}.reminderRepeatDays`)
.whereRaw(`"${TableName.SecretV2}"."reminderRepeatDays" > 0`)
.innerJoin(TableName.SecretVersionV2, (qb) => {
void qb
.on(`${TableName.SecretVersionV2}.secretId`, "=", `${TableName.SecretV2}.id`)
.andOn(`${TableName.SecretVersionV2}.reminderRepeatDays`, "=", `${TableName.SecretV2}.reminderRepeatDays`);
})
.whereIn([`${TableName.SecretVersionV2}.secretId`, `${TableName.SecretVersionV2}.version`], (qb) => {
void qb
.select(["v2.secretId", knex.raw("MIN(v2.version) as version")])
.from(`${TableName.SecretVersionV2} as v2`)
.innerJoin(`${TableName.SecretV2} as s2`, "v2.secretId", "s2.id")
.whereRaw(`v2."reminderRepeatDays" = s2."reminderRepeatDays"`)
.whereNotNull("v2.reminderRepeatDays")
.whereRaw(`v2."reminderRepeatDays" > 0`)
.groupBy("v2.secretId");
})
// Add LEFT JOIN with Reminder table to check for existing reminders
.leftJoin(TableName.Reminder, `${TableName.Reminder}.secretId`, `${TableName.SecretV2}.id`)
// Only include secrets that don't already have reminders
.whereNull(`${TableName.Reminder}.secretId`)
.select(
knex.ref("id").withSchema(TableName.SecretV2).as("secretId"),
knex.ref("reminderRepeatDays").withSchema(TableName.SecretV2).as("reminderRepeatDays"),
knex.ref("reminderNote").withSchema(TableName.SecretV2).as("reminderNote"),
knex.ref("createdAt").withSchema(TableName.SecretVersionV2).as("createdAt")
);
logger.info(`Found ${secretsWithLatestVersions.length} reminders to migrate`);
const reminderInserts: TRemindersInsert[] = [];
if (secretsWithLatestVersions.length > 0) {
secretsWithLatestVersions.forEach((secret) => {
if (!secret.reminderRepeatDays) return;
const now = new Date();
const createdAt = new Date(secret.createdAt);
let nextReminderDate = new Date(createdAt);
nextReminderDate.setDate(nextReminderDate.getDate() + secret.reminderRepeatDays);
// If the next reminder date is in the past, calculate the proper next occurrence
if (nextReminderDate < now) {
const daysSinceCreation = Math.floor((now.getTime() - createdAt.getTime()) / (1000 * 60 * 60 * 24));
const daysIntoCurrentCycle = daysSinceCreation % secret.reminderRepeatDays;
const daysUntilNextReminder = secret.reminderRepeatDays - daysIntoCurrentCycle;
nextReminderDate = new Date(now);
nextReminderDate.setDate(now.getDate() + daysUntilNextReminder);
}
reminderInserts.push({
secretId: secret.secretId,
message: secret.reminderNote,
repeatDays: secret.reminderRepeatDays,
nextReminderDate
});
});
const commitBatches = chunkArray(reminderInserts, 2000);
for (const commitBatch of commitBatches) {
const insertedReminders = (await knex
.batchInsert(TableName.Reminder, commitBatch)
.returning("*")) as TReminders[];
const insertedReminderSecretIds = insertedReminders.map((reminder) => reminder.secretId).filter(Boolean);
const recipients = await knex(TableName.SecretReminderRecipients)
.whereRaw(`??.?? IN (${insertedReminderSecretIds.map(() => "?").join(",")})`, [
TableName.SecretReminderRecipients,
"secretId",
...insertedReminderSecretIds
])
.select(
knex.ref("userId").withSchema(TableName.SecretReminderRecipients).as("userId"),
knex.ref("secretId").withSchema(TableName.SecretReminderRecipients).as("secretId")
);
const reminderRecipients = recipients.map((recipient) => ({
reminderId: insertedReminders.find((reminder) => reminder.secretId === recipient.secretId)?.id,
userId: recipient.userId
}));
const filteredRecipients = reminderRecipients.filter((recipient) => Boolean(recipient.reminderId));
await knex.batchInsert(TableName.ReminderRecipient, filteredRecipients);
}
logger.info(`Successfully migrated ${reminderInserts.length} secret reminders`);
}
logger.info("Secret reminders migration completed");
} else {
logger.warn("Reminder table does not exist, skipping migration");
}
}
export async function down(): Promise<void> {
logger.info("Rollback not implemented for secret reminders fix migration");
}
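
The cycle logic above rolls a stale reminder forward to its next occurrence. A worked example: a secret created 100 days ago with a 30-day reminder is 10 days into its current cycle, so the next reminder lands 20 days from now.

// Worked example of the cycle calculation above (values are illustrative).
const reminderRepeatDays = 30;
const daysSinceCreation = 100;
const daysIntoCurrentCycle = daysSinceCreation % reminderRepeatDays; // 10
const daysUntilNextReminder = reminderRepeatDays - daysIntoCurrentCycle; // 20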

View File

@@ -0,0 +1,19 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasColumn(TableName.Project, "secretDetectionIgnoreValues"))) {
await knex.schema.alterTable(TableName.Project, (t) => {
t.specificType("secretDetectionIgnoreValues", "text[]");
});
}
}
export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasColumn(TableName.Project, "secretDetectionIgnoreValues")) {
await knex.schema.alterTable(TableName.Project, (t) => {
t.dropColumn("secretDetectionIgnoreValues");
});
}
}
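
The new secretDetectionIgnoreValues column holds values a project excludes from secret detection. A minimal sketch of how such a list might be consulted (illustrative logic, not the actual scanning code):

// Skip scanning any value the project has explicitly ignored; null/undefined means nothing is ignored.
const shouldScanValue = (value: string, secretDetectionIgnoreValues?: string[] | null): boolean =>
  !(secretDetectionIgnoreValues ?? []).includes(value);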

View File

@@ -53,7 +53,7 @@ export const getMigrationEnvConfig = async (superAdminDAL: TSuperAdminDALFactory
let envCfg = Object.freeze(parsedEnv.data);
const fipsEnabled = await crypto.initialize(superAdminDAL);
const fipsEnabled = await crypto.initialize(superAdminDAL, envCfg);
// Fix for 128-bit entropy encryption key expansion issue:
// In FIPS it is not ideal to expand a 128-bit key into 256-bit. We solved this issue in the past by creating the ROOT_ENCRYPTION_KEY.

View File

@@ -0,0 +1,25 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const AccessApprovalPoliciesEnvironmentsSchema = z.object({
id: z.string().uuid(),
policyId: z.string().uuid(),
envId: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date()
});
export type TAccessApprovalPoliciesEnvironments = z.infer<typeof AccessApprovalPoliciesEnvironmentsSchema>;
export type TAccessApprovalPoliciesEnvironmentsInsert = Omit<
z.input<typeof AccessApprovalPoliciesEnvironmentsSchema>,
TImmutableDBKeys
>;
export type TAccessApprovalPoliciesEnvironmentsUpdate = Partial<
Omit<z.input<typeof AccessApprovalPoliciesEnvironmentsSchema>, TImmutableDBKeys>
>;
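
Using the generated schema follows the usual zod flow; a hedged example with placeholder values:

// Validate a raw DB row against the generated schema (IDs below are made up).
const row = AccessApprovalPoliciesEnvironmentsSchema.parse({
  id: "4f9c2d7a-8b1e-4c3f-9a6d-0e5b7c2d1a3f",
  policyId: "1a2b3c4d-5e6f-4a1b-8c9d-0e1f2a3b4c5d",
  envId: "9d8c7b6a-5f4e-4d3c-8b2a-1f0e9d8c7b6a",
  createdAt: new Date(),
  updatedAt: new Date()
});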

View File

@@ -100,6 +100,7 @@ export enum TableName {
AccessApprovalPolicyBypasser = "access_approval_policies_bypassers",
AccessApprovalRequest = "access_approval_requests",
AccessApprovalRequestReviewer = "access_approval_requests_reviewers",
AccessApprovalPolicyEnvironment = "access_approval_policies_environments",
SecretApprovalPolicy = "secret_approval_policies",
SecretApprovalPolicyApprover = "secret_approval_policies_approvers",
SecretApprovalPolicyBypasser = "secret_approval_policies_bypassers",
@@ -107,6 +108,7 @@ export enum TableName {
SecretApprovalRequestReviewer = "secret_approval_requests_reviewers",
SecretApprovalRequestSecret = "secret_approval_requests_secrets",
SecretApprovalRequestSecretTag = "secret_approval_request_secret_tags",
SecretApprovalPolicyEnvironment = "secret_approval_policies_environments",
SecretRotation = "secret_rotations",
SecretRotationOutput = "secret_rotation_outputs",
SamlConfig = "saml_configs",

View File

@@ -30,7 +30,8 @@ export const ProjectsSchema = z.object({
hasDeleteProtection: z.boolean().default(false).nullable().optional(),
secretSharing: z.boolean().default(true),
showSnapshotsLegacy: z.boolean().default(false),
defaultProduct: z.string().nullable().optional()
defaultProduct: z.string().nullable().optional(),
secretDetectionIgnoreValues: z.string().array().nullable().optional()
});
export type TProjects = z.infer<typeof ProjectsSchema>;

View File

@@ -0,0 +1,25 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const SecretApprovalPoliciesEnvironmentsSchema = z.object({
id: z.string().uuid(),
policyId: z.string().uuid(),
envId: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date()
});
export type TSecretApprovalPoliciesEnvironments = z.infer<typeof SecretApprovalPoliciesEnvironmentsSchema>;
export type TSecretApprovalPoliciesEnvironmentsInsert = Omit<
z.input<typeof SecretApprovalPoliciesEnvironmentsSchema>,
TImmutableDBKeys
>;
export type TSecretApprovalPoliciesEnvironmentsUpdate = Partial<
Omit<z.input<typeof SecretApprovalPoliciesEnvironmentsSchema>, TImmutableDBKeys>
>;

View File

@@ -17,52 +17,66 @@ export const registerAccessApprovalPolicyRouter = async (server: FastifyZodProvi
rateLimit: writeLimit
},
schema: {
body: z.object({
projectSlug: z.string().trim(),
name: z.string().optional(),
secretPath: z.string().trim().min(1, { message: "Secret path cannot be empty" }).transform(removeTrailingSlash),
environment: z.string(),
approvers: z
.discriminatedUnion("type", [
z.object({
type: z.literal(ApproverType.Group),
id: z.string(),
sequence: z.number().int().default(1)
}),
z.object({
type: z.literal(ApproverType.User),
id: z.string().optional(),
username: z.string().optional(),
sequence: z.number().int().default(1)
body: z
.object({
projectSlug: z.string().trim(),
name: z.string().optional(),
secretPath: z
.string()
.trim()
.min(1, { message: "Secret path cannot be empty" })
.transform(removeTrailingSlash),
environment: z.string().optional(),
environments: z.string().array().optional(),
approvers: z
.discriminatedUnion("type", [
z.object({
type: z.literal(ApproverType.Group),
id: z.string(),
sequence: z.number().int().default(1)
}),
z.object({
type: z.literal(ApproverType.User),
id: z.string().optional(),
username: z.string().optional(),
sequence: z.number().int().default(1)
})
])
.array()
.max(100, "Cannot have more than 100 approvers")
.min(1, { message: "At least one approver should be provided" })
.refine(
// @ts-expect-error this is ok
(el) => el.every((i) => Boolean(i?.id) || Boolean(i?.username)),
"Must provide either username or id"
),
bypassers: z
.discriminatedUnion("type", [
z.object({ type: z.literal(BypasserType.Group), id: z.string() }),
z.object({
type: z.literal(BypasserType.User),
id: z.string().optional(),
username: z.string().optional()
})
])
.array()
.max(100, "Cannot have more than 100 bypassers")
.optional(),
approvalsRequired: z
.object({
numberOfApprovals: z.number().int(),
stepNumber: z.number().int()
})
])
.array()
.max(100, "Cannot have more than 100 approvers")
.min(1, { message: "At least one approver should be provided" })
.refine(
// @ts-expect-error this is ok
(el) => el.every((i) => Boolean(i?.id) || Boolean(i?.username)),
"Must provide either username or id"
),
bypassers: z
.discriminatedUnion("type", [
z.object({ type: z.literal(BypasserType.Group), id: z.string() }),
z.object({ type: z.literal(BypasserType.User), id: z.string().optional(), username: z.string().optional() })
])
.array()
.max(100, "Cannot have more than 100 bypassers")
.optional(),
approvalsRequired: z
.object({
numberOfApprovals: z.number().int(),
stepNumber: z.number().int()
})
.array()
.optional(),
approvals: z.number().min(1).default(1),
enforcementLevel: z.nativeEnum(EnforcementLevel).default(EnforcementLevel.Hard),
allowedSelfApprovals: z.boolean().default(true)
}),
.array()
.optional(),
approvals: z.number().min(1).default(1),
enforcementLevel: z.nativeEnum(EnforcementLevel).default(EnforcementLevel.Hard),
allowedSelfApprovals: z.boolean().default(true)
})
.refine(
(val) => Boolean(val.environment) || Boolean(val.environments),
"Must provide either environment or environments"
),
response: {
200: z.object({
approval: sapPubSchema
@@ -78,7 +92,8 @@ export const registerAccessApprovalPolicyRouter = async (server: FastifyZodProvi
actorOrgId: req.permission.orgId,
...req.body,
projectSlug: req.body.projectSlug,
name: req.body.name ?? `${req.body.environment}-${nanoid(3)}`,
name:
req.body.name ?? `${req.body.environment || req.body.environments?.join("-").substring(0, 250)}-${nanoid(3)}`,
enforcementLevel: req.body.enforcementLevel
});
return { approval };
@@ -211,6 +226,7 @@ export const registerAccessApprovalPolicyRouter = async (server: FastifyZodProvi
approvals: z.number().min(1).optional(),
enforcementLevel: z.nativeEnum(EnforcementLevel).default(EnforcementLevel.Hard),
allowedSelfApprovals: z.boolean().default(true),
environments: z.array(z.string()).optional(),
approvalsRequired: z
.object({
numberOfApprovals: z.number().int(),
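
The reworked create-policy body above accepts either a single environment or an environments array, enforced by the refine at the end of the object. A standalone zod sketch of just that check (not the full route schema):

import { z } from "zod";

// Either `environment` or `environments` must be provided, mirroring the refine above.
const envSelection = z
  .object({
    environment: z.string().optional(),
    environments: z.string().array().optional()
  })
  .refine((val) => Boolean(val.environment) || Boolean(val.environments), "Must provide either environment or environments");

envSelection.parse({ environments: ["dev", "staging"] }); // passes
// envSelection.parse({}); // throws a ZodError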

View File

@@ -17,34 +17,45 @@ export const registerSecretApprovalPolicyRouter = async (server: FastifyZodProvi
rateLimit: writeLimit
},
schema: {
body: z.object({
workspaceId: z.string(),
name: z.string().optional(),
environment: z.string(),
secretPath: z
.string()
.min(1, { message: "Secret path cannot be empty" })
.transform((val) => removeTrailingSlash(val)),
approvers: z
.discriminatedUnion("type", [
z.object({ type: z.literal(ApproverType.Group), id: z.string() }),
z.object({ type: z.literal(ApproverType.User), id: z.string().optional(), username: z.string().optional() })
])
.array()
.min(1, { message: "At least one approver should be provided" })
.max(100, "Cannot have more than 100 approvers"),
bypassers: z
.discriminatedUnion("type", [
z.object({ type: z.literal(BypasserType.Group), id: z.string() }),
z.object({ type: z.literal(BypasserType.User), id: z.string().optional(), username: z.string().optional() })
])
.array()
.max(100, "Cannot have more than 100 bypassers")
.optional(),
approvals: z.number().min(1).default(1),
enforcementLevel: z.nativeEnum(EnforcementLevel).default(EnforcementLevel.Hard),
allowedSelfApprovals: z.boolean().default(true)
}),
body: z
.object({
workspaceId: z.string(),
name: z.string().optional(),
environment: z.string().optional(),
environments: z.string().array().optional(),
secretPath: z
.string()
.min(1, { message: "Secret path cannot be empty" })
.transform((val) => removeTrailingSlash(val)),
approvers: z
.discriminatedUnion("type", [
z.object({ type: z.literal(ApproverType.Group), id: z.string() }),
z.object({
type: z.literal(ApproverType.User),
id: z.string().optional(),
username: z.string().optional()
})
])
.array()
.min(1, { message: "At least one approver should be provided" })
.max(100, "Cannot have more than 100 approvers"),
bypassers: z
.discriminatedUnion("type", [
z.object({ type: z.literal(BypasserType.Group), id: z.string() }),
z.object({
type: z.literal(BypasserType.User),
id: z.string().optional(),
username: z.string().optional()
})
])
.array()
.max(100, "Cannot have more than 100 bypassers")
.optional(),
approvals: z.number().min(1).default(1),
enforcementLevel: z.nativeEnum(EnforcementLevel).default(EnforcementLevel.Hard),
allowedSelfApprovals: z.boolean().default(true)
})
.refine((data) => data.environment || data.environments, "At least one environment should be provided"),
response: {
200: z.object({
approval: sapPubSchema
@@ -60,7 +71,7 @@ export const registerSecretApprovalPolicyRouter = async (server: FastifyZodProvi
actorOrgId: req.permission.orgId,
projectId: req.body.workspaceId,
...req.body,
name: req.body.name ?? `${req.body.environment}-${nanoid(3)}`,
name: req.body.name ?? `${req.body.environment || req.body.environments?.join(",")}-${nanoid(3)}`,
enforcementLevel: req.body.enforcementLevel
});
return { approval };
@@ -103,7 +114,8 @@ export const registerSecretApprovalPolicyRouter = async (server: FastifyZodProvi
.optional()
.transform((val) => (val ? removeTrailingSlash(val) : undefined)),
enforcementLevel: z.nativeEnum(EnforcementLevel).optional(),
allowedSelfApprovals: z.boolean().default(true)
allowedSelfApprovals: z.boolean().default(true),
environments: z.array(z.string()).optional()
}),
response: {
200: z.object({

View File

@@ -26,6 +26,7 @@ export interface TAccessApprovalPolicyDALFactory
>,
customFilter?: {
policyId?: string;
envId?: string;
},
tx?: Knex
) => Promise<
@@ -55,11 +56,6 @@ export interface TAccessApprovalPolicyDALFactory
allowedSelfApprovals: boolean;
secretPath: string;
deletedAt?: Date | null | undefined;
environment: {
id: string;
name: string;
slug: string;
};
projectId: string;
bypassers: (
| {
@@ -72,6 +68,11 @@ export interface TAccessApprovalPolicyDALFactory
type: BypasserType.Group;
}
)[];
environments: {
id: string;
name: string;
slug: string;
}[];
}[]
>;
findById: (
@@ -95,11 +96,11 @@ export interface TAccessApprovalPolicyDALFactory
allowedSelfApprovals: boolean;
secretPath: string;
deletedAt?: Date | null | undefined;
environment: {
environments: {
id: string;
name: string;
slug: string;
};
}[];
projectId: string;
}
| undefined
@@ -143,6 +144,26 @@ export interface TAccessApprovalPolicyDALFactory
}
| undefined
>;
findPolicyByEnvIdAndSecretPath: (
{ envIds, secretPath }: { envIds: string[]; secretPath: string },
tx?: Knex
) => Promise<{
name: string;
id: string;
createdAt: Date;
updatedAt: Date;
approvals: number;
enforcementLevel: string;
allowedSelfApprovals: boolean;
secretPath: string;
deletedAt?: Date | null | undefined;
environments: {
id: string;
name: string;
slug: string;
}[];
projectId: string;
}>;
}
export interface TAccessApprovalPolicyServiceFactory {
@@ -367,6 +388,7 @@ export const accessApprovalPolicyDALFactory = (db: TDbClient): TAccessApprovalPo
filter: TFindFilter<TAccessApprovalPolicies & { projectId: string }>,
customFilter?: {
policyId?: string;
envId?: string;
}
) => {
const result = await tx(TableName.AccessApprovalPolicy)
@@ -377,7 +399,17 @@ export const accessApprovalPolicyDALFactory = (db: TDbClient): TAccessApprovalPo
void qb.where(`${TableName.AccessApprovalPolicy}.id`, "=", customFilter.policyId);
}
})
.join(TableName.Environment, `${TableName.AccessApprovalPolicy}.envId`, `${TableName.Environment}.id`)
.join(
TableName.AccessApprovalPolicyEnvironment,
`${TableName.AccessApprovalPolicy}.id`,
`${TableName.AccessApprovalPolicyEnvironment}.policyId`
)
.join(TableName.Environment, `${TableName.AccessApprovalPolicyEnvironment}.envId`, `${TableName.Environment}.id`)
.where((qb) => {
if (customFilter?.envId) {
void qb.where(`${TableName.AccessApprovalPolicyEnvironment}.envId`, "=", customFilter.envId);
}
})
.leftJoin(
TableName.AccessApprovalPolicyApprover,
`${TableName.AccessApprovalPolicy}.id`,
@@ -404,7 +436,7 @@ export const accessApprovalPolicyDALFactory = (db: TDbClient): TAccessApprovalPo
.select(tx.ref("bypasserGroupId").withSchema(TableName.AccessApprovalPolicyBypasser))
.select(tx.ref("name").withSchema(TableName.Environment).as("envName"))
.select(tx.ref("slug").withSchema(TableName.Environment).as("envSlug"))
.select(tx.ref("id").withSchema(TableName.Environment).as("envId"))
.select(tx.ref("id").withSchema(TableName.Environment).as("environmentId"))
.select(tx.ref("projectId").withSchema(TableName.Environment))
.select(selectAllTableCols(TableName.AccessApprovalPolicy));
@@ -448,6 +480,15 @@ export const accessApprovalPolicyDALFactory = (db: TDbClient): TAccessApprovalPo
sequence: approverSequence,
approvalsRequired
})
},
{
key: "environmentId",
label: "environments" as const,
mapper: ({ environmentId: id, envName, envSlug }) => ({
id,
name: envName,
slug: envSlug
})
}
]
});
@@ -470,11 +511,6 @@ export const accessApprovalPolicyDALFactory = (db: TDbClient): TAccessApprovalPo
data: docs,
key: "id",
parentMapper: (data) => ({
environment: {
id: data.envId,
name: data.envName,
slug: data.envSlug
},
projectId: data.projectId,
...AccessApprovalPoliciesSchema.parse(data)
// secretPath: data.secretPath || undefined,
@@ -517,6 +553,15 @@ export const accessApprovalPolicyDALFactory = (db: TDbClient): TAccessApprovalPo
id,
type: BypasserType.Group as const
})
},
{
key: "environmentId",
label: "environments" as const,
mapper: ({ environmentId: id, envName, envSlug }) => ({
id,
name: envName,
slug: envSlug
})
}
]
});
@@ -545,14 +590,20 @@ export const accessApprovalPolicyDALFactory = (db: TDbClient): TAccessApprovalPo
// eslint-disable-next-line @typescript-eslint/no-misused-promises
buildFindFilter(
{
envId,
secretPath
},
TableName.AccessApprovalPolicy
)
)
.join(
TableName.AccessApprovalPolicyEnvironment,
`${TableName.AccessApprovalPolicyEnvironment}.policyId`,
`${TableName.AccessApprovalPolicy}.id`
)
.where(`${TableName.AccessApprovalPolicyEnvironment}.envId`, "=", envId)
.orderBy("deletedAt", "desc")
.orderByRaw(`"deletedAt" IS NULL`)
.select(selectAllTableCols(TableName.AccessApprovalPolicy))
.first();
return result;
@@ -561,5 +612,81 @@ export const accessApprovalPolicyDALFactory = (db: TDbClient): TAccessApprovalPo
}
};
return { ...accessApprovalPolicyOrm, find, findById, softDeleteById, findLastValidPolicy };
const findPolicyByEnvIdAndSecretPath: TAccessApprovalPolicyDALFactory["findPolicyByEnvIdAndSecretPath"] = async (
{ envIds, secretPath },
tx
) => {
try {
const docs = await (tx || db.replicaNode())(TableName.AccessApprovalPolicy)
.join(
TableName.AccessApprovalPolicyEnvironment,
`${TableName.AccessApprovalPolicyEnvironment}.policyId`,
`${TableName.AccessApprovalPolicy}.id`
)
.join(
TableName.Environment,
`${TableName.AccessApprovalPolicyEnvironment}.envId`,
`${TableName.Environment}.id`
)
.where(
// eslint-disable-next-line @typescript-eslint/no-misused-promises
buildFindFilter(
{
$in: {
envId: envIds
}
},
TableName.AccessApprovalPolicyEnvironment
)
)
.where(
// eslint-disable-next-line @typescript-eslint/no-misused-promises
buildFindFilter(
{
secretPath
},
TableName.AccessApprovalPolicy
)
)
.whereNull(`${TableName.AccessApprovalPolicy}.deletedAt`)
.orderBy("deletedAt", "desc")
.orderByRaw(`"deletedAt" IS NULL`)
.select(selectAllTableCols(TableName.AccessApprovalPolicy))
.select(db.ref("name").withSchema(TableName.Environment).as("envName"))
.select(db.ref("slug").withSchema(TableName.Environment).as("envSlug"))
.select(db.ref("id").withSchema(TableName.Environment).as("environmentId"))
.select(db.ref("projectId").withSchema(TableName.Environment));
const formattedDocs = sqlNestRelationships({
data: docs,
key: "id",
parentMapper: (data) => ({
projectId: data.projectId,
...AccessApprovalPoliciesSchema.parse(data)
}),
childrenMapper: [
{
key: "environmentId",
label: "environments" as const,
mapper: ({ environmentId: id, envName, envSlug }) => ({
id,
name: envName,
slug: envSlug
})
}
]
});
return formattedDocs?.[0];
} catch (error) {
throw new DatabaseError({ error, name: "findPolicyByEnvIdAndSecretPath" });
}
};
return {
...accessApprovalPolicyOrm,
find,
findById,
softDeleteById,
findLastValidPolicy,
findPolicyByEnvIdAndSecretPath
};
};

View File

@@ -0,0 +1,32 @@
import { Knex } from "knex";
import { TDbClient } from "@app/db";
import { TableName } from "@app/db/schemas";
import { DatabaseError } from "@app/lib/errors";
import { buildFindFilter, ormify, selectAllTableCols } from "@app/lib/knex";
export type TAccessApprovalPolicyEnvironmentDALFactory = ReturnType<typeof accessApprovalPolicyEnvironmentDALFactory>;
export const accessApprovalPolicyEnvironmentDALFactory = (db: TDbClient) => {
const accessApprovalPolicyEnvironmentOrm = ormify(db, TableName.AccessApprovalPolicyEnvironment);
const findAvailablePoliciesByEnvId = async (envId: string, tx?: Knex) => {
try {
const docs = await (tx || db.replicaNode())(TableName.AccessApprovalPolicyEnvironment)
.join(
TableName.AccessApprovalPolicy,
`${TableName.AccessApprovalPolicyEnvironment}.policyId`,
`${TableName.AccessApprovalPolicy}.id`
)
// eslint-disable-next-line @typescript-eslint/no-misused-promises
.where(buildFindFilter({ envId }, TableName.AccessApprovalPolicyEnvironment))
.whereNull(`${TableName.AccessApprovalPolicy}.deletedAt`)
.select(selectAllTableCols(TableName.AccessApprovalPolicyEnvironment));
return docs;
} catch (error) {
throw new DatabaseError({ error, name: "findAvailablePoliciesByEnvId" });
}
};
return { ...accessApprovalPolicyEnvironmentOrm, findAvailablePoliciesByEnvId };
};

View File

@@ -21,6 +21,7 @@ import {
TAccessApprovalPolicyBypasserDALFactory
} from "./access-approval-policy-approver-dal";
import { TAccessApprovalPolicyDALFactory } from "./access-approval-policy-dal";
import { TAccessApprovalPolicyEnvironmentDALFactory } from "./access-approval-policy-environment-dal";
import {
ApproverType,
BypasserType,
@@ -45,12 +46,14 @@ type TAccessApprovalPolicyServiceFactoryDep = {
additionalPrivilegeDAL: Pick<TProjectUserAdditionalPrivilegeDALFactory, "delete">;
accessApprovalRequestReviewerDAL: Pick<TAccessApprovalRequestReviewerDALFactory, "update" | "delete">;
orgMembershipDAL: Pick<TOrgMembershipDALFactory, "find">;
accessApprovalPolicyEnvironmentDAL: TAccessApprovalPolicyEnvironmentDALFactory;
};
export const accessApprovalPolicyServiceFactory = ({
accessApprovalPolicyDAL,
accessApprovalPolicyApproverDAL,
accessApprovalPolicyBypasserDAL,
accessApprovalPolicyEnvironmentDAL,
groupDAL,
permissionService,
projectEnvDAL,
@@ -63,21 +66,22 @@ export const accessApprovalPolicyServiceFactory = ({
}: TAccessApprovalPolicyServiceFactoryDep): TAccessApprovalPolicyServiceFactory => {
const $policyExists = async ({
envId,
envIds,
secretPath,
policyId
}: {
envId: string;
envId?: string;
envIds?: string[];
secretPath: string;
policyId?: string;
}) => {
const policy = await accessApprovalPolicyDAL
.findOne({
envId,
secretPath,
deletedAt: null
})
.catch(() => null);
if (!envId && !envIds) {
throw new BadRequestError({ message: "Must provide either envId or envIds" });
}
const policy = await accessApprovalPolicyDAL.findPolicyByEnvIdAndSecretPath({
secretPath,
envIds: envId ? [envId] : (envIds as string[])
});
return policyId ? policy && policy.id !== policyId : Boolean(policy);
};
@@ -93,6 +97,7 @@ export const accessApprovalPolicyServiceFactory = ({
bypassers,
projectSlug,
environment,
environments,
enforcementLevel,
allowedSelfApprovals,
approvalsRequired
@@ -125,13 +130,23 @@ export const accessApprovalPolicyServiceFactory = ({
ProjectPermissionActions.Create,
ProjectPermissionSub.SecretApproval
);
const env = await projectEnvDAL.findOne({ slug: environment, projectId: project.id });
if (!env) throw new NotFoundError({ message: `Environment with slug '${environment}' not found` });
const mergedEnvs = (environment ? [environment] : environments) || [];
if (mergedEnvs.length === 0) {
throw new BadRequestError({ message: "Must provide either environment or environments" });
}
const envs = await projectEnvDAL.find({ $in: { slug: mergedEnvs }, projectId: project.id });
if (!envs.length || envs.length !== mergedEnvs.length) {
const notFoundEnvs = mergedEnvs.filter((env) => !envs.find((el) => el.slug === env));
throw new NotFoundError({ message: `One or more environments not found: ${notFoundEnvs.join(", ")}` });
}
if (await $policyExists({ envId: env.id, secretPath })) {
throw new BadRequestError({
message: `A policy for secret path '${secretPath}' already exists in environment '${environment}'`
});
for (const env of envs) {
// eslint-disable-next-line no-await-in-loop
if (await $policyExists({ envId: env.id, secretPath })) {
throw new BadRequestError({
message: `A policy for secret path '${secretPath}' already exists in environment '${env.slug}'`
});
}
}
let approverUserIds = userApprovers;
@@ -199,7 +214,7 @@ export const accessApprovalPolicyServiceFactory = ({
const accessApproval = await accessApprovalPolicyDAL.transaction(async (tx) => {
const doc = await accessApprovalPolicyDAL.create(
{
envId: env.id,
envId: envs[0].id,
approvals,
secretPath,
name,
@@ -208,6 +223,10 @@ export const accessApprovalPolicyServiceFactory = ({
},
tx
);
await accessApprovalPolicyEnvironmentDAL.insertMany(
envs.map((el) => ({ policyId: doc.id, envId: el.id })),
tx
);
if (approverUserIds.length) {
await accessApprovalPolicyApproverDAL.insertMany(
@@ -260,7 +279,7 @@ export const accessApprovalPolicyServiceFactory = ({
return doc;
});
return { ...accessApproval, environment: env, projectId: project.id };
return { ...accessApproval, environments: envs, projectId: project.id, environment: envs[0] };
};
const getAccessApprovalPolicyByProjectSlug: TAccessApprovalPolicyServiceFactory["getAccessApprovalPolicyByProjectSlug"] =
@@ -279,7 +298,10 @@ export const accessApprovalPolicyServiceFactory = ({
});
const accessApprovalPolicies = await accessApprovalPolicyDAL.find({ projectId: project.id, deletedAt: null });
return accessApprovalPolicies;
return accessApprovalPolicies.map((policy) => ({
...policy,
environment: policy.environments[0]
}));
};
const updateAccessApprovalPolicy: TAccessApprovalPolicyServiceFactory["updateAccessApprovalPolicy"] = async ({
@@ -295,7 +317,8 @@ export const accessApprovalPolicyServiceFactory = ({
approvals,
enforcementLevel,
allowedSelfApprovals,
approvalsRequired
approvalsRequired,
environments
}: TUpdateAccessApprovalPolicy) => {
const groupApprovers = approvers.filter((approver) => approver.type === ApproverType.Group);
@@ -323,16 +346,27 @@ export const accessApprovalPolicyServiceFactory = ({
throw new BadRequestError({ message: "Approvals cannot be greater than approvers" });
}
let envs = accessApprovalPolicy.environments;
if (
await $policyExists({
envId: accessApprovalPolicy.envId,
secretPath: secretPath || accessApprovalPolicy.secretPath,
policyId: accessApprovalPolicy.id
})
environments &&
(environments.length !== envs.length || environments.some((env) => !envs.find((el) => el.slug === env)))
) {
throw new BadRequestError({
message: `A policy for secret path '${secretPath}' already exists in environment '${accessApprovalPolicy.environment.slug}'`
});
envs = await projectEnvDAL.find({ $in: { slug: environments }, projectId: accessApprovalPolicy.projectId });
}
for (const env of envs) {
if (
// eslint-disable-next-line no-await-in-loop
await $policyExists({
envId: env.id,
secretPath: secretPath || accessApprovalPolicy.secretPath,
policyId: accessApprovalPolicy.id
})
) {
throw new BadRequestError({
message: `A policy for secret path '${secretPath || accessApprovalPolicy.secretPath}' already exists in environment '${env.slug}'`
});
}
}
const { permission } = await permissionService.getProjectPermission({
@@ -488,6 +522,14 @@ export const accessApprovalPolicyServiceFactory = ({
);
}
if (environments) {
await accessApprovalPolicyEnvironmentDAL.delete({ policyId: doc.id }, tx);
await accessApprovalPolicyEnvironmentDAL.insertMany(
envs.map((env) => ({ policyId: doc.id, envId: env.id })),
tx
);
}
await accessApprovalPolicyBypasserDAL.delete({ policyId: doc.id }, tx);
if (bypasserUserIds.length) {
@@ -517,7 +559,8 @@ export const accessApprovalPolicyServiceFactory = ({
return {
...updatedPolicy,
environment: accessApprovalPolicy.environment,
environments: accessApprovalPolicy.environments,
environment: accessApprovalPolicy.environments[0],
projectId: accessApprovalPolicy.projectId
};
};
@@ -568,7 +611,10 @@ export const accessApprovalPolicyServiceFactory = ({
}
});
return policy;
return {
...policy,
environment: policy.environments[0]
};
};
const getAccessPolicyCountByEnvSlug: TAccessApprovalPolicyServiceFactory["getAccessPolicyCountByEnvSlug"] = async ({
@@ -598,11 +644,13 @@ export const accessApprovalPolicyServiceFactory = ({
const environment = await projectEnvDAL.findOne({ projectId: project.id, slug: envSlug });
if (!environment) throw new NotFoundError({ message: `Environment with slug '${envSlug}' not found` });
const policies = await accessApprovalPolicyDAL.find({
envId: environment.id,
projectId: project.id,
deletedAt: null
});
const policies = await accessApprovalPolicyDAL.find(
{
projectId: project.id,
deletedAt: null
},
{ envId: environment.id }
);
if (!policies) throw new NotFoundError({ message: `No policies found in environment with slug '${envSlug}'` });
return { count: policies.length };
@@ -634,7 +682,10 @@ export const accessApprovalPolicyServiceFactory = ({
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.SecretApproval);
return policy;
return {
...policy,
environment: policy.environments[0]
};
};
return {

View File

@@ -26,7 +26,8 @@ export enum BypasserType {
export type TCreateAccessApprovalPolicy = {
approvals: number;
secretPath: string;
environment: string;
environment?: string;
environments?: string[];
approvers: (
| { type: ApproverType.Group; id: string; sequence?: number }
| { type: ApproverType.User; id?: string; username?: string; sequence?: number }
@@ -58,6 +59,7 @@ export type TUpdateAccessApprovalPolicy = {
enforcementLevel?: EnforcementLevel;
allowedSelfApprovals: boolean;
approvalsRequired?: { numberOfApprovals: number; stepNumber: number }[];
environments?: string[];
} & Omit<TProjectPermission, "projectId">;
export type TDeleteAccessApprovalPolicy = {
@@ -113,6 +115,15 @@ export interface TAccessApprovalPolicyServiceFactory {
slug: string;
position: number;
};
environments: {
name: string;
id: string;
createdAt: Date;
updatedAt: Date;
projectId: string;
slug: string;
position: number;
}[];
projectId: string;
name: string;
id: string;
@@ -153,6 +164,11 @@ export interface TAccessApprovalPolicyServiceFactory {
name: string;
slug: string;
};
environments: {
id: string;
name: string;
slug: string;
}[];
projectId: string;
}>;
updateAccessApprovalPolicy: ({
@@ -168,13 +184,19 @@ export interface TAccessApprovalPolicyServiceFactory {
approvals,
enforcementLevel,
allowedSelfApprovals,
approvalsRequired
approvalsRequired,
environments
}: TUpdateAccessApprovalPolicy) => Promise<{
environment: {
id: string;
name: string;
slug: string;
};
environments: {
id: string;
name: string;
slug: string;
}[];
projectId: string;
name: string;
id: string;
@@ -225,6 +247,11 @@ export interface TAccessApprovalPolicyServiceFactory {
name: string;
slug: string;
};
environments: {
id: string;
name: string;
slug: string;
}[];
projectId: string;
bypassers: (
| {
@@ -276,6 +303,11 @@ export interface TAccessApprovalPolicyServiceFactory {
name: string;
slug: string;
};
environments: {
id: string;
name: string;
slug: string;
}[];
projectId: string;
bypassers: (
| {

View File

@@ -65,7 +65,7 @@ export interface TAccessApprovalRequestDALFactory extends Omit<TOrmify<TableName
deletedAt: Date | null | undefined;
};
projectId: string;
environment: string;
environments: string[];
requestedByUser: {
userId: string;
email: string | null | undefined;
@@ -515,7 +515,17 @@ export const accessApprovalRequestDALFactory = (db: TDbClient): TAccessApprovalR
`accessApprovalReviewerUser.id`
)
.leftJoin(TableName.Environment, `${TableName.AccessApprovalPolicy}.envId`, `${TableName.Environment}.id`)
.leftJoin(
TableName.AccessApprovalPolicyEnvironment,
`${TableName.AccessApprovalPolicy}.id`,
`${TableName.AccessApprovalPolicyEnvironment}.policyId`
)
.leftJoin(
TableName.Environment,
`${TableName.AccessApprovalPolicyEnvironment}.envId`,
`${TableName.Environment}.id`
)
.select(selectAllTableCols(TableName.AccessApprovalRequest))
.select(
tx.ref("approverUserId").withSchema(TableName.AccessApprovalPolicyApprover),
@@ -683,6 +693,11 @@ export const accessApprovalRequestDALFactory = (db: TDbClient): TAccessApprovalR
lastName,
username
})
},
{
key: "environment",
label: "environments" as const,
mapper: ({ environment }) => environment
}
]
});

View File

@@ -86,6 +86,25 @@ export const accessApprovalRequestServiceFactory = ({
projectMicrosoftTeamsConfigDAL,
projectSlackConfigDAL
}: TSecretApprovalRequestServiceFactoryDep): TAccessApprovalRequestServiceFactory => {
const $getEnvironmentFromPermissions = (permissions: unknown): string | null => {
if (!Array.isArray(permissions) || permissions.length === 0) {
return null;
}
const firstPermission = permissions[0] as unknown[];
if (!Array.isArray(firstPermission) || firstPermission.length < 3) {
return null;
}
const metadata = firstPermission[2] as Record<string, unknown>;
if (typeof metadata === "object" && metadata !== null && "environment" in metadata) {
const env = metadata.environment;
return typeof env === "string" ? env : null;
}
return null;
};
const createAccessApprovalRequest: TAccessApprovalRequestServiceFactory["createAccessApprovalRequest"] = async ({
isTemporary,
temporaryRange,
@@ -308,6 +327,15 @@ export const accessApprovalRequestServiceFactory = ({
requests = requests.filter((request) => request.environment === envSlug);
}
requests = requests.map((request) => {
const permissionEnvironment = $getEnvironmentFromPermissions(request.permissions);
if (permissionEnvironment) {
request.environmentName = permissionEnvironment;
}
return request;
});
return { requests };
};
@@ -325,13 +353,27 @@ export const accessApprovalRequestServiceFactory = ({
throw new NotFoundError({ message: `Secret approval request with ID '${requestId}' not found` });
}
const { policy, environment } = accessApprovalRequest;
const { policy, environments, permissions } = accessApprovalRequest;
if (policy.deletedAt) {
throw new BadRequestError({
message: "The policy associated with this access request has been deleted."
});
}
const permissionEnvironment = $getEnvironmentFromPermissions(permissions);
if (
!permissionEnvironment ||
(!environments.includes(permissionEnvironment) && status === ApprovalStatus.APPROVED)
) {
throw new BadRequestError({
message: `The original policy ${policy.name} is not attached to environment '${permissionEnvironment}'.`
});
}
const environment = await projectEnvDAL.findOne({
projectId: accessApprovalRequest.projectId,
slug: permissionEnvironment
});
const { membership, hasRole } = await permissionService.getProjectPermission({
actor,
actorId,
@@ -553,7 +595,7 @@ export const accessApprovalRequestServiceFactory = ({
requesterEmail: actingUser.email,
bypassReason: bypassReason || "No reason provided",
secretPath: policy.secretPath || "/",
environment,
environment: environment?.name || permissionEnvironment,
approvalUrl: `${cfg.SITE_URL}/projects/secret-management/${project.id}/approval`,
requestType: "access"
},
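
The new $getEnvironmentFromPermissions helper assumes the stored request permissions are CASL-style tuples whose third element is a condition object carrying the environment slug. A minimal standalone sketch of that extraction with illustrative data (the tuple layout beyond what this hunk shows is an assumption):

// Standalone sketch, not the service helper itself. Assumed shape: [action, subject, conditions].
type PermissionTuple = [string, string, Record<string, unknown>?];

const getEnvironmentFromPermissions = (permissions: unknown): string | null => {
  if (!Array.isArray(permissions) || permissions.length === 0) return null;
  const [first] = permissions as PermissionTuple[];
  if (!Array.isArray(first) || first.length < 3) return null;
  const conditions = first[2];
  const env = conditions && typeof conditions === "object" ? conditions.environment : undefined;
  return typeof env === "string" ? env : null;
};

// Example: a request whose permissions are scoped to the staging environment (made-up values).
const permissions: PermissionTuple[] = [["read", "secrets", { environment: "staging", secretPath: "/api" }]];
console.log(getEnvironmentFromPermissions(permissions)); // "staging"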

View File

@@ -23,6 +23,7 @@ export const secretApprovalPolicyDALFactory = (db: TDbClient) => {
filter: TFindFilter<TSecretApprovalPolicies & { projectId: string }>,
customFilter?: {
sapId?: string;
envId?: string;
}
) =>
tx(TableName.SecretApprovalPolicy)
@@ -33,7 +34,17 @@ export const secretApprovalPolicyDALFactory = (db: TDbClient) => {
void qb.where(`${TableName.SecretApprovalPolicy}.id`, "=", customFilter.sapId);
}
})
.join(TableName.Environment, `${TableName.SecretApprovalPolicy}.envId`, `${TableName.Environment}.id`)
.join(
TableName.SecretApprovalPolicyEnvironment,
`${TableName.SecretApprovalPolicyEnvironment}.policyId`,
`${TableName.SecretApprovalPolicy}.id`
)
.join(TableName.Environment, `${TableName.SecretApprovalPolicyEnvironment}.envId`, `${TableName.Environment}.id`)
.where((qb) => {
if (customFilter?.envId) {
void qb.where(`${TableName.SecretApprovalPolicyEnvironment}.envId`, "=", customFilter.envId);
}
})
.leftJoin(
TableName.SecretApprovalPolicyApprover,
`${TableName.SecretApprovalPolicy}.id`,
@@ -97,7 +108,7 @@ export const secretApprovalPolicyDALFactory = (db: TDbClient) => {
.select(
tx.ref("name").withSchema(TableName.Environment).as("envName"),
tx.ref("slug").withSchema(TableName.Environment).as("envSlug"),
tx.ref("id").withSchema(TableName.Environment).as("envId"),
tx.ref("id").withSchema(TableName.Environment).as("environmentId"),
tx.ref("projectId").withSchema(TableName.Environment)
)
.select(selectAllTableCols(TableName.SecretApprovalPolicy))
@@ -146,6 +157,15 @@ export const secretApprovalPolicyDALFactory = (db: TDbClient) => {
firstName,
lastName
})
},
{
key: "environmentId",
label: "environments" as const,
mapper: ({ environmentId, envName, envSlug }) => ({
id: environmentId,
name: envName,
slug: envSlug
})
}
]
});
@@ -160,6 +180,7 @@ export const secretApprovalPolicyDALFactory = (db: TDbClient) => {
filter: TFindFilter<TSecretApprovalPolicies & { projectId: string }>,
customFilter?: {
sapId?: string;
envId?: string;
},
tx?: Knex
) => {
@@ -221,6 +242,15 @@ export const secretApprovalPolicyDALFactory = (db: TDbClient) => {
mapper: ({ approverGroupUserId: userId }) => ({
userId
})
},
{
key: "environmentId",
label: "environments" as const,
mapper: ({ environmentId, envName, envSlug }) => ({
id: environmentId,
name: envName,
slug: envSlug
})
}
]
});
@@ -235,5 +265,74 @@ export const secretApprovalPolicyDALFactory = (db: TDbClient) => {
return softDeletedPolicy;
};
return { ...secretApprovalPolicyOrm, findById, find, softDeleteById };
const findPolicyByEnvIdAndSecretPath = async (
{ envIds, secretPath }: { envIds: string[]; secretPath: string },
tx?: Knex
) => {
try {
const docs = await (tx || db.replicaNode())(TableName.SecretApprovalPolicy)
.join(
TableName.SecretApprovalPolicyEnvironment,
`${TableName.SecretApprovalPolicyEnvironment}.policyId`,
`${TableName.SecretApprovalPolicy}.id`
)
.join(
TableName.Environment,
`${TableName.SecretApprovalPolicyEnvironment}.envId`,
`${TableName.Environment}.id`
)
.where(
// eslint-disable-next-line @typescript-eslint/no-misused-promises
buildFindFilter(
{
$in: {
envId: envIds
}
},
TableName.SecretApprovalPolicyEnvironment
)
)
.where(
// eslint-disable-next-line @typescript-eslint/no-misused-promises
buildFindFilter(
{
secretPath
},
TableName.SecretApprovalPolicy
)
)
.whereNull(`${TableName.SecretApprovalPolicy}.deletedAt`)
.orderBy("deletedAt", "desc")
.orderByRaw(`"deletedAt" IS NULL`)
.select(selectAllTableCols(TableName.SecretApprovalPolicy))
.select(db.ref("name").withSchema(TableName.Environment).as("envName"))
.select(db.ref("slug").withSchema(TableName.Environment).as("envSlug"))
.select(db.ref("id").withSchema(TableName.Environment).as("environmentId"))
.select(db.ref("projectId").withSchema(TableName.Environment));
const formattedDocs = sqlNestRelationships({
data: docs,
key: "id",
parentMapper: (data) => ({
projectId: data.projectId,
...SecretApprovalPoliciesSchema.parse(data)
}),
childrenMapper: [
{
key: "environmentId",
label: "environments" as const,
mapper: ({ environmentId: id, envName, envSlug }) => ({
id,
name: envName,
slug: envSlug
})
}
]
});
return formattedDocs?.[0];
} catch (error) {
throw new DatabaseError({ error, name: "findPolicyByEnvIdAndSecretPath" });
}
};
return { ...secretApprovalPolicyOrm, findById, find, softDeleteById, findPolicyByEnvIdAndSecretPath };
};
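
For readers unfamiliar with the sqlNestRelationships helper used above: the joins emit one row per policy and environment pair, and the environmentId child mapper collapses those rows into a single policy carrying an environments array. A reduced, self-contained sketch of that collapse (field names mirror the selected aliases; the data is made up):

type PolicyRow = { id: string; name: string; environmentId: string; envName: string; envSlug: string };
type NestedPolicy = { id: string; name: string; environments: { id: string; name: string; slug: string }[] };

const nestEnvironments = (rows: PolicyRow[]): NestedPolicy[] => {
  const byPolicy = new Map<string, NestedPolicy>();
  for (const row of rows) {
    const policy = byPolicy.get(row.id) ?? { id: row.id, name: row.name, environments: [] };
    if (!policy.environments.some((env) => env.id === row.environmentId)) {
      policy.environments.push({ id: row.environmentId, name: row.envName, slug: row.envSlug });
    }
    byPolicy.set(row.id, policy);
  }
  return [...byPolicy.values()];
};

// One policy attached to two environments collapses into a single record with environments: [dev, staging].
console.log(
  nestEnvironments([
    { id: "p1", name: "Change policy", environmentId: "e1", envName: "Development", envSlug: "dev" },
    { id: "p1", name: "Change policy", environmentId: "e2", envName: "Staging", envSlug: "staging" }
  ])
);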

View File

@@ -0,0 +1,32 @@
import { Knex } from "knex";
import { TDbClient } from "@app/db";
import { TableName } from "@app/db/schemas";
import { DatabaseError } from "@app/lib/errors";
import { buildFindFilter, ormify, selectAllTableCols } from "@app/lib/knex";
export type TSecretApprovalPolicyEnvironmentDALFactory = ReturnType<typeof secretApprovalPolicyEnvironmentDALFactory>;
export const secretApprovalPolicyEnvironmentDALFactory = (db: TDbClient) => {
const secretApprovalPolicyEnvironmentOrm = ormify(db, TableName.SecretApprovalPolicyEnvironment);
const findAvailablePoliciesByEnvId = async (envId: string, tx?: Knex) => {
try {
const docs = await (tx || db.replicaNode())(TableName.SecretApprovalPolicyEnvironment)
.join(
TableName.SecretApprovalPolicy,
`${TableName.SecretApprovalPolicyEnvironment}.policyId`,
`${TableName.SecretApprovalPolicy}.id`
)
// eslint-disable-next-line @typescript-eslint/no-misused-promises
.where(buildFindFilter({ envId }, TableName.SecretApprovalPolicyEnvironment))
.whereNull(`${TableName.SecretApprovalPolicy}.deletedAt`)
.select(selectAllTableCols(TableName.SecretApprovalPolicyEnvironment));
return docs;
} catch (error) {
throw new DatabaseError({ error, name: "findAvailablePoliciesByEnvId" });
}
};
return { ...secretApprovalPolicyEnvironmentOrm, findAvailablePoliciesByEnvId };
};

View File

@@ -19,6 +19,7 @@ import {
TSecretApprovalPolicyBypasserDALFactory
} from "./secret-approval-policy-approver-dal";
import { TSecretApprovalPolicyDALFactory } from "./secret-approval-policy-dal";
import { TSecretApprovalPolicyEnvironmentDALFactory } from "./secret-approval-policy-environment-dal";
import {
TCreateSapDTO,
TDeleteSapDTO,
@@ -36,12 +37,13 @@ const getPolicyScore = (policy: { secretPath?: string | null }) =>
type TSecretApprovalPolicyServiceFactoryDep = {
permissionService: Pick<TPermissionServiceFactory, "getProjectPermission">;
secretApprovalPolicyDAL: TSecretApprovalPolicyDALFactory;
projectEnvDAL: Pick<TProjectEnvDALFactory, "findOne">;
projectEnvDAL: Pick<TProjectEnvDALFactory, "findOne" | "find">;
userDAL: Pick<TUserDALFactory, "find">;
secretApprovalPolicyApproverDAL: TSecretApprovalPolicyApproverDALFactory;
secretApprovalPolicyBypasserDAL: TSecretApprovalPolicyBypasserDALFactory;
licenseService: Pick<TLicenseServiceFactory, "getPlan">;
secretApprovalRequestDAL: Pick<TSecretApprovalRequestDALFactory, "update">;
secretApprovalPolicyEnvironmentDAL: TSecretApprovalPolicyEnvironmentDALFactory;
};
export type TSecretApprovalPolicyServiceFactory = ReturnType<typeof secretApprovalPolicyServiceFactory>;
@@ -51,27 +53,30 @@ export const secretApprovalPolicyServiceFactory = ({
permissionService,
secretApprovalPolicyApproverDAL,
secretApprovalPolicyBypasserDAL,
secretApprovalPolicyEnvironmentDAL,
projectEnvDAL,
userDAL,
licenseService,
secretApprovalRequestDAL
}: TSecretApprovalPolicyServiceFactoryDep) => {
const $policyExists = async ({
envIds,
envId,
secretPath,
policyId
}: {
envId: string;
envIds?: string[];
envId?: string;
secretPath: string;
policyId?: string;
}) => {
const policy = await secretApprovalPolicyDAL
.findOne({
envId,
secretPath,
deletedAt: null
})
.catch(() => null);
if (!envIds && !envId) {
throw new BadRequestError({ message: "At least one environment should be provided" });
}
const policy = await secretApprovalPolicyDAL.findPolicyByEnvIdAndSecretPath({
envIds: envId ? [envId] : envIds || [],
secretPath
});
return policyId ? policy && policy.id !== policyId : Boolean(policy);
};
@@ -88,6 +93,7 @@ export const secretApprovalPolicyServiceFactory = ({
projectId,
secretPath,
environment,
environments,
enforcementLevel,
allowedSelfApprovals
}: TCreateSapDTO) => {
@@ -127,17 +133,23 @@ export const secretApprovalPolicyServiceFactory = ({
});
}
const env = await projectEnvDAL.findOne({ slug: environment, projectId });
if (!env) {
throw new NotFoundError({
message: `Environment with slug '${environment}' not found in project with ID ${projectId}`
});
const mergedEnvs = (environment ? [environment] : environments) || [];
if (mergedEnvs.length === 0) {
throw new BadRequestError({ message: "Must provide either environment or environments" });
}
const envs = await projectEnvDAL.find({ $in: { slug: mergedEnvs }, projectId });
if (!envs.length || envs.length !== mergedEnvs.length) {
const notFoundEnvs = mergedEnvs.filter((env) => !envs.find((el) => el.slug === env));
throw new NotFoundError({ message: `One or more environments not found: ${notFoundEnvs.join(", ")}` });
}
if (await $policyExists({ envId: env.id, secretPath })) {
throw new BadRequestError({
message: `A policy for secret path '${secretPath}' already exists in environment '${environment}'`
});
for (const env of envs) {
// eslint-disable-next-line no-await-in-loop
if (await $policyExists({ envId: env.id, secretPath })) {
throw new BadRequestError({
message: `A policy for secret path '${secretPath}' already exists in environment '${env.slug}'`
});
}
}
let groupBypassers: string[] = [];
@@ -181,7 +193,7 @@ export const secretApprovalPolicyServiceFactory = ({
const secretApproval = await secretApprovalPolicyDAL.transaction(async (tx) => {
const doc = await secretApprovalPolicyDAL.create(
{
envId: env.id,
envId: envs[0].id,
approvals,
secretPath,
name,
@@ -190,6 +202,13 @@ export const secretApprovalPolicyServiceFactory = ({
},
tx
);
await secretApprovalPolicyEnvironmentDAL.insertMany(
envs.map((env) => ({
envId: env.id,
policyId: doc.id
})),
tx
);
let userApproverIds = userApprovers;
if (userApproverNames.length) {
@@ -253,12 +272,13 @@ export const secretApprovalPolicyServiceFactory = ({
return doc;
});
return { ...secretApproval, environment: env, projectId };
return { ...secretApproval, environments: envs, projectId, environment: envs[0] };
};
const updateSecretApprovalPolicy = async ({
approvers,
bypassers,
environments,
secretPath,
name,
actorId,
@@ -288,17 +308,26 @@ export const secretApprovalPolicyServiceFactory = ({
message: `Secret approval policy with ID '${secretPolicyId}' not found`
});
}
let envs = secretApprovalPolicy.environments;
if (
await $policyExists({
envId: secretApprovalPolicy.envId,
secretPath: secretPath || secretApprovalPolicy.secretPath,
policyId: secretApprovalPolicy.id
})
environments &&
(environments.length !== envs.length || environments.some((env) => !envs.find((el) => el.slug === env)))
) {
throw new BadRequestError({
message: `A policy for secret path '${secretPath}' already exists in environment '${secretApprovalPolicy.environment.slug}'`
});
envs = await projectEnvDAL.find({ $in: { slug: environments }, projectId: secretApprovalPolicy.projectId });
}
for (const env of envs) {
if (
// eslint-disable-next-line no-await-in-loop
await $policyExists({
envId: env.id,
secretPath: secretPath || secretApprovalPolicy.secretPath,
policyId: secretApprovalPolicy.id
})
) {
throw new BadRequestError({
message: `A policy for secret path '${secretPath || secretApprovalPolicy.secretPath}' already exists in environment '${env.slug}'`
});
}
}
const { permission } = await permissionService.getProjectPermission({
@@ -415,6 +444,17 @@ export const secretApprovalPolicyServiceFactory = ({
);
}
if (environments) {
await secretApprovalPolicyEnvironmentDAL.delete({ policyId: doc.id }, tx);
await secretApprovalPolicyEnvironmentDAL.insertMany(
envs.map((env) => ({
envId: env.id,
policyId: doc.id
})),
tx
);
}
await secretApprovalPolicyBypasserDAL.delete({ policyId: doc.id }, tx);
if (bypasserUserIds.length) {
@@ -441,7 +481,8 @@ export const secretApprovalPolicyServiceFactory = ({
});
return {
...updatedSap,
environment: secretApprovalPolicy.environment,
environments: secretApprovalPolicy.environments,
environment: secretApprovalPolicy.environments[0],
projectId: secretApprovalPolicy.projectId
};
};
@@ -487,7 +528,12 @@ export const secretApprovalPolicyServiceFactory = ({
const updatedPolicy = await secretApprovalPolicyDAL.softDeleteById(secretPolicyId, tx);
return updatedPolicy;
});
return { ...deletedPolicy, projectId: sapPolicy.projectId, environment: sapPolicy.environment };
return {
...deletedPolicy,
projectId: sapPolicy.projectId,
environments: sapPolicy.environments,
environment: sapPolicy.environments[0]
};
};
const getSecretApprovalPolicyByProjectId = async ({
@@ -520,7 +566,7 @@ export const secretApprovalPolicyServiceFactory = ({
});
}
const policies = await secretApprovalPolicyDAL.find({ envId: env.id, deletedAt: null });
const policies = await secretApprovalPolicyDAL.find({ deletedAt: null }, { envId: env.id });
if (!policies.length) return;
// this will filter policies either without scoped to secret path or the one that matches with secret path
const policiesFilteredByPath = policies.filter(

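The create path now accepts either the legacy singular environment or the new environments array; the two are merged before the environment lookup and the per-environment duplicate check. A minimal sketch of that merge using hypothetical payloads (approver and bypasser fields omitted):

// Hypothetical payloads; only the environment fields matter here.
const legacyPayload = { secretPath: "/", environment: "staging" };
const multiEnvPayload = { secretPath: "/", environments: ["staging", "prod"] };

const mergeEnvs = (p: { environment?: string; environments?: string[] }) => {
  const mergedEnvs = (p.environment ? [p.environment] : p.environments) || [];
  if (mergedEnvs.length === 0) throw new Error("Must provide either environment or environments");
  return mergedEnvs;
};

console.log(mergeEnvs(legacyPayload)); // ["staging"]
console.log(mergeEnvs(multiEnvPayload)); // ["staging", "prod"]
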
View File

@@ -5,7 +5,8 @@ import { ApproverType, BypasserType } from "../access-approval-policy/access-app
export type TCreateSapDTO = {
approvals: number;
secretPath: string;
environment: string;
environment?: string;
environments?: string[];
approvers: ({ type: ApproverType.Group; id: string } | { type: ApproverType.User; id?: string; username?: string })[];
bypassers?: (
| { type: BypasserType.Group; id: string }
@@ -29,6 +30,7 @@ export type TUpdateSapDTO = {
name?: string;
enforcementLevel?: EnforcementLevel;
allowedSelfApprovals?: boolean;
environments?: string[];
} & Omit<TProjectPermission, "projectId">;
export type TDeleteSapDTO = {

View File

@@ -40,6 +40,13 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
`${TableName.SecretApprovalRequest}.policyId`,
`${TableName.SecretApprovalPolicy}.id`
)
.leftJoin(TableName.SecretApprovalPolicyEnvironment, (bd) => {
bd.on(
`${TableName.SecretApprovalPolicy}.id`,
"=",
`${TableName.SecretApprovalPolicyEnvironment}.policyId`
).andOn(`${TableName.SecretApprovalPolicyEnvironment}.envId`, "=", `${TableName.SecretFolder}.envId`);
})
.leftJoin<TUsers>(
db(TableName.Users).as("statusChangedByUser"),
`${TableName.SecretApprovalRequest}.statusChangedByUserId`,
@@ -146,7 +153,7 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
tx.ref("projectId").withSchema(TableName.Environment),
tx.ref("slug").withSchema(TableName.Environment).as("environment"),
tx.ref("secretPath").withSchema(TableName.SecretApprovalPolicy).as("policySecretPath"),
tx.ref("envId").withSchema(TableName.SecretApprovalPolicy).as("policyEnvId"),
tx.ref("envId").withSchema(TableName.SecretApprovalPolicyEnvironment).as("policyEnvId"),
tx.ref("enforcementLevel").withSchema(TableName.SecretApprovalPolicy).as("policyEnforcementLevel"),
tx.ref("allowedSelfApprovals").withSchema(TableName.SecretApprovalPolicy).as("policyAllowedSelfApprovals"),
tx.ref("approvals").withSchema(TableName.SecretApprovalPolicy).as("policyApprovals"),

View File

@@ -69,6 +69,7 @@ import { throwIfMissingSecretReadValueOrDescribePermission } from "../permission
import { TPermissionServiceFactory } from "../permission/permission-service-types";
import { ProjectPermissionSecretActions, ProjectPermissionSub } from "../permission/project-permission";
import { TSecretApprovalPolicyDALFactory } from "../secret-approval-policy/secret-approval-policy-dal";
import { scanSecretPolicyViolations } from "../secret-scanning-v2/secret-scanning-v2-fns";
import { TSecretSnapshotServiceFactory } from "../secret-snapshot/secret-snapshot-service";
import { TSecretApprovalRequestDALFactory } from "./secret-approval-request-dal";
import { sendApprovalEmailsFn } from "./secret-approval-request-fns";
@@ -537,6 +538,11 @@ export const secretApprovalRequestServiceFactory = ({
message: "The policy associated with this secret approval request has been deleted."
});
}
if (!policy.envId) {
throw new BadRequestError({
message: "The policy associated with this secret approval request is not linked to the environment."
});
}
const { hasRole } = await permissionService.getProjectPermission({
actor: ActorType.USER,
@@ -1407,6 +1413,20 @@ export const secretApprovalRequestServiceFactory = ({
projectId
});
const project = await projectDAL.findById(projectId);
await scanSecretPolicyViolations(
projectId,
secretPath,
[
...(data[SecretOperations.Create] || []),
...(data[SecretOperations.Update] || []).filter((el) => el.secretValue)
].map((el) => ({
secretKey: el.secretKey,
secretValue: el.secretValue as string
})),
project.secretDetectionIgnoreValues || []
);
// for created secret approval change
const createdSecrets = data[SecretOperations.Create];
if (createdSecrets && createdSecrets?.length) {

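The pre-merge scan only covers changes that introduce a value: every creation, plus updates whose payload includes a secretValue. A reduced sketch of that filter (the create and update keys stand in for the SecretOperations enum members, whose string values are not shown in this hunk):

type ChangeSet = {
  create?: { secretKey: string; secretValue?: string }[];
  update?: { secretKey: string; secretValue?: string }[];
};

const toScan = (data: ChangeSet) =>
  [...(data.create || []), ...(data.update || []).filter((el) => el.secretValue)].map((el) => ({
    secretKey: el.secretKey,
    secretValue: el.secretValue as string
  }));

// API_KEY and TOKEN are scanned; the value-less DB_URL update is skipped.
console.log(
  toScan({
    create: [{ secretKey: "API_KEY", secretValue: "value-1" }],
    update: [{ secretKey: "DB_URL" }, { secretKey: "TOKEN", secretValue: "value-2" }]
  })
);
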
View File

@@ -7,12 +7,13 @@ import {
TRotationFactoryRevokeCredentials,
TRotationFactoryRotateCredentials
} from "@app/ee/services/secret-rotation-v2/secret-rotation-v2-types";
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
import {
executeWithPotentialGateway,
SQL_CONNECTION_ALTER_LOGIN_STATEMENT
} from "@app/services/app-connection/shared/sql";
import { generatePassword } from "../utils";
import { DEFAULT_PASSWORD_REQUIREMENTS, generatePassword } from "../utils";
import {
TSqlCredentialsRotationGeneratedCredentials,
TSqlCredentialsRotationWithConnection
@@ -32,6 +33,11 @@ const redactPasswords = (e: unknown, credentials: TSqlCredentialsRotationGenerat
return redactedMessage;
};
const ORACLE_PASSWORD_REQUIREMENTS = {
...DEFAULT_PASSWORD_REQUIREMENTS,
length: 30
};
export const sqlCredentialsRotationFactory: TRotationFactory<
TSqlCredentialsRotationWithConnection,
TSqlCredentialsRotationGeneratedCredentials
@@ -43,6 +49,9 @@ export const sqlCredentialsRotationFactory: TRotationFactory<
secretsMapping
} = secretRotation;
const passwordRequirement =
connection.app === AppConnection.OracleDB ? ORACLE_PASSWORD_REQUIREMENTS : DEFAULT_PASSWORD_REQUIREMENTS;
const executeOperation = <T>(
operation: (client: Knex) => Promise<T>,
credentialsOverride?: TSqlCredentialsRotationGeneratedCredentials[number]
@@ -65,7 +74,7 @@ export const sqlCredentialsRotationFactory: TRotationFactory<
const $validateCredentials = async (credentials: TSqlCredentialsRotationGeneratedCredentials[number]) => {
try {
await executeOperation(async (client) => {
await client.raw("SELECT 1");
await client.raw(connection.app === AppConnection.OracleDB ? `SELECT 1 FROM DUAL` : `Select 1`);
}, credentials);
} catch (error) {
throw new Error(redactPasswords(error, [credentials]));
@@ -75,11 +84,13 @@ export const sqlCredentialsRotationFactory: TRotationFactory<
const issueCredentials: TRotationFactoryIssueCredentials<TSqlCredentialsRotationGeneratedCredentials> = async (
callback
) => {
// For SQL, since we get existing users, we change both their passwords
// on issue to invalidate their existing passwords
const credentialsSet = [
{ username: username1, password: generatePassword() },
{ username: username2, password: generatePassword() }
{ username: username1, password: generatePassword(passwordRequirement) },
{ username: username2, password: generatePassword(passwordRequirement) }
];
try {
@@ -105,7 +116,10 @@ export const sqlCredentialsRotationFactory: TRotationFactory<
credentialsToRevoke,
callback
) => {
const revokedCredentials = credentialsToRevoke.map(({ username }) => ({ username, password: generatePassword() }));
const revokedCredentials = credentialsToRevoke.map(({ username }) => ({
username,
password: generatePassword(passwordRequirement)
}));
try {
await executeOperation(async (client) => {
@@ -128,7 +142,10 @@ export const sqlCredentialsRotationFactory: TRotationFactory<
callback
) => {
// generate new password for the next active user
const credentials = { username: activeIndex === 0 ? username2 : username1, password: generatePassword() };
const credentials = {
username: activeIndex === 0 ? username2 : username1,
password: generatePassword(passwordRequirement)
};
try {
await executeOperation(async (client) => {

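The rotation now branches on the connection app: OracleDB gets a 30-character generated password (older Oracle versions are commonly limited to 30-byte passwords, so the 48-character default would be rejected) and a FROM DUAL validation query, since Oracle does not allow a SELECT without a FROM clause. A standalone illustration of the branching; the plain "OracleDB" string stands in for the AppConnection.OracleDB enum member used by the factory:

const validationQueryFor = (app: string) => (app === "OracleDB" ? "SELECT 1 FROM DUAL" : "SELECT 1");
const passwordLengthFor = (app: string) => (app === "OracleDB" ? 30 : 48);

console.log(validationQueryFor("OracleDB"), passwordLengthFor("OracleDB")); // SELECT 1 FROM DUAL 30
console.log(validationQueryFor("Postgres"), passwordLengthFor("Postgres")); // SELECT 1 48
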
View File

@@ -11,7 +11,7 @@ type TPasswordRequirements = {
allowedSymbols?: string;
};
const DEFAULT_PASSWORD_REQUIREMENTS: TPasswordRequirements = {
export const DEFAULT_PASSWORD_REQUIREMENTS: TPasswordRequirements = {
length: 48,
required: {
lowercase: 1,

View File

@@ -1,11 +1,21 @@
import { AxiosError } from "axios";
import { exec } from "child_process";
import { join } from "path";
import picomatch from "picomatch";
import RE2 from "re2";
import { readFindingsFile } from "@app/ee/services/secret-scanning/secret-scanning-queue/secret-scanning-fns";
import {
createTempFolder,
deleteTempFolder,
readFindingsFile,
writeTextToFile
} from "@app/ee/services/secret-scanning/secret-scanning-queue/secret-scanning-fns";
import { SecretMatch } from "@app/ee/services/secret-scanning/secret-scanning-queue/secret-scanning-queue-types";
import { BITBUCKET_SECRET_SCANNING_DATA_SOURCE_LIST_OPTION } from "@app/ee/services/secret-scanning-v2/bitbucket";
import { GITHUB_SECRET_SCANNING_DATA_SOURCE_LIST_OPTION } from "@app/ee/services/secret-scanning-v2/github";
import { getConfig } from "@app/lib/config/env";
import { crypto } from "@app/lib/crypto";
import { BadRequestError } from "@app/lib/errors";
import { titleCaseToCamelCase } from "@app/lib/fn";
import { SecretScanningDataSource, SecretScanningFindingSeverity } from "./secret-scanning-v2-enums";
@@ -46,6 +56,19 @@ export function scanDirectory(inputPath: string, outputPath: string, configPath?
});
}
export function scanFile(inputPath: string): Promise<void> {
return new Promise((resolve, reject) => {
const command = `infisical scan --exit-code=77 --source "${inputPath}" --no-git`;
exec(command, (error) => {
if (error && error.code === 77) {
reject(error);
} else {
resolve();
}
});
});
}
export const scanGitRepositoryAndGetFindings = async (
scanPath: string,
findingsPath: string,
@@ -140,3 +163,47 @@ export const parseScanErrorMessage = (err: unknown): string => {
? errorMessage
: `${errorMessage.substring(0, MAX_MESSAGE_LENGTH - 3)}...`;
};
export const scanSecretPolicyViolations = async (
projectId: string,
secretPath: string,
secrets: { secretKey: string; secretValue: string }[],
ignoreValues: string[]
) => {
const appCfg = getConfig();
if (!appCfg.PARAMS_FOLDER_SECRET_DETECTION_ENABLED) {
return;
}
const match = appCfg.PARAMS_FOLDER_SECRET_DETECTION_PATHS?.find(
(el) => el.projectId === projectId && picomatch.isMatch(secretPath, el.secretPath, { strictSlashes: false })
);
if (!match) {
return;
}
const tempFolder = await createTempFolder();
try {
const scanPromises = secrets
.filter((secret) => !ignoreValues.includes(secret.secretValue))
.map(async (secret) => {
const secretFilePath = join(tempFolder, `${crypto.nativeCrypto.randomUUID()}.txt`);
await writeTextToFile(secretFilePath, `${secret.secretKey}=${secret.secretValue}`);
try {
await scanFile(secretFilePath);
} catch (error) {
throw new BadRequestError({
message: `Secret value detected in ${secret.secretKey}. Please add this instead to the designated secrets path in the project.`,
name: "SecretPolicyViolation"
});
}
});
await Promise.all(scanPromises);
} finally {
await deleteTempFolder(tempFolder);
}
};
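
scanSecretPolicyViolations is gated twice: the PARAMS_FOLDER_SECRET_DETECTION_ENABLED flag derived from the configured paths must be on, and the target project and secret path must match one of the configured globs. A small self-contained sketch of the matching step, assuming picomatch is available and using placeholder IDs and globs:

import picomatch from "picomatch";

// PARAMS_FOLDER_SECRET_DETECTION_PATHS is provided as a JSON string in the environment.
const detectionPaths = JSON.parse(
  '[{"projectId":"<project-id>","secretPath":"/params/**"}]'
) as { projectId: string; secretPath: string }[];

const shouldScan = (projectId: string, secretPath: string) =>
  detectionPaths.some(
    (el) => el.projectId === projectId && picomatch.isMatch(secretPath, el.secretPath, { strictSlashes: false })
  );

console.log(shouldScan("<project-id>", "/params/api")); // true, secrets written here are scanned
console.log(shouldScan("<project-id>", "/secrets")); // false, path not covered, no scan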

View File

@@ -704,7 +704,8 @@ export const PROJECTS = {
hasDeleteProtection: "Enable or disable delete protection for the project.",
secretSharing: "Enable or disable secret sharing for the project.",
showSnapshotsLegacy: "Enable or disable legacy snapshots for the project.",
defaultProduct: "The default product in which the project will open"
defaultProduct: "The default product in which the project will open",
secretDetectionIgnoreValues: "The list of secret values to ignore for secret detection."
},
GET_KEY: {
workspaceId: "The ID of the project to get the key from."
@@ -2245,7 +2246,9 @@ export const AppConnections = {
},
AZURE_CLIENT_SECRETS: {
code: "The OAuth code to use to connect with Azure Client Secrets.",
tenantId: "The Tenant ID to use to connect with Azure Client Secrets."
tenantId: "The Tenant ID to use to connect with Azure Client Secrets.",
clientId: "The Client ID to use to connect with Azure Client Secrets.",
clientSecret: "The Client Secret to use to connect with Azure Client Secrets."
},
AZURE_DEVOPS: {
code: "The OAuth code to use to connect with Azure DevOps.",
@@ -2373,6 +2376,10 @@ export const SecretSyncs = {
keyId: "The AWS KMS key ID or alias to use when encrypting parameters synced by Infisical.",
tags: "Optional tags to add to secrets synced by Infisical.",
syncSecretMetadataAsTags: `Whether Infisical secret metadata should be added as tags to secrets synced by Infisical.`
},
RENDER: {
autoRedeployServices:
"Whether Infisical should automatically redeploy the configured Render service upon secret changes."
}
},
DESTINATION_CONFIG: {

View File

@@ -204,6 +204,17 @@ const envSchema = z
WORKFLOW_SLACK_CLIENT_SECRET: zpStr(z.string().optional()),
ENABLE_MSSQL_SECRET_ROTATION_ENCRYPT: zodStrBool.default("true"),
// Special Detection Feature
PARAMS_FOLDER_SECRET_DETECTION_PATHS: zpStr(
z
.string()
.optional()
.transform((val) => {
if (!val) return undefined;
return JSON.parse(val) as { secretPath: string; projectId: string }[];
})
),
// HSM
HSM_LIB_PATH: zpStr(z.string().optional()),
HSM_PIN: zpStr(z.string().optional()),
@@ -261,10 +272,26 @@ const envSchema = z
// gcp app
INF_APP_CONNECTION_GCP_SERVICE_ACCOUNT_CREDENTIAL: zpStr(z.string().optional()),
// azure app
// Legacy Single Multi Purpose Azure App Connection
INF_APP_CONNECTION_AZURE_CLIENT_ID: zpStr(z.string().optional()),
INF_APP_CONNECTION_AZURE_CLIENT_SECRET: zpStr(z.string().optional()),
// Azure App Configuration App Connection
INF_APP_CONNECTION_AZURE_APP_CONFIGURATION_CLIENT_ID: zpStr(z.string().optional()),
INF_APP_CONNECTION_AZURE_APP_CONFIGURATION_CLIENT_SECRET: zpStr(z.string().optional()),
// Azure Key Vault App Connection
INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_ID: zpStr(z.string().optional()),
INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_SECRET: zpStr(z.string().optional()),
// Azure Client Secrets App Connection
INF_APP_CONNECTION_AZURE_CLIENT_SECRETS_CLIENT_ID: zpStr(z.string().optional()),
INF_APP_CONNECTION_AZURE_CLIENT_SECRETS_CLIENT_SECRET: zpStr(z.string().optional()),
// Azure DevOps App Connection
INF_APP_CONNECTION_AZURE_DEVOPS_CLIENT_ID: zpStr(z.string().optional()),
INF_APP_CONNECTION_AZURE_DEVOPS_CLIENT_SECRET: zpStr(z.string().optional()),
// datadog
SHOULD_USE_DATADOG_TRACER: zodStrBool.default("false"),
DATADOG_PROFILING_ENABLED: zodStrBool.default("false"),
@@ -341,7 +368,24 @@ const envSchema = z
isHsmConfigured:
Boolean(data.HSM_LIB_PATH) && Boolean(data.HSM_PIN) && Boolean(data.HSM_KEY_LABEL) && data.HSM_SLOT !== undefined,
samlDefaultOrgSlug: data.DEFAULT_SAML_ORG_SLUG,
SECRET_SCANNING_ORG_WHITELIST: data.SECRET_SCANNING_ORG_WHITELIST?.split(",")
SECRET_SCANNING_ORG_WHITELIST: data.SECRET_SCANNING_ORG_WHITELIST?.split(","),
PARAMS_FOLDER_SECRET_DETECTION_ENABLED: (data.PARAMS_FOLDER_SECRET_DETECTION_PATHS?.length ?? 0) > 0,
INF_APP_CONNECTION_AZURE_DEVOPS_CLIENT_ID:
data.INF_APP_CONNECTION_AZURE_DEVOPS_CLIENT_ID || data.INF_APP_CONNECTION_AZURE_CLIENT_ID,
INF_APP_CONNECTION_AZURE_DEVOPS_CLIENT_SECRET:
data.INF_APP_CONNECTION_AZURE_DEVOPS_CLIENT_SECRET || data.INF_APP_CONNECTION_AZURE_CLIENT_SECRET,
INF_APP_CONNECTION_AZURE_CLIENT_SECRETS_CLIENT_ID:
data.INF_APP_CONNECTION_AZURE_CLIENT_SECRETS_CLIENT_ID || data.INF_APP_CONNECTION_AZURE_CLIENT_ID,
INF_APP_CONNECTION_AZURE_CLIENT_SECRETS_CLIENT_SECRET:
data.INF_APP_CONNECTION_AZURE_CLIENT_SECRETS_CLIENT_SECRET || data.INF_APP_CONNECTION_AZURE_CLIENT_SECRET,
INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_ID:
data.INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_ID || data.INF_APP_CONNECTION_AZURE_CLIENT_ID,
INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_SECRET:
data.INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_SECRET || data.INF_APP_CONNECTION_AZURE_CLIENT_SECRET,
INF_APP_CONNECTION_AZURE_APP_CONFIGURATION_CLIENT_ID:
data.INF_APP_CONNECTION_AZURE_APP_CONFIGURATION_CLIENT_ID || data.INF_APP_CONNECTION_AZURE_CLIENT_ID,
INF_APP_CONNECTION_AZURE_APP_CONFIGURATION_CLIENT_SECRET:
data.INF_APP_CONNECTION_AZURE_APP_CONFIGURATION_CLIENT_SECRET || data.INF_APP_CONNECTION_AZURE_CLIENT_SECRET
}));
export type TEnvConfig = Readonly<z.infer<typeof envSchema>>;
@@ -451,15 +495,54 @@ export const overwriteSchema: {
}
]
},
azure: {
name: "Azure",
azureAppConfiguration: {
name: "Azure App Configuration",
fields: [
{
key: "INF_APP_CONNECTION_AZURE_CLIENT_ID",
key: "INF_APP_CONNECTION_AZURE_APP_CONFIGURATION_CLIENT_ID",
description: "The Application (Client) ID of your Azure application."
},
{
key: "INF_APP_CONNECTION_AZURE_CLIENT_SECRET",
key: "INF_APP_CONNECTION_AZURE_APP_CONFIGURATION_CLIENT_SECRET",
description: "The Client Secret of your Azure application."
}
]
},
azureKeyVault: {
name: "Azure Key Vault",
fields: [
{
key: "INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_ID",
description: "The Application (Client) ID of your Azure application."
},
{
key: "INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_SECRET",
description: "The Client Secret of your Azure application."
}
]
},
azureClientSecrets: {
name: "Azure Client Secrets",
fields: [
{
key: "INF_APP_CONNECTION_AZURE_CLIENT_SECRETS_CLIENT_ID",
description: "The Application (Client) ID of your Azure application."
},
{
key: "INF_APP_CONNECTION_AZURE_CLIENT_SECRETS_CLIENT_SECRET",
description: "The Client Secret of your Azure application."
}
]
},
azureDevOps: {
name: "Azure DevOps",
fields: [
{
key: "INF_APP_CONNECTION_AZURE_DEVOPS_CLIENT_ID",
description: "The Application (Client) ID of your Azure application."
},
{
key: "INF_APP_CONNECTION_AZURE_DEVOPS_CLIENT_SECRET",
description: "The Client Secret of your Azure application."
}
]

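Each Azure app connection now reads its own client ID and secret pair, with the legacy INF_APP_CONNECTION_AZURE_CLIENT_ID and INF_APP_CONNECTION_AZURE_CLIENT_SECRET kept as a fallback so existing deployments keep working. A minimal sketch of that precedence for the DevOps pair; the same pattern applies to App Configuration, Key Vault, and Client Secrets:

type RawAzureEnv = {
  INF_APP_CONNECTION_AZURE_CLIENT_ID?: string;
  INF_APP_CONNECTION_AZURE_DEVOPS_CLIENT_ID?: string;
};

const resolveAzureDevOpsClientId = (env: RawAzureEnv) =>
  env.INF_APP_CONNECTION_AZURE_DEVOPS_CLIENT_ID || env.INF_APP_CONNECTION_AZURE_CLIENT_ID;

console.log(resolveAzureDevOpsClientId({ INF_APP_CONNECTION_AZURE_CLIENT_ID: "legacy-id" })); // "legacy-id"
console.log(
  resolveAzureDevOpsClientId({
    INF_APP_CONNECTION_AZURE_CLIENT_ID: "legacy-id",
    INF_APP_CONNECTION_AZURE_DEVOPS_CLIENT_ID: "devops-id"
  })
); // "devops-id"
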
View File

@@ -14,7 +14,7 @@ import { TSuperAdminDALFactory } from "@app/services/super-admin/super-admin-dal
import { ADMIN_CONFIG_DB_UUID } from "@app/services/super-admin/super-admin-service";
import { isBase64 } from "../../base64";
import { getConfig } from "../../config/env";
import { getConfig, TEnvConfig } from "../../config/env";
import { CryptographyError } from "../../errors";
import { logger } from "../../logger";
import { asymmetricFipsValidated } from "./asymmetric-fips";
@@ -106,12 +106,12 @@ const cryptographyFactory = () => {
}
};
const $setFipsModeEnabled = (enabled: boolean) => {
const $setFipsModeEnabled = (enabled: boolean, envCfg?: Pick<TEnvConfig, "ENCRYPTION_KEY">) => {
// If FIPS is enabled, we need to validate that the ENCRYPTION_KEY is in a base64 format, and is a 256-bit key.
if (enabled) {
crypto.setFips(true);
const appCfg = getConfig();
const appCfg = envCfg || getConfig();
if (appCfg.ENCRYPTION_KEY) {
// we need to validate that the ENCRYPTION_KEY is a base64 encoded 256-bit key
@@ -141,14 +141,14 @@ const cryptographyFactory = () => {
$isInitialized = true;
};
const initialize = async (superAdminDAL: TSuperAdminDALFactory) => {
const initialize = async (superAdminDAL: TSuperAdminDALFactory, envCfg?: Pick<TEnvConfig, "ENCRYPTION_KEY">) => {
if ($isInitialized) {
return isFipsModeEnabled();
}
if (process.env.FIPS_ENABLED !== "true") {
logger.info("Cryptography module initialized in normal operation mode.");
$setFipsModeEnabled(false);
$setFipsModeEnabled(false, envCfg);
return false;
}
@@ -158,11 +158,11 @@ const cryptographyFactory = () => {
if (serverCfg) {
if (serverCfg.fipsEnabled) {
logger.info("[FIPS]: Instance is configured for FIPS mode of operation. Continuing startup with FIPS enabled.");
$setFipsModeEnabled(true);
$setFipsModeEnabled(true, envCfg);
return true;
}
logger.info("[FIPS]: Instance age predates FIPS mode inception date. Continuing without FIPS.");
$setFipsModeEnabled(false);
$setFipsModeEnabled(false, envCfg);
return false;
}
@@ -171,7 +171,7 @@ const cryptographyFactory = () => {
// TODO(daniel): check if it's an enterprise deployment
// if there is no server cfg, and FIPS_MODE is `true`, its a fresh FIPS deployment. We need to set the fipsEnabled to true.
$setFipsModeEnabled(true);
$setFipsModeEnabled(true, envCfg);
return true;
};

View File

@@ -162,6 +162,12 @@ export const injectIdentity = fp(async (server: FastifyZodProvider) => {
kubernetes: token?.identityAuth?.kubernetes
});
}
if (token?.identityAuth?.aws) {
requestContext.set("identityAuthInfo", {
identityId: identity.identityId,
aws: token?.identityAuth?.aws
});
}
break;
}
case AuthMode.SERVICE_TOKEN: {

View File

@@ -11,6 +11,7 @@ import {
accessApprovalPolicyBypasserDALFactory
} from "@app/ee/services/access-approval-policy/access-approval-policy-approver-dal";
import { accessApprovalPolicyDALFactory } from "@app/ee/services/access-approval-policy/access-approval-policy-dal";
import { accessApprovalPolicyEnvironmentDALFactory } from "@app/ee/services/access-approval-policy/access-approval-policy-environment-dal";
import { accessApprovalPolicyServiceFactory } from "@app/ee/services/access-approval-policy/access-approval-policy-service";
import { accessApprovalRequestDALFactory } from "@app/ee/services/access-approval-request/access-approval-request-dal";
import { accessApprovalRequestReviewerDALFactory } from "@app/ee/services/access-approval-request/access-approval-request-reviewer-dal";
@@ -76,6 +77,7 @@ import {
secretApprovalPolicyBypasserDALFactory
} from "@app/ee/services/secret-approval-policy/secret-approval-policy-approver-dal";
import { secretApprovalPolicyDALFactory } from "@app/ee/services/secret-approval-policy/secret-approval-policy-dal";
import { secretApprovalPolicyEnvironmentDALFactory } from "@app/ee/services/secret-approval-policy/secret-approval-policy-environment-dal";
import { secretApprovalPolicyServiceFactory } from "@app/ee/services/secret-approval-policy/secret-approval-policy-service";
import { secretApprovalRequestDALFactory } from "@app/ee/services/secret-approval-request/secret-approval-request-dal";
import { secretApprovalRequestReviewerDALFactory } from "@app/ee/services/secret-approval-request/secret-approval-request-reviewer-dal";
@@ -425,9 +427,11 @@ export const registerRoutes = async (
const accessApprovalPolicyApproverDAL = accessApprovalPolicyApproverDALFactory(db);
const accessApprovalPolicyBypasserDAL = accessApprovalPolicyBypasserDALFactory(db);
const accessApprovalRequestReviewerDAL = accessApprovalRequestReviewerDALFactory(db);
const accessApprovalPolicyEnvironmentDAL = accessApprovalPolicyEnvironmentDALFactory(db);
const sapApproverDAL = secretApprovalPolicyApproverDALFactory(db);
const sapBypasserDAL = secretApprovalPolicyBypasserDALFactory(db);
const sapEnvironmentDAL = secretApprovalPolicyEnvironmentDALFactory(db);
const secretApprovalPolicyDAL = secretApprovalPolicyDALFactory(db);
const secretApprovalRequestDAL = secretApprovalRequestDALFactory(db);
const secretApprovalRequestReviewerDAL = secretApprovalRequestReviewerDALFactory(db);
@@ -561,6 +565,7 @@ export const registerRoutes = async (
projectEnvDAL,
secretApprovalPolicyApproverDAL: sapApproverDAL,
secretApprovalPolicyBypasserDAL: sapBypasserDAL,
secretApprovalPolicyEnvironmentDAL: sapEnvironmentDAL,
permissionService,
secretApprovalPolicyDAL,
licenseService,
@@ -1156,7 +1161,9 @@ export const registerRoutes = async (
keyStore,
licenseService,
projectDAL,
folderDAL
folderDAL,
accessApprovalPolicyEnvironmentDAL,
secretApprovalPolicyEnvironmentDAL: sapEnvironmentDAL
});
const projectRoleService = projectRoleServiceFactory({
@@ -1231,6 +1238,7 @@ export const registerRoutes = async (
const secretV2BridgeService = secretV2BridgeServiceFactory({
folderDAL,
projectDAL,
secretVersionDAL: secretVersionV2BridgeDAL,
folderCommitService,
secretQueueService,
@@ -1317,6 +1325,7 @@ export const registerRoutes = async (
accessApprovalPolicyDAL,
accessApprovalPolicyApproverDAL,
accessApprovalPolicyBypasserDAL,
accessApprovalPolicyEnvironmentDAL,
groupDAL,
permissionService,
projectEnvDAL,

View File

@@ -93,6 +93,13 @@ export const sapPubSchema = SecretApprovalPoliciesSchema.merge(
name: z.string(),
slug: z.string()
}),
environments: z.array(
z.object({
id: z.string(),
name: z.string(),
slug: z.string()
})
),
projectId: z.string()
})
);
@@ -264,7 +271,8 @@ export const SanitizedProjectSchema = ProjectsSchema.pick({
auditLogsRetentionDays: true,
hasDeleteProtection: true,
secretSharing: true,
showSnapshotsLegacy: true
showSnapshotsLegacy: true,
secretDetectionIgnoreValues: true
});
export const SanitizedTagSchema = SecretTagsSchema.pick({

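API consumers of the policy endpoints should expect both the legacy singular environment object and the new environments array on each policy. A trimmed zod stand-in for the added fields with made-up data (the real sapPubSchema merges these into the full policy schema):

import { z } from "zod";

const PolicyEnvSchema = z.object({
  environment: z.object({ id: z.string(), name: z.string(), slug: z.string() }),
  environments: z.array(z.object({ id: z.string(), name: z.string(), slug: z.string() })),
  projectId: z.string()
});

console.log(
  PolicyEnvSchema.parse({
    environment: { id: "e1", name: "Development", slug: "dev" },
    environments: [
      { id: "e1", name: "Development", slug: "dev" },
      { id: "e2", name: "Staging", slug: "staging" }
    ],
    projectId: "project-id"
  })
);
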
View File

@@ -52,7 +52,8 @@ export const registerAdminRouter = async (server: FastifyZodProvider) => {
defaultAuthOrgAuthEnforced: z.boolean().nullish(),
defaultAuthOrgAuthMethod: z.string().nullish(),
isSecretScanningDisabled: z.boolean(),
kubernetesAutoFetchServiceAccountToken: z.boolean()
kubernetesAutoFetchServiceAccountToken: z.boolean(),
paramsFolderSecretDetectionEnabled: z.boolean()
})
})
}
@@ -67,7 +68,8 @@ export const registerAdminRouter = async (server: FastifyZodProvider) => {
fipsEnabled: crypto.isFipsModeEnabled(),
isMigrationModeOn: serverEnvs.MAINTENANCE_MODE,
isSecretScanningDisabled: serverEnvs.DISABLE_SECRET_SCANNING,
kubernetesAutoFetchServiceAccountToken: serverEnvs.KUBERNETES_AUTO_FETCH_SERVICE_ACCOUNT_TOKEN
kubernetesAutoFetchServiceAccountToken: serverEnvs.KUBERNETES_AUTO_FETCH_SERVICE_ACCOUNT_TOKEN,
paramsFolderSecretDetectionEnabled: serverEnvs.PARAMS_FOLDER_SECRET_DETECTION_ENABLED
}
};
}

View File

@@ -270,11 +270,6 @@ export const registerDashboardRouter = async (server: FastifyZodProvider) => {
}
}
});
remainingLimit -= imports.length;
adjustedOffset = 0;
} else {
adjustedOffset = Math.max(0, adjustedOffset - totalImportCount);
}
}
@@ -317,7 +312,7 @@ export const registerDashboardRouter = async (server: FastifyZodProvider) => {
}
}
if (!includeDynamicSecrets && !includeSecrets)
if (!includeDynamicSecrets && !includeSecrets && !includeSecretRotations)
return {
folders,
totalFolderCount,
@@ -547,7 +542,6 @@ export const registerDashboardRouter = async (server: FastifyZodProvider) => {
(totalFolderCount ?? 0) +
(totalDynamicSecretCount ?? 0) +
(totalSecretCount ?? 0) +
(totalImportCount ?? 0) +
(totalSecretRotationCount ?? 0)
};
}

View File

@@ -369,7 +369,11 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
.describe(PROJECTS.UPDATE.slug),
secretSharing: z.boolean().optional().describe(PROJECTS.UPDATE.secretSharing),
showSnapshotsLegacy: z.boolean().optional().describe(PROJECTS.UPDATE.showSnapshotsLegacy),
defaultProduct: z.nativeEnum(ProjectType).optional().describe(PROJECTS.UPDATE.defaultProduct)
defaultProduct: z.nativeEnum(ProjectType).optional().describe(PROJECTS.UPDATE.defaultProduct),
secretDetectionIgnoreValues: z
.array(z.string())
.optional()
.describe(PROJECTS.UPDATE.secretDetectionIgnoreValues)
}),
response: {
200: z.object({
@@ -392,7 +396,8 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
hasDeleteProtection: req.body.hasDeleteProtection,
slug: req.body.slug,
secretSharing: req.body.secretSharing,
showSnapshotsLegacy: req.body.showSnapshotsLegacy
showSnapshotsLegacy: req.body.showSnapshotsLegacy,
secretDetectionIgnoreValues: req.body.secretDetectionIgnoreValues
},
actorAuthMethod: req.permission.authMethod,
actorId: req.permission.id,

View File

@@ -1,9 +1,11 @@
import fastifyMultipart from "@fastify/multipart";
import { z } from "zod";
import { BadRequestError } from "@app/lib/errors";
import { readLimit } from "@app/server/config/rateLimiter";
import { writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
import { VaultMappingType } from "@app/services/external-migration/external-migration-types";
const MB25_IN_BYTES = 26214400;
@@ -15,7 +17,7 @@ export const registerExternalMigrationRouter = async (server: FastifyZodProvider
bodyLimit: MB25_IN_BYTES,
url: "/env-key",
config: {
rateLimit: readLimit
rateLimit: writeLimit
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
@@ -52,4 +54,30 @@ export const registerExternalMigrationRouter = async (server: FastifyZodProvider
});
}
});
server.route({
method: "POST",
url: "/vault",
config: {
rateLimit: writeLimit
},
schema: {
body: z.object({
vaultAccessToken: z.string(),
vaultNamespace: z.string().trim().optional(),
vaultUrl: z.string(),
mappingType: z.nativeEnum(VaultMappingType)
})
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
await server.services.migration.importVaultData({
actorId: req.permission.id,
actor: req.permission.type,
actorOrgId: req.permission.orgId,
actorAuthMethod: req.permission.authMethod,
...req.body
});
}
});
};
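
A hypothetical client call for the new Vault import route. Only the body fields and the /external-migration/vault suffix come from this diff; the host, the /api/v3 base path, and the mappingType value are assumptions (VaultMappingType's members are not listed here):

const response = await fetch("https://app.infisical.com/api/v3/external-migration/vault", {
  method: "POST",
  headers: {
    Authorization: `Bearer ${process.env.INFISICAL_TOKEN}`,
    "Content-Type": "application/json"
  },
  body: JSON.stringify({
    vaultUrl: "https://vault.example.com",
    vaultNamespace: "admin", // optional
    vaultAccessToken: process.env.VAULT_TOKEN,
    mappingType: "<VaultMappingType value>" // placeholder, enum values not shown in this diff
  })
});
console.log(response.status);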

View File

@@ -11,5 +11,5 @@ export const registerV3Routes = async (server: FastifyZodProvider) => {
await server.register(registerUserRouter, { prefix: "/users" });
await server.register(registerSecretRouter, { prefix: "/secrets" });
await server.register(registerSecretBlindIndexRouter, { prefix: "/workspaces" });
await server.register(registerExternalMigrationRouter, { prefix: "/migrate" });
await server.register(registerExternalMigrationRouter, { prefix: "/external-migration" });
};

View File

@@ -14,13 +14,13 @@ import {
} from "./azure-app-configuration-connection-types";
export const getAzureAppConfigurationConnectionListItem = () => {
const { INF_APP_CONNECTION_AZURE_CLIENT_ID } = getConfig();
const { INF_APP_CONNECTION_AZURE_APP_CONFIGURATION_CLIENT_ID } = getConfig();
return {
name: "Azure App Configuration" as const,
app: AppConnection.AzureAppConfiguration as const,
methods: Object.values(AzureAppConfigurationConnectionMethod) as [AzureAppConfigurationConnectionMethod.OAuth],
oauthClientId: INF_APP_CONNECTION_AZURE_CLIENT_ID
oauthClientId: INF_APP_CONNECTION_AZURE_APP_CONFIGURATION_CLIENT_ID
};
};
@@ -29,9 +29,16 @@ export const validateAzureAppConfigurationConnectionCredentials = async (
) => {
const { credentials: inputCredentials, method } = config;
const { INF_APP_CONNECTION_AZURE_CLIENT_ID, INF_APP_CONNECTION_AZURE_CLIENT_SECRET, SITE_URL } = getConfig();
const {
INF_APP_CONNECTION_AZURE_APP_CONFIGURATION_CLIENT_ID,
INF_APP_CONNECTION_AZURE_APP_CONFIGURATION_CLIENT_SECRET,
SITE_URL
} = getConfig();
if (!INF_APP_CONNECTION_AZURE_CLIENT_ID || !INF_APP_CONNECTION_AZURE_CLIENT_SECRET) {
if (
!INF_APP_CONNECTION_AZURE_APP_CONFIGURATION_CLIENT_ID ||
!INF_APP_CONNECTION_AZURE_APP_CONFIGURATION_CLIENT_SECRET
) {
throw new InternalServerError({
message: `Azure ${getAppConnectionMethodName(method)} environment variables have not been configured`
});
@@ -47,8 +54,8 @@ export const validateAzureAppConfigurationConnectionCredentials = async (
grant_type: "authorization_code",
code: inputCredentials.code,
scope: `openid offline_access https://azconfig.io/.default`,
client_id: INF_APP_CONNECTION_AZURE_CLIENT_ID,
client_secret: INF_APP_CONNECTION_AZURE_CLIENT_SECRET,
client_id: INF_APP_CONNECTION_AZURE_APP_CONFIGURATION_CLIENT_ID,
client_secret: INF_APP_CONNECTION_AZURE_APP_CONFIGURATION_CLIENT_SECRET,
redirect_uri: `${SITE_URL}/organization/app-connections/azure/oauth/callback`
})
);

View File

@@ -1,3 +1,4 @@
export enum AzureClientSecretsConnectionMethod {
OAuth = "oauth"
OAuth = "oauth",
ClientSecret = "client-secret"
}

View File

@@ -1,3 +1,4 @@
/* eslint-disable no-case-declarations */
import { AxiosError, AxiosResponse } from "axios";
import { getConfig } from "@app/lib/config/env";
@@ -16,18 +17,22 @@ import { AppConnection } from "../app-connection-enums";
import { AzureClientSecretsConnectionMethod } from "./azure-client-secrets-connection-enums";
import {
ExchangeCodeAzureResponse,
TAzureClientSecretsConnectionClientSecretCredentials,
TAzureClientSecretsConnectionConfig,
TAzureClientSecretsConnectionCredentials
} from "./azure-client-secrets-connection-types";
export const getAzureClientSecretsConnectionListItem = () => {
const { INF_APP_CONNECTION_AZURE_CLIENT_ID } = getConfig();
const { INF_APP_CONNECTION_AZURE_CLIENT_SECRETS_CLIENT_ID } = getConfig();
return {
name: "Azure Client Secrets" as const,
app: AppConnection.AzureClientSecrets as const,
methods: Object.values(AzureClientSecretsConnectionMethod) as [AzureClientSecretsConnectionMethod.OAuth],
oauthClientId: INF_APP_CONNECTION_AZURE_CLIENT_ID
methods: Object.values(AzureClientSecretsConnectionMethod) as [
AzureClientSecretsConnectionMethod.OAuth,
AzureClientSecretsConnectionMethod.ClientSecret
],
oauthClientId: INF_APP_CONNECTION_AZURE_CLIENT_SECRETS_CLIENT_ID
};
};
@@ -37,12 +42,6 @@ export const getAzureConnectionAccessToken = async (
kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">
) => {
const appCfg = getConfig();
if (!appCfg.INF_APP_CONNECTION_AZURE_CLIENT_ID || !appCfg.INF_APP_CONNECTION_AZURE_CLIENT_SECRET) {
throw new BadRequestError({
message: `Azure environment variables have not been configured`
});
}
const appConnection = await appConnectionDAL.findById(connectionId);
if (!appConnection) {
@@ -63,104 +62,195 @@ export const getAzureConnectionAccessToken = async (
const { refreshToken } = credentials;
const currentTime = Date.now();
switch (appConnection.method) {
case AzureClientSecretsConnectionMethod.OAuth:
if (
!appCfg.INF_APP_CONNECTION_AZURE_CLIENT_SECRETS_CLIENT_ID ||
!appCfg.INF_APP_CONNECTION_AZURE_CLIENT_SECRETS_CLIENT_SECRET
) {
throw new BadRequestError({
message: `Azure OAuth environment variables have not been configured`
});
}
const { data } = await request.post<ExchangeCodeAzureResponse>(
IntegrationUrls.AZURE_TOKEN_URL.replace("common", credentials.tenantId || "common"),
new URLSearchParams({
grant_type: "refresh_token",
scope: `openid offline_access https://graph.microsoft.com/.default`,
client_id: appCfg.INF_APP_CONNECTION_AZURE_CLIENT_SECRETS_CLIENT_ID,
client_secret: appCfg.INF_APP_CONNECTION_AZURE_CLIENT_SECRETS_CLIENT_SECRET,
refresh_token: refreshToken
})
);
const { data } = await request.post<ExchangeCodeAzureResponse>(
IntegrationUrls.AZURE_TOKEN_URL.replace("common", credentials.tenantId || "common"),
new URLSearchParams({
grant_type: "refresh_token",
scope: `openid offline_access https://graph.microsoft.com/.default`,
client_id: appCfg.INF_APP_CONNECTION_AZURE_CLIENT_ID,
client_secret: appCfg.INF_APP_CONNECTION_AZURE_CLIENT_SECRET,
refresh_token: refreshToken
})
);
const updatedCredentials = {
...credentials,
accessToken: data.access_token,
expiresAt: currentTime + data.expires_in * 1000,
refreshToken: data.refresh_token
};
const updatedCredentials = {
...credentials,
accessToken: data.access_token,
expiresAt: currentTime + data.expires_in * 1000,
refreshToken: data.refresh_token
};
const encryptedCredentials = await encryptAppConnectionCredentials({
credentials: updatedCredentials,
orgId: appConnection.orgId,
kmsService
});
const encryptedCredentials = await encryptAppConnectionCredentials({
credentials: updatedCredentials,
orgId: appConnection.orgId,
kmsService
});
await appConnectionDAL.updateById(appConnection.id, { encryptedCredentials });
await appConnectionDAL.updateById(appConnection.id, { encryptedCredentials });
return data.access_token;
case AzureClientSecretsConnectionMethod.ClientSecret:
const accessTokenCredentials = (await decryptAppConnectionCredentials({
orgId: appConnection.orgId,
kmsService,
encryptedCredentials: appConnection.encryptedCredentials
})) as TAzureClientSecretsConnectionClientSecretCredentials;
const { accessToken, expiresAt, clientId, clientSecret, tenantId } = accessTokenCredentials;
if (accessToken && expiresAt && expiresAt > currentTime + 300000) {
return accessToken;
}
return data.access_token;
const { data: clientData } = await request.post<ExchangeCodeAzureResponse>(
IntegrationUrls.AZURE_TOKEN_URL.replace("common", tenantId || "common"),
new URLSearchParams({
grant_type: "client_credentials",
scope: `https://graph.microsoft.com/.default`,
client_id: clientId,
client_secret: clientSecret
})
);
const updatedClientCredentials = {
...accessTokenCredentials,
accessToken: clientData.access_token,
expiresAt: currentTime + clientData.expires_in * 1000
};
const encryptedClientCredentials = await encryptAppConnectionCredentials({
credentials: updatedClientCredentials,
orgId: appConnection.orgId,
kmsService
});
await appConnectionDAL.updateById(appConnection.id, { encryptedCredentials: encryptedClientCredentials });
return clientData.access_token;
default:
throw new InternalServerError({
message: `Unhandled Azure connection method: ${appConnection.method as AzureClientSecretsConnectionMethod}`
});
}
};
export const validateAzureClientSecretsConnectionCredentials = async (config: TAzureClientSecretsConnectionConfig) => {
const { credentials: inputCredentials, method } = config;
const { INF_APP_CONNECTION_AZURE_CLIENT_ID, INF_APP_CONNECTION_AZURE_CLIENT_SECRET, SITE_URL } = getConfig();
if (!SITE_URL) {
throw new InternalServerError({ message: "SITE_URL env var is required to complete Azure OAuth flow" });
}
if (!INF_APP_CONNECTION_AZURE_CLIENT_ID || !INF_APP_CONNECTION_AZURE_CLIENT_SECRET) {
throw new InternalServerError({
message: `Azure ${getAppConnectionMethodName(method)} environment variables have not been configured`
});
}
let tokenResp: AxiosResponse<ExchangeCodeAzureResponse> | null = null;
let tokenError: AxiosError | null = null;
try {
tokenResp = await request.post<ExchangeCodeAzureResponse>(
IntegrationUrls.AZURE_TOKEN_URL.replace("common", inputCredentials.tenantId || "common"),
new URLSearchParams({
grant_type: "authorization_code",
code: inputCredentials.code,
scope: `openid offline_access https://graph.microsoft.com/.default`,
client_id: INF_APP_CONNECTION_AZURE_CLIENT_ID,
client_secret: INF_APP_CONNECTION_AZURE_CLIENT_SECRET,
redirect_uri: `${SITE_URL}/organization/app-connections/azure/oauth/callback`
})
);
} catch (e: unknown) {
if (e instanceof AxiosError) {
tokenError = e;
} else {
throw new BadRequestError({
message: `Unable to validate connection: verify credentials`
});
}
}
if (tokenError) {
if (tokenError instanceof AxiosError) {
throw new BadRequestError({
message: `Failed to get access token: ${
(tokenError?.response?.data as { error_description?: string })?.error_description || "Unknown error"
}`
});
} else {
throw new InternalServerError({
message: "Failed to get access token"
});
}
}
if (!tokenResp) {
throw new InternalServerError({
message: `Failed to get access token: Token was empty with no error`
});
}
const {
INF_APP_CONNECTION_AZURE_CLIENT_SECRETS_CLIENT_ID,
INF_APP_CONNECTION_AZURE_CLIENT_SECRETS_CLIENT_SECRET,
SITE_URL
} = getConfig();
switch (method) {
case AzureClientSecretsConnectionMethod.OAuth:
if (!SITE_URL) {
throw new InternalServerError({ message: "SITE_URL env var is required to complete Azure OAuth flow" });
}
if (
!INF_APP_CONNECTION_AZURE_CLIENT_SECRETS_CLIENT_ID ||
!INF_APP_CONNECTION_AZURE_CLIENT_SECRETS_CLIENT_SECRET
) {
throw new InternalServerError({
message: `Azure ${getAppConnectionMethodName(method)} environment variables have not been configured`
});
}
let tokenResp: AxiosResponse<ExchangeCodeAzureResponse> | null = null;
let tokenError: AxiosError | null = null;
try {
tokenResp = await request.post<ExchangeCodeAzureResponse>(
IntegrationUrls.AZURE_TOKEN_URL.replace("common", inputCredentials.tenantId || "common"),
new URLSearchParams({
grant_type: "authorization_code",
code: inputCredentials.code,
scope: `openid offline_access https://graph.microsoft.com/.default`,
client_id: INF_APP_CONNECTION_AZURE_CLIENT_SECRETS_CLIENT_ID,
client_secret: INF_APP_CONNECTION_AZURE_CLIENT_SECRETS_CLIENT_SECRET,
redirect_uri: `${SITE_URL}/organization/app-connections/azure/oauth/callback`
})
);
} catch (e: unknown) {
if (e instanceof AxiosError) {
tokenError = e;
} else {
throw new BadRequestError({
message: `Unable to validate connection: verify credentials`
});
}
}
if (tokenError) {
if (tokenError instanceof AxiosError) {
throw new BadRequestError({
message: `Failed to get access token: ${
(tokenError?.response?.data as { error_description?: string })?.error_description || "Unknown error"
}`
});
} else {
throw new InternalServerError({
message: "Failed to get access token"
});
}
}
if (!tokenResp) {
throw new InternalServerError({
message: `Failed to get access token: Token was empty with no error`
});
}
return {
tenantId: inputCredentials.tenantId,
accessToken: tokenResp.data.access_token,
refreshToken: tokenResp.data.refresh_token,
expiresAt: Date.now() + tokenResp.data.expires_in * 1000
};
case AzureClientSecretsConnectionMethod.ClientSecret:
const { tenantId, clientId, clientSecret } = inputCredentials;
try {
const { data: clientData } = await request.post<ExchangeCodeAzureResponse>(
IntegrationUrls.AZURE_TOKEN_URL.replace("common", tenantId || "common"),
new URLSearchParams({
grant_type: "client_credentials",
scope: `https://graph.microsoft.com/.default`,
client_id: clientId,
client_secret: clientSecret
})
);
return {
tenantId,
accessToken: clientData.access_token,
expiresAt: Date.now() + clientData.expires_in * 1000,
clientId,
clientSecret
};
} catch (e: unknown) {
if (e instanceof AxiosError) {
throw new BadRequestError({
message: `Failed to get access token: ${
(e?.response?.data as { error_description?: string })?.error_description || "Unknown error"
}`
});
} else {
throw new InternalServerError({
message: "Failed to get access token"
});
}
}
default:
throw new InternalServerError({
message: `Unhandled Azure connection method: ${method as AzureClientSecretsConnectionMethod}`

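The new ClientSecret method boils down to a standard OAuth 2.0 client_credentials exchange against the Microsoft identity platform token endpoint. A standalone sketch using fetch instead of the service's internal request helper and IntegrationUrls constant; tenant and credential values are placeholders:

const tenantId = "<tenant-id>";

const tokenResponse = await fetch(`https://login.microsoftonline.com/${tenantId}/oauth2/v2.0/token`, {
  method: "POST",
  body: new URLSearchParams({
    grant_type: "client_credentials",
    scope: "https://graph.microsoft.com/.default",
    client_id: "<client-id>",
    client_secret: "<client-secret>"
  })
});

const { access_token: accessToken, expires_in: expiresIn } = (await tokenResponse.json()) as {
  access_token: string;
  expires_in: number;
};

// Mirrors the caching above: the token is reused until roughly five minutes before expiry.
const expiresAt = Date.now() + expiresIn * 1000;
console.log(Boolean(accessToken), new Date(expiresAt).toISOString());
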
View File

@@ -26,6 +26,36 @@ export const AzureClientSecretsConnectionOAuthOutputCredentialsSchema = z.object
expiresAt: z.number()
});
export const AzureClientSecretsConnectionClientSecretInputCredentialsSchema = z.object({
clientId: z
.string()
.uuid()
.trim()
.min(1, "Client ID required")
.max(50, "Client ID must be at most 50 characters long")
.describe(AppConnections.CREDENTIALS.AZURE_CLIENT_SECRETS.clientId),
clientSecret: z
.string()
.trim()
.min(1, "Client Secret required")
.max(50, "Client Secret must be at most 50 characters long")
.describe(AppConnections.CREDENTIALS.AZURE_CLIENT_SECRETS.clientSecret),
tenantId: z
.string()
.uuid()
.trim()
.min(1, "Tenant ID required")
.describe(AppConnections.CREDENTIALS.AZURE_CLIENT_SECRETS.tenantId)
});
export const AzureClientSecretsConnectionClientSecretOutputCredentialsSchema = z.object({
clientId: z.string(),
clientSecret: z.string(),
tenantId: z.string(),
accessToken: z.string(),
expiresAt: z.number()
});
export const ValidateAzureClientSecretsConnectionCredentialsSchema = z.discriminatedUnion("method", [
z.object({
method: z
@@ -34,6 +64,14 @@ export const ValidateAzureClientSecretsConnectionCredentialsSchema = z.discrimin
credentials: AzureClientSecretsConnectionOAuthInputCredentialsSchema.describe(
AppConnections.CREATE(AppConnection.AzureClientSecrets).credentials
)
}),
z.object({
method: z
.literal(AzureClientSecretsConnectionMethod.ClientSecret)
.describe(AppConnections.CREATE(AppConnection.AzureClientSecrets).method),
credentials: AzureClientSecretsConnectionClientSecretInputCredentialsSchema.describe(
AppConnections.CREATE(AppConnection.AzureClientSecrets).credentials
)
})
]);
@@ -43,9 +81,13 @@ export const CreateAzureClientSecretsConnectionSchema = ValidateAzureClientSecre
export const UpdateAzureClientSecretsConnectionSchema = z
.object({
credentials: AzureClientSecretsConnectionOAuthInputCredentialsSchema.optional().describe(
AppConnections.UPDATE(AppConnection.AzureClientSecrets).credentials
)
credentials: z
.union([
AzureClientSecretsConnectionOAuthInputCredentialsSchema,
AzureClientSecretsConnectionClientSecretInputCredentialsSchema
])
.optional()
.describe(AppConnections.UPDATE(AppConnection.AzureClientSecrets).credentials)
})
.and(GenericUpdateAppConnectionFieldsSchema(AppConnection.AzureClientSecrets));
@@ -59,6 +101,10 @@ export const AzureClientSecretsConnectionSchema = z.intersection(
z.object({
method: z.literal(AzureClientSecretsConnectionMethod.OAuth),
credentials: AzureClientSecretsConnectionOAuthOutputCredentialsSchema
}),
z.object({
method: z.literal(AzureClientSecretsConnectionMethod.ClientSecret),
credentials: AzureClientSecretsConnectionClientSecretOutputCredentialsSchema
})
])
);
@@ -69,6 +115,13 @@ export const SanitizedAzureClientSecretsConnectionSchema = z.discriminatedUnion(
credentials: AzureClientSecretsConnectionOAuthOutputCredentialsSchema.pick({
tenantId: true
})
}),
BaseAzureClientSecretsConnectionSchema.extend({
method: z.literal(AzureClientSecretsConnectionMethod.ClientSecret),
credentials: AzureClientSecretsConnectionClientSecretOutputCredentialsSchema.pick({
clientId: true,
tenantId: true
})
})
]);
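For illustration, a minimal sketch of validating a client-secret payload against the extended ValidateAzureClientSecretsConnectionCredentialsSchema union (the import paths are assumptions, the UUIDs are placeholders, and only zod's standard safeParse API is used):
import { AzureClientSecretsConnectionMethod } from "./azure-client-secrets-connection-enums"; // assumed path
import { ValidateAzureClientSecretsConnectionCredentialsSchema } from "./azure-client-secrets-connection-schemas"; // assumed path

const parsed = ValidateAzureClientSecretsConnectionCredentialsSchema.safeParse({
  method: AzureClientSecretsConnectionMethod.ClientSecret,
  credentials: {
    clientId: "00000000-0000-0000-0000-000000000001", // placeholder UUID
    clientSecret: "placeholder-client-secret",
    tenantId: "00000000-0000-0000-0000-000000000002" // placeholder UUID
  }
});

if (!parsed.success) {
  // zod reports which branch of the union failed and why
  console.error(parsed.error.flatten());
}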

@@ -4,6 +4,7 @@ import { DiscriminativePick } from "@app/lib/types";
import { AppConnection } from "../app-connection-enums";
import {
AzureClientSecretsConnectionClientSecretOutputCredentialsSchema,
AzureClientSecretsConnectionOAuthOutputCredentialsSchema,
AzureClientSecretsConnectionSchema,
CreateAzureClientSecretsConnectionSchema,
@@ -30,6 +31,10 @@ export type TAzureClientSecretsConnectionCredentials = z.infer<
typeof AzureClientSecretsConnectionOAuthOutputCredentialsSchema
>;
export type TAzureClientSecretsConnectionClientSecretCredentials = z.infer<
typeof AzureClientSecretsConnectionClientSecretOutputCredentialsSchema
>;
export interface ExchangeCodeAzureResponse {
token_type: string;
scope: string;

@@ -23,7 +23,7 @@ import {
} from "./azure-devops-types";
export const getAzureDevopsConnectionListItem = () => {
const { INF_APP_CONNECTION_AZURE_CLIENT_ID } = getConfig();
const { INF_APP_CONNECTION_AZURE_DEVOPS_CLIENT_ID } = getConfig();
return {
name: "Azure DevOps" as const,
@@ -32,7 +32,7 @@ export const getAzureDevopsConnectionListItem = () => {
AzureDevOpsConnectionMethod.OAuth,
AzureDevOpsConnectionMethod.AccessToken
],
oauthClientId: INF_APP_CONNECTION_AZURE_CLIENT_ID
oauthClientId: INF_APP_CONNECTION_AZURE_DEVOPS_CLIENT_ID
};
};
@@ -63,7 +63,7 @@ export const getAzureDevopsConnection = async (
switch (appConnection.method) {
case AzureDevOpsConnectionMethod.OAuth:
const appCfg = getConfig();
if (!appCfg.INF_APP_CONNECTION_AZURE_CLIENT_ID || !appCfg.INF_APP_CONNECTION_AZURE_CLIENT_SECRET) {
if (!appCfg.INF_APP_CONNECTION_AZURE_DEVOPS_CLIENT_ID || !appCfg.INF_APP_CONNECTION_AZURE_DEVOPS_CLIENT_SECRET) {
throw new BadRequestError({
message: `Azure environment variables have not been configured`
});
@@ -81,8 +81,8 @@ export const getAzureDevopsConnection = async (
new URLSearchParams({
grant_type: "refresh_token",
scope: `https://app.vssps.visualstudio.com/.default`,
client_id: appCfg.INF_APP_CONNECTION_AZURE_CLIENT_ID,
client_secret: appCfg.INF_APP_CONNECTION_AZURE_CLIENT_SECRET,
client_id: appCfg.INF_APP_CONNECTION_AZURE_DEVOPS_CLIENT_ID,
client_secret: appCfg.INF_APP_CONNECTION_AZURE_DEVOPS_CLIENT_SECRET,
refresh_token: refreshToken
})
);
@@ -119,7 +119,8 @@ export const getAzureDevopsConnection = async (
export const validateAzureDevOpsConnectionCredentials = async (config: TAzureDevOpsConnectionConfig) => {
const { credentials: inputCredentials, method } = config;
const { INF_APP_CONNECTION_AZURE_CLIENT_ID, INF_APP_CONNECTION_AZURE_CLIENT_SECRET, SITE_URL } = getConfig();
const { INF_APP_CONNECTION_AZURE_DEVOPS_CLIENT_ID, INF_APP_CONNECTION_AZURE_DEVOPS_CLIENT_SECRET, SITE_URL } =
getConfig();
switch (method) {
case AzureDevOpsConnectionMethod.OAuth:
@@ -127,7 +128,7 @@ export const validateAzureDevOpsConnectionCredentials = async (config: TAzureDev
throw new InternalServerError({ message: "SITE_URL env var is required to complete Azure OAuth flow" });
}
if (!INF_APP_CONNECTION_AZURE_CLIENT_ID || !INF_APP_CONNECTION_AZURE_CLIENT_SECRET) {
if (!INF_APP_CONNECTION_AZURE_DEVOPS_CLIENT_ID || !INF_APP_CONNECTION_AZURE_DEVOPS_CLIENT_SECRET) {
throw new InternalServerError({
message: `Azure ${getAppConnectionMethodName(method)} environment variables have not been configured`
});
@@ -144,8 +145,8 @@ export const validateAzureDevOpsConnectionCredentials = async (config: TAzureDev
grant_type: "authorization_code",
code: oauthCredentials.code,
scope: `https://app.vssps.visualstudio.com/.default`,
client_id: INF_APP_CONNECTION_AZURE_CLIENT_ID,
client_secret: INF_APP_CONNECTION_AZURE_CLIENT_SECRET,
client_id: INF_APP_CONNECTION_AZURE_DEVOPS_CLIENT_ID,
client_secret: INF_APP_CONNECTION_AZURE_DEVOPS_CLIENT_SECRET,
redirect_uri: `${SITE_URL}/organization/app-connections/azure/oauth/callback`
})
);

@@ -26,7 +26,10 @@ export const getAzureConnectionAccessToken = async (
kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">
) => {
const appCfg = getConfig();
if (!appCfg.INF_APP_CONNECTION_AZURE_CLIENT_ID || !appCfg.INF_APP_CONNECTION_AZURE_CLIENT_SECRET) {
if (
!appCfg.INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_ID ||
!appCfg.INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_SECRET
) {
throw new BadRequestError({
message: `Azure environment variables have not been configured`
});
@@ -57,8 +60,8 @@ export const getAzureConnectionAccessToken = async (
new URLSearchParams({
grant_type: "refresh_token",
scope: `openid offline_access`,
client_id: appCfg.INF_APP_CONNECTION_AZURE_CLIENT_ID,
client_secret: appCfg.INF_APP_CONNECTION_AZURE_CLIENT_SECRET,
client_id: appCfg.INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_ID,
client_secret: appCfg.INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_SECRET,
refresh_token: credentials.refreshToken
})
);
@@ -92,22 +95,23 @@ export const getAzureConnectionAccessToken = async (
};
export const getAzureKeyVaultConnectionListItem = () => {
const { INF_APP_CONNECTION_AZURE_CLIENT_ID } = getConfig();
const { INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_ID } = getConfig();
return {
name: "Azure Key Vault" as const,
app: AppConnection.AzureKeyVault as const,
methods: Object.values(AzureKeyVaultConnectionMethod) as [AzureKeyVaultConnectionMethod.OAuth],
oauthClientId: INF_APP_CONNECTION_AZURE_CLIENT_ID
oauthClientId: INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_ID
};
};
export const validateAzureKeyVaultConnectionCredentials = async (config: TAzureKeyVaultConnectionConfig) => {
const { credentials: inputCredentials, method } = config;
const { INF_APP_CONNECTION_AZURE_CLIENT_ID, INF_APP_CONNECTION_AZURE_CLIENT_SECRET, SITE_URL } = getConfig();
const { INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_ID, INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_SECRET, SITE_URL } =
getConfig();
if (!INF_APP_CONNECTION_AZURE_CLIENT_ID || !INF_APP_CONNECTION_AZURE_CLIENT_SECRET) {
if (!INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_ID || !INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_SECRET) {
throw new InternalServerError({
message: `Azure ${getAppConnectionMethodName(method)} environment variables have not been configured`
});
@@ -123,8 +127,8 @@ export const validateAzureKeyVaultConnectionCredentials = async (config: TAzureK
grant_type: "authorization_code",
code: inputCredentials.code,
scope: `openid offline_access https://vault.azure.net/.default`,
client_id: INF_APP_CONNECTION_AZURE_CLIENT_ID,
client_secret: INF_APP_CONNECTION_AZURE_CLIENT_SECRET,
client_id: INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_ID,
client_secret: INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_SECRET,
redirect_uri: `${SITE_URL}/organization/app-connections/azure/oauth/callback`
})
);
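The Azure DevOps and Azure Key Vault hunks above split the previously shared INF_APP_CONNECTION_AZURE_CLIENT_ID / INF_APP_CONNECTION_AZURE_CLIENT_SECRET pair into per-connection variables. A minimal startup-check sketch against the new names (reading process.env directly is a simplification; the backend resolves these through getConfig()):
const renamedAzureOAuthEnvVars = [
  // Azure DevOps app connection
  "INF_APP_CONNECTION_AZURE_DEVOPS_CLIENT_ID",
  "INF_APP_CONNECTION_AZURE_DEVOPS_CLIENT_SECRET",
  // Azure Key Vault app connection
  "INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_ID",
  "INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_SECRET"
] as const;

const missing = renamedAzureOAuthEnvVars.filter((name) => !process.env[name]);
if (missing.length > 0) {
  console.warn(`Azure OAuth env vars not configured: ${missing.join(", ")}`);
}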

@@ -164,7 +164,7 @@ export const validateSqlConnectionCredentials = async (
) => {
try {
await executeWithPotentialGateway(config, gatewayService, async (client) => {
await client.raw(`Select 1`);
await client.raw(config.app === AppConnection.OracleDB ? `SELECT 1 FROM DUAL` : `Select 1`);
});
return config.credentials;
} catch (error) {

@@ -1,32 +1,26 @@
import slugify from "@sindresorhus/slugify";
import sjcl from "sjcl";
import tweetnacl from "tweetnacl";
import tweetnaclUtil from "tweetnacl-util";
import { SecretType, TSecretFolders } from "@app/db/schemas";
import { crypto } from "@app/lib/crypto/cryptography";
import { BadRequestError, NotFoundError } from "@app/lib/errors";
import { chunkArray } from "@app/lib/fn";
import { BadRequestError } from "@app/lib/errors";
import { logger } from "@app/lib/logger";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { CommitType, TFolderCommitServiceFactory } from "../folder-commit/folder-commit-service";
import { TKmsServiceFactory } from "../kms/kms-service";
import { KmsDataKey } from "../kms/kms-types";
import { TProjectDALFactory } from "../project/project-dal";
import { TProjectServiceFactory } from "../project/project-service";
import { TProjectEnvDALFactory } from "../project-env/project-env-dal";
import { TProjectEnvServiceFactory } from "../project-env/project-env-service";
import { TResourceMetadataDALFactory } from "../resource-metadata/resource-metadata-dal";
import { TSecretFolderDALFactory } from "../secret-folder/secret-folder-dal";
import { TSecretFolderVersionDALFactory } from "../secret-folder/secret-folder-version-dal";
import { TSecretTagDALFactory } from "../secret-tag/secret-tag-dal";
import { TSecretV2BridgeDALFactory } from "../secret-v2-bridge/secret-v2-bridge-dal";
import { fnSecretBulkInsert, getAllSecretReferences } from "../secret-v2-bridge/secret-v2-bridge-fns";
import type { TSecretV2BridgeServiceFactory } from "../secret-v2-bridge/secret-v2-bridge-service";
import { TSecretVersionV2DALFactory } from "../secret-v2-bridge/secret-version-dal";
import { TSecretVersionV2TagDALFactory } from "../secret-v2-bridge/secret-version-tag-dal";
import { InfisicalImportData, TEnvKeyExportJSON, TImportInfisicalDataCreate } from "./external-migration-types";
import { TFolderCommitServiceFactory } from "../../folder-commit/folder-commit-service";
import { TKmsServiceFactory } from "../../kms/kms-service";
import { TProjectDALFactory } from "../../project/project-dal";
import { TProjectServiceFactory } from "../../project/project-service";
import { TProjectEnvDALFactory } from "../../project-env/project-env-dal";
import { TProjectEnvServiceFactory } from "../../project-env/project-env-service";
import { TResourceMetadataDALFactory } from "../../resource-metadata/resource-metadata-dal";
import { TSecretFolderDALFactory } from "../../secret-folder/secret-folder-dal";
import { TSecretFolderVersionDALFactory } from "../../secret-folder/secret-folder-version-dal";
import { TSecretTagDALFactory } from "../../secret-tag/secret-tag-dal";
import { TSecretV2BridgeDALFactory } from "../../secret-v2-bridge/secret-v2-bridge-dal";
import type { TSecretV2BridgeServiceFactory } from "../../secret-v2-bridge/secret-v2-bridge-service";
import { TSecretVersionV2DALFactory } from "../../secret-v2-bridge/secret-version-dal";
import { TSecretVersionV2TagDALFactory } from "../../secret-v2-bridge/secret-version-tag-dal";
import { InfisicalImportData, TEnvKeyExportJSON, TImportInfisicalDataCreate } from "../external-migration-types";
export type TImportDataIntoInfisicalDTO = {
projectDAL: Pick<TProjectDALFactory, "transaction">;
@@ -499,326 +493,3 @@ export const parseEnvKeyDataFn = async (decryptedJson: string): Promise<Infisica
return infisicalImportData;
};
export const importDataIntoInfisicalFn = async ({
projectService,
projectEnvDAL,
projectDAL,
secretDAL,
kmsService,
secretVersionDAL,
secretTagDAL,
secretVersionTagDAL,
folderDAL,
resourceMetadataDAL,
folderVersionDAL,
folderCommitService,
input: { data, actor, actorId, actorOrgId, actorAuthMethod }
}: TImportDataIntoInfisicalDTO) => {
// Import data to infisical
if (!data || !data.projects) {
throw new BadRequestError({ message: "No projects found in data" });
}
const originalToNewProjectId = new Map<string, string>();
const originalToNewEnvironmentId = new Map<
string,
{ envId: string; envSlug: string; rootFolderId: string; projectId: string }
>();
const originalToNewFolderId = new Map<
string,
{
folderId: string;
projectId: string;
}
>();
const projectsNotImported: string[] = [];
await projectDAL.transaction(async (tx) => {
for await (const project of data.projects) {
const newProject = await projectService
.createProject({
actor,
actorId,
actorOrgId,
actorAuthMethod,
workspaceName: project.name,
createDefaultEnvs: false,
tx
})
.catch((e) => {
logger.error(e, `Failed to import to project [name:${project.name}]`);
throw new BadRequestError({ message: `Failed to import to project [name:${project.name}]` });
});
originalToNewProjectId.set(project.id, newProject.id);
}
// Import environments
if (data.environments) {
for await (const environment of data.environments) {
const projectId = originalToNewProjectId.get(environment.projectId);
const slug = slugify(`${environment.name}-${alphaNumericNanoId(4)}`);
if (!projectId) {
projectsNotImported.push(environment.projectId);
// eslint-disable-next-line no-continue
continue;
}
const existingEnv = await projectEnvDAL.findOne({ projectId, slug }, tx);
if (existingEnv) {
throw new BadRequestError({
message: `Environment with slug '${slug}' already exists`,
name: "CreateEnvironment"
});
}
const lastPos = await projectEnvDAL.findLastEnvPosition(projectId, tx);
const doc = await projectEnvDAL.create({ slug, name: environment.name, projectId, position: lastPos + 1 }, tx);
const folder = await folderDAL.create({ name: "root", parentId: null, envId: doc.id, version: 1 }, tx);
originalToNewEnvironmentId.set(environment.id, {
envSlug: doc.slug,
envId: doc.id,
rootFolderId: folder.id,
projectId
});
}
}
if (data.folders) {
for await (const folder of data.folders) {
const parentEnv = originalToNewEnvironmentId.get(folder.parentFolderId as string);
if (!parentEnv) {
// eslint-disable-next-line no-continue
continue;
}
const newFolder = await folderDAL.create(
{
name: folder.name,
envId: parentEnv.envId,
parentId: parentEnv.rootFolderId
},
tx
);
const newFolderVersion = await folderVersionDAL.create(
{
name: newFolder.name,
envId: newFolder.envId,
version: newFolder.version,
folderId: newFolder.id
},
tx
);
await folderCommitService.createCommit(
{
actor: {
type: actor,
metadata: {
id: actorId
}
},
message: "Changed by external migration",
folderId: parentEnv.rootFolderId,
changes: [
{
type: CommitType.ADD,
folderVersionId: newFolderVersion.id
}
]
},
tx
);
originalToNewFolderId.set(folder.id, {
folderId: newFolder.id,
projectId: parentEnv.projectId
});
}
}
// Useful for debugging:
// console.log("data.secrets", data.secrets);
// console.log("data.folders", data.folders);
// console.log("data.environment", data.environments);
if (data.secrets && data.secrets.length > 0) {
const mappedToEnvironmentId = new Map<
string,
{
secretKey: string;
secretValue: string;
folderId?: string;
isFromBlock?: boolean;
}[]
>();
for (const secret of data.secrets) {
const targetId = secret.folderId || secret.environmentId;
// Skip if we can't find either an environment or folder mapping for this secret
if (!originalToNewEnvironmentId.get(secret.environmentId) && !originalToNewFolderId.get(targetId)) {
logger.info({ secret }, "[importDataIntoInfisicalFn]: Could not find environment or folder for secret");
// eslint-disable-next-line no-continue
continue;
}
if (!mappedToEnvironmentId.has(targetId)) {
mappedToEnvironmentId.set(targetId, []);
}
const alreadyHasSecret = mappedToEnvironmentId
.get(targetId)!
.find((el) => el.secretKey === secret.name && el.folderId === secret.folderId);
if (alreadyHasSecret && alreadyHasSecret.isFromBlock) {
// remove the existing secret if any
mappedToEnvironmentId
.get(targetId)!
.splice(mappedToEnvironmentId.get(targetId)!.indexOf(alreadyHasSecret), 1);
}
mappedToEnvironmentId.get(targetId)!.push({
secretKey: secret.name,
secretValue: secret.value || "",
folderId: secret.folderId,
isFromBlock: secret.appBlockOrderIndex !== undefined
});
}
// for each of the mappedEnvironmentId
for await (const [targetId, secrets] of mappedToEnvironmentId) {
logger.info("[importDataIntoInfisicalFn]: Processing secrets for targetId", targetId);
let selectedFolder: TSecretFolders | undefined;
let selectedProjectId: string | undefined;
// Case 1: Secret belongs to a folder / branch / branch of a block
const foundFolder = originalToNewFolderId.get(targetId);
if (foundFolder) {
logger.info("[importDataIntoInfisicalFn]: Processing secrets for folder");
selectedFolder = await folderDAL.findById(foundFolder.folderId, tx);
selectedProjectId = foundFolder.projectId;
} else {
logger.info("[importDataIntoInfisicalFn]: Processing secrets for normal environment");
const environment = data.environments.find((env) => env.id === targetId);
if (!environment) {
logger.info(
{
targetId
},
"[importDataIntoInfisicalFn]: Could not find environment for secret"
);
// eslint-disable-next-line no-continue
continue;
}
const projectId = originalToNewProjectId.get(environment.projectId)!;
if (!projectId) {
throw new BadRequestError({ message: `Failed to import secret, project not found` });
}
const env = originalToNewEnvironmentId.get(targetId);
if (!env) {
logger.info(
{
targetId
},
"[importDataIntoInfisicalFn]: Could not find environment for secret"
);
// eslint-disable-next-line no-continue
continue;
}
const folder = await folderDAL.findBySecretPath(projectId, env.envSlug, "/", tx);
if (!folder) {
throw new NotFoundError({
message: `Folder not found for the given environment slug (${env.envSlug}) & secret path (/)`,
name: "Create secret"
});
}
selectedFolder = folder;
selectedProjectId = projectId;
}
if (!selectedFolder) {
throw new NotFoundError({
message: `Folder not found for the given environment slug & secret path`,
name: "CreateSecret"
});
}
if (!selectedProjectId) {
throw new NotFoundError({
message: `Project not found for the given environment slug & secret path`,
name: "CreateSecret"
});
}
const { encryptor: secretManagerEncrypt } = await kmsService.createCipherPairWithDataKey(
{
type: KmsDataKey.SecretManager,
projectId: selectedProjectId
},
tx
);
const secretBatches = chunkArray(secrets, 2500);
for await (const secretBatch of secretBatches) {
const secretsByKeys = await secretDAL.findBySecretKeys(
selectedFolder.id,
secretBatch.map((el) => ({
key: el.secretKey,
type: SecretType.Shared
})),
tx
);
if (secretsByKeys.length) {
throw new BadRequestError({
message: `Secret already exists: ${secretsByKeys.map((el) => el.key).join(",")}`
});
}
await fnSecretBulkInsert({
inputSecrets: secretBatch.map((el) => {
const references = getAllSecretReferences(el.secretValue).nestedReferences;
return {
version: 1,
encryptedValue: el.secretValue
? secretManagerEncrypt({ plainText: Buffer.from(el.secretValue) }).cipherTextBlob
: undefined,
key: el.secretKey,
references,
type: SecretType.Shared
};
}),
folderId: selectedFolder.id,
orgId: actorOrgId,
resourceMetadataDAL,
secretDAL,
secretVersionDAL,
secretTagDAL,
secretVersionTagDAL,
folderCommitService,
actor: {
type: actor,
actorId
},
tx
});
}
}
}
});
return { projectsNotImported };
};

@@ -0,0 +1,352 @@
import slugify from "@sindresorhus/slugify";
import { SecretType, TSecretFolders } from "@app/db/schemas";
import { BadRequestError, NotFoundError } from "@app/lib/errors";
import { chunkArray } from "@app/lib/fn";
import { logger } from "@app/lib/logger";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { CommitType } from "@app/services/folder-commit/folder-commit-service";
import { KmsDataKey } from "@app/services/kms/kms-types";
import { fnSecretBulkInsert, getAllSecretReferences } from "@app/services/secret-v2-bridge/secret-v2-bridge-fns";
import { TImportDataIntoInfisicalDTO } from "./envkey";
export const importDataIntoInfisicalFn = async ({
projectService,
projectEnvDAL,
projectDAL,
secretDAL,
kmsService,
secretVersionDAL,
secretTagDAL,
secretVersionTagDAL,
folderDAL,
resourceMetadataDAL,
folderVersionDAL,
folderCommitService,
input: { data, actor, actorId, actorOrgId, actorAuthMethod }
}: TImportDataIntoInfisicalDTO) => {
// Import data to infisical
if (!data || !data.projects) {
throw new BadRequestError({ message: "No projects found in data" });
}
const originalToNewProjectId = new Map<string, string>();
const originalToNewEnvironmentId = new Map<
string,
{ envId: string; envSlug: string; rootFolderId?: string; projectId: string }
>();
const originalToNewFolderId = new Map<
string,
{
envId: string;
envSlug: string;
folderId: string;
projectId: string;
}
>();
const projectsNotImported: string[] = [];
await projectDAL.transaction(async (tx) => {
for await (const project of data.projects) {
const newProject = await projectService
.createProject({
actor,
actorId,
actorOrgId,
actorAuthMethod,
workspaceName: project.name,
createDefaultEnvs: false,
tx
})
.catch((e) => {
logger.error(e, `Failed to import to project [name:${project.name}]`);
throw new BadRequestError({ message: `Failed to import to project [name:${project.name}]` });
});
originalToNewProjectId.set(project.id, newProject.id);
}
// Import environments
if (data.environments) {
for await (const environment of data.environments) {
const projectId = originalToNewProjectId.get(environment.projectId);
const slug = slugify(`${environment.name}-${alphaNumericNanoId(4)}`);
if (!projectId) {
projectsNotImported.push(environment.projectId);
// eslint-disable-next-line no-continue
continue;
}
const existingEnv = await projectEnvDAL.findOne({ projectId, slug }, tx);
if (existingEnv) {
throw new BadRequestError({
message: `Environment with slug '${slug}' already exists`,
name: "CreateEnvironment"
});
}
const lastPos = await projectEnvDAL.findLastEnvPosition(projectId, tx);
const doc = await projectEnvDAL.create({ slug, name: environment.name, projectId, position: lastPos + 1 }, tx);
const folder = await folderDAL.create({ name: "root", parentId: null, envId: doc.id, version: 1 }, tx);
originalToNewEnvironmentId.set(environment.id, {
envSlug: doc.slug,
envId: doc.id,
rootFolderId: folder.id,
projectId
});
}
}
if (data.folders) {
for await (const folder of data.folders) {
const parentEnv = originalToNewEnvironmentId.get(folder.parentFolderId as string);
const parentFolder = originalToNewFolderId.get(folder.parentFolderId as string);
let newFolder: TSecretFolders;
if (parentEnv?.rootFolderId) {
newFolder = await folderDAL.create(
{
name: folder.name,
envId: parentEnv.envId,
parentId: parentEnv.rootFolderId
},
tx
);
} else if (parentFolder) {
newFolder = await folderDAL.create(
{
name: folder.name,
envId: parentFolder.envId,
parentId: parentFolder.folderId
},
tx
);
} else {
logger.info({ folder }, "No parent environment found for folder");
// eslint-disable-next-line no-continue
continue;
}
const newFolderVersion = await folderVersionDAL.create(
{
name: newFolder.name,
envId: newFolder.envId,
version: newFolder.version,
folderId: newFolder.id
},
tx
);
await folderCommitService.createCommit(
{
actor: {
type: actor,
metadata: {
id: actorId
}
},
message: "Changed by external migration",
folderId: parentEnv?.rootFolderId || parentFolder?.folderId || "",
changes: [
{
type: CommitType.ADD,
folderVersionId: newFolderVersion.id
}
]
},
tx
);
originalToNewFolderId.set(folder.id, {
folderId: newFolder.id,
envId: parentEnv?.envId || parentFolder?.envId || "",
envSlug: parentEnv?.envSlug || parentFolder?.envSlug || "",
projectId: parentEnv?.projectId || parentFolder?.projectId || ""
});
}
}
// Useful for debugging:
// console.log("data.secrets", data.secrets);
// console.log("data.folders", data.folders);
// console.log("data.environment", data.environments);
if (data.secrets && data.secrets.length > 0) {
const mappedToEnvironmentId = new Map<
string,
{
secretKey: string;
secretValue: string;
folderId?: string;
isFromBlock?: boolean;
}[]
>();
for (const secret of data.secrets) {
const targetId = secret.folderId || secret.environmentId;
// Skip if we can't find either an environment or folder mapping for this secret
if (!originalToNewEnvironmentId.get(secret.environmentId) && !originalToNewFolderId.get(targetId)) {
logger.info({ secret }, "[importDataIntoInfisicalFn]: Could not find environment or folder for secret");
// eslint-disable-next-line no-continue
continue;
}
if (!mappedToEnvironmentId.has(targetId)) {
mappedToEnvironmentId.set(targetId, []);
}
const alreadyHasSecret = mappedToEnvironmentId
.get(targetId)!
.find((el) => el.secretKey === secret.name && el.folderId === secret.folderId);
if (alreadyHasSecret && alreadyHasSecret.isFromBlock) {
// remove the existing secret if any
mappedToEnvironmentId
.get(targetId)!
.splice(mappedToEnvironmentId.get(targetId)!.indexOf(alreadyHasSecret), 1);
}
mappedToEnvironmentId.get(targetId)!.push({
secretKey: secret.name,
secretValue: secret.value || "",
folderId: secret.folderId,
isFromBlock: secret.appBlockOrderIndex !== undefined
});
}
// for each of the mappedEnvironmentId
for await (const [targetId, secrets] of mappedToEnvironmentId) {
logger.info("[importDataIntoInfisicalFn]: Processing secrets for targetId", targetId);
let selectedFolder: TSecretFolders | undefined;
let selectedProjectId: string | undefined;
// Case 1: Secret belongs to a folder / branch / branch of a block
const foundFolder = originalToNewFolderId.get(targetId);
if (foundFolder) {
logger.info("[importDataIntoInfisicalFn]: Processing secrets for folder");
selectedFolder = await folderDAL.findById(foundFolder.folderId, tx);
selectedProjectId = foundFolder.projectId;
} else {
logger.info("[importDataIntoInfisicalFn]: Processing secrets for normal environment");
const environment = data.environments.find((env) => env.id === targetId);
if (!environment) {
logger.info(
{
targetId
},
"[importDataIntoInfisicalFn]: Could not find environment for secret"
);
// eslint-disable-next-line no-continue
continue;
}
const projectId = originalToNewProjectId.get(environment.projectId)!;
if (!projectId) {
throw new BadRequestError({ message: `Failed to import secret, project not found` });
}
const env = originalToNewEnvironmentId.get(targetId);
if (!env) {
logger.info(
{
targetId
},
"[importDataIntoInfisicalFn]: Could not find environment for secret"
);
// eslint-disable-next-line no-continue
continue;
}
const folder = await folderDAL.findBySecretPath(projectId, env.envSlug, "/", tx);
if (!folder) {
throw new NotFoundError({
message: `Folder not found for the given environment slug (${env.envSlug}) & secret path (/)`,
name: "Create secret"
});
}
selectedFolder = folder;
selectedProjectId = projectId;
}
if (!selectedFolder) {
throw new NotFoundError({
message: `Folder not found for the given environment slug & secret path`,
name: "CreateSecret"
});
}
if (!selectedProjectId) {
throw new NotFoundError({
message: `Project not found for the given environment slug & secret path`,
name: "CreateSecret"
});
}
const { encryptor: secretManagerEncrypt } = await kmsService.createCipherPairWithDataKey(
{
type: KmsDataKey.SecretManager,
projectId: selectedProjectId
},
tx
);
const secretBatches = chunkArray(secrets, 2500);
for await (const secretBatch of secretBatches) {
const secretsByKeys = await secretDAL.findBySecretKeys(
selectedFolder.id,
secretBatch.map((el) => ({
key: el.secretKey,
type: SecretType.Shared
})),
tx
);
if (secretsByKeys.length) {
throw new BadRequestError({
message: `Secret already exists: ${secretsByKeys.map((el) => el.key).join(",")}`
});
}
await fnSecretBulkInsert({
inputSecrets: secretBatch.map((el) => {
const references = getAllSecretReferences(el.secretValue).nestedReferences;
return {
version: 1,
encryptedValue: el.secretValue
? secretManagerEncrypt({ plainText: Buffer.from(el.secretValue) }).cipherTextBlob
: undefined,
key: el.secretKey,
references,
type: SecretType.Shared
};
}),
folderId: selectedFolder.id,
orgId: actorOrgId,
resourceMetadataDAL,
secretDAL,
secretVersionDAL,
secretTagDAL,
secretVersionTagDAL,
folderCommitService,
actor: {
type: actor,
actorId
},
tx
});
}
}
}
});
return { projectsNotImported };
};
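To make the new nesting behaviour concrete, a hedged example of how the loop above resolves folder parents (all IDs are placeholders):
// Two folder rows as the Vault transform would emit them for the path "billing/stripe":
const importedFolders = [
  { id: "f-billing", name: "billing", environmentId: "env-1", parentFolderId: "env-1" },
  { id: "f-stripe", name: "stripe", environmentId: "env-1", parentFolderId: "f-billing" }
];
// "billing" resolves via originalToNewEnvironmentId (its parentFolderId is the environment id) and is
// created under that environment's root folder; "stripe" then resolves via originalToNewFolderId and is
// created under the newly created "billing" folder, which the previous implementation could not do.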

@@ -0,0 +1,3 @@
export * from "./envkey";
export * from "./import";
export * from "./vault";

@@ -0,0 +1,341 @@
import axios, { AxiosInstance } from "axios";
import { v4 as uuidv4 } from "uuid";
import { BadRequestError } from "@app/lib/errors";
import { logger } from "@app/lib/logger";
import { blockLocalAndPrivateIpAddresses } from "@app/lib/validator";
import { InfisicalImportData, VaultMappingType } from "../external-migration-types";
type VaultData = {
namespace: string;
mount: string;
path: string;
secretData: Record<string, string>;
};
const vaultFactory = () => {
const getMounts = async (request: AxiosInstance) => {
const response = await request
.get<
Record<
string,
{
accessor: string;
options: {
version?: string;
} | null;
type: string;
}
>
>("/v1/sys/mounts")
.catch((err) => {
if (axios.isAxiosError(err)) {
logger.error(err.response?.data, "External migration: Failed to get Vault mounts");
}
throw err;
});
return response.data;
};
const getPaths = async (
request: AxiosInstance,
{ mountPath, secretPath = "" }: { mountPath: string; secretPath?: string }
) => {
try {
// For KV v2: /v1/{mount}/metadata/{path}?list=true
const path = secretPath ? `${mountPath}/metadata/${secretPath}` : `${mountPath}/metadata`;
const response = await request.get<{
data: {
keys: string[];
};
}>(`/v1/${path}?list=true`);
return response.data.data.keys;
} catch (err) {
if (axios.isAxiosError(err)) {
logger.error(err.response?.data, "External migration: Failed to get Vault paths");
if (err.response?.status === 404) {
return null;
}
}
throw err;
}
};
const getSecrets = async (
request: AxiosInstance,
{ mountPath, secretPath }: { mountPath: string; secretPath: string }
) => {
// For KV v2: /v1/{mount}/data/{path}
const response = await request
.get<{
data: {
data: Record<string, string>; // KV v2 has nested data structure
metadata: {
created_time: string;
deletion_time: string;
destroyed: boolean;
version: number;
};
};
}>(`/v1/${mountPath}/data/${secretPath}`)
.catch((err) => {
if (axios.isAxiosError(err)) {
logger.error(err.response?.data, "External migration: Failed to get Vault secret");
}
throw err;
});
return response.data.data.data;
};
// helper function to check if a mount is KV v2 (will be useful if we add support for Vault KV v1)
// const isKvV2Mount = (mountInfo: { type: string; options?: { version?: string } | null }) => {
// return mountInfo.type === "kv" && mountInfo.options?.version === "2";
// };
const recursivelyGetAllPaths = async (
request: AxiosInstance,
mountPath: string,
currentPath: string = ""
): Promise<string[]> => {
const paths = await getPaths(request, { mountPath, secretPath: currentPath });
if (paths === null || paths.length === 0) {
return [];
}
const allSecrets: string[] = [];
for await (const path of paths) {
const cleanPath = path.endsWith("/") ? path.slice(0, -1) : path;
const fullItemPath = currentPath ? `${currentPath}/${cleanPath}` : cleanPath;
if (path.endsWith("/")) {
// it's a folder so we recurse into it
const subSecrets = await recursivelyGetAllPaths(request, mountPath, fullItemPath);
allSecrets.push(...subSecrets);
} else {
// it's a secret so we add it to our results
allSecrets.push(`${mountPath}/${fullItemPath}`);
}
}
return allSecrets;
};
async function collectVaultData({
baseUrl,
namespace,
accessToken
}: {
baseUrl: string;
namespace?: string;
accessToken: string;
}): Promise<VaultData[]> {
const request = axios.create({
baseURL: baseUrl,
headers: {
"X-Vault-Token": accessToken,
...(namespace ? { "X-Vault-Namespace": namespace } : {})
}
});
const allData: VaultData[] = [];
// Get all mounts in this namespace
const mounts = await getMounts(request);
for (const mount of Object.keys(mounts)) {
if (!mount.endsWith("/")) {
delete mounts[mount];
}
}
for await (const [mountPath, mountInfo] of Object.entries(mounts)) {
// skip non-KV mounts
if (!mountInfo.type.startsWith("kv")) {
// eslint-disable-next-line no-continue
continue;
}
// get all paths in this mount
const paths = await recursivelyGetAllPaths(request, `${mountPath.replace(/\/$/, "")}`);
const cleanMountPath = mountPath.replace(/\/$/, "");
for await (const secretPath of paths) {
// get the actual secret data
const secretData = await getSecrets(request, {
mountPath: cleanMountPath,
secretPath: secretPath.replace(`${cleanMountPath}/`, "")
});
allData.push({
namespace: namespace || "",
mount: mountPath.replace(/\/$/, ""),
path: secretPath.replace(`${cleanMountPath}/`, ""),
secretData
});
}
}
return allData;
}
return {
collectVaultData,
getMounts,
getPaths,
getSecrets,
recursivelyGetAllPaths
};
};
export const transformToInfisicalFormatNamespaceToProjects = (
vaultData: VaultData[],
mappingType: VaultMappingType
): InfisicalImportData => {
const projects: Array<{ name: string; id: string }> = [];
const environments: Array<{ name: string; id: string; projectId: string; envParentId?: string }> = [];
const folders: Array<{ id: string; name: string; environmentId: string; parentFolderId?: string }> = [];
const secrets: Array<{ id: string; name: string; environmentId: string; value: string; folderId?: string }> = [];
// track created entities to avoid duplicates
const projectMap = new Map<string, string>(); // namespace -> projectId
const environmentMap = new Map<string, string>(); // namespace:mount -> environmentId
const folderMap = new Map<string, string>(); // namespace:mount:folderPath -> folderId
let environmentId: string = "";
for (const data of vaultData) {
const { namespace, mount, path, secretData } = data;
if (mappingType === VaultMappingType.Namespace) {
// create project (namespace)
if (!projectMap.has(namespace)) {
const projectId = uuidv4();
projectMap.set(namespace, projectId);
projects.push({
name: namespace,
id: projectId
});
}
const projectId = projectMap.get(namespace)!;
// create environment (mount)
const envKey = `${namespace}:${mount}`;
if (!environmentMap.has(envKey)) {
environmentId = uuidv4();
environmentMap.set(envKey, environmentId);
environments.push({
name: mount,
id: environmentId,
projectId
});
}
environmentId = environmentMap.get(envKey)!;
} else if (mappingType === VaultMappingType.KeyVault) {
if (!projectMap.has(mount)) {
const projectId = uuidv4();
projectMap.set(mount, projectId);
projects.push({
name: mount,
id: projectId
});
}
const projectId = projectMap.get(mount)!;
// create single "Production" environment per project, because we have no good way of determining environments from vault
if (!environmentMap.has(mount)) {
environmentId = uuidv4();
environmentMap.set(mount, environmentId);
environments.push({
name: "Production",
id: environmentId,
projectId
});
}
environmentId = environmentMap.get(mount)!;
}
// create folder structure
let currentFolderId: string | undefined;
let currentPath = "";
if (path.includes("/")) {
const pathParts = path.split("/").filter(Boolean);
const folderParts = pathParts;
// create nested folder structure for the entire path
for (const folderName of folderParts) {
currentPath = currentPath ? `${currentPath}/${folderName}` : folderName;
const folderKey = `${namespace}:${mount}:${currentPath}`;
if (!folderMap.has(folderKey)) {
const folderId = uuidv4();
folderMap.set(folderKey, folderId);
folders.push({
id: folderId,
name: folderName,
environmentId,
parentFolderId: currentFolderId || environmentId
});
currentFolderId = folderId;
} else {
currentFolderId = folderMap.get(folderKey)!;
}
}
}
for (const [key, value] of Object.entries(secretData)) {
secrets.push({
id: uuidv4(),
name: key,
environmentId,
value: String(value),
folderId: currentFolderId
});
}
}
return {
projects,
environments,
folders,
secrets
};
};
export const importVaultDataFn = async ({
vaultAccessToken,
vaultNamespace,
vaultUrl,
mappingType
}: {
vaultAccessToken: string;
vaultNamespace?: string;
vaultUrl: string;
mappingType: VaultMappingType;
}) => {
await blockLocalAndPrivateIpAddresses(vaultUrl);
if (mappingType === VaultMappingType.Namespace && !vaultNamespace) {
throw new BadRequestError({
message: "Vault namespace is required when project mapping type is set to namespace."
});
}
const vaultApi = vaultFactory();
const vaultData = await vaultApi.collectVaultData({
accessToken: vaultAccessToken,
baseUrl: vaultUrl,
namespace: vaultNamespace
});
const infisicalData = transformToInfisicalFormatNamespaceToProjects(vaultData, mappingType);
return infisicalData;
};
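As a quick sanity check, a hedged sketch of running one collected secret through the transform above (the function is assumed to be importable from this module; all values are placeholders, and the expected output follows directly from the mapping logic shown):
import { transformToInfisicalFormatNamespaceToProjects } from "./vault"; // assumed export/path
import { VaultMappingType } from "../external-migration-types";

const collected = [
  {
    namespace: "",
    mount: "secret",
    path: "billing/stripe",
    secretData: { STRIPE_KEY: "sk_test_placeholder" }
  }
];

const imported = transformToInfisicalFormatNamespaceToProjects(collected, VaultMappingType.KeyVault);
// Per the implementation above this yields:
// - one project named "secret" (the mount)
// - one "Production" environment for that project
// - folders "billing" and "stripe" (nested via parentFolderId)
// - one secret STRIPE_KEY attached to the "stripe" folder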

@@ -19,7 +19,7 @@ import { TSecretVersionV2DALFactory } from "../secret-v2-bridge/secret-version-d
import { TSecretVersionV2TagDALFactory } from "../secret-v2-bridge/secret-version-tag-dal";
import { SmtpTemplates, TSmtpService } from "../smtp/smtp-service";
import { importDataIntoInfisicalFn } from "./external-migration-fns";
import { ExternalPlatforms, TImportInfisicalDataCreate } from "./external-migration-types";
import { ExternalPlatforms, ImportType, TImportInfisicalDataCreate } from "./external-migration-types";
export type TExternalMigrationQueueFactoryDep = {
smtpService: TSmtpService;
@@ -67,6 +67,7 @@ export const externalMigrationQueueFactory = ({
const startImport = async (dto: {
actorEmail: string;
data: {
importType: ImportType;
iv: string;
tag: string;
ciphertext: string;

@@ -4,9 +4,9 @@ import { crypto } from "@app/lib/crypto/cryptography";
import { BadRequestError, ForbiddenRequestError } from "@app/lib/errors";
import { TUserDALFactory } from "../user/user-dal";
import { decryptEnvKeyDataFn, parseEnvKeyDataFn } from "./external-migration-fns";
import { decryptEnvKeyDataFn, importVaultDataFn, parseEnvKeyDataFn } from "./external-migration-fns";
import { TExternalMigrationQueueFactory } from "./external-migration-queue";
import { TImportEnvKeyDataCreate } from "./external-migration-types";
import { ImportType, TImportEnvKeyDataDTO, TImportVaultDataDTO } from "./external-migration-types";
type TExternalMigrationServiceFactoryDep = {
permissionService: TPermissionServiceFactory;
@@ -28,7 +28,7 @@ export const externalMigrationServiceFactory = ({
actorId,
actorOrgId,
actorAuthMethod
}: TImportEnvKeyDataCreate) => {
}: TImportEnvKeyDataDTO) => {
if (crypto.isFipsModeEnabled()) {
throw new BadRequestError({ message: "EnvKey migration is not supported when running in FIPS mode." });
}
@@ -60,11 +60,65 @@ export const externalMigrationServiceFactory = ({
await externalMigrationQueue.startImport({
actorEmail: user.email!,
data: encrypted
data: {
importType: ImportType.EnvKey,
...encrypted
}
});
};
const importVaultData = async ({
vaultAccessToken,
vaultNamespace,
mappingType,
vaultUrl,
actor,
actorId,
actorOrgId,
actorAuthMethod
}: TImportVaultDataDTO) => {
const { membership } = await permissionService.getOrgPermission(
actor,
actorId,
actorOrgId,
actorAuthMethod,
actorOrgId
);
if (membership.role !== OrgMembershipRole.Admin) {
throw new ForbiddenRequestError({ message: "Only admins can import data" });
}
const user = await userDAL.findById(actorId);
const vaultData = await importVaultDataFn({
vaultAccessToken,
vaultNamespace,
vaultUrl,
mappingType
});
const stringifiedJson = JSON.stringify({
data: vaultData,
actor,
actorId,
actorOrgId,
actorAuthMethod
});
const encrypted = crypto.encryption().symmetric().encryptWithRootEncryptionKey(stringifiedJson);
await externalMigrationQueue.startImport({
actorEmail: user.email!,
data: {
importType: ImportType.Vault,
...encrypted
}
});
};
return {
importEnvKeyData
importEnvKeyData,
importVaultData
};
};
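A minimal sketch of what a caller supplies to the new importVaultData flow (the import path and values are placeholders; the actor fields are left out because they come from the authenticated request context):
import { VaultMappingType } from "./external-migration-types"; // assumed path

const vaultImportInput = {
  vaultUrl: "https://vault.example.com", // placeholder
  vaultAccessToken: "hvs.placeholder-token", // placeholder
  vaultNamespace: "admin", // only required when mappingType is VaultMappingType.Namespace
  mappingType: VaultMappingType.Namespace
};

// await externalMigrationService.importVaultData({ ...vaultImportInput, ...actorContext });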

@@ -1,5 +1,17 @@
import { TOrgPermission } from "@app/lib/types";
import { ActorAuthMethod, ActorType } from "../auth/auth-type";
export enum ImportType {
EnvKey = "envkey",
Vault = "vault"
}
export enum VaultMappingType {
Namespace = "namespace",
KeyVault = "key-vault"
}
export type InfisicalImportData = {
projects: Array<{ name: string; id: string }>;
environments: Array<{ name: string; id: string; projectId: string; envParentId?: string }>;
@@ -14,14 +26,17 @@ export type InfisicalImportData = {
}>;
};
export type TImportEnvKeyDataCreate = {
export type TImportEnvKeyDataDTO = {
decryptionKey: string;
encryptedJson: { nonce: string; data: string };
actor: ActorType;
actorId: string;
actorOrgId: string;
actorAuthMethod: ActorAuthMethod;
};
} & Omit<TOrgPermission, "orgId">;
export type TImportVaultDataDTO = {
vaultAccessToken: string;
vaultNamespace?: string;
mappingType: VaultMappingType;
vaultUrl: string;
} & Omit<TOrgPermission, "orgId">;
export type TImportInfisicalDataCreate = {
data: InfisicalImportData;

@@ -15,5 +15,16 @@ export type TIdentityAccessTokenJwtPayload = {
namespace: string;
name: string;
};
aws?: {
accountId: string;
arn: string;
userId: string;
// Derived from ARN
partition: string; // "aws", "aws-gov", "aws-cn"
service: string; // "iam", "sts"
resourceType: string; // "user" or "role"
resourceName: string;
};
};
};
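For reference, a hedged illustration of the aws block once an AWS-auth access token is decoded (every value below is a placeholder; unrelated claims are omitted):
const decodedIdentityAccessTokenClaims = {
  // ...standard identity access token claims omitted
  identityAuth: {
    aws: {
      accountId: "123456789012",
      arn: "arn:aws:sts::123456789012:assumed-role/MyRole/my-session",
      userId: "AROAEXAMPLEID:my-session",
      // Derived from the ARN
      partition: "aws",
      service: "sts",
      resourceType: "role",
      resourceName: "MyRole"
    }
  }
};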

@@ -1,67 +1,91 @@
interface PrincipalArnEntity {
Partition: string;
Service: "iam" | "sts";
AccountNumber: string;
Type: "user" | "role" | "instance-profile";
Path: string;
FriendlyName: string;
SessionInfo: string; // Only populated for assumed-role
}
export const extractPrincipalArnEntity = (arn: string): PrincipalArnEntity => {
// split the ARN into parts using ":" as the delimiter
const fullParts = arn.split(":");
if (fullParts.length !== 6) {
throw new Error(`Unrecognized ARN: "${arn}" contains ${fullParts.length} colon-separated parts, expected 6`);
}
const [prefix, partition, service, , accountNumber, resource] = fullParts;
if (prefix !== "arn") {
throw new Error(`Unrecognized ARN: "${arn}" does not begin with "arn:"`);
}
// validate the service is either 'iam' or 'sts'
if (service !== "iam" && service !== "sts") {
throw new Error(`Unrecognized service: "${service}" in ARN "${arn}", expected "iam" or "sts"`);
}
// parse the last part of the ARN which describes the resource
const parts = resource.split("/");
if (parts.length < 2) {
throw new Error(
`Unrecognized ARN: "${resource}" in ARN "${arn}" contains fewer than 2 slash-separated parts (expected type/name)`
);
}
const [rawType, ...rest] = parts;
let finalType: PrincipalArnEntity["Type"];
let friendlyName: string = parts[parts.length - 1];
let path: string = "";
let sessionInfo: string = "";
// handle different types of resources
switch (rawType) {
case "assumed-role": {
if (rest.length < 2) {
throw new Error(
`Unrecognized ARN: "${resource}" for assumed-role in ARN "${arn}" contains fewer than 3 slash-separated parts (type/roleName/sessionId)`
);
}
// assumed roles use a special format where the friendly name is the role name
const [roleName, sessionId] = rest;
finalType = "role"; // treat assumed role case as role
friendlyName = roleName;
sessionInfo = sessionId;
break;
}
case "user":
case "role":
case "instance-profile":
finalType = rawType;
path = rest.slice(0, -1).join("/");
break;
default:
throw new Error(
`Unrecognized principal type: "${rawType}" in ARN "${arn}". Expected "user", "role", "instance-profile", or "assumed-role".`
);
}
const entity: PrincipalArnEntity = {
Partition: partition,
Service: service,
AccountNumber: accountNumber,
Type: finalType,
Path: path,
FriendlyName: friendlyName,
SessionInfo: sessionInfo
};
return entity;
};
/**
* Extracts the identity ARN from the GetCallerIdentity response to one of the following formats:
* - arn:aws:iam::123456789012:user/MyUserName
* - arn:aws:iam::123456789012:role/MyRoleName
*/
export const extractPrincipalArn = (arn: string) => {
// split the ARN into parts using ":" as the delimiter
const fullParts = arn.split(":");
if (fullParts.length !== 6) {
throw new Error(`Unrecognized ARN: contains ${fullParts.length} colon-separated parts, expected 6`);
}
const [prefix, partition, service, , accountNumber, resource] = fullParts;
if (prefix !== "arn") {
throw new Error('Unrecognized ARN: does not begin with "arn:"');
}
// structure to hold the parsed data
const entity = {
Partition: partition,
Service: service,
AccountNumber: accountNumber,
Type: "",
Path: "",
FriendlyName: "",
SessionInfo: ""
};
// validate the service is either 'iam' or 'sts'
if (entity.Service !== "iam" && entity.Service !== "sts") {
throw new Error(`Unrecognized service: ${entity.Service}, not one of iam or sts`);
}
// parse the last part of the ARN which describes the resource
const parts = resource.split("/");
if (parts.length < 2) {
throw new Error(`Unrecognized ARN: "${resource}" contains fewer than 2 slash-separated parts`);
}
const [type, ...rest] = parts;
entity.Type = type;
entity.FriendlyName = parts[parts.length - 1];
// handle different types of resources
switch (entity.Type) {
case "assumed-role": {
if (rest.length < 2) {
throw new Error(`Unrecognized ARN: "${resource}" contains fewer than 3 slash-separated parts`);
}
// assumed roles use a special format where the friendly name is the role name
const [roleName, sessionId] = rest;
entity.Type = "role"; // treat assumed role case as role
entity.FriendlyName = roleName;
entity.SessionInfo = sessionId;
break;
}
case "user":
case "role":
case "instance-profile":
// standard cases: just join back the path if there's any
entity.Path = rest.slice(0, -1).join("/");
break;
default:
throw new Error(`Unrecognized principal type: "${entity.Type}"`);
}
const entity = extractPrincipalArnEntity(arn);
return `arn:aws:iam::${entity.AccountNumber}:${entity.Type}/${entity.FriendlyName}`;
};
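A worked example of the refactored parser (the ARN, account number, and session name are placeholders; the expected values follow from the branches shown above):
import { extractPrincipalArn, extractPrincipalArnEntity } from "./identity-aws-auth-fns"; // assumed path

const entity = extractPrincipalArnEntity("arn:aws:sts::123456789012:assumed-role/MyRole/my-session");
// entity => {
//   Partition: "aws", Service: "sts", AccountNumber: "123456789012",
//   Type: "role", Path: "", FriendlyName: "MyRole", SessionInfo: "my-session"
// }

const normalized = extractPrincipalArn("arn:aws:sts::123456789012:assumed-role/MyRole/my-session");
// normalized => "arn:aws:iam::123456789012:role/MyRole"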

@@ -22,7 +22,7 @@ import { TIdentityAccessTokenDALFactory } from "../identity-access-token/identit
import { TIdentityAccessTokenJwtPayload } from "../identity-access-token/identity-access-token-types";
import { validateIdentityUpdateForSuperAdminPrivileges } from "../super-admin/super-admin-fns";
import { TIdentityAwsAuthDALFactory } from "./identity-aws-auth-dal";
import { extractPrincipalArn } from "./identity-aws-auth-fns";
import { extractPrincipalArn, extractPrincipalArnEntity } from "./identity-aws-auth-fns";
import {
TAttachAwsAuthDTO,
TAwsGetCallerIdentityHeaders,
@@ -107,7 +107,7 @@ export const identityAwsAuthServiceFactory = ({
const {
data: {
GetCallerIdentityResponse: {
GetCallerIdentityResult: { Account, Arn }
GetCallerIdentityResult: { Account, Arn, UserId }
}
}
}: { data: TGetCallerIdentityResponse } = await axios({
@@ -168,11 +168,25 @@ export const identityAwsAuthServiceFactory = ({
});
const appCfg = getConfig();
const splitArn = extractPrincipalArnEntity(Arn);
const accessToken = crypto.jwt().sign(
{
identityId: identityAwsAuth.identityId,
identityAccessTokenId: identityAccessToken.id,
authTokenType: AuthTokenType.IDENTITY_ACCESS_TOKEN
authTokenType: AuthTokenType.IDENTITY_ACCESS_TOKEN,
identityAuth: {
aws: {
accountId: Account,
arn: Arn,
userId: UserId,
// Derived from ARN
partition: splitArn.Partition,
service: splitArn.Service,
resourceType: splitArn.Type,
resourceName: splitArn.FriendlyName
}
}
} as TIdentityAccessTokenJwtPayload,
appCfg.AUTH_SECRET,
// akhilmhdh: for non-expiry tokens you should not even set the value, including undefined. Even for undefined jsonwebtoken throws error

@@ -1,9 +1,11 @@
import { ForbiddenError } from "@casl/ability";
import { ActionProjectType } from "@app/db/schemas";
import { TAccessApprovalPolicyEnvironmentDALFactory } from "@app/ee/services/access-approval-policy/access-approval-policy-environment-dal";
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service-types";
import { ProjectPermissionActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission";
import { TSecretApprovalPolicyEnvironmentDALFactory } from "@app/ee/services/secret-approval-policy/secret-approval-policy-environment-dal";
import { KeyStorePrefixes, TKeyStoreFactory } from "@app/keystore/keystore";
import { BadRequestError, NotFoundError } from "@app/lib/errors";
import { logger } from "@app/lib/logger";
@@ -20,6 +22,8 @@ type TProjectEnvServiceFactoryDep = {
permissionService: Pick<TPermissionServiceFactory, "getProjectPermission">;
licenseService: Pick<TLicenseServiceFactory, "getPlan">;
keyStore: Pick<TKeyStoreFactory, "acquireLock" | "setItemWithExpiry" | "getItem" | "waitTillReady">;
accessApprovalPolicyEnvironmentDAL: Pick<TAccessApprovalPolicyEnvironmentDALFactory, "findAvailablePoliciesByEnvId">;
secretApprovalPolicyEnvironmentDAL: Pick<TSecretApprovalPolicyEnvironmentDALFactory, "findAvailablePoliciesByEnvId">;
};
export type TProjectEnvServiceFactory = ReturnType<typeof projectEnvServiceFactory>;
@@ -30,7 +34,9 @@ export const projectEnvServiceFactory = ({
licenseService,
keyStore,
projectDAL,
folderDAL
folderDAL,
accessApprovalPolicyEnvironmentDAL,
secretApprovalPolicyEnvironmentDAL
}: TProjectEnvServiceFactoryDep) => {
const createEnvironment = async ({
projectId,
@@ -220,6 +226,20 @@ export const projectEnvServiceFactory = ({
}
const env = await projectEnvDAL.transaction(async (tx) => {
const secretApprovalPolicies = await secretApprovalPolicyEnvironmentDAL.findAvailablePoliciesByEnvId(id, tx);
if (secretApprovalPolicies.length > 0) {
throw new BadRequestError({
message: "Environment is in use by a secret approval policy",
name: "DeleteEnvironment"
});
}
const accessApprovalPolicies = await accessApprovalPolicyEnvironmentDAL.findAvailablePoliciesByEnvId(id, tx);
if (accessApprovalPolicies.length > 0) {
throw new BadRequestError({
message: "Environment is in use by an access approval policy",
name: "DeleteEnvironment"
});
}
const [doc] = await projectEnvDAL.delete({ id, projectId }, tx);
if (!doc)
throw new NotFoundError({

@@ -645,7 +645,7 @@ export const projectServiceFactory = ({
const updateProject = async ({ actor, actorId, actorOrgId, actorAuthMethod, update, filter }: TUpdateProjectDTO) => {
const project = await projectDAL.findProjectByFilter(filter);
const { permission } = await permissionService.getProjectPermission({
const { permission, hasRole } = await permissionService.getProjectPermission({
actor,
actorId,
projectId: project.id,
@@ -667,6 +667,12 @@ export const projectServiceFactory = ({
}
}
if (update.secretDetectionIgnoreValues && !hasRole(ProjectMembershipRole.Admin)) {
throw new ForbiddenRequestError({
message: "Only admins can update secret detection ignore values"
});
}
const updatedProject = await projectDAL.updateById(project.id, {
name: update.name,
description: update.description,
@@ -676,7 +682,8 @@ export const projectServiceFactory = ({
slug: update.slug,
secretSharing: update.secretSharing,
defaultProduct: update.defaultProduct,
showSnapshotsLegacy: update.showSnapshotsLegacy
showSnapshotsLegacy: update.showSnapshotsLegacy,
secretDetectionIgnoreValues: update.secretDetectionIgnoreValues
});
return updatedProject;

@@ -96,6 +96,7 @@ export type TUpdateProjectDTO = {
slug?: string;
secretSharing?: boolean;
showSnapshotsLegacy?: boolean;
secretDetectionIgnoreValues?: string[];
};
} & Omit<TProjectPermission, "projectId">;
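A placeholder update payload exercising the new field (per the service change above, only project admins may set it):
// Assuming TUpdateProjectDTO is imported from the project types module shown above.
const update: TUpdateProjectDTO["update"] = {
  secretDetectionIgnoreValues: ["EXAMPLE_PLACEHOLDER_VALUE", "another-known-false-positive"]
};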

@@ -11,7 +11,7 @@ import { TReminderServiceFactory } from "./reminder-types";
type TDailyReminderQueueServiceFactoryDep = {
reminderService: TReminderServiceFactory;
queueService: TQueueServiceFactory;
secretDAL: Pick<TSecretV2BridgeDALFactory, "transaction" | "findSecretsWithReminderRecipients">;
secretDAL: Pick<TSecretV2BridgeDALFactory, "transaction" | "findSecretsWithReminderRecipientsOld">;
secretReminderRecipientsDAL: Pick<TSecretReminderRecipientsDALFactory, "delete">;
};
@@ -69,7 +69,7 @@ export const dailyReminderQueueServiceFactory = ({
// Find existing secrets with pagination
// eslint-disable-next-line no-await-in-loop
const secrets = await secretDAL.findSecretsWithReminderRecipients(batchIds, REMINDER_PRUNE_BATCH_SIZE);
const secrets = await secretDAL.findSecretsWithReminderRecipientsOld(batchIds, REMINDER_PRUNE_BATCH_SIZE);
const secretsWithReminder = secrets.filter((secret) => secret.reminderRepeatDays);
const foundSecretIds = new Set(secretsWithReminder.map((secret) => secret.id));
@@ -173,12 +173,6 @@ export const dailyReminderQueueServiceFactory = ({
{ pattern: "0 */1 * * *", utc: true },
QueueName.SecretReminderMigration // just a job id
);
await queueService.queue(QueueName.SecretReminderMigration, QueueJobs.SecretReminderMigration, undefined, {
delay: 5000,
jobId: QueueName.SecretReminderMigration,
repeat: { pattern: "0 */1 * * *", utc: true }
});
};
queueService.listen(QueueName.DailyReminders, "failed", (_, err) => {

@@ -308,12 +308,11 @@ export const reminderServiceFactory = ({
);
const newReminders = await reminderDAL.insertMany(
processedReminders.map(({ secretId, message, repeatDays, nextReminderDate, projectId }) => ({
processedReminders.map(({ secretId, message, repeatDays, nextReminderDate }) => ({
secretId,
message,
repeatDays,
nextReminderDate,
projectId
nextReminderDate
})),
tx
);

@@ -8,7 +8,26 @@ import { TSecretMap } from "@app/services/secret-sync/secret-sync-types";
import { TRenderSecret, TRenderSyncWithCredentials } from "./render-sync-types";
const getRenderEnvironmentSecrets = async (secretSync: TRenderSyncWithCredentials) => {
const MAX_RETRIES = 5;
const retrySleep = async () =>
new Promise((resolve) => {
setTimeout(resolve, 60000);
});
const makeRequestWithRetry = async <T>(requestFn: () => Promise<T>, attempt = 0): Promise<T> => {
try {
return await requestFn();
} catch (error) {
if (isAxiosError(error) && error.response?.status === 429 && attempt < MAX_RETRIES) {
await retrySleep();
return await makeRequestWithRetry(requestFn, attempt + 1);
}
throw error;
}
};
const getRenderEnvironmentSecrets = async (secretSync: TRenderSyncWithCredentials): Promise<TRenderSecret[]> => {
const {
destinationConfig,
connection: {
@@ -22,20 +41,23 @@ const getRenderEnvironmentSecrets = async (secretSync: TRenderSyncWithCredential
do {
const url = cursor ? `${baseUrl}?cursor=${cursor}` : baseUrl;
const { data } = await request.get<
{
envVar: {
key: string;
value: string;
};
cursor: string;
}[]
>(url, {
headers: {
Authorization: `Bearer ${apiKey}`,
Accept: "application/json"
}
});
const { data } = await makeRequestWithRetry(() =>
request.get<
{
envVar: {
key: string;
value: string;
};
cursor: string;
}[]
>(url, {
headers: {
Authorization: `Bearer ${apiKey}`,
Accept: "application/json"
}
})
);
const secrets = data.map((item) => ({
key: item.envVar.key,
@@ -44,13 +66,20 @@ const getRenderEnvironmentSecrets = async (secretSync: TRenderSyncWithCredential
allSecrets.push(...secrets);
cursor = data[data.length - 1]?.cursor;
if (data.length > 0 && data[data.length - 1]?.cursor) {
cursor = data[data.length - 1].cursor;
} else {
cursor = undefined;
}
} while (cursor);
return allSecrets;
};
const putEnvironmentSecret = async (secretSync: TRenderSyncWithCredentials, secretMap: TSecretMap, key: string) => {
const batchUpdateEnvironmentSecrets = async (
secretSync: TRenderSyncWithCredentials,
envVars: Array<{ key: string; value: string }>
): Promise<void> => {
const {
destinationConfig,
connection: {
@@ -58,22 +87,17 @@ const putEnvironmentSecret = async (secretSync: TRenderSyncWithCredentials, secr
}
} = secretSync;
await request.put(
`${IntegrationUrls.RENDER_API_URL}/v1/services/${destinationConfig.serviceId}/env-vars/${key}`,
{
key,
value: secretMap[key].value
},
{
await makeRequestWithRetry(() =>
request.put(`${IntegrationUrls.RENDER_API_URL}/v1/services/${destinationConfig.serviceId}/env-vars`, envVars, {
headers: {
Authorization: `Bearer ${apiKey}`,
Accept: "application/json"
}
}
})
);
};
const deleteEnvironmentSecret = async (secretSync: TRenderSyncWithCredentials, secret: Pick<TRenderSecret, "key">) => {
const redeployService = async (secretSync: TRenderSyncWithCredentials) => {
const {
destinationConfig,
connection: {
@@ -81,70 +105,81 @@ const deleteEnvironmentSecret = async (secretSync: TRenderSyncWithCredentials, s
}
} = secretSync;
try {
await request.delete(
`${IntegrationUrls.RENDER_API_URL}/v1/services/${destinationConfig.serviceId}/env-vars/${secret.key}`,
await makeRequestWithRetry(() =>
request.post(
`${IntegrationUrls.RENDER_API_URL}/v1/services/${destinationConfig.serviceId}/deploys`,
{},
{
headers: {
Authorization: `Bearer ${apiKey}`,
Accept: "application/json"
}
}
);
} catch (error) {
if (isAxiosError(error) && error.response?.status === 404) {
// If the secret does not exist, we can ignore this error
return;
}
throw error;
}
)
);
};
const sleep = async () =>
new Promise((resolve) => {
setTimeout(resolve, 500);
});
export const RenderSyncFns = {
syncSecrets: async (secretSync: TRenderSyncWithCredentials, secretMap: TSecretMap) => {
const renderSecrets = await getRenderEnvironmentSecrets(secretSync);
for await (const key of Object.keys(secretMap)) {
// If value is empty skip it as render does not allow empty variables
if (secretMap[key].value === "") {
// eslint-disable-next-line no-continue
continue;
const finalEnvVars: Array<{ key: string; value: string }> = [];
for (const renderSecret of renderSecrets) {
const shouldKeep =
secretMap[renderSecret.key] ||
(secretSync.syncOptions.disableSecretDeletion &&
!matchesSchema(renderSecret.key, secretSync.environment?.slug || "", secretSync.syncOptions.keySchema));
if (shouldKeep && !secretMap[renderSecret.key]) {
finalEnvVars.push({
key: renderSecret.key,
value: renderSecret.value
});
}
await putEnvironmentSecret(secretSync, secretMap, key);
await sleep();
}
if (secretSync.syncOptions.disableSecretDeletion) return;
for await (const renderSecret of renderSecrets) {
if (!matchesSchema(renderSecret.key, secretSync.environment?.slug || "", secretSync.syncOptions.keySchema))
for (const [key, secret] of Object.entries(secretMap)) {
// Skip empty values as render does not allow empty variables
if (secret.value === "") {
// eslint-disable-next-line no-continue
continue;
if (!secretMap[renderSecret.key]) {
await deleteEnvironmentSecret(secretSync, renderSecret);
await sleep();
}
finalEnvVars.push({
key,
value: secret.value
});
}
await batchUpdateEnvironmentSecrets(secretSync, finalEnvVars);
if (secretSync.syncOptions.autoRedeployServices) {
await redeployService(secretSync);
}
},
getSecrets: async (secretSync: TRenderSyncWithCredentials): Promise<TSecretMap> => {
const renderSecrets = await getRenderEnvironmentSecrets(secretSync);
return Object.fromEntries(renderSecrets.map((secret) => [secret.key, { value: secret.value ?? "" }]));
},
removeSecrets: async (secretSync: TRenderSyncWithCredentials, secretMap: TSecretMap) => {
const encryptedSecrets = await getRenderEnvironmentSecrets(secretSync);
const renderSecrets = await getRenderEnvironmentSecrets(secretSync);
const finalEnvVars: Array<{ key: string; value: string }> = [];
for await (const encryptedSecret of encryptedSecrets) {
if (encryptedSecret.key in secretMap) {
await deleteEnvironmentSecret(secretSync, encryptedSecret);
await sleep();
for (const renderSecret of renderSecrets) {
if (!(renderSecret.key in secretMap)) {
finalEnvVars.push({
key: renderSecret.key,
value: renderSecret.value
});
}
}
await batchUpdateEnvironmentSecrets(secretSync, finalEnvVars);
if (secretSync.syncOptions.autoRedeployServices) {
await redeployService(secretSync);
}
}
};

View File

@@ -20,23 +20,33 @@ const RenderSyncDestinationConfigSchema = z.discriminatedUnion("scope", [
})
]);
const RenderSyncOptionsSchema = z.object({
autoRedeployServices: z.boolean().optional().describe(SecretSyncs.ADDITIONAL_SYNC_OPTIONS.RENDER.autoRedeployServices)
});
const RenderSyncOptionsConfig: TSyncOptionsConfig = { canImportSecrets: true };
export const RenderSyncSchema = BaseSecretSyncSchema(SecretSync.Render, RenderSyncOptionsConfig).extend({
export const RenderSyncSchema = BaseSecretSyncSchema(
SecretSync.Render,
RenderSyncOptionsConfig,
RenderSyncOptionsSchema
).extend({
destination: z.literal(SecretSync.Render),
destinationConfig: RenderSyncDestinationConfigSchema
});
export const CreateRenderSyncSchema = GenericCreateSecretSyncFieldsSchema(
SecretSync.Render,
RenderSyncOptionsConfig
RenderSyncOptionsConfig,
RenderSyncOptionsSchema
).extend({
destinationConfig: RenderSyncDestinationConfigSchema
});
export const UpdateRenderSyncSchema = GenericUpdateSecretSyncFieldsSchema(
SecretSync.Render,
RenderSyncOptionsConfig
RenderSyncOptionsConfig,
RenderSyncOptionsSchema
).extend({
destinationConfig: RenderSyncDestinationConfigSchema.optional()
});
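// Example (illustrative): with the extended schemas above, a Render sync's syncOptions payload can
// carry the provider-specific flag alongside the generic options, e.g. { autoRedeployServices: true }.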

View File

@@ -875,6 +875,48 @@ export const secretV2BridgeDALFactory = ({ db, keyStore }: TSecretV2DalArg) => {
}
};
const findSecretsWithReminderRecipientsOld = async (ids: string[], limit: number, tx?: Knex) => {
try {
// Create a subquery to get limited secret IDs
const limitedSecretIds = (tx || db)(TableName.SecretV2)
.whereIn(`${TableName.SecretV2}.id`, ids)
.limit(limit)
.select("id");
// Join with all recipients for the limited secrets
const docs = await (tx || db)(TableName.SecretV2)
.whereIn(`${TableName.SecretV2}.id`, limitedSecretIds)
.leftJoin(TableName.Reminder, `${TableName.SecretV2}.id`, `${TableName.Reminder}.secretId`)
.leftJoin(
TableName.SecretReminderRecipients,
`${TableName.SecretV2}.id`,
`${TableName.SecretReminderRecipients}.secretId`
)
.select(selectAllTableCols(TableName.SecretV2))
.select(db.ref("userId").withSchema(TableName.SecretReminderRecipients).as("reminderRecipientUserId"));
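// Collapse the joined rows so each secret carries a `recipients` array of reminder recipient user IDs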
const data = sqlNestRelationships({
data: docs,
key: "id",
parentMapper: (el) => ({
_id: el.id,
...SecretsV2Schema.parse(el)
}),
childrenMapper: [
{
key: "reminderRecipientUserId",
label: "recipients" as const,
mapper: ({ reminderRecipientUserId }) => reminderRecipientUserId
}
]
});
return data;
} catch (error) {
throw new DatabaseError({ error, name: "findSecretsWithReminderRecipientsOld" });
}
};
return {
...secretOrm,
update,
@@ -893,6 +935,7 @@ export const secretV2BridgeDALFactory = ({ db, keyStore }: TSecretV2DalArg) => {
findOne,
find,
invalidateSecretCacheByProjectId,
findSecretsWithReminderRecipients
findSecretsWithReminderRecipients,
findSecretsWithReminderRecipientsOld
};
};

View File

@@ -25,6 +25,7 @@ import {
import { TSecretApprovalPolicyServiceFactory } from "@app/ee/services/secret-approval-policy/secret-approval-policy-service";
import { TSecretApprovalRequestDALFactory } from "@app/ee/services/secret-approval-request/secret-approval-request-dal";
import { TSecretApprovalRequestSecretDALFactory } from "@app/ee/services/secret-approval-request/secret-approval-request-secret-dal";
import { scanSecretPolicyViolations } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-fns";
import { TSecretSnapshotServiceFactory } from "@app/ee/services/secret-snapshot/secret-snapshot-service";
import { TKeyStoreFactory } from "@app/keystore/keystore";
import { DatabaseErrorCode } from "@app/lib/error-codes";
@@ -38,6 +39,7 @@ import { ActorType } from "../auth/auth-type";
import { TCommitResourceChangeDTO, TFolderCommitServiceFactory } from "../folder-commit/folder-commit-service";
import { TKmsServiceFactory } from "../kms/kms-service";
import { KmsDataKey } from "../kms/kms-types";
import { TProjectDALFactory } from "../project/project-dal";
import { TProjectEnvDALFactory } from "../project-env/project-env-dal";
import { TReminderServiceFactory } from "../reminder/reminder-types";
import { TResourceMetadataDALFactory } from "../resource-metadata/resource-metadata-dal";
@@ -88,6 +90,7 @@ import { TSecretVersionV2TagDALFactory } from "./secret-version-tag-dal";
type TSecretV2BridgeServiceFactoryDep = {
secretDAL: TSecretV2BridgeDALFactory;
projectDAL: Pick<TProjectDALFactory, "findById">;
secretVersionDAL: TSecretVersionV2DALFactory;
kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">;
secretVersionTagDAL: Pick<TSecretVersionV2TagDALFactory, "insertMany">;
@@ -126,6 +129,7 @@ export type TSecretV2BridgeServiceFactory = ReturnType<typeof secretV2BridgeServ
*/
export const secretV2BridgeServiceFactory = ({
secretDAL,
projectDAL,
projectEnvDAL,
secretTagDAL,
secretVersionDAL,
@@ -295,6 +299,19 @@ export const secretV2BridgeServiceFactory = ({
})
);
const project = await projectDAL.findById(projectId);
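// Check the incoming secret value against the project's secret scanning policy before it is
// written; values listed in secretDetectionIgnoreValues are exempt from the scan.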
await scanSecretPolicyViolations(
projectId,
secretPath,
[
{
secretKey: inputSecret.secretName,
secretValue: inputSecret.secretValue
}
],
project.secretDetectionIgnoreValues || []
);
const { nestedReferences, localReferences } = getAllSecretReferences(inputSecret.secretValue);
const allSecretReferences = nestedReferences.concat(
localReferences.map((el) => ({ secretKey: el, secretPath, environment }))
@@ -506,6 +523,21 @@ export const secretV2BridgeServiceFactory = ({
const { secretName, secretValue } = inputSecret;
if (secretValue) {
const project = await projectDAL.findById(projectId);
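// On updates, the scan only runs when a new value is provided, and uses the secret's final
// key name (newSecretName when the secret is being renamed).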
await scanSecretPolicyViolations(
projectId,
secretPath,
[
{
secretKey: inputSecret.newSecretName || secretName,
secretValue
}
],
project.secretDetectionIgnoreValues || []
);
}
const { encryptor: secretManagerEncryptor } = await kmsService.createCipherPairWithDataKey({
type: KmsDataKey.SecretManager,
projectId
@@ -1585,6 +1617,9 @@ export const secretV2BridgeServiceFactory = ({
if (secrets.length)
throw new BadRequestError({ message: `Secret already exist: ${secrets.map((el) => el.key).join(",")}` });
const project = await projectDAL.findById(projectId);
await scanSecretPolicyViolations(projectId, secretPath, inputSecrets, project.secretDetectionIgnoreValues || []);
// get all tags
const sanitizedTagIds = inputSecrets.flatMap(({ tagIds = [] }) => tagIds);
const tags = sanitizedTagIds.length ? await secretTagDAL.findManyTagsById(projectId, sanitizedTagIds) : [];
@@ -1925,6 +1960,19 @@ export const secretV2BridgeServiceFactory = ({
});
await $validateSecretReferences(projectId, permission, secretReferences, tx);
const project = await projectDAL.findById(projectId);
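// Only entries whose value is changing are scanned; key-only updates skip the scan.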
await scanSecretPolicyViolations(
projectId,
secretPath,
secretsToUpdate
.filter((el) => el.secretValue)
.map((el) => ({
secretKey: el.newSecretName || el.secretKey,
secretValue: el.secretValue as string
})),
project.secretDetectionIgnoreValues || []
);
const bulkUpdatedSecrets = await fnSecretBulkUpdate({
folderId,
orgId: actorOrgId,

View File

@@ -198,6 +198,14 @@
"documentation/platform/workflow-integrations/microsoft-teams-integration"
]
},
{
"group": "External Migrations",
"pages": [
"documentation/platform/external-migrations/overview",
"documentation/platform/external-migrations/envkey",
"documentation/platform/external-migrations/vault"
]
},
{
"group": "Admin Consoles",
"pages": [

View File

@@ -1,41 +0,0 @@
---
title: "Migrating from EnvKey to Infisical"
sidebarTitle: "Migration"
description: "Learn how to migrate from EnvKey to Infisical in the easiest way possible."
---
## What is Infisical?
[Infisical](https://infisical.com) is an open-source all-in-one secret management platform that helps developers manage secrets (e.g., API-keys, DB access tokens, [certificates](https://infisical.com/docs/documentation/platform/pki/overview)) across their infrastructure. In addition, Infisical provides [secret sharing](https://infisical.com/docs/documentation/platform/secret-sharing) functionality, ability to [prevent secret leaks](https://infisical.com/docs/cli/scanning-overview), and more.
Infisical is used by 10,000+ organizations across all industries including First American Financial Corporation, Delivery Hero, and [Hugging Face](https://infisical.com/customers/hugging-face).
## Migrating from EnvKey
<Steps>
<Step>
Open the EnvKey dashboard and go to My Org.
![EnvKey Dashboard](../../images/guides/import-envkey/envkey-dashboard.png)
</Step>
<Step>
Go to Import/Export on the top right corner, Click on Export Org and save the exported file.
![Export organization](../../images/guides/import-envkey/envkey-export.png)
</Step>
<Step>
Click on copy to copy the encryption key and save it.
![Copy encryption key](../../images/guides/import-envkey/copy-encryption-key.png)
</Step>
<Step>
Open the Infisical dashboard and go to Organization Settings > Import.
![Infisical Organization settings](../../images/guides/import-envkey/infisical-import-dashboard.png)
</Step>
<Step>
Upload the exported file from EnvKey, paste the encryption key and click Import.
![Infisical Import EnvKey](../../images/guides/import-envkey/infisical-import-envkey.png)
</Step>
</Steps>
## Talk to our team
To make the migration process even more seamless, you can [schedule a meeting with our team](https://infisical.cal.com/vlad/migration-from-envkey-to-infisical) to learn more about how Infisical compares to EnvKey and discuss unique needs of your organization. You are also welcome to email us at [support@infisical.com](mailto:support@infisical.com) to ask any questions or get any technical help.

View File

@@ -30,10 +30,10 @@ For methods like OIDC, these come as claims in the token and can be made availab
<Tabs>
<Tab title="OIDC Login Attributes">
1. Navigate to the Identity Authentication settings and select the OIDC Auth Method.
2. In the **Advanced section**, locate the Claim Mapping configuration.
3. Map the OIDC claims to permission attributes by specifying:
- **Attribute Name:** The identifier to be used in your policies (e.g., department).
- **Claim Path:** The dot notation path to the claim in the OIDC token (e.g., user.department).
For example, if your OIDC provider returns:
@@ -64,7 +64,7 @@ For methods like OIDC, these come as claims in the token and can be made availab
</Tab>
<Tab title="Kubernetes Login Attributes">
For identities authenticated using Kubernetes, the service account's namespace and name are available in their policy and can be accessed as follows:
```
{{ identity.auth.kubernetes.namespace }}
@@ -72,9 +72,25 @@ For methods like OIDC, these come as claims in the token and can be made availab
```
<img src="/images/platform/access-controls/abac-policy-k8s-format.png" />
</Tab>
<Tab title="AWS Attributes">
For identities authenticated using AWS Auth, several attributes can be accessed. On top of the three base attributes, there are four more derived from the ARN. The example below includes comments showing what each derived attribute looks like for this ARN: `arn:aws:iam::123456789012:user/example-user`
```
{{ identity.auth.aws.accountId }}
{{ identity.auth.aws.arn }}
{{ identity.auth.aws.userId }}
// Derived from ARN
{{ identity.auth.aws.partition }} // aws
{{ identity.auth.aws.service }} // iam
{{ identity.auth.aws.resourceType }} // user
{{ identity.auth.aws.resourceName }} // example-user
```
<img src="/images/platform/access-controls/abac-policy-aws-format.png" />
</Tab>
<Tab title="Other Authentication Method Attributes">
At the moment we only support OIDC claims. Payloads on other authentication methods are not yet accessible.
At the moment we only support OIDC claims, Kubernetes attributes, and AWS attributes. Payloads on other authentication methods are not yet accessible.
</Tab>
</Tabs>

View File

@@ -0,0 +1,44 @@
---
title: "Migrating from EnvKey to Infisical"
sidebarTitle: "EnvKey"
description: "Learn how to migrate secrets from EnvKey to Infisical."
---
## Migrating from EnvKey
<Steps>
<Step title="Open the EnvKey dashboard and go to My Org">
![EnvKey Dashboard](/images/platform/external-migrations/envkey-dashboard.png)
</Step>
<Step title="Export your EnvKey organization">
Go to Import/Export in the top right corner, click Export Org, and save the exported file.
![Export organization](/images/platform/external-migrations/envkey-export.png)
</Step>
<Step title="Obtain EnvKey encryption key">
Click Copy to copy the encryption key and save it.
![Copy encryption key](/images/platform/external-migrations/envkey-copy-encryption-key.png)
</Step>
<Step title="Navigate to Infisical external migrations">
Open the Infisical dashboard and go to Organization Settings > External Migrations.
![Infisical Organization settings](/images/platform/external-migrations/infisical-external-migration-dashboard.png)
</Step>
<Step title="Select the EnvKey platform">
Select the EnvKey platform and click on Next.
![Select EnvKey platform](/images/platform/external-migrations/infisical-import-envkey-modal.png)
</Step>
<Step title="Upload the exported file from EnvKey">
Upload the exported file from EnvKey, paste the encryption key, and click Import data.
![Infisical Import EnvKey](/images/platform/external-migrations/infisical-import-envkey.png)
</Step>
</Steps>
<Note>
The migration may take several minutes to complete. You will receive an email when the migration completes, or if any errors occur during the process.
</Note>
## Talk to our team
To make the migration process even more seamless, you can [schedule a meeting with our team](https://infisical.cal.com/vlad/migration-from-envkey-to-infisical) to learn more about how Infisical compares to EnvKey and discuss unique needs of your organization. You are also welcome to email us at [support@infisical.com](mailto:support@infisical.com) to ask any questions or get any technical help.

View File

@@ -0,0 +1,16 @@
---
title: "External Migrations"
sidebarTitle: "Overview"
description: "Learn how to migrate secrets from third-party secrets management platforms to Infisical."
---
## Overview
Infisical supports migrating secrets from third-party secret management platforms. This is useful if you're looking to switch to Infisical and want to bring over your existing secrets from another platform.
## Supported Platforms
- [EnvKey](./envkey)
- [Vault](./vault)
We're always looking to add more migration paths for other providers. If we're missing a platform, please open an issue on our [GitHub repository](https://github.com/infisical/infisical/issues).

View File

@@ -0,0 +1,127 @@
---
title: "Migrating from Vault to Infisical"
sidebarTitle: "Vault"
description: "Learn how to migrate secrets from Vault to Infisical."
---
## Migrating from Vault
Migrating from Vault Self-Hosted or Dedicated Vault is a straightforward process with our built-in migration option. To migrate from Vault, you'll need to provide Infisical with an access token for your Vault instance.
Currently, the Vault migration only supports migrating secrets from the KV v2 secrets engine. If you're using a different secrets engine, please open an issue on our [GitHub repository](https://github.com/infisical/infisical/issues).
### Prerequisites
- A Vault instance with the KV v2 secrets engine enabled.
- An access token to your Vault instance.
### Project Mapping
When migrating from Vault, you'll need to choose how you want to map your Vault resources to Infisical projects.
There are two options for project mapping, illustrated in the sketch after this list:
- `Namespace`: This maps your selected Vault namespace to a single Infisical project. Each KV secret engine within the namespace is mapped to an Infisical environment within that project. This means if you have 3 KV secret engines, you'll end up with 3 environments inside the same project, each named after its KV secret engine.
- `Key Vault`: This maps each KV secret engine within your Vault instance to its own Infisical project. This means if you have 3 KV secret engines, you'll end up with 3 Infisical projects. Each created project gets a single default environment called `Production`, which contains all the secrets from the corresponding KV secret engine.
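As a sketch (hypothetical engine and project names), three KV v2 engines would be organized as follows under each option:
```typescript
// Hypothetical names, for illustration only.
// `Namespace` mapping: one project for the namespace, one environment per KV v2 engine.
const namespaceMapping = {
  project: "team-a", // the selected Vault namespace
  environments: ["kv-app", "kv-infra", "kv-ci"] // one environment per KV v2 engine
};

// `Key Vault` mapping: one project per KV v2 engine, each with a single `Production` environment.
const keyVaultMapping = [
  { project: "kv-app", environments: ["Production"] },
  { project: "kv-infra", environments: ["Production"] },
  { project: "kv-ci", environments: ["Production"] }
];
```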
<Steps>
<Step title="Create a Vault policy">
In order to migrate from Vault, you'll need to create a Vault policy that allows Infisical to read the secrets and metadata from the KV v2 secrets engines within your Vault instance.
```python
# Allow listing secret engines/mounts
path "sys/mounts" {
capabilities = ["read", "list"]
}
# For KV v2 engines - access to both data and metadata
path "*/data/*" {
capabilities = ["read", "list"]
}
path "*/metadata/*" {
capabilities = ["read", "list"]
}
# If using Vault Enterprise - allow listing namespaces
path "sys/namespaces" {
capabilities = ["list", "read"]
}
# Cross-namespace access (Enterprise only)
path "+/*" {
capabilities = ["read", "list"]
}
path "+/sys/mounts" {
capabilities = ["read", "list"]
}
```
Save this policy with the name `infisical-migration`.
</Step>
<Step title="Generate an access token">
You can use the Vault CLI to easily generate an access token for the new `infisical-migration` policy that you created in the previous step.
```bash
vault token create --policy="infisical-migration"
```
After generating the token, you should see the following output:
```t
$ vault token create --policy="infisical-migration"
Key Value
--- -----
token <your-access-token>
token_accessor p6kJDiBSzYYdabJUIpGCsCBm
token_duration 768h
token_renewable true
token_policies ["default" "infisical-migration"]
identity_policies []
policies ["default" "infisical-migration"]
```
Copy the `token` field and save it for later, as you'll need this when configuring the migration to Infisical.
</Step>
<Step title="Navigate to Infisical external migrations">
Open the Infisical dashboard and go to Organization Settings > External Migrations.
![Infisical Organization settings](/images/platform/external-migrations/infisical-external-migration-dashboard.png)
</Step>
<Step title="Select the Vault platform">
Select the Vault platform and click on Next.
![Select Vault platform](/images/platform/external-migrations/infisical-import-vault-modal.png)
</Step>
<Step title="Configure the Vault migration">
Configure the migration with the details of your Vault instance, including the access token you generated in the previous step.
![Configure Vault migration](/images/platform/external-migrations/infisical-import-vault.png)
- `Vault URL`: The URL of your Vault instance.
- `Vault Namespace`: The namespace of your Vault instance. This is optional, and can be left blank if you're not using namespaces for your Vault instance.
- `Vault Access Token`: The access token that you generated in the previous step.
- `Project Mapping`: Choose how you want to map your Vault resources to Infisical projects. You can review the mapping options in the [Project Mapping](#project-mapping) section.
Click on Import data to start the migration.
</Step>
</Steps>
<Note>
The migration may take several minutes to complete. You will receive an email when the migration completes, or if any errors occur during the process.
</Note>

(Several binary image files changed in this diff; previews not shown.)
View File

@@ -50,8 +50,8 @@ Infisical currently only supports one method for connecting to Azure, which is O
Back in your Infisical instance, add two new environment variables for the credentials of your Azure application.
- `INF_APP_CONNECTION_AZURE_CLIENT_ID`: The **Application (Client) ID** of your Azure application.
- `INF_APP_CONNECTION_AZURE_CLIENT_SECRET`: The **Client Secret** of your Azure application.
- `INF_APP_CONNECTION_AZURE_APP_CONFIGURATION_CLIENT_ID`: The **Application (Client) ID** of your Azure application.
- `INF_APP_CONNECTION_AZURE_APP_CONFIGURATION_CLIENT_SECRET`: The **Client Secret** of your Azure application.
Once added, restart your Infisical instance and use the Azure App Configuration connection.
</Step>

View File

@@ -43,12 +43,6 @@ Infisical currently only supports one method for connecting to Azure, which is O
- `Application.ReadWrite.All` (Delegated)
- `Directory.ReadWrite.All` (Delegated)
- `User.Read` (Delegated)
- Azure App Configuration
- `KeyValue.Delete` (Delegated)
- `KeyValue.Read` (Delegated)
- `KeyValue.Write` (Delegated)
- Access Key Vault
- `user_impersonation` (Delegated)
![Azure client secrets](/images/integrations/azure-client-secrets/app-api-permissions.png)
@@ -63,8 +57,8 @@ Infisical currently only supports one method for connecting to Azure, which is O
Back in your Infisical instance, add two new environment variables for the credentials of your Azure application.
- `INF_APP_CONNECTION_AZURE_CLIENT_ID`: The **Application (Client) ID** of your Azure application.
- `INF_APP_CONNECTION_AZURE_CLIENT_SECRET`: The **Client Secret** of your Azure application.
- `INF_APP_CONNECTION_AZURE_CLIENT_SECRETS_CLIENT_ID`: The **Application (Client) ID** of your Azure application.
- `INF_APP_CONNECTION_AZURE_CLIENT_SECRETS_CLIENT_SECRET`: The **Client Secret** of your Azure application.
Once added, restart your Infisical instance and use the Azure Client Secrets connection.
</Step>
@@ -72,6 +66,30 @@ Infisical currently only supports one method for connecting to Azure, which is O
</Accordion>
<Accordion title="Client Secret Authentication">
Ensure your Azure application has the required permissions that Infisical needs for the Azure Client Secrets connection to work.
**Prerequisites:**
- An active Azure setup.
<Steps>
<Step title="Assign API permissions to the application">
For the Azure Client Secrets connection to work, assign the following permissions to your Azure application:
#### Required API Permissions
**Microsoft Graph**
- `Application.ReadWrite.All`
- `Application.ReadWrite.OwnedBy`
- `Application.ReadWrite.All` (Delegated)
- `Directory.ReadWrite.All` (Delegated)
- `User.Read` (Delegated)
![Azure client secrets](/images/integrations/azure-client-secrets/app-api-permissions.png)
</Step>
</Steps>
</Accordion>
## Setup Azure Connection in Infisical
<Steps>
@@ -82,21 +100,31 @@ Infisical currently only supports one method for connecting to Azure, which is O
<Step title="Add Connection">
Select the **Azure Connection** option from the connection options modal. ![Select Azure Connection](/images/app-connections/azure/client-secrets/select-connection.png)
</Step>
<Step title="Authorize Connection">
Fill in the **Tenant ID** field with the Directory (Tenant) ID you obtained in the previous step.
<Step title="Create Connection">
<Tabs>
<Tab title="OAuth">
<Step title="Authorize Connection">
Fill in the **Tenant ID** field with the Directory (Tenant) ID you obtained in the previous step.
Now select the **OAuth** method and click **Connect to Azure**.
![Connect via Azure OAuth](/images/app-connections/azure/client-secrets/create-oauth-method.png)
</Step>
<Step title="Grant Access">
You will then be redirected to Azure to grant Infisical access to your Azure account. Once granted,
you will be redirected back to Infisical's App Connections page. ![Azure Client Secrets
Authorization](/images/app-connections/azure/grant-access.png)
</Step>
</Tab>
<Tab title="Client Secret">
<Step title="Create Connection">
Fill in the **Tenant ID**, **Client ID** and **Client Secret** fields with the Directory (Tenant) ID, Application (Client) ID and Client Secret you obtained in the previous step.
</Step>
<Step title="Grant Access">
You will then be redirected to Azure to grant Infisical access to your Azure account. Once granted,
you will be redirected back to Infisical's App Connections page. ![Azure Client Secrets
Authorization](/images/app-connections/azure/grant-access.png)
</Step>
![Connect via Azure Client Secret](/images/app-connections/azure/client-secrets/create-client-secrets-method.png)
</Step>
</Tab>
</Tabs>
</Step>
<Step title="Connection Created">
Your **Azure Client Secrets Connection** is now available for use. ![Azure Client Secrets](/images/app-connections/azure/client-secrets/oauth-connection.png)
</Step>

View File

@@ -56,8 +56,8 @@ Infisical currently supports two methods for connecting to Azure DevOps, which a
Back in your Infisical instance, add two new environment variables for the credentials of your Azure application.
- `INF_APP_CONNECTION_AZURE_CLIENT_ID`: The **Application (Client) ID** of your Azure application.
- `INF_APP_CONNECTION_AZURE_CLIENT_SECRET`: The **Client Secret** of your Azure application.
- `INF_APP_CONNECTION_AZURE_DEVOPS_CLIENT_ID`: The **Application (Client) ID** of your Azure application.
- `INF_APP_CONNECTION_AZURE_DEVOPS_CLIENT_SECRET`: The **Client Secret** of your Azure application.
Once added, restart your Infisical instance and use the Azure DevOps connection.
</Step>

View File

@@ -49,8 +49,8 @@ Infisical currently only supports one method for connecting to Azure, which is O
Back in your Infisical instance, add two new environment variables for the credentials of your Azure application.
- `INF_APP_CONNECTION_AZURE_CLIENT_ID`: The **Application (Client) ID** of your Azure application.
- `INF_APP_CONNECTION_AZURE_CLIENT_SECRET`: The **Client Secret** of your Azure application.
- `INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_ID`: The **Application (Client) ID** of your Azure application.
- `INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_SECRET`: The **Client Secret** of your Azure application.
Once added, restart your Infisical instance and use the Azure Key Vault connection.
</Step>

View File

@@ -55,7 +55,7 @@
"@ucast/mongo2js": "^1.3.4",
"@xyflow/react": "^12.4.4",
"argon2-browser": "^1.18.0",
"axios": "^1.7.9",
"axios": "^1.11.0",
"classnames": "^2.5.1",
"cva": "npm:class-variance-authority@^0.7.1",
"date-fns": "^4.1.0",
@@ -5282,13 +5282,13 @@
}
},
"node_modules/axios": {
"version": "1.8.3",
"resolved": "https://registry.npmjs.org/axios/-/axios-1.8.3.tgz",
"integrity": "sha512-iP4DebzoNlP/YN2dpwCgb8zoCmhtkajzS48JvwmkSkXvPI3DHc7m+XYL5tGnSlJtR6nImXZmdCuN5aP8dh1d8A==",
"version": "1.11.0",
"resolved": "https://registry.npmjs.org/axios/-/axios-1.11.0.tgz",
"integrity": "sha512-1Lx3WLFQWm3ooKDYZD1eXmoGO9fxYQjrycfHFC8P0sCfQVXyROp0p9PFWBehewBOdCwHc+f/b8I0fMto5eSfwA==",
"license": "MIT",
"dependencies": {
"follow-redirects": "^1.15.6",
"form-data": "^4.0.0",
"form-data": "^4.0.4",
"proxy-from-env": "^1.1.0"
}
},
@@ -5700,7 +5700,6 @@
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/call-bind-apply-helpers/-/call-bind-apply-helpers-1.0.1.tgz",
"integrity": "sha512-BhYE+WDaywFg2TBWYNXAE+8B1ATnThNBqXHP5nQu0jWJdVvY2hvkpyB3qOmtmDePiS5/BDQ8wASEWGMWRG148g==",
"dev": true,
"license": "MIT",
"dependencies": {
"es-errors": "^1.3.0",
@@ -6665,7 +6664,6 @@
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/dunder-proto/-/dunder-proto-1.0.0.tgz",
"integrity": "sha512-9+Sj30DIu+4KvHqMfLUGLFYL2PkURSYMVXJyXe92nFRvlYq5hBjLEhblKB+vkd/WVlUYMWigiY07T91Fkk0+4A==",
"dev": true,
"license": "MIT",
"dependencies": {
"call-bind-apply-helpers": "^1.0.0",
@@ -6819,7 +6817,6 @@
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/es-define-property/-/es-define-property-1.0.1.tgz",
"integrity": "sha512-e3nRfgfUZ4rNGL232gUgX06QNyyez04KdjFrF+LTRoOXmrOgFKDg4BCdsjW8EnT69eqdYGmRpJwiPVYNrCaW3g==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">= 0.4"
@@ -6829,7 +6826,6 @@
"version": "1.3.0",
"resolved": "https://registry.npmjs.org/es-errors/-/es-errors-1.3.0.tgz",
"integrity": "sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">= 0.4"
@@ -6867,7 +6863,6 @@
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/es-object-atoms/-/es-object-atoms-1.0.0.tgz",
"integrity": "sha512-MZ4iQ6JwHOBQjahnjwaC1ZtIBH+2ohjamzAO3oaHcXYup7qxjF2fixyH+Q71voWHeOkI2q/TnJao/KfXYIZWbw==",
"dev": true,
"license": "MIT",
"dependencies": {
"es-errors": "^1.3.0"
@@ -6877,15 +6872,15 @@
}
},
"node_modules/es-set-tostringtag": {
"version": "2.0.3",
"resolved": "https://registry.npmjs.org/es-set-tostringtag/-/es-set-tostringtag-2.0.3.tgz",
"integrity": "sha512-3T8uNMC3OQTHkFUsFq8r/BwAXLHvU/9O9mE0fBc/MY5iq/8H7ncvO947LmYA6ldWw9Uh8Yhf25zu6n7nML5QWQ==",
"dev": true,
"version": "2.1.0",
"resolved": "https://registry.npmjs.org/es-set-tostringtag/-/es-set-tostringtag-2.1.0.tgz",
"integrity": "sha512-j6vWzfrGVfyXxge+O0x5sh6cvxAog0a/4Rdd2K36zCMV5eJ+/+tOAngRO8cODMNWbVRdVlmGZQL2YS3yR8bIUA==",
"license": "MIT",
"dependencies": {
"get-intrinsic": "^1.2.4",
"es-errors": "^1.3.0",
"get-intrinsic": "^1.2.6",
"has-tostringtag": "^1.0.2",
"hasown": "^2.0.1"
"hasown": "^2.0.2"
},
"engines": {
"node": ">= 0.4"
@@ -7855,13 +7850,15 @@
}
},
"node_modules/form-data": {
"version": "4.0.1",
"resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.1.tgz",
"integrity": "sha512-tzN8e4TX8+kkxGPK8D5u0FNmjPUjw3lwC9lSLxxoB/+GtsJG91CO8bSWy73APlgAZzZbXEYZJuxjkHH2w+Ezhw==",
"version": "4.0.4",
"resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.4.tgz",
"integrity": "sha512-KrGhL9Q4zjj0kiUt5OO4Mr/A/jlI2jDYs5eHBpYHPcBEVSiipAvn2Ko2HnPe20rmcuuvMHNdZFp+4IlGTMF0Ow==",
"license": "MIT",
"dependencies": {
"asynckit": "^0.4.0",
"combined-stream": "^1.0.8",
"es-set-tostringtag": "^2.1.0",
"hasown": "^2.0.2",
"mime-types": "^2.1.12"
},
"engines": {
@@ -7992,7 +7989,6 @@
"version": "1.2.6",
"resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.2.6.tgz",
"integrity": "sha512-qxsEs+9A+u85HhllWJJFicJfPDhRmjzoYdl64aMWW9yRIJmSyxdn8IEkuIM530/7T+lv0TIHd8L6Q/ra0tEoeA==",
"dev": true,
"license": "MIT",
"dependencies": {
"call-bind-apply-helpers": "^1.0.1",
@@ -8139,7 +8135,6 @@
"version": "1.2.0",
"resolved": "https://registry.npmjs.org/gopd/-/gopd-1.2.0.tgz",
"integrity": "sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">= 0.4"
@@ -8215,7 +8210,6 @@
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.1.0.tgz",
"integrity": "sha512-1cDNdwJ2Jaohmb3sg4OmKaMBwuC48sYni5HUw2DvsC8LjGTLK9h+eb1X6RyuOHe4hT0ULCW68iomhjUoKUqlPQ==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">= 0.4"
@@ -8228,7 +8222,6 @@
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/has-tostringtag/-/has-tostringtag-1.0.2.tgz",
"integrity": "sha512-NqADB8VjPFLM2V0VvHUewwwsw0ZWBaIdgo+ieHtK3hasLz4qeCRjYcqfB6AQrBggRKppKF8L52/VqdVsO47Dlw==",
"dev": true,
"license": "MIT",
"dependencies": {
"has-symbols": "^1.0.3"
@@ -9545,7 +9538,6 @@
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/math-intrinsics/-/math-intrinsics-1.0.0.tgz",
"integrity": "sha512-4MqMiKP90ybymYvsut0CH2g4XWbfLtmlCkXmtmdcDCxNB+mQcu1w/1+L/VD7vi/PSv7X2JYV7SCcR+jiPXnQtA==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">= 0.4"

View File

@@ -59,7 +59,7 @@
"@ucast/mongo2js": "^1.3.4",
"@xyflow/react": "^12.4.4",
"argon2-browser": "^1.18.0",
"axios": "^1.7.9",
"axios": "^1.11.0",
"classnames": "^2.5.1",
"cva": "npm:class-variance-authority@^0.7.1",
"date-fns": "^4.1.0",

View File

@@ -1,26 +1,14 @@
import { FunctionComponent, ReactNode } from "react";
import { BoundCanProps, Can } from "@casl/react";
import { faLock } from "@fortawesome/free-solid-svg-icons";
import { FontAwesomeIcon } from "@fortawesome/react-fontawesome";
import { TOrgPermission, useOrgPermission } from "@app/context/OrgPermissionContext";
import { Tooltip } from "../v2";
import { AccessRestrictedBanner, Tooltip } from "../v2";
export const OrgPermissionGuardBanner = () => {
return (
<div className="container mx-auto flex h-full items-center justify-center">
<div className="flex items-end space-x-12 rounded-md bg-mineshaft-800 p-16 text-bunker-300">
<div>
<FontAwesomeIcon icon={faLock} size="6x" />
</div>
<div>
<div className="mb-2 text-4xl font-medium">Access Restricted</div>
<div className="text-sm">
Your role has limited permissions, please <br /> contact your admin to gain access
</div>
</div>
</div>
<AccessRestrictedBanner />
</div>
);
};

View File

@@ -1,15 +1,12 @@
import { ReactNode } from "react";
import { faLock } from "@fortawesome/free-solid-svg-icons";
import { FontAwesomeIcon } from "@fortawesome/react-fontawesome";
import { twMerge } from "tailwind-merge";
import { AccessRestrictedBanner } from "@app/components/v2";
type Props = {
containerClassName?: string;
className?: string;
children?: ReactNode;
};
export const PermissionDeniedBanner = ({ containerClassName, className, children }: Props) => {
export const PermissionDeniedBanner = ({ containerClassName }: Props) => {
return (
<div
className={twMerge(
@@ -17,22 +14,7 @@ export const PermissionDeniedBanner = ({ containerClassName, className, children
containerClassName
)}
>
<div className={twMerge("rounded-md bg-mineshaft-800 p-16 text-bunker-300", className)}>
<div className="flex items-end space-x-12">
<div>
<FontAwesomeIcon icon={faLock} size="6x" />
</div>
<div>
<div className="mb-2 text-4xl font-medium">Access Restricted</div>
{children || (
<div className="text-sm">
Your role has limited permissions, please <br /> contact your administrator to gain
access
</div>
)}
</div>
</div>
</div>
<AccessRestrictedBanner />
</div>
);
};

View File

@@ -1,9 +1,8 @@
import { FunctionComponent, ReactNode } from "react";
import { AbilityTuple, MongoAbility } from "@casl/ability";
import { Can } from "@casl/react";
import { faLock } from "@fortawesome/free-solid-svg-icons";
import { FontAwesomeIcon } from "@fortawesome/react-fontawesome";
import { AccessRestrictedBanner } from "@app/components/v2";
import { ProjectPermissionSet, useProjectPermission } from "@app/context/ProjectPermissionContext";
import { Tooltip } from "../v2/Tooltip";
@@ -11,17 +10,7 @@ import { Tooltip } from "../v2/Tooltip";
export const ProjectPermissionGuardBanner = () => {
return (
<div className="container mx-auto flex h-full items-center justify-center">
<div className="flex items-end space-x-12 rounded-md bg-mineshaft-800 p-16 text-bunker-300">
<div>
<FontAwesomeIcon icon={faLock} size="6x" />
</div>
<div>
<div className="mb-2 text-4xl font-medium">Access Restricted</div>
<div className="text-sm">
Your role has limited permissions, please <br /> contact your admin to gain access
</div>
</div>
</div>
<AccessRestrictedBanner />
</div>
);
};

View File

@@ -271,6 +271,8 @@ const NewProjectForm = ({ onOpenChange }: NewProjectFormProps) => {
value={value}
onValueChange={onChange}
className="w-full"
position="popper"
dropdownContainerClassName="max-w-none"
>
{projectTemplates.length
? projectTemplates.map((template) => (
@@ -306,6 +308,9 @@ const NewProjectForm = ({ onOpenChange }: NewProjectFormProps) => {
onChange(e);
}}
className="mb-12 w-full bg-mineshaft-600"
position="popper"
dropdownContainerClassName="max-w-none -top-1"
side="top"
>
<SelectItem value={INTERNAL_KMS_KEY_ID} key="kms-internal">
Default Infisical KMS

View File

@@ -76,7 +76,6 @@ export const CreateSecretRotationV2Modal = ({ onOpenChange, isOpen, ...props }:
</div>
)
}
onPointerDownOutside={(e) => e.preventDefault()}
className={selectedRotation ? "max-w-2xl" : "max-w-3xl"}
subTitle={
selectedRotation ? undefined : "Select a provider to create a secret rotation for."

View File

@@ -75,7 +75,6 @@ export const CreateSecretScanningDataSourceModal = ({ onOpenChange, isOpen, ...p
</div>
)
}
onPointerDownOutside={(e) => e.preventDefault()}
className={selectedDataSource ? "max-w-2xl" : "max-w-3xl"}
subTitle={
selectedDataSource ? undefined : "Select a data source to configure secret scanning for."

Some files were not shown because too many files have changed in this diff.