Mirror of https://github.com/Infisical/infisical.git (synced 2025-04-13 16:52:12 +00:00)

Compare commits: daniel/hsm ... daniel/act
122 commits
Commits (SHA1):
96e331b678
9a62efea4f
506c30bcdb
735ad4ff65
41e36dfcef
421d8578b7
6685f8aa0a
d6c37c1065
54f3f94185
907537f7c0
61263b9384
d71c85e052
b6d8be2105
0693f81d0a
61d516ef35
31fc64fb4c
8bf7e4c4d1
2027d4b44e
d401c9074e
afe35dbbb5
6ff1602fd5
6603364749
53bea22b85
7c84adc1c2
fa8d6735a1
a6137f267d
d521ee7b7e
827931e416
faa83344a7
089a7e880b
64ec741f1a
c98233ddaf
ae17981c41
6c49c7da3c
2de04b6fe5
5c9ec1e4be
ba89491d4c
483e596a7a
65f122bd41
682b552fdc
d4cfd0b6ed
e8f09d2c7b
774371a218
c4b54de303
433971a72d
4acf9413f0
f0549cab98
d75e49dce5
8819abd710
796f76da46
d6e1ed4d1e
1295b68d80
c79f84c064
d0c50960ef
85089a08e1
bf97294dad
4053078d95
4ba3899861
6bae3628c0
4cb935dae7
ccad684ab2
fd77708cad
9aebd712d1
05f07b25ac
5b0dbf04b2
b050db84ab
8fef6911f1
44ba31a743
6bdbac4750
60fb195706
c8109b4e84
1f2b0443cc
dd1cabf9f6
8b781b925a
ddcf5b576b
7138b392f2
bfce1021fb
93c0313b28
8cfc217519
d272c6217a
2fe2ddd9fc
e330ddd5ee
7aba9c1a50
4cd8e0fa67
ea3d164ead
df468e4865
66e96018c4
3b02eedca6
a55fe2b788
5d7a267f1d
b16ab6f763
2d2ad0724f
e90efb7fc8
17d5e4bdab
334a728259
4a3143e689
14810de054
8cfcbaa12c
0e946f73bd
7b8551f883
3b1ce86ee6
c649661133
70e44d04ef
0dddd58be1
d4c911a28f
ada63b9e7d
8ef078872e
5f93016d22
f220246eb4
3f6a0c77f1
9e4b66e215
8a14914bc3
027b200b1a
e761e65322
6956d14e2e
bae7c6c3d7
e8b33f27fc
fc3a409164
ffc58b0313
9a7e05369c
33b49f4466
60895537a7
@@ -74,6 +74,14 @@ CAPTCHA_SECRET=
 NEXT_PUBLIC_CAPTCHA_SITE_KEY=

+OTEL_TELEMETRY_COLLECTION_ENABLED=
+OTEL_EXPORT_TYPE=
+OTEL_EXPORT_OTLP_ENDPOINT=
+OTEL_OTLP_PUSH_INTERVAL=
+
+OTEL_COLLECTOR_BASIC_AUTH_USERNAME=
+OTEL_COLLECTOR_BASIC_AUTH_PASSWORD=
+
 PLAIN_API_KEY=
 PLAIN_WISH_LABEL_IDS=
@@ -10,8 +10,7 @@ on:
 permissions:
   contents: write
   # packages: write
   # issues: write

 jobs:
   cli-integration-tests:
     name: Run tests before deployment

@@ -26,6 +25,63 @@ jobs:
       CLI_TESTS_USER_PASSWORD: ${{ secrets.CLI_TESTS_USER_PASSWORD }}
       CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE: ${{ secrets.CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE }}

+  npm-release:
+    runs-on: ubuntu-20.04
+    env:
+      working-directory: ./npm
+    needs:
+      - cli-integration-tests
+      - goreleaser
+    steps:
+      - uses: actions/checkout@v3
+        with:
+          fetch-depth: 0
+
+      - name: Extract version
+        run: |
+          VERSION=$(echo ${{ github.ref_name }} | sed 's/infisical-cli\/v//')
+          echo "Version extracted: $VERSION"
+          echo "CLI_VERSION=$VERSION" >> $GITHUB_ENV
+
+      - name: Print version
+        run: echo ${{ env.CLI_VERSION }}
+
+      - name: Setup Node
+        uses: actions/setup-node@8f152de45cc393bb48ce5d89d36b731f54556e65 # v4.0.0
+        with:
+          node-version: 20
+          cache: "npm"
+          cache-dependency-path: ./npm/package-lock.json
+      - name: Install dependencies
+        working-directory: ${{ env.working-directory }}
+        run: npm install --ignore-scripts
+
+      - name: Set NPM version
+        working-directory: ${{ env.working-directory }}
+        run: npm version ${{ env.CLI_VERSION }} --allow-same-version --no-git-tag-version
+
+      - name: Setup NPM
+        working-directory: ${{ env.working-directory }}
+        run: |
+          echo 'registry="https://registry.npmjs.org/"' > ./.npmrc
+          echo "//registry.npmjs.org/:_authToken=$NPM_TOKEN" >> ./.npmrc
+
+          echo 'registry="https://registry.npmjs.org/"' > ~/.npmrc
+          echo "//registry.npmjs.org/:_authToken=$NPM_TOKEN" >> ~/.npmrc
+        env:
+          NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
+
+      - name: Pack NPM
+        working-directory: ${{ env.working-directory }}
+        run: npm pack
+
+      - name: Publish NPM
+        working-directory: ${{ env.working-directory }}
+        run: npm publish --tarball=./infisical-sdk-${{github.ref_name}} --access public --registry=https://registry.npmjs.org/
+        env:
+          NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
+          NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
+
   goreleaser:
     runs-on: ubuntu-20.04
     needs: [cli-integration-tests]
.gitignore (vendored, 2 changes)

@@ -71,3 +71,5 @@ frontend-build
 cli/infisical-merge
 cli/test/infisical-merge
 /backend/binary
+
+/npm/bin

Makefile (4 changes)

@@ -10,6 +10,9 @@ up-dev:
 up-dev-ldap:
 	docker compose -f docker-compose.dev.yml --profile ldap up --build

+up-dev-metrics:
+	docker compose -f docker-compose.dev.yml --profile metrics up --build
+
 up-prod:
 	docker-compose -f docker-compose.prod.yml up --build

@@ -27,4 +30,3 @@ reviewable-api:
 	npm run type:check

 reviewable: reviewable-ui reviewable-api
@@ -5,6 +5,9 @@ export const mockSmtpServer = (): TSmtpService => {
   return {
     sendMail: async (data) => {
       storage.push(data);
     },
+    verify: async () => {
+      return true;
+    }
   };
 };
backend/package-lock.json (generated, 1667 changes)

File diff suppressed because it is too large.
@@ -50,6 +50,7 @@
     "auditlog-migration:down": "knex --knexfile ./src/db/auditlog-knexfile.ts --client pg migrate:down",
     "auditlog-migration:list": "knex --knexfile ./src/db/auditlog-knexfile.ts --client pg migrate:list",
     "auditlog-migration:status": "knex --knexfile ./src/db/auditlog-knexfile.ts --client pg migrate:status",
+    "auditlog-migration:unlock": "knex --knexfile ./src/db/auditlog-knexfile.ts migrate:unlock",
     "auditlog-migration:rollback": "knex --knexfile ./src/db/auditlog-knexfile.ts migrate:rollback",
     "migration:new": "tsx ./scripts/create-migration.ts",
     "migration:up": "npm run auditlog-migration:up && knex --knexfile ./src/db/knexfile.ts --client pg migrate:up",

@@ -58,6 +59,7 @@
     "migration:latest": "npm run auditlog-migration:latest && knex --knexfile ./src/db/knexfile.ts --client pg migrate:latest",
     "migration:status": "npm run auditlog-migration:status && knex --knexfile ./src/db/knexfile.ts --client pg migrate:status",
     "migration:rollback": "npm run auditlog-migration:rollback && knex --knexfile ./src/db/knexfile.ts migrate:rollback",
+    "migration:unlock": "npm run auditlog-migration:unlock && knex --knexfile ./src/db/knexfile.ts migrate:unlock",
     "migrate:org": "tsx ./scripts/migrate-organization.ts",
     "seed:new": "tsx ./scripts/create-seed-file.ts",
     "seed": "knex --knexfile ./src/db/knexfile.ts --client pg seed:run",

@@ -138,6 +140,14 @@
     "@octokit/plugin-retry": "^5.0.5",
     "@octokit/rest": "^20.0.2",
     "@octokit/webhooks-types": "^7.3.1",
+    "@opentelemetry/api": "^1.9.0",
+    "@opentelemetry/auto-instrumentations-node": "^0.53.0",
+    "@opentelemetry/exporter-metrics-otlp-proto": "^0.55.0",
+    "@opentelemetry/exporter-prometheus": "^0.55.0",
+    "@opentelemetry/instrumentation": "^0.55.0",
+    "@opentelemetry/resources": "^1.28.0",
+    "@opentelemetry/sdk-metrics": "^1.28.0",
+    "@opentelemetry/semantic-conventions": "^1.27.0",
     "@peculiar/asn1-schema": "^2.3.8",
     "@peculiar/x509": "^1.12.1",
     "@serdnam/pino-cloudwatch-transport": "^1.0.4",

@@ -181,6 +191,7 @@
     "openid-client": "^5.6.5",
     "ora": "^7.0.1",
     "oracledb": "^6.4.0",
+    "otplib": "^12.0.1",
     "passport-github": "^1.1.0",
     "passport-gitlab2": "^5.0.0",
     "passport-google-oauth20": "^2.0.0",
@@ -8,61 +8,80 @@ const prompt = promptSync({
   sigint: true
 });

+const sanitizeInputParam = (value: string) => {
+  // Escape double quotes and wrap the entire value in double quotes
+  if (value) {
+    return `"${value.replace(/"/g, '\\"')}"`;
+  }
+  return '""';
+};
+
 const exportDb = () => {
-  const exportHost = prompt("Enter your Postgres Host to migrate from: ");
-  const exportPort = prompt("Enter your Postgres Port to migrate from [Default = 5432]: ") ?? "5432";
-  const exportUser = prompt("Enter your Postgres User to migrate from: [Default = infisical]: ") ?? "infisical";
-  const exportPassword = prompt("Enter your Postgres Password to migrate from: ");
-  const exportDatabase = prompt("Enter your Postgres Database to migrate from [Default = infisical]: ") ?? "infisical";
+  const exportHost = sanitizeInputParam(prompt("Enter your Postgres Host to migrate from: "));
+  const exportPort = sanitizeInputParam(
+    prompt("Enter your Postgres Port to migrate from [Default = 5432]: ") ?? "5432"
+  );
+  const exportUser = sanitizeInputParam(
+    prompt("Enter your Postgres User to migrate from: [Default = infisical]: ") ?? "infisical"
+  );
+  const exportPassword = sanitizeInputParam(prompt("Enter your Postgres Password to migrate from: "));
+  const exportDatabase = sanitizeInputParam(
+    prompt("Enter your Postgres Database to migrate from [Default = infisical]: ") ?? "infisical"
+  );

   // we do not include the audit_log and secret_sharing entries
   execSync(
-    `PGDATABASE="${exportDatabase}" PGPASSWORD="${exportPassword}" PGHOST="${exportHost}" PGPORT=${exportPort} PGUSER=${exportUser} pg_dump infisical --exclude-table-data="secret_sharing" --exclude-table-data="audit_log*" > ${path.join(
+    `PGDATABASE=${exportDatabase} PGPASSWORD=${exportPassword} PGHOST=${exportHost} PGPORT=${exportPort} PGUSER=${exportUser} pg_dump -Fc infisical --exclude-table-data="secret_sharing" --exclude-table-data="audit_log*" > ${path.join(
       __dirname,
-      "../src/db/dump.sql"
+      "../src/db/backup.dump"
     )}`,
     { stdio: "inherit" }
   );
 };

 const importDbForOrg = () => {
-  const importHost = prompt("Enter your Postgres Host to migrate to: ");
-  const importPort = prompt("Enter your Postgres Port to migrate to [Default = 5432]: ") ?? "5432";
-  const importUser = prompt("Enter your Postgres User to migrate to: [Default = infisical]: ") ?? "infisical";
-  const importPassword = prompt("Enter your Postgres Password to migrate to: ");
-  const importDatabase = prompt("Enter your Postgres Database to migrate to [Default = infisical]: ") ?? "infisical";
-  const orgId = prompt("Enter the organization ID to migrate: ");
+  const importHost = sanitizeInputParam(prompt("Enter your Postgres Host to migrate to: "));
+  const importPort = sanitizeInputParam(prompt("Enter your Postgres Port to migrate to [Default = 5432]: ") ?? "5432");
+  const importUser = sanitizeInputParam(
+    prompt("Enter your Postgres User to migrate to: [Default = infisical]: ") ?? "infisical"
+  );
+  const importPassword = sanitizeInputParam(prompt("Enter your Postgres Password to migrate to: "));
+  const importDatabase = sanitizeInputParam(
+    prompt("Enter your Postgres Database to migrate to [Default = infisical]: ") ?? "infisical"
+  );
+  const orgId = sanitizeInputParam(prompt("Enter the organization ID to migrate: "));

-  if (!existsSync(path.join(__dirname, "../src/db/dump.sql"))) {
+  if (!existsSync(path.join(__dirname, "../src/db/backup.dump"))) {
     console.log("File not found, please export the database first.");
     return;
   }

   execSync(
-    `PGDATABASE="${importDatabase}" PGPASSWORD="${importPassword}" PGHOST="${importHost}" PGPORT=${importPort} PGUSER=${importUser} psql -f ${path.join(
-      __dirname,
-      "../src/db/dump.sql"
-    )}`
+    `PGDATABASE=${importDatabase} PGPASSWORD=${importPassword} PGHOST=${importHost} PGPORT=${importPort} PGUSER=${importUser} pg_restore -d ${importDatabase} --verbose ${path.join(
+      __dirname,
+      "../src/db/backup.dump"
+    )}`,
+    { maxBuffer: 1024 * 1024 * 4096 }
   );

   execSync(
-    `PGDATABASE="${importDatabase}" PGPASSWORD="${importPassword}" PGHOST="${importHost}" PGPORT=${importPort} PGUSER=${importUser} psql -c "DELETE FROM public.organizations WHERE id != '${orgId}'"`
+    `PGDATABASE=${importDatabase} PGPASSWORD=${importPassword} PGHOST=${importHost} PGPORT=${importPort} PGUSER=${importUser} psql -c "DELETE FROM public.organizations WHERE id != '${orgId}'"`
   );

   // delete global/instance-level resources not relevant to the organization to migrate
   // users
   execSync(
-    `PGDATABASE="${importDatabase}" PGPASSWORD="${importPassword}" PGHOST="${importHost}" PGPORT=${importPort} PGUSER=${importUser} psql -c 'DELETE FROM users WHERE users.id NOT IN (SELECT org_memberships."userId" FROM org_memberships)'`
+    `PGDATABASE=${importDatabase} PGPASSWORD=${importPassword} PGHOST=${importHost} PGPORT=${importPort} PGUSER=${importUser} psql -c 'DELETE FROM users WHERE users.id NOT IN (SELECT org_memberships."userId" FROM org_memberships)'`
   );

   // identities
   execSync(
-    `PGDATABASE="${importDatabase}" PGPASSWORD="${importPassword}" PGHOST="${importHost}" PGPORT=${importPort} PGUSER=${importUser} psql -c 'DELETE FROM identities WHERE id NOT IN (SELECT "identityId" FROM identity_org_memberships)'`
+    `PGDATABASE=${importDatabase} PGPASSWORD=${importPassword} PGHOST=${importHost} PGPORT=${importPort} PGUSER=${importUser} psql -c 'DELETE FROM identities WHERE id NOT IN (SELECT "identityId" FROM identity_org_memberships)'`
   );

   // reset slack configuration in superAdmin
   execSync(
-    `PGDATABASE="${importDatabase}" PGPASSWORD="${importPassword}" PGHOST="${importHost}" PGPORT=${importPort} PGUSER=${importUser} psql -c 'UPDATE super_admin SET "encryptedSlackClientId" = null, "encryptedSlackClientSecret" = null'`
+    `PGDATABASE=${importDatabase} PGPASSWORD=${importPassword} PGHOST=${importHost} PGPORT=${importPort} PGUSER=${importUser} psql -c 'UPDATE super_admin SET "encryptedSlackClientId" = null, "encryptedSlackClientSecret" = null'`
   );

   console.log("Organization migrated successfully.");
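The sanitizeInputParam change above exists because each prompted value is spliced into a shell command via execSync. A minimal standalone sketch of the escaping behavior (test values are illustrative, not from the diff):

const sanitizeInputParam = (value: string) => {
  // Escape embedded double quotes, then wrap the whole value in double quotes
  if (value) {
    return `"${value.replace(/"/g, '\\"')}"`;
  }
  return '""';
};

console.log(sanitizeInputParam("infisical")); // "infisical"
console.log(sanitizeInputParam('pa"ss'));     // "pa\"ss"
console.log(sanitizeInputParam(""));          // ""

Note that double-quoted strings still undergo $ and backtick expansion in POSIX shells, so this guards against quote breakout in the PG* assignments rather than against every conceivable form of injection.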
backend/src/@types/fastify.d.ts (vendored, 2 changes)

@@ -79,6 +79,7 @@ import { TServiceTokenServiceFactory } from "@app/services/service-token/service
 import { TSlackServiceFactory } from "@app/services/slack/slack-service";
 import { TSuperAdminServiceFactory } from "@app/services/super-admin/super-admin-service";
 import { TTelemetryServiceFactory } from "@app/services/telemetry/telemetry-service";
+import { TTotpServiceFactory } from "@app/services/totp/totp-service";
 import { TUserDALFactory } from "@app/services/user/user-dal";
 import { TUserServiceFactory } from "@app/services/user/user-service";
 import { TUserEngagementServiceFactory } from "@app/services/user-engagement/user-engagement-service";

@@ -193,6 +194,7 @@ declare module "fastify" {
     migration: TExternalMigrationServiceFactory;
     externalGroupOrgRoleMapping: TExternalGroupOrgRoleMappingServiceFactory;
     projectTemplate: TProjectTemplateServiceFactory;
+    totp: TTotpServiceFactory;
   };
   // this is exclusive use for middlewares in which we need to inject data
   // everywhere else access using service layer
backend/src/@types/knex.d.ts (vendored, 4 changes)

@@ -314,6 +314,9 @@ import {
   TSuperAdmin,
   TSuperAdminInsert,
   TSuperAdminUpdate,
+  TTotpConfigs,
+  TTotpConfigsInsert,
+  TTotpConfigsUpdate,
   TTrustedIps,
   TTrustedIpsInsert,
   TTrustedIpsUpdate,

@@ -826,5 +829,6 @@ declare module "knex/types/tables" {
       TProjectTemplatesInsert,
       TProjectTemplatesUpdate
     >;
+    [TableName.TotpConfig]: KnexOriginal.CompositeTableType<TTotpConfigs, TTotpConfigsInsert, TTotpConfigsUpdate>;
   }
 }
@@ -64,23 +64,25 @@ export async function up(knex: Knex): Promise<void> {
   }

   if (await knex.schema.hasTable(TableName.Certificate)) {
-    await knex.schema.alterTable(TableName.Certificate, (t) => {
-      t.uuid("caCertId").nullable();
-      t.foreign("caCertId").references("id").inTable(TableName.CertificateAuthorityCert);
-    });
+    const hasCaCertIdColumn = await knex.schema.hasColumn(TableName.Certificate, "caCertId");
+    if (!hasCaCertIdColumn) {
+      await knex.schema.alterTable(TableName.Certificate, (t) => {
+        t.uuid("caCertId").nullable();
+        t.foreign("caCertId").references("id").inTable(TableName.CertificateAuthorityCert);
+      });

-    await knex.raw(`
-    UPDATE "${TableName.Certificate}" cert
-    SET "caCertId" = (
-      SELECT caCert.id
-      FROM "${TableName.CertificateAuthorityCert}" caCert
-      WHERE caCert."caId" = cert."caId"
-    )
-    `);
+      await knex.raw(`
+      UPDATE "${TableName.Certificate}" cert
+      SET "caCertId" = (
+        SELECT caCert.id
+        FROM "${TableName.CertificateAuthorityCert}" caCert
+        WHERE caCert."caId" = cert."caId"
+      )`);

-    await knex.schema.alterTable(TableName.Certificate, (t) => {
-      t.uuid("caCertId").notNullable().alter();
-    });
+      await knex.schema.alterTable(TableName.Certificate, (t) => {
+        t.uuid("caCertId").notNullable().alter();
+      });
+    }
   }
 }
@@ -2,7 +2,7 @@ import { Knex } from "knex";

 import { TableName } from "../schemas";

-const BATCH_SIZE = 30_000;
+const BATCH_SIZE = 10_000;

 export async function up(knex: Knex): Promise<void> {
   const hasAuthMethodColumnAccessToken = await knex.schema.hasColumn(TableName.IdentityAccessToken, "authMethod");

@@ -12,7 +12,18 @@ export async function up(knex: Knex): Promise<void> {
       t.string("authMethod").nullable();
     });

-    let nullableAccessTokens = await knex(TableName.IdentityAccessToken).whereNull("authMethod").limit(BATCH_SIZE);
+    // first we remove identities without auth method that is unused
+    // ! We delete all access tokens where the identity has no auth method set!
+    // ! Which means un-configured identities that for some reason have access tokens, will have their access tokens deleted.
+    await knex(TableName.IdentityAccessToken)
+      .leftJoin(TableName.Identity, `${TableName.Identity}.id`, `${TableName.IdentityAccessToken}.identityId`)
+      .whereNull(`${TableName.Identity}.authMethod`)
+      .delete();
+
+    let nullableAccessTokens = await knex(TableName.IdentityAccessToken)
+      .whereNull("authMethod")
+      .limit(BATCH_SIZE)
+      .select("id");
     let totalUpdated = 0;

     do {

@@ -33,24 +44,15 @@ export async function up(knex: Knex): Promise<void> {
       });

       // eslint-disable-next-line no-await-in-loop
-      nullableAccessTokens = await knex(TableName.IdentityAccessToken).whereNull("authMethod").limit(BATCH_SIZE);
+      nullableAccessTokens = await knex(TableName.IdentityAccessToken)
+        .whereNull("authMethod")
+        .limit(BATCH_SIZE)
+        .select("id");

       totalUpdated += batchIds.length;
       console.log(`Updated ${batchIds.length} access tokens in batch <> Total updated: ${totalUpdated}`);
     } while (nullableAccessTokens.length > 0);

-    // ! We delete all access tokens where the identity has no auth method set!
-    // ! Which means un-configured identities that for some reason have access tokens, will have their access tokens deleted.
-    await knex(TableName.IdentityAccessToken)
-      .whereNotExists((queryBuilder) => {
-        void queryBuilder
-          .select("id")
-          .from(TableName.Identity)
-          .whereRaw(`${TableName.IdentityAccessToken}."identityId" = ${TableName.Identity}.id`)
-          .whereNotNull("authMethod");
-      })
-      .delete();
-
     // Finally we set the authMethod to notNullable after populating the column.
     // This will fail if the data is not populated correctly, so it's safe.
     await knex.schema.alterTable(TableName.IdentityAccessToken, (t) => {
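The backfill above pages through the table in fixed-size batches, selecting only the id column so each round trip stays small, and it now deletes the orphaned tokens before the loop so rows that can never be populated do not keep the loop alive. A generic sketch of the same pattern (table name and backfill value are illustrative, not taken from the migration):

import { Knex } from "knex";

const BATCH_SIZE = 10_000;

async function backfillNullColumn(knex: Knex): Promise<void> {
  let batch = await knex("some_table").whereNull("some_column").limit(BATCH_SIZE).select("id");

  while (batch.length > 0) {
    const ids = batch.map((row) => row.id as string);

    // Update one bounded batch at a time so no single statement touches the whole table.
    await knex("some_table").whereIn("id", ids).update({ some_column: "backfilled-value" });

    batch = await knex("some_table").whereNull("some_column").limit(BATCH_SIZE).select("id");
  }
}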
@@ -0,0 +1,35 @@
+import { Knex } from "knex";
+
+import { TableName } from "../schemas";
+
+export async function up(knex: Knex): Promise<void> {
+  const hasDisableBootstrapCertValidationCol = await knex.schema.hasColumn(
+    TableName.CertificateTemplateEstConfig,
+    "disableBootstrapCertValidation"
+  );
+
+  const hasCaChainCol = await knex.schema.hasColumn(TableName.CertificateTemplateEstConfig, "encryptedCaChain");
+
+  await knex.schema.alterTable(TableName.CertificateTemplateEstConfig, (t) => {
+    if (!hasDisableBootstrapCertValidationCol) {
+      t.boolean("disableBootstrapCertValidation").defaultTo(false).notNullable();
+    }
+
+    if (hasCaChainCol) {
+      t.binary("encryptedCaChain").nullable().alter();
+    }
+  });
+}
+
+export async function down(knex: Knex): Promise<void> {
+  const hasDisableBootstrapCertValidationCol = await knex.schema.hasColumn(
+    TableName.CertificateTemplateEstConfig,
+    "disableBootstrapCertValidation"
+  );
+
+  await knex.schema.alterTable(TableName.CertificateTemplateEstConfig, (t) => {
+    if (hasDisableBootstrapCertValidationCol) {
+      t.dropColumn("disableBootstrapCertValidation");
+    }
+  });
+}
@@ -0,0 +1,21 @@
+import { Knex } from "knex";
+
+import { TableName } from "../schemas";
+
+export async function up(knex: Knex): Promise<void> {
+  if (await knex.schema.hasColumn(TableName.OidcConfig, "orgId")) {
+    await knex.schema.alterTable(TableName.OidcConfig, (t) => {
+      t.dropForeign("orgId");
+      t.foreign("orgId").references("id").inTable(TableName.Organization).onDelete("CASCADE");
+    });
+  }
+}
+
+export async function down(knex: Knex): Promise<void> {
+  if (await knex.schema.hasColumn(TableName.OidcConfig, "orgId")) {
+    await knex.schema.alterTable(TableName.OidcConfig, (t) => {
+      t.dropForeign("orgId");
+      t.foreign("orgId").references("id").inTable(TableName.Organization);
+    });
+  }
+}
backend/src/db/migrations/20241112082701_add-totp-support.ts (new file, 54 lines)

@@ -0,0 +1,54 @@
+import { Knex } from "knex";
+
+import { TableName } from "../schemas";
+import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";
+
+export async function up(knex: Knex): Promise<void> {
+  if (!(await knex.schema.hasTable(TableName.TotpConfig))) {
+    await knex.schema.createTable(TableName.TotpConfig, (t) => {
+      t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
+      t.uuid("userId").notNullable();
+      t.foreign("userId").references("id").inTable(TableName.Users).onDelete("CASCADE");
+      t.boolean("isVerified").defaultTo(false).notNullable();
+      t.binary("encryptedRecoveryCodes").notNullable();
+      t.binary("encryptedSecret").notNullable();
+      t.timestamps(true, true, true);
+      t.unique("userId");
+    });
+
+    await createOnUpdateTrigger(knex, TableName.TotpConfig);
+  }
+
+  const doesOrgMfaMethodColExist = await knex.schema.hasColumn(TableName.Organization, "selectedMfaMethod");
+  await knex.schema.alterTable(TableName.Organization, (t) => {
+    if (!doesOrgMfaMethodColExist) {
+      t.string("selectedMfaMethod");
+    }
+  });
+
+  const doesUserSelectedMfaMethodColExist = await knex.schema.hasColumn(TableName.Users, "selectedMfaMethod");
+  await knex.schema.alterTable(TableName.Users, (t) => {
+    if (!doesUserSelectedMfaMethodColExist) {
+      t.string("selectedMfaMethod");
+    }
+  });
+}
+
+export async function down(knex: Knex): Promise<void> {
+  await dropOnUpdateTrigger(knex, TableName.TotpConfig);
+  await knex.schema.dropTableIfExists(TableName.TotpConfig);
+
+  const doesOrgMfaMethodColExist = await knex.schema.hasColumn(TableName.Organization, "selectedMfaMethod");
+  await knex.schema.alterTable(TableName.Organization, (t) => {
+    if (doesOrgMfaMethodColExist) {
+      t.dropColumn("selectedMfaMethod");
+    }
+  });
+
+  const doesUserSelectedMfaMethodColExist = await knex.schema.hasColumn(TableName.Users, "selectedMfaMethod");
+  await knex.schema.alterTable(TableName.Users, (t) => {
+    if (doesUserSelectedMfaMethodColExist) {
+      t.dropColumn("selectedMfaMethod");
+    }
+  });
+}
@@ -12,11 +12,12 @@ import { TImmutableDBKeys } from "./models";
 export const CertificateTemplateEstConfigsSchema = z.object({
   id: z.string().uuid(),
   certificateTemplateId: z.string().uuid(),
-  encryptedCaChain: zodBuffer,
+  encryptedCaChain: zodBuffer.nullable().optional(),
   hashedPassphrase: z.string(),
   isEnabled: z.boolean(),
   createdAt: z.date(),
-  updatedAt: z.date()
+  updatedAt: z.date(),
+  disableBootstrapCertValidation: z.boolean().default(false)
 });

 export type TCertificateTemplateEstConfigs = z.infer<typeof CertificateTemplateEstConfigsSchema>;
@@ -106,6 +106,7 @@ export * from "./secrets-v2";
 export * from "./service-tokens";
 export * from "./slack-integrations";
 export * from "./super-admin";
+export * from "./totp-configs";
 export * from "./trusted-ips";
 export * from "./user-actions";
 export * from "./user-aliases";
@@ -117,6 +117,7 @@ export enum TableName {
   ExternalKms = "external_kms",
   InternalKms = "internal_kms",
   InternalKmsKeyVersion = "internal_kms_key_version",
+  TotpConfig = "totp_configs",
   // @depreciated
   KmsKeyVersion = "kms_key_versions",
   WorkflowIntegrations = "workflow_integrations",
@@ -21,7 +21,8 @@ export const OrganizationsSchema = z.object({
   kmsDefaultKeyId: z.string().uuid().nullable().optional(),
   kmsEncryptedDataKey: zodBuffer.nullable().optional(),
   defaultMembershipRole: z.string().default("member"),
-  enforceMfa: z.boolean().default(false)
+  enforceMfa: z.boolean().default(false),
+  selectedMfaMethod: z.string().nullable().optional()
 });

 export type TOrganizations = z.infer<typeof OrganizationsSchema>;
backend/src/db/schemas/totp-configs.ts (new file, 24 lines)

@@ -0,0 +1,24 @@
+// Code generated by automation script, DO NOT EDIT.
+// Automated by pulling database and generating zod schema
+// To update. Just run npm run generate:schema
+// Written by akhilmhdh.
+
+import { z } from "zod";
+
+import { zodBuffer } from "@app/lib/zod";
+
+import { TImmutableDBKeys } from "./models";
+
+export const TotpConfigsSchema = z.object({
+  id: z.string().uuid(),
+  userId: z.string().uuid(),
+  isVerified: z.boolean().default(false),
+  encryptedRecoveryCodes: zodBuffer,
+  encryptedSecret: zodBuffer,
+  createdAt: z.date(),
+  updatedAt: z.date()
+});
+
+export type TTotpConfigs = z.infer<typeof TotpConfigsSchema>;
+export type TTotpConfigsInsert = Omit<z.input<typeof TotpConfigsSchema>, TImmutableDBKeys>;
+export type TTotpConfigsUpdate = Partial<Omit<z.input<typeof TotpConfigsSchema>, TImmutableDBKeys>>;
@@ -26,7 +26,8 @@ export const UsersSchema = z.object({
   consecutiveFailedMfaAttempts: z.number().default(0).nullable().optional(),
   isLocked: z.boolean().default(false).nullable().optional(),
   temporaryLockDateEnd: z.date().nullable().optional(),
-  consecutiveFailedPasswordAttempts: z.number().default(0).nullable().optional()
+  consecutiveFailedPasswordAttempts: z.number().default(0).nullable().optional(),
+  selectedMfaMethod: z.string().nullable().optional()
 });

 export type TUsers = z.infer<typeof UsersSchema>;
@@ -2,6 +2,9 @@ import { Knex } from "knex";

 import { TableName } from "./schemas";

+interface PgTriggerResult {
+  rows: Array<{ exists: boolean }>;
+}
 export const createJunctionTable = (knex: Knex, tableName: TableName, table1Name: TableName, table2Name: TableName) =>
   knex.schema.createTable(tableName, (table) => {
     table.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());

@@ -28,13 +31,26 @@ DROP FUNCTION IF EXISTS on_update_timestamp() CASCADE;

 // we would be using this to apply updatedAt where ever we wanta
 // remember to set `timestamps(true,true,true)` before this on schema
-export const createOnUpdateTrigger = (knex: Knex, tableName: string) =>
-  knex.raw(`
-    CREATE TRIGGER "${tableName}_updatedAt"
-    BEFORE UPDATE ON ${tableName}
-    FOR EACH ROW
-    EXECUTE PROCEDURE on_update_timestamp();
-  `);
+export const createOnUpdateTrigger = async (knex: Knex, tableName: string) => {
+  const triggerExists = await knex.raw<PgTriggerResult>(`
+    SELECT EXISTS (
+      SELECT 1
+      FROM pg_trigger
+      WHERE tgname = '${tableName}_updatedAt'
+    );
+  `);
+
+  if (!triggerExists?.rows?.[0]?.exists) {
+    return knex.raw(`
+      CREATE TRIGGER "${tableName}_updatedAt"
+      BEFORE UPDATE ON ${tableName}
+      FOR EACH ROW
+      EXECUTE PROCEDURE on_update_timestamp();
+    `);
+  }
+
+  return null;
+};

 export const dropOnUpdateTrigger = (knex: Knex, tableName: string) =>
   knex.raw(`DROP TRIGGER IF EXISTS "${tableName}_updatedAt" ON ${tableName}`);
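createOnUpdateTrigger is now async and checks pg_trigger first, so migrations can call it idempotently. A hypothetical migration showing the intended call pattern (the table name is illustrative):

import { Knex } from "knex";

import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";

export async function up(knex: Knex): Promise<void> {
  await knex.schema.createTable("example_table", (t) => {
    t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
    t.timestamps(true, true, true); // required before the trigger, per the comment above
  });

  await createOnUpdateTrigger(knex, "example_table"); // no-op if the trigger already exists
}

export async function down(knex: Knex): Promise<void> {
  await dropOnUpdateTrigger(knex, "example_table");
  await knex.schema.dropTableIfExists("example_table");
}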
@@ -122,6 +122,8 @@ export const registerSamlRouter = async (server: FastifyZodProvider) => {
         },
         `email: ${email} firstName: ${profile.firstName as string}`
       );
+
+      throw new Error("Invalid saml request. Missing email or first name");
     }

     const userMetadata = Object.keys(profile.attributes || {})
@@ -171,27 +171,29 @@ export const certificateEstServiceFactory = ({
     });
   }

-  const caCerts = estConfig.caChain
-    .match(/-----BEGIN CERTIFICATE-----[\s\S]+?-----END CERTIFICATE-----/g)
-    ?.map((cert) => {
-      return new x509.X509Certificate(cert);
-    });
+  if (!estConfig.disableBootstrapCertValidation) {
+    const caCerts = estConfig.caChain
+      .match(/-----BEGIN CERTIFICATE-----[\s\S]+?-----END CERTIFICATE-----/g)
+      ?.map((cert) => {
+        return new x509.X509Certificate(cert);
+      });

-  if (!caCerts) {
-    throw new BadRequestError({ message: "Failed to parse certificate chain" });
-  }
+    if (!caCerts) {
+      throw new BadRequestError({ message: "Failed to parse certificate chain" });
+    }

-  const leafCertificate = decodeURIComponent(sslClientCert).match(
-    /-----BEGIN CERTIFICATE-----[\s\S]+?-----END CERTIFICATE-----/g
-  )?.[0];
+    const leafCertificate = decodeURIComponent(sslClientCert).match(
+      /-----BEGIN CERTIFICATE-----[\s\S]+?-----END CERTIFICATE-----/g
+    )?.[0];

-  if (!leafCertificate) {
-    throw new BadRequestError({ message: "Missing client certificate" });
-  }
+    if (!leafCertificate) {
+      throw new BadRequestError({ message: "Missing client certificate" });
+    }

-  const certObj = new x509.X509Certificate(leafCertificate);
-  if (!(await isCertChainValid([certObj, ...caCerts]))) {
-    throw new BadRequestError({ message: "Invalid certificate chain" });
+    const certObj = new x509.X509Certificate(leafCertificate);
+    if (!(await isCertChainValid([certObj, ...caCerts]))) {
+      throw new BadRequestError({ message: "Invalid certificate chain" });
+    }
   }

   const { certificate } = await certificateAuthorityService.signCertFromCa({
@@ -17,7 +17,7 @@ import {
   infisicalSymmetricDecrypt,
   infisicalSymmetricEncypt
 } from "@app/lib/crypto/encryption";
-import { BadRequestError, ForbiddenRequestError, NotFoundError } from "@app/lib/errors";
+import { BadRequestError, ForbiddenRequestError, NotFoundError, OidcAuthError } from "@app/lib/errors";
 import { AuthMethod, AuthTokenType } from "@app/services/auth/auth-type";
 import { TAuthTokenServiceFactory } from "@app/services/auth-token/auth-token-service";
 import { TokenType } from "@app/services/auth-token/auth-token-types";

@@ -56,7 +56,7 @@ type TOidcConfigServiceFactoryDep = {
   orgBotDAL: Pick<TOrgBotDALFactory, "findOne" | "create" | "transaction">;
   licenseService: Pick<TLicenseServiceFactory, "getPlan" | "updateSubscriptionOrgMemberCount">;
   tokenService: Pick<TAuthTokenServiceFactory, "createTokenForUser">;
-  smtpService: Pick<TSmtpService, "sendMail">;
+  smtpService: Pick<TSmtpService, "sendMail" | "verify">;
   permissionService: Pick<TPermissionServiceFactory, "getOrgPermission">;
   oidcConfigDAL: Pick<TOidcConfigDALFactory, "findOne" | "update" | "create">;
 };

@@ -223,6 +223,7 @@ export const oidcConfigServiceFactory = ({
     let newUser: TUsers | undefined;
+
     if (serverCfg.trustOidcEmails) {
       // we prioritize getting the most complete user to create the new alias under
       newUser = await userDAL.findOne(
         {
           email,

@@ -230,6 +231,23 @@ export const oidcConfigServiceFactory = ({
         },
         tx
       );
+
+      if (!newUser) {
+        // this fetches user entries created via invites
+        newUser = await userDAL.findOne(
+          {
+            username: email
+          },
+          tx
+        );
+
+        if (newUser && !newUser.isEmailVerified) {
+          // we automatically mark it as email-verified because we've configured trust for OIDC emails
+          newUser = await userDAL.updateById(newUser.id, {
+            isEmailVerified: true
+          });
+        }
+      }
     }

     if (!newUser) {

@@ -332,14 +350,20 @@ export const oidcConfigServiceFactory = ({
       userId: user.id
     });

-    await smtpService.sendMail({
-      template: SmtpTemplates.EmailVerification,
-      subjectLine: "Infisical confirmation code",
-      recipients: [user.email],
-      substitutions: {
-        code: token
-      }
-    });
+    await smtpService
+      .sendMail({
+        template: SmtpTemplates.EmailVerification,
+        subjectLine: "Infisical confirmation code",
+        recipients: [user.email],
+        substitutions: {
+          code: token
+        }
+      })
+      .catch((err: Error) => {
+        throw new OidcAuthError({
+          message: `Error sending email confirmation code for user registration - contact the Infisical instance admin. ${err.message}`
+        });
+      });
   }

   return { isUserCompleted, providerAuthToken };

@@ -395,6 +419,18 @@ export const oidcConfigServiceFactory = ({
       message: `Organization bot for organization with ID '${org.id}' not found`,
       name: "OrgBotNotFound"
     });

+  const serverCfg = await getServerCfg();
+  if (isActive && !serverCfg.trustOidcEmails) {
+    const isSmtpConnected = await smtpService.verify();
+    if (!isSmtpConnected) {
+      throw new BadRequestError({
+        message:
+          "Cannot enable OIDC when there are issues with the instance's SMTP configuration. Bypass this by turning on trust for OIDC emails in the server admin console."
+      });
+    }
+  }
+
   const key = infisicalSymmetricDecrypt({
     ciphertext: orgBot.encryptedSymmetricKey,
     iv: orgBot.symmetricKeyIV,
@@ -29,4 +29,18 @@ function validateOrgSSO(actorAuthMethod: ActorAuthMethod, isOrgSsoEnforced: TOrg
   }
 }

-export { isAuthMethodSaml, validateOrgSSO };
+const escapeHandlebarsMissingMetadata = (obj: Record<string, string>) => {
+  const handler = {
+    get(target: Record<string, string>, prop: string) {
+      if (!(prop in target)) {
+        // eslint-disable-next-line no-param-reassign
+        target[prop] = `{{identity.metadata.${prop}}}`; // Add missing key as an "own" property
+      }
+      return target[prop];
+    }
+  };
+
+  return new Proxy(obj, handler);
+};
+
+export { escapeHandlebarsMissingMetadata, isAuthMethodSaml, validateOrgSSO };
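Together with dropping strict: true from handlebars.compile (see the permission service diff below), the Proxy makes a missing metadata key render back as its own template expression instead of throwing or collapsing to an empty string. A small sketch of the effect (the template and metadata values are illustrative):

import handlebars from "handlebars";

import { escapeHandlebarsMissingMetadata } from "./permission-fns";

const template = handlebars.compile(
  JSON.stringify({ secretPath: "/{{identity.metadata.team}}/{{identity.metadata.region}}" }),
  { data: false }
);

const metadata = escapeHandlebarsMissingMetadata({ team: "payments" }); // "region" is missing

console.log(template({ identity: { metadata } }));
// {"secretPath":"/payments/{{identity.metadata.region}}"}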
@@ -21,7 +21,7 @@ import { TServiceTokenDALFactory } from "@app/services/service-token/service-tok

 import { orgAdminPermissions, orgMemberPermissions, orgNoAccessPermissions, OrgPermissionSet } from "./org-permission";
 import { TPermissionDALFactory } from "./permission-dal";
-import { validateOrgSSO } from "./permission-fns";
+import { escapeHandlebarsMissingMetadata, validateOrgSSO } from "./permission-fns";
 import { TBuildOrgPermissionDTO, TBuildProjectPermissionDTO } from "./permission-service-types";
 import {
   buildServiceTokenProjectPermission,

@@ -227,11 +227,13 @@ export const permissionServiceFactory = ({
   })) || [];

   const rules = buildProjectPermissionRules(rolePermissions.concat(additionalPrivileges));
-  const templatedRules = handlebars.compile(JSON.stringify(rules), { data: false, strict: true });
-  const metadataKeyValuePair = objectify(
-    userProjectPermission.metadata,
-    (i) => i.key,
-    (i) => i.value
+  const templatedRules = handlebars.compile(JSON.stringify(rules), { data: false });
+  const metadataKeyValuePair = escapeHandlebarsMissingMetadata(
+    objectify(
+      userProjectPermission.metadata,
+      (i) => i.key,
+      (i) => i.value
+    )
   );
   const interpolateRules = templatedRules(
     {

@@ -292,12 +294,15 @@ export const permissionServiceFactory = ({
   })) || [];

   const rules = buildProjectPermissionRules(rolePermissions.concat(additionalPrivileges));
-  const templatedRules = handlebars.compile(JSON.stringify(rules), { data: false, strict: true });
-  const metadataKeyValuePair = objectify(
-    identityProjectPermission.metadata,
-    (i) => i.key,
-    (i) => i.value
+  const templatedRules = handlebars.compile(JSON.stringify(rules), { data: false });
+  const metadataKeyValuePair = escapeHandlebarsMissingMetadata(
+    objectify(
+      identityProjectPermission.metadata,
+      (i) => i.key,
+      (i) => i.value
+    )
   );

   const interpolateRules = templatedRules(
     {
       identity: {
@@ -157,6 +157,15 @@ const envSchema = z
     INFISICAL_CLOUD: zodStrBool.default("false"),
     MAINTENANCE_MODE: zodStrBool.default("false"),
     CAPTCHA_SECRET: zpStr(z.string().optional()),

+    // TELEMETRY
+    OTEL_TELEMETRY_COLLECTION_ENABLED: zodStrBool.default("false"),
+    OTEL_EXPORT_OTLP_ENDPOINT: zpStr(z.string().optional()),
+    OTEL_OTLP_PUSH_INTERVAL: z.coerce.number().default(30000),
+    OTEL_COLLECTOR_BASIC_AUTH_USERNAME: zpStr(z.string().optional()),
+    OTEL_COLLECTOR_BASIC_AUTH_PASSWORD: zpStr(z.string().optional()),
+    OTEL_EXPORT_TYPE: z.enum(["prometheus", "otlp"]).optional(),
+
     PLAIN_API_KEY: zpStr(z.string().optional()),
     PLAIN_WISH_LABEL_IDS: zpStr(z.string().optional()),
     DISABLE_AUDIT_LOG_GENERATION: zodStrBool.default("false"),

@@ -203,11 +212,11 @@ let envCfg: Readonly<z.infer<typeof envSchema>>;

 export const getConfig = () => envCfg;
 // cannot import singleton logger directly as it needs config to load various transport
-export const initEnvConfig = (logger: Logger) => {
+export const initEnvConfig = (logger?: Logger) => {
   const parsedEnv = envSchema.safeParse(process.env);
   if (!parsedEnv.success) {
-    logger.error("Invalid environment variables. Check the error below");
-    logger.error(parsedEnv.error.issues);
+    (logger ?? console).error("Invalid environment variables. Check the error below");
+    (logger ?? console).error(parsedEnv.error.issues);
     process.exit(-1);
   }
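Making the logger parameter optional lets entry points that run before logger initialization (such as the telemetry bootstrap below) parse the environment on their own. A one-line usage sketch:

const appCfg = initEnvConfig(); // no logger yet; falls back to console.error on invalid env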
@@ -133,3 +133,15 @@ export class ScimRequestError extends Error {
     this.status = status;
   }
 }
+
+export class OidcAuthError extends Error {
+  name: string;
+
+  error: unknown;
+
+  constructor({ name, error, message }: { message?: string; name?: string; error?: unknown }) {
+    super(message || "Something went wrong");
+    this.name = name || "OidcAuthError";
+    this.error = error;
+  }
+}
backend/src/lib/telemetry/instrumentation.ts (new file, 91 lines)

@@ -0,0 +1,91 @@
+import opentelemetry, { diag, DiagConsoleLogger, DiagLogLevel } from "@opentelemetry/api";
+import { getNodeAutoInstrumentations } from "@opentelemetry/auto-instrumentations-node";
+import { OTLPMetricExporter } from "@opentelemetry/exporter-metrics-otlp-proto";
+import { PrometheusExporter } from "@opentelemetry/exporter-prometheus";
+import { registerInstrumentations } from "@opentelemetry/instrumentation";
+import { Resource } from "@opentelemetry/resources";
+import { AggregationTemporality, MeterProvider, PeriodicExportingMetricReader } from "@opentelemetry/sdk-metrics";
+import { ATTR_SERVICE_NAME, ATTR_SERVICE_VERSION } from "@opentelemetry/semantic-conventions";
+import dotenv from "dotenv";
+
+import { initEnvConfig } from "../config/env";
+
+dotenv.config();
+
+const initTelemetryInstrumentation = ({
+  exportType,
+  otlpURL,
+  otlpUser,
+  otlpPassword,
+  otlpPushInterval
+}: {
+  exportType?: string;
+  otlpURL?: string;
+  otlpUser?: string;
+  otlpPassword?: string;
+  otlpPushInterval?: number;
+}) => {
+  diag.setLogger(new DiagConsoleLogger(), DiagLogLevel.DEBUG);
+
+  const resource = Resource.default().merge(
+    new Resource({
+      [ATTR_SERVICE_NAME]: "infisical-core",
+      [ATTR_SERVICE_VERSION]: "0.1.0"
+    })
+  );
+
+  const metricReaders = [];
+  switch (exportType) {
+    case "prometheus": {
+      const promExporter = new PrometheusExporter();
+      metricReaders.push(promExporter);
+      break;
+    }
+    case "otlp": {
+      const otlpExporter = new OTLPMetricExporter({
+        url: `${otlpURL}/v1/metrics`,
+        headers: {
+          Authorization: `Basic ${btoa(`${otlpUser}:${otlpPassword}`)}`
+        },
+        temporalityPreference: AggregationTemporality.DELTA
+      });
+      metricReaders.push(
+        new PeriodicExportingMetricReader({
+          exporter: otlpExporter,
+          exportIntervalMillis: otlpPushInterval
+        })
+      );
+      break;
+    }
+    default:
+      throw new Error("Invalid OTEL export type");
+  }
+
+  const meterProvider = new MeterProvider({
+    resource,
+    readers: metricReaders
+  });
+
+  opentelemetry.metrics.setGlobalMeterProvider(meterProvider);
+
+  registerInstrumentations({
+    instrumentations: [getNodeAutoInstrumentations()]
+  });
+};
+
+const setupTelemetry = () => {
+  const appCfg = initEnvConfig();
+
+  if (appCfg.OTEL_TELEMETRY_COLLECTION_ENABLED) {
+    console.log("Initializing telemetry instrumentation");
+    initTelemetryInstrumentation({
+      otlpURL: appCfg.OTEL_EXPORT_OTLP_ENDPOINT,
+      otlpUser: appCfg.OTEL_COLLECTOR_BASIC_AUTH_USERNAME,
+      otlpPassword: appCfg.OTEL_COLLECTOR_BASIC_AUTH_PASSWORD,
+      otlpPushInterval: appCfg.OTEL_OTLP_PUSH_INTERVAL,
+      exportType: appCfg.OTEL_EXPORT_TYPE
+    });
+  }
+};
+
+void setupTelemetry();
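Because setupTelemetry() installs a global MeterProvider, any module can record custom metrics through @opentelemetry/api alone, without importing the SDK. A minimal sketch (the meter and counter names are illustrative):

import opentelemetry from "@opentelemetry/api";

const meter = opentelemetry.metrics.getMeter("infisical-example");
const loginCounter = meter.createCounter("user_logins_total", {
  description: "Number of successful logins"
});

loginCounter.add(1, { method: "oidc" }); // exported via Prometheus scrape or OTLP push, per OTEL_EXPORT_TYPE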
@@ -1,2 +1,3 @@
 export { isDisposableEmail } from "./validate-email";
+export { isValidFolderName, isValidSecretPath } from "./validate-folder-name";
 export { blockLocalAndPrivateIpAddresses } from "./validate-url";
backend/src/lib/validator/validate-folder-name.ts (new file, 8 lines)

@@ -0,0 +1,8 @@
+// regex to allow only alphanumeric, dash, underscore
+export const isValidFolderName = (name: string) => /^[a-zA-Z0-9-_]+$/.test(name);
+
+export const isValidSecretPath = (path: string) =>
+  path
+    .split("/")
+    .filter((el) => el.length)
+    .every((name) => isValidFolderName(name));
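A quick behavioral check of the two validators (results follow directly from the regex):

import { isValidFolderName, isValidSecretPath } from "@app/lib/validator";

isValidFolderName("my-folder_01");   // true
isValidFolderName("bad/name");       // false - "/" is not in the allowed set
isValidSecretPath("/app/prod_db");   // true - every segment passes
isValidSecretPath("/app/bad name");  // false - the space fails the segment check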
@@ -1,3 +1,5 @@
+import "./lib/telemetry/instrumentation";
+
 import dotenv from "dotenv";
 import path from "path";

@@ -18,6 +20,7 @@ dotenv.config();
 const run = async () => {
   const logger = await initLogger();
   const appCfg = initEnvConfig(logger);
+
   const db = initDbConnection({
     dbConnectionUri: appCfg.DB_CONNECTION_URI,
     dbRootCert: appCfg.DB_ROOT_CERT,
@@ -22,6 +22,7 @@ import { TSmtpService } from "@app/services/smtp/smtp-service";

 import { globalRateLimiterCfg } from "./config/rateLimiter";
 import { addErrorsToResponseSchemas } from "./plugins/add-errors-to-response-schemas";
+import { apiMetrics } from "./plugins/api-metrics";
 import { fastifyErrHandler } from "./plugins/error-handler";
 import { registerExternalNextjs } from "./plugins/external-nextjs";
 import { serializerCompiler, validatorCompiler, ZodTypeProvider } from "./plugins/fastify-zod";

@@ -86,6 +87,10 @@ export const main = async ({ db, hsmModule, auditLogDb, smtp, logger, queue, key
   // pull ip based on various proxy headers
   await server.register(fastifyIp);

+  if (appCfg.OTEL_TELEMETRY_COLLECTION_ENABLED) {
+    await server.register(apiMetrics);
+  }
+
   await server.register(fastifySwagger);
   await server.register(fastifyFormBody);
   await server.register(fastifyErrHandler);
@@ -46,10 +46,10 @@ export const bootstrapCheck = async ({ db }: BootstrapOpt) => {
   await createTransport(smtpCfg)
     .verify()
     .then(async () => {
-      console.info("SMTP successfully connected");
+      console.info(`SMTP - Verified connection to ${appCfg.SMTP_HOST}:${appCfg.SMTP_PORT}`);
     })
-    .catch((err) => {
-      console.error(`SMTP - Failed to connect to ${appCfg.SMTP_HOST}:${appCfg.SMTP_PORT}`);
+    .catch((err: Error) => {
+      console.error(`SMTP - Failed to connect to ${appCfg.SMTP_HOST}:${appCfg.SMTP_PORT} - ${err.message}`);
       logger.error(err);
     });
backend/src/server/plugins/api-metrics.ts (new file, 21 lines)

@@ -0,0 +1,21 @@
+import opentelemetry from "@opentelemetry/api";
+import fp from "fastify-plugin";
+
+export const apiMetrics = fp(async (fastify) => {
+  const apiMeter = opentelemetry.metrics.getMeter("API");
+  const latencyHistogram = apiMeter.createHistogram("API_latency", {
+    unit: "ms"
+  });
+
+  fastify.addHook("onResponse", async (request, reply) => {
+    const { method } = request;
+    const route = request.routerPath;
+    const { statusCode } = reply;
+
+    latencyHistogram.record(reply.elapsedTime, {
+      route,
+      method,
+      statusCode
+    });
+  });
+});
@@ -10,6 +10,7 @@ import {
   GatewayTimeoutError,
   InternalServerError,
   NotFoundError,
+  OidcAuthError,
   RateLimitError,
   ScimRequestError,
   UnauthorizedError

@@ -83,7 +84,10 @@ export const fastifyErrHandler = fastifyPlugin(async (server: FastifyZodProvider
       status: error.status,
       detail: error.detail
     });
-  // Handle JWT errors and make them more human-readable for the end-user.
+  } else if (error instanceof OidcAuthError) {
+    void res
+      .status(HttpStatusCodes.InternalServerError)
+      .send({ statusCode: HttpStatusCodes.InternalServerError, message: error.message, error: error.name });
   } else if (error instanceof jwt.JsonWebTokenError) {
     const message = (() => {
@@ -201,6 +201,8 @@ import { getServerCfg, superAdminServiceFactory } from "@app/services/super-admi
 import { telemetryDALFactory } from "@app/services/telemetry/telemetry-dal";
 import { telemetryQueueServiceFactory } from "@app/services/telemetry/telemetry-queue";
 import { telemetryServiceFactory } from "@app/services/telemetry/telemetry-service";
+import { totpConfigDALFactory } from "@app/services/totp/totp-config-dal";
+import { totpServiceFactory } from "@app/services/totp/totp-service";
 import { userDALFactory } from "@app/services/user/user-dal";
 import { userServiceFactory } from "@app/services/user/user-service";
 import { userAliasDALFactory } from "@app/services/user-alias/user-alias-dal";

@@ -348,6 +350,7 @@ export const registerRoutes = async (
   const slackIntegrationDAL = slackIntegrationDALFactory(db);
   const projectSlackConfigDAL = projectSlackConfigDALFactory(db);
   const workflowIntegrationDAL = workflowIntegrationDALFactory(db);
+  const totpConfigDAL = totpConfigDALFactory(db);

   const externalGroupOrgRoleMappingDAL = externalGroupOrgRoleMappingDALFactory(db);

@@ -511,12 +514,19 @@ export const registerRoutes = async (
     projectMembershipDAL
   });

-  const loginService = authLoginServiceFactory({ userDAL, smtpService, tokenService, orgDAL });
+  const totpService = totpServiceFactory({
+    totpConfigDAL,
+    userDAL,
+    kmsService
+  });
+
+  const loginService = authLoginServiceFactory({ userDAL, smtpService, tokenService, orgDAL, totpService });
   const passwordService = authPaswordServiceFactory({
     tokenService,
     smtpService,
     authDAL,
-    userDAL
+    userDAL,
+    totpConfigDAL
   });

   const projectBotService = projectBotServiceFactory({ permissionService, projectBotDAL, projectDAL });

@@ -1369,7 +1379,8 @@ export const registerRoutes = async (
     workflowIntegration: workflowIntegrationService,
     migration: migrationService,
     externalGroupOrgRoleMapping: externalGroupOrgRoleMappingService,
-    projectTemplate: projectTemplateService
+    projectTemplate: projectTemplateService,
+    totp: totpService
   });

   const cronJobs: CronJob[] = [];
@@ -108,7 +108,8 @@ export const registerAuthRoutes = async (server: FastifyZodProvider) => {
         tokenVersionId: tokenVersion.id,
         accessVersion: tokenVersion.accessVersion,
         organizationId: decodedToken.organizationId,
-        isMfaVerified: decodedToken.isMfaVerified
+        isMfaVerified: decodedToken.isMfaVerified,
+        mfaMethod: decodedToken.mfaMethod
       },
       appCfg.AUTH_SECRET,
       { expiresIn: appCfg.JWT_AUTH_LIFETIME }
@@ -14,7 +14,8 @@ import { validateTemplateRegexField } from "@app/services/certificate-template/c
 const sanitizedEstConfig = CertificateTemplateEstConfigsSchema.pick({
   id: true,
   certificateTemplateId: true,
-  isEnabled: true
+  isEnabled: true,
+  disableBootstrapCertValidation: true
 });

 export const registerCertificateTemplateRouter = async (server: FastifyZodProvider) => {

@@ -241,11 +242,18 @@ export const registerCertificateTemplateRouter = async (server: FastifyZodProvid
       params: z.object({
         certificateTemplateId: z.string().trim()
       }),
-      body: z.object({
-        caChain: z.string().trim().min(1),
-        passphrase: z.string().min(1),
-        isEnabled: z.boolean().default(true)
-      }),
+      body: z
+        .object({
+          caChain: z.string().trim().optional(),
+          passphrase: z.string().min(1),
+          isEnabled: z.boolean().default(true),
+          disableBootstrapCertValidation: z.boolean().default(false)
+        })
+        .refine(
+          ({ caChain, disableBootstrapCertValidation }) =>
+            disableBootstrapCertValidation || (!disableBootstrapCertValidation && caChain),
+          "CA chain is required"
+        ),
       response: {
         200: sanitizedEstConfig
       }

@@ -289,8 +297,9 @@ export const registerCertificateTemplateRouter = async (server: FastifyZodProvid
         certificateTemplateId: z.string().trim()
       }),
       body: z.object({
-        caChain: z.string().trim().min(1).optional(),
+        caChain: z.string().trim().optional(),
         passphrase: z.string().min(1).optional(),
+        disableBootstrapCertValidation: z.boolean().optional(),
         isEnabled: z.boolean().optional()
       }),
      response: {
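The .refine on the EST config body enforces "caChain required unless bootstrap validation is disabled" at the schema level. A sketch of how the three interesting payloads parse (zod import assumed; payload values illustrative):

import { z } from "zod";

const estBody = z
  .object({
    caChain: z.string().trim().optional(),
    passphrase: z.string().min(1),
    isEnabled: z.boolean().default(true),
    disableBootstrapCertValidation: z.boolean().default(false)
  })
  .refine(
    ({ caChain, disableBootstrapCertValidation }) =>
      disableBootstrapCertValidation || (!disableBootstrapCertValidation && caChain),
    "CA chain is required"
  );

estBody.safeParse({ passphrase: "p" }).success;                                          // false - CA chain required
estBody.safeParse({ passphrase: "p", caChain: "-----BEGIN CERTIFICATE-----" }).success;  // true
estBody.safeParse({ passphrase: "p", disableBootstrapCertValidation: true }).success;    // true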
@@ -840,4 +840,91 @@ export const registerDashboardRouter = async (server: FastifyZodProvider) => {
       };
     }
   });

+  server.route({
+    method: "GET",
+    url: "/secrets-by-keys",
+    config: {
+      rateLimit: secretsLimit
+    },
+    schema: {
+      security: [
+        {
+          bearerAuth: []
+        }
+      ],
+      querystring: z.object({
+        projectId: z.string().trim(),
+        environment: z.string().trim(),
+        secretPath: z.string().trim().default("/").transform(removeTrailingSlash),
+        keys: z.string().trim().transform(decodeURIComponent)
+      }),
+      response: {
+        200: z.object({
+          secrets: secretRawSchema
+            .extend({
+              secretPath: z.string().optional(),
+              tags: SecretTagsSchema.pick({
+                id: true,
+                slug: true,
+                color: true
+              })
+                .extend({ name: z.string() })
+                .array()
+                .optional()
+            })
+            .array()
+            .optional()
+        })
+      }
+    },
+    onRequest: verifyAuth([AuthMode.JWT]),
+    handler: async (req) => {
+      const { secretPath, projectId, environment } = req.query;
+
+      const keys = req.query.keys?.split(",").filter((key) => Boolean(key.trim())) ?? [];
+      if (!keys.length) throw new BadRequestError({ message: "One or more keys required" });
+
+      const { secrets } = await server.services.secret.getSecretsRaw({
+        actorId: req.permission.id,
+        actor: req.permission.type,
+        actorOrgId: req.permission.orgId,
+        environment,
+        actorAuthMethod: req.permission.authMethod,
+        projectId,
+        path: secretPath,
+        keys
+      });
+
+      await server.services.auditLog.createAuditLog({
+        projectId,
+        ...req.auditLogInfo,
+        event: {
+          type: EventType.GET_SECRETS,
+          metadata: {
+            environment,
+            secretPath,
+            numberOfSecrets: secrets.length
+          }
+        }
+      });
+
+      if (getUserAgentType(req.headers["user-agent"]) !== UserAgentType.K8_OPERATOR) {
+        await server.services.telemetry.sendPostHogEvents({
+          event: PostHogEventTypes.SecretPulled,
+          distinctId: getTelemetryDistinctId(req),
+          properties: {
+            numberOfSecrets: secrets.length,
+            workspaceId: projectId,
+            environment,
+            secretPath,
+            channel: getUserAgentType(req.headers["user-agent"]),
+            ...req.auditLogInfo
+          }
+        });
+      }
+
+      return { secrets };
+    }
+  });
 };
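A hypothetical client call against the new endpoint (the base URL and path prefix are placeholders, as are the IDs and token; the route itself only accepts JWT auth):

const params = new URLSearchParams({
  projectId: "<project-id>",
  environment: "dev",
  secretPath: "/",
  keys: encodeURIComponent("DB_HOST,DB_PASSWORD") // encoded once here; the schema's decodeURIComponent transform reverses it
});

const res = await fetch(`https://app.infisical.com/api/v1/dashboard/secrets-by-keys?${params}`, {
  headers: { Authorization: "Bearer <jwt>" }
});
const { secrets } = await res.json();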
@@ -891,6 +891,48 @@ export const registerIntegrationAuthRouter = async (server: FastifyZodProvider)
     }
   });

+  server.route({
+    method: "GET",
+    url: "/:integrationAuthId/bitbucket/environments",
+    config: {
+      rateLimit: readLimit
+    },
+    onRequest: verifyAuth([AuthMode.JWT]),
+    schema: {
+      params: z.object({
+        integrationAuthId: z.string().trim()
+      }),
+      querystring: z.object({
+        workspaceSlug: z.string().trim().min(1, { message: "Workspace slug required" }),
+        repoSlug: z.string().trim().min(1, { message: "Repo slug required" })
+      }),
+      response: {
+        200: z.object({
+          environments: z
+            .object({
+              name: z.string(),
+              slug: z.string(),
+              uuid: z.string(),
+              type: z.string()
+            })
+            .array()
+        })
+      }
+    },
+    handler: async (req) => {
+      const environments = await server.services.integrationAuth.getBitbucketEnvironments({
+        actorId: req.permission.id,
+        actor: req.permission.type,
+        actorAuthMethod: req.permission.authMethod,
+        actorOrgId: req.permission.orgId,
+        id: req.params.integrationAuthId,
+        workspaceSlug: req.query.workspaceSlug,
+        repoSlug: req.query.repoSlug
+      });
+      return { environments };
+    }
+  });
+
   server.route({
     method: "GET",
     url: "/:integrationAuthId/northflank/secret-groups",
@ -15,7 +15,7 @@ import { AUDIT_LOGS, ORGANIZATIONS } from "@app/lib/api-docs";
import { getLastMidnightDateISO } from "@app/lib/fn";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { ActorType, AuthMode } from "@app/services/auth/auth-type";
import { ActorType, AuthMode, MfaMethod } from "@app/services/auth/auth-type";

import { integrationAuthPubSchema } from "../sanitizedSchemas";
@ -259,7 +259,8 @@ export const registerOrgRouter = async (server: FastifyZodProvider) => {
            message: "Membership role must be a valid slug"
          })
          .optional(),
        enforceMfa: z.boolean().optional()
        enforceMfa: z.boolean().optional(),
        selectedMfaMethod: z.nativeEnum(MfaMethod).optional()
      }),
      response: {
        200: z.object({
@ -4,6 +4,7 @@ import { SecretFoldersSchema } from "@app/db/schemas";
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import { FOLDERS } from "@app/lib/api-docs";
import { prefixWithSlash, removeTrailingSlash } from "@app/lib/fn";
import { isValidFolderName } from "@app/lib/validator";
import { readLimit, secretsLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
@ -25,7 +26,13 @@ export const registerSecretFolderRouter = async (server: FastifyZodProvider) => {
      body: z.object({
        workspaceId: z.string().trim().describe(FOLDERS.CREATE.workspaceId),
        environment: z.string().trim().describe(FOLDERS.CREATE.environment),
        name: z.string().trim().describe(FOLDERS.CREATE.name),
        name: z
          .string()
          .trim()
          .describe(FOLDERS.CREATE.name)
          .refine((name) => isValidFolderName(name), {
            message: "Invalid folder name. Only alphanumeric characters, dashes, and underscores are allowed."
          }),
        path: z
          .string()
          .trim()

@ -97,7 +104,13 @@ export const registerSecretFolderRouter = async (server: FastifyZodProvider) => {
      body: z.object({
        workspaceId: z.string().trim().describe(FOLDERS.UPDATE.workspaceId),
        environment: z.string().trim().describe(FOLDERS.UPDATE.environment),
        name: z.string().trim().describe(FOLDERS.UPDATE.name),
        name: z
          .string()
          .trim()
          .describe(FOLDERS.UPDATE.name)
          .refine((name) => isValidFolderName(name), {
            message: "Invalid folder name. Only alphanumeric characters, dashes, and underscores are allowed."
          }),
        path: z
          .string()
          .trim()

@ -170,7 +183,13 @@ export const registerSecretFolderRouter = async (server: FastifyZodProvider) => {
          .object({
            id: z.string().describe(FOLDERS.UPDATE.folderId),
            environment: z.string().trim().describe(FOLDERS.UPDATE.environment),
            name: z.string().trim().describe(FOLDERS.UPDATE.name),
            name: z
              .string()
              .trim()
              .describe(FOLDERS.UPDATE.name)
              .refine((name) => isValidFolderName(name), {
                message: "Invalid folder name. Only alphanumeric characters, dashes, and underscores are allowed."
              }),
            path: z
              .string()
              .trim()
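The isValidFolderName helper referenced above lives in @app/lib/validator and is not shown in this diff. A plausible sketch consistent with the error message would be a single allow-list regex; this is a hypothetical illustration, not the repository's actual implementation:

// Hypothetical validator matching the error message's allowed character set.
export const isValidFolderName = (name: string): boolean => /^[a-zA-Z0-9-_]+$/.test(name);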
@ -169,4 +169,103 @@ export const registerUserRouter = async (server: FastifyZodProvider) => {
      return groupMemberships;
    }
  });

  server.route({
    method: "GET",
    url: "/me/totp",
    config: {
      rateLimit: readLimit
    },
    schema: {
      response: {
        200: z.object({
          isVerified: z.boolean(),
          recoveryCodes: z.string().array()
        })
      }
    },
    onRequest: verifyAuth([AuthMode.JWT]),
    handler: async (req) => {
      return server.services.totp.getUserTotpConfig({
        userId: req.permission.id
      });
    }
  });

  server.route({
    method: "DELETE",
    url: "/me/totp",
    config: {
      rateLimit: writeLimit
    },
    onRequest: verifyAuth([AuthMode.JWT]),
    handler: async (req) => {
      return server.services.totp.deleteUserTotpConfig({
        userId: req.permission.id
      });
    }
  });

  server.route({
    method: "POST",
    url: "/me/totp/register",
    config: {
      rateLimit: writeLimit
    },
    schema: {
      response: {
        200: z.object({
          otpUrl: z.string(),
          recoveryCodes: z.string().array()
        })
      }
    },
    onRequest: verifyAuth([AuthMode.JWT], {
      requireOrg: false
    }),
    handler: async (req) => {
      return server.services.totp.registerUserTotp({
        userId: req.permission.id
      });
    }
  });

  server.route({
    method: "POST",
    url: "/me/totp/verify",
    config: {
      rateLimit: writeLimit
    },
    schema: {
      body: z.object({
        totp: z.string()
      }),
      response: {
        200: z.object({})
      }
    },
    onRequest: verifyAuth([AuthMode.JWT], {
      requireOrg: false
    }),
    handler: async (req) => {
      return server.services.totp.verifyUserTotpConfig({
        userId: req.permission.id,
        totp: req.body.totp
      });
    }
  });

  server.route({
    method: "POST",
    url: "/me/totp/recovery-codes",
    config: {
      rateLimit: writeLimit
    },
    onRequest: verifyAuth([AuthMode.JWT]),
    handler: async (req) => {
      return server.services.totp.createUserTotpRecoveryCodes({
        userId: req.permission.id
      });
    }
  });
};
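Taken together, these routes imply a straightforward enrollment flow. A hedged client-side sketch — the `api` object is a placeholder HTTP client and the route prefix is assumed:

// 1. Start enrollment: server returns an otpauth:// URL plus recovery codes.
const { otpUrl, recoveryCodes } = await api.post("/me/totp/register");
// 2. User scans otpUrl in an authenticator app, then confirms with a live code.
await api.post("/me/totp/verify", { totp: "123456" });
// 3. Afterwards, the verified config and remaining recovery codes can be fetched.
const { isVerified } = await api.get("/me/totp");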
@ -2,8 +2,9 @@ import jwt from "jsonwebtoken";
import { z } from "zod";

import { getConfig } from "@app/lib/config/env";
import { BadRequestError, NotFoundError } from "@app/lib/errors";
import { mfaRateLimit } from "@app/server/config/rateLimiter";
import { AuthModeMfaJwtTokenPayload, AuthTokenType } from "@app/services/auth/auth-type";
import { AuthModeMfaJwtTokenPayload, AuthTokenType, MfaMethod } from "@app/services/auth/auth-type";

export const registerMfaRouter = async (server: FastifyZodProvider) => {
  const cfg = getConfig();

@ -49,6 +50,38 @@ export const registerMfaRouter = async (server: FastifyZodProvider) => {
    }
  });

  server.route({
    method: "GET",
    url: "/mfa/check/totp",
    config: {
      rateLimit: mfaRateLimit
    },
    schema: {
      response: {
        200: z.object({
          isVerified: z.boolean()
        })
      }
    },
    handler: async (req) => {
      try {
        const totpConfig = await server.services.totp.getUserTotpConfig({
          userId: req.mfa.userId
        });

        return {
          isVerified: Boolean(totpConfig)
        };
      } catch (error) {
        if (error instanceof NotFoundError || error instanceof BadRequestError) {
          return { isVerified: false };
        }

        throw error;
      }
    }
  });

  server.route({
    url: "/mfa/verify",
    method: "POST",

@ -57,7 +90,8 @@ export const registerMfaRouter = async (server: FastifyZodProvider) => {
    },
    schema: {
      body: z.object({
        mfaToken: z.string().trim()
        mfaToken: z.string().trim(),
        mfaMethod: z.nativeEnum(MfaMethod).optional().default(MfaMethod.EMAIL)
      }),
      response: {
        200: z.object({

@ -86,7 +120,8 @@ export const registerMfaRouter = async (server: FastifyZodProvider) => {
        ip: req.realIp,
        userId: req.mfa.userId,
        orgId: req.mfa.orgId,
        mfaToken: req.body.mfaToken
        mfaToken: req.body.mfaToken,
        mfaMethod: req.body.mfaMethod
      });

      void res.setCookie("jid", token.refresh, {
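With the new mfaMethod field, the verify request distinguishes email codes from authenticator codes, defaulting to email so existing clients keep working. A hedged example payload for the updated route:

// POST /mfa/verify — mfaMethod defaults to "email" when omitted.
const body = { mfaToken: "123456", mfaMethod: "totp" };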
@ -27,7 +27,7 @@ export const registerProjectMembershipRouter = async (server: FastifyZodProvider) => {
      body: z.object({
        emails: z.string().email().array().default([]).describe(PROJECT_USERS.INVITE_MEMBER.emails),
        usernames: z.string().array().default([]).describe(PROJECT_USERS.INVITE_MEMBER.usernames),
        roleSlugs: z.string().array().optional().describe(PROJECT_USERS.INVITE_MEMBER.roleSlugs)
        roleSlugs: z.string().array().min(1).optional().describe(PROJECT_USERS.INVITE_MEMBER.roleSlugs)
      }),
      response: {
        200: z.object({

@ -49,7 +49,7 @@ export const registerProjectMembershipRouter = async (server: FastifyZodProvider) => {
        projects: [
          {
            id: req.params.projectId,
            projectRoleSlug: [ProjectMembershipRole.Member]
            projectRoleSlug: req.body.roleSlugs || [ProjectMembershipRole.Member]
          }
        ]
      });
@ -4,7 +4,7 @@ import { AuthTokenSessionsSchema, OrganizationsSchema, UserEncryptionKeysSchema,
import { ApiKeysSchema } from "@app/db/schemas/api-keys";
import { authRateLimit, readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMethod, AuthMode } from "@app/services/auth/auth-type";
import { AuthMethod, AuthMode, MfaMethod } from "@app/services/auth/auth-type";

export const registerUserRouter = async (server: FastifyZodProvider) => {
  server.route({

@ -56,7 +56,8 @@ export const registerUserRouter = async (server: FastifyZodProvider) => {
    },
    schema: {
      body: z.object({
        isMfaEnabled: z.boolean()
        isMfaEnabled: z.boolean().optional(),
        selectedMfaMethod: z.nativeEnum(MfaMethod).optional()
      }),
      response: {
        200: z.object({

@ -66,7 +67,12 @@ export const registerUserRouter = async (server: FastifyZodProvider) => {
    },
    preHandler: verifyAuth([AuthMode.JWT, AuthMode.API_KEY]),
    handler: async (req) => {
      const user = await server.services.user.toggleUserMfa(req.permission.id, req.body.isMfaEnabled);
      const user = await server.services.user.updateUserMfa({
        userId: req.permission.id,
        isMfaEnabled: req.body.isMfaEnabled,
        selectedMfaMethod: req.body.selectedMfaMethod
      });

      return { user };
    }
  });
@ -48,7 +48,8 @@ export const registerLoginRouter = async (server: FastifyZodProvider) => {
      response: {
        200: z.object({
          token: z.string(),
          isMfaEnabled: z.boolean()
          isMfaEnabled: z.boolean(),
          mfaMethod: z.string().optional()
        })
      }
    },

@ -64,7 +65,8 @@ export const registerLoginRouter = async (server: FastifyZodProvider) => {
      if (tokens.isMfaEnabled) {
        return {
          token: tokens.mfa as string,
          isMfaEnabled: true
          isMfaEnabled: true,
          mfaMethod: tokens.mfaMethod
        };
      }
@ -17,6 +17,7 @@ import { TokenType } from "../auth-token/auth-token-types";
import { TOrgDALFactory } from "../org/org-dal";
import { SmtpTemplates, TSmtpService } from "../smtp/smtp-service";
import { LoginMethod } from "../super-admin/super-admin-types";
import { TTotpServiceFactory } from "../totp/totp-service";
import { TUserDALFactory } from "../user/user-dal";
import { enforceUserLockStatus, validateProviderAuthToken } from "./auth-fns";
import {

@ -26,13 +27,14 @@ import {
  TOauthTokenExchangeDTO,
  TVerifyMfaTokenDTO
} from "./auth-login-type";
import { AuthMethod, AuthModeJwtTokenPayload, AuthModeMfaJwtTokenPayload, AuthTokenType } from "./auth-type";
import { AuthMethod, AuthModeJwtTokenPayload, AuthModeMfaJwtTokenPayload, AuthTokenType, MfaMethod } from "./auth-type";

type TAuthLoginServiceFactoryDep = {
  userDAL: TUserDALFactory;
  orgDAL: TOrgDALFactory;
  tokenService: TAuthTokenServiceFactory;
  smtpService: TSmtpService;
  totpService: Pick<TTotpServiceFactory, "verifyUserTotp" | "verifyWithUserRecoveryCode">;
};

export type TAuthLoginFactory = ReturnType<typeof authLoginServiceFactory>;

@ -40,7 +42,8 @@ export const authLoginServiceFactory = ({
  userDAL,
  tokenService,
  smtpService,
  orgDAL
  orgDAL,
  totpService
}: TAuthLoginServiceFactoryDep) => {
  /*
   * Private

@ -100,7 +103,8 @@ export const authLoginServiceFactory = ({
    userAgent,
    organizationId,
    authMethod,
    isMfaVerified
    isMfaVerified,
    mfaMethod
  }: {
    user: TUsers;
    ip: string;

@ -108,6 +112,7 @@ export const authLoginServiceFactory = ({
    organizationId?: string;
    authMethod: AuthMethod;
    isMfaVerified?: boolean;
    mfaMethod?: MfaMethod;
  }) => {
    const cfg = getConfig();
    await updateUserDeviceSession(user, ip, userAgent);

@ -126,7 +131,8 @@ export const authLoginServiceFactory = ({
        tokenVersionId: tokenSession.id,
        accessVersion: tokenSession.accessVersion,
        organizationId,
        isMfaVerified
        isMfaVerified,
        mfaMethod
      },
      cfg.AUTH_SECRET,
      { expiresIn: cfg.JWT_AUTH_LIFETIME }

@ -140,7 +146,8 @@ export const authLoginServiceFactory = ({
        tokenVersionId: tokenSession.id,
        refreshVersion: tokenSession.refreshVersion,
        organizationId,
        isMfaVerified
        isMfaVerified,
        mfaMethod
      },
      cfg.AUTH_SECRET,
      { expiresIn: cfg.JWT_REFRESH_LIFETIME }

@ -353,8 +360,12 @@ export const authLoginServiceFactory = ({
      });
    }

    // send multi factor auth token if they have it enabled
    if ((selectedOrg.enforceMfa || user.isMfaEnabled) && user.email && !decodedToken.isMfaVerified) {
    const shouldCheckMfa = selectedOrg.enforceMfa || user.isMfaEnabled;
    const orgMfaMethod = selectedOrg.enforceMfa ? selectedOrg.selectedMfaMethod ?? MfaMethod.EMAIL : undefined;
    const userMfaMethod = user.isMfaEnabled ? user.selectedMfaMethod ?? MfaMethod.EMAIL : undefined;
    const mfaMethod = orgMfaMethod ?? userMfaMethod;

    if (shouldCheckMfa && (!decodedToken.isMfaVerified || decodedToken.mfaMethod !== mfaMethod)) {
      enforceUserLockStatus(Boolean(user.isLocked), user.temporaryLockDateEnd);

      const mfaToken = jwt.sign(
@ -369,12 +380,14 @@ export const authLoginServiceFactory = ({
        }
      );

      await sendUserMfaCode({
        userId: user.id,
        email: user.email
      });
      if (mfaMethod === MfaMethod.EMAIL && user.email) {
        await sendUserMfaCode({
          userId: user.id,
          email: user.email
        });
      }

      return { isMfaEnabled: true, mfa: mfaToken } as const;
      return { isMfaEnabled: true, mfa: mfaToken, mfaMethod } as const;
    }

    const tokens = await generateUserTokens({

@ -383,7 +396,8 @@ export const authLoginServiceFactory = ({
      userAgent,
      ip: ipAddress,
      organizationId,
      isMfaVerified: decodedToken.isMfaVerified
      isMfaVerified: decodedToken.isMfaVerified,
      mfaMethod: decodedToken.mfaMethod
    });

    return {

@ -458,17 +472,39 @@ export const authLoginServiceFactory = ({
   * Multi factor authentication verification of code
   * Third step of login in which user completes with mfa
   * */
  const verifyMfaToken = async ({ userId, mfaToken, mfaJwtToken, ip, userAgent, orgId }: TVerifyMfaTokenDTO) => {
  const verifyMfaToken = async ({
    userId,
    mfaToken,
    mfaMethod,
    mfaJwtToken,
    ip,
    userAgent,
    orgId
  }: TVerifyMfaTokenDTO) => {
    const appCfg = getConfig();
    const user = await userDAL.findById(userId);
    enforceUserLockStatus(Boolean(user.isLocked), user.temporaryLockDateEnd);

    try {
      await tokenService.validateTokenForUser({
        type: TokenType.TOKEN_EMAIL_MFA,
        userId,
        code: mfaToken
      });
      if (mfaMethod === MfaMethod.EMAIL) {
        await tokenService.validateTokenForUser({
          type: TokenType.TOKEN_EMAIL_MFA,
          userId,
          code: mfaToken
        });
      } else if (mfaMethod === MfaMethod.TOTP) {
        if (mfaToken.length === 6) {
          await totpService.verifyUserTotp({
            userId,
            totp: mfaToken
          });
        } else {
          await totpService.verifyWithUserRecoveryCode({
            userId,
            recoveryCode: mfaToken
          });
        }
      }
    } catch (err) {
      const updatedUser = await processFailedMfaAttempt(userId);
      if (updatedUser.isLocked) {

@ -513,7 +549,8 @@ export const authLoginServiceFactory = ({
      userAgent,
      organizationId: orgId,
      authMethod: decodedToken.authMethod,
      isMfaVerified: true
      isMfaVerified: true,
      mfaMethod
    });

    return { token, user: userEnc };
@ -1,4 +1,4 @@
import { AuthMethod } from "./auth-type";
import { AuthMethod, MfaMethod } from "./auth-type";

export type TLoginGenServerPublicKeyDTO = {
  email: string;

@ -19,6 +19,7 @@ export type TLoginClientProofDTO = {
export type TVerifyMfaTokenDTO = {
  userId: string;
  mfaToken: string;
  mfaMethod: MfaMethod;
  mfaJwtToken: string;
  ip: string;
  userAgent: string;
@ -8,6 +8,7 @@ import { generateSrpServerKey, srpCheckClientProof } from "@app/lib/crypto";
import { TAuthTokenServiceFactory } from "../auth-token/auth-token-service";
import { TokenType } from "../auth-token/auth-token-types";
import { SmtpTemplates, TSmtpService } from "../smtp/smtp-service";
import { TTotpConfigDALFactory } from "../totp/totp-config-dal";
import { TUserDALFactory } from "../user/user-dal";
import { TAuthDALFactory } from "./auth-dal";
import { TChangePasswordDTO, TCreateBackupPrivateKeyDTO, TResetPasswordViaBackupKeyDTO } from "./auth-password-type";

@ -18,6 +19,7 @@ type TAuthPasswordServiceFactoryDep = {
  userDAL: TUserDALFactory;
  tokenService: TAuthTokenServiceFactory;
  smtpService: TSmtpService;
  totpConfigDAL: Pick<TTotpConfigDALFactory, "delete">;
};

export type TAuthPasswordFactory = ReturnType<typeof authPaswordServiceFactory>;

@ -25,7 +27,8 @@ export const authPaswordServiceFactory = ({
  authDAL,
  userDAL,
  tokenService,
  smtpService
  smtpService,
  totpConfigDAL
}: TAuthPasswordServiceFactoryDep) => {
  /*
   * Pre setup for pass change with srp protocol

@ -185,6 +188,12 @@ export const authPaswordServiceFactory = ({
      temporaryLockDateEnd: null,
      consecutiveFailedMfaAttempts: 0
    });

    /* we reset the mobile authenticator configs of the user
    because we want this to be one of the recovery modes from account lockout */
    await totpConfigDAL.delete({
      userId
    });
  };

  /*
@ -53,6 +53,7 @@ export type AuthModeJwtTokenPayload = {
  accessVersion: number;
  organizationId?: string;
  isMfaVerified?: boolean;
  mfaMethod?: MfaMethod;
};

export type AuthModeMfaJwtTokenPayload = {

@ -71,6 +72,7 @@ export type AuthModeRefreshJwtTokenPayload = {
  refreshVersion: number;
  organizationId?: string;
  isMfaVerified?: boolean;
  mfaMethod?: MfaMethod;
};

export type AuthModeProviderJwtTokenPayload = {

@ -85,3 +87,8 @@ export type AuthModeProviderSignUpTokenPayload = {
  authTokenType: AuthTokenType.SIGNUP_TOKEN;
  userId: string;
};

export enum MfaMethod {
  EMAIL = "email",
  TOTP = "totp"
}
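This enum drives method selection during login: an org-enforced method takes precedence over the user's own preference, and email is the fallback for both. A small sketch mirroring the resolution logic shown in the auth-login service above — so if an org enforces TOTP while a user prefers email, TOTP wins:

// Mirrors authLoginServiceFactory's resolution (sketch of existing logic, not new behavior).
const orgMfaMethod = selectedOrg.enforceMfa ? selectedOrg.selectedMfaMethod ?? MfaMethod.EMAIL : undefined;
const userMfaMethod = user.isMfaEnabled ? user.selectedMfaMethod ?? MfaMethod.EMAIL : undefined;
const mfaMethod = orgMfaMethod ?? userMfaMethod; // org setting wins when both exist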
@ -235,7 +235,8 @@ export const certificateTemplateServiceFactory = ({
    actorId,
    actorAuthMethod,
    actor,
    actorOrgId
    actorOrgId,
    disableBootstrapCertValidation
  }: TCreateEstConfigurationDTO) => {
    const plan = await licenseService.getPlan(actorOrgId);
    if (!plan.pkiEst) {

@ -266,39 +267,45 @@ export const certificateTemplateServiceFactory = ({

    const appCfg = getConfig();

    const certificateManagerKmsId = await getProjectKmsCertificateKeyId({
      projectId: certTemplate.projectId,
      projectDAL,
      kmsService
    });
    let encryptedCaChain: Buffer | undefined;
    if (caChain) {
      const certificateManagerKmsId = await getProjectKmsCertificateKeyId({
        projectId: certTemplate.projectId,
        projectDAL,
        kmsService
      });

    // validate CA chain
    const certificates = caChain
      .match(/-----BEGIN CERTIFICATE-----[\s\S]+?-----END CERTIFICATE-----/g)
      ?.map((cert) => new x509.X509Certificate(cert));
      // validate CA chain
      const certificates = caChain
        .match(/-----BEGIN CERTIFICATE-----[\s\S]+?-----END CERTIFICATE-----/g)
        ?.map((cert) => new x509.X509Certificate(cert));

    if (!certificates) {
      throw new BadRequestError({ message: "Failed to parse certificate chain" });
      if (!certificates) {
        throw new BadRequestError({ message: "Failed to parse certificate chain" });
      }

      if (!(await isCertChainValid(certificates))) {
        throw new BadRequestError({ message: "Invalid certificate chain" });
      }

      const kmsEncryptor = await kmsService.encryptWithKmsKey({
        kmsId: certificateManagerKmsId
      });

      const { cipherTextBlob } = await kmsEncryptor({
        plainText: Buffer.from(caChain)
      });

      encryptedCaChain = cipherTextBlob;
    }

    if (!(await isCertChainValid(certificates))) {
      throw new BadRequestError({ message: "Invalid certificate chain" });
    }

    const kmsEncryptor = await kmsService.encryptWithKmsKey({
      kmsId: certificateManagerKmsId
    });

    const { cipherTextBlob: encryptedCaChain } = await kmsEncryptor({
      plainText: Buffer.from(caChain)
    });

    const hashedPassphrase = await bcrypt.hash(passphrase, appCfg.SALT_ROUNDS);
    const estConfig = await certificateTemplateEstConfigDAL.create({
      certificateTemplateId,
      hashedPassphrase,
      encryptedCaChain,
      isEnabled
      isEnabled,
      disableBootstrapCertValidation
    });

    return { ...estConfig, projectId: certTemplate.projectId };

@ -312,7 +319,8 @@ export const certificateTemplateServiceFactory = ({
    actorId,
    actorAuthMethod,
    actor,
    actorOrgId
    actorOrgId,
    disableBootstrapCertValidation
  }: TUpdateEstConfigurationDTO) => {
    const plan = await licenseService.getPlan(actorOrgId);
    if (!plan.pkiEst) {

@ -360,7 +368,8 @@ export const certificateTemplateServiceFactory = ({
    });

    const updatedData: TCertificateTemplateEstConfigsUpdate = {
      isEnabled
      isEnabled,
      disableBootstrapCertValidation
    };

    if (caChain) {

@ -442,18 +451,24 @@ export const certificateTemplateServiceFactory = ({
      kmsId: certificateManagerKmsId
    });

    const decryptedCaChain = await kmsDecryptor({
      cipherTextBlob: estConfig.encryptedCaChain
    });
    let decryptedCaChain = "";
    if (estConfig.encryptedCaChain) {
      decryptedCaChain = (
        await kmsDecryptor({
          cipherTextBlob: estConfig.encryptedCaChain
        })
      ).toString();
    }

    return {
      certificateTemplateId,
      id: estConfig.id,
      isEnabled: estConfig.isEnabled,
      caChain: decryptedCaChain.toString(),
      caChain: decryptedCaChain,
      hashedPassphrase: estConfig.hashedPassphrase,
      projectId: certTemplate.projectId,
      orgId: certTemplate.orgId
      orgId: certTemplate.orgId,
      disableBootstrapCertValidation: estConfig.disableBootstrapCertValidation
    };
  };
@ -34,9 +34,10 @@ export type TDeleteCertTemplateDTO = {

export type TCreateEstConfigurationDTO = {
  certificateTemplateId: string;
  caChain: string;
  caChain?: string;
  passphrase: string;
  isEnabled: boolean;
  disableBootstrapCertValidation: boolean;
} & Omit<TProjectPermission, "projectId">;

export type TUpdateEstConfigurationDTO = {

@ -44,6 +45,7 @@ export type TUpdateEstConfigurationDTO = {
  caChain?: string;
  passphrase?: string;
  isEnabled?: boolean;
  disableBootstrapCertValidation?: boolean;
} & Omit<TProjectPermission, "projectId">;

export type TGetEstConfigurationDTO =
@ -29,7 +29,7 @@ import {
} from "./identity-aws-auth-types";

type TIdentityAwsAuthServiceFactoryDep = {
  identityAccessTokenDAL: Pick<TIdentityAccessTokenDALFactory, "create">;
  identityAccessTokenDAL: Pick<TIdentityAccessTokenDALFactory, "create" | "delete">;
  identityAwsAuthDAL: Pick<TIdentityAwsAuthDALFactory, "findOne" | "transaction" | "create" | "updateById" | "delete">;
  identityOrgMembershipDAL: Pick<TIdentityOrgDALFactory, "findOne">;
  licenseService: Pick<TLicenseServiceFactory, "getPlan">;

@ -346,6 +346,8 @@ export const identityAwsAuthServiceFactory = ({

  const revokedIdentityAwsAuth = await identityAwsAuthDAL.transaction(async (tx) => {
    const deletedAwsAuth = await identityAwsAuthDAL.delete({ identityId }, tx);
    await identityAccessTokenDAL.delete({ identityId, authMethod: IdentityAuthMethod.AWS_AUTH }, tx);

    return { ...deletedAwsAuth?.[0], orgId: identityMembershipOrg.orgId };
  });
  return revokedIdentityAwsAuth;
@ -30,7 +30,7 @@ type TIdentityAzureAuthServiceFactoryDep = {
    "findOne" | "transaction" | "create" | "updateById" | "delete"
  >;
  identityOrgMembershipDAL: Pick<TIdentityOrgDALFactory, "findOne">;
  identityAccessTokenDAL: Pick<TIdentityAccessTokenDALFactory, "create">;
  identityAccessTokenDAL: Pick<TIdentityAccessTokenDALFactory, "create" | "delete">;
  permissionService: Pick<TPermissionServiceFactory, "getOrgPermission">;
  licenseService: Pick<TLicenseServiceFactory, "getPlan">;
};

@ -70,7 +70,9 @@ export const identityAzureAuthServiceFactory = ({
      .map((servicePrincipalId) => servicePrincipalId.trim())
      .some((servicePrincipalId) => servicePrincipalId === azureIdentity.oid);

    if (!isServicePrincipalAllowed) throw new UnauthorizedError({ message: "Service principal not allowed" });
    if (!isServicePrincipalAllowed) {
      throw new UnauthorizedError({ message: `Service principal '${azureIdentity.oid}' not allowed` });
    }
  }

  const identityAccessToken = await identityAzureAuthDAL.transaction(async (tx) => {

@ -317,6 +319,8 @@ export const identityAzureAuthServiceFactory = ({

  const revokedIdentityAzureAuth = await identityAzureAuthDAL.transaction(async (tx) => {
    const deletedAzureAuth = await identityAzureAuthDAL.delete({ identityId }, tx);
    await identityAccessTokenDAL.delete({ identityId, authMethod: IdentityAuthMethod.AZURE_AUTH }, tx);

    return { ...deletedAzureAuth?.[0], orgId: identityMembershipOrg.orgId };
  });
  return revokedIdentityAzureAuth;
@ -28,7 +28,7 @@ import {
type TIdentityGcpAuthServiceFactoryDep = {
  identityGcpAuthDAL: Pick<TIdentityGcpAuthDALFactory, "findOne" | "transaction" | "create" | "updateById" | "delete">;
  identityOrgMembershipDAL: Pick<TIdentityOrgDALFactory, "findOne">;
  identityAccessTokenDAL: Pick<TIdentityAccessTokenDALFactory, "create">;
  identityAccessTokenDAL: Pick<TIdentityAccessTokenDALFactory, "create" | "delete">;
  permissionService: Pick<TPermissionServiceFactory, "getOrgPermission">;
  licenseService: Pick<TLicenseServiceFactory, "getPlan">;
};

@ -365,6 +365,8 @@ export const identityGcpAuthServiceFactory = ({

  const revokedIdentityGcpAuth = await identityGcpAuthDAL.transaction(async (tx) => {
    const deletedGcpAuth = await identityGcpAuthDAL.delete({ identityId }, tx);
    await identityAccessTokenDAL.delete({ identityId, authMethod: IdentityAuthMethod.GCP_AUTH }, tx);

    return { ...deletedGcpAuth?.[0], orgId: identityMembershipOrg.orgId };
  });
  return revokedIdentityGcpAuth;
@ -41,7 +41,7 @@ type TIdentityKubernetesAuthServiceFactoryDep = {
    TIdentityKubernetesAuthDALFactory,
    "create" | "findOne" | "transaction" | "updateById" | "delete"
  >;
  identityAccessTokenDAL: Pick<TIdentityAccessTokenDALFactory, "create">;
  identityAccessTokenDAL: Pick<TIdentityAccessTokenDALFactory, "create" | "delete">;
  identityOrgMembershipDAL: Pick<TIdentityOrgDALFactory, "findOne" | "findById">;
  orgBotDAL: Pick<TOrgBotDALFactory, "findOne" | "transaction" | "create">;
  permissionService: Pick<TPermissionServiceFactory, "getOrgPermission">;

@ -622,6 +622,7 @@ export const identityKubernetesAuthServiceFactory = ({

  const revokedIdentityKubernetesAuth = await identityKubernetesAuthDAL.transaction(async (tx) => {
    const deletedKubernetesAuth = await identityKubernetesAuthDAL.delete({ identityId }, tx);
    await identityAccessTokenDAL.delete({ identityId, authMethod: IdentityAuthMethod.KUBERNETES_AUTH }, tx);
    return { ...deletedKubernetesAuth?.[0], orgId: identityMembershipOrg.orgId };
  });
  return revokedIdentityKubernetesAuth;
@ -39,7 +39,7 @@ import {
type TIdentityOidcAuthServiceFactoryDep = {
  identityOidcAuthDAL: TIdentityOidcAuthDALFactory;
  identityOrgMembershipDAL: Pick<TIdentityOrgDALFactory, "findOne">;
  identityAccessTokenDAL: Pick<TIdentityAccessTokenDALFactory, "create">;
  identityAccessTokenDAL: Pick<TIdentityAccessTokenDALFactory, "create" | "delete">;
  permissionService: Pick<TPermissionServiceFactory, "getOrgPermission">;
  licenseService: Pick<TLicenseServiceFactory, "getPlan">;
  orgBotDAL: Pick<TOrgBotDALFactory, "findOne" | "transaction" | "create">;

@ -539,6 +539,8 @@ export const identityOidcAuthServiceFactory = ({

  const revokedIdentityOidcAuth = await identityOidcAuthDAL.transaction(async (tx) => {
    const deletedOidcAuth = await identityOidcAuthDAL.delete({ identityId }, tx);
    await identityAccessTokenDAL.delete({ identityId, authMethod: IdentityAuthMethod.OIDC_AUTH }, tx);

    return { ...deletedOidcAuth?.[0], orgId: identityMembershipOrg.orgId };
  });
@ -20,6 +20,7 @@ import { getApps } from "./integration-app-list";
import { TIntegrationAuthDALFactory } from "./integration-auth-dal";
import { IntegrationAuthMetadataSchema, TIntegrationAuthMetadata } from "./integration-auth-schema";
import {
  TBitbucketEnvironment,
  TBitbucketWorkspace,
  TChecklyGroups,
  TDeleteIntegrationAuthByIdDTO,

@ -30,6 +31,7 @@ import {
  THerokuPipelineCoupling,
  TIntegrationAuthAppsDTO,
  TIntegrationAuthAwsKmsKeyDTO,
  TIntegrationAuthBitbucketEnvironmentsDTO,
  TIntegrationAuthBitbucketWorkspaceDTO,
  TIntegrationAuthChecklyGroupsDTO,
  TIntegrationAuthGithubEnvsDTO,

@ -1261,6 +1263,55 @@ export const integrationAuthServiceFactory = ({
    return workspaces;
  };

  const getBitbucketEnvironments = async ({
    workspaceSlug,
    repoSlug,
    actorId,
    actor,
    actorOrgId,
    actorAuthMethod,
    id
  }: TIntegrationAuthBitbucketEnvironmentsDTO) => {
    const integrationAuth = await integrationAuthDAL.findById(id);
    if (!integrationAuth) throw new NotFoundError({ message: `Integration auth with ID '${id}' not found` });

    const { permission } = await permissionService.getProjectPermission(
      actor,
      actorId,
      integrationAuth.projectId,
      actorAuthMethod,
      actorOrgId
    );
    ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.Integrations);
    const { shouldUseSecretV2Bridge, botKey } = await projectBotService.getBotKey(integrationAuth.projectId);
    const { accessToken } = await getIntegrationAccessToken(integrationAuth, shouldUseSecretV2Bridge, botKey);
    const environments: TBitbucketEnvironment[] = [];
    let hasNextPage = true;

    let environmentsUrl = `${IntegrationUrls.BITBUCKET_API_URL}/2.0/repositories/${workspaceSlug}/${repoSlug}/environments`;

    while (hasNextPage) {
      // eslint-disable-next-line
      const { data }: { data: { values: TBitbucketEnvironment[]; next: string } } = await request.get(environmentsUrl, {
        headers: {
          Authorization: `Bearer ${accessToken}`,
          "Accept-Encoding": "application/json"
        }
      });

      if (data?.values.length > 0) {
        environments.push(...data.values);
      }

      if (data.next) {
        environmentsUrl = data.next;
      } else {
        hasNextPage = false;
      }
    }
    return environments;
  };

  const getNorthFlankSecretGroups = async ({
    id,
    actor,

@ -1499,6 +1550,7 @@ export const integrationAuthServiceFactory = ({
    getNorthFlankSecretGroups,
    getTeamcityBuildConfigs,
    getBitbucketWorkspaces,
    getBitbucketEnvironments,
    getIntegrationAccessToken,
    duplicateIntegrationAuth
  };
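Bitbucket's environments API is paginated via a next link, which the loop above follows until exhausted. A hedged sketch of calling the new service through its route; the `api` client and the integration-auth route prefix are assumptions for illustration:

// GET /:integrationAuthId/bitbucket/environments?workspaceSlug=...&repoSlug=...
const { environments } = await api.get(
  `/integration-auth/${integrationAuthId}/bitbucket/environments`,
  { params: { workspaceSlug: "my-workspace", repoSlug: "my-repo" } }
);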
@ -99,6 +99,12 @@ export type TIntegrationAuthBitbucketWorkspaceDTO = {
  id: string;
} & Omit<TProjectPermission, "projectId">;

export type TIntegrationAuthBitbucketEnvironmentsDTO = {
  workspaceSlug: string;
  repoSlug: string;
  id: string;
} & Omit<TProjectPermission, "projectId">;

export type TIntegrationAuthNorthflankSecretGroupDTO = {
  id: string;
  appId: string;

@ -148,6 +154,13 @@ export type TBitbucketWorkspace = {
  updated_on: string;
};

export type TBitbucketEnvironment = {
  type: string;
  uuid: string;
  name: string;
  slug: string;
};

export type TNorthflankSecretGroup = {
  id: string;
  name: string;
@ -334,7 +334,7 @@ export const getIntegrationOptions = async () => {
    docsLink: ""
  },
  {
    name: "BitBucket",
    name: "Bitbucket",
    slug: "bitbucket",
    image: "BitBucket.png",
    isAvailable: true,
@ -3631,7 +3631,14 @@ const syncSecretsBitBucket = async ({
  const res: { [key: string]: BitbucketVariable } = {};

  let hasNextPage = true;
  let variablesUrl = `${IntegrationUrls.BITBUCKET_API_URL}/2.0/repositories/${integration.targetEnvironmentId}/${integration.appId}/pipelines_config/variables`;

  const rootUrl = integration.targetServiceId
    ? // scope: deployment environment
      `${IntegrationUrls.BITBUCKET_API_URL}/2.0/repositories/${integration.targetEnvironmentId}/${integration.appId}/deployments_config/environments/${integration.targetServiceId}/variables`
    : // scope: repository
      `${IntegrationUrls.BITBUCKET_API_URL}/2.0/repositories/${integration.targetEnvironmentId}/${integration.appId}/pipelines_config/variables`;

  let variablesUrl = rootUrl;

  while (hasNextPage) {
    const { data }: { data: VariablesResponse } = await request.get(variablesUrl, {

@ -3658,7 +3665,7 @@ const syncSecretsBitBucket = async ({
    if (key in res) {
      // update existing secret
      await request.put(
        `${variablesUrl}/${res[key].uuid}`,
        `${rootUrl}/${res[key].uuid}`,
        {
          key,
          value: secrets[key].value,

@ -3674,7 +3681,7 @@ const syncSecretsBitBucket = async ({
    } else {
      // create new secret
      await request.post(
        variablesUrl,
        rootUrl,
        {
          key,
          value: secrets[key].value,
@ -268,7 +268,7 @@ export const orgServiceFactory = ({
    actorOrgId,
    actorAuthMethod,
    orgId,
    data: { name, slug, authEnforced, scimEnabled, defaultMembershipRoleSlug, enforceMfa }
    data: { name, slug, authEnforced, scimEnabled, defaultMembershipRoleSlug, enforceMfa, selectedMfaMethod }
  }: TUpdateOrgDTO) => {
    const appCfg = getConfig();
    const { permission } = await permissionService.getOrgPermission(actor, actorId, orgId, actorAuthMethod, actorOrgId);

@ -333,7 +333,8 @@ export const orgServiceFactory = ({
      authEnforced,
      scimEnabled,
      defaultMembershipRole,
      enforceMfa
      enforceMfa,
      selectedMfaMethod
    });
    if (!org) throw new NotFoundError({ message: `Organization with ID '${orgId}' not found` });
    return org;
@ -1,6 +1,6 @@
import { TOrgPermission } from "@app/lib/types";

import { ActorAuthMethod, ActorType } from "../auth/auth-type";
import { ActorAuthMethod, ActorType, MfaMethod } from "../auth/auth-type";

export type TUpdateOrgMembershipDTO = {
  userId: string;

@ -65,6 +65,7 @@ export type TUpdateOrgDTO = {
    scimEnabled: boolean;
    defaultMembershipRoleSlug: string;
    enforceMfa: boolean;
    selectedMfaMethod: MfaMethod;
  }>;
} & TOrgPermission;
@ -6,6 +6,7 @@ import { BadRequestError, DatabaseError } from "@app/lib/errors";
import { groupBy, removeTrailingSlash } from "@app/lib/fn";
import { ormify, selectAllTableCols } from "@app/lib/knex";
import { OrderByDirection } from "@app/lib/types";
import { isValidSecretPath } from "@app/lib/validator";
import { SecretsOrderBy } from "@app/services/secret/secret-types";

import { TFindFoldersDeepByParentIdsDTO } from "./secret-folder-types";

@ -214,6 +215,12 @@ export const secretFolderDALFactory = (db: TDbClient) => {
  const secretFolderOrm = ormify(db, TableName.SecretFolder);

  const findBySecretPath = async (projectId: string, environment: string, path: string, tx?: Knex) => {
    const isValidPath = isValidSecretPath(path);
    if (!isValidPath)
      throw new BadRequestError({
        message: "Invalid secret path. Only alphanumeric characters, dashes, and underscores are allowed."
      });

    try {
      const folder = await sqlFindFolderByPathQuery(
        tx || db.replicaNode(),

@ -236,6 +243,12 @@ export const secretFolderDALFactory = (db: TDbClient) => {

  // finds folders by path for multiple envs
  const findBySecretPathMultiEnv = async (projectId: string, environments: string[], path: string, tx?: Knex) => {
    const isValidPath = isValidSecretPath(path);
    if (!isValidPath)
      throw new BadRequestError({
        message: "Invalid secret path. Only alphanumeric characters, dashes, and underscores are allowed."
      });

    try {
      const pathDepth = removeTrailingSlash(path).split("/").filter(Boolean).length + 1;

@ -267,6 +280,12 @@ export const secretFolderDALFactory = (db: TDbClient) => {
  // even if its the original given /path1/path2
  // it will stop automatically at /path2
  const findClosestFolder = async (projectId: string, environment: string, path: string, tx?: Knex) => {
    const isValidPath = isValidSecretPath(path);
    if (!isValidPath)
      throw new BadRequestError({
        message: "Invalid secret path. Only alphanumeric characters, dashes, and underscores are allowed."
      });

    try {
      const folder = await sqlFindFolderByPathQuery(
        tx || db.replicaNode(),
@ -361,6 +361,10 @@ export const secretV2BridgeDALFactory = (db: TDbClient) => {
            void bd.whereILike(`${TableName.SecretV2}.key`, `%${filters?.search}%`);
          }
        }

        if (filters?.keys) {
          void bd.whereIn(`${TableName.SecretV2}.key`, filters.keys);
        }
      })
      .where((bd) => {
        void bd.whereNull(`${TableName.SecretV2}.userId`).orWhere({ userId: userId || null });
@ -10,9 +10,9 @@ import { TSecretFolderDALFactory } from "../secret-folder/secret-folder-dal";
import { TSecretV2BridgeDALFactory } from "./secret-v2-bridge-dal";
import { TFnSecretBulkDelete, TFnSecretBulkInsert, TFnSecretBulkUpdate } from "./secret-v2-bridge-types";

const INTERPOLATION_SYNTAX_REG = /\${([^}]+)}/g;
const INTERPOLATION_SYNTAX_REG = /\${([a-zA-Z0-9-_.]+)}/g;
// akhilmhdh: JS regex with global save state in .test
const INTERPOLATION_SYNTAX_REG_NON_GLOBAL = /\${([^}]+)}/;
const INTERPOLATION_SYNTAX_REG_NON_GLOBAL = /\${([a-zA-Z0-9-_.]+)}/;

export const shouldUseSecretV2Bridge = (version: number) => version === 3;
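The tightened pattern matches only well-formed reference names, so arbitrary `${...}` text with spaces or other characters no longer triggers expansion. A quick self-contained check of the new regex:

const REG = /\${([a-zA-Z0-9-_.]+)}/g;
"host=${DB_HOST}".match(REG);    // ["${DB_HOST}"] — valid reference, matched
"note: ${not a ref}".match(REG); // null — spaces fall outside the allowed character set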
@ -518,7 +521,10 @@ export const expandSecretReferencesFactory = ({
      }

      if (referencedSecretValue) {
        expandedValue = expandedValue.replaceAll(interpolationSyntax, referencedSecretValue);
        expandedValue = expandedValue.replaceAll(
          interpolationSyntax,
          () => referencedSecretValue // prevents special characters from triggering replacement patterns
        );
      }
    }
  }
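Passing the replacement as a function matters here because string replacements interpret `$`-sequences (such as `$&` or `$'`) as special patterns, and secret values can legitimately contain them. The callback form returns the value verbatim. A self-contained illustration of the difference:

const value = "pre ${REF} post";
value.replaceAll(/\${REF}/g, "pa$&ss");       // "pre pa${REF}ss post" — "$&" re-inserts the match
value.replaceAll(/\${REF}/g, () => "pa$&ss"); // "pre pa$&ss post" — returned string is used literally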
@ -150,9 +150,13 @@ export const secretV2BridgeServiceFactory = ({
      }
    });

    if (referredSecrets.length !== references.length)
    if (
      referredSecrets.length !==
      new Set(references.map(({ secretKey, secretPath, environment }) => `${secretKey}.${secretPath}.${environment}`))
        .size // only count unique references
    )
      throw new BadRequestError({
        message: `Referenced secret not found. Found only ${diff(
        message: `Referenced secret(s) not found: ${diff(
          references.map((el) => el.secretKey),
          referredSecrets.map((el) => el.key)
        ).join(",")}`
|
||||
type: KmsDataKey.SecretManager,
|
||||
projectId
|
||||
});
|
||||
const encryptedValue = secretValue
|
||||
? {
|
||||
encryptedValue: secretManagerEncryptor({ plainText: Buffer.from(secretValue) }).cipherTextBlob,
|
||||
references: getAllSecretReferences(secretValue).nestedReferences
|
||||
}
|
||||
: {};
|
||||
const encryptedValue =
|
||||
typeof secretValue === "string"
|
||||
? {
|
||||
encryptedValue: secretManagerEncryptor({ plainText: Buffer.from(secretValue) }).cipherTextBlob,
|
||||
references: getAllSecretReferences(secretValue).nestedReferences
|
||||
}
|
||||
: {};
|
||||
|
||||
if (secretValue) {
|
||||
const { nestedReferences, localReferences } = getAllSecretReferences(secretValue);
|
||||
@ -1161,7 +1166,7 @@ export const secretV2BridgeServiceFactory = ({
|
||||
const newSecrets = await secretDAL.transaction(async (tx) =>
|
||||
fnSecretBulkInsert({
|
||||
inputSecrets: inputSecrets.map((el) => {
|
||||
const references = secretReferencesGroupByInputSecretKey[el.secretKey].nestedReferences;
|
||||
const references = secretReferencesGroupByInputSecretKey[el.secretKey]?.nestedReferences;
|
||||
|
||||
return {
|
||||
version: 1,
|
||||
@ -1368,7 +1373,7 @@ export const secretV2BridgeServiceFactory = ({
|
||||
typeof el.secretValue !== "undefined"
|
||||
? {
|
||||
encryptedValue: secretManagerEncryptor({ plainText: Buffer.from(el.secretValue) }).cipherTextBlob,
|
||||
references: secretReferencesGroupByInputSecretKey[el.secretKey].nestedReferences
|
||||
references: secretReferencesGroupByInputSecretKey[el.secretKey]?.nestedReferences
|
||||
}
|
||||
: {};
|
||||
|
||||
|
@ -33,6 +33,7 @@ export type TGetSecretsDTO = {
  offset?: number;
  limit?: number;
  search?: string;
  keys?: string[];
} & TProjectPermission;

export type TGetASecretDTO = {

@ -294,6 +295,7 @@ export type TFindSecretsByFolderIdsFilter = {
  search?: string;
  tagSlugs?: string[];
  includeTagsInSearch?: boolean;
  keys?: string[];
};

export type TGetSecretsRawByFolderMappingsDTO = {
@ -185,6 +185,7 @@ export type TGetSecretsRawDTO = {
  offset?: number;
  limit?: number;
  search?: string;
  keys?: string[];
} & TProjectPermission;

export type TGetASecretRawDTO = {
@ -77,5 +77,21 @@ export const smtpServiceFactory = (cfg: TSmtpConfig) => {
    }
  };

  return { sendMail };
  const verify = async () => {
    const isConnected = smtp
      .verify()
      .then(async () => {
        logger.info("SMTP connected");
        return true;
      })
      .catch((err: Error) => {
        logger.error("SMTP error");
        logger.error(err);
        return false;
      });

    return isConnected;
  };

  return { sendMail, verify };
};
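The new verify helper makes the SMTP connection testable at boot. A hedged usage sketch — whether and where the server actually calls this at startup is not shown in this diff:

const smtpService = smtpServiceFactory(cfg);
const smtpOk = await smtpService.verify(); // true if the transport handshake succeeds
if (!smtpOk) logger.warn("SMTP not reachable; email-based MFA may fail");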
backend/src/services/totp/totp-config-dal.ts (new file)
@ -0,0 +1,11 @@
import { TDbClient } from "@app/db";
import { TableName } from "@app/db/schemas";
import { ormify } from "@app/lib/knex";

export type TTotpConfigDALFactory = ReturnType<typeof totpConfigDALFactory>;

export const totpConfigDALFactory = (db: TDbClient) => {
  const totpConfigDal = ormify(db, TableName.TotpConfig);

  return totpConfigDal;
};
backend/src/services/totp/totp-fns.ts (new file)
@ -0,0 +1,3 @@
import crypto from "node:crypto";

export const generateRecoveryCode = () => String(crypto.randomInt(10 ** 7, 10 ** 8 - 1));
backend/src/services/totp/totp-service.ts (new file)
@ -0,0 +1,270 @@
import { authenticator } from "otplib";

import { BadRequestError, ForbiddenRequestError, NotFoundError } from "@app/lib/errors";

import { TKmsServiceFactory } from "../kms/kms-service";
import { TUserDALFactory } from "../user/user-dal";
import { TTotpConfigDALFactory } from "./totp-config-dal";
import { generateRecoveryCode } from "./totp-fns";
import {
  TCreateUserTotpRecoveryCodesDTO,
  TDeleteUserTotpConfigDTO,
  TGetUserTotpConfigDTO,
  TRegisterUserTotpDTO,
  TVerifyUserTotpConfigDTO,
  TVerifyUserTotpDTO,
  TVerifyWithUserRecoveryCodeDTO
} from "./totp-types";

type TTotpServiceFactoryDep = {
  userDAL: TUserDALFactory;
  totpConfigDAL: TTotpConfigDALFactory;
  kmsService: TKmsServiceFactory;
};

export type TTotpServiceFactory = ReturnType<typeof totpServiceFactory>;

const MAX_RECOVERY_CODE_LIMIT = 10;

export const totpServiceFactory = ({ totpConfigDAL, kmsService, userDAL }: TTotpServiceFactoryDep) => {
  const getUserTotpConfig = async ({ userId }: TGetUserTotpConfigDTO) => {
    const totpConfig = await totpConfigDAL.findOne({
      userId
    });

    if (!totpConfig) {
      throw new NotFoundError({
        message: "TOTP configuration not found"
      });
    }

    if (!totpConfig.isVerified) {
      throw new BadRequestError({
        message: "TOTP configuration has not been verified"
      });
    }

    const decryptWithRoot = kmsService.decryptWithRootKey();
    const recoveryCodes = decryptWithRoot(totpConfig.encryptedRecoveryCodes).toString().split(",");

    return {
      isVerified: totpConfig.isVerified,
      recoveryCodes
    };
  };

  const registerUserTotp = async ({ userId }: TRegisterUserTotpDTO) => {
    const totpConfig = await totpConfigDAL.transaction(async (tx) => {
      const verifiedTotpConfig = await totpConfigDAL.findOne(
        {
          userId,
          isVerified: true
        },
        tx
      );

      if (verifiedTotpConfig) {
        throw new BadRequestError({
          message: "TOTP configuration for user already exists"
        });
      }

      const unverifiedTotpConfig = await totpConfigDAL.findOne({
        userId,
        isVerified: false
      });

      if (unverifiedTotpConfig) {
        return unverifiedTotpConfig;
      }

      const encryptWithRoot = kmsService.encryptWithRootKey();

      // create new TOTP configuration
      const secret = authenticator.generateSecret();
      const encryptedSecret = encryptWithRoot(Buffer.from(secret));
      const recoveryCodes = Array.from({ length: MAX_RECOVERY_CODE_LIMIT }).map(generateRecoveryCode);
      const encryptedRecoveryCodes = encryptWithRoot(Buffer.from(recoveryCodes.join(",")));
      const newTotpConfig = await totpConfigDAL.create({
        userId,
        encryptedRecoveryCodes,
        encryptedSecret
      });

      return newTotpConfig;
    });

    const user = await userDAL.findById(userId);
    const decryptWithRoot = kmsService.decryptWithRootKey();

    const secret = decryptWithRoot(totpConfig.encryptedSecret).toString();
    const recoveryCodes = decryptWithRoot(totpConfig.encryptedRecoveryCodes).toString().split(",");
    const otpUrl = authenticator.keyuri(user.username, "Infisical", secret);

    return {
      otpUrl,
      recoveryCodes
    };
  };

  const verifyUserTotpConfig = async ({ userId, totp }: TVerifyUserTotpConfigDTO) => {
    const totpConfig = await totpConfigDAL.findOne({
      userId
    });

    if (!totpConfig) {
      throw new NotFoundError({
        message: "TOTP configuration not found"
      });
    }

    if (totpConfig.isVerified) {
      throw new BadRequestError({
        message: "TOTP configuration has already been verified"
      });
    }

    const decryptWithRoot = kmsService.decryptWithRootKey();
    const secret = decryptWithRoot(totpConfig.encryptedSecret).toString();
    const isValid = authenticator.verify({
      token: totp,
      secret
    });

    if (isValid) {
      await totpConfigDAL.updateById(totpConfig.id, {
        isVerified: true
      });
    } else {
      throw new BadRequestError({
        message: "Invalid TOTP token"
      });
    }
  };

  const verifyUserTotp = async ({ userId, totp }: TVerifyUserTotpDTO) => {
    const totpConfig = await totpConfigDAL.findOne({
      userId
    });

    if (!totpConfig) {
      throw new NotFoundError({
        message: "TOTP configuration not found"
      });
    }

    if (!totpConfig.isVerified) {
      throw new BadRequestError({
        message: "TOTP configuration has not been verified"
      });
    }

    const decryptWithRoot = kmsService.decryptWithRootKey();
    const secret = decryptWithRoot(totpConfig.encryptedSecret).toString();
    const isValid = authenticator.verify({
      token: totp,
      secret
    });

    if (!isValid) {
      throw new ForbiddenRequestError({
        message: "Invalid TOTP"
      });
    }
  };

  const verifyWithUserRecoveryCode = async ({ userId, recoveryCode }: TVerifyWithUserRecoveryCodeDTO) => {
    const totpConfig = await totpConfigDAL.findOne({
      userId
    });

    if (!totpConfig) {
      throw new NotFoundError({
        message: "TOTP configuration not found"
      });
    }

    if (!totpConfig.isVerified) {
      throw new BadRequestError({
        message: "TOTP configuration has not been verified"
      });
    }

    const decryptWithRoot = kmsService.decryptWithRootKey();
    const encryptWithRoot = kmsService.encryptWithRootKey();

    const recoveryCodes = decryptWithRoot(totpConfig.encryptedRecoveryCodes).toString().split(",");
    const matchingCode = recoveryCodes.find((code) => recoveryCode === code);
    if (!matchingCode) {
      throw new ForbiddenRequestError({
        message: "Invalid TOTP recovery code"
      });
    }

    const updatedRecoveryCodes = recoveryCodes.filter((code) => code !== matchingCode);
    const encryptedRecoveryCodes = encryptWithRoot(Buffer.from(updatedRecoveryCodes.join(",")));
    await totpConfigDAL.updateById(totpConfig.id, {
      encryptedRecoveryCodes
    });
  };

  const deleteUserTotpConfig = async ({ userId }: TDeleteUserTotpConfigDTO) => {
    const totpConfig = await totpConfigDAL.findOne({
      userId
    });

    if (!totpConfig) {
      throw new NotFoundError({
        message: "TOTP configuration not found"
      });
    }

    await totpConfigDAL.deleteById(totpConfig.id);
  };

  const createUserTotpRecoveryCodes = async ({ userId }: TCreateUserTotpRecoveryCodesDTO) => {
    const decryptWithRoot = kmsService.decryptWithRootKey();
    const encryptWithRoot = kmsService.encryptWithRootKey();

    return totpConfigDAL.transaction(async (tx) => {
      const totpConfig = await totpConfigDAL.findOne(
        {
          userId,
          isVerified: true
        },
        tx
      );

      if (!totpConfig) {
        throw new NotFoundError({
          message: "Valid TOTP configuration not found"
        });
      }

      const recoveryCodes = decryptWithRoot(totpConfig.encryptedRecoveryCodes).toString().split(",");
      if (recoveryCodes.length >= MAX_RECOVERY_CODE_LIMIT) {
        throw new BadRequestError({
          message: `Cannot have more than ${MAX_RECOVERY_CODE_LIMIT} recovery codes at a time`
        });
      }

      const toGenerateCount = MAX_RECOVERY_CODE_LIMIT - recoveryCodes.length;
      const newRecoveryCodes = Array.from({ length: toGenerateCount }).map(generateRecoveryCode);
      const encryptedRecoveryCodes = encryptWithRoot(Buffer.from([...recoveryCodes, ...newRecoveryCodes].join(",")));

      await totpConfigDAL.updateById(totpConfig.id, {
        encryptedRecoveryCodes
      });
    });
  };

  return {
    registerUserTotp,
    verifyUserTotpConfig,
    getUserTotpConfig,
    verifyUserTotp,
    verifyWithUserRecoveryCode,
    deleteUserTotpConfig,
    createUserTotpRecoveryCodes
  };
};
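A hedged sketch of the registration-then-verification flow this service expects; the wiring of the DAL and KMS dependencies is assumed to follow the same factory pattern used elsewhere in the codebase:

const totpService = totpServiceFactory({ totpConfigDAL, kmsService, userDAL });
// Registration returns an otpauth:// URI to render as a QR code, plus recovery codes.
const { otpUrl, recoveryCodes } = await totpService.registerUserTotp({ userId });
// The config stays unverified (and re-registerable) until one valid code is submitted.
await totpService.verifyUserTotpConfig({ userId, totp: "123456" });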
backend/src/services/totp/totp-types.ts (new file)
@ -0,0 +1,30 @@
export type TRegisterUserTotpDTO = {
  userId: string;
};

export type TVerifyUserTotpConfigDTO = {
  userId: string;
  totp: string;
};

export type TGetUserTotpConfigDTO = {
  userId: string;
};

export type TVerifyUserTotpDTO = {
  userId: string;
  totp: string;
};

export type TVerifyWithUserRecoveryCodeDTO = {
  userId: string;
  recoveryCode: string;
};

export type TDeleteUserTotpConfigDTO = {
  userId: string;
};

export type TCreateUserTotpRecoveryCodesDTO = {
  userId: string;
};
@@ -15,7 +15,7 @@ import { AuthMethod } from "../auth/auth-type";
import { TGroupProjectDALFactory } from "../group-project/group-project-dal";
import { TProjectMembershipDALFactory } from "../project-membership/project-membership-dal";
import { TUserDALFactory } from "./user-dal";
import { TListUserGroupsDTO } from "./user-types";
import { TListUserGroupsDTO, TUpdateUserMfaDTO } from "./user-types";

type TUserServiceFactoryDep = {
  userDAL: Pick<
@@ -171,15 +171,24 @@ export const userServiceFactory = ({
    });
  };

  const toggleUserMfa = async (userId: string, isMfaEnabled: boolean) => {
  const updateUserMfa = async ({ userId, isMfaEnabled, selectedMfaMethod }: TUpdateUserMfaDTO) => {
    const user = await userDAL.findById(userId);

    if (!user || !user.email) throw new BadRequestError({ name: "Failed to toggle MFA" });

    let mfaMethods;
    if (isMfaEnabled === undefined) {
      mfaMethods = undefined;
    } else {
      mfaMethods = isMfaEnabled ? ["email"] : [];
    }

    const updatedUser = await userDAL.updateById(userId, {
      isMfaEnabled,
      mfaMethods: isMfaEnabled ? ["email"] : []
      mfaMethods,
      selectedMfaMethod
    });

    return updatedUser;
  };
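In the updated `updateUserMfa`, leaving `isMfaEnabled` undefined keeps the user's existing `mfaMethods` untouched, so callers can change just the preferred method. A hypothetical call (the `MfaMethod` enum member name is an assumption for illustration, not taken from this diff):

```typescript
// Only switches the preferred MFA method; isMfaEnabled and mfaMethods are left as-is
await updateUserMfa({ userId: user.id, selectedMfaMethod: MfaMethod.TOTP });
```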
@@ -327,7 +336,7 @@ export const userServiceFactory = ({
  return {
    sendEmailVerificationCode,
    verifyEmailVerificationCode,
    toggleUserMfa,
    updateUserMfa,
    updateUserName,
    updateAuthMethods,
    deleteUser,
@@ -1,5 +1,7 @@
import { TOrgPermission } from "@app/lib/types";

import { MfaMethod } from "../auth/auth-type";

export type TListUserGroupsDTO = {
  username: string;
} & Omit<TOrgPermission, "orgId">;
@@ -8,3 +10,9 @@ export enum UserEncryption {
  V1 = 1,
  V2 = 2
}

export type TUpdateUserMfaDTO = {
  userId: string;
  isMfaEnabled?: boolean;
  selectedMfaMethod?: MfaMethod;
};
@@ -138,6 +138,7 @@ type GetOrganizationsResponse struct {
type SelectOrganizationResponse struct {
	Token      string `json:"token"`
	MfaEnabled bool   `json:"isMfaEnabled"`
	MfaMethod  string `json:"mfaMethod"`
}

type SelectOrganizationRequest struct {
@@ -260,8 +261,9 @@ type GetLoginTwoV2Response struct {
}

type VerifyMfaTokenRequest struct {
	Email    string `json:"email"`
	MFAToken string `json:"mfaToken"`
	Email     string `json:"email"`
	MFAToken  string `json:"mfaToken"`
	MFAMethod string `json:"mfaMethod"`
}

type VerifyMfaTokenResponse struct {
@@ -79,13 +79,14 @@ var initCmd = &cobra.Command{
		if tokenResponse.MfaEnabled {
			i := 1
			for i < 6 {
				mfaVerifyCode := askForMFACode()

				mfaVerifyCode := askForMFACode(tokenResponse.MfaMethod)

				httpClient := resty.New()
				httpClient.SetAuthToken(tokenResponse.Token)
				verifyMFAresponse, mfaErrorResponse, requestError := api.CallVerifyMfaToken(httpClient, api.VerifyMfaTokenRequest{
					Email:    userCreds.UserCredentials.Email,
					MFAToken: mfaVerifyCode,
					Email:     userCreds.UserCredentials.Email,
					MFAToken:  mfaVerifyCode,
					MFAMethod: tokenResponse.MfaMethod,
				})
				if requestError != nil {
					util.HandleError(err)
@@ -99,7 +100,7 @@ var initCmd = &cobra.Command{
					break
				}

				if mfaErrorResponse.Context.Code == "mfa_expired" {
					util.PrintErrorMessageAndExit("Your 2FA verification code has expired, please try logging in again")
					break
@@ -343,7 +343,7 @@ func cliDefaultLogin(userCredentialsToBeStored *models.UserCredentials) {
	if loginTwoResponse.MfaEnabled {
		i := 1
		for i < 6 {
			mfaVerifyCode := askForMFACode()
			mfaVerifyCode := askForMFACode("email")

			httpClient := resty.New()
			httpClient.SetAuthToken(loginTwoResponse.Token)
@@ -532,7 +532,7 @@ func askForDomain() error {
	const (
		INFISICAL_CLOUD_US = "Infisical Cloud (US Region)"
		INFISICAL_CLOUD_EU = "Infisical Cloud (EU Region)"
		SELF_HOSTING       = "Self-Hosting"
		SELF_HOSTING       = "Self-Hosting or Dedicated Instance"
		ADD_NEW_DOMAIN     = "Add a new domain"
	)
@@ -756,13 +756,14 @@ func GetJwtTokenWithOrganizationId(oldJwtToken string, email string) string {
	if selectedOrgRes.MfaEnabled {
		i := 1
		for i < 6 {
			mfaVerifyCode := askForMFACode()
			mfaVerifyCode := askForMFACode(selectedOrgRes.MfaMethod)

			httpClient := resty.New()
			httpClient.SetAuthToken(selectedOrgRes.Token)
			verifyMFAresponse, mfaErrorResponse, requestError := api.CallVerifyMfaToken(httpClient, api.VerifyMfaTokenRequest{
				Email:    email,
				MFAToken: mfaVerifyCode,
				Email:     email,
				MFAToken:  mfaVerifyCode,
				MFAMethod: selectedOrgRes.MfaMethod,
			})
			if requestError != nil {
				util.HandleError(err)
@@ -817,9 +818,15 @@ func generateFromPassword(password string, salt []byte, p *params) (hash []byte,
	return hash, nil
}

func askForMFACode() string {
func askForMFACode(mfaMethod string) string {
	var label string
	if mfaMethod == "totp" {
		label = "Enter the verification code from your mobile authenticator app or use a recovery code"
	} else {
		label = "Enter the 2FA verification code sent to your email"
	}
	mfaCodePromptUI := promptui.Prompt{
		Label: "Enter the 2FA verification code sent to your email",
		Label: label,
	}

	mfaVerifyCode, err := mfaCodePromptUI.Run()
28
company/handbook/compensation.mdx
Normal file
@@ -0,0 +1,28 @@
---
title: "Compensation"
sidebarTitle: "Compensation"
description: "This guide explains how various compensation processes work at Infisical."
---

## Probation period

We are fully committed to ensuring that you are set up for success, but also understand that it may take some time to determine whether or not there is a long-term fit between you and Infisical.

The first 3 months of your employment with Infisical is a probation period. During this time, you can choose to end your contract with 1 week's notice. If we choose to end your contract, Infisical will pay you 4 weeks' pay, but will usually ask you to finish on the same day.

People in sales roles, such as Account Executives, have a 6-month probation period - this is to account for the fact that it can be difficult to establish whether or not someone is able to close contracts within their first 3 months, given sales cycles.

Your manager is responsible for monitoring and specifically reviewing your performance throughout this initial period. If under-performance is a concern, or if there is any hesitation regarding the future at Infisical, this should be discussed immediately between you and your manager.

## Severance

At Infisical, average performance gets a generous severance.

If Infisical decides to end your contract after the first 3 months of employment have been completed, we will give you 10 weeks' pay. It is likely we will ask you to stop working immediately.

If the decision to leave is yours, then we just require 1 month of notice.

We have structured notice in this way as we believe it is in neither Infisical's nor your interest to lock you into a role that is no longer right for you due to financial considerations. This extended notice period only applies in the case of under-performance or a change in business needs - if your contract is terminated due to gross misconduct then you may be dismissed without notice. If this policy conflicts with the requirements of your local jurisdiction, then those local laws will take priority.
@@ -58,6 +58,7 @@
      "pages": [
        "handbook/onboarding",
        "handbook/spending-money",
        "handbook/compensation",
        "handbook/time-off",
        "handbook/hiring",
        "handbook/meetings",
@@ -86,6 +86,7 @@ services:
      - .env
    ports:
      - 4000:4000
      - 9464:9464 # for OTEL collection of Prometheus metrics
    environment:
      - NODE_ENV=development
      - DB_CONNECTION_URI=postgres://infisical:infisical@db/infisical?sslmode=disable
@@ -95,6 +96,42 @@ services:
    extra_hosts:
      - "host.docker.internal:host-gateway"

  prometheus:
    image: prom/prometheus
    volumes:
      - ./prometheus.dev.yml:/etc/prometheus/prometheus.yml
    ports:
      - "9090:9090"
    command:
      - "--config.file=/etc/prometheus/prometheus.yml"
    profiles: [metrics]

  otel-collector:
    image: otel/opentelemetry-collector-contrib
    volumes:
      - ./otel-collector-config.yaml:/etc/otelcol-contrib/config.yaml
    ports:
      - 1888:1888 # pprof extension
      - 8888:8888 # Prometheus metrics exposed by the Collector
      - 8889:8889 # Prometheus exporter metrics
      - 13133:13133 # health_check extension
      - 4317:4317 # OTLP gRPC receiver
      - 4318:4318 # OTLP http receiver
      - 55679:55679 # zpages extension
    profiles: [metrics-otel]

  grafana:
    image: grafana/grafana
    container_name: grafana
    restart: unless-stopped
    environment:
      - GF_LOG_LEVEL=debug
    ports:
      - "3005:3000"
    volumes:
      - "grafana_storage:/var/lib/grafana"
    profiles: [metrics]

  frontend:
    container_name: infisical-dev-frontend
    restart: unless-stopped
@@ -166,3 +203,4 @@ volumes:
    driver: local
  ldap_data:
  ldap_config:
  grafana_storage:
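The compose file above mounts `./prometheus.dev.yml` into the Prometheus container, but the diff doesn't show its contents. A minimal sketch of what such a scrape config could look like (the job name and interval are illustrative assumptions; `backend:9464` assumes the backend service is named `backend` and matches the metrics port mapped above):

```yaml
global:
  scrape_interval: 15s

scrape_configs:
  - job_name: "infisical-backend"
    static_configs:
      # "backend" is the assumed compose service name; 9464 is the OTEL/Prometheus port above
      - targets: ["backend:9464"]
```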
@@ -69,4 +69,4 @@ volumes:
    driver: local

networks:
  infisical:
@@ -3,6 +3,3 @@
title: "Bulk Create"
openapi: "POST /api/v3/secrets/batch/raw"
---

<Tip>
This endpoint requires you to disable end-to-end encryption. For more information, you should consult this [note](https://infisical.com/docs/api-reference/overview/examples/note).
</Tip>

@@ -3,6 +3,3 @@
title: "Create"
openapi: "POST /api/v3/secrets/raw/{secretName}"
---

<Tip>
This endpoint requires you to disable end-to-end encryption. For more information, you should consult this [note](https://infisical.com/docs/api-reference/overview/examples/note).
</Tip>

@@ -3,6 +3,3 @@
title: "Bulk Delete"
openapi: "DELETE /api/v3/secrets/batch/raw"
---

<Tip>
This endpoint requires you to disable end-to-end encryption. For more information, you should consult this [note](https://infisical.com/docs/api-reference/overview/examples/note).
</Tip>

@@ -3,6 +3,3 @@
title: "Delete"
openapi: "DELETE /api/v3/secrets/raw/{secretName}"
---

<Tip>
This endpoint requires you to disable end-to-end encryption. For more information, you should consult this [note](https://infisical.com/docs/api-reference/overview/examples/note).
</Tip>

@@ -2,7 +2,3 @@
title: "List"
openapi: "GET /api/v3/secrets/raw"
---

<Tip>
This endpoint requires you to disable end-to-end encryption. For more information, you should consult this [note](https://infisical.com/docs/api-reference/overview/examples/note).
</Tip>

@@ -3,6 +3,3 @@
title: "Retrieve"
openapi: "GET /api/v3/secrets/raw/{secretName}"
---

<Tip>
This endpoint requires you to disable end-to-end encryption. For more information, you should consult this [note](https://infisical.com/docs/api-reference/overview/examples/note).
</Tip>

@@ -3,6 +3,3 @@
title: "Bulk Update"
openapi: "PATCH /api/v3/secrets/batch/raw"
---

<Tip>
This endpoint requires you to disable end-to-end encryption. For more information, you should consult this [note](https://infisical.com/docs/api-reference/overview/examples/note).
</Tip>

@@ -2,7 +2,3 @@
title: "Update"
openapi: "PATCH /api/v3/secrets/raw/{secretName}"
---

<Tip>
This endpoint requires you to disable end-to-end encryption. For more information, you should consult this [note](https://infisical.com/docs/api-reference/overview/examples/note).
</Tip>
@@ -9,7 +9,7 @@ You can use it across various environments, whether it's local development, CI/C
## Installation

<Tabs>
  <Tab title="MacOS">
    Use [brew](https://brew.sh/) package manager

    ```bash
@@ -21,9 +21,8 @@ You can use it across various environments, whether it's local development, CI/C
    ```bash
    brew update && brew upgrade infisical
    ```

  </Tab>
  <Tab title="Windows">
    Use [Scoop](https://scoop.sh/) package manager

    ```bash
@@ -40,7 +39,20 @@ You can use it across various environments, whether it's local development, CI/C
    scoop update infisical
    ```

  </Tab>
  <Tab title="NPM">
    Use [NPM](https://www.npmjs.com/) package manager

    ```bash
    npm install -g @infisical/cli
    ```

    ### Updates

    ```bash
    npm update -g @infisical/cli
    ```
  </Tab>
  <Tab title="Alpine">
    Install prerequisite
    ```bash
145
docs/documentation/platform/identities/oidc-auth/gitlab.mdx
Normal file
@@ -0,0 +1,145 @@
---
title: GitLab
description: "Learn how to authenticate GitLab pipelines with Infisical using OpenID Connect (OIDC)."
---

**OIDC Auth** is a platform-agnostic JWT-based authentication method that can be used to authenticate from any platform or environment using an identity provider with OpenID Connect.

## Diagram

The following sequence diagram illustrates the OIDC Auth workflow for authenticating GitLab pipelines with Infisical.

```mermaid
sequenceDiagram
    participant Client as GitLab Pipeline
    participant Idp as GitLab Identity Provider
    participant Infis as Infisical

    Client->>Idp: Step 1: Request identity token
    Idp-->>Client: Return JWT with verifiable claims

    Note over Client,Infis: Step 2: Login Operation
    Client->>Infis: Send signed JWT to /api/v1/auth/oidc-auth/login

    Note over Infis,Idp: Step 3: Query verification
    Infis->>Idp: Request JWT public key using OIDC Discovery
    Idp-->>Infis: Return public key

    Note over Infis: Step 4: JWT validation
    Infis->>Client: Return short-lived access token

    Note over Client,Infis: Step 5: Access Infisical API with Token
    Client->>Infis: Make authenticated requests using the short-lived access token
```

## Concept

At a high level, Infisical authenticates a client by verifying the JWT and checking that it meets specific requirements (e.g. it is issued by a trusted identity provider) at the `/api/v1/auth/oidc-auth/login` endpoint. If successful, Infisical returns a short-lived access token that can be used to make authenticated requests to the Infisical API.

To be more specific:

1. The GitLab pipeline requests an identity token from GitLab's identity provider.
2. The fetched identity token is sent to Infisical at the `/api/v1/auth/oidc-auth/login` endpoint.
3. Infisical fetches the public key that was used to sign the identity token from GitLab's identity provider using OIDC Discovery.
4. Infisical validates the JWT using the public key provided by the identity provider and checks that the subject, audience, and claims of the token match the set criteria.
5. If all is well, Infisical returns a short-lived access token that the GitLab pipeline can use to make authenticated requests to the Infisical API.

<Note>
  Infisical needs network-level access to GitLab's identity provider endpoints.
</Note>
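As a rough sketch of the login operation in step 2 (the endpoint path comes from this guide; the `identityId` and `jwt` request fields and all values are assumptions for illustration, not confirmed by this document):

```bash
curl -s -X POST "https://app.infisical.com/api/v1/auth/oidc-auth/login" \
  -H "Content-Type: application/json" \
  -d '{
        "identityId": "<machine-identity-id>",
        "jwt": "<gitlab-id-token>"
      }'
# A successful response contains the short-lived access token used for subsequent API calls
```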
## Guide

In the following steps, we explore how to create and use identities to access the Infisical API using the OIDC Auth authentication method.

<Steps>
  <Step title="Creating an identity">
    To create an identity, head to your Organization Settings > Access Control > Machine Identities and press **Create identity**.

    

    When creating an identity, you specify an organization level [role](/documentation/platform/role-based-access-controls) for it to assume; you can configure roles in Organization Settings > Access Control > Organization Roles.

    

    Now input a few details for your new identity. Here's some guidance for each field:

    - Name (required): A friendly name for the identity.
    - Role (required): A role from the **Organization Roles** tab for the identity to assume. The organization role assigned will determine what organization level resources this identity can have access to.

    Once you've created an identity, you'll be redirected to a page where you can manage the identity.

    

    Since the identity has been configured with Universal Auth by default, you should re-configure it to use OIDC Auth instead. To do this, press to edit the **Authentication** section, remove the existing Universal Auth configuration, and add a new OIDC Auth configuration onto the identity.

    

    

    <Warning>Restrict access by configuring the Subject, Audiences, and Claims fields</Warning>

    Here's some more guidance on each field:

    - OIDC Discovery URL: The URL used to retrieve the OpenID Connect configuration from the identity provider. This will be used to fetch the public key needed for verifying the provided JWT. For GitLab SaaS (GitLab.com), this should be set to `https://gitlab.com`. For self-hosted GitLab instances, use the domain of your GitLab instance.
    - Issuer: The unique identifier of the identity provider issuing the JWT. This value is used to verify the iss (issuer) claim in the JWT to ensure the token is issued by a trusted provider. This should also be set to the domain of the GitLab instance.
    - CA Certificate: The PEM-encoded CA cert for establishing secure communication with the Identity Provider endpoints. For GitLab.com, this can be left blank.
    - Subject: The expected principal that is the subject of the JWT. For GitLab pipelines, this should be set to a string that uniquely identifies the pipeline and its context, in the format `project_path:{group}/{project}:ref_type:{type}:ref:{branch_name}` (e.g., `project_path:example-group/example-project:ref_type:branch:ref:main`).
    - Claims: Additional information or attributes that should be present in the JWT for it to be valid. You can refer to GitLab's [documentation](https://docs.gitlab.com/ee/ci/secrets/id_token_authentication.html#token-payload) for the list of supported claims.
    - Access Token TTL (default is `2592000`, equivalent to 30 days): The lifetime for an access token in seconds. This value will be referenced at renewal time.
    - Access Token Max TTL (default is `2592000`, equivalent to 30 days): The maximum lifetime for an access token in seconds. This value will be referenced at renewal time.
    - Access Token Max Number of Uses (default is `0`): The maximum number of times that an access token can be used; a value of `0` implies an infinite number of uses.
    - Access Token Trusted IPs: The IPs or CIDR ranges that access tokens can be used from. By default, each token is given the `0.0.0.0/0` entry, allowing usage from any network address.

    <Tip>For more details on the appropriate values for the OIDC fields, refer to GitLab's [documentation](https://docs.gitlab.com/ee/ci/secrets/id_token_authentication.html#token-payload).</Tip>
    <Info>The `subject`, `audiences`, and `claims` fields support glob pattern matching; however, we highly recommend using hardcoded values whenever possible.</Info>
  </Step>
  <Step title="Adding an identity to a project">
    To enable the identity to access project-level resources such as secrets within a specific project, you should add it to that project.

    To do this, head over to the project you want to add the identity to and go to Project Settings > Access Control > Machine Identities and press **Add identity**.

    Next, select the identity you want to add to the project and the project level role you want to allow it to assume. The project role assigned will determine what project level resources this identity can have access to.

    

    
  </Step>

  <Step title="Accessing the Infisical API with the identity">
    As a demonstration, we will use the Infisical CLI to fetch Infisical secrets and utilize them within a GitLab pipeline.

    To access Infisical secrets as the identity, you need to use an identity token from GitLab which matches the OIDC configuration defined for the machine identity.
    This can be done by defining the `id_tokens` property. The resulting token would then be used to log in with OIDC like the following: `infisical login --method=oidc-auth --oidc-jwt=$GITLAB_TOKEN`

    Below is a complete example of how a GitLab pipeline can be configured to work with secrets from Infisical using the Infisical CLI with OIDC Auth:

    ```yaml
    image: ubuntu

    stages:
      - build

    build-job:
      stage: build
      id_tokens:
        INFISICAL_ID_TOKEN:
          aud: infisical-aud-test
      script:
        - apt update && apt install -y curl
        - curl -1sLf 'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.deb.sh' | bash
        - apt-get update && apt-get install -y infisical
        - export INFISICAL_TOKEN=$(infisical login --method=oidc-auth --machine-identity-id=4e807a78-1b1c-4bd6-9609-ef2b0cf4fd54 --oidc-jwt=$INFISICAL_ID_TOKEN --silent --plain)
        - infisical run --projectId=1d0443c1-cd43-4b3a-91a3-9d5f81254a89 --env=dev -- npm run build
    ```

    The `id_tokens` keyword is used to request an ID token for the job. In this example, an ID token named `INFISICAL_ID_TOKEN` is requested with the audience (`aud`) claim set to "infisical-aud-test". This ID token will be used to authenticate with Infisical.

    <Note>
      Each identity access token has a time-to-live (TTL) which you can infer from the response of the login operation; the default TTL is `7200` seconds, which can be adjusted.

      If an identity access token expires, it can no longer authenticate with the Infisical API. In this case, a new access token should be obtained by performing another login operation.
    </Note>
  </Step>
</Steps>
262
docs/documentation/platform/kms/hsm-integration.mdx
Normal file
@@ -0,0 +1,262 @@
---
title: "HSM Integration"
description: "Learn more about integrating an HSM with Infisical KMS."
---

<Note>
  Changing the encryption strategy for your instance is an Enterprise-only feature.
  This section is intended for users who have obtained an Enterprise license and are on-premise.

  Please reach out to sales@infisical.com if you have any questions.
</Note>

## Overview

Infisical KMS currently supports two encryption strategies:

1. **Standard Encryption**: This is the default encryption strategy used by Infisical KMS. It uses a software-protected encryption key to encrypt KMS keys within your Infisical instance. The root encryption key is defined by setting the `ENCRYPTION_KEY` environment variable.
2. **Hardware Security Module (HSM)**: This encryption strategy uses a Hardware Security Module (HSM) to create a root encryption key that is stored on a physical device to encrypt the KMS keys within your instance.

## Hardware Security Module (HSM)



Using a hardware security module comes with the added benefit of having a secure and tamper-proof device to store your encryption keys. This ensures that your data is protected from unauthorized access.

<Warning>
  All encryption keys used for cryptographic operations are stored within the HSM. This means that if the HSM is lost or destroyed, you will no longer be able to decrypt your data stored within Infisical. Most providers offer recovery options for HSM devices, which you should consider when setting up an HSM device.
</Warning>

Enabling HSM encryption has a set of key benefits:

1. **Root Key Wrapping**: The root KMS encryption key that is used to secure your Infisical instance will be encrypted using the HSM device rather than the standard software-protected key.
2. **FIPS 140-2/3 Compliance**: Using an HSM device ensures that your Infisical instance is FIPS 140-2 or FIPS 140-3 compliant. For FIPS 140-3, ensure that your HSM is FIPS 140-3 validated.

#### Caveats

- **Performance**: Using an HSM device can have a performance impact on your Infisical instance due to the additional latency introduced by the HSM device. This is, however, only noticeable when your instance(s) start up or when the encryption strategy is changed.
- **Key Recovery**: If the HSM device is lost or destroyed, you will no longer be able to decrypt your data stored within Infisical. Most HSM providers offer recovery options, which you should consider when setting up an HSM device.

### Requirements

- An Infisical instance with a version number that is equal to or greater than `v0.91.0`.
- If you are using Docker, your instance must be using the `infisical/infisical-fips` image.
- An HSM device from a provider such as [Thales Luna HSM](https://cpl.thalesgroup.com/encryption/data-protection-on-demand/services/luna-cloud-hsm), [AWS CloudHSM](https://aws.amazon.com/cloudhsm/), or others.

### FIPS Compliance

FIPS, also known as the Federal Information Processing Standard, is a set of standards used to accredit cryptographic modules. FIPS 140-2 and FIPS 140-3 are the two most common standards for cryptographic modules. If your HSM uses FIPS 140-3 validated hardware, Infisical will automatically be FIPS 140-3 compliant. If your HSM uses FIPS 140-2 validated hardware, Infisical will be FIPS 140-2 compliant.

HSM devices are especially useful for organizations that operate in regulated industries such as healthcare, finance, and government, where data security and compliance are of the utmost importance.

For organizations that work with US government agencies, FIPS compliance is almost always a requirement when dealing with sensitive information. FIPS compliance ensures that the cryptographic modules used by the organization meet the security requirements set by the US government.

## Setup Instructions

<Steps>
  <Step title="Setting up an HSM Device">
    To set up HSM encryption, you need to configure an HSM provider and HSM key. The HSM provider is used to connect to the HSM device, and the HSM key is used to encrypt Infisical's KMS keys. We recommend using a Cloud HSM provider such as [Thales Luna HSM](https://cpl.thalesgroup.com/encryption/data-protection-on-demand/services/luna-cloud-hsm) or [AWS CloudHSM](https://aws.amazon.com/cloudhsm/).

    You need to follow the instructions provided by the HSM provider to set up the HSM device. Once the HSM device is set up, it can be used within Infisical.

    After setting up the HSM from your provider, you will have a set of files that you can use to access the HSM. These files need to be present on the machine where Infisical is running.
    If you are using containers, you will need to mount the folder where these files are stored as a volume in the container.

    The setup process for an HSM device varies depending on the provider. We have created a guide for Thales Luna Cloud HSM, which you can find below.
  </Step>
  <Step title="Configure HSM on Infisical">
    <Warning>
      Are you using Docker? If you are using Docker, please follow the instructions in the [Using HSMs with Docker](#using-hsms-with-docker) section.
    </Warning>

    Configuring the HSM on Infisical requires setting a set of environment variables:

    - `HSM_LIB_PATH`: The path to the PKCS#11 library provided by the HSM provider. This usually comes in the form of a `.so` file for Linux and macOS, or a `.dll` file for Windows. For Docker, you need to mount the library path as a volume; further instructions can be found below. If you are using Docker, make sure to set the `HSM_LIB_PATH` environment variable to the path where the library is mounted in the container.
    - `HSM_PIN`: The PKCS#11 PIN to use for authentication with the HSM device.
    - `HSM_SLOT`: The slot number to use for the HSM device. This is typically between `0` and `5` for most HSM devices.
    - `HSM_KEY_LABEL`: The label of the key to use for encryption. **Please note that if no key is found with the provided label, the HSM will create a new key with the provided label.**

    You can read more about the [default instance configurations](/self-hosting/configuration/envars) here.
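    Put together, a minimal configuration might look like the following (all values are illustrative placeholders, not defaults):

    ```bash
    # Illustrative values only - the library path, PIN, slot, and label depend on your HSM setup
    export HSM_LIB_PATH="/usr/safenet/lunaclient/libs/64/libCryptoki2.so"
    export HSM_PIN="<your-hsm-pin>"
    export HSM_SLOT=0
    export HSM_KEY_LABEL="hsm-root-key"
    ```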
  </Step>
  <Step title="Restart Instance">
    After setting up the HSM, you need to restart the Infisical instance for the changes to take effect.
  </Step>
  <Step title="Navigate to the Server Admin Console">
    
  </Step>
  <Step title="Update the KMS Encryption Strategy to HSM">
    

    Once you press the 'Save' button, your Infisical instance will immediately switch to the HSM encryption strategy. This will re-encrypt your KMS key with keys from the HSM device.
  </Step>
  <Step title="Verify Encryption Strategy">
    To verify that the HSM was correctly configured, you can try creating a new secret in one of your projects. If the secret is created successfully, the HSM is now being used for encryption.
  </Step>
</Steps>

## Using HSMs with Docker

When using Docker, you need to mount the path containing the HSM client files. This section covers how to configure your Infisical instance to use an HSM with Docker.

<Tabs>
  <Tab title="Thales Luna Cloud HSM">
    <Steps>
      <Step title="Create HSM client folder">
        When using Docker, you are able to set your HSM library path to any location on your machine. In this example, we are going to use `/etc/luna-docker`.

        ```bash
        mkdir /etc/luna-docker
        ```

        After [setting up your Luna Cloud HSM client](https://thalesdocs.com/gphsm/luna/7/docs/network/Content/install/client_install/add_dpod.htm), you should have a set of files, referred to as the HSM client. You don't need all the files, but for simplicity we recommend copying all the files from the client.

        A folder structure of a client folder will often look like this:
        ```
        partition-ca-certificate.pem
        partition-certificate.pem
        server-certificate.pem
        Chrystoki.conf
        /plugins
          libcloud.plugin
        /lock
        /libs
          /64
            libCryptoki2.so
        /jsp
          LunaProvider.jar
          /64
            libLunaAPI.so
        /etc
          openssl.cnf
        /bin
          /64
            ckdemo
            lunacm
            multitoken
            vtl
        ```

        The most important parts of the client folder are the `Chrystoki.conf` file and the `libs`, `plugins`, and `jsp` folders. You need to copy these files to the folder you created in the first step.

        ```bash
        cp -r /<path-to-where-your-luna-client-is-located> /etc/luna-docker
        ```
      </Step>

      <Step title="Update Chrystoki.conf">
        The `Chrystoki.conf` file is used to configure the HSM client. You need to update the `Chrystoki.conf` file to point to the correct file paths.

        In this example, we will be mounting the `/etc/luna-docker` folder to the Docker container under a different path. The path we will use in this example is `/usr/safenet/lunaclient`. This means `/etc/luna-docker` will be mounted to `/usr/safenet/lunaclient` in the Docker container.

        An example config file will look like this:

        ```Chrystoki.conf
        Chrystoki2 = {
          # This path points to the mounted path, /usr/safenet/lunaclient
          LibUNIX64 = /usr/safenet/lunaclient/libs/64/libCryptoki2.so;
        }

        Luna = {
          DefaultTimeOut = 500000;
          PEDTimeout1 = 100000;
          PEDTimeout2 = 200000;
          PEDTimeout3 = 20000;
          KeypairGenTimeOut = 2700000;
          CloningCommandTimeOut = 300000;
          CommandTimeOutPedSet = 720000;
        }

        CardReader = {
          LunaG5Slots = 0;
          RemoteCommand = 1;
        }

        Misc = {
          # Update the paths to point to the mounted path if your folder structure is different from the one mentioned in the previous step.
          PluginModuleDir = /usr/safenet/lunaclient/plugins;
          MutexFolder = /usr/safenet/lunaclient/lock;
          PE1746Enabled = 1;
          ToolsDir = /usr/bin;
        }

        Presentation = {
          ShowEmptySlots = no;
        }

        LunaSA Client = {
          ReceiveTimeout = 20000;
          # Update the paths to point to the mounted path if your folder structure is different from the one mentioned in the previous step.
          SSLConfigFile = /usr/safenet/lunaclient/etc/openssl.cnf;
          ClientPrivKeyFile = ./etc/ClientNameKey.pem;
          ClientCertFile = ./etc/ClientNameCert.pem;
          ServerCAFile = ./etc/CAFile.pem;
          NetClient = 1;
          TCPKeepAlive = 1;
        }

        REST = {
          AppLogLevel = error
          ServerName = <REDACTED>;
          ServerPort = 443;
          AuthTokenConfigURI = <REDACTED>;
          AuthTokenClientId = <REDACTED>;
          AuthTokenClientSecret = <REDACTED>;
          RestClient = 1;
          ClientTimeoutSec = 120;
          ClientPoolSize = 32;
          ClientEofRetryCount = 15;
          ClientConnectRetryCount = 900;
          ClientConnectIntervalMs = 1000;
        }
        XTC = {
          Enabled = 1;
          TimeoutSec = 600;
        }
        ```

        Save the file after updating the paths.
      </Step>

      <Step title="Run Docker">
        Running Docker with HSM encryption requires setting the HSM-related environment variables as mentioned previously in the [HSM setup instructions](#setup-instructions). You can set these environment variables in your Docker run command.

        We are setting the environment variables for Docker via the command line in this example, but you can also pass in a `.env` file to set these environment variables.

        <Warning>
          If no key is found with the provided key label, the HSM will create a new key with the provided label.
          Infisical depends on an AES and HMAC key being present in the HSM. If these keys are not present, Infisical will create them. The AES key label will be the value of the `HSM_KEY_LABEL` environment variable, and the HMAC key label will be the value of the `HSM_KEY_LABEL` environment variable with the suffix `_HMAC`.
        </Warning>

        ```bash
        # The HSM-related variables come first; the rest are standard Infisical settings
        docker run -p 80:8080 \
          -v /etc/luna-docker:/usr/safenet/lunaclient \
          -e HSM_LIB_PATH="/usr/safenet/lunaclient/libs/64/libCryptoki2.so" \
          -e HSM_PIN="<your-hsm-device-pin>" \
          -e HSM_SLOT=<hsm-device-slot> \
          -e HSM_KEY_LABEL="<your-key-label>" \
          -e ENCRYPTION_KEY="<>" \
          -e AUTH_SECRET="<>" \
          -e DB_CONNECTION_URI="<>" \
          -e REDIS_URL="<>" \
          -e SITE_URL="<>" \
          infisical/infisical-fips:<version> # Replace <version> with the version you want to use
        ```

        We recommend reading further about [using Infisical with Docker](/self-hosting/deployment-options/standalone-infisical).
      </Step>
    </Steps>
    After following these steps, your Docker setup will be ready to use HSM encryption.
  </Tab>
</Tabs>

## Disabling HSM Encryption

To disable HSM encryption, navigate to Infisical's Server Admin Console and set the KMS encryption strategy to `Software-based Encryption`. This will revert the encryption strategy back to the default software-based encryption.

<Note>
  In order to disable HSM encryption, the Infisical instance must be able to access the HSM device. If the HSM device is no longer accessible, you will not be able to disable HSM encryption.
</Note>
@@ -8,6 +8,10 @@ description: "Learn how to manage and use cryptographic keys with Infisical."

Infisical can be used as a Key Management System (KMS), referred to as Infisical KMS, to centralize management of keys to be used for cryptographic operations like encryption/decryption.

By default, your Infisical data such as projects and the data within them are encrypted at rest using Infisical's own KMS. This ensures that your data is secure and protected from unauthorized access.

If you are on-premise, your KMS root key is generated at random and defined with the `ROOT_ENCRYPTION_KEY` environment variable. You can also use a Hardware Security Module (HSM) to create the root key. Read more about [HSM](/docs/documentation/platform/kms/encryption-strategies).

<Note>
  Keys managed in KMS are not extractable from the platform. Additionally, data
  is never stored when performing cryptographic operations.
@@ -4,19 +4,18 @@ sidebarTitle: "MFA"
description: "Learn how to secure your Infisical account with MFA."
---

MFA requires users to provide multiple forms of identification to access their account. Currently, this means logging in with your password and a 6-digit code sent to your email.
MFA requires users to provide multiple forms of identification to access their account.

## Email 2FA

Check the box in Personal Settings > Two-factor Authentication to enable email-based 2FA.
If 2-factor authentication is enabled in the Personal settings page, email will be used for MFA by default.



<Note>
  Infisical currently supports email-based 2FA. We're actively working on
  building support for other forms of identification via SMS and Authenticator
  App.
</Note>
## Mobile Authenticator 2FA

You can use any mobile authenticator app (Authy, Google Authenticator, Duo, etc.) to secure your account. After registering with an authenticator, select **Mobile Authenticator** as your 2FA method.


## Entra ID / Azure AD MFA

@@ -25,32 +24,39 @@ Check the box in Personal Settings > Two-factor Authentication to enable email-b

We also encourage you to have your team download and set up the
[Microsoft Authenticator App](https://www.microsoft.com/en-us/security/mobile-authenticator-app) prior to enabling MFA.

</Note>

<Steps>
  <Step title="Open your Infisical Application in the Microsoft Entra Admin Center">
    
  </Step>
  <Step title="Tap on Conditional Access under the Security Tab">
    
  </Step>
  <Step title="Tap on Create New Policy from Templates">
    
  </Step>
  <Step title="Select Require MFA for All Users and Tap on Review + Create">
    
    <Note>
      By default all users except the configuring admin will be set up to require
      MFA. Microsoft encourages keeping at least one admin excluded from MFA to
      prevent accidental lockout.
    </Note>
  </Step>
  <Step title="Set Policy State to Enabled and Tap on Create">
    
  </Step>
  <Step title="MFA is now Required When Accessing Infisical">
    
    <Note>
      If users have not set up MFA for Entra / Azure they will be prompted to do
      so at this time.
    </Note>
  </Step>
</Steps>
@@ -35,6 +35,7 @@ These endpoints are exposed on port 8443 under the .well-known/est path e.g.



- **Disable Bootstrap Certificate Validation** - Enable this if your devices are not configured with a bootstrap certificate.
- **Certificate Authority Chain** - This is the certificate chain used to validate your devices' manufacturing/pre-installed certificates. This will be used to authenticate your devices with Infisical's EST server.
- **Passphrase** - This is also used to authenticate your devices with Infisical's EST server. When configuring the clients, use the value defined here as the EST password.