Mirror of https://github.com/Infisical/infisical.git (synced 2025-07-18 01:29:25 +00:00)

Compare commits: doc/add-gi ... fix-remove — 143 commits
Commit SHA1s (author and date columns as in the original listing), in order:
46ad1d47a9, 64bfa4f334, e3eb14bfd9, 24b50651c9, d7b494c6f8, 93208afb36, 1a084d8fcf, dd4f133c6c, c41d27e1ae, 1866ed8d23,
7b3b232dde, 9d618b4ae9, 5330ab2171, 662e588c22, 90057d80ff, 1eda7aaaac, 00dcadbc08, 7a7289ebd0, e5d4677fd6, bce3f3d676,
300372fa98, 47a4f8bae9, 863719f296, 7317dc1cf5, 75df898e78, 0de6add3f7, 0c008b6393, 0c3894496c, 35fbd5d49d, d03b453e3d,
96e331b678, d4d468660d, 75a4965928, 660c09ded4, b5287d91c0, 6a17763237, f2bd3daea2, 9a62efea4f, 506c30bcdb, 735ad4ff65,
41e36dfcef, 421d8578b7, 6685f8aa0a, d6c37c1065, 54f3f94185, 907537f7c0, 61263b9384, d71c85e052, b6d8be2105, 0693f81d0a,
61d516ef35, 31fc64fb4c, 8bf7e4c4d1, 2027d4b44e, d401c9074e, afe35dbbb5, 6ff1602fd5, 6603364749, 53bea22b85, 7c84adc1c2,
fa8d6735a1, a6137f267d, d521ee7b7e, 827931e416, faa83344a7, 3be3d807d2, 9f7ea3c4e5, e67218f170, 269c40c67c, 089a7e880b,
64ec741f1a, c98233ddaf, ae17981c41, 6c49c7da3c, 2de04b6fe5, 5c9ec1e4be, ba89491d4c, 483e596a7a, 65f122bd41, 682b552fdc,
d4cfd0b6ed, ba1fd8a3f7, e8f09d2c7b, 774371a218, c4b54de303, 433971a72d, 4acf9413f0, f0549cab98, d75e49dce5, 8819abd710,
796f76da46, d6e1ed4d1e, 1295b68d80, c79f84c064, d0c50960ef, 85089a08e1, 4053078d95, 6bae3628c0, 4cb935dae7, ccad684ab2,
fd77708cad, 9aebd712d1, 05f07b25ac, 5b0dbf04b2, b050db84ab, 8fef6911f1, 44ba31a743, 6bdbac4750, 60fb195706, c8109b4e84,
1f2b0443cc, dd1cabf9f6, 8b781b925a, ddcf5b576b, 7138b392f2, bfce1021fb, 93c0313b28, 8cfc217519, d272c6217a, 2fe2ddd9fc,
e330ddd5ee, 7aba9c1a50, 4cd8e0fa67, ea3d164ead, df468e4865, 66e96018c4, 3b02eedca6, a55fe2b788, 5d7a267f1d, b16ab6f763,
334a728259, 4a3143e689, 14810de054, 8cfcbaa12c, ada63b9e7d, 3f6a0c77f1, 9e4b66e215, 8a14914bc3, fc3a409164, ffc58b0313,
9a7e05369c, 33b49f4466, 60895537a7
@@ -74,6 +74,14 @@ CAPTCHA_SECRET=
 
 NEXT_PUBLIC_CAPTCHA_SITE_KEY=
 
+OTEL_TELEMETRY_COLLECTION_ENABLED=
+OTEL_EXPORT_TYPE=
+OTEL_EXPORT_OTLP_ENDPOINT=
+OTEL_OTLP_PUSH_INTERVAL=
+
+OTEL_COLLECTOR_BASIC_AUTH_USERNAME=
+OTEL_COLLECTOR_BASIC_AUTH_PASSWORD=
+
 PLAIN_API_KEY=
 PLAIN_WISH_LABEL_IDS=
 
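The hunk above adds the new OTEL_* keys to the example environment file. As a rough sketch — not the repository's actual config module — these variables would typically be validated on boot along the lines of the envSchema change shown near the end of this compare view; the schema below is a standalone illustration with assumed semantics for OTEL_EXPORT_TYPE:

```ts
import { z } from "zod";

// Hypothetical standalone schema; key names mirror the .env additions above.
const otelEnvSchema = z.object({
  OTEL_TELEMETRY_COLLECTION_ENABLED: z
    .enum(["true", "false"])
    .default("false")
    .transform((v) => v === "true"),
  OTEL_EXPORT_TYPE: z.string().optional(), // e.g. "prometheus" or "otlp" (assumed values)
  OTEL_EXPORT_OTLP_ENDPOINT: z.string().optional(),
  OTEL_OTLP_PUSH_INTERVAL: z.coerce.number().default(30000),
  OTEL_COLLECTOR_BASIC_AUTH_USERNAME: z.string().optional(),
  OTEL_COLLECTOR_BASIC_AUTH_PASSWORD: z.string().optional()
});

const otelEnv = otelEnvSchema.parse(process.env);
console.log(otelEnv.OTEL_TELEMETRY_COLLECTION_ENABLED); // false unless explicitly set to "true"
```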
@@ -10,8 +10,7 @@ on:
 
 permissions:
   contents: write
-  # packages: write
-  # issues: write
 jobs:
   cli-integration-tests:
     name: Run tests before deployment
@@ -26,6 +25,63 @@ jobs:
       CLI_TESTS_USER_PASSWORD: ${{ secrets.CLI_TESTS_USER_PASSWORD }}
       CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE: ${{ secrets.CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE }}
 
+  npm-release:
+    runs-on: ubuntu-20.04
+    env:
+      working-directory: ./npm
+    needs:
+      - cli-integration-tests
+      - goreleaser
+    steps:
+      - uses: actions/checkout@v3
+        with:
+          fetch-depth: 0
+
+      - name: Extract version
+        run: |
+          VERSION=$(echo ${{ github.ref_name }} | sed 's/infisical-cli\/v//')
+          echo "Version extracted: $VERSION"
+          echo "CLI_VERSION=$VERSION" >> $GITHUB_ENV
+
+      - name: Print version
+        run: echo ${{ env.CLI_VERSION }}
+
+      - name: Setup Node
+        uses: actions/setup-node@8f152de45cc393bb48ce5d89d36b731f54556e65 # v4.0.0
+        with:
+          node-version: 20
+          cache: "npm"
+          cache-dependency-path: ./npm/package-lock.json
+      - name: Install dependencies
+        working-directory: ${{ env.working-directory }}
+        run: npm install --ignore-scripts
+
+      - name: Set NPM version
+        working-directory: ${{ env.working-directory }}
+        run: npm version ${{ env.CLI_VERSION }} --allow-same-version --no-git-tag-version
+
+      - name: Setup NPM
+        working-directory: ${{ env.working-directory }}
+        run: |
+          echo 'registry="https://registry.npmjs.org/"' > ./.npmrc
+          echo "//registry.npmjs.org/:_authToken=$NPM_TOKEN" >> ./.npmrc
+
+          echo 'registry="https://registry.npmjs.org/"' > ~/.npmrc
+          echo "//registry.npmjs.org/:_authToken=$NPM_TOKEN" >> ~/.npmrc
+        env:
+          NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
+
+      - name: Pack NPM
+        working-directory: ${{ env.working-directory }}
+        run: npm pack
+
+      - name: Publish NPM
+        working-directory: ${{ env.working-directory }}
+        run: npm publish --tarball=./infisical-sdk-${{github.ref_name}} --access public --registry=https://registry.npmjs.org/
+        env:
+          NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
+          NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
+
   goreleaser:
     runs-on: ubuntu-20.04
     needs: [cli-integration-tests]
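The "Extract version" step above strips the `infisical-cli/v` prefix from the pushed tag. A tiny TypeScript equivalent of that transformation, for readers who want to sanity-check it (the tag value is invented):

```ts
// Mirrors the sed expression used in the workflow step above.
const refName = "infisical-cli/v0.31.0"; // example tag name, not a real release
const cliVersion = refName.replace(/^infisical-cli\/v/, "");

console.log(cliVersion); // "0.31.0"
```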
.gitignore (vendored) — 2 changes

@@ -71,3 +71,5 @@ frontend-build
 cli/infisical-merge
 cli/test/infisical-merge
 /backend/binary
+
+/npm/bin
@@ -1,6 +1,12 @@
 #!/usr/bin/env sh
 . "$(dirname -- "$0")/_/husky.sh"
 
+# Check if infisical is installed
+if ! command -v infisical >/dev/null 2>&1; then
+  echo "\nError: Infisical CLI is not installed. Please install the Infisical CLI before comitting.\n You can refer to the documentation at https://infisical.com/docs/cli/overview\n\n"
+  exit 1
+fi
+
 npx lint-staged
 
 infisical scan git-changes --staged -v
Makefile — 4 changes

@@ -10,6 +10,9 @@ up-dev:
 up-dev-ldap:
 	docker compose -f docker-compose.dev.yml --profile ldap up --build
 
+up-dev-metrics:
+	docker compose -f docker-compose.dev.yml --profile metrics up --build
+
 up-prod:
 	docker-compose -f docker-compose.prod.yml up --build
 
@@ -27,4 +30,3 @@ reviewable-api:
 	npm run type:check
 
 reviewable: reviewable-ui reviewable-api
-
@@ -5,6 +5,9 @@ export const mockSmtpServer = (): TSmtpService => {
   return {
     sendMail: async (data) => {
       storage.push(data);
+    },
+    verify: async () => {
+      return true;
     }
   };
 };
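A quick hypothetical usage of the extended mock in a backend test (the mail payload shape is omitted because it is not shown in this diff):

```ts
// Inside an async test body:
const smtp = mockSmtpServer();
const reachable = await smtp.verify(); // always true for the mock, mirroring a healthy SMTP connection
```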
backend/package-lock.json (generated) — 1667 changes. File diff suppressed because it is too large.
@@ -50,6 +50,7 @@
     "auditlog-migration:down": "knex --knexfile ./src/db/auditlog-knexfile.ts --client pg migrate:down",
     "auditlog-migration:list": "knex --knexfile ./src/db/auditlog-knexfile.ts --client pg migrate:list",
     "auditlog-migration:status": "knex --knexfile ./src/db/auditlog-knexfile.ts --client pg migrate:status",
+    "auditlog-migration:unlock": "knex --knexfile ./src/db/auditlog-knexfile.ts migrate:unlock",
     "auditlog-migration:rollback": "knex --knexfile ./src/db/auditlog-knexfile.ts migrate:rollback",
     "migration:new": "tsx ./scripts/create-migration.ts",
     "migration:up": "npm run auditlog-migration:up && knex --knexfile ./src/db/knexfile.ts --client pg migrate:up",
@@ -58,6 +59,7 @@
     "migration:latest": "npm run auditlog-migration:latest && knex --knexfile ./src/db/knexfile.ts --client pg migrate:latest",
     "migration:status": "npm run auditlog-migration:status && knex --knexfile ./src/db/knexfile.ts --client pg migrate:status",
     "migration:rollback": "npm run auditlog-migration:rollback && knex --knexfile ./src/db/knexfile.ts migrate:rollback",
+    "migration:unlock": "npm run auditlog-migration:unlock && knex --knexfile ./src/db/knexfile.ts migrate:unlock",
     "migrate:org": "tsx ./scripts/migrate-organization.ts",
     "seed:new": "tsx ./scripts/create-seed-file.ts",
     "seed": "knex --knexfile ./src/db/knexfile.ts --client pg seed:run",
@@ -138,6 +140,14 @@
     "@octokit/plugin-retry": "^5.0.5",
     "@octokit/rest": "^20.0.2",
     "@octokit/webhooks-types": "^7.3.1",
+    "@opentelemetry/api": "^1.9.0",
+    "@opentelemetry/auto-instrumentations-node": "^0.53.0",
+    "@opentelemetry/exporter-metrics-otlp-proto": "^0.55.0",
+    "@opentelemetry/exporter-prometheus": "^0.55.0",
+    "@opentelemetry/instrumentation": "^0.55.0",
+    "@opentelemetry/resources": "^1.28.0",
+    "@opentelemetry/sdk-metrics": "^1.28.0",
+    "@opentelemetry/semantic-conventions": "^1.27.0",
     "@peculiar/asn1-schema": "^2.3.8",
     "@peculiar/x509": "^1.12.1",
     "@serdnam/pino-cloudwatch-transport": "^1.0.4",
@@ -181,6 +191,7 @@
     "openid-client": "^5.6.5",
     "ora": "^7.0.1",
     "oracledb": "^6.4.0",
+    "otplib": "^12.0.1",
     "passport-github": "^1.1.0",
     "passport-gitlab2": "^5.0.0",
     "passport-google-oauth20": "^2.0.0",
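The @opentelemetry/* additions above back the OTEL_* settings introduced earlier. The following is a minimal sketch — not the repository's actual telemetry module — of how those packages could be wired together, assuming an env-driven choice between a Prometheus pull endpoint and OTLP push:

```ts
import { OTLPMetricExporter } from "@opentelemetry/exporter-metrics-otlp-proto";
import { PrometheusExporter } from "@opentelemetry/exporter-prometheus";
import { MeterProvider, PeriodicExportingMetricReader } from "@opentelemetry/sdk-metrics";

// Assumed values: "prometheus" (pull) or "otlp" (push); the real service may use different names.
const exportType = process.env.OTEL_EXPORT_TYPE ?? "prometheus";

const reader =
  exportType === "otlp"
    ? new PeriodicExportingMetricReader({
        exporter: new OTLPMetricExporter({ url: process.env.OTEL_EXPORT_OTLP_ENDPOINT }),
        exportIntervalMillis: Number(process.env.OTEL_OTLP_PUSH_INTERVAL ?? 30000)
      })
    : new PrometheusExporter({ port: 9464 }); // PrometheusExporter is itself a MetricReader

const meterProvider = new MeterProvider({ readers: [reader] });
const meter = meterProvider.getMeter("infisical-backend"); // meter name chosen for the example
const apiRequests = meter.createCounter("api_requests_total");
apiRequests.add(1, { route: "/healthcheck" });
```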
@@ -8,61 +8,80 @@ const prompt = promptSync({
   sigint: true
 });
 
+const sanitizeInputParam = (value: string) => {
+  // Escape double quotes and wrap the entire value in double quotes
+  if (value) {
+    return `"${value.replace(/"/g, '\\"')}"`;
+  }
+  return '""';
+};
+
 const exportDb = () => {
-  const exportHost = prompt("Enter your Postgres Host to migrate from: ");
-  const exportPort = prompt("Enter your Postgres Port to migrate from [Default = 5432]: ") ?? "5432";
-  const exportUser = prompt("Enter your Postgres User to migrate from: [Default = infisical]: ") ?? "infisical";
-  const exportPassword = prompt("Enter your Postgres Password to migrate from: ");
-  const exportDatabase = prompt("Enter your Postgres Database to migrate from [Default = infisical]: ") ?? "infisical";
+  const exportHost = sanitizeInputParam(prompt("Enter your Postgres Host to migrate from: "));
+  const exportPort = sanitizeInputParam(
+    prompt("Enter your Postgres Port to migrate from [Default = 5432]: ") ?? "5432"
+  );
+  const exportUser = sanitizeInputParam(
+    prompt("Enter your Postgres User to migrate from: [Default = infisical]: ") ?? "infisical"
+  );
+  const exportPassword = sanitizeInputParam(prompt("Enter your Postgres Password to migrate from: "));
+  const exportDatabase = sanitizeInputParam(
+    prompt("Enter your Postgres Database to migrate from [Default = infisical]: ") ?? "infisical"
+  );
 
   // we do not include the audit_log and secret_sharing entries
   execSync(
-    `PGDATABASE="${exportDatabase}" PGPASSWORD="${exportPassword}" PGHOST="${exportHost}" PGPORT=${exportPort} PGUSER=${exportUser} pg_dump infisical --exclude-table-data="secret_sharing" --exclude-table-data="audit_log*" > ${path.join(
+    `PGDATABASE=${exportDatabase} PGPASSWORD=${exportPassword} PGHOST=${exportHost} PGPORT=${exportPort} PGUSER=${exportUser} pg_dump -Fc infisical --exclude-table-data="secret_sharing" --exclude-table-data="audit_log*" > ${path.join(
       __dirname,
-      "../src/db/dump.sql"
+      "../src/db/backup.dump"
     )}`,
     { stdio: "inherit" }
   );
 };
 
 const importDbForOrg = () => {
-  const importHost = prompt("Enter your Postgres Host to migrate to: ");
-  const importPort = prompt("Enter your Postgres Port to migrate to [Default = 5432]: ") ?? "5432";
-  const importUser = prompt("Enter your Postgres User to migrate to: [Default = infisical]: ") ?? "infisical";
-  const importPassword = prompt("Enter your Postgres Password to migrate to: ");
-  const importDatabase = prompt("Enter your Postgres Database to migrate to [Default = infisical]: ") ?? "infisical";
-  const orgId = prompt("Enter the organization ID to migrate: ");
+  const importHost = sanitizeInputParam(prompt("Enter your Postgres Host to migrate to: "));
+  const importPort = sanitizeInputParam(prompt("Enter your Postgres Port to migrate to [Default = 5432]: ") ?? "5432");
+  const importUser = sanitizeInputParam(
+    prompt("Enter your Postgres User to migrate to: [Default = infisical]: ") ?? "infisical"
+  );
+  const importPassword = sanitizeInputParam(prompt("Enter your Postgres Password to migrate to: "));
+  const importDatabase = sanitizeInputParam(
+    prompt("Enter your Postgres Database to migrate to [Default = infisical]: ") ?? "infisical"
+  );
+  const orgId = sanitizeInputParam(prompt("Enter the organization ID to migrate: "));
 
-  if (!existsSync(path.join(__dirname, "../src/db/dump.sql"))) {
+  if (!existsSync(path.join(__dirname, "../src/db/backup.dump"))) {
     console.log("File not found, please export the database first.");
     return;
   }
 
   execSync(
-    `PGDATABASE="${importDatabase}" PGPASSWORD="${importPassword}" PGHOST="${importHost}" PGPORT=${importPort} PGUSER=${importUser} psql -f ${path.join(
+    `PGDATABASE=${importDatabase} PGPASSWORD=${importPassword} PGHOST=${importHost} PGPORT=${importPort} PGUSER=${importUser} pg_restore -d ${importDatabase} --verbose ${path.join(
       __dirname,
-      "../src/db/dump.sql"
-    )}`
+      "../src/db/backup.dump"
+    )}`,
+    { maxBuffer: 1024 * 1024 * 4096 }
   );
 
   execSync(
-    `PGDATABASE="${importDatabase}" PGPASSWORD="${importPassword}" PGHOST="${importHost}" PGPORT=${importPort} PGUSER=${importUser} psql -c "DELETE FROM public.organizations WHERE id != '${orgId}'"`
+    `PGDATABASE=${importDatabase} PGPASSWORD=${importPassword} PGHOST=${importHost} PGPORT=${importPort} PGUSER=${importUser} psql -c "DELETE FROM public.organizations WHERE id != '${orgId}'"`
   );
 
   // delete global/instance-level resources not relevant to the organization to migrate
   // users
   execSync(
-    `PGDATABASE="${importDatabase}" PGPASSWORD="${importPassword}" PGHOST="${importHost}" PGPORT=${importPort} PGUSER=${importUser} psql -c 'DELETE FROM users WHERE users.id NOT IN (SELECT org_memberships."userId" FROM org_memberships)'`
+    `PGDATABASE=${importDatabase} PGPASSWORD=${importPassword} PGHOST=${importHost} PGPORT=${importPort} PGUSER=${importUser} psql -c 'DELETE FROM users WHERE users.id NOT IN (SELECT org_memberships."userId" FROM org_memberships)'`
   );
 
   // identities
   execSync(
-    `PGDATABASE="${importDatabase}" PGPASSWORD="${importPassword}" PGHOST="${importHost}" PGPORT=${importPort} PGUSER=${importUser} psql -c 'DELETE FROM identities WHERE id NOT IN (SELECT "identityId" FROM identity_org_memberships)'`
+    `PGDATABASE=${importDatabase} PGPASSWORD=${importPassword} PGHOST=${importHost} PGPORT=${importPort} PGUSER=${importUser} psql -c 'DELETE FROM identities WHERE id NOT IN (SELECT "identityId" FROM identity_org_memberships)'`
   );
 
   // reset slack configuration in superAdmin
   execSync(
-    `PGDATABASE="${importDatabase}" PGPASSWORD="${importPassword}" PGHOST="${importHost}" PGPORT=${importPort} PGUSER=${importUser} psql -c 'UPDATE super_admin SET "encryptedSlackClientId" = null, "encryptedSlackClientSecret" = null'`
+    `PGDATABASE=${importDatabase} PGPASSWORD=${importPassword} PGHOST=${importHost} PGPORT=${importPort} PGUSER=${importUser} psql -c 'UPDATE super_admin SET "encryptedSlackClientId" = null, "encryptedSlackClientSecret" = null'`
   );
 
   console.log("Organization migrated successfully.");
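To make the effect of the new sanitizer concrete, here is what it returns for a few illustrative inputs (values invented for the example):

```ts
sanitizeInputParam("localhost"); // '"localhost"'   — value is wrapped in double quotes
sanitizeInputParam('my"host');   // '"my\\"host"'   — embedded double quotes are escaped
sanitizeInputParam("");          // '""'            — empty input becomes an empty quoted string
```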
backend/src/@types/fastify.d.ts (vendored) — 2 changes

@@ -79,6 +79,7 @@ import { TServiceTokenServiceFactory } from "@app/services/service-token/service
 import { TSlackServiceFactory } from "@app/services/slack/slack-service";
 import { TSuperAdminServiceFactory } from "@app/services/super-admin/super-admin-service";
 import { TTelemetryServiceFactory } from "@app/services/telemetry/telemetry-service";
+import { TTotpServiceFactory } from "@app/services/totp/totp-service";
 import { TUserDALFactory } from "@app/services/user/user-dal";
 import { TUserServiceFactory } from "@app/services/user/user-service";
 import { TUserEngagementServiceFactory } from "@app/services/user-engagement/user-engagement-service";
@@ -193,6 +194,7 @@ declare module "fastify" {
     migration: TExternalMigrationServiceFactory;
     externalGroupOrgRoleMapping: TExternalGroupOrgRoleMappingServiceFactory;
     projectTemplate: TProjectTemplateServiceFactory;
+    totp: TTotpServiceFactory;
   };
   // this is exclusive use for middlewares in which we need to inject data
   // everywhere else access using service layer
backend/src/@types/knex.d.ts (vendored) — 4 changes

@@ -314,6 +314,9 @@ import {
   TSuperAdmin,
   TSuperAdminInsert,
   TSuperAdminUpdate,
+  TTotpConfigs,
+  TTotpConfigsInsert,
+  TTotpConfigsUpdate,
   TTrustedIps,
   TTrustedIpsInsert,
   TTrustedIpsUpdate,
@@ -826,5 +829,6 @@ declare module "knex/types/tables" {
       TProjectTemplatesInsert,
       TProjectTemplatesUpdate
     >;
+    [TableName.TotpConfig]: KnexOriginal.CompositeTableType<TTotpConfigs, TTotpConfigsInsert, TTotpConfigsUpdate>;
   }
 }
@@ -64,23 +64,25 @@ export async function up(knex: Knex): Promise<void> {
   }
 
   if (await knex.schema.hasTable(TableName.Certificate)) {
-    await knex.schema.alterTable(TableName.Certificate, (t) => {
-      t.uuid("caCertId").nullable();
-      t.foreign("caCertId").references("id").inTable(TableName.CertificateAuthorityCert);
-    });
+    const hasCaCertIdColumn = await knex.schema.hasColumn(TableName.Certificate, "caCertId");
+    if (!hasCaCertIdColumn) {
+      await knex.schema.alterTable(TableName.Certificate, (t) => {
+        t.uuid("caCertId").nullable();
+        t.foreign("caCertId").references("id").inTable(TableName.CertificateAuthorityCert);
+      });
 
       await knex.raw(`
       UPDATE "${TableName.Certificate}" cert
       SET "caCertId" = (
        SELECT caCert.id
        FROM "${TableName.CertificateAuthorityCert}" caCert
        WHERE caCert."caId" = cert."caId"
-      )
-      `);
+      )`);
 
       await knex.schema.alterTable(TableName.Certificate, (t) => {
         t.uuid("caCertId").notNullable().alter();
       });
+    }
   }
 }
 
@ -2,7 +2,7 @@ import { Knex } from "knex";
|
|||||||
|
|
||||||
import { TableName } from "../schemas";
|
import { TableName } from "../schemas";
|
||||||
|
|
||||||
const BATCH_SIZE = 30_000;
|
const BATCH_SIZE = 10_000;
|
||||||
|
|
||||||
export async function up(knex: Knex): Promise<void> {
|
export async function up(knex: Knex): Promise<void> {
|
||||||
const hasAuthMethodColumnAccessToken = await knex.schema.hasColumn(TableName.IdentityAccessToken, "authMethod");
|
const hasAuthMethodColumnAccessToken = await knex.schema.hasColumn(TableName.IdentityAccessToken, "authMethod");
|
||||||
@ -12,7 +12,18 @@ export async function up(knex: Knex): Promise<void> {
|
|||||||
t.string("authMethod").nullable();
|
t.string("authMethod").nullable();
|
||||||
});
|
});
|
||||||
|
|
||||||
let nullableAccessTokens = await knex(TableName.IdentityAccessToken).whereNull("authMethod").limit(BATCH_SIZE);
|
// first we remove identities without auth method that is unused
|
||||||
|
// ! We delete all access tokens where the identity has no auth method set!
|
||||||
|
// ! Which means un-configured identities that for some reason have access tokens, will have their access tokens deleted.
|
||||||
|
await knex(TableName.IdentityAccessToken)
|
||||||
|
.leftJoin(TableName.Identity, `${TableName.Identity}.id`, `${TableName.IdentityAccessToken}.identityId`)
|
||||||
|
.whereNull(`${TableName.Identity}.authMethod`)
|
||||||
|
.delete();
|
||||||
|
|
||||||
|
let nullableAccessTokens = await knex(TableName.IdentityAccessToken)
|
||||||
|
.whereNull("authMethod")
|
||||||
|
.limit(BATCH_SIZE)
|
||||||
|
.select("id");
|
||||||
let totalUpdated = 0;
|
let totalUpdated = 0;
|
||||||
|
|
||||||
do {
|
do {
|
||||||
@ -33,24 +44,15 @@ export async function up(knex: Knex): Promise<void> {
|
|||||||
});
|
});
|
||||||
|
|
||||||
// eslint-disable-next-line no-await-in-loop
|
// eslint-disable-next-line no-await-in-loop
|
||||||
nullableAccessTokens = await knex(TableName.IdentityAccessToken).whereNull("authMethod").limit(BATCH_SIZE);
|
nullableAccessTokens = await knex(TableName.IdentityAccessToken)
|
||||||
|
.whereNull("authMethod")
|
||||||
|
.limit(BATCH_SIZE)
|
||||||
|
.select("id");
|
||||||
|
|
||||||
totalUpdated += batchIds.length;
|
totalUpdated += batchIds.length;
|
||||||
console.log(`Updated ${batchIds.length} access tokens in batch <> Total updated: ${totalUpdated}`);
|
console.log(`Updated ${batchIds.length} access tokens in batch <> Total updated: ${totalUpdated}`);
|
||||||
} while (nullableAccessTokens.length > 0);
|
} while (nullableAccessTokens.length > 0);
|
||||||
|
|
||||||
// ! We delete all access tokens where the identity has no auth method set!
|
|
||||||
// ! Which means un-configured identities that for some reason have access tokens, will have their access tokens deleted.
|
|
||||||
await knex(TableName.IdentityAccessToken)
|
|
||||||
.whereNotExists((queryBuilder) => {
|
|
||||||
void queryBuilder
|
|
||||||
.select("id")
|
|
||||||
.from(TableName.Identity)
|
|
||||||
.whereRaw(`${TableName.IdentityAccessToken}."identityId" = ${TableName.Identity}.id`)
|
|
||||||
.whereNotNull("authMethod");
|
|
||||||
})
|
|
||||||
.delete();
|
|
||||||
|
|
||||||
// Finally we set the authMethod to notNullable after populating the column.
|
// Finally we set the authMethod to notNullable after populating the column.
|
||||||
// This will fail if the data is not populated correctly, so it's safe.
|
// This will fail if the data is not populated correctly, so it's safe.
|
||||||
await knex.schema.alterTable(TableName.IdentityAccessToken, (t) => {
|
await knex.schema.alterTable(TableName.IdentityAccessToken, (t) => {
|
||||||
|
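The migration above backfills the new column in batches of BATCH_SIZE rows. Stripped of the specific tables, the loop follows a generic pattern; a hedged, self-contained sketch of it (the fetch/update helpers are hypothetical parameters, not repository code):

```ts
// Generic batched-backfill pattern extracted from the migration above (illustrative only).
type FetchBatch = (limit: number) => Promise<string[]>; // returns ids still needing a backfill
type UpdateBatch = (ids: string[]) => Promise<void>;    // writes the derived value for those ids

export async function backfillInBatches(fetchBatch: FetchBatch, updateBatch: UpdateBatch, batchSize = 10_000) {
  let ids = await fetchBatch(batchSize);
  let totalUpdated = 0;

  while (ids.length > 0) {
    await updateBatch(ids);
    totalUpdated += ids.length;
    ids = await fetchBatch(batchSize); // re-query until nothing is left, mirroring the do/while loop above
  }

  return totalUpdated;
}
```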
@@ -0,0 +1,21 @@
+import { Knex } from "knex";
+
+import { TableName } from "../schemas";
+
+export async function up(knex: Knex): Promise<void> {
+  if (await knex.schema.hasColumn(TableName.OidcConfig, "orgId")) {
+    await knex.schema.alterTable(TableName.OidcConfig, (t) => {
+      t.dropForeign("orgId");
+      t.foreign("orgId").references("id").inTable(TableName.Organization).onDelete("CASCADE");
+    });
+  }
+}
+
+export async function down(knex: Knex): Promise<void> {
+  if (await knex.schema.hasColumn(TableName.OidcConfig, "orgId")) {
+    await knex.schema.alterTable(TableName.OidcConfig, (t) => {
+      t.dropForeign("orgId");
+      t.foreign("orgId").references("id").inTable(TableName.Organization);
+    });
+  }
+}
backend/src/db/migrations/20241112082701_add-totp-support.ts (new file) — 54 lines

@@ -0,0 +1,54 @@
+import { Knex } from "knex";
+
+import { TableName } from "../schemas";
+import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";
+
+export async function up(knex: Knex): Promise<void> {
+  if (!(await knex.schema.hasTable(TableName.TotpConfig))) {
+    await knex.schema.createTable(TableName.TotpConfig, (t) => {
+      t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
+      t.uuid("userId").notNullable();
+      t.foreign("userId").references("id").inTable(TableName.Users).onDelete("CASCADE");
+      t.boolean("isVerified").defaultTo(false).notNullable();
+      t.binary("encryptedRecoveryCodes").notNullable();
+      t.binary("encryptedSecret").notNullable();
+      t.timestamps(true, true, true);
+      t.unique("userId");
+    });
+
+    await createOnUpdateTrigger(knex, TableName.TotpConfig);
+  }
+
+  const doesOrgMfaMethodColExist = await knex.schema.hasColumn(TableName.Organization, "selectedMfaMethod");
+  await knex.schema.alterTable(TableName.Organization, (t) => {
+    if (!doesOrgMfaMethodColExist) {
+      t.string("selectedMfaMethod");
+    }
+  });
+
+  const doesUserSelectedMfaMethodColExist = await knex.schema.hasColumn(TableName.Users, "selectedMfaMethod");
+  await knex.schema.alterTable(TableName.Users, (t) => {
+    if (!doesUserSelectedMfaMethodColExist) {
+      t.string("selectedMfaMethod");
+    }
+  });
+}
+
+export async function down(knex: Knex): Promise<void> {
+  await dropOnUpdateTrigger(knex, TableName.TotpConfig);
+  await knex.schema.dropTableIfExists(TableName.TotpConfig);
+
+  const doesOrgMfaMethodColExist = await knex.schema.hasColumn(TableName.Organization, "selectedMfaMethod");
+  await knex.schema.alterTable(TableName.Organization, (t) => {
+    if (doesOrgMfaMethodColExist) {
+      t.dropColumn("selectedMfaMethod");
+    }
+  });
+
+  const doesUserSelectedMfaMethodColExist = await knex.schema.hasColumn(TableName.Users, "selectedMfaMethod");
+  await knex.schema.alterTable(TableName.Users, (t) => {
+    if (doesUserSelectedMfaMethodColExist) {
+      t.dropColumn("selectedMfaMethod");
+    }
+  });
+}
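A hedged sketch of how the new table might be queried once this migration has run (this helper is illustrative, not code from the repository; the "totp_configs" name matches the TableName.TotpConfig value added later in this diff):

```ts
import { Knex } from "knex";

// Illustrative helper: look up a user's TOTP config in the table created above.
export const getUserTotpConfig = async (knex: Knex, userId: string) =>
  knex("totp_configs").where({ userId }).first(); // returns undefined until the user enrolls

// If the returned row has isVerified === true, a one-time code could be required at login.
```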
@@ -0,0 +1,20 @@
+import { Knex } from "knex";
+
+import { TableName } from "../schemas";
+
+export async function up(knex: Knex): Promise<void> {
+  if (await knex.schema.hasColumn(TableName.IdentityMetadata, "value")) {
+    await knex(TableName.IdentityMetadata).whereNull("value").delete();
+    await knex.schema.alterTable(TableName.IdentityMetadata, (t) => {
+      t.string("value", 1020).notNullable().alter();
+    });
+  }
+}
+
+export async function down(knex: Knex): Promise<void> {
+  if (await knex.schema.hasColumn(TableName.IdentityMetadata, "value")) {
+    await knex.schema.alterTable(TableName.IdentityMetadata, (t) => {
+      t.string("value", 1020).alter();
+    });
+  }
+}
@ -106,6 +106,7 @@ export * from "./secrets-v2";
|
|||||||
export * from "./service-tokens";
|
export * from "./service-tokens";
|
||||||
export * from "./slack-integrations";
|
export * from "./slack-integrations";
|
||||||
export * from "./super-admin";
|
export * from "./super-admin";
|
||||||
|
export * from "./totp-configs";
|
||||||
export * from "./trusted-ips";
|
export * from "./trusted-ips";
|
||||||
export * from "./user-actions";
|
export * from "./user-actions";
|
||||||
export * from "./user-aliases";
|
export * from "./user-aliases";
|
||||||
|
@@ -117,6 +117,7 @@ export enum TableName {
   ExternalKms = "external_kms",
   InternalKms = "internal_kms",
   InternalKmsKeyVersion = "internal_kms_key_version",
+  TotpConfig = "totp_configs",
   // @depreciated
   KmsKeyVersion = "kms_key_versions",
   WorkflowIntegrations = "workflow_integrations",
@@ -21,7 +21,8 @@ export const OrganizationsSchema = z.object({
   kmsDefaultKeyId: z.string().uuid().nullable().optional(),
   kmsEncryptedDataKey: zodBuffer.nullable().optional(),
   defaultMembershipRole: z.string().default("member"),
-  enforceMfa: z.boolean().default(false)
+  enforceMfa: z.boolean().default(false),
+  selectedMfaMethod: z.string().nullable().optional()
 });
 
 export type TOrganizations = z.infer<typeof OrganizationsSchema>;
backend/src/db/schemas/totp-configs.ts (new file) — 24 lines

@@ -0,0 +1,24 @@
+// Code generated by automation script, DO NOT EDIT.
+// Automated by pulling database and generating zod schema
+// To update. Just run npm run generate:schema
+// Written by akhilmhdh.
+
+import { z } from "zod";
+
+import { zodBuffer } from "@app/lib/zod";
+
+import { TImmutableDBKeys } from "./models";
+
+export const TotpConfigsSchema = z.object({
+  id: z.string().uuid(),
+  userId: z.string().uuid(),
+  isVerified: z.boolean().default(false),
+  encryptedRecoveryCodes: zodBuffer,
+  encryptedSecret: zodBuffer,
+  createdAt: z.date(),
+  updatedAt: z.date()
+});
+
+export type TTotpConfigs = z.infer<typeof TotpConfigsSchema>;
+export type TTotpConfigsInsert = Omit<z.input<typeof TotpConfigsSchema>, TImmutableDBKeys>;
+export type TTotpConfigsUpdate = Partial<Omit<z.input<typeof TotpConfigsSchema>, TImmutableDBKeys>>;
@@ -26,7 +26,8 @@ export const UsersSchema = z.object({
   consecutiveFailedMfaAttempts: z.number().default(0).nullable().optional(),
   isLocked: z.boolean().default(false).nullable().optional(),
   temporaryLockDateEnd: z.date().nullable().optional(),
-  consecutiveFailedPasswordAttempts: z.number().default(0).nullable().optional()
+  consecutiveFailedPasswordAttempts: z.number().default(0).nullable().optional(),
+  selectedMfaMethod: z.string().nullable().optional()
 });
 
 export type TUsers = z.infer<typeof UsersSchema>;
@ -2,6 +2,9 @@ import { Knex } from "knex";
|
|||||||
|
|
||||||
import { TableName } from "./schemas";
|
import { TableName } from "./schemas";
|
||||||
|
|
||||||
|
interface PgTriggerResult {
|
||||||
|
rows: Array<{ exists: boolean }>;
|
||||||
|
}
|
||||||
export const createJunctionTable = (knex: Knex, tableName: TableName, table1Name: TableName, table2Name: TableName) =>
|
export const createJunctionTable = (knex: Knex, tableName: TableName, table1Name: TableName, table2Name: TableName) =>
|
||||||
knex.schema.createTable(tableName, (table) => {
|
knex.schema.createTable(tableName, (table) => {
|
||||||
table.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
|
table.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
|
||||||
@ -28,13 +31,26 @@ DROP FUNCTION IF EXISTS on_update_timestamp() CASCADE;
|
|||||||
|
|
||||||
// we would be using this to apply updatedAt where ever we wanta
|
// we would be using this to apply updatedAt where ever we wanta
|
||||||
// remember to set `timestamps(true,true,true)` before this on schema
|
// remember to set `timestamps(true,true,true)` before this on schema
|
||||||
export const createOnUpdateTrigger = (knex: Knex, tableName: string) =>
|
export const createOnUpdateTrigger = async (knex: Knex, tableName: string) => {
|
||||||
knex.raw(`
|
const triggerExists = await knex.raw<PgTriggerResult>(`
|
||||||
CREATE TRIGGER "${tableName}_updatedAt"
|
SELECT EXISTS (
|
||||||
BEFORE UPDATE ON ${tableName}
|
SELECT 1
|
||||||
FOR EACH ROW
|
FROM pg_trigger
|
||||||
EXECUTE PROCEDURE on_update_timestamp();
|
WHERE tgname = '${tableName}_updatedAt'
|
||||||
`);
|
);
|
||||||
|
`);
|
||||||
|
|
||||||
|
if (!triggerExists?.rows?.[0]?.exists) {
|
||||||
|
return knex.raw(`
|
||||||
|
CREATE TRIGGER "${tableName}_updatedAt"
|
||||||
|
BEFORE UPDATE ON ${tableName}
|
||||||
|
FOR EACH ROW
|
||||||
|
EXECUTE PROCEDURE on_update_timestamp();
|
||||||
|
`);
|
||||||
|
}
|
||||||
|
|
||||||
|
return null;
|
||||||
|
};
|
||||||
|
|
||||||
export const dropOnUpdateTrigger = (knex: Knex, tableName: string) =>
|
export const dropOnUpdateTrigger = (knex: Knex, tableName: string) =>
|
||||||
knex.raw(`DROP TRIGGER IF EXISTS "${tableName}_updatedAt" ON ${tableName}`);
|
knex.raw(`DROP TRIGGER IF EXISTS "${tableName}_updatedAt" ON ${tableName}`);
|
||||||
|
@@ -122,6 +122,8 @@ export const registerSamlRouter = async (server: FastifyZodProvider) => {
           },
           `email: ${email} firstName: ${profile.firstName as string}`
         );
+
+        throw new Error("Invalid saml request. Missing email or first name");
       }
 
       const userMetadata = Object.keys(profile.attributes || {})
@ -13,6 +13,7 @@ import { RabbitMqProvider } from "./rabbit-mq";
|
|||||||
import { RedisDatabaseProvider } from "./redis";
|
import { RedisDatabaseProvider } from "./redis";
|
||||||
import { SapHanaProvider } from "./sap-hana";
|
import { SapHanaProvider } from "./sap-hana";
|
||||||
import { SqlDatabaseProvider } from "./sql-database";
|
import { SqlDatabaseProvider } from "./sql-database";
|
||||||
|
import { TotpProvider } from "./totp";
|
||||||
|
|
||||||
export const buildDynamicSecretProviders = () => ({
|
export const buildDynamicSecretProviders = () => ({
|
||||||
[DynamicSecretProviders.SqlDatabase]: SqlDatabaseProvider(),
|
[DynamicSecretProviders.SqlDatabase]: SqlDatabaseProvider(),
|
||||||
@ -27,5 +28,6 @@ export const buildDynamicSecretProviders = () => ({
|
|||||||
[DynamicSecretProviders.AzureEntraID]: AzureEntraIDProvider(),
|
[DynamicSecretProviders.AzureEntraID]: AzureEntraIDProvider(),
|
||||||
[DynamicSecretProviders.Ldap]: LdapProvider(),
|
[DynamicSecretProviders.Ldap]: LdapProvider(),
|
||||||
[DynamicSecretProviders.SapHana]: SapHanaProvider(),
|
[DynamicSecretProviders.SapHana]: SapHanaProvider(),
|
||||||
[DynamicSecretProviders.Snowflake]: SnowflakeProvider()
|
[DynamicSecretProviders.Snowflake]: SnowflakeProvider(),
|
||||||
|
[DynamicSecretProviders.Totp]: TotpProvider()
|
||||||
});
|
});
|
||||||
|
@@ -17,7 +17,17 @@ export enum LdapCredentialType {
   Static = "static"
 }
 
+export enum TotpConfigType {
+  URL = "url",
+  MANUAL = "manual"
+}
+
+export enum TotpAlgorithm {
+  SHA1 = "sha1",
+  SHA256 = "sha256",
+  SHA512 = "sha512"
+}
+
 export const DynamicSecretRedisDBSchema = z.object({
   host: z.string().trim().toLowerCase(),
   port: z.number(),
@@ -221,6 +232,34 @@ export const LdapSchema = z.union([
   })
 ]);
 
+export const DynamicSecretTotpSchema = z.discriminatedUnion("configType", [
+  z.object({
+    configType: z.literal(TotpConfigType.URL),
+    url: z
+      .string()
+      .url()
+      .trim()
+      .min(1)
+      .refine((val) => {
+        const urlObj = new URL(val);
+        const secret = urlObj.searchParams.get("secret");
+
+        return Boolean(secret);
+      }, "OTP URL must contain secret field")
+  }),
+  z.object({
+    configType: z.literal(TotpConfigType.MANUAL),
+    secret: z
+      .string()
+      .trim()
+      .min(1)
+      .transform((val) => val.replace(/\s+/g, "")),
+    period: z.number().optional(),
+    algorithm: z.nativeEnum(TotpAlgorithm).optional(),
+    digits: z.number().optional()
+  })
+]);
+
 export enum DynamicSecretProviders {
   SqlDatabase = "sql-database",
   Cassandra = "cassandra",
@@ -234,7 +273,8 @@ export enum DynamicSecretProviders {
   AzureEntraID = "azure-entra-id",
   Ldap = "ldap",
   SapHana = "sap-hana",
-  Snowflake = "snowflake"
+  Snowflake = "snowflake",
+  Totp = "totp"
 }
 
 export const DynamicSecretProviderSchema = z.discriminatedUnion("type", [
@@ -250,7 +290,8 @@ export const DynamicSecretProviderSchema = z.discriminatedUnion("type", [
   z.object({ type: z.literal(DynamicSecretProviders.RabbitMq), inputs: DynamicSecretRabbitMqSchema }),
   z.object({ type: z.literal(DynamicSecretProviders.AzureEntraID), inputs: AzureEntraIDSchema }),
   z.object({ type: z.literal(DynamicSecretProviders.Ldap), inputs: LdapSchema }),
-  z.object({ type: z.literal(DynamicSecretProviders.Snowflake), inputs: DynamicSecretSnowflakeSchema })
+  z.object({ type: z.literal(DynamicSecretProviders.Snowflake), inputs: DynamicSecretSnowflakeSchema }),
+  z.object({ type: z.literal(DynamicSecretProviders.Totp), inputs: DynamicSecretTotpSchema })
 ]);
 
 export type TDynamicProviderFns = {
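For context, a rough example of an input the URL variant of the new schema would accept; the otpauth URI below is invented for illustration, and the refine() above only checks that a `secret` query parameter is present:

```ts
import { DynamicSecretTotpSchema } from "./models";

const input = {
  configType: "url",
  url: "otpauth://totp/Acme:alice@example.com?secret=JBSWY3DPEHPK3PXP&period=30&digits=6&algorithm=SHA1"
};

const parsed = DynamicSecretTotpSchema.parse(input); // throws if the URL has no `secret` parameter
console.log(parsed.configType); // "url"
```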
backend/src/ee/services/dynamic-secret/providers/totp.ts (new file) — 92 lines

@@ -0,0 +1,92 @@
+import { authenticator } from "otplib";
+import { HashAlgorithms } from "otplib/core";
+
+import { BadRequestError } from "@app/lib/errors";
+import { alphaNumericNanoId } from "@app/lib/nanoid";
+
+import { DynamicSecretTotpSchema, TDynamicProviderFns, TotpConfigType } from "./models";
+
+export const TotpProvider = (): TDynamicProviderFns => {
+  const validateProviderInputs = async (inputs: unknown) => {
+    const providerInputs = await DynamicSecretTotpSchema.parseAsync(inputs);
+
+    return providerInputs;
+  };
+
+  const validateConnection = async () => {
+    return true;
+  };
+
+  const create = async (inputs: unknown) => {
+    const providerInputs = await validateProviderInputs(inputs);
+
+    const entityId = alphaNumericNanoId(32);
+    const authenticatorInstance = authenticator.clone();
+
+    let secret: string;
+    let period: number | null | undefined;
+    let digits: number | null | undefined;
+    let algorithm: HashAlgorithms | null | undefined;
+
+    if (providerInputs.configType === TotpConfigType.URL) {
+      const urlObj = new URL(providerInputs.url);
+      secret = urlObj.searchParams.get("secret") as string;
+      const periodFromUrl = urlObj.searchParams.get("period");
+      const digitsFromUrl = urlObj.searchParams.get("digits");
+      const algorithmFromUrl = urlObj.searchParams.get("algorithm");
+
+      if (periodFromUrl) {
+        period = +periodFromUrl;
+      }
+
+      if (digitsFromUrl) {
+        digits = +digitsFromUrl;
+      }
+
+      if (algorithmFromUrl) {
+        algorithm = algorithmFromUrl.toLowerCase() as HashAlgorithms;
+      }
+    } else {
+      secret = providerInputs.secret;
+      period = providerInputs.period;
+      digits = providerInputs.digits;
+      algorithm = providerInputs.algorithm as unknown as HashAlgorithms;
+    }
+
+    if (digits) {
+      authenticatorInstance.options = { digits };
+    }
+
+    if (algorithm) {
+      authenticatorInstance.options = { algorithm };
+    }
+
+    if (period) {
+      authenticatorInstance.options = { step: period };
+    }
+
+    return {
+      entityId,
+      data: { TOTP: authenticatorInstance.generate(secret), TIME_REMAINING: authenticatorInstance.timeRemaining() }
+    };
+  };
+
+  const revoke = async (_inputs: unknown, entityId: string) => {
+    return { entityId };
+  };
+
+  // eslint-disable-next-line @typescript-eslint/no-unused-vars
+  const renew = async (_inputs: unknown, _entityId: string) => {
+    throw new BadRequestError({
+      message: "Lease renewal is not supported for TOTPs"
+    });
+  };
+
+  return {
+    validateProviderInputs,
+    validateConnection,
+    create,
+    revoke,
+    renew
+  };
+};
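As a sanity check on the otplib calls used above, here is a minimal, self-contained sketch of generating and checking a code with a cloned authenticator; the secret and option values are invented for the example:

```ts
import { authenticator } from "otplib";

const instance = authenticator.clone();       // clone() gives an instance with isolated options
instance.options = { digits: 6, step: 30 };   // `step` is otplib's name for the TOTP period

const secret = "JBSWY3DPEHPK3PXP";            // example base32 secret, not a real credential
const token = instance.generate(secret);      // current one-time code
const secondsLeft = instance.timeRemaining(); // seconds until the code rotates

console.log(token, secondsLeft);
console.log(instance.verify({ token, secret })); // true while the code is still valid
```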
@@ -17,7 +17,7 @@ import {
   infisicalSymmetricDecrypt,
   infisicalSymmetricEncypt
 } from "@app/lib/crypto/encryption";
-import { BadRequestError, ForbiddenRequestError, NotFoundError } from "@app/lib/errors";
+import { BadRequestError, ForbiddenRequestError, NotFoundError, OidcAuthError } from "@app/lib/errors";
 import { AuthMethod, AuthTokenType } from "@app/services/auth/auth-type";
 import { TAuthTokenServiceFactory } from "@app/services/auth-token/auth-token-service";
 import { TokenType } from "@app/services/auth-token/auth-token-types";
@@ -56,7 +56,7 @@ type TOidcConfigServiceFactoryDep = {
   orgBotDAL: Pick<TOrgBotDALFactory, "findOne" | "create" | "transaction">;
   licenseService: Pick<TLicenseServiceFactory, "getPlan" | "updateSubscriptionOrgMemberCount">;
   tokenService: Pick<TAuthTokenServiceFactory, "createTokenForUser">;
-  smtpService: Pick<TSmtpService, "sendMail">;
+  smtpService: Pick<TSmtpService, "sendMail" | "verify">;
   permissionService: Pick<TPermissionServiceFactory, "getOrgPermission">;
   oidcConfigDAL: Pick<TOidcConfigDALFactory, "findOne" | "update" | "create">;
 };
@@ -223,6 +223,7 @@ export const oidcConfigServiceFactory = ({
         let newUser: TUsers | undefined;
 
         if (serverCfg.trustOidcEmails) {
+          // we prioritize getting the most complete user to create the new alias under
           newUser = await userDAL.findOne(
             {
               email,
@@ -230,6 +231,23 @@ export const oidcConfigServiceFactory = ({
             },
             tx
           );
+
+          if (!newUser) {
+            // this fetches user entries created via invites
+            newUser = await userDAL.findOne(
+              {
+                username: email
+              },
+              tx
+            );
+
+            if (newUser && !newUser.isEmailVerified) {
+              // we automatically mark it as email-verified because we've configured trust for OIDC emails
+              newUser = await userDAL.updateById(newUser.id, {
+                isEmailVerified: true
+              });
+            }
+          }
         }
 
         if (!newUser) {
@@ -332,14 +350,20 @@ export const oidcConfigServiceFactory = ({
         userId: user.id
       });
 
-      await smtpService.sendMail({
-        template: SmtpTemplates.EmailVerification,
-        subjectLine: "Infisical confirmation code",
-        recipients: [user.email],
-        substitutions: {
-          code: token
-        }
-      });
+      await smtpService
+        .sendMail({
+          template: SmtpTemplates.EmailVerification,
+          subjectLine: "Infisical confirmation code",
+          recipients: [user.email],
+          substitutions: {
+            code: token
+          }
+        })
+        .catch((err: Error) => {
+          throw new OidcAuthError({
+            message: `Error sending email confirmation code for user registration - contact the Infisical instance admin. ${err.message}`
+          });
+        });
     }
 
     return { isUserCompleted, providerAuthToken };
@@ -395,6 +419,18 @@ export const oidcConfigServiceFactory = ({
         message: `Organization bot for organization with ID '${org.id}' not found`,
         name: "OrgBotNotFound"
       });
+
+    const serverCfg = await getServerCfg();
+    if (isActive && !serverCfg.trustOidcEmails) {
+      const isSmtpConnected = await smtpService.verify();
+      if (!isSmtpConnected) {
+        throw new BadRequestError({
+          message:
+            "Cannot enable OIDC when there are issues with the instance's SMTP configuration. Bypass this by turning on trust for OIDC emails in the server admin console."
+        });
+      }
+    }
+
     const key = infisicalSymmetricDecrypt({
       ciphertext: orgBot.encryptedSymmetricKey,
       iv: orgBot.symmetricKeyIV,
@@ -127,14 +127,15 @@ export const permissionDALFactory = (db: TDbClient) => {
 
   const getProjectPermission = async (userId: string, projectId: string) => {
     try {
+      const subQueryUserGroups = db(TableName.UserGroupMembership).where("userId", userId).select("groupId");
       const docs = await db
         .replicaNode()(TableName.Users)
         .where(`${TableName.Users}.id`, userId)
-        .leftJoin(TableName.UserGroupMembership, `${TableName.UserGroupMembership}.userId`, `${TableName.Users}.id`)
         .leftJoin(TableName.GroupProjectMembership, (queryBuilder) => {
           void queryBuilder
             .on(`${TableName.GroupProjectMembership}.projectId`, db.raw("?", [projectId]))
-            .andOn(`${TableName.GroupProjectMembership}.groupId`, `${TableName.UserGroupMembership}.groupId`);
+            // @ts-expect-error akhilmhdh: this is valid knexjs query. Its just ts type argument is missing it
+            .andOnIn(`${TableName.GroupProjectMembership}.groupId`, subQueryUserGroups);
         })
         .leftJoin(
           TableName.GroupProjectMembershipRole,
@@ -29,4 +29,18 @@ function validateOrgSSO(actorAuthMethod: ActorAuthMethod, isOrgSsoEnforced: TOrg
   }
 }
 
-export { isAuthMethodSaml, validateOrgSSO };
+const escapeHandlebarsMissingMetadata = (obj: Record<string, string>) => {
+  const handler = {
+    get(target: Record<string, string>, prop: string) {
+      if (!(prop in target)) {
+        // eslint-disable-next-line no-param-reassign
+        target[prop] = `{{identity.metadata.${prop}}}`; // Add missing key as an "own" property
+      }
+      return target[prop];
+    }
+  };
+
+  return new Proxy(obj, handler);
+};
+
+export { escapeHandlebarsMissingMetadata, isAuthMethodSaml, validateOrgSSO };
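To make the new helper's behaviour concrete (metadata values invented): when a templated permission rule references a metadata key the identity does not have, the proxy hands back the original placeholder instead of an empty string, so the rule survives the handlebars pass unchanged now that strict mode is dropped in the permission service below.

```ts
import { escapeHandlebarsMissingMetadata } from "./permission-fns";

const metadata = escapeHandlebarsMissingMetadata({ team: "platform" });

console.log(metadata.team);   // "platform"                     — present keys resolve normally
console.log(metadata.region); // "{{identity.metadata.region}}" — missing keys echo their placeholder back
```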
@ -21,7 +21,7 @@ import { TServiceTokenDALFactory } from "@app/services/service-token/service-tok
|
|||||||
|
|
||||||
import { orgAdminPermissions, orgMemberPermissions, orgNoAccessPermissions, OrgPermissionSet } from "./org-permission";
|
import { orgAdminPermissions, orgMemberPermissions, orgNoAccessPermissions, OrgPermissionSet } from "./org-permission";
|
||||||
import { TPermissionDALFactory } from "./permission-dal";
|
import { TPermissionDALFactory } from "./permission-dal";
|
||||||
import { validateOrgSSO } from "./permission-fns";
|
import { escapeHandlebarsMissingMetadata, validateOrgSSO } from "./permission-fns";
|
||||||
import { TBuildOrgPermissionDTO, TBuildProjectPermissionDTO } from "./permission-service-types";
|
import { TBuildOrgPermissionDTO, TBuildProjectPermissionDTO } from "./permission-service-types";
|
||||||
import {
|
import {
|
||||||
buildServiceTokenProjectPermission,
|
buildServiceTokenProjectPermission,
|
||||||
@ -227,11 +227,13 @@ export const permissionServiceFactory = ({
|
|||||||
})) || [];
|
})) || [];
|
||||||
|
|
||||||
const rules = buildProjectPermissionRules(rolePermissions.concat(additionalPrivileges));
|
const rules = buildProjectPermissionRules(rolePermissions.concat(additionalPrivileges));
|
||||||
const templatedRules = handlebars.compile(JSON.stringify(rules), { data: false, strict: true });
|
const templatedRules = handlebars.compile(JSON.stringify(rules), { data: false });
|
||||||
const metadataKeyValuePair = objectify(
|
const metadataKeyValuePair = escapeHandlebarsMissingMetadata(
|
||||||
userProjectPermission.metadata,
|
objectify(
|
||||||
(i) => i.key,
|
userProjectPermission.metadata,
|
||||||
(i) => i.value
|
(i) => i.key,
|
||||||
|
(i) => i.value
|
||||||
|
)
|
||||||
);
|
);
|
||||||
const interpolateRules = templatedRules(
|
const interpolateRules = templatedRules(
|
||||||
{
|
{
|
||||||
@ -292,12 +294,15 @@ export const permissionServiceFactory = ({
|
|||||||
})) || [];
|
})) || [];
|
||||||
|
|
||||||
const rules = buildProjectPermissionRules(rolePermissions.concat(additionalPrivileges));
|
const rules = buildProjectPermissionRules(rolePermissions.concat(additionalPrivileges));
|
||||||
const templatedRules = handlebars.compile(JSON.stringify(rules), { data: false, strict: true });
|
const templatedRules = handlebars.compile(JSON.stringify(rules), { data: false });
|
||||||
const metadataKeyValuePair = objectify(
|
const metadataKeyValuePair = escapeHandlebarsMissingMetadata(
|
||||||
identityProjectPermission.metadata,
|
objectify(
|
||||||
(i) => i.key,
|
identityProjectPermission.metadata,
|
||||||
(i) => i.value
|
(i) => i.key,
|
||||||
|
(i) => i.value
|
||||||
|
)
|
||||||
);
|
);
|
||||||
|
|
||||||
const interpolateRules = templatedRules(
|
const interpolateRules = templatedRules(
|
||||||
{
|
{
|
||||||
identity: {
|
identity: {
|
||||||
|
@@ -1,14 +1,7 @@
 import picomatch from "picomatch";
 import { z } from "zod";
 
-export enum PermissionConditionOperators {
-  $IN = "$in",
-  $ALL = "$all",
-  $REGEX = "$regex",
-  $EQ = "$eq",
-  $NEQ = "$ne",
-  $GLOB = "$glob"
-}
+import { PermissionConditionOperators } from "@app/lib/casl";
 
 export const PermissionConditionSchema = {
   [PermissionConditionOperators.$IN]: z.string().trim().min(1).array(),
@@ -1,10 +1,10 @@
 import { AbilityBuilder, createMongoAbility, ForcedSubject, MongoAbility } from "@casl/ability";
 import { z } from "zod";
 
-import { conditionsMatcher } from "@app/lib/casl";
+import { conditionsMatcher, PermissionConditionOperators } from "@app/lib/casl";
 import { UnpackedPermissionSchema } from "@app/server/routes/santizedSchemas/permission";
 
-import { PermissionConditionOperators, PermissionConditionSchema } from "./permission-types";
+import { PermissionConditionSchema } from "./permission-types";
 
 export enum ProjectPermissionActions {
   Read = "read",
@@ -54,3 +54,12 @@ export const isAtLeastAsPrivileged = (permissions1: MongoAbility, permissions2:
 
   return set1.size >= set2.size;
 };
+
+export enum PermissionConditionOperators {
+  $IN = "$in",
+  $ALL = "$all",
+  $REGEX = "$regex",
+  $EQ = "$eq",
+  $NEQ = "$ne",
+  $GLOB = "$glob"
+}
@@ -157,6 +157,15 @@ const envSchema = z
   INFISICAL_CLOUD: zodStrBool.default("false"),
   MAINTENANCE_MODE: zodStrBool.default("false"),
   CAPTCHA_SECRET: zpStr(z.string().optional()),
+
+  // TELEMETRY
+  OTEL_TELEMETRY_COLLECTION_ENABLED: zodStrBool.default("false"),
+  OTEL_EXPORT_OTLP_ENDPOINT: zpStr(z.string().optional()),
+  OTEL_OTLP_PUSH_INTERVAL: z.coerce.number().default(30000),
+  OTEL_COLLECTOR_BASIC_AUTH_USERNAME: zpStr(z.string().optional()),
+  OTEL_COLLECTOR_BASIC_AUTH_PASSWORD: zpStr(z.string().optional()),
+  OTEL_EXPORT_TYPE: z.enum(["prometheus", "otlp"]).optional(),
+
   PLAIN_API_KEY: zpStr(z.string().optional()),
   PLAIN_WISH_LABEL_IDS: zpStr(z.string().optional()),
   DISABLE_AUDIT_LOG_GENERATION: zodStrBool.default("false"),
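These variables mirror the .env entries added earlier in this compare. A hedged sketch of the same telemetry slice as a standalone zod schema, useful for checking a deployment's environment before boot; zodStrBool and zpStr are internal helpers, so plain zod equivalents stand in for them here.

import { z } from "zod";

// Illustrative, standalone version of the telemetry slice of the env schema;
// the real schema lives in the backend config module patched above.
const otelEnvSchema = z.object({
  OTEL_TELEMETRY_COLLECTION_ENABLED: z.enum(["true", "false"]).default("false").transform((v) => v === "true"),
  OTEL_EXPORT_TYPE: z.enum(["prometheus", "otlp"]).optional(),
  OTEL_EXPORT_OTLP_ENDPOINT: z.string().optional(),
  OTEL_OTLP_PUSH_INTERVAL: z.coerce.number().default(30000),
  OTEL_COLLECTOR_BASIC_AUTH_USERNAME: z.string().optional(),
  OTEL_COLLECTOR_BASIC_AUTH_PASSWORD: z.string().optional()
});

// e.g. OTEL_TELEMETRY_COLLECTION_ENABLED=true OTEL_EXPORT_TYPE=otlp node check-otel-env.js
const otelCfg = otelEnvSchema.parse(process.env);
console.log(otelCfg.OTEL_TELEMETRY_COLLECTION_ENABLED ? otelCfg.OTEL_EXPORT_TYPE : "telemetry export disabled");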
@@ -203,11 +212,11 @@ let envCfg: Readonly<z.infer<typeof envSchema>>;
 
 export const getConfig = () => envCfg;
 // cannot import singleton logger directly as it needs config to load various transport
-export const initEnvConfig = (logger: Logger) => {
+export const initEnvConfig = (logger?: Logger) => {
   const parsedEnv = envSchema.safeParse(process.env);
   if (!parsedEnv.success) {
-    logger.error("Invalid environment variables. Check the error below");
-    logger.error(parsedEnv.error.issues);
+    (logger ?? console).error("Invalid environment variables. Check the error below");
+    (logger ?? console).error(parsedEnv.error.issues);
     process.exit(-1);
   }
 
@@ -133,3 +133,15 @@ export class ScimRequestError extends Error {
     this.status = status;
   }
 }
+
+export class OidcAuthError extends Error {
+  name: string;
+
+  error: unknown;
+
+  constructor({ name, error, message }: { message?: string; name?: string; error?: unknown }) {
+    super(message || "Something went wrong");
+    this.name = name || "OidcAuthError";
+    this.error = error;
+  }
+}
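A brief, hedged sketch of how a caller might wrap an upstream OIDC failure in the new error class so the error handler (patched further below) returns a structured 500. The discovery helper and URL handling are illustrative assumptions, not part of this change set; only the OidcAuthError constructor shape comes from the hunk above, and the import path is assumed from the error-handler import below.

import { OidcAuthError } from "@app/lib/errors"; // path assumed

// Hypothetical helper: fetch the issuer's discovery document (Node 18+ global fetch).
const discoverOidcIssuer = async (issuerUrl: string) => {
  const res = await fetch(`${issuerUrl}/.well-known/openid-configuration`);
  if (!res.ok) throw new Error(`discovery failed with status ${res.status}`);
  return res.json();
};

export const safeDiscoverOidcIssuer = async (issuerUrl: string) => {
  try {
    return await discoverOidcIssuer(issuerUrl);
  } catch (error) {
    // Surfaces as a 500 with error: "OidcAuthError" once the handler below is in place.
    throw new OidcAuthError({ message: "Failed to load OIDC issuer configuration", error });
  }
};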
backend/src/lib/telemetry/instrumentation.ts (new file, 91 lines)
@@ -0,0 +1,91 @@
+import opentelemetry, { diag, DiagConsoleLogger, DiagLogLevel } from "@opentelemetry/api";
+import { getNodeAutoInstrumentations } from "@opentelemetry/auto-instrumentations-node";
+import { OTLPMetricExporter } from "@opentelemetry/exporter-metrics-otlp-proto";
+import { PrometheusExporter } from "@opentelemetry/exporter-prometheus";
+import { registerInstrumentations } from "@opentelemetry/instrumentation";
+import { Resource } from "@opentelemetry/resources";
+import { AggregationTemporality, MeterProvider, PeriodicExportingMetricReader } from "@opentelemetry/sdk-metrics";
+import { ATTR_SERVICE_NAME, ATTR_SERVICE_VERSION } from "@opentelemetry/semantic-conventions";
+import dotenv from "dotenv";
+
+import { initEnvConfig } from "../config/env";
+
+dotenv.config();
+
+const initTelemetryInstrumentation = ({
+  exportType,
+  otlpURL,
+  otlpUser,
+  otlpPassword,
+  otlpPushInterval
+}: {
+  exportType?: string;
+  otlpURL?: string;
+  otlpUser?: string;
+  otlpPassword?: string;
+  otlpPushInterval?: number;
+}) => {
+  diag.setLogger(new DiagConsoleLogger(), DiagLogLevel.DEBUG);
+
+  const resource = Resource.default().merge(
+    new Resource({
+      [ATTR_SERVICE_NAME]: "infisical-core",
+      [ATTR_SERVICE_VERSION]: "0.1.0"
+    })
+  );
+
+  const metricReaders = [];
+  switch (exportType) {
+    case "prometheus": {
+      const promExporter = new PrometheusExporter();
+      metricReaders.push(promExporter);
+      break;
+    }
+    case "otlp": {
+      const otlpExporter = new OTLPMetricExporter({
+        url: `${otlpURL}/v1/metrics`,
+        headers: {
+          Authorization: `Basic ${btoa(`${otlpUser}:${otlpPassword}`)}`
+        },
+        temporalityPreference: AggregationTemporality.DELTA
+      });
+      metricReaders.push(
+        new PeriodicExportingMetricReader({
+          exporter: otlpExporter,
+          exportIntervalMillis: otlpPushInterval
+        })
+      );
+      break;
+    }
+    default:
+      throw new Error("Invalid OTEL export type");
+  }
+
+  const meterProvider = new MeterProvider({
+    resource,
+    readers: metricReaders
+  });
+
+  opentelemetry.metrics.setGlobalMeterProvider(meterProvider);
+
+  registerInstrumentations({
+    instrumentations: [getNodeAutoInstrumentations()]
+  });
+};
+
+const setupTelemetry = () => {
+  const appCfg = initEnvConfig();
+
+  if (appCfg.OTEL_TELEMETRY_COLLECTION_ENABLED) {
+    console.log("Initializing telemetry instrumentation");
+    initTelemetryInstrumentation({
+      otlpURL: appCfg.OTEL_EXPORT_OTLP_ENDPOINT,
+      otlpUser: appCfg.OTEL_COLLECTOR_BASIC_AUTH_USERNAME,
+      otlpPassword: appCfg.OTEL_COLLECTOR_BASIC_AUTH_PASSWORD,
+      otlpPushInterval: appCfg.OTEL_OTLP_PUSH_INTERVAL,
+      exportType: appCfg.OTEL_EXPORT_TYPE
+    });
+  }
+};
+
+void setupTelemetry();
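Once this entrypoint has installed the global MeterProvider, any module can obtain a meter through the OpenTelemetry API. A minimal sketch of recording an application-level metric; the meter and counter names here are illustrative, not part of this change.

import opentelemetry from "@opentelemetry/api";

const meter = opentelemetry.metrics.getMeter("secret-operations");
const secretReads = meter.createCounter("secret_reads_total", {
  description: "Number of secret read operations"
});

export const recordSecretRead = (projectId: string) => {
  // Exported by whichever reader was configured (Prometheus scrape or periodic OTLP push).
  secretReads.add(1, { projectId });
};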
@@ -1,3 +1,5 @@
+import "./lib/telemetry/instrumentation";
+
 import dotenv from "dotenv";
 import path from "path";
 
@@ -18,6 +20,7 @@ dotenv.config();
 const run = async () => {
   const logger = await initLogger();
   const appCfg = initEnvConfig(logger);
 
   const db = initDbConnection({
     dbConnectionUri: appCfg.DB_CONNECTION_URI,
     dbRootCert: appCfg.DB_ROOT_CERT,
@@ -22,6 +22,7 @@ import { TSmtpService } from "@app/services/smtp/smtp-service";
 
 import { globalRateLimiterCfg } from "./config/rateLimiter";
 import { addErrorsToResponseSchemas } from "./plugins/add-errors-to-response-schemas";
+import { apiMetrics } from "./plugins/api-metrics";
 import { fastifyErrHandler } from "./plugins/error-handler";
 import { registerExternalNextjs } from "./plugins/external-nextjs";
 import { serializerCompiler, validatorCompiler, ZodTypeProvider } from "./plugins/fastify-zod";
@@ -86,6 +87,10 @@ export const main = async ({ db, hsmModule, auditLogDb, smtp, logger, queue, key
   // pull ip based on various proxy headers
   await server.register(fastifyIp);
 
+  if (appCfg.OTEL_TELEMETRY_COLLECTION_ENABLED) {
+    await server.register(apiMetrics);
+  }
+
   await server.register(fastifySwagger);
   await server.register(fastifyFormBody);
   await server.register(fastifyErrHandler);
@@ -46,10 +46,10 @@ export const bootstrapCheck = async ({ db }: BootstrapOpt) => {
   await createTransport(smtpCfg)
     .verify()
     .then(async () => {
-      console.info("SMTP successfully connected");
+      console.info(`SMTP - Verified connection to ${appCfg.SMTP_HOST}:${appCfg.SMTP_PORT}`);
     })
-    .catch((err) => {
-      console.error(`SMTP - Failed to connect to ${appCfg.SMTP_HOST}:${appCfg.SMTP_PORT}`);
+    .catch((err: Error) => {
+      console.error(`SMTP - Failed to connect to ${appCfg.SMTP_HOST}:${appCfg.SMTP_PORT} - ${err.message}`);
       logger.error(err);
     });
 
backend/src/server/plugins/api-metrics.ts (new file, 21 lines)
@@ -0,0 +1,21 @@
+import opentelemetry from "@opentelemetry/api";
+import fp from "fastify-plugin";
+
+export const apiMetrics = fp(async (fastify) => {
+  const apiMeter = opentelemetry.metrics.getMeter("API");
+  const latencyHistogram = apiMeter.createHistogram("API_latency", {
+    unit: "ms"
+  });
+
+  fastify.addHook("onResponse", async (request, reply) => {
+    const { method } = request;
+    const route = request.routerPath;
+    const { statusCode } = reply;
+
+    latencyHistogram.record(reply.elapsedTime, {
+      route,
+      method,
+      statusCode
+    });
+  });
+});
@@ -1,4 +1,4 @@
-import { ForbiddenError } from "@casl/ability";
+import { ForbiddenError, PureAbility } from "@casl/ability";
 import fastifyPlugin from "fastify-plugin";
 import jwt from "jsonwebtoken";
 import { ZodError } from "zod";
@@ -10,6 +10,7 @@ import {
   GatewayTimeoutError,
   InternalServerError,
   NotFoundError,
+  OidcAuthError,
   RateLimitError,
   ScimRequestError,
   UnauthorizedError
@@ -63,7 +64,13 @@ export const fastifyErrHandler = fastifyPlugin(async (server: FastifyZodProvider
       void res.status(HttpStatusCodes.Forbidden).send({
         statusCode: HttpStatusCodes.Forbidden,
         error: "PermissionDenied",
-        message: `You are not allowed to ${error.action} on ${error.subjectType} - ${JSON.stringify(error.subject)}`
+        message: `You are not allowed to ${error.action} on ${error.subjectType}`,
+        details: (error.ability as PureAbility).rulesFor(error.action as string, error.subjectType).map((el) => ({
+          action: el.action,
+          inverted: el.inverted,
+          subject: el.subject,
+          conditions: el.conditions
+        }))
       });
     } else if (error instanceof ForbiddenRequestError) {
       void res.status(HttpStatusCodes.Forbidden).send({
@@ -83,7 +90,10 @@ export const fastifyErrHandler = fastifyPlugin(async (server: FastifyZodProvider
         status: error.status,
         detail: error.detail
       });
-      // Handle JWT errors and make them more human-readable for the end-user.
+    } else if (error instanceof OidcAuthError) {
+      void res
+        .status(HttpStatusCodes.InternalServerError)
+        .send({ statusCode: HttpStatusCodes.InternalServerError, message: error.message, error: error.name });
     } else if (error instanceof jwt.JsonWebTokenError) {
       const message = (() => {
         if (error.message === JWTErrors.JwtExpired) {
@@ -201,6 +201,8 @@ import { getServerCfg, superAdminServiceFactory } from "@app/services/super-admi
 import { telemetryDALFactory } from "@app/services/telemetry/telemetry-dal";
 import { telemetryQueueServiceFactory } from "@app/services/telemetry/telemetry-queue";
 import { telemetryServiceFactory } from "@app/services/telemetry/telemetry-service";
+import { totpConfigDALFactory } from "@app/services/totp/totp-config-dal";
+import { totpServiceFactory } from "@app/services/totp/totp-service";
 import { userDALFactory } from "@app/services/user/user-dal";
 import { userServiceFactory } from "@app/services/user/user-service";
 import { userAliasDALFactory } from "@app/services/user-alias/user-alias-dal";
@@ -348,6 +350,7 @@ export const registerRoutes = async (
   const slackIntegrationDAL = slackIntegrationDALFactory(db);
   const projectSlackConfigDAL = projectSlackConfigDALFactory(db);
   const workflowIntegrationDAL = workflowIntegrationDALFactory(db);
+  const totpConfigDAL = totpConfigDALFactory(db);
 
   const externalGroupOrgRoleMappingDAL = externalGroupOrgRoleMappingDALFactory(db);
 
@@ -511,12 +514,19 @@ export const registerRoutes = async (
     projectMembershipDAL
   });
 
-  const loginService = authLoginServiceFactory({ userDAL, smtpService, tokenService, orgDAL });
+  const totpService = totpServiceFactory({
+    totpConfigDAL,
+    userDAL,
+    kmsService
+  });
+
+  const loginService = authLoginServiceFactory({ userDAL, smtpService, tokenService, orgDAL, totpService });
   const passwordService = authPaswordServiceFactory({
     tokenService,
     smtpService,
     authDAL,
-    userDAL
+    userDAL,
+    totpConfigDAL
   });
 
   const projectBotService = projectBotServiceFactory({ permissionService, projectBotDAL, projectDAL });
@@ -1369,7 +1379,8 @@ export const registerRoutes = async (
     workflowIntegration: workflowIntegrationService,
     migration: migrationService,
     externalGroupOrgRoleMapping: externalGroupOrgRoleMappingService,
-    projectTemplate: projectTemplateService
+    projectTemplate: projectTemplateService,
+    totp: totpService
   });
 
   const cronJobs: CronJob[] = [];
@@ -47,6 +47,7 @@ export const DefaultResponseErrorsSchema = {
   403: z.object({
     statusCode: z.literal(403),
     message: z.string(),
+    details: z.any().optional(),
    error: z.string()
   }),
   500: z.object({
@@ -108,7 +108,8 @@ export const registerAuthRoutes = async (server: FastifyZodProvider) => {
         tokenVersionId: tokenVersion.id,
         accessVersion: tokenVersion.accessVersion,
         organizationId: decodedToken.organizationId,
-        isMfaVerified: decodedToken.isMfaVerified
+        isMfaVerified: decodedToken.isMfaVerified,
+        mfaMethod: decodedToken.mfaMethod
       },
       appCfg.AUTH_SECRET,
       { expiresIn: appCfg.JWT_AUTH_LIFETIME }
@@ -840,4 +840,91 @@ export const registerDashboardRouter = async (server: FastifyZodProvider) => {
       };
     }
   });
+
+  server.route({
+    method: "GET",
+    url: "/secrets-by-keys",
+    config: {
+      rateLimit: secretsLimit
+    },
+    schema: {
+      security: [
+        {
+          bearerAuth: []
+        }
+      ],
+      querystring: z.object({
+        projectId: z.string().trim(),
+        environment: z.string().trim(),
+        secretPath: z.string().trim().default("/").transform(removeTrailingSlash),
+        keys: z.string().trim().transform(decodeURIComponent)
+      }),
+      response: {
+        200: z.object({
+          secrets: secretRawSchema
+            .extend({
+              secretPath: z.string().optional(),
+              tags: SecretTagsSchema.pick({
+                id: true,
+                slug: true,
+                color: true
+              })
+                .extend({ name: z.string() })
+                .array()
+                .optional()
+            })
+            .array()
+            .optional()
+        })
+      }
+    },
+    onRequest: verifyAuth([AuthMode.JWT]),
+    handler: async (req) => {
+      const { secretPath, projectId, environment } = req.query;
+
+      const keys = req.query.keys?.split(",").filter((key) => Boolean(key.trim())) ?? [];
+      if (!keys.length) throw new BadRequestError({ message: "One or more keys required" });
+
+      const { secrets } = await server.services.secret.getSecretsRaw({
+        actorId: req.permission.id,
+        actor: req.permission.type,
+        actorOrgId: req.permission.orgId,
+        environment,
+        actorAuthMethod: req.permission.authMethod,
+        projectId,
+        path: secretPath,
+        keys
+      });
+
+      await server.services.auditLog.createAuditLog({
+        projectId,
+        ...req.auditLogInfo,
+        event: {
+          type: EventType.GET_SECRETS,
+          metadata: {
+            environment,
+            secretPath,
+            numberOfSecrets: secrets.length
+          }
+        }
+      });
+
+      if (getUserAgentType(req.headers["user-agent"]) !== UserAgentType.K8_OPERATOR) {
+        await server.services.telemetry.sendPostHogEvents({
+          event: PostHogEventTypes.SecretPulled,
+          distinctId: getTelemetryDistinctId(req),
+          properties: {
+            numberOfSecrets: secrets.length,
+            workspaceId: projectId,
+            environment,
+            secretPath,
+            channel: getUserAgentType(req.headers["user-agent"]),
+            ...req.auditLogInfo
+          }
+        });
+      }
+
+      return { secrets };
+    }
+  });
 };
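A hedged client-side sketch of calling the new endpoint. The base URL, the /api/v1/dashboard mount prefix, and the response field shape are assumptions about how this router is exposed, not something this diff establishes; the extra encodeURIComponent mirrors the decodeURIComponent transform in the querystring schema above.

const fetchSecretsByKeys = async (accessToken: string) => {
  const query = new URLSearchParams({
    projectId: "<project-id>",
    environment: "dev",
    secretPath: "/",
    // the route decodes the value once more, so pre-encode the comma-separated list
    keys: encodeURIComponent(["DB_PASSWORD", "STRIPE_KEY"].join(","))
  });

  const res = await fetch(`https://app.infisical.com/api/v1/dashboard/secrets-by-keys?${query.toString()}`, {
    headers: { Authorization: `Bearer ${accessToken}` }
  });
  if (!res.ok) throw new Error(`request failed with status ${res.status}`);

  const body = (await res.json()) as { secrets?: Array<Record<string, unknown>> };
  return body.secrets ?? [];
};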
@@ -15,7 +15,7 @@ import { AUDIT_LOGS, ORGANIZATIONS } from "@app/lib/api-docs";
 import { getLastMidnightDateISO } from "@app/lib/fn";
 import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
 import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
-import { ActorType, AuthMode } from "@app/services/auth/auth-type";
+import { ActorType, AuthMode, MfaMethod } from "@app/services/auth/auth-type";
 
 import { integrationAuthPubSchema } from "../sanitizedSchemas";
 
@@ -259,7 +259,8 @@ export const registerOrgRouter = async (server: FastifyZodProvider) => {
           message: "Membership role must be a valid slug"
         })
         .optional(),
-      enforceMfa: z.boolean().optional()
+      enforceMfa: z.boolean().optional(),
+      selectedMfaMethod: z.nativeEnum(MfaMethod).optional()
     }),
     response: {
       200: z.object({
@@ -169,4 +169,103 @@ export const registerUserRouter = async (server: FastifyZodProvider) => {
       return groupMemberships;
     }
   });
+
+  server.route({
+    method: "GET",
+    url: "/me/totp",
+    config: {
+      rateLimit: readLimit
+    },
+    schema: {
+      response: {
+        200: z.object({
+          isVerified: z.boolean(),
+          recoveryCodes: z.string().array()
+        })
+      }
+    },
+    onRequest: verifyAuth([AuthMode.JWT]),
+    handler: async (req) => {
+      return server.services.totp.getUserTotpConfig({
+        userId: req.permission.id
+      });
+    }
+  });
+
+  server.route({
+    method: "DELETE",
+    url: "/me/totp",
+    config: {
+      rateLimit: writeLimit
+    },
+    onRequest: verifyAuth([AuthMode.JWT]),
+    handler: async (req) => {
+      return server.services.totp.deleteUserTotpConfig({
+        userId: req.permission.id
+      });
+    }
+  });
+
+  server.route({
+    method: "POST",
+    url: "/me/totp/register",
+    config: {
+      rateLimit: writeLimit
+    },
+    schema: {
+      response: {
+        200: z.object({
+          otpUrl: z.string(),
+          recoveryCodes: z.string().array()
+        })
+      }
+    },
+    onRequest: verifyAuth([AuthMode.JWT], {
+      requireOrg: false
+    }),
+    handler: async (req) => {
+      return server.services.totp.registerUserTotp({
+        userId: req.permission.id
+      });
+    }
+  });
+
+  server.route({
+    method: "POST",
+    url: "/me/totp/verify",
+    config: {
+      rateLimit: writeLimit
+    },
+    schema: {
+      body: z.object({
+        totp: z.string()
+      }),
+      response: {
+        200: z.object({})
+      }
+    },
+    onRequest: verifyAuth([AuthMode.JWT], {
+      requireOrg: false
+    }),
+    handler: async (req) => {
+      return server.services.totp.verifyUserTotpConfig({
+        userId: req.permission.id,
+        totp: req.body.totp
+      });
+    }
+  });
+
+  server.route({
+    method: "POST",
+    url: "/me/totp/recovery-codes",
+    config: {
+      rateLimit: writeLimit
+    },
+    onRequest: verifyAuth([AuthMode.JWT]),
+    handler: async (req) => {
+      return server.services.totp.createUserTotpRecoveryCodes({
+        userId: req.permission.id
+      });
+    }
+  });
 };
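A hedged sketch of the enrollment flow these routes enable, driven from a client holding a user JWT; the /api/v1/user mount prefix and response handling are assumptions, only the route paths and schemas come from the hunk above.

const enrollTotp = async (baseUrl: string, jwt: string, codeFromAuthenticator: () => Promise<string>) => {
  const headers = { Authorization: `Bearer ${jwt}`, "Content-Type": "application/json" };

  // 1. Register: returns an otpauth:// URL (render it as a QR code) plus recovery codes.
  const registerRes = await fetch(`${baseUrl}/api/v1/user/me/totp/register`, { method: "POST", headers });
  const { otpUrl, recoveryCodes } = (await registerRes.json()) as { otpUrl: string; recoveryCodes: string[] };
  console.log("Scan this in an authenticator app:", otpUrl);
  console.log("Store these recovery codes securely:", recoveryCodes);

  // 2. Verify: submit the 6-digit code produced by the authenticator app.
  const totp = await codeFromAuthenticator();
  await fetch(`${baseUrl}/api/v1/user/me/totp/verify`, {
    method: "POST",
    headers,
    body: JSON.stringify({ totp })
  });
};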
@@ -2,8 +2,9 @@ import jwt from "jsonwebtoken";
 import { z } from "zod";
 
 import { getConfig } from "@app/lib/config/env";
+import { BadRequestError, NotFoundError } from "@app/lib/errors";
 import { mfaRateLimit } from "@app/server/config/rateLimiter";
-import { AuthModeMfaJwtTokenPayload, AuthTokenType } from "@app/services/auth/auth-type";
+import { AuthModeMfaJwtTokenPayload, AuthTokenType, MfaMethod } from "@app/services/auth/auth-type";
 
 export const registerMfaRouter = async (server: FastifyZodProvider) => {
   const cfg = getConfig();
@@ -49,6 +50,38 @@ export const registerMfaRouter = async (server: FastifyZodProvider) => {
     }
   });
 
+  server.route({
+    method: "GET",
+    url: "/mfa/check/totp",
+    config: {
+      rateLimit: mfaRateLimit
+    },
+    schema: {
+      response: {
+        200: z.object({
+          isVerified: z.boolean()
+        })
+      }
+    },
+    handler: async (req) => {
+      try {
+        const totpConfig = await server.services.totp.getUserTotpConfig({
+          userId: req.mfa.userId
+        });
+
+        return {
+          isVerified: Boolean(totpConfig)
+        };
+      } catch (error) {
+        if (error instanceof NotFoundError || error instanceof BadRequestError) {
+          return { isVerified: false };
+        }
+
+        throw error;
+      }
+    }
+  });
+
   server.route({
     url: "/mfa/verify",
     method: "POST",
|
|||||||
},
|
},
|
||||||
schema: {
|
schema: {
|
||||||
body: z.object({
|
body: z.object({
|
||||||
mfaToken: z.string().trim()
|
mfaToken: z.string().trim(),
|
||||||
|
mfaMethod: z.nativeEnum(MfaMethod).optional().default(MfaMethod.EMAIL)
|
||||||
}),
|
}),
|
||||||
response: {
|
response: {
|
||||||
200: z.object({
|
200: z.object({
|
||||||
@ -86,7 +120,8 @@ export const registerMfaRouter = async (server: FastifyZodProvider) => {
|
|||||||
ip: req.realIp,
|
ip: req.realIp,
|
||||||
userId: req.mfa.userId,
|
userId: req.mfa.userId,
|
||||||
orgId: req.mfa.orgId,
|
orgId: req.mfa.orgId,
|
||||||
mfaToken: req.body.mfaToken
|
mfaToken: req.body.mfaToken,
|
||||||
|
mfaMethod: req.body.mfaMethod
|
||||||
});
|
});
|
||||||
|
|
||||||
void res.setCookie("jid", token.refresh, {
|
void res.setCookie("jid", token.refresh, {
|
||||||
|
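A hedged sketch of completing the challenge with the extended body: the short-lived mfa JWT returned by the earlier login/select-organization step is used as the bearer credential, and the body now carries which method the submitted code belongs to. The mount point of this router is an assumption, so it is left as a parameter.

const completeMfaChallenge = async (mfaBaseUrl: string, mfaJwt: string, code: string, method: "email" | "totp") => {
  // mfaBaseUrl should point at wherever registerMfaRouter is mounted in your deployment.
  const res = await fetch(`${mfaBaseUrl}/mfa/verify`, {
    method: "POST",
    headers: { Authorization: `Bearer ${mfaJwt}`, "Content-Type": "application/json" },
    body: JSON.stringify({ mfaToken: code, mfaMethod: method })
  });
  if (!res.ok) throw new Error(`MFA verification failed with status ${res.status}`);
  return res.json(); // the full access token; the refresh token is set as the "jid" cookie
};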
@@ -27,7 +27,7 @@ export const registerProjectMembershipRouter = async (server: FastifyZodProvider
       body: z.object({
         emails: z.string().email().array().default([]).describe(PROJECT_USERS.INVITE_MEMBER.emails),
         usernames: z.string().array().default([]).describe(PROJECT_USERS.INVITE_MEMBER.usernames),
-        roleSlugs: z.string().array().optional().describe(PROJECT_USERS.INVITE_MEMBER.roleSlugs)
+        roleSlugs: z.string().array().min(1).optional().describe(PROJECT_USERS.INVITE_MEMBER.roleSlugs)
       }),
       response: {
         200: z.object({
@@ -49,7 +49,7 @@ export const registerProjectMembershipRouter = async (server: FastifyZodProvider
         projects: [
           {
             id: req.params.projectId,
-            projectRoleSlug: [ProjectMembershipRole.Member]
+            projectRoleSlug: req.body.roleSlugs || [ProjectMembershipRole.Member]
           }
         ]
       });
@@ -4,7 +4,7 @@ import { AuthTokenSessionsSchema, OrganizationsSchema, UserEncryptionKeysSchema,
 import { ApiKeysSchema } from "@app/db/schemas/api-keys";
 import { authRateLimit, readLimit, writeLimit } from "@app/server/config/rateLimiter";
 import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
-import { AuthMethod, AuthMode } from "@app/services/auth/auth-type";
+import { AuthMethod, AuthMode, MfaMethod } from "@app/services/auth/auth-type";
 
 export const registerUserRouter = async (server: FastifyZodProvider) => {
   server.route({
@@ -56,7 +56,8 @@ export const registerUserRouter = async (server: FastifyZodProvider) => {
     },
     schema: {
       body: z.object({
-        isMfaEnabled: z.boolean()
+        isMfaEnabled: z.boolean().optional(),
+        selectedMfaMethod: z.nativeEnum(MfaMethod).optional()
       }),
       response: {
         200: z.object({
@@ -66,7 +67,12 @@ export const registerUserRouter = async (server: FastifyZodProvider) => {
     },
     preHandler: verifyAuth([AuthMode.JWT, AuthMode.API_KEY]),
     handler: async (req) => {
-      const user = await server.services.user.toggleUserMfa(req.permission.id, req.body.isMfaEnabled);
+      const user = await server.services.user.updateUserMfa({
+        userId: req.permission.id,
+        isMfaEnabled: req.body.isMfaEnabled,
+        selectedMfaMethod: req.body.selectedMfaMethod
+      });
+
       return { user };
     }
   });
@@ -48,7 +48,8 @@ export const registerLoginRouter = async (server: FastifyZodProvider) => {
       response: {
         200: z.object({
           token: z.string(),
-          isMfaEnabled: z.boolean()
+          isMfaEnabled: z.boolean(),
+          mfaMethod: z.string().optional()
         })
       }
     },
@@ -64,7 +65,8 @@ export const registerLoginRouter = async (server: FastifyZodProvider) => {
       if (tokens.isMfaEnabled) {
         return {
           token: tokens.mfa as string,
-          isMfaEnabled: true
+          isMfaEnabled: true,
+          mfaMethod: tokens.mfaMethod
         };
       }
 
@@ -17,6 +17,7 @@ import { TokenType } from "../auth-token/auth-token-types";
 import { TOrgDALFactory } from "../org/org-dal";
 import { SmtpTemplates, TSmtpService } from "../smtp/smtp-service";
 import { LoginMethod } from "../super-admin/super-admin-types";
+import { TTotpServiceFactory } from "../totp/totp-service";
 import { TUserDALFactory } from "../user/user-dal";
 import { enforceUserLockStatus, validateProviderAuthToken } from "./auth-fns";
 import {
@@ -26,13 +27,14 @@ import {
   TOauthTokenExchangeDTO,
   TVerifyMfaTokenDTO
 } from "./auth-login-type";
-import { AuthMethod, AuthModeJwtTokenPayload, AuthModeMfaJwtTokenPayload, AuthTokenType } from "./auth-type";
+import { AuthMethod, AuthModeJwtTokenPayload, AuthModeMfaJwtTokenPayload, AuthTokenType, MfaMethod } from "./auth-type";
 
 type TAuthLoginServiceFactoryDep = {
   userDAL: TUserDALFactory;
   orgDAL: TOrgDALFactory;
   tokenService: TAuthTokenServiceFactory;
   smtpService: TSmtpService;
+  totpService: Pick<TTotpServiceFactory, "verifyUserTotp" | "verifyWithUserRecoveryCode">;
 };
 
 export type TAuthLoginFactory = ReturnType<typeof authLoginServiceFactory>;
@@ -40,7 +42,8 @@ export const authLoginServiceFactory = ({
   userDAL,
   tokenService,
   smtpService,
-  orgDAL
+  orgDAL,
+  totpService
 }: TAuthLoginServiceFactoryDep) => {
   /*
    * Private
@@ -100,7 +103,8 @@ export const authLoginServiceFactory = ({
     userAgent,
     organizationId,
     authMethod,
-    isMfaVerified
+    isMfaVerified,
+    mfaMethod
   }: {
     user: TUsers;
     ip: string;
@@ -108,6 +112,7 @@ export const authLoginServiceFactory = ({
     organizationId?: string;
     authMethod: AuthMethod;
     isMfaVerified?: boolean;
+    mfaMethod?: MfaMethod;
   }) => {
     const cfg = getConfig();
     await updateUserDeviceSession(user, ip, userAgent);
@@ -126,7 +131,8 @@ export const authLoginServiceFactory = ({
         tokenVersionId: tokenSession.id,
         accessVersion: tokenSession.accessVersion,
         organizationId,
-        isMfaVerified
+        isMfaVerified,
+        mfaMethod
       },
       cfg.AUTH_SECRET,
       { expiresIn: cfg.JWT_AUTH_LIFETIME }
@@ -140,7 +146,8 @@ export const authLoginServiceFactory = ({
         tokenVersionId: tokenSession.id,
         refreshVersion: tokenSession.refreshVersion,
         organizationId,
-        isMfaVerified
+        isMfaVerified,
+        mfaMethod
       },
       cfg.AUTH_SECRET,
       { expiresIn: cfg.JWT_REFRESH_LIFETIME }
@@ -353,8 +360,12 @@ export const authLoginServiceFactory = ({
       });
     }
 
-    // send multi factor auth token if they it enabled
-    if ((selectedOrg.enforceMfa || user.isMfaEnabled) && user.email && !decodedToken.isMfaVerified) {
+    const shouldCheckMfa = selectedOrg.enforceMfa || user.isMfaEnabled;
+    const orgMfaMethod = selectedOrg.enforceMfa ? selectedOrg.selectedMfaMethod ?? MfaMethod.EMAIL : undefined;
+    const userMfaMethod = user.isMfaEnabled ? user.selectedMfaMethod ?? MfaMethod.EMAIL : undefined;
+    const mfaMethod = orgMfaMethod ?? userMfaMethod;
+
+    if (shouldCheckMfa && (!decodedToken.isMfaVerified || decodedToken.mfaMethod !== mfaMethod)) {
       enforceUserLockStatus(Boolean(user.isLocked), user.temporaryLockDateEnd);
 
       const mfaToken = jwt.sign(
@@ -369,12 +380,14 @@ export const authLoginServiceFactory = ({
         }
       );
 
-      await sendUserMfaCode({
-        userId: user.id,
-        email: user.email
-      });
+      if (mfaMethod === MfaMethod.EMAIL && user.email) {
+        await sendUserMfaCode({
+          userId: user.id,
+          email: user.email
+        });
+      }
 
-      return { isMfaEnabled: true, mfa: mfaToken } as const;
+      return { isMfaEnabled: true, mfa: mfaToken, mfaMethod } as const;
     }
 
     const tokens = await generateUserTokens({
@@ -383,7 +396,8 @@ export const authLoginServiceFactory = ({
       userAgent,
       ip: ipAddress,
       organizationId,
-      isMfaVerified: decodedToken.isMfaVerified
+      isMfaVerified: decodedToken.isMfaVerified,
+      mfaMethod: decodedToken.mfaMethod
    });
 
     return {
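Restated as a small standalone function, the selection rule introduced above: an org-enforced MFA method takes precedence over the user's own preference, and both fall back to email when no explicit method is stored. This is a sketch for readability only; the enum mirrors the one added to auth-type further below.

enum MfaMethod {
  EMAIL = "email",
  TOTP = "totp"
}

const resolveMfaMethod = (
  org: { enforceMfa: boolean; selectedMfaMethod?: MfaMethod },
  user: { isMfaEnabled: boolean; selectedMfaMethod?: MfaMethod }
): MfaMethod | undefined => {
  const orgMfaMethod = org.enforceMfa ? org.selectedMfaMethod ?? MfaMethod.EMAIL : undefined;
  const userMfaMethod = user.isMfaEnabled ? user.selectedMfaMethod ?? MfaMethod.EMAIL : undefined;
  return orgMfaMethod ?? userMfaMethod;
};

// e.g. the org enforces TOTP while the user prefers email -> TOTP wins
console.log(resolveMfaMethod({ enforceMfa: true, selectedMfaMethod: MfaMethod.TOTP }, { isMfaEnabled: true }));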
@@ -458,17 +472,39 @@ export const authLoginServiceFactory = ({
    * Multi factor authentication verification of code
    * Third step of login in which user completes with mfa
    * */
-  const verifyMfaToken = async ({ userId, mfaToken, mfaJwtToken, ip, userAgent, orgId }: TVerifyMfaTokenDTO) => {
+  const verifyMfaToken = async ({
+    userId,
+    mfaToken,
+    mfaMethod,
+    mfaJwtToken,
+    ip,
+    userAgent,
+    orgId
+  }: TVerifyMfaTokenDTO) => {
     const appCfg = getConfig();
     const user = await userDAL.findById(userId);
     enforceUserLockStatus(Boolean(user.isLocked), user.temporaryLockDateEnd);
 
     try {
-      await tokenService.validateTokenForUser({
-        type: TokenType.TOKEN_EMAIL_MFA,
-        userId,
-        code: mfaToken
-      });
+      if (mfaMethod === MfaMethod.EMAIL) {
+        await tokenService.validateTokenForUser({
+          type: TokenType.TOKEN_EMAIL_MFA,
+          userId,
+          code: mfaToken
+        });
+      } else if (mfaMethod === MfaMethod.TOTP) {
+        if (mfaToken.length === 6) {
+          await totpService.verifyUserTotp({
+            userId,
+            totp: mfaToken
+          });
+        } else {
+          await totpService.verifyWithUserRecoveryCode({
+            userId,
+            recoveryCode: mfaToken
+          });
+        }
+      }
     } catch (err) {
       const updatedUser = await processFailedMfaAttempt(userId);
       if (updatedUser.isLocked) {
@@ -513,7 +549,8 @@ export const authLoginServiceFactory = ({
       userAgent,
       organizationId: orgId,
       authMethod: decodedToken.authMethod,
-      isMfaVerified: true
+      isMfaVerified: true,
+      mfaMethod
     });
 
     return { token, user: userEnc };
@@ -1,4 +1,4 @@
-import { AuthMethod } from "./auth-type";
+import { AuthMethod, MfaMethod } from "./auth-type";
 
 export type TLoginGenServerPublicKeyDTO = {
   email: string;
@@ -19,6 +19,7 @@ export type TLoginClientProofDTO = {
 export type TVerifyMfaTokenDTO = {
   userId: string;
   mfaToken: string;
+  mfaMethod: MfaMethod;
   mfaJwtToken: string;
   ip: string;
   userAgent: string;
@@ -8,6 +8,7 @@ import { generateSrpServerKey, srpCheckClientProof } from "@app/lib/crypto";
 import { TAuthTokenServiceFactory } from "../auth-token/auth-token-service";
 import { TokenType } from "../auth-token/auth-token-types";
 import { SmtpTemplates, TSmtpService } from "../smtp/smtp-service";
+import { TTotpConfigDALFactory } from "../totp/totp-config-dal";
 import { TUserDALFactory } from "../user/user-dal";
 import { TAuthDALFactory } from "./auth-dal";
 import { TChangePasswordDTO, TCreateBackupPrivateKeyDTO, TResetPasswordViaBackupKeyDTO } from "./auth-password-type";
@@ -18,6 +19,7 @@ type TAuthPasswordServiceFactoryDep = {
   userDAL: TUserDALFactory;
   tokenService: TAuthTokenServiceFactory;
   smtpService: TSmtpService;
+  totpConfigDAL: Pick<TTotpConfigDALFactory, "delete">;
 };
 
 export type TAuthPasswordFactory = ReturnType<typeof authPaswordServiceFactory>;
@@ -25,7 +27,8 @@ export const authPaswordServiceFactory = ({
   authDAL,
   userDAL,
   tokenService,
-  smtpService
+  smtpService,
+  totpConfigDAL
 }: TAuthPasswordServiceFactoryDep) => {
   /*
    * Pre setup for pass change with srp protocol
@@ -185,6 +188,12 @@ export const authPaswordServiceFactory = ({
       temporaryLockDateEnd: null,
       consecutiveFailedMfaAttempts: 0
     });
+
+    /* we reset the mobile authenticator configs of the user
+    because we want this to be one of the recovery modes from account lockout */
+    await totpConfigDAL.delete({
+      userId
+    });
   };
 
   /*
@@ -53,6 +53,7 @@ export type AuthModeJwtTokenPayload = {
   accessVersion: number;
   organizationId?: string;
   isMfaVerified?: boolean;
+  mfaMethod?: MfaMethod;
 };
 
 export type AuthModeMfaJwtTokenPayload = {
@@ -71,6 +72,7 @@ export type AuthModeRefreshJwtTokenPayload = {
   refreshVersion: number;
   organizationId?: string;
   isMfaVerified?: boolean;
+  mfaMethod?: MfaMethod;
 };
 
 export type AuthModeProviderJwtTokenPayload = {
@@ -85,3 +87,8 @@ export type AuthModeProviderSignUpTokenPayload = {
   authTokenType: AuthTokenType.SIGNUP_TOKEN;
   userId: string;
 };
+
+export enum MfaMethod {
+  EMAIL = "email",
+  TOTP = "totp"
+}
@@ -29,7 +29,7 @@ import {
 } from "./identity-aws-auth-types";
 
 type TIdentityAwsAuthServiceFactoryDep = {
-  identityAccessTokenDAL: Pick<TIdentityAccessTokenDALFactory, "create">;
+  identityAccessTokenDAL: Pick<TIdentityAccessTokenDALFactory, "create" | "delete">;
   identityAwsAuthDAL: Pick<TIdentityAwsAuthDALFactory, "findOne" | "transaction" | "create" | "updateById" | "delete">;
   identityOrgMembershipDAL: Pick<TIdentityOrgDALFactory, "findOne">;
   licenseService: Pick<TLicenseServiceFactory, "getPlan">;
@@ -346,6 +346,8 @@ export const identityAwsAuthServiceFactory = ({
 
     const revokedIdentityAwsAuth = await identityAwsAuthDAL.transaction(async (tx) => {
       const deletedAwsAuth = await identityAwsAuthDAL.delete({ identityId }, tx);
+      await identityAccessTokenDAL.delete({ identityId, authMethod: IdentityAuthMethod.AWS_AUTH }, tx);
+
       return { ...deletedAwsAuth?.[0], orgId: identityMembershipOrg.orgId };
     });
     return revokedIdentityAwsAuth;
@@ -30,7 +30,7 @@ type TIdentityAzureAuthServiceFactoryDep = {
     "findOne" | "transaction" | "create" | "updateById" | "delete"
   >;
   identityOrgMembershipDAL: Pick<TIdentityOrgDALFactory, "findOne">;
-  identityAccessTokenDAL: Pick<TIdentityAccessTokenDALFactory, "create">;
+  identityAccessTokenDAL: Pick<TIdentityAccessTokenDALFactory, "create" | "delete">;
   permissionService: Pick<TPermissionServiceFactory, "getOrgPermission">;
   licenseService: Pick<TLicenseServiceFactory, "getPlan">;
 };
@@ -70,7 +70,9 @@ export const identityAzureAuthServiceFactory = ({
       .map((servicePrincipalId) => servicePrincipalId.trim())
       .some((servicePrincipalId) => servicePrincipalId === azureIdentity.oid);
 
-    if (!isServicePrincipalAllowed) throw new UnauthorizedError({ message: "Service principal not allowed" });
+    if (!isServicePrincipalAllowed) {
+      throw new UnauthorizedError({ message: `Service principal '${azureIdentity.oid}' not allowed` });
+    }
   }
 
   const identityAccessToken = await identityAzureAuthDAL.transaction(async (tx) => {
@@ -317,6 +319,8 @@ export const identityAzureAuthServiceFactory = ({
 
     const revokedIdentityAzureAuth = await identityAzureAuthDAL.transaction(async (tx) => {
       const deletedAzureAuth = await identityAzureAuthDAL.delete({ identityId }, tx);
+      await identityAccessTokenDAL.delete({ identityId, authMethod: IdentityAuthMethod.AZURE_AUTH }, tx);
+
       return { ...deletedAzureAuth?.[0], orgId: identityMembershipOrg.orgId };
     });
     return revokedIdentityAzureAuth;
@@ -28,7 +28,7 @@ import {
 type TIdentityGcpAuthServiceFactoryDep = {
   identityGcpAuthDAL: Pick<TIdentityGcpAuthDALFactory, "findOne" | "transaction" | "create" | "updateById" | "delete">;
   identityOrgMembershipDAL: Pick<TIdentityOrgDALFactory, "findOne">;
-  identityAccessTokenDAL: Pick<TIdentityAccessTokenDALFactory, "create">;
+  identityAccessTokenDAL: Pick<TIdentityAccessTokenDALFactory, "create" | "delete">;
   permissionService: Pick<TPermissionServiceFactory, "getOrgPermission">;
   licenseService: Pick<TLicenseServiceFactory, "getPlan">;
 };
@@ -365,6 +365,8 @@ export const identityGcpAuthServiceFactory = ({
 
     const revokedIdentityGcpAuth = await identityGcpAuthDAL.transaction(async (tx) => {
       const deletedGcpAuth = await identityGcpAuthDAL.delete({ identityId }, tx);
+      await identityAccessTokenDAL.delete({ identityId, authMethod: IdentityAuthMethod.GCP_AUTH }, tx);
+
       return { ...deletedGcpAuth?.[0], orgId: identityMembershipOrg.orgId };
     });
     return revokedIdentityGcpAuth;
@@ -41,7 +41,7 @@ type TIdentityKubernetesAuthServiceFactoryDep = {
     TIdentityKubernetesAuthDALFactory,
     "create" | "findOne" | "transaction" | "updateById" | "delete"
   >;
-  identityAccessTokenDAL: Pick<TIdentityAccessTokenDALFactory, "create">;
+  identityAccessTokenDAL: Pick<TIdentityAccessTokenDALFactory, "create" | "delete">;
   identityOrgMembershipDAL: Pick<TIdentityOrgDALFactory, "findOne" | "findById">;
   orgBotDAL: Pick<TOrgBotDALFactory, "findOne" | "transaction" | "create">;
   permissionService: Pick<TPermissionServiceFactory, "getOrgPermission">;
@@ -622,6 +622,7 @@ export const identityKubernetesAuthServiceFactory = ({
 
     const revokedIdentityKubernetesAuth = await identityKubernetesAuthDAL.transaction(async (tx) => {
       const deletedKubernetesAuth = await identityKubernetesAuthDAL.delete({ identityId }, tx);
+      await identityAccessTokenDAL.delete({ identityId, authMethod: IdentityAuthMethod.KUBERNETES_AUTH }, tx);
       return { ...deletedKubernetesAuth?.[0], orgId: identityMembershipOrg.orgId };
     });
     return revokedIdentityKubernetesAuth;
@@ -39,7 +39,7 @@ import {
 type TIdentityOidcAuthServiceFactoryDep = {
   identityOidcAuthDAL: TIdentityOidcAuthDALFactory;
   identityOrgMembershipDAL: Pick<TIdentityOrgDALFactory, "findOne">;
-  identityAccessTokenDAL: Pick<TIdentityAccessTokenDALFactory, "create">;
+  identityAccessTokenDAL: Pick<TIdentityAccessTokenDALFactory, "create" | "delete">;
   permissionService: Pick<TPermissionServiceFactory, "getOrgPermission">;
   licenseService: Pick<TLicenseServiceFactory, "getPlan">;
   orgBotDAL: Pick<TOrgBotDALFactory, "findOne" | "transaction" | "create">;
@@ -539,6 +539,8 @@ export const identityOidcAuthServiceFactory = ({
 
     const revokedIdentityOidcAuth = await identityOidcAuthDAL.transaction(async (tx) => {
       const deletedOidcAuth = await identityOidcAuthDAL.delete({ identityId }, tx);
+      await identityAccessTokenDAL.delete({ identityId, authMethod: IdentityAuthMethod.OIDC_AUTH }, tx);
+
       return { ...deletedOidcAuth?.[0], orgId: identityMembershipOrg.orgId };
     });
 
@@ -182,7 +182,12 @@ export const identityProjectServiceFactory = ({
    // validate custom roles input
    const customInputRoles = roles.filter(
-     ({ role }) => !Object.values(ProjectMembershipRole).includes(role as ProjectMembershipRole)
+     ({ role }) =>
+       !Object.values(ProjectMembershipRole)
+         // we don't want to include custom in this check;
+         // this unintentionally enables setting slug to custom which is reserved
+         .filter((r) => r !== ProjectMembershipRole.Custom)
+         .includes(role as ProjectMembershipRole)
    );
    const hasCustomRole = Boolean(customInputRoles.length);
    const customRoles = hasCustomRole
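A note on the custom-role hunk above: the `ProjectMembershipRole` enum contains the built-in role slugs plus the reserved value `custom`. Filtering `custom` out of the built-in check means a request that names the reserved slug is now treated as a custom-role slug and validated against the project's actual custom roles instead of slipping through as a built-in role. A minimal sketch of that predicate, with an abbreviated enum that is illustrative rather than the project's full definition:

```ts
// Sketch only: mirrors the filter in the hunk above with an abbreviated enum.
enum ProjectMembershipRole {
  Admin = "admin",
  Member = "member",
  Custom = "custom"
}

const isBuiltInRole = (role: string) =>
  Object.values(ProjectMembershipRole)
    .filter((r) => r !== ProjectMembershipRole.Custom) // "custom" is reserved, never directly assignable
    .includes(role as ProjectMembershipRole);

console.log(isBuiltInRole("member")); // true
console.log(isBuiltInRole("custom")); // false -> falls through to the custom-role lookup
```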
@@ -385,8 +385,8 @@ export const identityTokenAuthServiceFactory = ({
    actorOrgId
  }: TUpdateTokenAuthTokenDTO) => {
    const foundToken = await identityAccessTokenDAL.findOne({
-     id: tokenId,
-     authMethod: IdentityAuthMethod.TOKEN_AUTH
+     [`${TableName.IdentityAccessToken}.id` as "id"]: tokenId,
+     [`${TableName.IdentityAccessToken}.authMethod` as "authMethod"]: IdentityAuthMethod.TOKEN_AUTH
    });
    if (!foundToken) throw new NotFoundError({ message: `Token with ID ${tokenId} not found` });

@@ -444,8 +444,8 @@ export const identityTokenAuthServiceFactory = ({
  }: TRevokeTokenAuthTokenDTO) => {
    const identityAccessToken = await identityAccessTokenDAL.findOne({
      [`${TableName.IdentityAccessToken}.id` as "id"]: tokenId,
-     isAccessTokenRevoked: false,
-     authMethod: IdentityAuthMethod.TOKEN_AUTH
+     [`${TableName.IdentityAccessToken}.isAccessTokenRevoked` as "isAccessTokenRevoked"]: false,
+     [`${TableName.IdentityAccessToken}.authMethod` as "authMethod"]: IdentityAuthMethod.TOKEN_AUTH
    });
    if (!identityAccessToken)
      throw new NotFoundError({
@@ -3075,7 +3075,7 @@ const syncSecretsTerraformCloud = async ({
  }) => {
    // get secrets from Terraform Cloud
    const terraformSecrets = (
-     await request.get<{ data: { attributes: { key: string; value: string }; id: string }[] }>(
+     await request.get<{ data: { attributes: { key: string; value: string; sensitive: boolean }; id: string }[] }>(
        `${IntegrationUrls.TERRAFORM_CLOUD_API_URL}/api/v2/workspaces/${integration.appId}/vars`,
        {
          headers: {
@@ -3089,7 +3089,7 @@ const syncSecretsTerraformCloud = async ({
        ...obj,
        [secret.attributes.key]: secret
      }),
-     {} as Record<string, { attributes: { key: string; value: string }; id: string }>
+     {} as Record<string, { attributes: { key: string; value: string; sensitive: boolean }; id: string }>
    );

    const secretsToAdd: { [key: string]: string } = {};
@@ -3170,7 +3170,8 @@ const syncSecretsTerraformCloud = async ({
            attributes: {
              key,
              value: secrets[key]?.value,
-             category: integration.targetService
+             category: integration.targetService,
+             sensitive: true
            }
          }
        },
@@ -3183,7 +3184,11 @@ const syncSecretsTerraformCloud = async ({
        }
      );
    // case: secret exists in Terraform Cloud
-   } else if (secrets[key]?.value !== terraformSecrets[key].attributes.value) {
+   } else if (
+     // we now set secrets to sensitive in Terraform Cloud, this checks if existing secrets are not sensitive and updates them accordingly
+     !terraformSecrets[key].attributes.sensitive ||
+     secrets[key]?.value !== terraformSecrets[key].attributes.value
+   ) {
      // -> update secret
      await request.patch(
        `${IntegrationUrls.TERRAFORM_CLOUD_API_URL}/api/v2/workspaces/${integration.appId}/vars/${terraformSecrets[key].id}`,
@@ -3193,7 +3198,8 @@ const syncSecretsTerraformCloud = async ({
          id: terraformSecrets[key].id,
          attributes: {
            ...terraformSecrets[key],
-           value: secrets[key]?.value
+           value: secrets[key]?.value,
+           sensitive: true
          }
        }
      },
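The Terraform Cloud hunks above do two things: newly created workspace variables are now flagged `sensitive: true`, and the update path also fires when an existing variable is still stored as non-sensitive, not only when its value has drifted. A hedged sketch of the update predicate; the standalone type and function names are illustrative, the field names follow the diff:

```ts
// Illustrative predicate matching the condition added in the hunk above.
type TfcVar = { attributes: { key: string; value: string; sensitive: boolean }; id: string };

const needsUpdate = (desiredValue: string | undefined, existing: TfcVar) =>
  // upgrade plain variables to sensitive even when the value itself is unchanged
  !existing.attributes.sensitive || desiredValue !== existing.attributes.value;
```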
@@ -268,7 +268,7 @@ export const orgServiceFactory = ({
    actorOrgId,
    actorAuthMethod,
    orgId,
-   data: { name, slug, authEnforced, scimEnabled, defaultMembershipRoleSlug, enforceMfa }
+   data: { name, slug, authEnforced, scimEnabled, defaultMembershipRoleSlug, enforceMfa, selectedMfaMethod }
  }: TUpdateOrgDTO) => {
    const appCfg = getConfig();
    const { permission } = await permissionService.getOrgPermission(actor, actorId, orgId, actorAuthMethod, actorOrgId);
@@ -333,7 +333,8 @@ export const orgServiceFactory = ({
      authEnforced,
      scimEnabled,
      defaultMembershipRole,
-     enforceMfa
+     enforceMfa,
+     selectedMfaMethod
    });
    if (!org) throw new NotFoundError({ message: `Organization with ID '${orgId}' not found` });
    return org;
@@ -1,6 +1,6 @@
  import { TOrgPermission } from "@app/lib/types";

- import { ActorAuthMethod, ActorType } from "../auth/auth-type";
+ import { ActorAuthMethod, ActorType, MfaMethod } from "../auth/auth-type";

  export type TUpdateOrgMembershipDTO = {
    userId: string;
@@ -65,6 +65,7 @@ export type TUpdateOrgDTO = {
    scimEnabled: boolean;
    defaultMembershipRoleSlug: string;
    enforceMfa: boolean;
+   selectedMfaMethod: MfaMethod;
  }>;
} & TOrgPermission;

@@ -280,7 +280,12 @@ export const projectMembershipServiceFactory = ({
    // validate custom roles input
    const customInputRoles = roles.filter(
-     ({ role }) => !Object.values(ProjectMembershipRole).includes(role as ProjectMembershipRole)
+     ({ role }) =>
+       !Object.values(ProjectMembershipRole)
+         // we don't want to include custom in this check;
+         // this unintentionally enables setting slug to custom which is reserved
+         .filter((r) => r !== ProjectMembershipRole.Custom)
+         .includes(role as ProjectMembershipRole)
    );
    const hasCustomRole = Boolean(customInputRoles.length);
    if (hasCustomRole) {
@@ -361,6 +361,10 @@ export const secretV2BridgeDALFactory = (db: TDbClient) => {
          void bd.whereILike(`${TableName.SecretV2}.key`, `%${filters?.search}%`);
        }
      }
+     if (filters?.keys) {
+       void bd.whereIn(`${TableName.SecretV2}.key`, filters.keys);
+     }
    })
    .where((bd) => {
      void bd.whereNull(`${TableName.SecretV2}.userId`).orWhere({ userId: userId || null });
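The new `filters?.keys` branch above narrows the secrets query to an explicit list of key names via a `WHERE ... IN (...)` clause, alongside the existing search filter. A small knex sketch of the same idea; the table name and standalone query function are illustrative, not taken from the diff:

```ts
// Sketch: how a keys filter narrows a knex query, as in the hunk above.
import knex from "knex";

const db = knex({ client: "pg", connection: process.env.DB_CONNECTION_URI });

const findSecretsByKeys = (folderId: string, keys?: string[]) =>
  db("secrets_v2")
    .where({ folderId })
    .modify((qb) => {
      if (keys?.length) {
        void qb.whereIn("secrets_v2.key", keys); // only fetch the requested keys
      }
    });
```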
@@ -518,7 +518,10 @@ export const expandSecretReferencesFactory = ({
      }

      if (referencedSecretValue) {
-       expandedValue = expandedValue.replaceAll(interpolationSyntax, referencedSecretValue);
+       expandedValue = expandedValue.replaceAll(
+         interpolationSyntax,
+         () => referencedSecretValue // prevents special characters from triggering replacement patterns
+       );
      }
    }
  }
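The reason for switching to a replacer function in the hunk above: when the second argument to `String.prototype.replaceAll` is a string, sequences such as `$$`, `$&` or `$'` inside it are interpreted as special replacement patterns, so a secret value containing `$` characters could be mangled during interpolation. Passing a function inserts the value verbatim. A self-contained illustration with made-up values:

```ts
// Demonstrates why the replacer function is needed (sample values only).
const template = "postgres://user:${DB_PASS}@host/db";
const secretValue = "pa$$word$&123";

// String replacement: "$$" collapses to "$" and "$&" re-inserts the matched text.
console.log(template.replaceAll("${DB_PASS}", secretValue));
// -> postgres://user:pa$word${DB_PASS}123@host/db

// Function replacement: the value is inserted literally.
console.log(template.replaceAll("${DB_PASS}", () => secretValue));
// -> postgres://user:pa$$word$&123@host/db
```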
@@ -150,9 +150,13 @@ export const secretV2BridgeServiceFactory = ({
      }
    });

-   if (referredSecrets.length !== references.length)
+   if (
+     referredSecrets.length !==
+     new Set(references.map(({ secretKey, secretPath, environment }) => `${secretKey}.${secretPath}.${environment}`))
+       .size // only count unique references
+   )
      throw new BadRequestError({
-       message: `Referenced secret not found. Found only ${diff(
+       message: `Referenced secret(s) not found: ${diff(
          references.map((el) => el.secretKey),
          referredSecrets.map((el) => el.key)
        ).join(",")}`
@@ -410,12 +414,13 @@ export const secretV2BridgeServiceFactory = ({
      type: KmsDataKey.SecretManager,
      projectId
    });
-   const encryptedValue = secretValue
-     ? {
-         encryptedValue: secretManagerEncryptor({ plainText: Buffer.from(secretValue) }).cipherTextBlob,
-         references: getAllSecretReferences(secretValue).nestedReferences
-       }
-     : {};
+   const encryptedValue =
+     typeof secretValue === "string"
+       ? {
+           encryptedValue: secretManagerEncryptor({ plainText: Buffer.from(secretValue) }).cipherTextBlob,
+           references: getAllSecretReferences(secretValue).nestedReferences
+         }
+       : {};

    if (secretValue) {
      const { nestedReferences, localReferences } = getAllSecretReferences(secretValue);
@@ -1161,7 +1166,7 @@ export const secretV2BridgeServiceFactory = ({
    const newSecrets = await secretDAL.transaction(async (tx) =>
      fnSecretBulkInsert({
        inputSecrets: inputSecrets.map((el) => {
-         const references = secretReferencesGroupByInputSecretKey[el.secretKey].nestedReferences;
+         const references = secretReferencesGroupByInputSecretKey[el.secretKey]?.nestedReferences;

          return {
            version: 1,
@@ -1368,7 +1373,7 @@ export const secretV2BridgeServiceFactory = ({
          typeof el.secretValue !== "undefined"
            ? {
                encryptedValue: secretManagerEncryptor({ plainText: Buffer.from(el.secretValue) }).cipherTextBlob,
-               references: secretReferencesGroupByInputSecretKey[el.secretKey].nestedReferences
+               references: secretReferencesGroupByInputSecretKey[el.secretKey]?.nestedReferences
              }
            : {};
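The count comparison in the first hunk above previously broke when the same secret was referenced more than once in a single value: `references` then had more entries than the distinct secrets returned by the lookup, and the request was rejected even though every reference resolved. Counting unique `(key, path, environment)` triples fixes that. A small illustration of the dedup count with made-up data:

```ts
// Sketch: counting unique references the same way as the hunk above.
const references = [
  { secretKey: "DB_HOST", secretPath: "/", environment: "prod" },
  { secretKey: "DB_HOST", secretPath: "/", environment: "prod" }, // referenced twice in one value
  { secretKey: "DB_PASS", secretPath: "/", environment: "prod" }
];

const uniqueReferenceCount = new Set(
  references.map(({ secretKey, secretPath, environment }) => `${secretKey}.${secretPath}.${environment}`)
).size;

console.log(references.length, uniqueReferenceCount); // 3 2 -> compare resolved secrets against 2, not 3
```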
@@ -33,6 +33,7 @@ export type TGetSecretsDTO = {
  offset?: number;
  limit?: number;
  search?: string;
+ keys?: string[];
} & TProjectPermission;

export type TGetASecretDTO = {
@@ -294,6 +295,7 @@ export type TFindSecretsByFolderIdsFilter = {
  search?: string;
  tagSlugs?: string[];
  includeTagsInSearch?: boolean;
+ keys?: string[];
};

export type TGetSecretsRawByFolderMappingsDTO = {
@@ -185,6 +185,7 @@ export type TGetSecretsRawDTO = {
  offset?: number;
  limit?: number;
  search?: string;
+ keys?: string[];
} & TProjectPermission;

export type TGetASecretRawDTO = {
@@ -77,5 +77,21 @@ export const smtpServiceFactory = (cfg: TSmtpConfig) => {
    }
  };

- return { sendMail };
+ const verify = async () => {
+   const isConnected = smtp
+     .verify()
+     .then(async () => {
+       logger.info("SMTP connected");
+       return true;
+     })
+     .catch((err: Error) => {
+       logger.error("SMTP error");
+       logger.error(err);
+       return false;
+     });
+
+   return isConnected;
+ };
+
+ return { sendMail, verify };
};
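The new `verify` helper above appears to wrap the underlying transporter's `verify()` (nodemailer's connection check, which opens a connection to the configured SMTP server and authenticates where credentials are set), coercing the result to a boolean and logging instead of throwing. A hedged usage sketch at boot time; the wiring function below is illustrative and not part of the diff:

```ts
// Sketch: probing SMTP connectivity on startup using the new verify() helper.
const bootMailCheck = async (smtpService: { verify: () => Promise<boolean> }) => {
  const ok = await smtpService.verify();
  if (!ok) {
    // keep booting; email features will surface errors when they are actually used
    console.warn("SMTP connection could not be verified - check the SMTP_* settings");
  }
  return ok;
};
```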
backend/src/services/totp/totp-config-dal.ts (new file, 11 lines)
@@ -0,0 +1,11 @@
import { TDbClient } from "@app/db";
import { TableName } from "@app/db/schemas";
import { ormify } from "@app/lib/knex";

export type TTotpConfigDALFactory = ReturnType<typeof totpConfigDALFactory>;

export const totpConfigDALFactory = (db: TDbClient) => {
  const totpConfigDal = ormify(db, TableName.TotpConfig);

  return totpConfigDal;
};
backend/src/services/totp/totp-fns.ts (new file, 3 lines)
@@ -0,0 +1,3 @@
import crypto from "node:crypto";

export const generateRecoveryCode = () => String(crypto.randomInt(10 ** 7, 10 ** 8 - 1));
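For context on the one-liner above: `crypto.randomInt(min, max)` draws a uniformly distributed integer in `[min, max)` from a cryptographically secure source, so `crypto.randomInt(10 ** 7, 10 ** 8 - 1)` yields an 8-digit recovery code between 10000000 and 99999998, returned as a string. A quick, illustrative check of the bounds:

```ts
import crypto from "node:crypto";

// Same expression as in the new file above; bounds annotated.
const generateRecoveryCode = () => String(crypto.randomInt(10 ** 7, 10 ** 8 - 1));

// crypto.randomInt(min, max) is uniform over [min, max), so every code has exactly 8 digits.
const samples = Array.from({ length: 3 }, generateRecoveryCode);
console.log(samples.every((code) => code.length === 8)); // true
```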
backend/src/services/totp/totp-service.ts (new file, 270 lines)
@@ -0,0 +1,270 @@
import { authenticator } from "otplib";

import { BadRequestError, ForbiddenRequestError, NotFoundError } from "@app/lib/errors";

import { TKmsServiceFactory } from "../kms/kms-service";
import { TUserDALFactory } from "../user/user-dal";
import { TTotpConfigDALFactory } from "./totp-config-dal";
import { generateRecoveryCode } from "./totp-fns";
import {
  TCreateUserTotpRecoveryCodesDTO,
  TDeleteUserTotpConfigDTO,
  TGetUserTotpConfigDTO,
  TRegisterUserTotpDTO,
  TVerifyUserTotpConfigDTO,
  TVerifyUserTotpDTO,
  TVerifyWithUserRecoveryCodeDTO
} from "./totp-types";

type TTotpServiceFactoryDep = {
  userDAL: TUserDALFactory;
  totpConfigDAL: TTotpConfigDALFactory;
  kmsService: TKmsServiceFactory;
};

export type TTotpServiceFactory = ReturnType<typeof totpServiceFactory>;

const MAX_RECOVERY_CODE_LIMIT = 10;

export const totpServiceFactory = ({ totpConfigDAL, kmsService, userDAL }: TTotpServiceFactoryDep) => {
  const getUserTotpConfig = async ({ userId }: TGetUserTotpConfigDTO) => {
    const totpConfig = await totpConfigDAL.findOne({
      userId
    });

    if (!totpConfig) {
      throw new NotFoundError({
        message: "TOTP configuration not found"
      });
    }

    if (!totpConfig.isVerified) {
      throw new BadRequestError({
        message: "TOTP configuration has not been verified"
      });
    }

    const decryptWithRoot = kmsService.decryptWithRootKey();
    const recoveryCodes = decryptWithRoot(totpConfig.encryptedRecoveryCodes).toString().split(",");

    return {
      isVerified: totpConfig.isVerified,
      recoveryCodes
    };
  };

  const registerUserTotp = async ({ userId }: TRegisterUserTotpDTO) => {
    const totpConfig = await totpConfigDAL.transaction(async (tx) => {
      const verifiedTotpConfig = await totpConfigDAL.findOne(
        {
          userId,
          isVerified: true
        },
        tx
      );

      if (verifiedTotpConfig) {
        throw new BadRequestError({
          message: "TOTP configuration for user already exists"
        });
      }

      const unverifiedTotpConfig = await totpConfigDAL.findOne({
        userId,
        isVerified: false
      });

      if (unverifiedTotpConfig) {
        return unverifiedTotpConfig;
      }

      const encryptWithRoot = kmsService.encryptWithRootKey();

      // create new TOTP configuration
      const secret = authenticator.generateSecret();
      const encryptedSecret = encryptWithRoot(Buffer.from(secret));
      const recoveryCodes = Array.from({ length: MAX_RECOVERY_CODE_LIMIT }).map(generateRecoveryCode);
      const encryptedRecoveryCodes = encryptWithRoot(Buffer.from(recoveryCodes.join(",")));
      const newTotpConfig = await totpConfigDAL.create({
        userId,
        encryptedRecoveryCodes,
        encryptedSecret
      });

      return newTotpConfig;
    });

    const user = await userDAL.findById(userId);
    const decryptWithRoot = kmsService.decryptWithRootKey();

    const secret = decryptWithRoot(totpConfig.encryptedSecret).toString();
    const recoveryCodes = decryptWithRoot(totpConfig.encryptedRecoveryCodes).toString().split(",");
    const otpUrl = authenticator.keyuri(user.username, "Infisical", secret);

    return {
      otpUrl,
      recoveryCodes
    };
  };

  const verifyUserTotpConfig = async ({ userId, totp }: TVerifyUserTotpConfigDTO) => {
    const totpConfig = await totpConfigDAL.findOne({
      userId
    });

    if (!totpConfig) {
      throw new NotFoundError({
        message: "TOTP configuration not found"
      });
    }

    if (totpConfig.isVerified) {
      throw new BadRequestError({
        message: "TOTP configuration has already been verified"
      });
    }

    const decryptWithRoot = kmsService.decryptWithRootKey();
    const secret = decryptWithRoot(totpConfig.encryptedSecret).toString();
    const isValid = authenticator.verify({
      token: totp,
      secret
    });

    if (isValid) {
      await totpConfigDAL.updateById(totpConfig.id, {
        isVerified: true
      });
    } else {
      throw new BadRequestError({
        message: "Invalid TOTP token"
      });
    }
  };

  const verifyUserTotp = async ({ userId, totp }: TVerifyUserTotpDTO) => {
    const totpConfig = await totpConfigDAL.findOne({
      userId
    });

    if (!totpConfig) {
      throw new NotFoundError({
        message: "TOTP configuration not found"
      });
    }

    if (!totpConfig.isVerified) {
      throw new BadRequestError({
        message: "TOTP configuration has not been verified"
      });
    }

    const decryptWithRoot = kmsService.decryptWithRootKey();
    const secret = decryptWithRoot(totpConfig.encryptedSecret).toString();
    const isValid = authenticator.verify({
      token: totp,
      secret
    });

    if (!isValid) {
      throw new ForbiddenRequestError({
        message: "Invalid TOTP"
      });
    }
  };

  const verifyWithUserRecoveryCode = async ({ userId, recoveryCode }: TVerifyWithUserRecoveryCodeDTO) => {
    const totpConfig = await totpConfigDAL.findOne({
      userId
    });

    if (!totpConfig) {
      throw new NotFoundError({
        message: "TOTP configuration not found"
      });
    }

    if (!totpConfig.isVerified) {
      throw new BadRequestError({
        message: "TOTP configuration has not been verified"
      });
    }

    const decryptWithRoot = kmsService.decryptWithRootKey();
    const encryptWithRoot = kmsService.encryptWithRootKey();

    const recoveryCodes = decryptWithRoot(totpConfig.encryptedRecoveryCodes).toString().split(",");
    const matchingCode = recoveryCodes.find((code) => recoveryCode === code);
    if (!matchingCode) {
      throw new ForbiddenRequestError({
        message: "Invalid TOTP recovery code"
      });
    }

    const updatedRecoveryCodes = recoveryCodes.filter((code) => code !== matchingCode);
    const encryptedRecoveryCodes = encryptWithRoot(Buffer.from(updatedRecoveryCodes.join(",")));
    await totpConfigDAL.updateById(totpConfig.id, {
      encryptedRecoveryCodes
    });
  };

  const deleteUserTotpConfig = async ({ userId }: TDeleteUserTotpConfigDTO) => {
    const totpConfig = await totpConfigDAL.findOne({
      userId
    });

    if (!totpConfig) {
      throw new NotFoundError({
        message: "TOTP configuration not found"
      });
    }

    await totpConfigDAL.deleteById(totpConfig.id);
  };

  const createUserTotpRecoveryCodes = async ({ userId }: TCreateUserTotpRecoveryCodesDTO) => {
    const decryptWithRoot = kmsService.decryptWithRootKey();
    const encryptWithRoot = kmsService.encryptWithRootKey();

    return totpConfigDAL.transaction(async (tx) => {
      const totpConfig = await totpConfigDAL.findOne(
        {
          userId,
          isVerified: true
        },
        tx
      );

      if (!totpConfig) {
        throw new NotFoundError({
          message: "Valid TOTP configuration not found"
        });
      }

      const recoveryCodes = decryptWithRoot(totpConfig.encryptedRecoveryCodes).toString().split(",");
      if (recoveryCodes.length >= MAX_RECOVERY_CODE_LIMIT) {
        throw new BadRequestError({
          message: `Cannot have more than ${MAX_RECOVERY_CODE_LIMIT} recovery codes at a time`
        });
      }

      const toGenerateCount = MAX_RECOVERY_CODE_LIMIT - recoveryCodes.length;
      const newRecoveryCodes = Array.from({ length: toGenerateCount }).map(generateRecoveryCode);
      const encryptedRecoveryCodes = encryptWithRoot(Buffer.from([...recoveryCodes, ...newRecoveryCodes].join(",")));

      await totpConfigDAL.updateById(totpConfig.id, {
        encryptedRecoveryCodes
      });
    });
  };

  return {
    registerUserTotp,
    verifyUserTotpConfig,
    getUserTotpConfig,
    verifyUserTotp,
    verifyWithUserRecoveryCode,
    deleteUserTotpConfig,
    createUserTotpRecoveryCodes
  };
};
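The service above leans on `otplib`'s `authenticator` for the TOTP mechanics: `generateSecret()` produces the shared base32 secret, `keyuri(user, service, secret)` builds the `otpauth://` URL that authenticator apps consume (usually rendered as a QR code), and `verify({ token, secret })` checks a submitted code against the current time step. A minimal end-to-end sketch of those three calls, standalone and outside the service's KMS encryption:

```ts
import { authenticator } from "otplib";

// 1. Registration: create a secret and an otpauth:// URL for the user's authenticator app.
const secret = authenticator.generateSecret();
const otpUrl = authenticator.keyuri("jane@example.com", "Infisical", secret);
console.log(otpUrl); // otpauth://totp/Infisical:jane@example.com?secret=...

// 2. Verification: the app derives a time-based code from the same secret; the server checks it.
const token = authenticator.generate(secret); // stands in for the code the user types
console.log(authenticator.verify({ token, secret })); // true while the time step matches
```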
backend/src/services/totp/totp-types.ts (new file, 30 lines)
@@ -0,0 +1,30 @@
export type TRegisterUserTotpDTO = {
  userId: string;
};

export type TVerifyUserTotpConfigDTO = {
  userId: string;
  totp: string;
};

export type TGetUserTotpConfigDTO = {
  userId: string;
};

export type TVerifyUserTotpDTO = {
  userId: string;
  totp: string;
};

export type TVerifyWithUserRecoveryCodeDTO = {
  userId: string;
  recoveryCode: string;
};

export type TDeleteUserTotpConfigDTO = {
  userId: string;
};

export type TCreateUserTotpRecoveryCodesDTO = {
  userId: string;
};
@@ -15,7 +15,7 @@ import { AuthMethod } from "../auth/auth-type";
  import { TGroupProjectDALFactory } from "../group-project/group-project-dal";
  import { TProjectMembershipDALFactory } from "../project-membership/project-membership-dal";
  import { TUserDALFactory } from "./user-dal";
- import { TListUserGroupsDTO } from "./user-types";
+ import { TListUserGroupsDTO, TUpdateUserMfaDTO } from "./user-types";

  type TUserServiceFactoryDep = {
    userDAL: Pick<
@@ -171,15 +171,24 @@ export const userServiceFactory = ({
    });
  };

- const toggleUserMfa = async (userId: string, isMfaEnabled: boolean) => {
+ const updateUserMfa = async ({ userId, isMfaEnabled, selectedMfaMethod }: TUpdateUserMfaDTO) => {
    const user = await userDAL.findById(userId);

    if (!user || !user.email) throw new BadRequestError({ name: "Failed to toggle MFA" });

+   let mfaMethods;
+   if (isMfaEnabled === undefined) {
+     mfaMethods = undefined;
+   } else {
+     mfaMethods = isMfaEnabled ? ["email"] : [];
+   }
+
    const updatedUser = await userDAL.updateById(userId, {
      isMfaEnabled,
-     mfaMethods: isMfaEnabled ? ["email"] : []
+     mfaMethods,
+     selectedMfaMethod
    });

    return updatedUser;
  };

@@ -327,7 +336,7 @@ export const userServiceFactory = ({
  return {
    sendEmailVerificationCode,
    verifyEmailVerificationCode,
-   toggleUserMfa,
+   updateUserMfa,
    updateUserName,
    updateAuthMethods,
    deleteUser,
@@ -1,5 +1,7 @@
  import { TOrgPermission } from "@app/lib/types";

+ import { MfaMethod } from "../auth/auth-type";
+
  export type TListUserGroupsDTO = {
    username: string;
  } & Omit<TOrgPermission, "orgId">;
@@ -8,3 +10,9 @@ export enum UserEncryption {
    V1 = 1,
    V2 = 2
  }
+
+ export type TUpdateUserMfaDTO = {
+   userId: string;
+   isMfaEnabled?: boolean;
+   selectedMfaMethod?: MfaMethod;
+ };
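The `mfaMethods` branch in the user-service hunk above is deliberately tri-state: leaving `isMfaEnabled` out of the DTO is intended to leave the stored methods unchanged (so a caller can update only `selectedMfaMethod`), while an explicit `true`/`false` resets them to `["email"]` or `[]`. The same logic as a single expression, purely for illustration:

```ts
// Equivalent to the if/else in the hunk above (sketch only, not part of the diff).
const resolveMfaMethods = (isMfaEnabled?: boolean): string[] | undefined =>
  isMfaEnabled === undefined ? undefined : isMfaEnabled ? ["email"] : [];

console.log(resolveMfaMethods());      // undefined -> intended to leave the stored methods as-is
console.log(resolveMfaMethods(true));  // ["email"]
console.log(resolveMfaMethods(false)); // []
```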
@@ -138,6 +138,7 @@ type GetOrganizationsResponse struct {
  type SelectOrganizationResponse struct {
    Token      string `json:"token"`
    MfaEnabled bool   `json:"isMfaEnabled"`
+   MfaMethod  string `json:"mfaMethod"`
  }

  type SelectOrganizationRequest struct {
@@ -260,8 +261,9 @@ type GetLoginTwoV2Response struct {
  }

  type VerifyMfaTokenRequest struct {
    Email    string `json:"email"`
    MFAToken string `json:"mfaToken"`
+   MFAMethod string `json:"mfaMethod"`
  }

  type VerifyMfaTokenResponse struct {
@@ -79,13 +79,14 @@ var initCmd = &cobra.Command{
  if tokenResponse.MfaEnabled {
    i := 1
    for i < 6 {
-     mfaVerifyCode := askForMFACode()
+     mfaVerifyCode := askForMFACode(tokenResponse.MfaMethod)

      httpClient := resty.New()
      httpClient.SetAuthToken(tokenResponse.Token)
      verifyMFAresponse, mfaErrorResponse, requestError := api.CallVerifyMfaToken(httpClient, api.VerifyMfaTokenRequest{
        Email:    userCreds.UserCredentials.Email,
        MFAToken: mfaVerifyCode,
+       MFAMethod: tokenResponse.MfaMethod,
      })
      if requestError != nil {
        util.HandleError(err)
@@ -99,7 +100,7 @@ var initCmd = &cobra.Command{
          break
        }
      }

      if mfaErrorResponse.Context.Code == "mfa_expired" {
        util.PrintErrorMessageAndExit("Your 2FA verification code has expired, please try logging in again")
        break
@@ -343,7 +343,7 @@ func cliDefaultLogin(userCredentialsToBeStored *models.UserCredentials) {
  if loginTwoResponse.MfaEnabled {
    i := 1
    for i < 6 {
-     mfaVerifyCode := askForMFACode()
+     mfaVerifyCode := askForMFACode("email")

      httpClient := resty.New()
      httpClient.SetAuthToken(loginTwoResponse.Token)
@@ -532,7 +532,7 @@ func askForDomain() error {
  const (
    INFISICAL_CLOUD_US = "Infisical Cloud (US Region)"
    INFISICAL_CLOUD_EU = "Infisical Cloud (EU Region)"
-   SELF_HOSTING       = "Self-Hosting"
+   SELF_HOSTING       = "Self-Hosting or Dedicated Instance"
    ADD_NEW_DOMAIN     = "Add a new domain"
  )

@@ -756,13 +756,14 @@ func GetJwtTokenWithOrganizationId(oldJwtToken string, email string) string {
  if selectedOrgRes.MfaEnabled {
    i := 1
    for i < 6 {
-     mfaVerifyCode := askForMFACode()
+     mfaVerifyCode := askForMFACode(selectedOrgRes.MfaMethod)

      httpClient := resty.New()
      httpClient.SetAuthToken(selectedOrgRes.Token)
      verifyMFAresponse, mfaErrorResponse, requestError := api.CallVerifyMfaToken(httpClient, api.VerifyMfaTokenRequest{
        Email:    email,
        MFAToken: mfaVerifyCode,
+       MFAMethod: selectedOrgRes.MfaMethod,
      })
      if requestError != nil {
        util.HandleError(err)
@@ -817,9 +818,15 @@ func generateFromPassword(password string, salt []byte, p *params) (hash []byte,
    return hash, nil
  }

- func askForMFACode() string {
+ func askForMFACode(mfaMethod string) string {
+   var label string
+   if mfaMethod == "totp" {
+     label = "Enter the verification code from your mobile authenticator app or use a recovery code"
+   } else {
+     label = "Enter the 2FA verification code sent to your email"
+   }
    mfaCodePromptUI := promptui.Prompt{
-     Label: "Enter the 2FA verification code sent to your email",
+     Label: label,
    }

    mfaVerifyCode, err := mfaCodePromptUI.Run()

company/handbook/compensation.mdx (new file, 28 lines)
@@ -0,0 +1,28 @@
---
title: "Compensation"
sidebarTitle: "Compensation"
description: "This guide explains how various compensation processes work at Infisical."
---

## Probation period

We are fully committed to ensuring that you are set up for success, but also understand that it may take some time to determine whether or not there is a long term fit between you and Infisical.

The first 3 months of your employment with Infisical is a probation period. During this time, you can choose to end your contract with 1 week's notice. If we choose to end your contract, Infisical will pay you 4 weeks' pay, but usually ask you to finish on the same day.

People in sales roles, such as Account Executives, have a 6 month probation period - this is to account for the fact that it can be difficult to establish whether or not someone is able to close contracts within their first 3 months, given sales cycles.

Your manager is responsible for monitoring and specifically reviewing your performance throughout this initial period. If under-performance is a concern, or if there is any hesitation regarding the future at Infisical, this should be discussed immediately with you and your manager.

## Severance

At Infisical, average performance gets a generous severance.

If Infisical decides to end your contract after the first 3 months of employment have been completed, we will give you 10 weeks' pay. It is likely we will ask you to stop working immediately.

If the decision to leave is yours, then we just require 1 month of notice.

We have structured notice in this way as we believe it is in neither Infisical's nor your interest to lock you into a role that is no longer right for you due to financial considerations. This extended notice period only applies in the case of under-performance or a change in business needs - if your contract is terminated due to gross misconduct then you may be dismissed without notice. If this policy conflicts with the requirements of your local jurisdiction, then those local laws will take priority.
@@ -58,6 +58,7 @@
      "pages": [
        "handbook/onboarding",
        "handbook/spending-money",
+       "handbook/compensation",
        "handbook/time-off",
        "handbook/hiring",
        "handbook/meetings",
@@ -86,6 +86,7 @@ services:
      - .env
    ports:
      - 4000:4000
+     - 9464:9464 # for OTEL collection of Prometheus metrics
    environment:
      - NODE_ENV=development
      - DB_CONNECTION_URI=postgres://infisical:infisical@db/infisical?sslmode=disable
@@ -95,6 +96,42 @@ services:
    extra_hosts:
      - "host.docker.internal:host-gateway"

+ prometheus:
+   image: prom/prometheus
+   volumes:
+     - ./prometheus.dev.yml:/etc/prometheus/prometheus.yml
+   ports:
+     - "9090:9090"
+   command:
+     - "--config.file=/etc/prometheus/prometheus.yml"
+   profiles: [metrics]
+
+ otel-collector:
+   image: otel/opentelemetry-collector-contrib
+   volumes:
+     - ./otel-collector-config.yaml:/etc/otelcol-contrib/config.yaml
+   ports:
+     - 1888:1888 # pprof extension
+     - 8888:8888 # Prometheus metrics exposed by the Collector
+     - 8889:8889 # Prometheus exporter metrics
+     - 13133:13133 # health_check extension
+     - 4317:4317 # OTLP gRPC receiver
+     - 4318:4318 # OTLP http receiver
+     - 55679:55679 # zpages extension
+   profiles: [metrics-otel]
+
+ grafana:
+   image: grafana/grafana
+   container_name: grafana
+   restart: unless-stopped
+   environment:
+     - GF_LOG_LEVEL=debug
+   ports:
+     - "3005:3000"
+   volumes:
+     - "grafana_storage:/var/lib/grafana"
+   profiles: [metrics]
+
  frontend:
    container_name: infisical-dev-frontend
    restart: unless-stopped
@@ -166,3 +203,4 @@ volumes:
    driver: local
  ldap_data:
  ldap_config:
+ grafana_storage:

@@ -69,4 +69,4 @@ volumes:
    driver: local

  networks:
    infisical:
@@ -3,6 +3,3 @@ title: "Bulk Create"
  openapi: "POST /api/v3/secrets/batch/raw"
  ---
-
- <Tip>
-   This endpoint requires you to disable end-to-end encryption. For more information, you should consult this [note](https://infisical.com/docs/api-reference/overview/examples/note).
- </Tip>
@@ -3,6 +3,3 @@ title: "Create"
  openapi: "POST /api/v3/secrets/raw/{secretName}"
  ---
-
- <Tip>
-   This endpoint requires you to disable end-to-end encryption. For more information, you should consult this [note](https://infisical.com/docs/api-reference/overview/examples/note).
- </Tip>
@@ -3,6 +3,3 @@ title: "Bulk Delete"
  openapi: "DELETE /api/v3/secrets/batch/raw"
  ---
-
- <Tip>
-   This endpoint requires you to disable end-to-end encryption. For more information, you should consult this [note](https://infisical.com/docs/api-reference/overview/examples/note).
- </Tip>
@@ -3,6 +3,3 @@ title: "Delete"
  openapi: "DELETE /api/v3/secrets/raw/{secretName}"
  ---
-
- <Tip>
-   This endpoint requires you to disable end-to-end encryption. For more information, you should consult this [note](https://infisical.com/docs/api-reference/overview/examples/note).
- </Tip>
@@ -2,7 +2,3 @@
  title: "List"
  openapi: "GET /api/v3/secrets/raw"
  ---
-
- <Tip>
-   This endpoint requires you to disable end-to-end encryption. For more information, you should consult this [note](https://infisical.com/docs/api-reference/overview/examples/note).
- </Tip>
@@ -3,6 +3,3 @@ title: "Retrieve"
  openapi: "GET /api/v3/secrets/raw/{secretName}"
  ---
-
- <Tip>
-   This endpoint requires you to disable end-to-end encryption. For more information, you should consult this [note](https://infisical.com/docs/api-reference/overview/examples/note).
- </Tip>
@@ -3,6 +3,3 @@ title: "Bulk Update"
  openapi: "PATCH /api/v3/secrets/batch/raw"
  ---
-
- <Tip>
-   This endpoint requires you to disable end-to-end encryption. For more information, you should consult this [note](https://infisical.com/docs/api-reference/overview/examples/note).
- </Tip>
@@ -2,7 +2,3 @@
  title: "Update"
  openapi: "PATCH /api/v3/secrets/raw/{secretName}"
  ---
-
- <Tip>
-   This endpoint requires you to disable end-to-end encryption. For more information, you should consult this [note](https://infisical.com/docs/api-reference/overview/examples/note).
- </Tip>
@@ -3,13 +3,13 @@ title: 'Install'
  description: "Infisical's CLI is one of the best way to manage environments and secrets. Install it here"
  ---

- The Infisical CLI is powerful command line tool that can be used to retrieve, modify, export and inject secrets into any process or application as environment variables.
+ The Infisical CLI is a powerful command line tool that can be used to retrieve, modify, export and inject secrets into any process or application as environment variables.
  You can use it across various environments, whether it's local development, CI/CD, staging, or production.

  ## Installation

  <Tabs>
    <Tab title="MacOS">
      Use [brew](https://brew.sh/) package manager

      ```bash
@@ -21,9 +21,8 @@ You can use it across various environments, whether it's local development, CI/C
      ```bash
      brew update && brew upgrade infisical
      ```
-
    </Tab>
    <Tab title="Windows">

      Use [Scoop](https://scoop.sh/) package manager

      ```bash
@@ -40,7 +39,20 @@ You can use it across various environments, whether it's local development, CI/C
      scoop update infisical
      ```

    </Tab>
+   <Tab title="NPM">
+     Use [NPM](https://www.npmjs.com/) package manager
+
+     ```bash
+     npm install -g @infisical/cli
+     ```
+
+     ### Updates
+
+     ```bash
+     npm update -g @infisical/cli
+     ```
+   </Tab>
    <Tab title="Alpine">
      Install prerequisite
      ```bash
@@ -69,7 +69,7 @@ The Infisical AWS ElastiCache dynamic secret allows you to generate AWS ElastiCa
  </ParamField>

  <ParamField path="Default TTL" type="string" required>
-   Default time-to-live for a generated secret (it is possible to modify this value when a secret is generate)
+   Default time-to-live for a generated secret (it is possible to modify this value after a secret is generated)
  </ParamField>

  <ParamField path="Max TTL" type="string" required>
@@ -131,12 +131,12 @@ The Infisical AWS ElastiCache dynamic secret allows you to generate AWS ElastiCa
  ## Audit or Revoke Leases
  Once you have created one or more leases, you will be able to access them by clicking on the respective dynamic secret item on the dashboard.
- This will allow you see the expiration time of the lease or delete a lease before it's set time to live.
+ This will allow you to see the expiration time of the lease or delete a lease before it's set time to live.

  

  ## Renew Leases
- To extend the life of the generated dynamic secret leases past its initial time to live, simply click on the **Renew** as illustrated below.
+ To extend the life of the generated dynamic secret leases past its initial time to live, simply click on the **Renew** button as illustrated below.

  

  <Warning>
@@ -66,7 +66,7 @@ Replace **\<account id\>** with your AWS account id and **\<aws-scope-path\>** w
  </ParamField>

  <ParamField path="Default TTL" type="string" required>
-   Default time-to-live for a generated secret (it is possible to modify this value when a secret is generate)
+   Default time-to-live for a generated secret (it is possible to modify this value after a secret is generated)
  </ParamField>

  <ParamField path="Max TTL" type="string" required>
@@ -138,12 +138,12 @@ Replace **\<account id\>** with your AWS account id and **\<aws-scope-path\>** w
  ## Audit or Revoke Leases
  Once you have created one or more leases, you will be able to access them by clicking on the respective dynamic secret item on the dashboard.
- This will allow you see the lease details and delete the lease ahead of its expiration time.
+ This will allow you to see the lease details and delete the lease ahead of its expiration time.

  

  ## Renew Leases
- To extend the life of the generated dynamic secret lease past its initial time to live, simply click on the **Renew** as illustrated below.
+ To extend the life of the generated dynamic secret lease past its initial time to live, simply click on the **Renew** button as illustrated below.

  

  <Warning>
@@ -98,7 +98,7 @@ Click on Add assignments. Search for the application name you created and select
  </ParamField>

  <ParamField path="Default TTL" type="string" required>
-   Default time-to-live for a generated secret (it is possible to modify this value when a secret is generate)
+   Default time-to-live for a generated secret (it is possible to modify this value after a secret is generated)
  </ParamField>

  <ParamField path="Max TTL" type="string" required>
@@ -151,12 +151,12 @@ Click on Add assignments. Search for the application name you created and select
  ## Audit or Revoke Leases
  Once you have created one or more leases, you will be able to access them by clicking on the respective dynamic secret item on the dashboard.
- This will allow you see the expiration time of the lease or delete a lease before it's set time to live.
+ This will allow you to see the expiration time of the lease or delete a lease before it's set time to live.

  

  ## Renew Leases
- To extend the life of the generated dynamic secret leases past its initial time to live, simply click on the **Renew** as illustrated below.
+ To extend the life of the generated dynamic secret leases past its initial time to live, simply click on the **Renew** button as illustrated below.

  

  <Warning>
@@ -39,7 +39,7 @@ The above configuration allows user creation and granting permissions.
  </ParamField>

  <ParamField path="Default TTL" type="string" required>
-   Default time-to-live for a generated secret (it is possible to modify this value when a secret is generate)
+   Default time-to-live for a generated secret (it is possible to modify this value after a secret is generated)
  </ParamField>

  <ParamField path="Max TTL" type="string" required>
@@ -116,12 +116,12 @@ The above configuration allows user creation and granting permissions.
  ## Audit or Revoke Leases
  Once you have created one or more leases, you will be able to access them by clicking on the respective dynamic secret item on the dashboard.
- This will allow you see the lease details and delete the lease ahead of its expiration time.
+ This will allow you to see the lease details and delete the lease ahead of its expiration time.

  

  ## Renew Leases
- To extend the life of the generated dynamic secret lease past its initial time to live, simply click on the **Renew** as illustrated below.
+ To extend the life of the generated dynamic secret lease past its initial time to live, simply click on the **Renew** button as illustrated below.

  

  <Warning>
@@ -34,7 +34,7 @@ The Infisical Elasticsearch dynamic secret allows you to generate Elasticsearch
  </ParamField>

  <ParamField path="Default TTL" type="string" required>
-   Default time-to-live for a generated secret (it is possible to modify this value when a secret is generate)
+   Default time-to-live for a generated secret (it is possible to modify this value after a secret is generated)
  </ParamField>

  <ParamField path="Max TTL" type="string" required>
@@ -114,12 +114,12 @@ The Infisical Elasticsearch dynamic secret allows you to generate Elasticsearch
  ## Audit or Revoke Leases
  Once you have created one or more leases, you will be able to access them by clicking on the respective dynamic secret item on the dashboard.
- This will allow you see the expiration time of the lease or delete a lease before it's set time to live.
+ This will allow you to see the expiration time of the lease or delete a lease before it's set time to live.

  

  ## Renew Leases
- To extend the life of the generated dynamic secret leases past its initial time to live, simply click on the **Renew** as illustrated below.
+ To extend the life of the generated dynamic secret leases past its initial time to live, simply click on the **Renew** button as illustrated below.

  

  <Warning>
@@ -31,7 +31,7 @@ The Infisical LDAP dynamic secret allows you to generate user credentials on dem
  </ParamField>

  <ParamField path="Default TTL" type="string" required>
-   Default time-to-live for a generated secret (it is possible to modify this value when a secret is generate)
+   Default time-to-live for a generated secret (it is possible to modify this value after a secret is generated)
  </ParamField>

  <ParamField path="Max TTL" type="string" required>
@@ -171,7 +171,7 @@ The Infisical LDAP dynamic secret allows you to generate user credentials on dem
  </ParamField>

  <ParamField path="Default TTL" type="string" required>
-   Default time-to-live for a generated secret (it is possible to modify this value when a secret is generate)
+   Default time-to-live for a generated secret (it is possible to modify this value after a secret is generated)
  </ParamField>

  <ParamField path="Max TTL" type="string" required>
Some files were not shown because too many files have changed in this diff.