Mirror of https://github.com/Infisical/infisical.git (synced 2025-08-31 15:32:32 +00:00)

Compare commits: edit-acces ... disable-lo (103 commits)
Commits (SHA1):
a1b53855bb
b447ccd3f0
82faf3a797
1e7f0f8a39
e3838643e5
5bd961735d
1147cfcea4
abb577e4e9
29dd49d696
0f76003f77
1c4dfbe028
65be2e7f7b
cf64c89ea3
d934f03597
e051cfd146
be30327dc9
f9784f15ed
8e42fdaf5b
7f958e6d89
e7138f1be9
01fba20872
696a70577a
8ba61e8293
f5434b5cba
1159b74bdb
bc4885b098
97be78a107
4b42f7b1b5
3de7fec650
7bc6697801
34c6d254a0
a0da2f2d4c
c7987772e3
5eee99e9ac
4485d7f757
d3c3f3a17e
999588b06e
37153cd8cf
4547ed7aeb
aae6a3f9af
43dd45de29
13b20806ba
49b5ab8126
c99d5c210c
fc6778dd89
2f68ff1629
cde7673a23
1165b05e8a
8884c0e6bd
0762de93d6
af2f21fe93
dcd588007c
8d6461b01d
f52dbaa2f2
0c92764409
976317e71b
7b52d60036
83479a091e
4e2592960d
8d5b6a17b1
8945bc0dc1
bceaac844f
2f375d6b65
8f00bab61c
ec12acfcdf
34a8301617
1b22438c46
8ffff7e779
a349dda4bc
f63ee39f3d
f550a2ae3f
725e55f7e5
f59efc1948
f52e90a5c1
2fda307b67
ff7b530252
10cfbe0c74
8123be4c14
9a98192b9b
991ee20ec7
6c7062fa16
57c667f0b1
15d3638612
ebd3b5c9d1
52bbe25fc5
5136dbc543
bceddab89f
6d5bed756a
bb14231d71
d985b84577
8a72023e80
41a3ac6bd4
2fb5cc1712
b352428032
914bb3d389
be70bfa33f
7758e5dbfa
22fca374f2
94039ca509
c8f124e4c5
2501c57030
60b3f5c7c6
c2cea8cffc
290 backend/package-lock.json (generated)
@@ -63,6 +63,7 @@
"argon2": "^0.31.2",
"aws-sdk": "^2.1553.0",
"axios": "^1.11.0",
"axios-ntlm": "^1.4.4",
"axios-retry": "^4.0.0",
"bcrypt": "^5.1.1",
"botbuilder": "^4.23.2",

@@ -12956,216 +12957,6 @@
"dev": true,
"license": "MIT"
},
"node_modules/@swc/core": {
"version": "1.3.107",
"resolved": "https://registry.npmjs.org/@swc/core/-/core-1.3.107.tgz",
"integrity": "sha512-zKhqDyFcTsyLIYK1iEmavljZnf4CCor5pF52UzLAz4B6Nu/4GLU+2LQVAf+oRHjusG39PTPjd2AlRT3f3QWfsQ==",
"dev": true,
"hasInstallScript": true,
"optional": true,
"peer": true,
"dependencies": {
"@swc/counter": "^0.1.1",
"@swc/types": "^0.1.5"
},
"engines": {
"node": ">=10"
},
"funding": {
"type": "opencollective",
"url": "https://opencollective.com/swc"
},
"optionalDependencies": {
"@swc/core-darwin-arm64": "1.3.107",
"@swc/core-darwin-x64": "1.3.107",
"@swc/core-linux-arm-gnueabihf": "1.3.107",
"@swc/core-linux-arm64-gnu": "1.3.107",
"@swc/core-linux-arm64-musl": "1.3.107",
"@swc/core-linux-x64-gnu": "1.3.107",
"@swc/core-linux-x64-musl": "1.3.107",
"@swc/core-win32-arm64-msvc": "1.3.107",
"@swc/core-win32-ia32-msvc": "1.3.107",
"@swc/core-win32-x64-msvc": "1.3.107"
},
"peerDependencies": {
"@swc/helpers": "^0.5.0"
},
"peerDependenciesMeta": {
"@swc/helpers": {
"optional": true
}
}
},
"node_modules/@swc/core-darwin-arm64": {
"version": "1.3.107",
"resolved": "https://registry.npmjs.org/@swc/core-darwin-arm64/-/core-darwin-arm64-1.3.107.tgz",
"integrity": "sha512-47tD/5vSXWxPd0j/ZllyQUg4bqalbQTsmqSw0J4dDdS82MWqCAwUErUrAZPRjBkjNQ6Kmrf5rpCWaGTtPw+ngw==",
"cpu": [
"arm64"
],
"dev": true,
"optional": true,
"os": [
"darwin"
],
"peer": true,
"engines": {
"node": ">=10"
}
},
"node_modules/@swc/core-darwin-x64": {
"version": "1.3.107",
"resolved": "https://registry.npmjs.org/@swc/core-darwin-x64/-/core-darwin-x64-1.3.107.tgz",
"integrity": "sha512-hwiLJ2ulNkBGAh1m1eTfeY1417OAYbRGcb/iGsJ+LuVLvKAhU/itzsl535CvcwAlt2LayeCFfcI8gdeOLeZa9A==",
"cpu": [
"x64"
],
"dev": true,
"optional": true,
"os": [
"darwin"
],
"peer": true,
"engines": {
"node": ">=10"
}
},
"node_modules/@swc/core-linux-arm-gnueabihf": {
"version": "1.3.107",
"resolved": "https://registry.npmjs.org/@swc/core-linux-arm-gnueabihf/-/core-linux-arm-gnueabihf-1.3.107.tgz",
"integrity": "sha512-I2wzcC0KXqh0OwymCmYwNRgZ9nxX7DWnOOStJXV3pS0uB83TXAkmqd7wvMBuIl9qu4Hfomi9aDM7IlEEn9tumQ==",
"cpu": [
"arm"
],
"dev": true,
"optional": true,
"os": [
"linux"
],
"peer": true,
"engines": {
"node": ">=10"
}
},
"node_modules/@swc/core-linux-arm64-gnu": {
"version": "1.3.107",
"resolved": "https://registry.npmjs.org/@swc/core-linux-arm64-gnu/-/core-linux-arm64-gnu-1.3.107.tgz",
"integrity": "sha512-HWgnn7JORYlOYnGsdunpSF8A+BCZKPLzLtEUA27/M/ZuANcMZabKL9Zurt7XQXq888uJFAt98Gy+59PU90aHKg==",
"cpu": [
"arm64"
],
"dev": true,
"optional": true,
"os": [
"linux"
],
"peer": true,
"engines": {
"node": ">=10"
}
},
"node_modules/@swc/core-linux-arm64-musl": {
"version": "1.3.107",
"resolved": "https://registry.npmjs.org/@swc/core-linux-arm64-musl/-/core-linux-arm64-musl-1.3.107.tgz",
"integrity": "sha512-vfPF74cWfAm8hyhS8yvYI94ucMHIo8xIYU+oFOW9uvDlGQRgnUf/6DEVbLyt/3yfX5723Ln57U8uiMALbX5Pyw==",
"cpu": [
"arm64"
],
"dev": true,
"optional": true,
"os": [
"linux"
],
"peer": true,
"engines": {
"node": ">=10"
}
},
"node_modules/@swc/core-linux-x64-gnu": {
"version": "1.3.107",
"resolved": "https://registry.npmjs.org/@swc/core-linux-x64-gnu/-/core-linux-x64-gnu-1.3.107.tgz",
"integrity": "sha512-uBVNhIg0ip8rH9OnOsCARUFZ3Mq3tbPHxtmWk9uAa5u8jQwGWeBx5+nTHpDOVd3YxKb6+5xDEI/edeeLpha/9g==",
"cpu": [
"x64"
],
"dev": true,
"optional": true,
"os": [
"linux"
],
"peer": true,
"engines": {
"node": ">=10"
}
},
"node_modules/@swc/core-linux-x64-musl": {
"version": "1.3.107",
"resolved": "https://registry.npmjs.org/@swc/core-linux-x64-musl/-/core-linux-x64-musl-1.3.107.tgz",
"integrity": "sha512-mvACkUvzSIB12q1H5JtabWATbk3AG+pQgXEN95AmEX2ZA5gbP9+B+mijsg7Sd/3tboHr7ZHLz/q3SHTvdFJrEw==",
"cpu": [
"x64"
],
"dev": true,
"optional": true,
"os": [
"linux"
],
"peer": true,
"engines": {
"node": ">=10"
}
},
"node_modules/@swc/core-win32-arm64-msvc": {
"version": "1.3.107",
"resolved": "https://registry.npmjs.org/@swc/core-win32-arm64-msvc/-/core-win32-arm64-msvc-1.3.107.tgz",
"integrity": "sha512-J3P14Ngy/1qtapzbguEH41kY109t6DFxfbK4Ntz9dOWNuVY3o9/RTB841ctnJk0ZHEG+BjfCJjsD2n8H5HcaOA==",
"cpu": [
"arm64"
],
"dev": true,
"optional": true,
"os": [
"win32"
],
"peer": true,
"engines": {
"node": ">=10"
}
},
"node_modules/@swc/core-win32-ia32-msvc": {
"version": "1.3.107",
"resolved": "https://registry.npmjs.org/@swc/core-win32-ia32-msvc/-/core-win32-ia32-msvc-1.3.107.tgz",
"integrity": "sha512-ZBUtgyjTHlz8TPJh7kfwwwFma+ktr6OccB1oXC8fMSopD0AxVnQasgun3l3099wIsAB9eEsJDQ/3lDkOLs1gBA==",
"cpu": [
"ia32"
],
"dev": true,
"optional": true,
"os": [
"win32"
],
"peer": true,
"engines": {
"node": ">=10"
}
},
"node_modules/@swc/core-win32-x64-msvc": {
"version": "1.3.107",
"resolved": "https://registry.npmjs.org/@swc/core-win32-x64-msvc/-/core-win32-x64-msvc-1.3.107.tgz",
"integrity": "sha512-Eyzo2XRqWOxqhE1gk9h7LWmUf4Bp4Xn2Ttb0ayAXFp6YSTxQIThXcT9kipXZqcpxcmDwoq8iWbbf2P8XL743EA==",
"cpu": [
"x64"
],
"dev": true,
"optional": true,
"os": [
"win32"
],
"peer": true,
"engines": {
"node": ">=10"
}
},
"node_modules/@swc/counter": {
"version": "0.1.3",
"resolved": "https://registry.npmjs.org/@swc/counter/-/counter-0.1.3.tgz",

@@ -13183,14 +12974,6 @@
"tslib": "^2.8.0"
}
},
"node_modules/@swc/types": {
"version": "0.1.5",
"resolved": "https://registry.npmjs.org/@swc/types/-/types-0.1.5.tgz",
"integrity": "sha512-myfUej5naTBWnqOCc/MdVOLVjXUXtIA+NpDrDBKJtLLg2shUjBu3cZmB/85RyitKc55+lUUyl7oRfLOvkr2hsw==",
"dev": true,
"optional": true,
"peer": true
},
"node_modules/@techteamer/ocsp": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/@techteamer/ocsp/-/ocsp-1.0.1.tgz",

@@ -15195,6 +14978,18 @@
"proxy-from-env": "^1.1.0"
}
},
"node_modules/axios-ntlm": {
"version": "1.4.4",
"resolved": "https://registry.npmjs.org/axios-ntlm/-/axios-ntlm-1.4.4.tgz",
"integrity": "sha512-kpCRdzMfL8gi0Z0o96P3QPAK4XuC8iciGgxGXe+PeQ4oyjI2LZN8WSOKbu0Y9Jo3T/A7pB81n6jYVPIpglEuRA==",
"license": "MIT",
"dependencies": {
"axios": "^1.8.4",
"des.js": "^1.1.0",
"dev-null": "^0.1.1",
"js-md4": "^0.3.2"
}
},
"node_modules/axios-retry": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/axios-retry/-/axios-retry-4.0.0.tgz",

@@ -16954,6 +16749,16 @@
"resolved": "https://registry.npmjs.org/deprecation/-/deprecation-2.3.1.tgz",
"integrity": "sha512-xmHIy4F3scKVwMsQ4WnVaS8bHOx0DmVwRywosKhaILI0ywMDWPtBSku2HNxRvF7jtwDRsoEwYQSfbxj8b7RlJQ=="
},
"node_modules/des.js": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/des.js/-/des.js-1.1.0.tgz",
"integrity": "sha512-r17GxjhUCjSRy8aiJpr8/UadFIzMzJGexI3Nmz4ADi9LYSFx4gTBp80+NaX/YsXWWLhpZ7v/v/ubEc/bCNfKwg==",
"license": "MIT",
"dependencies": {
"inherits": "^2.0.1",
"minimalistic-assert": "^1.0.0"
}
},
"node_modules/destroy": {
"version": "1.2.0",
"resolved": "https://registry.npmjs.org/destroy/-/destroy-1.2.0.tgz",

@@ -16981,6 +16786,12 @@
"node": ">=8"
}
},
"node_modules/dev-null": {
"version": "0.1.1",
"resolved": "https://registry.npmjs.org/dev-null/-/dev-null-0.1.1.tgz",
"integrity": "sha512-nMNZG0zfMgmdv8S5O0TM5cpwNbGKRGPCxVsr0SmA3NZZy9CYBbuNLL0PD3Acx9e5LIUgwONXtM9kM6RlawPxEQ==",
"license": "MIT"
},
"node_modules/diff": {
"version": "4.0.2",
"resolved": "https://registry.npmjs.org/diff/-/diff-4.0.2.tgz",

@@ -19029,49 +18840,6 @@
"resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz",
"integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w=="
},
"node_modules/gcp-metadata": {
"version": "5.3.0",
"resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-5.3.0.tgz",
"integrity": "sha512-FNTkdNEnBdlqF2oatizolQqNANMrcqJt6AAYt99B3y1aLLC8Hc5IOBb+ZnnzllodEEf6xMBp6wRcBbc16fa65w==",
"optional": true,
"peer": true,
"dependencies": {
"gaxios": "^5.0.0",
"json-bigint": "^1.0.0"
},
"engines": {
"node": ">=12"
}
},
"node_modules/gcp-metadata/node_modules/gaxios": {
"version": "5.1.3",
"resolved": "https://registry.npmjs.org/gaxios/-/gaxios-5.1.3.tgz",
"integrity": "sha512-95hVgBRgEIRQQQHIbnxBXeHbW4TqFk4ZDJW7wmVtvYar72FdhRIo1UGOLS2eRAKCPEdPBWu+M7+A33D9CdX9rA==",
"optional": true,
"peer": true,
"dependencies": {
"extend": "^3.0.2",
"https-proxy-agent": "^5.0.0",
"is-stream": "^2.0.0",
"node-fetch": "^2.6.9"
},
"engines": {
"node": ">=12"
}
},
"node_modules/gcp-metadata/node_modules/is-stream": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/is-stream/-/is-stream-2.0.1.tgz",
"integrity": "sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==",
"optional": true,
"peer": true,
"engines": {
"node": ">=8"
},
"funding": {
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/generate-function": {
"version": "2.3.1",
"resolved": "https://registry.npmjs.org/generate-function/-/generate-function-2.3.1.tgz",

@@ -183,6 +183,7 @@
"argon2": "^0.31.2",
"aws-sdk": "^2.1553.0",
"axios": "^1.11.0",
"axios-ntlm": "^1.4.4",
"axios-retry": "^4.0.0",
"bcrypt": "^5.1.1",
"botbuilder": "^4.23.2",
@@ -0,0 +1,49 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

const BATCH_SIZE = 1000;

export async function up(knex: Knex): Promise<void> {
  if (!(await knex.schema.hasColumn(TableName.UserAliases, "isEmailVerified"))) {
    // Add the column
    await knex.schema.alterTable(TableName.UserAliases, (t) => {
      t.boolean("isEmailVerified").defaultTo(false);
    });

    const aliasesToUpdate: { aliasId: string; isEmailVerified: boolean }[] = await knex(TableName.UserAliases)
      .join(TableName.Users, `${TableName.UserAliases}.userId`, `${TableName.Users}.id`)
      .select([`${TableName.UserAliases}.id as aliasId`, `${TableName.Users}.isEmailVerified`]);

    for (let i = 0; i < aliasesToUpdate.length; i += BATCH_SIZE) {
      const batch = aliasesToUpdate.slice(i, i + BATCH_SIZE);

      const trueIds = batch.filter((row) => row.isEmailVerified).map((row) => row.aliasId);

      if (trueIds.length > 0) {
        // eslint-disable-next-line no-await-in-loop
        await knex(TableName.UserAliases).whereIn("id", trueIds).update({ isEmailVerified: true });
      }
    }
  }

  if (!(await knex.schema.hasColumn(TableName.AuthTokens, "aliasId"))) {
    await knex.schema.alterTable(TableName.AuthTokens, (t) => {
      t.string("aliasId").nullable();
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  if (await knex.schema.hasColumn(TableName.UserAliases, "isEmailVerified")) {
    await knex.schema.alterTable(TableName.UserAliases, (t) => {
      t.dropColumn("isEmailVerified");
    });
  }

  if (await knex.schema.hasColumn(TableName.AuthTokens, "aliasId")) {
    await knex.schema.alterTable(TableName.AuthTokens, (t) => {
      t.dropColumn("aliasId");
    });
  }
}
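For orientation, a minimal sketch (not part of the diff) of how the two new columns appear to fit together, based on the LDAP/OIDC/SAML service changes further down: when an alias's email is not yet verified, a verification token is issued against both the user and the alias. The helper name is hypothetical; tokenService and TokenType are assumed to be the same injected dependencies used in those services.

// Hypothetical helper; mirrors the tokenService.createTokenForUser calls added later in this diff.
const issueAliasEmailVerification = async (
  userId: string,
  alias: { id: string; isEmailVerified?: boolean | null }
) => {
  if (alias.isEmailVerified) return null; // alias already verified, nothing to send
  return tokenService.createTokenForUser({
    type: TokenType.TOKEN_EMAIL_VERIFICATION,
    userId,
    aliasId: alias.id // stored in the new AuthTokens.aliasId column
  });
};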
@@ -0,0 +1,57 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  if (await knex.schema.hasTable(TableName.IdentityUniversalAuth)) {
    const hasLockoutEnabled = await knex.schema.hasColumn(TableName.IdentityUniversalAuth, "lockoutEnabled");
    const hasLockoutThreshold = await knex.schema.hasColumn(TableName.IdentityUniversalAuth, "lockoutThreshold");
    const hasLockoutDuration = await knex.schema.hasColumn(TableName.IdentityUniversalAuth, "lockoutDurationSeconds");
    const hasLockoutCounterReset = await knex.schema.hasColumn(
      TableName.IdentityUniversalAuth,
      "lockoutCounterResetSeconds"
    );

    await knex.schema.alterTable(TableName.IdentityUniversalAuth, (t) => {
      if (!hasLockoutEnabled) {
        t.boolean("lockoutEnabled").notNullable().defaultTo(true);
      }
      if (!hasLockoutThreshold) {
        t.integer("lockoutThreshold").notNullable().defaultTo(3);
      }
      if (!hasLockoutDuration) {
        t.integer("lockoutDurationSeconds").notNullable().defaultTo(300); // 5 minutes
      }
      if (!hasLockoutCounterReset) {
        t.integer("lockoutCounterResetSeconds").notNullable().defaultTo(30); // 30 seconds
      }
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  if (await knex.schema.hasTable(TableName.IdentityUniversalAuth)) {
    const hasLockoutEnabled = await knex.schema.hasColumn(TableName.IdentityUniversalAuth, "lockoutEnabled");
    const hasLockoutThreshold = await knex.schema.hasColumn(TableName.IdentityUniversalAuth, "lockoutThreshold");
    const hasLockoutDuration = await knex.schema.hasColumn(TableName.IdentityUniversalAuth, "lockoutDurationSeconds");
    const hasLockoutCounterReset = await knex.schema.hasColumn(
      TableName.IdentityUniversalAuth,
      "lockoutCounterResetSeconds"
    );

    await knex.schema.alterTable(TableName.IdentityUniversalAuth, (t) => {
      if (hasLockoutEnabled) {
        t.dropColumn("lockoutEnabled");
      }
      if (hasLockoutThreshold) {
        t.dropColumn("lockoutThreshold");
      }
      if (hasLockoutDuration) {
        t.dropColumn("lockoutDurationSeconds");
      }
      if (hasLockoutCounterReset) {
        t.dropColumn("lockoutCounterResetSeconds");
      }
    });
  }
}
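A rough sketch of how these four columns are presumably evaluated on a failed universal-auth login. This is an assumption for illustration only, not code from the diff: the helper, the in-memory state shape, and the exact reset semantics are hypothetical. The diff does add a PgSqlLock.IdentityLogin advisory-lock key and a CLEAR_IDENTITY_UNIVERSAL_AUTH_LOCKOUTS audit event, which suggests the real counters are tracked server-side, updated under a lock, and can be cleared by an admin.

// Hypothetical helper illustrating the column semantics; the real handler lives elsewhere.
type LockoutConfig = {
  lockoutEnabled: boolean;
  lockoutThreshold: number; // failed attempts before locking (default 3)
  lockoutDurationSeconds: number; // how long a lockout lasts (default 300)
  lockoutCounterResetSeconds: number; // idle time after which the fail counter resets (default 30)
};

type LockoutState = { failCount: number; lastFailedAt?: Date; lockedUntil?: Date };

const registerFailedLogin = (cfg: LockoutConfig, state: LockoutState, now = new Date()): LockoutState => {
  if (!cfg.lockoutEnabled) return state;

  // reset the counter if the previous failure is older than lockoutCounterResetSeconds
  const counterExpired =
    state.lastFailedAt !== undefined &&
    now.getTime() - state.lastFailedAt.getTime() > cfg.lockoutCounterResetSeconds * 1000;
  const failCount = (counterExpired ? 0 : state.failCount) + 1;

  // lock once the threshold is reached
  const lockedUntil =
    failCount >= cfg.lockoutThreshold
      ? new Date(now.getTime() + cfg.lockoutDurationSeconds * 1000)
      : state.lockedUntil;

  return { failCount, lastFailedAt: now, lockedUntil };
};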
@@ -0,0 +1,19 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  if (!(await knex.schema.hasColumn(TableName.SecretApprovalPolicy, "shouldCheckSecretPermission"))) {
    await knex.schema.alterTable(TableName.SecretApprovalPolicy, (t) => {
      t.boolean("shouldCheckSecretPermission").nullable();
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  if (await knex.schema.hasColumn(TableName.SecretApprovalPolicy, "shouldCheckSecretPermission")) {
    await knex.schema.alterTable(TableName.SecretApprovalPolicy, (t) => {
      t.dropColumn("shouldCheckSecretPermission");
    });
  }
}
@@ -0,0 +1,29 @@
import { Knex } from "knex";

import { selectAllTableCols } from "@app/lib/knex";

import { TableName } from "../schemas";

const BATCH_SIZE = 100;

export async function up(knex: Knex): Promise<void> {
  if (await knex.schema.hasColumn(TableName.SecretApprovalPolicy, "shouldCheckSecretPermission")) {
    // find all existing SecretApprovalPolicy rows to backfill shouldCheckSecretPermission flag
    const rows = await knex(TableName.SecretApprovalPolicy).select(selectAllTableCols(TableName.SecretApprovalPolicy));

    if (rows.length > 0) {
      for (let i = 0; i < rows.length; i += BATCH_SIZE) {
        const batch = rows.slice(i, i + BATCH_SIZE);
        // eslint-disable-next-line no-await-in-loop
        await knex(TableName.SecretApprovalPolicy)
          .whereIn(
            "id",
            batch.map((row) => row.id)
          )
          .update({ shouldCheckSecretPermission: true });
      }
    }
  }
}

export async function down(): Promise<void> {}
@@ -0,0 +1,23 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  const hasPropertiesCol = await knex.schema.hasColumn(TableName.PkiSubscriber, "properties");

  if (!hasPropertiesCol) {
    await knex.schema.alterTable(TableName.PkiSubscriber, (t) => {
      t.jsonb("properties").nullable();
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  const hasPropertiesCol = await knex.schema.hasColumn(TableName.PkiSubscriber, "properties");

  if (hasPropertiesCol) {
    await knex.schema.alterTable(TableName.PkiSubscriber, (t) => {
      t.dropColumn("properties");
    });
  }
}
@@ -17,7 +17,8 @@ export const AuthTokensSchema = z.object({
|
||||
createdAt: z.date(),
|
||||
updatedAt: z.date(),
|
||||
userId: z.string().uuid().nullable().optional(),
|
||||
orgId: z.string().uuid().nullable().optional()
|
||||
orgId: z.string().uuid().nullable().optional(),
|
||||
aliasId: z.string().nullable().optional()
|
||||
});
|
||||
|
||||
export type TAuthTokens = z.infer<typeof AuthTokensSchema>;
|
||||
|
@@ -18,7 +18,11 @@ export const IdentityUniversalAuthsSchema = z.object({
|
||||
createdAt: z.date(),
|
||||
updatedAt: z.date(),
|
||||
identityId: z.string().uuid(),
|
||||
accessTokenPeriod: z.coerce.number().default(0)
|
||||
accessTokenPeriod: z.coerce.number().default(0),
|
||||
lockoutEnabled: z.boolean().default(true),
|
||||
lockoutThreshold: z.number().default(3),
|
||||
lockoutDurationSeconds: z.number().default(300),
|
||||
lockoutCounterResetSeconds: z.number().default(30)
|
||||
});
|
||||
|
||||
export type TIdentityUniversalAuths = z.infer<typeof IdentityUniversalAuthsSchema>;
|
||||
|
@@ -25,7 +25,8 @@ export const PkiSubscribersSchema = z.object({
|
||||
lastAutoRenewAt: z.date().nullable().optional(),
|
||||
lastOperationStatus: z.string().nullable().optional(),
|
||||
lastOperationMessage: z.string().nullable().optional(),
|
||||
lastOperationAt: z.date().nullable().optional()
|
||||
lastOperationAt: z.date().nullable().optional(),
|
||||
properties: z.unknown().nullable().optional()
|
||||
});
|
||||
|
||||
export type TPkiSubscribers = z.infer<typeof PkiSubscribersSchema>;
|
||||
|
@@ -17,7 +17,8 @@ export const SecretApprovalPoliciesSchema = z.object({
|
||||
updatedAt: z.date(),
|
||||
enforcementLevel: z.string().default("hard"),
|
||||
deletedAt: z.date().nullable().optional(),
|
||||
allowedSelfApprovals: z.boolean().default(true)
|
||||
allowedSelfApprovals: z.boolean().default(true),
|
||||
shouldCheckSecretPermission: z.boolean().nullable().optional()
|
||||
});
|
||||
|
||||
export type TSecretApprovalPolicies = z.infer<typeof SecretApprovalPoliciesSchema>;
|
||||
|
@@ -16,7 +16,8 @@ export const UserAliasesSchema = z.object({
|
||||
emails: z.string().array().nullable().optional(),
|
||||
orgId: z.string().uuid().nullable().optional(),
|
||||
createdAt: z.date(),
|
||||
updatedAt: z.date()
|
||||
updatedAt: z.date(),
|
||||
isEmailVerified: z.boolean().default(false).nullable().optional()
|
||||
});
|
||||
|
||||
export type TUserAliases = z.infer<typeof UserAliasesSchema>;
|
||||
|
@@ -133,6 +133,7 @@ export const registerAccessApprovalRequestRouter = async (server: FastifyZodProv
|
||||
approvals: z.number(),
|
||||
approvers: z
|
||||
.object({
|
||||
isOrgMembershipActive: z.boolean().nullable().optional(),
|
||||
userId: z.string().nullable().optional(),
|
||||
sequence: z.number().nullable().optional(),
|
||||
approvalsRequired: z.number().nullable().optional(),
|
||||
@@ -150,6 +151,7 @@ export const registerAccessApprovalRequestRouter = async (server: FastifyZodProv
|
||||
}),
|
||||
reviewers: z
|
||||
.object({
|
||||
isOrgMembershipActive: z.boolean().nullable().optional(),
|
||||
userId: z.string(),
|
||||
status: z.string()
|
||||
})
|
||||
|
@@ -294,22 +294,30 @@ export const registerSecretApprovalRequestRouter = async (server: FastifyZodProv
|
||||
200: z.object({
|
||||
approval: SecretApprovalRequestsSchema.merge(
|
||||
z.object({
|
||||
// secretPath: z.string(),
|
||||
policy: z.object({
|
||||
id: z.string(),
|
||||
name: z.string(),
|
||||
approvals: z.number(),
|
||||
approvers: approvalRequestUser.array(),
|
||||
approvers: approvalRequestUser
|
||||
.extend({ isOrgMembershipActive: z.boolean().nullable().optional() })
|
||||
.array(),
|
||||
bypassers: approvalRequestUser.array(),
|
||||
secretPath: z.string().optional().nullable(),
|
||||
enforcementLevel: z.string(),
|
||||
deletedAt: z.date().nullish(),
|
||||
allowedSelfApprovals: z.boolean()
|
||||
allowedSelfApprovals: z.boolean(),
|
||||
shouldCheckSecretPermission: z.boolean().nullable().optional()
|
||||
}),
|
||||
environment: z.string(),
|
||||
statusChangedByUser: approvalRequestUser.optional(),
|
||||
committerUser: approvalRequestUser.nullish(),
|
||||
reviewers: approvalRequestUser.extend({ status: z.string(), comment: z.string().optional() }).array(),
|
||||
reviewers: approvalRequestUser
|
||||
.extend({
|
||||
status: z.string(),
|
||||
comment: z.string().optional(),
|
||||
isOrgMembershipActive: z.boolean().nullable().optional()
|
||||
})
|
||||
.array(),
|
||||
secretPath: z.string(),
|
||||
commits: secretRawSchema
|
||||
.omit({ _id: true, environment: true, workspace: true, type: true, version: true, secretValue: true })
|
||||
|
@@ -5,6 +5,7 @@ import {
|
||||
AccessApprovalRequestsSchema,
|
||||
TableName,
|
||||
TAccessApprovalRequests,
|
||||
TOrgMemberships,
|
||||
TUserGroupMembership,
|
||||
TUsers
|
||||
} from "@app/db/schemas";
|
||||
@@ -144,6 +145,7 @@ export interface TAccessApprovalRequestDALFactory extends Omit<TOrmify<TableName
|
||||
approvalsRequired: number | null | undefined;
|
||||
email: string | null | undefined;
|
||||
username: string;
|
||||
isOrgMembershipActive: boolean;
|
||||
}
|
||||
| {
|
||||
userId: string;
|
||||
@@ -151,6 +153,7 @@ export interface TAccessApprovalRequestDALFactory extends Omit<TOrmify<TableName
|
||||
approvalsRequired: number | null | undefined;
|
||||
email: string | null | undefined;
|
||||
username: string;
|
||||
isOrgMembershipActive: boolean;
|
||||
}
|
||||
)[];
|
||||
bypassers: string[];
|
||||
@@ -202,6 +205,7 @@ export interface TAccessApprovalRequestDALFactory extends Omit<TOrmify<TableName
|
||||
reviewers: {
|
||||
userId: string;
|
||||
status: string;
|
||||
isOrgMembershipActive: boolean;
|
||||
}[];
|
||||
approvers: (
|
||||
| {
|
||||
@@ -210,6 +214,7 @@ export interface TAccessApprovalRequestDALFactory extends Omit<TOrmify<TableName
|
||||
approvalsRequired: number | null | undefined;
|
||||
email: string | null | undefined;
|
||||
username: string;
|
||||
isOrgMembershipActive: boolean;
|
||||
}
|
||||
| {
|
||||
userId: string;
|
||||
@@ -217,6 +222,7 @@ export interface TAccessApprovalRequestDALFactory extends Omit<TOrmify<TableName
|
||||
approvalsRequired: number | null | undefined;
|
||||
email: string | null | undefined;
|
||||
username: string;
|
||||
isOrgMembershipActive: boolean;
|
||||
}
|
||||
)[];
|
||||
bypassers: string[];
|
||||
@@ -288,6 +294,24 @@ export const accessApprovalRequestDALFactory = (db: TDbClient): TAccessApprovalR
|
||||
`requestedByUser.id`
|
||||
)
|
||||
|
||||
.leftJoin<TOrgMemberships>(
|
||||
db(TableName.OrgMembership).as("approverOrgMembership"),
|
||||
`${TableName.AccessApprovalPolicyApprover}.approverUserId`,
|
||||
`approverOrgMembership.userId`
|
||||
)
|
||||
|
||||
.leftJoin<TOrgMemberships>(
|
||||
db(TableName.OrgMembership).as("approverGroupOrgMembership"),
|
||||
`${TableName.Users}.id`,
|
||||
`approverGroupOrgMembership.userId`
|
||||
)
|
||||
|
||||
.leftJoin<TOrgMemberships>(
|
||||
db(TableName.OrgMembership).as("reviewerOrgMembership"),
|
||||
`${TableName.AccessApprovalRequestReviewer}.reviewerUserId`,
|
||||
`reviewerOrgMembership.userId`
|
||||
)
|
||||
|
||||
.leftJoin(TableName.Environment, `${TableName.AccessApprovalPolicy}.envId`, `${TableName.Environment}.id`)
|
||||
|
||||
.select(selectAllTableCols(TableName.AccessApprovalRequest))
|
||||
@@ -300,6 +324,10 @@ export const accessApprovalRequestDALFactory = (db: TDbClient): TAccessApprovalR
|
||||
db.ref("allowedSelfApprovals").withSchema(TableName.AccessApprovalPolicy).as("policyAllowedSelfApprovals"),
|
||||
db.ref("envId").withSchema(TableName.AccessApprovalPolicy).as("policyEnvId"),
|
||||
db.ref("deletedAt").withSchema(TableName.AccessApprovalPolicy).as("policyDeletedAt"),
|
||||
|
||||
db.ref("isActive").withSchema("approverOrgMembership").as("approverIsOrgMembershipActive"),
|
||||
db.ref("isActive").withSchema("approverGroupOrgMembership").as("approverGroupIsOrgMembershipActive"),
|
||||
db.ref("isActive").withSchema("reviewerOrgMembership").as("reviewerIsOrgMembershipActive"),
|
||||
db.ref("maxTimePeriod").withSchema(TableName.AccessApprovalPolicy).as("policyMaxTimePeriod")
|
||||
)
|
||||
.select(db.ref("approverUserId").withSchema(TableName.AccessApprovalPolicyApprover))
|
||||
@@ -396,17 +424,26 @@ export const accessApprovalRequestDALFactory = (db: TDbClient): TAccessApprovalR
|
||||
{
|
||||
key: "reviewerUserId",
|
||||
label: "reviewers" as const,
|
||||
mapper: ({ reviewerUserId: userId, reviewerStatus: status }) => (userId ? { userId, status } : undefined)
|
||||
mapper: ({ reviewerUserId: userId, reviewerStatus: status, reviewerIsOrgMembershipActive }) =>
|
||||
userId ? { userId, status, isOrgMembershipActive: reviewerIsOrgMembershipActive } : undefined
|
||||
},
|
||||
{
|
||||
key: "approverUserId",
|
||||
label: "approvers" as const,
|
||||
mapper: ({ approverUserId, approverSequence, approvalsRequired, approverUsername, approverEmail }) => ({
|
||||
mapper: ({
|
||||
approverUserId,
|
||||
approverSequence,
|
||||
approvalsRequired,
|
||||
approverUsername,
|
||||
approverEmail,
|
||||
approverIsOrgMembershipActive
|
||||
}) => ({
|
||||
userId: approverUserId,
|
||||
sequence: approverSequence,
|
||||
approvalsRequired,
|
||||
email: approverEmail,
|
||||
username: approverUsername
|
||||
username: approverUsername,
|
||||
isOrgMembershipActive: approverIsOrgMembershipActive
|
||||
})
|
||||
},
|
||||
{
|
||||
@@ -417,13 +454,15 @@ export const accessApprovalRequestDALFactory = (db: TDbClient): TAccessApprovalR
|
||||
approverSequence,
|
||||
approvalsRequired,
|
||||
approverGroupEmail,
|
||||
approverGroupUsername
|
||||
approverGroupUsername,
|
||||
approverGroupIsOrgMembershipActive
|
||||
}) => ({
|
||||
userId: approverGroupUserId,
|
||||
sequence: approverSequence,
|
||||
approvalsRequired,
|
||||
email: approverGroupEmail,
|
||||
username: approverGroupUsername
|
||||
username: approverGroupUsername,
|
||||
isOrgMembershipActive: approverGroupIsOrgMembershipActive
|
||||
})
|
||||
},
|
||||
{ key: "bypasserUserId", label: "bypassers" as const, mapper: ({ bypasserUserId }) => bypasserUserId },
|
||||
|
@@ -87,6 +87,7 @@ export interface TAccessApprovalRequestServiceFactory {
|
||||
approvalsRequired: number | null | undefined;
|
||||
email: string | null | undefined;
|
||||
username: string;
|
||||
isOrgMembershipActive: boolean;
|
||||
}
|
||||
| {
|
||||
userId: string;
|
||||
@@ -94,6 +95,7 @@ export interface TAccessApprovalRequestServiceFactory {
|
||||
approvalsRequired: number | null | undefined;
|
||||
email: string | null | undefined;
|
||||
username: string;
|
||||
isOrgMembershipActive: boolean;
|
||||
}
|
||||
)[];
|
||||
bypassers: string[];
|
||||
@@ -145,6 +147,7 @@ export interface TAccessApprovalRequestServiceFactory {
|
||||
reviewers: {
|
||||
userId: string;
|
||||
status: string;
|
||||
isOrgMembershipActive: boolean;
|
||||
}[];
|
||||
approvers: (
|
||||
| {
|
||||
@@ -153,6 +156,7 @@ export interface TAccessApprovalRequestServiceFactory {
|
||||
approvalsRequired: number | null | undefined;
|
||||
email: string | null | undefined;
|
||||
username: string;
|
||||
isOrgMembershipActive: boolean;
|
||||
}
|
||||
| {
|
||||
userId: string;
|
||||
@@ -160,6 +164,7 @@ export interface TAccessApprovalRequestServiceFactory {
|
||||
approvalsRequired: number | null | undefined;
|
||||
email: string | null | undefined;
|
||||
username: string;
|
||||
isOrgMembershipActive: boolean;
|
||||
}
|
||||
)[];
|
||||
bypassers: string[];
|
||||
|
@@ -198,6 +198,7 @@ export enum EventType {
|
||||
|
||||
CREATE_IDENTITY_UNIVERSAL_AUTH_CLIENT_SECRET = "create-identity-universal-auth-client-secret",
|
||||
REVOKE_IDENTITY_UNIVERSAL_AUTH_CLIENT_SECRET = "revoke-identity-universal-auth-client-secret",
|
||||
CLEAR_IDENTITY_UNIVERSAL_AUTH_LOCKOUTS = "clear-identity-universal-auth-lockouts",
|
||||
|
||||
GET_IDENTITY_UNIVERSAL_AUTH_CLIENT_SECRETS = "get-identity-universal-auth-client-secret",
|
||||
GET_IDENTITY_UNIVERSAL_AUTH_CLIENT_SECRET_BY_ID = "get-identity-universal-auth-client-secret-by-id",
|
||||
@@ -281,6 +282,7 @@ export enum EventType {
|
||||
UPDATE_SSH_CERTIFICATE_TEMPLATE = "update-ssh-certificate-template",
|
||||
DELETE_SSH_CERTIFICATE_TEMPLATE = "delete-ssh-certificate-template",
|
||||
GET_SSH_CERTIFICATE_TEMPLATE = "get-ssh-certificate-template",
|
||||
GET_AZURE_AD_TEMPLATES = "get-azure-ad-templates",
|
||||
GET_SSH_HOST = "get-ssh-host",
|
||||
CREATE_SSH_HOST = "create-ssh-host",
|
||||
UPDATE_SSH_HOST = "update-ssh-host",
|
||||
@@ -866,6 +868,10 @@ interface AddIdentityUniversalAuthEvent {
|
||||
accessTokenMaxTTL: number;
|
||||
accessTokenNumUsesLimit: number;
|
||||
accessTokenTrustedIps: Array<TIdentityTrustedIp>;
|
||||
lockoutEnabled: boolean;
|
||||
lockoutThreshold: number;
|
||||
lockoutDurationSeconds: number;
|
||||
lockoutCounterResetSeconds: number;
|
||||
};
|
||||
}
|
||||
|
||||
@@ -878,6 +884,10 @@ interface UpdateIdentityUniversalAuthEvent {
|
||||
accessTokenMaxTTL?: number;
|
||||
accessTokenNumUsesLimit?: number;
|
||||
accessTokenTrustedIps?: Array<TIdentityTrustedIp>;
|
||||
lockoutEnabled?: boolean;
|
||||
lockoutThreshold?: number;
|
||||
lockoutDurationSeconds?: number;
|
||||
lockoutCounterResetSeconds?: number;
|
||||
};
|
||||
}
|
||||
|
||||
@@ -1037,6 +1047,13 @@ interface RevokeIdentityUniversalAuthClientSecretEvent {
|
||||
};
|
||||
}
|
||||
|
||||
interface ClearIdentityUniversalAuthLockoutsEvent {
|
||||
type: EventType.CLEAR_IDENTITY_UNIVERSAL_AUTH_LOCKOUTS;
|
||||
metadata: {
|
||||
identityId: string;
|
||||
};
|
||||
}
|
||||
|
||||
interface LoginIdentityGcpAuthEvent {
|
||||
type: EventType.LOGIN_IDENTITY_GCP_AUTH;
|
||||
metadata: {
|
||||
@@ -2497,6 +2514,14 @@ interface CreateCertificateTemplateEstConfig {
|
||||
};
|
||||
}
|
||||
|
||||
interface GetAzureAdCsTemplatesEvent {
|
||||
type: EventType.GET_AZURE_AD_TEMPLATES;
|
||||
metadata: {
|
||||
caId: string;
|
||||
amount: number;
|
||||
};
|
||||
}
|
||||
|
||||
interface UpdateCertificateTemplateEstConfig {
|
||||
type: EventType.UPDATE_CERTIFICATE_TEMPLATE_EST_CONFIG;
|
||||
metadata: {
|
||||
@@ -3491,6 +3516,7 @@ export type Event =
|
||||
| GetIdentityUniversalAuthClientSecretsEvent
|
||||
| GetIdentityUniversalAuthClientSecretByIdEvent
|
||||
| RevokeIdentityUniversalAuthClientSecretEvent
|
||||
| ClearIdentityUniversalAuthLockoutsEvent
|
||||
| LoginIdentityGcpAuthEvent
|
||||
| AddIdentityGcpAuthEvent
|
||||
| DeleteIdentityGcpAuthEvent
|
||||
@@ -3636,6 +3662,7 @@ export type Event =
|
||||
| CreateCertificateTemplateEstConfig
|
||||
| UpdateCertificateTemplateEstConfig
|
||||
| GetCertificateTemplateEstConfig
|
||||
| GetAzureAdCsTemplatesEvent
|
||||
| AttemptCreateSlackIntegration
|
||||
| AttemptReinstallSlackIntegration
|
||||
| UpdateSlackIntegration
|
||||
|
@@ -400,15 +400,13 @@ export const ldapConfigServiceFactory = ({
|
||||
|
||||
userAlias = await userDAL.transaction(async (tx) => {
|
||||
let newUser: TUsers | undefined;
|
||||
if (serverCfg.trustLdapEmails) {
|
||||
newUser = await userDAL.findOne(
|
||||
{
|
||||
email: email.toLowerCase(),
|
||||
isEmailVerified: true
|
||||
},
|
||||
tx
|
||||
);
|
||||
}
|
||||
newUser = await userDAL.findOne(
|
||||
{
|
||||
email: email.toLowerCase(),
|
||||
isEmailVerified: true
|
||||
},
|
||||
tx
|
||||
);
|
||||
|
||||
if (!newUser) {
|
||||
const uniqueUsername = await normalizeUsername(username, userDAL);
|
||||
@@ -433,7 +431,8 @@ export const ldapConfigServiceFactory = ({
|
||||
aliasType: UserAliasType.LDAP,
|
||||
externalId,
|
||||
emails: [email],
|
||||
orgId
|
||||
orgId,
|
||||
isEmailVerified: serverCfg.trustLdapEmails
|
||||
},
|
||||
tx
|
||||
);
|
||||
@@ -556,15 +555,14 @@ export const ldapConfigServiceFactory = ({
|
||||
return newUser;
|
||||
});
|
||||
|
||||
const isUserCompleted = Boolean(user.isAccepted);
|
||||
|
||||
const isUserCompleted = Boolean(user.isAccepted) && userAlias.isEmailVerified;
|
||||
const providerAuthToken = crypto.jwt().sign(
|
||||
{
|
||||
authTokenType: AuthTokenType.PROVIDER_TOKEN,
|
||||
userId: user.id,
|
||||
username: user.username,
|
||||
hasExchangedPrivateKey: true,
|
||||
...(user.email && { email: user.email, isEmailVerified: user.isEmailVerified }),
|
||||
...(user.email && { email: user.email, isEmailVerified: userAlias.isEmailVerified }),
|
||||
firstName,
|
||||
lastName,
|
||||
organizationName: organization.name,
|
||||
@@ -572,6 +570,7 @@ export const ldapConfigServiceFactory = ({
|
||||
organizationSlug: organization.slug,
|
||||
authMethod: AuthMethod.LDAP,
|
||||
authType: UserAliasType.LDAP,
|
||||
aliasId: userAlias.id,
|
||||
isUserCompleted,
|
||||
...(relayState
|
||||
? {
|
||||
@@ -585,10 +584,11 @@ export const ldapConfigServiceFactory = ({
|
||||
}
|
||||
);
|
||||
|
||||
if (user.email && !user.isEmailVerified) {
|
||||
if (user.email && !userAlias.isEmailVerified) {
|
||||
const token = await tokenService.createTokenForUser({
|
||||
type: TokenType.TOKEN_EMAIL_VERIFICATION,
|
||||
userId: user.id
|
||||
userId: user.id,
|
||||
aliasId: userAlias.id
|
||||
});
|
||||
|
||||
await smtpService.sendMail({
|
||||
|
@@ -180,7 +180,7 @@ export const oidcConfigServiceFactory = ({
|
||||
}
|
||||
|
||||
const appCfg = getConfig();
|
||||
const userAlias = await userAliasDAL.findOne({
|
||||
let userAlias = await userAliasDAL.findOne({
|
||||
externalId,
|
||||
orgId,
|
||||
aliasType: UserAliasType.OIDC
|
||||
@@ -231,32 +231,29 @@ export const oidcConfigServiceFactory = ({
|
||||
} else {
|
||||
user = await userDAL.transaction(async (tx) => {
|
||||
let newUser: TUsers | undefined;
|
||||
// we prioritize getting the most complete user to create the new alias under
|
||||
newUser = await userDAL.findOne(
|
||||
{
|
||||
email,
|
||||
isEmailVerified: true
|
||||
},
|
||||
tx
|
||||
);
|
||||
|
||||
if (serverCfg.trustOidcEmails) {
|
||||
// we prioritize getting the most complete user to create the new alias under
|
||||
if (!newUser) {
|
||||
// this fetches user entries created via invites
|
||||
newUser = await userDAL.findOne(
|
||||
{
|
||||
email,
|
||||
isEmailVerified: true
|
||||
username: email
|
||||
},
|
||||
tx
|
||||
);
|
||||
|
||||
if (!newUser) {
|
||||
// this fetches user entries created via invites
|
||||
newUser = await userDAL.findOne(
|
||||
{
|
||||
username: email
|
||||
},
|
||||
tx
|
||||
);
|
||||
|
||||
if (newUser && !newUser.isEmailVerified) {
|
||||
// we automatically mark it as email-verified because we've configured trust for OIDC emails
|
||||
newUser = await userDAL.updateById(newUser.id, {
|
||||
isEmailVerified: true
|
||||
});
|
||||
}
|
||||
if (newUser && !newUser.isEmailVerified) {
|
||||
// we automatically mark it as email-verified because we've configured trust for OIDC emails
|
||||
newUser = await userDAL.updateById(newUser.id, {
|
||||
isEmailVerified: serverCfg.trustOidcEmails
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
@@ -276,13 +273,14 @@ export const oidcConfigServiceFactory = ({
|
||||
);
|
||||
}
|
||||
|
||||
await userAliasDAL.create(
|
||||
userAlias = await userAliasDAL.create(
|
||||
{
|
||||
userId: newUser.id,
|
||||
aliasType: UserAliasType.OIDC,
|
||||
externalId,
|
||||
emails: email ? [email] : [],
|
||||
orgId
|
||||
orgId,
|
||||
isEmailVerified: serverCfg.trustOidcEmails
|
||||
},
|
||||
tx
|
||||
);
|
||||
@@ -404,19 +402,20 @@ export const oidcConfigServiceFactory = ({
|
||||
|
||||
await licenseService.updateSubscriptionOrgMemberCount(organization.id);
|
||||
|
||||
const isUserCompleted = Boolean(user.isAccepted);
|
||||
const isUserCompleted = Boolean(user.isAccepted) && userAlias.isEmailVerified;
|
||||
const providerAuthToken = crypto.jwt().sign(
|
||||
{
|
||||
authTokenType: AuthTokenType.PROVIDER_TOKEN,
|
||||
userId: user.id,
|
||||
username: user.username,
|
||||
...(user.email && { email: user.email, isEmailVerified: user.isEmailVerified }),
|
||||
...(user.email && { email: user.email, isEmailVerified: userAlias.isEmailVerified }),
|
||||
firstName,
|
||||
lastName,
|
||||
organizationName: organization.name,
|
||||
organizationId: organization.id,
|
||||
organizationSlug: organization.slug,
|
||||
hasExchangedPrivateKey: true,
|
||||
aliasId: userAlias.id,
|
||||
authMethod: AuthMethod.OIDC,
|
||||
authType: UserAliasType.OIDC,
|
||||
isUserCompleted,
|
||||
@@ -430,10 +429,11 @@ export const oidcConfigServiceFactory = ({
|
||||
|
||||
await oidcConfigDAL.update({ orgId }, { lastUsed: new Date() });
|
||||
|
||||
if (user.email && !user.isEmailVerified) {
|
||||
if (user.email && !userAlias.isEmailVerified) {
|
||||
const token = await tokenService.createTokenForUser({
|
||||
type: TokenType.TOKEN_EMAIL_VERIFICATION,
|
||||
userId: user.id
|
||||
userId: user.id,
|
||||
aliasId: userAlias.id
|
||||
});
|
||||
|
||||
await smtpService
|
||||
|
@@ -246,7 +246,7 @@ export const samlConfigServiceFactory = ({
|
||||
});
|
||||
}
|
||||
|
||||
const userAlias = await userAliasDAL.findOne({
|
||||
let userAlias = await userAliasDAL.findOne({
|
||||
externalId,
|
||||
orgId,
|
||||
aliasType: UserAliasType.SAML
|
||||
@@ -320,15 +320,13 @@ export const samlConfigServiceFactory = ({
|
||||
|
||||
user = await userDAL.transaction(async (tx) => {
|
||||
let newUser: TUsers | undefined;
|
||||
if (serverCfg.trustSamlEmails) {
|
||||
newUser = await userDAL.findOne(
|
||||
{
|
||||
email,
|
||||
isEmailVerified: true
|
||||
},
|
||||
tx
|
||||
);
|
||||
}
|
||||
newUser = await userDAL.findOne(
|
||||
{
|
||||
email,
|
||||
isEmailVerified: true
|
||||
},
|
||||
tx
|
||||
);
|
||||
|
||||
if (!newUser) {
|
||||
const uniqueUsername = await normalizeUsername(`${firstName ?? ""}-${lastName ?? ""}`, userDAL);
|
||||
@@ -346,13 +344,14 @@ export const samlConfigServiceFactory = ({
|
||||
);
|
||||
}
|
||||
|
||||
await userAliasDAL.create(
|
||||
userAlias = await userAliasDAL.create(
|
||||
{
|
||||
userId: newUser.id,
|
||||
aliasType: UserAliasType.SAML,
|
||||
externalId,
|
||||
emails: email ? [email] : [],
|
||||
orgId
|
||||
orgId,
|
||||
isEmailVerified: serverCfg.trustSamlEmails
|
||||
},
|
||||
tx
|
||||
);
|
||||
@@ -410,13 +409,13 @@ export const samlConfigServiceFactory = ({
|
||||
}
|
||||
await licenseService.updateSubscriptionOrgMemberCount(organization.id);
|
||||
|
||||
const isUserCompleted = Boolean(user.isAccepted && user.isEmailVerified);
|
||||
const isUserCompleted = Boolean(user.isAccepted && user.isEmailVerified && userAlias.isEmailVerified);
|
||||
const providerAuthToken = crypto.jwt().sign(
|
||||
{
|
||||
authTokenType: AuthTokenType.PROVIDER_TOKEN,
|
||||
userId: user.id,
|
||||
username: user.username,
|
||||
...(user.email && { email: user.email, isEmailVerified: user.isEmailVerified }),
|
||||
...(user.email && { email: user.email, isEmailVerified: userAlias.isEmailVerified }),
|
||||
firstName,
|
||||
lastName,
|
||||
organizationName: organization.name,
|
||||
@@ -424,6 +423,7 @@ export const samlConfigServiceFactory = ({
|
||||
organizationSlug: organization.slug,
|
||||
authMethod: authProvider,
|
||||
hasExchangedPrivateKey: true,
|
||||
aliasId: userAlias.id,
|
||||
authType: UserAliasType.SAML,
|
||||
isUserCompleted,
|
||||
...(relayState
|
||||
@@ -440,10 +440,11 @@ export const samlConfigServiceFactory = ({
|
||||
|
||||
await samlConfigDAL.update({ orgId }, { lastUsed: new Date() });
|
||||
|
||||
if (user.email && !user.isEmailVerified) {
|
||||
if (user.email && !userAlias.isEmailVerified) {
|
||||
const token = await tokenService.createTokenForUser({
|
||||
type: TokenType.TOKEN_EMAIL_VERIFICATION,
|
||||
userId: user.id
|
||||
userId: user.id,
|
||||
aliasId: userAlias.id
|
||||
});
|
||||
|
||||
await smtpService.sendMail({
|
||||
|
@@ -4,6 +4,7 @@ import { TDbClient } from "@app/db";
|
||||
import {
|
||||
SecretApprovalRequestsSchema,
|
||||
TableName,
|
||||
TOrgMemberships,
|
||||
TSecretApprovalRequests,
|
||||
TSecretApprovalRequestsSecrets,
|
||||
TUserGroupMembership,
|
||||
@@ -107,11 +108,32 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
|
||||
`${TableName.SecretApprovalRequestReviewer}.reviewerUserId`,
|
||||
`secretApprovalReviewerUser.id`
|
||||
)
|
||||
|
||||
.leftJoin<TOrgMemberships>(
|
||||
db(TableName.OrgMembership).as("approverOrgMembership"),
|
||||
`${TableName.SecretApprovalPolicyApprover}.approverUserId`,
|
||||
`approverOrgMembership.userId`
|
||||
)
|
||||
|
||||
.leftJoin<TOrgMemberships>(
|
||||
db(TableName.OrgMembership).as("approverGroupOrgMembership"),
|
||||
`secretApprovalPolicyGroupApproverUser.id`,
|
||||
`approverGroupOrgMembership.userId`
|
||||
)
|
||||
|
||||
.leftJoin<TOrgMemberships>(
|
||||
db(TableName.OrgMembership).as("reviewerOrgMembership"),
|
||||
`${TableName.SecretApprovalRequestReviewer}.reviewerUserId`,
|
||||
`reviewerOrgMembership.userId`
|
||||
)
|
||||
|
||||
.select(selectAllTableCols(TableName.SecretApprovalRequest))
|
||||
.select(
|
||||
tx.ref("approverUserId").withSchema(TableName.SecretApprovalPolicyApprover),
|
||||
tx.ref("userId").withSchema("approverUserGroupMembership").as("approverGroupUserId"),
|
||||
tx.ref("email").withSchema("secretApprovalPolicyApproverUser").as("approverEmail"),
|
||||
tx.ref("isActive").withSchema("approverOrgMembership").as("approverIsOrgMembershipActive"),
|
||||
tx.ref("isActive").withSchema("approverGroupOrgMembership").as("approverGroupIsOrgMembershipActive"),
|
||||
tx.ref("email").withSchema("secretApprovalPolicyGroupApproverUser").as("approverGroupEmail"),
|
||||
tx.ref("username").withSchema("secretApprovalPolicyApproverUser").as("approverUsername"),
|
||||
tx.ref("username").withSchema("secretApprovalPolicyGroupApproverUser").as("approverGroupUsername"),
|
||||
@@ -148,6 +170,7 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
|
||||
tx.ref("username").withSchema("secretApprovalReviewerUser").as("reviewerUsername"),
|
||||
tx.ref("firstName").withSchema("secretApprovalReviewerUser").as("reviewerFirstName"),
|
||||
tx.ref("lastName").withSchema("secretApprovalReviewerUser").as("reviewerLastName"),
|
||||
tx.ref("isActive").withSchema("reviewerOrgMembership").as("reviewerIsOrgMembershipActive"),
|
||||
tx.ref("id").withSchema(TableName.SecretApprovalPolicy).as("policyId"),
|
||||
tx.ref("name").withSchema(TableName.SecretApprovalPolicy).as("policyName"),
|
||||
tx.ref("projectId").withSchema(TableName.Environment),
|
||||
@@ -157,7 +180,11 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
|
||||
tx.ref("enforcementLevel").withSchema(TableName.SecretApprovalPolicy).as("policyEnforcementLevel"),
|
||||
tx.ref("allowedSelfApprovals").withSchema(TableName.SecretApprovalPolicy).as("policyAllowedSelfApprovals"),
|
||||
tx.ref("approvals").withSchema(TableName.SecretApprovalPolicy).as("policyApprovals"),
|
||||
tx.ref("deletedAt").withSchema(TableName.SecretApprovalPolicy).as("policyDeletedAt")
|
||||
tx.ref("deletedAt").withSchema(TableName.SecretApprovalPolicy).as("policyDeletedAt"),
|
||||
tx
|
||||
.ref("shouldCheckSecretPermission")
|
||||
.withSchema(TableName.SecretApprovalPolicy)
|
||||
.as("policySecretReadAccessCompat")
|
||||
);
|
||||
|
||||
const findById = async (id: string, tx?: Knex) => {
|
||||
@@ -197,7 +224,8 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
|
||||
enforcementLevel: el.policyEnforcementLevel,
|
||||
envId: el.policyEnvId,
|
||||
deletedAt: el.policyDeletedAt,
|
||||
allowedSelfApprovals: el.policyAllowedSelfApprovals
|
||||
allowedSelfApprovals: el.policyAllowedSelfApprovals,
|
||||
shouldCheckSecretPermission: el.policySecretReadAccessCompat
|
||||
}
|
||||
}),
|
||||
childrenMapper: [
|
||||
@@ -211,9 +239,21 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
|
||||
reviewerLastName: lastName,
|
||||
reviewerUsername: username,
|
||||
reviewerFirstName: firstName,
|
||||
reviewerComment: comment
|
||||
reviewerComment: comment,
|
||||
reviewerIsOrgMembershipActive: isOrgMembershipActive
|
||||
}) =>
|
||||
userId ? { userId, status, email, firstName, lastName, username, comment: comment ?? "" } : undefined
|
||||
userId
|
||||
? {
|
||||
userId,
|
||||
status,
|
||||
email,
|
||||
firstName,
|
||||
lastName,
|
||||
username,
|
||||
comment: comment ?? "",
|
||||
isOrgMembershipActive
|
||||
}
|
||||
: undefined
|
||||
},
|
||||
{
|
||||
key: "approverUserId",
|
||||
@@ -223,13 +263,15 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
|
||||
approverEmail: email,
|
||||
approverUsername: username,
|
||||
approverLastName: lastName,
|
||||
approverFirstName: firstName
|
||||
approverFirstName: firstName,
|
||||
approverIsOrgMembershipActive: isOrgMembershipActive
|
||||
}) => ({
|
||||
userId,
|
||||
email,
|
||||
firstName,
|
||||
lastName,
|
||||
username
|
||||
username,
|
||||
isOrgMembershipActive
|
||||
})
|
||||
},
|
||||
{
|
||||
@@ -240,13 +282,15 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
|
||||
approverGroupEmail: email,
|
||||
approverGroupUsername: username,
|
||||
approverGroupLastName: lastName,
|
||||
approverGroupFirstName: firstName
|
||||
approverGroupFirstName: firstName,
|
||||
approverGroupIsOrgMembershipActive: isOrgMembershipActive
|
||||
}) => ({
|
||||
userId,
|
||||
email,
|
||||
firstName,
|
||||
lastName,
|
||||
username
|
||||
username,
|
||||
isOrgMembershipActive
|
||||
})
|
||||
},
|
||||
{
|
||||
@@ -653,14 +697,15 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
|
||||
db.ref("firstName").withSchema("committerUser").as("committerUserFirstName"),
|
||||
db.ref("lastName").withSchema("committerUser").as("committerUserLastName")
|
||||
)
|
||||
.distinctOn(`${TableName.SecretApprovalRequest}.id`)
|
||||
.as("inner");
|
||||
|
||||
const query = (tx || db)
|
||||
.select("*")
|
||||
const countQuery = (await (tx || db)
|
||||
.select(db.raw("count(*) OVER() as total_count"))
|
||||
.from(innerQuery)
|
||||
.orderBy("createdAt", "desc") as typeof innerQuery;
|
||||
.from(innerQuery.clone().distinctOn(`${TableName.SecretApprovalRequest}.id`))) as Array<{
|
||||
total_count: number;
|
||||
}>;
|
||||
|
||||
const query = (tx || db).select("*").from(innerQuery).orderBy("createdAt", "desc") as typeof innerQuery;
|
||||
|
||||
if (search) {
|
||||
void query.where((qb) => {
|
||||
@@ -686,8 +731,7 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
|
||||
.where("w.rank", ">=", rankOffset)
|
||||
.andWhere("w.rank", "<", rankOffset + limit);
|
||||
|
||||
// @ts-expect-error knex does not infer
|
||||
const totalCount = Number(docs[0]?.total_count || 0);
|
||||
const totalCount = Number(countQuery[0]?.total_count || 0);
|
||||
|
||||
const formattedDoc = sqlNestRelationships({
|
||||
data: docs,
|
||||
|
@@ -258,6 +258,7 @@ export const secretApprovalRequestServiceFactory = ({
|
||||
if (actor === ActorType.SERVICE) throw new BadRequestError({ message: "Cannot use service token" });
|
||||
|
||||
const secretApprovalRequest = await secretApprovalRequestDAL.findById(id);
|
||||
|
||||
if (!secretApprovalRequest)
|
||||
throw new NotFoundError({ message: `Secret approval request with ID '${id}' not found` });
|
||||
|
||||
@@ -280,13 +281,22 @@ export const secretApprovalRequestServiceFactory = ({
|
||||
) {
|
||||
throw new ForbiddenRequestError({ message: "User has insufficient privileges" });
|
||||
}
|
||||
const getHasSecretReadAccess = (environment: string, tags: { slug: string }[], secretPath?: string) => {
|
||||
const canRead = hasSecretReadValueOrDescribePermission(permission, ProjectPermissionSecretActions.ReadValue, {
|
||||
environment,
|
||||
secretPath: secretPath || "/",
|
||||
secretTags: tags.map((i) => i.slug)
|
||||
});
|
||||
return canRead;
|
||||
const getHasSecretReadAccess = (
|
||||
shouldCheckSecretPermission: boolean | null | undefined,
|
||||
environment: string,
|
||||
tags: { slug: string }[],
|
||||
secretPath?: string
|
||||
) => {
|
||||
if (shouldCheckSecretPermission) {
|
||||
const canRead = hasSecretReadValueOrDescribePermission(permission, ProjectPermissionSecretActions.ReadValue, {
|
||||
environment,
|
||||
secretPath: secretPath || "/",
|
||||
secretTags: tags.map((i) => i.slug)
|
||||
});
|
||||
return canRead;
|
||||
}
|
||||
|
||||
return true;
|
||||
};
|
||||
|
||||
let secrets;
|
||||
@@ -308,8 +318,18 @@ export const secretApprovalRequestServiceFactory = ({
|
||||
version: el.version,
|
||||
secretMetadata: el.secretMetadata as ResourceMetadataDTO,
|
||||
isRotatedSecret: el.secret?.isRotatedSecret ?? false,
|
||||
secretValueHidden: !getHasSecretReadAccess(secretApprovalRequest.environment, el.tags, secretPath?.[0]?.path),
secretValue: !getHasSecretReadAccess(secretApprovalRequest.environment, el.tags, secretPath?.[0]?.path)
secretValueHidden: !getHasSecretReadAccess(
secretApprovalRequest.policy.shouldCheckSecretPermission,
secretApprovalRequest.environment,
el.tags,
secretPath?.[0]?.path
),
secretValue: !getHasSecretReadAccess(
secretApprovalRequest.policy.shouldCheckSecretPermission,
secretApprovalRequest.environment,
el.tags,
secretPath?.[0]?.path
)
? INFISICAL_SECRET_VALUE_HIDDEN_MASK
: el.secret && el.secret.isRotatedSecret
? undefined
@@ -325,11 +345,17 @@ export const secretApprovalRequestServiceFactory = ({
id: el.secret.id,
version: el.secret.version,
secretValueHidden: !getHasSecretReadAccess(
secretApprovalRequest.policy.shouldCheckSecretPermission,
secretApprovalRequest.environment,
el.tags,
secretPath?.[0]?.path
),
secretValue: !getHasSecretReadAccess(secretApprovalRequest.environment, el.tags, secretPath?.[0]?.path)
secretValue: !getHasSecretReadAccess(
secretApprovalRequest.policy.shouldCheckSecretPermission,
secretApprovalRequest.environment,
el.tags,
secretPath?.[0]?.path
)
? INFISICAL_SECRET_VALUE_HIDDEN_MASK
: el.secret.encryptedValue
? secretManagerDecryptor({ cipherTextBlob: el.secret.encryptedValue }).toString()
@@ -345,11 +371,17 @@ export const secretApprovalRequestServiceFactory = ({
id: el.secretVersion.id,
version: el.secretVersion.version,
secretValueHidden: !getHasSecretReadAccess(
secretApprovalRequest.policy.shouldCheckSecretPermission,
secretApprovalRequest.environment,
el.tags,
secretPath?.[0]?.path
),
secretValue: !getHasSecretReadAccess(secretApprovalRequest.environment, el.tags, secretPath?.[0]?.path)
secretValue: !getHasSecretReadAccess(
secretApprovalRequest.policy.shouldCheckSecretPermission,
secretApprovalRequest.environment,
el.tags,
secretPath?.[0]?.path
)
? INFISICAL_SECRET_VALUE_HIDDEN_MASK
: el.secretVersion.encryptedValue
? secretManagerDecryptor({ cipherTextBlob: el.secretVersion.encryptedValue }).toString()
@@ -367,7 +399,12 @@ export const secretApprovalRequestServiceFactory = ({
const encryptedSecrets = await secretApprovalRequestSecretDAL.findByRequestId(secretApprovalRequest.id);
secrets = encryptedSecrets.map((el) => ({
...el,
secretValueHidden: !getHasSecretReadAccess(secretApprovalRequest.environment, el.tags, secretPath?.[0]?.path),
secretValueHidden: !getHasSecretReadAccess(
secretApprovalRequest.policy.shouldCheckSecretPermission,
secretApprovalRequest.environment,
el.tags,
secretPath?.[0]?.path
),
...decryptSecretWithBot(el, botKey),
secret: el.secret
? {
@@ -1447,6 +1484,7 @@ export const secretApprovalRequestServiceFactory = ({

const commits: Omit<TSecretApprovalRequestsSecretsV2Insert, "requestId">[] = [];
const commitTagIds: Record<string, string[]> = {};
const existingTagIds: Record<string, string[]> = {};

const { encryptor: secretManagerEncryptor } = await kmsService.createCipherPairWithDataKey({
type: KmsDataKey.SecretManager,
@@ -1512,6 +1550,11 @@ export const secretApprovalRequestServiceFactory = ({
type: SecretType.Shared
}))
);

secretsToUpdateStoredInDB.forEach((el) => {
if (el.tags?.length) existingTagIds[el.key] = el.tags.map((i) => i.id);
});

if (secretsToUpdateStoredInDB.length !== secretsToUpdate.length)
throw new NotFoundError({
message: `Secret does not exist: ${secretsToUpdateStoredInDB.map((el) => el.key).join(",")}`
@@ -1555,7 +1598,10 @@ export const secretApprovalRequestServiceFactory = ({
secretMetadata
}) => {
const secretId = updatingSecretsGroupByKey[secretKey][0].id;
if (tagIds?.length) commitTagIds[newSecretName ?? secretKey] = tagIds;
if (tagIds?.length || existingTagIds[secretKey]?.length) {
commitTagIds[newSecretName ?? secretKey] = tagIds || existingTagIds[secretKey];
}

return {
...latestSecretVersions[secretId],
secretMetadata,
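The hunks above thread secretApprovalRequest.policy.shouldCheckSecretPermission through as a new first argument to getHasSecretReadAccess, so masking secret values becomes conditional on the approval policy. The helper itself is not part of this diff; the snippet below is only a hedged sketch of the shape such a guard could take, with canReadSecretValue as a hypothetical stand-in for the real permission check.

// Hedged sketch only: the real getHasSecretReadAccess is not shown in this compare.
type Tag = { id: string; slug: string };

// Hypothetical stand-in for the actual permission evaluation.
const canReadSecretValue = (_scope: { environment: string; secretPath: string; tags: Tag[] }): boolean => {
  return true; // placeholder result
};

const getHasSecretReadAccess = (
  shouldCheckSecretPermission: boolean | null | undefined,
  environment: string,
  tags: Tag[] = [],
  secretPath = "/"
): boolean => {
  // When the approval policy does not ask for a secret-permission check,
  // the value is treated as readable and never replaced with the hidden mask.
  if (!shouldCheckSecretPermission) return true;
  return canReadSecretValue({ environment, secretPath, tags });
};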
@@ -13,7 +13,8 @@ export const PgSqlLock = {
SecretRotationV2Creation: (folderId: string) => pgAdvisoryLockHashText(`secret-rotation-v2-creation:${folderId}`),
CreateProject: (orgId: string) => pgAdvisoryLockHashText(`create-project:${orgId}`),
CreateFolder: (envId: string, projectId: string) => pgAdvisoryLockHashText(`create-folder:${envId}-${projectId}`),
SshInit: (projectId: string) => pgAdvisoryLockHashText(`ssh-bootstrap:${projectId}`)
SshInit: (projectId: string) => pgAdvisoryLockHashText(`ssh-bootstrap:${projectId}`),
IdentityLogin: (identityId: string, nonce: string) => pgAdvisoryLockHashText(`identity-login:${identityId}:${nonce}`)
} as const;

// all the key prefixes used must be set here to avoid conflict
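The new IdentityLogin key suggests that login attempts for a given identity and nonce are serialized with a Postgres advisory lock, which the lockout counters added elsewhere in this compare would rely on. A minimal sketch of how such a hashed key can back a transaction-scoped advisory lock, assuming a Knex connection; the hashing helper here is illustrative and is not the repository's pgAdvisoryLockHashText.

import crypto from "crypto";
import { Knex } from "knex";

// Illustrative replacement for pgAdvisoryLockHashText: map a key string to a
// signed 32-bit integer accepted by pg_advisory_xact_lock.
const hashTextToLockKey = (text: string): number =>
  crypto.createHash("sha256").update(text).digest().readInt32BE(0);

// Serialize work per identity/nonce; the lock is released when the transaction ends.
const withIdentityLoginLock = async <T>(
  db: Knex,
  identityId: string,
  nonce: string,
  fn: (trx: Knex.Transaction) => Promise<T>
): Promise<T> =>
  db.transaction(async (trx) => {
    await trx.raw("SELECT pg_advisory_xact_lock(?)", [hashTextToLockKey(`identity-login:${identityId}:${nonce}`)]);
    return fn(trx);
  });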
@@ -166,7 +166,12 @@ export const UNIVERSAL_AUTH = {
accessTokenNumUsesLimit:
"The maximum number of times that an access token can be used; a value of 0 implies infinite number of uses.",
accessTokenPeriod:
"The period for an access token in seconds. This value will be referenced at renewal time. Default value is 0."
"The period for an access token in seconds. This value will be referenced at renewal time. Default value is 0.",
lockoutEnabled: "Whether the lockout feature is enabled.",
lockoutThreshold: "The amount of times login must fail before locking the identity auth method.",
lockoutDurationSeconds: "How long an identity auth method lockout lasts.",
lockoutCounterResetSeconds:
"How long to wait from the most recent failed login until resetting the lockout counter."
},
RETRIEVE: {
identityId: "The ID of the identity to retrieve the auth method for."
@@ -181,7 +186,12 @@ export const UNIVERSAL_AUTH = {
accessTokenTTL: "The new lifetime for an access token in seconds.",
accessTokenMaxTTL: "The new maximum lifetime for an access token in seconds.",
accessTokenNumUsesLimit: "The new maximum number of times that an access token can be used.",
accessTokenPeriod: "The new period for an access token in seconds."
accessTokenPeriod: "The new period for an access token in seconds.",
lockoutEnabled: "Whether the lockout feature is enabled.",
lockoutThreshold: "The amount of times login must fail before locking the identity auth method.",
lockoutDurationSeconds: "How long an identity auth method lockout lasts.",
lockoutCounterResetSeconds:
"How long to wait from the most recent failed login until resetting the lockout counter."
},
CREATE_CLIENT_SECRET: {
identityId: "The ID of the identity to create a client secret for.",
@@ -201,6 +211,9 @@ export const UNIVERSAL_AUTH = {
identityId: "The ID of the identity to revoke the client secret from.",
clientSecretId: "The ID of the client secret to revoke."
},
CLEAR_CLIENT_LOCKOUTS: {
identityId: "The ID of the identity to clear the client lockouts from."
},
RENEW_ACCESS_TOKEN: {
accessToken: "The access token to renew."
},
@@ -2148,7 +2161,9 @@ export const CertificateAuthorities = {
directoryUrl: `The directory URL for the ACME Certificate Authority.`,
accountEmail: `The email address for the ACME Certificate Authority.`,
provider: `The DNS provider for the ACME Certificate Authority.`,
hostedZoneId: `The hosted zone ID for the ACME Certificate Authority.`
hostedZoneId: `The hosted zone ID for the ACME Certificate Authority.`,
eabKid: `The External Account Binding (EAB) Key ID for the ACME Certificate Authority. Required if the ACME provider uses EAB.`,
eabHmacKey: `The External Account Binding (EAB) HMAC key for the ACME Certificate Authority. Required if the ACME provider uses EAB.`
},
INTERNAL: {
type: "The type of CA to create.",
@@ -2312,6 +2327,15 @@ export const AppConnections = {
OKTA: {
instanceUrl: "The URL used to access your Okta organization.",
apiToken: "The API token used to authenticate with Okta."
},
AZURE_ADCS: {
adcsUrl:
"The HTTPS URL of the Azure ADCS instance to connect with (e.g., 'https://adcs.yourdomain.com/certsrv').",
username: "The username used to access Azure ADCS (format: 'DOMAIN\\username' or 'username@domain.com').",
password: "The password used to access Azure ADCS.",
sslRejectUnauthorized:
"Whether or not to reject unauthorized SSL certificates (true/false). Set to false only in test environments with self-signed certificates.",
sslCertificate: "The SSL certificate (PEM format) to use for secure connection."
}
}
};
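The universal-auth router later in this compare gives the four lockout fields defaults of 3 failures, a 300-second lockout, and a 30-second counter reset. A small illustrative settings object tying the descriptions above to those defaults (values only; the surrounding request shape is abridged):

// Illustrative lockout settings matching the schema defaults added in this compare:
// lock after 3 consecutive failed logins, keep the lock for 5 minutes, and reset
// the failure counter 30 seconds after the most recent failed attempt.
const lockoutSettings = {
  lockoutEnabled: true,
  lockoutThreshold: 3,
  lockoutDurationSeconds: 300,
  lockoutCounterResetSeconds: 30
};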
@@ -726,7 +726,8 @@ export const registerRoutes = async (
permissionService,
groupProjectDAL,
smtpService,
projectMembershipDAL
projectMembershipDAL,
userAliasDAL
});

const totpService = totpServiceFactory({
@@ -1455,7 +1456,8 @@ export const registerRoutes = async (
identityOrgMembershipDAL,
identityProjectDAL,
licenseService,
identityMetadataDAL
identityMetadataDAL,
keyStore
});

const identityAuthTemplateService = identityAuthTemplateServiceFactory({
@@ -1509,7 +1511,8 @@ export const registerRoutes = async (
identityAccessTokenDAL,
identityUaClientSecretDAL,
identityUaDAL,
licenseService
licenseService,
keyStore
});

const identityKubernetesAuthService = identityKubernetesAuthServiceFactory({
@@ -15,6 +15,10 @@ import {
} from "@app/services/app-connection/1password";
import { Auth0ConnectionListItemSchema, SanitizedAuth0ConnectionSchema } from "@app/services/app-connection/auth0";
import { AwsConnectionListItemSchema, SanitizedAwsConnectionSchema } from "@app/services/app-connection/aws";
import {
AzureADCSConnectionListItemSchema,
SanitizedAzureADCSConnectionSchema
} from "@app/services/app-connection/azure-adcs/azure-adcs-connection-schemas";
import {
AzureAppConfigurationConnectionListItemSchema,
SanitizedAzureAppConfigurationConnectionSchema
@@ -150,7 +154,8 @@ const SanitizedAppConnectionSchema = z.union([
...SanitizedSupabaseConnectionSchema.options,
...SanitizedDigitalOceanConnectionSchema.options,
...SanitizedNetlifyConnectionSchema.options,
...SanitizedOktaConnectionSchema.options
...SanitizedOktaConnectionSchema.options,
...SanitizedAzureADCSConnectionSchema.options
]);

const AppConnectionOptionsSchema = z.discriminatedUnion("app", [
@@ -190,7 +195,8 @@ const AppConnectionOptionsSchema = z.discriminatedUnion("app", [
SupabaseConnectionListItemSchema,
DigitalOceanConnectionListItemSchema,
NetlifyConnectionListItemSchema,
OktaConnectionListItemSchema
OktaConnectionListItemSchema,
AzureADCSConnectionListItemSchema
]);

export const registerAppConnectionRouter = async (server: FastifyZodProvider) => {

@@ -0,0 +1,18 @@
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
import {
CreateAzureADCSConnectionSchema,
SanitizedAzureADCSConnectionSchema,
UpdateAzureADCSConnectionSchema
} from "@app/services/app-connection/azure-adcs";

import { registerAppConnectionEndpoints } from "./app-connection-endpoints";

export const registerAzureADCSConnectionRouter = async (server: FastifyZodProvider) => {
registerAppConnectionEndpoints({
app: AppConnection.AzureADCS,
server,
sanitizedResponseSchema: SanitizedAzureADCSConnectionSchema,
createSchema: CreateAzureADCSConnectionSchema,
updateSchema: UpdateAzureADCSConnectionSchema
});
};
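For context, a request body for the generic create endpoint registered above might look like the following. The field names, the example URL, and the "username-password" method literal come from the AZURE_ADCS credential descriptions and enums added in this compare; the concrete values and connection name are placeholders.

// Placeholder values; only the field names and the method literal are taken from this compare.
const createAzureAdcsConnectionBody = {
  name: "adcs-lab",
  method: "username-password",
  credentials: {
    adcsUrl: "https://adcs.yourdomain.com/certsrv",
    username: "DOMAIN\\svc-infisical",
    password: "<password>",
    sslRejectUnauthorized: true
  }
};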
@@ -53,4 +53,36 @@ export const registerChecklyConnectionRouter = async (server: FastifyZodProvider
return { accounts };
}
});

server.route({
method: "GET",
url: `/:connectionId/accounts/:accountId/groups`,
config: {
rateLimit: readLimit
},
schema: {
params: z.object({
connectionId: z.string().uuid(),
accountId: z.string()
}),
response: {
200: z.object({
groups: z
.object({
name: z.string(),
id: z.string()
})
.array()
})
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const { connectionId, accountId } = req.params;

const groups = await server.services.appConnection.checkly.listGroups(connectionId, accountId, req.permission);

return { groups };
}
});
};
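A hedged example of calling the new Checkly groups endpoint from a client. Only the relative route "/:connectionId/accounts/:accountId/groups" is shown in the diff above; the "/api/v1/app-connections/checkly" prefix, base URL, and bearer-token auth are assumptions.

// Assumed path prefix and auth header; only the relative route comes from the diff.
const listChecklyGroups = async (baseUrl: string, accessToken: string, connectionId: string, accountId: string) => {
  const res = await fetch(
    `${baseUrl}/api/v1/app-connections/checkly/${connectionId}/accounts/${accountId}/groups`,
    { headers: { Authorization: `Bearer ${accessToken}` } }
  );
  if (!res.ok) throw new Error(`Failed to list Checkly groups: ${res.status}`);
  return (await res.json()) as { groups: { id: string; name: string }[] };
};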
@@ -5,6 +5,7 @@ import { AppConnection } from "@app/services/app-connection/app-connection-enums
import { registerOnePassConnectionRouter } from "./1password-connection-router";
import { registerAuth0ConnectionRouter } from "./auth0-connection-router";
import { registerAwsConnectionRouter } from "./aws-connection-router";
import { registerAzureADCSConnectionRouter } from "./azure-adcs-connection-router";
import { registerAzureAppConfigurationConnectionRouter } from "./azure-app-configuration-connection-router";
import { registerAzureClientSecretsConnectionRouter } from "./azure-client-secrets-connection-router";
import { registerAzureDevOpsConnectionRouter } from "./azure-devops-connection-router";
@@ -50,6 +51,7 @@ export const APP_CONNECTION_REGISTER_ROUTER_MAP: Record<AppConnection, (server:
[AppConnection.AzureAppConfiguration]: registerAzureAppConfigurationConnectionRouter,
[AppConnection.AzureClientSecrets]: registerAzureClientSecretsConnectionRouter,
[AppConnection.AzureDevOps]: registerAzureDevOpsConnectionRouter,
[AppConnection.AzureADCS]: registerAzureADCSConnectionRouter,
[AppConnection.Databricks]: registerDatabricksConnectionRouter,
[AppConnection.Humanitec]: registerHumanitecConnectionRouter,
[AppConnection.TerraformCloud]: registerTerraformCloudConnectionRouter,
@@ -0,0 +1,78 @@
|
||||
import { z } from "zod";
|
||||
|
||||
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
|
||||
import { readLimit } from "@app/server/config/rateLimiter";
|
||||
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
|
||||
import { AuthMode } from "@app/services/auth/auth-type";
|
||||
import {
|
||||
AzureAdCsCertificateAuthoritySchema,
|
||||
CreateAzureAdCsCertificateAuthoritySchema,
|
||||
UpdateAzureAdCsCertificateAuthoritySchema
|
||||
} from "@app/services/certificate-authority/azure-ad-cs/azure-ad-cs-certificate-authority-schemas";
|
||||
import { CaType } from "@app/services/certificate-authority/certificate-authority-enums";
|
||||
|
||||
import { registerCertificateAuthorityEndpoints } from "./certificate-authority-endpoints";
|
||||
|
||||
export const registerAzureAdCsCertificateAuthorityRouter = async (server: FastifyZodProvider) => {
|
||||
registerCertificateAuthorityEndpoints({
|
||||
caType: CaType.AZURE_AD_CS,
|
||||
server,
|
||||
responseSchema: AzureAdCsCertificateAuthoritySchema,
|
||||
createSchema: CreateAzureAdCsCertificateAuthoritySchema,
|
||||
updateSchema: UpdateAzureAdCsCertificateAuthoritySchema
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "GET",
|
||||
url: "/:caId/templates",
|
||||
config: {
|
||||
rateLimit: readLimit
|
||||
},
|
||||
schema: {
|
||||
hide: false,
|
||||
description: "Get available certificate templates from Azure AD CS CA",
|
||||
params: z.object({
|
||||
caId: z.string().describe("Azure AD CS CA ID")
|
||||
}),
|
||||
querystring: z.object({
|
||||
projectId: z.string().describe("Project ID")
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
templates: z.array(
|
||||
z.object({
|
||||
id: z.string().describe("Template identifier"),
|
||||
name: z.string().describe("Template display name"),
|
||||
description: z.string().optional().describe("Template description")
|
||||
})
|
||||
)
|
||||
})
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
handler: async (req) => {
|
||||
const templates = await server.services.certificateAuthority.getAzureAdcsTemplates({
|
||||
caId: req.params.caId,
|
||||
projectId: req.query.projectId,
|
||||
actor: req.permission.type,
|
||||
actorId: req.permission.id,
|
||||
actorAuthMethod: req.permission.authMethod,
|
||||
actorOrgId: req.permission.orgId
|
||||
});
|
||||
|
||||
await server.services.auditLog.createAuditLog({
|
||||
...req.auditLogInfo,
|
||||
projectId: req.query.projectId,
|
||||
event: {
|
||||
type: EventType.GET_AZURE_AD_TEMPLATES,
|
||||
metadata: {
|
||||
caId: req.params.caId,
|
||||
amount: templates.length
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
return { templates };
|
||||
}
|
||||
});
|
||||
};
|
@@ -1,6 +1,7 @@
import { CaType } from "@app/services/certificate-authority/certificate-authority-enums";

import { registerAcmeCertificateAuthorityRouter } from "./acme-certificate-authority-router";
import { registerAzureAdCsCertificateAuthorityRouter } from "./azure-ad-cs-certificate-authority-router";
import { registerInternalCertificateAuthorityRouter } from "./internal-certificate-authority-router";

export * from "./internal-certificate-authority-router";
@@ -8,5 +9,6 @@ export * from "./internal-certificate-authority-router";
export const CERTIFICATE_AUTHORITY_REGISTER_ROUTER_MAP: Record<CaType, (server: FastifyZodProvider) => Promise<void>> =
{
[CaType.INTERNAL]: registerInternalCertificateAuthorityRouter,
[CaType.ACME]: registerAcmeCertificateAuthorityRouter
[CaType.ACME]: registerAcmeCertificateAuthorityRouter,
[CaType.AZURE_AD_CS]: registerAzureAdCsCertificateAuthorityRouter
};

@@ -703,6 +703,9 @@ export const registerDashboardRouter = async (server: FastifyZodProvider) => {
// prevent older projects from accessing endpoint
if (!shouldUseSecretV2Bridge) throw new BadRequestError({ message: "Project version not supported" });

// verify folder exists and user has project permission
await server.services.folder.getFolderByPath({ projectId, environment, secretPath }, req.permission);

const tags = req.query.tags?.split(",") ?? [];

let remainingLimit = limit;

@@ -250,7 +250,8 @@ export const registerIdentityRouter = async (server: FastifyZodProvider) => {
description: true
}).optional(),
identity: IdentitiesSchema.pick({ name: true, id: true, hasDeleteProtection: true }).extend({
authMethods: z.array(z.string())
authMethods: z.array(z.string()),
activeLockoutAuthMethods: z.array(z.string())
})
})
})
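The router maps touched above are typed as Record<CaType, ...> and Record<AppConnection, ...>, so adding AZURE_AD_CS or AzureADCS to an enum without wiring up a router is a compile-time error rather than a silent runtime gap. A stripped-down sketch of that pattern with stand-in enum values and types, purely to illustrate the exhaustiveness check:

// Stand-in enum values and types; the point is the exhaustiveness check, not the real routers.
enum CaType {
  INTERNAL = "internal",
  ACME = "acme",
  AZURE_AD_CS = "azure-ad-cs"
}
type FastifyZodProvider = { route: (opts: unknown) => void };
type RegisterFn = (server: FastifyZodProvider) => Promise<void>;

const registerInternal: RegisterFn = async () => {};
const registerAcme: RegisterFn = async () => {};
const registerAzureAdCs: RegisterFn = async () => {};

// Removing any key here (for example the new AZURE_AD_CS entry) makes this object
// no longer satisfy Record<CaType, RegisterFn>, so the omission fails type-checking.
const ROUTER_MAP: Record<CaType, RegisterFn> = {
  [CaType.INTERNAL]: registerInternal,
  [CaType.ACME]: registerAcme,
  [CaType.AZURE_AD_CS]: registerAzureAdCs
};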
@@ -137,7 +137,21 @@ export const registerIdentityUaRouter = async (server: FastifyZodProvider) => {
|
||||
.min(0)
|
||||
.default(0)
|
||||
.describe(UNIVERSAL_AUTH.ATTACH.accessTokenNumUsesLimit),
|
||||
accessTokenPeriod: z.number().int().min(0).default(0).describe(UNIVERSAL_AUTH.ATTACH.accessTokenPeriod)
|
||||
accessTokenPeriod: z.number().int().min(0).default(0).describe(UNIVERSAL_AUTH.ATTACH.accessTokenPeriod),
|
||||
lockoutEnabled: z.boolean().default(true).describe(UNIVERSAL_AUTH.ATTACH.lockoutEnabled),
|
||||
lockoutThreshold: z.number().min(1).max(30).default(3).describe(UNIVERSAL_AUTH.ATTACH.lockoutThreshold),
|
||||
lockoutDurationSeconds: z
|
||||
.number()
|
||||
.min(30)
|
||||
.max(86400)
|
||||
.default(300)
|
||||
.describe(UNIVERSAL_AUTH.ATTACH.lockoutDurationSeconds),
|
||||
lockoutCounterResetSeconds: z
|
||||
.number()
|
||||
.min(5)
|
||||
.max(3600)
|
||||
.default(30)
|
||||
.describe(UNIVERSAL_AUTH.ATTACH.lockoutCounterResetSeconds)
|
||||
})
|
||||
.refine(
|
||||
(val) => val.accessTokenTTL <= val.accessTokenMaxTTL,
|
||||
@@ -171,7 +185,11 @@ export const registerIdentityUaRouter = async (server: FastifyZodProvider) => {
|
||||
accessTokenMaxTTL: identityUniversalAuth.accessTokenMaxTTL,
|
||||
accessTokenTrustedIps: identityUniversalAuth.accessTokenTrustedIps as TIdentityTrustedIp[],
|
||||
clientSecretTrustedIps: identityUniversalAuth.clientSecretTrustedIps as TIdentityTrustedIp[],
|
||||
accessTokenNumUsesLimit: identityUniversalAuth.accessTokenNumUsesLimit
|
||||
accessTokenNumUsesLimit: identityUniversalAuth.accessTokenNumUsesLimit,
|
||||
lockoutEnabled: identityUniversalAuth.lockoutEnabled,
|
||||
lockoutThreshold: identityUniversalAuth.lockoutThreshold,
|
||||
lockoutDurationSeconds: identityUniversalAuth.lockoutDurationSeconds,
|
||||
lockoutCounterResetSeconds: identityUniversalAuth.lockoutCounterResetSeconds
|
||||
}
|
||||
}
|
||||
});
|
||||
@@ -243,7 +261,21 @@ export const registerIdentityUaRouter = async (server: FastifyZodProvider) => {
|
||||
.min(0)
|
||||
.max(315360000)
|
||||
.optional()
|
||||
.describe(UNIVERSAL_AUTH.UPDATE.accessTokenPeriod)
|
||||
.describe(UNIVERSAL_AUTH.UPDATE.accessTokenPeriod),
|
||||
lockoutEnabled: z.boolean().optional().describe(UNIVERSAL_AUTH.UPDATE.lockoutEnabled),
|
||||
lockoutThreshold: z.number().min(1).max(30).optional().describe(UNIVERSAL_AUTH.UPDATE.lockoutThreshold),
|
||||
lockoutDurationSeconds: z
|
||||
.number()
|
||||
.min(30)
|
||||
.max(86400)
|
||||
.optional()
|
||||
.describe(UNIVERSAL_AUTH.UPDATE.lockoutDurationSeconds),
|
||||
lockoutCounterResetSeconds: z
|
||||
.number()
|
||||
.min(5)
|
||||
.max(3600)
|
||||
.optional()
|
||||
.describe(UNIVERSAL_AUTH.UPDATE.lockoutCounterResetSeconds)
|
||||
})
|
||||
.refine(
|
||||
(val) => (val.accessTokenMaxTTL && val.accessTokenTTL ? val.accessTokenTTL <= val.accessTokenMaxTTL : true),
|
||||
@@ -276,7 +308,11 @@ export const registerIdentityUaRouter = async (server: FastifyZodProvider) => {
|
||||
accessTokenMaxTTL: identityUniversalAuth.accessTokenMaxTTL,
|
||||
accessTokenTrustedIps: identityUniversalAuth.accessTokenTrustedIps as TIdentityTrustedIp[],
|
||||
clientSecretTrustedIps: identityUniversalAuth.clientSecretTrustedIps as TIdentityTrustedIp[],
|
||||
accessTokenNumUsesLimit: identityUniversalAuth.accessTokenNumUsesLimit
|
||||
accessTokenNumUsesLimit: identityUniversalAuth.accessTokenNumUsesLimit,
|
||||
lockoutEnabled: identityUniversalAuth.lockoutEnabled,
|
||||
lockoutThreshold: identityUniversalAuth.lockoutThreshold,
|
||||
lockoutDurationSeconds: identityUniversalAuth.lockoutDurationSeconds,
|
||||
lockoutCounterResetSeconds: identityUniversalAuth.lockoutCounterResetSeconds
|
||||
}
|
||||
}
|
||||
});
|
||||
@@ -594,4 +630,53 @@ export const registerIdentityUaRouter = async (server: FastifyZodProvider) => {
|
||||
return { clientSecretData };
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "POST",
|
||||
url: "/universal-auth/identities/:identityId/clear-lockouts",
|
||||
config: {
|
||||
rateLimit: writeLimit
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
schema: {
|
||||
hide: false,
|
||||
tags: [ApiDocsTags.UniversalAuth],
|
||||
description: "Clear Universal Auth Lockouts for identity",
|
||||
security: [
|
||||
{
|
||||
bearerAuth: []
|
||||
}
|
||||
],
|
||||
params: z.object({
|
||||
identityId: z.string().describe(UNIVERSAL_AUTH.CLEAR_CLIENT_LOCKOUTS.identityId)
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
deleted: z.number()
|
||||
})
|
||||
}
|
||||
},
|
||||
handler: async (req) => {
|
||||
const clearLockoutsData = await server.services.identityUa.clearUniversalAuthLockouts({
|
||||
actor: req.permission.type,
|
||||
actorId: req.permission.id,
|
||||
actorAuthMethod: req.permission.authMethod,
|
||||
actorOrgId: req.permission.orgId,
|
||||
identityId: req.params.identityId
|
||||
});
|
||||
|
||||
await server.services.auditLog.createAuditLog({
|
||||
...req.auditLogInfo,
|
||||
orgId: clearLockoutsData.orgId,
|
||||
event: {
|
||||
type: EventType.CLEAR_IDENTITY_UNIVERSAL_AUTH_LOCKOUTS,
|
||||
metadata: {
|
||||
identityId: clearLockoutsData.identityId
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
return clearLockoutsData;
|
||||
}
|
||||
});
|
||||
};
|
||||
|
@@ -1,3 +1,4 @@
|
||||
import RE2 from "re2";
|
||||
import { z } from "zod";
|
||||
|
||||
import { CertificatesSchema } from "@app/db/schemas";
|
||||
@@ -112,7 +113,88 @@ export const registerPkiSubscriberRouter = async (server: FastifyZodProvider) =>
|
||||
.transform((arr) => Array.from(new Set(arr)))
|
||||
.describe(PKI_SUBSCRIBERS.CREATE.extendedKeyUsages),
|
||||
enableAutoRenewal: z.boolean().optional().describe(PKI_SUBSCRIBERS.CREATE.enableAutoRenewal),
|
||||
autoRenewalPeriodInDays: z.number().min(1).optional().describe(PKI_SUBSCRIBERS.CREATE.autoRenewalPeriodInDays)
|
||||
autoRenewalPeriodInDays: z.number().min(1).optional().describe(PKI_SUBSCRIBERS.CREATE.autoRenewalPeriodInDays),
|
||||
properties: z
|
||||
.object({
|
||||
azureTemplateType: z.string().optional().describe("Azure ADCS Certificate Template Type"),
|
||||
organization: z
|
||||
.string()
|
||||
.trim()
|
||||
.min(1)
|
||||
.max(64, "Organization cannot exceed 64 characters")
|
||||
.regex(
|
||||
new RE2('^[^,=+<>#;\\\\"/\\r\\n\\t]*$'),
|
||||
'Organization contains invalid characters: , = + < > # ; \\ " / \\r \\n \\t'
|
||||
)
|
||||
.regex(
|
||||
new RE2("^[^\\\\s\\\\-_.]+.*[^\\\\s\\\\-_.]+$|^[^\\\\s\\\\-_.]{1}$"),
|
||||
"Organization cannot start or end with spaces, hyphens, underscores, or periods"
|
||||
)
|
||||
.optional()
|
||||
.describe("Organization (O) - Maximum 64 characters, no special DN characters"),
|
||||
organizationalUnit: z
|
||||
.string()
|
||||
.trim()
|
||||
.min(1)
|
||||
.max(64, "Organizational Unit cannot exceed 64 characters")
|
||||
.regex(
|
||||
new RE2('^[^,=+<>#;\\\\"/\\r\\n\\t]*$'),
|
||||
'Organizational Unit contains invalid characters: , = + < > # ; \\ " / \\r \\n \\t'
|
||||
)
|
||||
.regex(
|
||||
new RE2("^[^\\\\s\\\\-_.]+.*[^\\\\s\\\\-_.]+$|^[^\\\\s\\\\-_.]{1}$"),
|
||||
"Organizational Unit cannot start or end with spaces, hyphens, underscores, or periods"
|
||||
)
|
||||
.optional()
|
||||
.describe("Organizational Unit (OU) - Maximum 64 characters, no special DN characters"),
|
||||
country: z
|
||||
.string()
|
||||
.trim()
|
||||
.length(2, "Country must be exactly 2 characters")
|
||||
.regex(new RE2("^[A-Z]{2}$"), "Country must be exactly 2 uppercase letters")
|
||||
.optional()
|
||||
.describe("Country (C) - Two uppercase letter country code (e.g., US, CA, GB)"),
|
||||
state: z
|
||||
.string()
|
||||
.trim()
|
||||
.min(1)
|
||||
.max(64, "State cannot exceed 64 characters")
|
||||
.regex(
|
||||
new RE2('^[^,=+<>#;\\\\"/\\r\\n\\t]*$'),
|
||||
'State contains invalid characters: , = + < > # ; \\ " / \\r \\n \\t'
|
||||
)
|
||||
.regex(
|
||||
new RE2("^[^\\\\s\\\\-_.]+.*[^\\\\s\\\\-_.]+$|^[^\\\\s\\\\-_.]{1}$"),
|
||||
"State cannot start or end with spaces, hyphens, underscores, or periods"
|
||||
)
|
||||
.optional()
|
||||
.describe("State/Province (ST) - Maximum 64 characters, no special DN characters"),
|
||||
locality: z
|
||||
.string()
|
||||
.trim()
|
||||
.min(1)
|
||||
.max(64, "Locality cannot exceed 64 characters")
|
||||
.regex(
|
||||
new RE2('^[^,=+<>#;\\\\"/\\r\\n\\t]*$'),
|
||||
'Locality contains invalid characters: , = + < > # ; \\ " / \\r \\n \\t'
|
||||
)
|
||||
.regex(
|
||||
new RE2("^[^\\\\s\\\\-_.]+.*[^\\\\s\\\\-_.]+$|^[^\\\\s\\\\-_.]{1}$"),
|
||||
"Locality cannot start or end with spaces, hyphens, underscores, or periods"
|
||||
)
|
||||
.optional()
|
||||
.describe("Locality (L) - Maximum 64 characters, no special DN characters"),
|
||||
emailAddress: z
|
||||
.string()
|
||||
.trim()
|
||||
.email("Email Address must be a valid email format")
|
||||
.min(6, "Email Address must be at least 6 characters")
|
||||
.max(64, "Email Address cannot exceed 64 characters")
|
||||
.optional()
|
||||
.describe("Email Address - Valid email format between 6 and 64 characters")
|
||||
})
|
||||
.optional()
|
||||
.describe("Additional subscriber properties and subject fields")
|
||||
}),
|
||||
response: {
|
||||
200: sanitizedPkiSubscriber
|
||||
@@ -199,7 +281,88 @@ export const registerPkiSubscriberRouter = async (server: FastifyZodProvider) =>
|
||||
.optional()
|
||||
.describe(PKI_SUBSCRIBERS.UPDATE.extendedKeyUsages),
|
||||
enableAutoRenewal: z.boolean().optional().describe(PKI_SUBSCRIBERS.UPDATE.enableAutoRenewal),
|
||||
autoRenewalPeriodInDays: z.number().min(1).optional().describe(PKI_SUBSCRIBERS.UPDATE.autoRenewalPeriodInDays)
|
||||
autoRenewalPeriodInDays: z.number().min(1).optional().describe(PKI_SUBSCRIBERS.UPDATE.autoRenewalPeriodInDays),
|
||||
properties: z
|
||||
.object({
|
||||
azureTemplateType: z.string().optional().describe("Azure ADCS Certificate Template Type"),
|
||||
organization: z
|
||||
.string()
|
||||
.trim()
|
||||
.min(1)
|
||||
.max(64, "Organization cannot exceed 64 characters")
|
||||
.regex(
|
||||
new RE2('^[^,=+<>#;\\\\"/\\r\\n\\t]*$'),
|
||||
'Organization contains invalid characters: , = + < > # ; \\ " / \\r \\n \\t'
|
||||
)
|
||||
.regex(
|
||||
new RE2("^[^\\\\s\\\\-_.]+.*[^\\\\s\\\\-_.]+$|^[^\\\\s\\\\-_.]{1}$"),
|
||||
"Organization cannot start or end with spaces, hyphens, underscores, or periods"
|
||||
)
|
||||
.optional()
|
||||
.describe("Organization (O) - Maximum 64 characters, no special DN characters"),
|
||||
organizationalUnit: z
|
||||
.string()
|
||||
.trim()
|
||||
.min(1)
|
||||
.max(64, "Organizational Unit cannot exceed 64 characters")
|
||||
.regex(
|
||||
new RE2('^[^,=+<>#;\\\\"/\\r\\n\\t]*$'),
|
||||
'Organizational Unit contains invalid characters: , = + < > # ; \\ " / \\r \\n \\t'
|
||||
)
|
||||
.regex(
|
||||
new RE2("^[^\\\\s\\\\-_.]+.*[^\\\\s\\\\-_.]+$|^[^\\\\s\\\\-_.]{1}$"),
|
||||
"Organizational Unit cannot start or end with spaces, hyphens, underscores, or periods"
|
||||
)
|
||||
.optional()
|
||||
.describe("Organizational Unit (OU) - Maximum 64 characters, no special DN characters"),
|
||||
country: z
|
||||
.string()
|
||||
.trim()
|
||||
.length(2, "Country must be exactly 2 characters")
|
||||
.regex(new RE2("^[A-Z]{2}$"), "Country must be exactly 2 uppercase letters")
|
||||
.optional()
|
||||
.describe("Country (C) - Two uppercase letter country code (e.g., US, CA, GB)"),
|
||||
state: z
|
||||
.string()
|
||||
.trim()
|
||||
.min(1)
|
||||
.max(64, "State cannot exceed 64 characters")
|
||||
.regex(
|
||||
new RE2('^[^,=+<>#;\\\\"/\\r\\n\\t]*$'),
|
||||
'State contains invalid characters: , = + < > # ; \\ " / \\r \\n \\t'
|
||||
)
|
||||
.regex(
|
||||
new RE2("^[^\\\\s\\\\-_.]+.*[^\\\\s\\\\-_.]+$|^[^\\\\s\\\\-_.]{1}$"),
|
||||
"State cannot start or end with spaces, hyphens, underscores, or periods"
|
||||
)
|
||||
.optional()
|
||||
.describe("State/Province (ST) - Maximum 64 characters, no special DN characters"),
|
||||
locality: z
|
||||
.string()
|
||||
.trim()
|
||||
.min(1)
|
||||
.max(64, "Locality cannot exceed 64 characters")
|
||||
.regex(
|
||||
new RE2('^[^,=+<>#;\\\\"/\\r\\n\\t]*$'),
|
||||
'Locality contains invalid characters: , = + < > # ; \\ " / \\r \\n \\t'
|
||||
)
|
||||
.regex(
|
||||
new RE2("^[^\\\\s\\\\-_.]+.*[^\\\\s\\\\-_.]+$|^[^\\\\s\\\\-_.]{1}$"),
|
||||
"Locality cannot start or end with spaces, hyphens, underscores, or periods"
|
||||
)
|
||||
.optional()
|
||||
.describe("Locality (L) - Maximum 64 characters, no special DN characters"),
|
||||
emailAddress: z
|
||||
.string()
|
||||
.trim()
|
||||
.email("Email Address must be a valid email format")
|
||||
.min(6, "Email Address must be at least 6 characters")
|
||||
.max(64, "Email Address cannot exceed 64 characters")
|
||||
.optional()
|
||||
.describe("Email Address - Valid email format between 6 and 64 characters")
|
||||
})
|
||||
.optional()
|
||||
.describe("Additional subscriber properties and subject fields")
|
||||
}),
|
||||
response: {
|
||||
200: sanitizedPkiSubscriber
|
||||
|
@@ -108,7 +108,11 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
firstName: true,
lastName: true,
id: true
}).merge(UserEncryptionKeysSchema.pick({ publicKey: true })),
})
.merge(UserEncryptionKeysSchema.pick({ publicKey: true }))
.extend({
isOrgMembershipActive: z.boolean()
}),
project: SanitizedProjectSchema.pick({ name: true, id: true }),
roles: z.array(
z.object({
@@ -6,12 +6,14 @@ import { readLimit } from "@app/server/config/rateLimiter";
|
||||
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
|
||||
import { AuthMode } from "@app/services/auth/auth-type";
|
||||
import { AcmeCertificateAuthoritySchema } from "@app/services/certificate-authority/acme/acme-certificate-authority-schemas";
|
||||
import { AzureAdCsCertificateAuthoritySchema } from "@app/services/certificate-authority/azure-ad-cs/azure-ad-cs-certificate-authority-schemas";
|
||||
import { CaType } from "@app/services/certificate-authority/certificate-authority-enums";
|
||||
import { InternalCertificateAuthoritySchema } from "@app/services/certificate-authority/internal/internal-certificate-authority-schemas";
|
||||
|
||||
const CertificateAuthoritySchema = z.discriminatedUnion("type", [
|
||||
InternalCertificateAuthoritySchema,
|
||||
AcmeCertificateAuthoritySchema
|
||||
AcmeCertificateAuthoritySchema,
|
||||
AzureAdCsCertificateAuthoritySchema
|
||||
]);
|
||||
|
||||
export const registerCaRouter = async (server: FastifyZodProvider) => {
|
||||
@@ -52,19 +54,31 @@ export const registerCaRouter = async (server: FastifyZodProvider) => {
|
||||
req.permission
|
||||
);
|
||||
|
||||
const azureAdCsCas = await server.services.certificateAuthority.listCertificateAuthoritiesByProjectId(
|
||||
{
|
||||
projectId: req.query.projectId,
|
||||
type: CaType.AZURE_AD_CS
|
||||
},
|
||||
req.permission
|
||||
);
|
||||
|
||||
await server.services.auditLog.createAuditLog({
|
||||
...req.auditLogInfo,
|
||||
projectId: req.query.projectId,
|
||||
event: {
|
||||
type: EventType.GET_CAS,
|
||||
metadata: {
|
||||
caIds: [...(internalCas ?? []).map((ca) => ca.id), ...(acmeCas ?? []).map((ca) => ca.id)]
|
||||
caIds: [
|
||||
...(internalCas ?? []).map((ca) => ca.id),
|
||||
...(acmeCas ?? []).map((ca) => ca.id),
|
||||
...(azureAdCsCas ?? []).map((ca) => ca.id)
|
||||
]
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
return {
|
||||
certificateAuthorities: [...(internalCas ?? []), ...(acmeCas ?? [])]
|
||||
certificateAuthorities: [...(internalCas ?? []), ...(acmeCas ?? []), ...(azureAdCsCas ?? [])]
|
||||
};
|
||||
}
|
||||
});
|
||||
|
@@ -18,14 +18,14 @@ export const registerUserRouter = async (server: FastifyZodProvider) => {
},
schema: {
body: z.object({
username: z.string().trim()
token: z.string().trim()
}),
response: {
200: z.object({})
}
},
handler: async (req) => {
await server.services.user.sendEmailVerificationCode(req.body.username);
await server.services.user.sendEmailVerificationCode(req.body.token);
return {};
}
});

@@ -8,6 +8,7 @@ export enum AppConnection {
AzureAppConfiguration = "azure-app-configuration",
AzureClientSecrets = "azure-client-secrets",
AzureDevOps = "azure-devops",
AzureADCS = "azure-adcs",
Humanitec = "humanitec",
TerraformCloud = "terraform-cloud",
Vercel = "vercel",
@@ -31,6 +31,11 @@ import {
|
||||
} from "./app-connection-types";
|
||||
import { Auth0ConnectionMethod, getAuth0ConnectionListItem, validateAuth0ConnectionCredentials } from "./auth0";
|
||||
import { AwsConnectionMethod, getAwsConnectionListItem, validateAwsConnectionCredentials } from "./aws";
|
||||
import { AzureADCSConnectionMethod } from "./azure-adcs";
|
||||
import {
|
||||
getAzureADCSConnectionListItem,
|
||||
validateAzureADCSConnectionCredentials
|
||||
} from "./azure-adcs/azure-adcs-connection-fns";
|
||||
import {
|
||||
AzureAppConfigurationConnectionMethod,
|
||||
getAzureAppConfigurationConnectionListItem,
|
||||
@@ -136,6 +141,7 @@ export const listAppConnectionOptions = () => {
|
||||
getAzureKeyVaultConnectionListItem(),
|
||||
getAzureAppConfigurationConnectionListItem(),
|
||||
getAzureDevopsConnectionListItem(),
|
||||
getAzureADCSConnectionListItem(),
|
||||
getDatabricksConnectionListItem(),
|
||||
getHumanitecConnectionListItem(),
|
||||
getTerraformCloudConnectionListItem(),
|
||||
@@ -227,6 +233,7 @@ export const validateAppConnectionCredentials = async (
|
||||
[AppConnection.AzureClientSecrets]:
|
||||
validateAzureClientSecretsConnectionCredentials as TAppConnectionCredentialsValidator,
|
||||
[AppConnection.AzureDevOps]: validateAzureDevOpsConnectionCredentials as TAppConnectionCredentialsValidator,
|
||||
[AppConnection.AzureADCS]: validateAzureADCSConnectionCredentials as TAppConnectionCredentialsValidator,
|
||||
[AppConnection.Humanitec]: validateHumanitecConnectionCredentials as TAppConnectionCredentialsValidator,
|
||||
[AppConnection.Postgres]: validateSqlConnectionCredentials as TAppConnectionCredentialsValidator,
|
||||
[AppConnection.MsSql]: validateSqlConnectionCredentials as TAppConnectionCredentialsValidator,
|
||||
@@ -300,6 +307,7 @@ export const getAppConnectionMethodName = (method: TAppConnection["method"]) =>
|
||||
case MsSqlConnectionMethod.UsernameAndPassword:
|
||||
case MySqlConnectionMethod.UsernameAndPassword:
|
||||
case OracleDBConnectionMethod.UsernameAndPassword:
|
||||
case AzureADCSConnectionMethod.UsernamePassword:
|
||||
return "Username & Password";
|
||||
case WindmillConnectionMethod.AccessToken:
|
||||
case HCVaultConnectionMethod.AccessToken:
|
||||
@@ -357,6 +365,7 @@ export const TRANSITION_CONNECTION_CREDENTIALS_TO_PLATFORM: Record<
|
||||
[AppConnection.AzureKeyVault]: platformManagedCredentialsNotSupported,
|
||||
[AppConnection.AzureAppConfiguration]: platformManagedCredentialsNotSupported,
|
||||
[AppConnection.AzureDevOps]: platformManagedCredentialsNotSupported,
|
||||
[AppConnection.AzureADCS]: platformManagedCredentialsNotSupported,
|
||||
[AppConnection.Humanitec]: platformManagedCredentialsNotSupported,
|
||||
[AppConnection.Postgres]: transferSqlConnectionCredentialsToPlatform as TAppConnectionTransitionCredentialsToPlatform,
|
||||
[AppConnection.MsSql]: transferSqlConnectionCredentialsToPlatform as TAppConnectionTransitionCredentialsToPlatform,
|
||||
|
@@ -9,6 +9,7 @@ export const APP_CONNECTION_NAME_MAP: Record<AppConnection, string> = {
|
||||
[AppConnection.AzureAppConfiguration]: "Azure App Configuration",
|
||||
[AppConnection.AzureClientSecrets]: "Azure Client Secrets",
|
||||
[AppConnection.AzureDevOps]: "Azure DevOps",
|
||||
[AppConnection.AzureADCS]: "Azure ADCS",
|
||||
[AppConnection.Databricks]: "Databricks",
|
||||
[AppConnection.Humanitec]: "Humanitec",
|
||||
[AppConnection.TerraformCloud]: "Terraform Cloud",
|
||||
@@ -49,6 +50,7 @@ export const APP_CONNECTION_PLAN_MAP: Record<AppConnection, AppConnectionPlanTyp
|
||||
[AppConnection.AzureAppConfiguration]: AppConnectionPlanType.Regular,
|
||||
[AppConnection.AzureClientSecrets]: AppConnectionPlanType.Regular,
|
||||
[AppConnection.AzureDevOps]: AppConnectionPlanType.Regular,
|
||||
[AppConnection.AzureADCS]: AppConnectionPlanType.Regular,
|
||||
[AppConnection.Databricks]: AppConnectionPlanType.Regular,
|
||||
[AppConnection.Humanitec]: AppConnectionPlanType.Regular,
|
||||
[AppConnection.TerraformCloud]: AppConnectionPlanType.Regular,
|
||||
|
@@ -45,6 +45,7 @@ import {
|
||||
import { ValidateAuth0ConnectionCredentialsSchema } from "./auth0";
|
||||
import { ValidateAwsConnectionCredentialsSchema } from "./aws";
|
||||
import { awsConnectionService } from "./aws/aws-connection-service";
|
||||
import { ValidateAzureADCSConnectionCredentialsSchema } from "./azure-adcs/azure-adcs-connection-schemas";
|
||||
import { ValidateAzureAppConfigurationConnectionCredentialsSchema } from "./azure-app-configuration";
|
||||
import { ValidateAzureClientSecretsConnectionCredentialsSchema } from "./azure-client-secrets";
|
||||
import { azureClientSecretsConnectionService } from "./azure-client-secrets/azure-client-secrets-service";
|
||||
@@ -122,6 +123,7 @@ const VALIDATE_APP_CONNECTION_CREDENTIALS_MAP: Record<AppConnection, TValidateAp
|
||||
[AppConnection.AzureKeyVault]: ValidateAzureKeyVaultConnectionCredentialsSchema,
|
||||
[AppConnection.AzureAppConfiguration]: ValidateAzureAppConfigurationConnectionCredentialsSchema,
|
||||
[AppConnection.AzureDevOps]: ValidateAzureDevOpsConnectionCredentialsSchema,
|
||||
[AppConnection.AzureADCS]: ValidateAzureADCSConnectionCredentialsSchema,
|
||||
[AppConnection.Databricks]: ValidateDatabricksConnectionCredentialsSchema,
|
||||
[AppConnection.Humanitec]: ValidateHumanitecConnectionCredentialsSchema,
|
||||
[AppConnection.TerraformCloud]: ValidateTerraformCloudConnectionCredentialsSchema,
|
||||
|
@@ -33,6 +33,12 @@ import {
|
||||
TAwsConnectionInput,
|
||||
TValidateAwsConnectionCredentialsSchema
|
||||
} from "./aws";
|
||||
import {
|
||||
TAzureADCSConnection,
|
||||
TAzureADCSConnectionConfig,
|
||||
TAzureADCSConnectionInput,
|
||||
TValidateAzureADCSConnectionCredentialsSchema
|
||||
} from "./azure-adcs/azure-adcs-connection-types";
|
||||
import {
|
||||
TAzureAppConfigurationConnection,
|
||||
TAzureAppConfigurationConnectionConfig,
|
||||
@@ -223,6 +229,7 @@ export type TAppConnection = { id: string } & (
|
||||
| TAzureKeyVaultConnection
|
||||
| TAzureAppConfigurationConnection
|
||||
| TAzureDevOpsConnection
|
||||
| TAzureADCSConnection
|
||||
| TDatabricksConnection
|
||||
| THumanitecConnection
|
||||
| TTerraformCloudConnection
|
||||
@@ -267,6 +274,7 @@ export type TAppConnectionInput = { id: string } & (
|
||||
| TAzureKeyVaultConnectionInput
|
||||
| TAzureAppConfigurationConnectionInput
|
||||
| TAzureDevOpsConnectionInput
|
||||
| TAzureADCSConnectionInput
|
||||
| TDatabricksConnectionInput
|
||||
| THumanitecConnectionInput
|
||||
| TTerraformCloudConnectionInput
|
||||
@@ -322,6 +330,7 @@ export type TAppConnectionConfig =
|
||||
| TAzureKeyVaultConnectionConfig
|
||||
| TAzureAppConfigurationConnectionConfig
|
||||
| TAzureDevOpsConnectionConfig
|
||||
| TAzureADCSConnectionConfig
|
||||
| TAzureClientSecretsConnectionConfig
|
||||
| TDatabricksConnectionConfig
|
||||
| THumanitecConnectionConfig
|
||||
@@ -359,6 +368,7 @@ export type TValidateAppConnectionCredentialsSchema =
|
||||
| TValidateAzureAppConfigurationConnectionCredentialsSchema
|
||||
| TValidateAzureClientSecretsConnectionCredentialsSchema
|
||||
| TValidateAzureDevOpsConnectionCredentialsSchema
|
||||
| TValidateAzureADCSConnectionCredentialsSchema
|
||||
| TValidateDatabricksConnectionCredentialsSchema
|
||||
| TValidateHumanitecConnectionCredentialsSchema
|
||||
| TValidatePostgresConnectionCredentialsSchema
|
||||
|
@@ -0,0 +1,3 @@
export enum AzureADCSConnectionMethod {
UsernamePassword = "username-password"
}
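The connection functions that follow (heavily mangled by the page extraction) authenticate to the ADCS web enrollment pages over NTLM and accept the username in DOMAIN\user, UPN, or bare form. A condensed restatement of that parsing convention, reduced from the parseCredentials helper in the diff below; the input validation and error types of the original are omitted here.

// Condensed from the parseCredentials helper in the diff that follows:
// DOMAIN\user is used as-is, user@corp.example.com becomes CORP\user
// (NetBIOS name taken from the FQDN), and a bare username gets an empty domain.
interface ParsedCredentials {
  domain: string;
  username: string;
  fullUsername: string;
}

const parseCredentials = (input: string): ParsedCredentials => {
  if (input.includes("\\")) {
    const [domain, username] = input.split("\\");
    return { domain, username, fullUsername: input };
  }
  if (input.includes("@")) {
    const [username, domainPart] = input.split("@");
    const domain = domainPart.split(".")[0].toUpperCase();
    return { domain, username, fullUsername: `${domain}\\${username}` };
  }
  return { domain: "", username: input, fullUsername: input };
};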
@@ -0,0 +1,455 @@
|
||||
/* eslint-disable no-case-declarations, @typescript-eslint/no-unsafe-assignment, @typescript-eslint/no-unsafe-call, @typescript-eslint/no-unsafe-member-access, @typescript-eslint/no-var-requires, no-await-in-loop, no-continue */
|
||||
import { NtlmClient } from "axios-ntlm";
|
||||
import https from "https";
|
||||
|
||||
import { BadRequestError, NotFoundError } from "@app/lib/errors";
|
||||
import { blockLocalAndPrivateIpAddresses } from "@app/lib/validator/validate-url";
|
||||
import { decryptAppConnectionCredentials } from "@app/services/app-connection/app-connection-fns";
|
||||
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
|
||||
|
||||
import { TAppConnectionDALFactory } from "../app-connection-dal";
|
||||
import { AppConnection } from "../app-connection-enums";
|
||||
import { AzureADCSConnectionMethod } from "./azure-adcs-connection-enums";
|
||||
import { TAzureADCSConnectionConfig } from "./azure-adcs-connection-types";
|
||||
|
||||
// Type definitions for axios-ntlm
|
||||
interface AxiosNtlmConfig {
|
||||
ntlm: {
|
||||
domain: string;
|
||||
username: string;
|
||||
password: string;
|
||||
};
|
||||
httpsAgent?: https.Agent;
|
||||
url: string;
|
||||
method?: string;
|
||||
data?: string;
|
||||
headers?: Record<string, string>;
|
||||
}
|
||||
|
||||
interface AxiosNtlmResponse {
|
||||
status: number;
|
||||
data: string;
|
||||
headers: unknown;
|
||||
}
|
||||
|
||||
// Types for credential parsing
|
||||
interface ParsedCredentials {
|
||||
domain: string;
|
||||
username: string;
|
||||
fullUsername: string; // domain\username format
|
||||
}
|
||||
|
||||
// Helper function to parse and normalize credentials for Windows authentication
|
||||
const parseCredentials = (inputUsername: string): ParsedCredentials => {
|
||||
// Ensure inputUsername is a string
|
||||
if (typeof inputUsername !== "string" || !inputUsername.trim()) {
|
||||
throw new BadRequestError({
|
||||
message: "Username must be a non-empty string"
|
||||
});
|
||||
}
|
||||
|
||||
let domain = "";
|
||||
let username = "";
|
||||
let fullUsername = "";
|
||||
|
||||
if (inputUsername.includes("\\")) {
|
||||
// Already in domain\username format
|
||||
const parts = inputUsername.split("\\");
|
||||
if (parts.length === 2) {
|
||||
[domain, username] = parts;
|
||||
fullUsername = inputUsername;
|
||||
} else {
|
||||
throw new BadRequestError({
|
||||
message: "Invalid domain\\username format. Expected format: DOMAIN\\username"
|
||||
});
|
||||
}
|
||||
} else if (inputUsername.includes("@")) {
|
||||
// UPN format: user@domain.com
|
||||
const [user, domainPart] = inputUsername.split("@");
|
||||
if (!user || !domainPart) {
|
||||
throw new BadRequestError({
|
||||
message: "Invalid UPN format. Expected format: user@domain.com"
|
||||
});
|
||||
}
|
||||
|
||||
username = user;
|
||||
// Extract NetBIOS name from FQDN
|
||||
domain = domainPart.split(".")[0].toUpperCase();
|
||||
fullUsername = `${domain}\\${username}`;
|
||||
} else {
|
||||
// Plain username - assume local account or current domain
|
||||
username = inputUsername;
|
||||
domain = "";
|
||||
fullUsername = inputUsername;
|
||||
}
|
||||
|
||||
return { domain, username, fullUsername };
|
||||
};
|
||||
|
||||
// Helper to normalize URL
|
||||
const normalizeAdcsUrl = (url: string): string => {
|
||||
let normalizedUrl = url.trim();
|
||||
|
||||
// Remove trailing slash
|
||||
normalizedUrl = normalizedUrl.replace(/\/$/, "");
|
||||
|
||||
// Ensure HTTPS protocol
|
||||
if (normalizedUrl.startsWith("http://")) {
|
||||
normalizedUrl = normalizedUrl.replace("http://", "https://");
|
||||
} else if (!normalizedUrl.startsWith("https://")) {
|
||||
normalizedUrl = `https://${normalizedUrl}`;
|
||||
}
|
||||
|
||||
return normalizedUrl;
|
||||
};
|
||||
|
||||
// NTLM request wrapper
|
||||
const createHttpsAgent = (sslRejectUnauthorized: boolean, sslCertificate?: string): https.Agent => {
|
||||
const agentOptions: https.AgentOptions = {
|
||||
rejectUnauthorized: sslRejectUnauthorized,
|
||||
keepAlive: true, // axios-ntlm needs keepAlive for NTLM handshake
|
||||
ca: sslCertificate ? [sslCertificate.trim()] : undefined,
|
||||
// Disable hostname verification as Microsoft servers by default use local IPs for certificates
|
||||
// which may not match the hostname used to connect
|
||||
checkServerIdentity: () => undefined
|
||||
};
|
||||
|
||||
return new https.Agent(agentOptions);
|
||||
};
|
||||
|
||||
const axiosNtlmRequest = async (config: AxiosNtlmConfig): Promise<AxiosNtlmResponse> => {
|
||||
const method = config.method || "GET";
|
||||
|
||||
const credentials = {
|
||||
username: config.ntlm.username,
|
||||
password: config.ntlm.password,
|
||||
domain: config.ntlm.domain || "",
|
||||
workstation: ""
|
||||
};
|
||||
|
||||
const axiosConfig = {
|
||||
httpsAgent: config.httpsAgent,
|
||||
timeout: 30000
|
||||
};
|
||||
|
||||
const client = NtlmClient(credentials, axiosConfig);
|
||||
|
||||
const requestOptions: { url: string; method: string; data?: string; headers?: Record<string, string> } = {
|
||||
url: config.url,
|
||||
method
|
||||
};
|
||||
|
||||
if (config.data) {
|
||||
requestOptions.data = config.data;
|
||||
}
|
||||
|
||||
if (config.headers) {
|
||||
requestOptions.headers = config.headers;
|
||||
}
|
||||
|
||||
const response = await client(requestOptions);
|
||||
|
||||
return {
|
||||
status: response.status,
|
||||
data: response.data,
|
||||
headers: response.headers
|
||||
};
|
||||
};
|
||||
|
||||
// Test ADCS connectivity and authentication using NTLM
|
||||
const testAdcsConnection = async (
|
||||
credentials: ParsedCredentials,
|
||||
password: string,
|
||||
baseUrl: string,
|
||||
sslRejectUnauthorized: boolean = true,
|
||||
sslCertificate?: string
|
||||
): Promise<boolean> => {
|
||||
// Test endpoints in order of preference
|
||||
const testEndpoints = [
|
||||
"/certsrv/certrqus.asp", // Certificate request status (most reliable)
|
||||
"/certsrv/certfnsh.asp", // Certificate finalization
|
||||
"/certsrv/default.asp", // Main ADCS page
|
||||
"/certsrv/" // Root certsrv
|
||||
];
|
||||
|
||||
for (const endpoint of testEndpoints) {
|
||||
try {
|
||||
const testUrl = `${baseUrl}${endpoint}`;
|
||||
|
||||
const shouldRejectUnauthorized = sslRejectUnauthorized;
|
||||
|
||||
const httpsAgent = createHttpsAgent(shouldRejectUnauthorized, sslCertificate);
|
||||
|
||||
const response = await axiosNtlmRequest({
|
||||
url: testUrl,
|
||||
method: "GET",
|
||||
httpsAgent,
|
||||
ntlm: {
|
||||
domain: credentials.domain,
|
||||
username: credentials.username,
|
||||
password
|
||||
}
|
||||
});
|
||||
|
||||
// Check if we got a successful response
|
||||
if (response.status === 200) {
|
||||
const responseText = response.data;
|
||||
|
||||
// Verify this is actually an ADCS server by checking content
|
||||
const adcsIndicators = [
|
||||
"Microsoft Active Directory Certificate Services",
|
||||
"Certificate Services",
|
||||
"Request a certificate",
|
||||
"certsrv",
|
||||
"Certificate Template",
|
||||
"Web Enrollment"
|
||||
];
|
||||
|
||||
const isAdcsServer = adcsIndicators.some((indicator) =>
|
||||
responseText.toLowerCase().includes(indicator.toLowerCase())
|
||||
);
|
||||
|
||||
if (isAdcsServer) {
|
||||
// Successfully authenticated and confirmed ADCS
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
if (response.status === 401) {
|
||||
throw new BadRequestError({
|
||||
message: "Authentication failed. Please verify your credentials are correct."
|
||||
});
|
||||
}
|
||||
|
||||
if (response.status === 403) {
|
||||
throw new BadRequestError({
|
||||
message: "Access denied. Your account may not have permission to access ADCS web enrollment."
|
||||
});
|
||||
}
|
||||
} catch (error) {
|
||||
if (error instanceof BadRequestError) {
|
||||
throw error;
|
||||
}
|
||||
|
||||
// Handle network and connection errors
|
||||
if (error instanceof Error) {
|
||||
if (error.message.includes("ENOTFOUND")) {
|
||||
throw new BadRequestError({
|
||||
message: "Cannot resolve ADCS server hostname. Please verify the URL is correct."
|
||||
});
|
||||
}
|
||||
if (error.message.includes("ECONNREFUSED")) {
|
||||
throw new BadRequestError({
|
||||
message: "Connection refused by ADCS server. Please verify the server is running and accessible."
|
||||
});
|
||||
}
|
||||
if (error.message.includes("ETIMEDOUT") || error.message.includes("timeout")) {
|
||||
throw new BadRequestError({
|
||||
message: "Connection timeout. Please verify the server is accessible and not blocked by firewall."
|
||||
});
|
||||
}
|
||||
if (error.message.includes("certificate") || error.message.includes("SSL") || error.message.includes("TLS")) {
|
||||
throw new BadRequestError({
|
||||
message: `SSL/TLS certificate error: ${error.message}. This may indicate a certificate verification failure.`
|
||||
});
|
||||
}
|
||||
if (error.message.includes("DEPTH_ZERO_SELF_SIGNED_CERT")) {
|
||||
throw new BadRequestError({
|
||||
message:
|
||||
"Self-signed certificate detected. Either provide the server's certificate or set 'sslRejectUnauthorized' to false."
|
||||
});
|
||||
}
|
||||
if (error.message.includes("UNABLE_TO_VERIFY_LEAF_SIGNATURE")) {
|
||||
throw new BadRequestError({
|
||||
message: "Unable to verify certificate signature. Please provide the correct CA certificate."
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// Continue to next endpoint for other errors
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
// If we get here, no endpoint worked
|
||||
throw new BadRequestError({
|
||||
message: "Could not connect to ADCS server. Please verify the server URL and that Web Enrollment is enabled."
|
||||
});
|
||||
};
|
||||
|
||||
// Create authenticated NTLM client for ADCS operations
|
||||
const createNtlmClient = (
|
||||
username: string,
|
||||
password: string,
|
||||
baseUrl: string,
|
||||
sslRejectUnauthorized: boolean = true,
|
||||
sslCertificate?: string
|
||||
) => {
|
||||
const parsedCredentials = parseCredentials(username);
|
||||
const normalizedUrl = normalizeAdcsUrl(baseUrl);
|
||||
|
||||
return {
|
||||
get: async (endpoint: string, additionalHeaders: Record<string, string> = {}) => {
|
||||
const shouldRejectUnauthorized = sslRejectUnauthorized;
|
||||
|
||||
const httpsAgent = createHttpsAgent(shouldRejectUnauthorized, sslCertificate);
|
||||
|
||||
return axiosNtlmRequest({
|
||||
url: `${normalizedUrl}${endpoint}`,
|
||||
method: "GET",
|
||||
httpsAgent,
|
||||
headers: additionalHeaders,
|
||||
ntlm: {
|
||||
domain: parsedCredentials.domain,
|
||||
username: parsedCredentials.username,
|
||||
password
|
||||
}
|
||||
});
|
||||
},
|
||||
post: async (endpoint: string, body: string, additionalHeaders: Record<string, string> = {}) => {
|
||||
const shouldRejectUnauthorized = sslRejectUnauthorized;
|
||||
|
||||
const httpsAgent = createHttpsAgent(shouldRejectUnauthorized, sslCertificate);
|
||||
|
||||
return axiosNtlmRequest({
|
||||
url: `${normalizedUrl}${endpoint}`,
|
||||
method: "POST",
|
||||
httpsAgent,
|
||||
data: body,
|
||||
headers: {
|
||||
"Content-Type": "application/x-www-form-urlencoded",
|
||||
...additionalHeaders
|
||||
},
|
||||
ntlm: {
|
||||
domain: parsedCredentials.domain,
|
||||
username: parsedCredentials.username,
|
||||
password
|
||||
}
|
||||
});
|
||||
},
|
||||
baseUrl: normalizedUrl,
|
||||
credentials: parsedCredentials
|
||||
};
|
||||
};
|
||||
|
||||
export const getAzureADCSConnectionCredentials = async (
|
||||
connectionId: string,
|
||||
appConnectionDAL: Pick<TAppConnectionDALFactory, "findById">,
|
||||
kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">
|
||||
) => {
|
||||
const appConnection = await appConnectionDAL.findById(connectionId);
|
||||
|
||||
if (!appConnection) {
|
||||
throw new NotFoundError({ message: `Connection with ID '${connectionId}' not found` });
|
||||
}
|
||||
|
||||
if (appConnection.app !== AppConnection.AzureADCS) {
|
||||
throw new BadRequestError({ message: `Connection with ID '${connectionId}' is not an Azure ADCS connection` });
|
||||
}
|
||||
|
||||
switch (appConnection.method) {
|
||||
case AzureADCSConnectionMethod.UsernamePassword:
|
||||
const credentials = (await decryptAppConnectionCredentials({
|
||||
orgId: appConnection.orgId,
|
||||
kmsService,
|
||||
encryptedCredentials: appConnection.encryptedCredentials
|
||||
})) as {
|
||||
username: string;
|
||||
password: string;
|
||||
adcsUrl: string;
|
||||
sslRejectUnauthorized?: boolean;
|
||||
sslCertificate?: string;
|
||||
};
|
||||
|
||||
return {
|
||||
username: credentials.username,
|
||||
password: credentials.password,
|
||||
adcsUrl: credentials.adcsUrl,
|
||||
sslRejectUnauthorized: credentials.sslRejectUnauthorized ?? true,
|
||||
sslCertificate: credentials.sslCertificate
|
||||
};
|
||||
|
||||
default:
|
||||
throw new BadRequestError({
|
||||
message: `Unsupported Azure ADCS connection method: ${appConnection.method}`
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
export const validateAzureADCSConnectionCredentials = async (appConnection: TAzureADCSConnectionConfig) => {
|
||||
const { credentials } = appConnection;
|
||||
|
||||
try {
|
||||
// Parse and validate credentials
|
||||
const parsedCredentials = parseCredentials(credentials.username);
|
||||
const normalizedUrl = normalizeAdcsUrl(credentials.adcsUrl);
|
||||
|
||||
// Validate URL to prevent DNS manipulation attacks and SSRF
|
||||
await blockLocalAndPrivateIpAddresses(normalizedUrl);
|
||||
|
||||
// Test the connection using NTLM
|
||||
await testAdcsConnection(
|
||||
parsedCredentials,
|
||||
credentials.password,
|
||||
normalizedUrl,
|
||||
credentials.sslRejectUnauthorized ?? true,
|
||||
credentials.sslCertificate
|
||||
);
|
||||
|
||||
// If we get here, authentication was successful
|
||||
return {
|
||||
username: credentials.username,
|
||||
password: credentials.password,
|
||||
adcsUrl: credentials.adcsUrl,
|
||||
sslRejectUnauthorized: credentials.sslRejectUnauthorized ?? true,
|
||||
sslCertificate: credentials.sslCertificate
|
||||
};
|
||||
} catch (error) {
|
||||
if (error instanceof BadRequestError) {
|
||||
throw error;
|
||||
}
|
||||
|
||||
// Handle unexpected errors
|
||||
let errorMessage = "Unable to validate ADCS connection.";
|
||||
if (error instanceof Error) {
|
||||
if (error.message.includes("401") || error.message.includes("Unauthorized")) {
|
||||
errorMessage = "NTLM authentication failed. Please verify your username, password, and domain are correct.";
|
||||
} else if (error.message.includes("ENOTFOUND") || error.message.includes("ECONNREFUSED")) {
|
||||
errorMessage = "Cannot connect to the ADCS server. Please verify the server URL is correct and accessible.";
|
||||
} else if (error.message.includes("timeout")) {
|
||||
errorMessage = "Connection to ADCS server timed out. Please verify the server is accessible.";
|
||||
} else if (
|
||||
error.message.includes("certificate") ||
|
||||
error.message.includes("SSL") ||
|
||||
error.message.includes("TLS") ||
|
||||
error.message.includes("DEPTH_ZERO_SELF_SIGNED_CERT") ||
|
||||
error.message.includes("UNABLE_TO_VERIFY_LEAF_SIGNATURE")
|
||||
) {
|
||||
errorMessage = `SSL/TLS certificate error: ${error.message}. The server certificate may be self-signed or the CA certificate may be incorrect.`;
|
||||
}
|
||||
}
|
||||
|
||||
throw new BadRequestError({
|
||||
message: `Failed to validate Azure ADCS connection: ${errorMessage} Details: ${
|
||||
error instanceof Error ? error.message : "Unknown error"
|
||||
}`
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
export const getAzureADCSConnectionListItem = () => ({
|
||||
name: "Azure ADCS" as const,
|
||||
app: AppConnection.AzureADCS as const,
|
||||
methods: [AzureADCSConnectionMethod.UsernamePassword] as [AzureADCSConnectionMethod.UsernamePassword]
|
||||
});
|
||||
|
||||
// Export helper functions for use in certificate ordering
|
||||
export const createAdcsHttpClient = (
|
||||
username: string,
|
||||
password: string,
|
||||
baseUrl: string,
|
||||
sslRejectUnauthorized: boolean = true,
|
||||
sslCertificate?: string
|
||||
) => {
|
||||
return createNtlmClient(username, password, baseUrl, sslRejectUnauthorized, sslCertificate);
|
||||
};
|
@@ -0,0 +1,88 @@
import z from "zod";

import { AppConnections } from "@app/lib/api-docs";
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
import {
  BaseAppConnectionSchema,
  GenericCreateAppConnectionFieldsSchema,
  GenericUpdateAppConnectionFieldsSchema
} from "@app/services/app-connection/app-connection-schemas";

import { AzureADCSConnectionMethod } from "./azure-adcs-connection-enums";

export const AzureADCSUsernamePasswordCredentialsSchema = z.object({
  adcsUrl: z
    .string()
    .trim()
    .min(1, "ADCS URL required")
    .max(255)
    .refine((value) => value.startsWith("https://"), "ADCS URL must use HTTPS")
    .describe(AppConnections.CREDENTIALS.AZURE_ADCS.adcsUrl),
  username: z
    .string()
    .trim()
    .min(1, "Username required")
    .max(255)
    .describe(AppConnections.CREDENTIALS.AZURE_ADCS.username),
  password: z
    .string()
    .trim()
    .min(1, "Password required")
    .max(255)
    .describe(AppConnections.CREDENTIALS.AZURE_ADCS.password),
  sslRejectUnauthorized: z.boolean().optional().describe(AppConnections.CREDENTIALS.AZURE_ADCS.sslRejectUnauthorized),
  sslCertificate: z
    .string()
    .trim()
    .transform((value) => value || undefined)
    .optional()
    .describe(AppConnections.CREDENTIALS.AZURE_ADCS.sslCertificate)
});

const BaseAzureADCSConnectionSchema = BaseAppConnectionSchema.extend({ app: z.literal(AppConnection.AzureADCS) });

export const AzureADCSConnectionSchema = BaseAzureADCSConnectionSchema.extend({
  method: z.literal(AzureADCSConnectionMethod.UsernamePassword),
  credentials: AzureADCSUsernamePasswordCredentialsSchema
});

export const SanitizedAzureADCSConnectionSchema = z.discriminatedUnion("method", [
  BaseAzureADCSConnectionSchema.extend({
    method: z.literal(AzureADCSConnectionMethod.UsernamePassword),
    credentials: AzureADCSUsernamePasswordCredentialsSchema.pick({
      username: true,
      adcsUrl: true,
      sslRejectUnauthorized: true,
      sslCertificate: true
    })
  })
]);

export const ValidateAzureADCSConnectionCredentialsSchema = z.discriminatedUnion("method", [
  z.object({
    method: z
      .literal(AzureADCSConnectionMethod.UsernamePassword)
      .describe(AppConnections.CREATE(AppConnection.AzureADCS).method),
    credentials: AzureADCSUsernamePasswordCredentialsSchema.describe(
      AppConnections.CREATE(AppConnection.AzureADCS).credentials
    )
  })
]);

export const CreateAzureADCSConnectionSchema = ValidateAzureADCSConnectionCredentialsSchema.and(
  GenericCreateAppConnectionFieldsSchema(AppConnection.AzureADCS)
);

export const UpdateAzureADCSConnectionSchema = z
  .object({
    credentials: AzureADCSUsernamePasswordCredentialsSchema.optional().describe(
      AppConnections.UPDATE(AppConnection.AzureADCS).credentials
    )
  })
  .and(GenericUpdateAppConnectionFieldsSchema(AppConnection.AzureADCS));

export const AzureADCSConnectionListItemSchema = z.object({
  name: z.literal("Azure ADCS"),
  app: z.literal(AppConnection.AzureADCS),
  methods: z.nativeEnum(AzureADCSConnectionMethod).array()
});
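A minimal usage sketch of the credentials schema above; the sample values are hypothetical and not part of the diff:

import { AzureADCSUsernamePasswordCredentialsSchema } from "./azure-adcs-connection-schemas";

// Accepted: HTTPS URL plus non-empty username and password
const ok = AzureADCSUsernamePasswordCredentialsSchema.safeParse({
  adcsUrl: "https://adcs.example.com/certsrv",
  username: "EXAMPLE\\svc-pki",
  password: "example-password"
});
// ok.success === true; optional sslRejectUnauthorized/sslCertificate stay undefined

// Rejected: plain HTTP fails the https:// refinement
const bad = AzureADCSUsernamePasswordCredentialsSchema.safeParse({
  adcsUrl: "http://adcs.example.com/certsrv",
  username: "EXAMPLE\\svc-pki",
  password: "example-password"
});
// bad.success === false, with the issue message "ADCS URL must use HTTPS"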
@@ -0,0 +1,23 @@
import z from "zod";

import { DiscriminativePick } from "@app/lib/types";

import { AppConnection } from "../app-connection-enums";
import {
  AzureADCSConnectionSchema,
  CreateAzureADCSConnectionSchema,
  ValidateAzureADCSConnectionCredentialsSchema
} from "./azure-adcs-connection-schemas";

export type TAzureADCSConnection = z.infer<typeof AzureADCSConnectionSchema>;

export type TAzureADCSConnectionInput = z.infer<typeof CreateAzureADCSConnectionSchema> & {
  app: AppConnection.AzureADCS;
};

export type TValidateAzureADCSConnectionCredentialsSchema = typeof ValidateAzureADCSConnectionCredentialsSchema;

export type TAzureADCSConnectionConfig = DiscriminativePick<
  TAzureADCSConnectionInput,
  "method" | "app" | "credentials"
>;
backend/src/services/app-connection/azure-adcs/index.ts (new file, 4 lines)
@@ -0,0 +1,4 @@
export * from "./azure-adcs-connection-enums";
export * from "./azure-adcs-connection-fns";
export * from "./azure-adcs-connection-schemas";
export * from "./azure-adcs-connection-types";
@@ -4,6 +4,7 @@ import { AxiosInstance, AxiosRequestConfig, AxiosResponse, HttpStatusCode, isAxi
import { createRequestClient } from "@app/lib/config/request";
import { IntegrationUrls } from "@app/services/integration-auth/integration-list";
import { SecretSyncError } from "@app/services/secret-sync/secret-sync-errors";

import { ChecklyConnectionMethod } from "./checkly-connection-constants";
import { TChecklyAccount, TChecklyConnectionConfig, TChecklyVariable } from "./checkly-connection-types";
@@ -181,6 +182,122 @@ class ChecklyPublicClient {

    return res;
  }

  async getCheckGroups(connection: TChecklyConnectionConfig, accountId: string, limit = 50, page = 1) {
    const res = await this.send<{ id: number; name: string }[]>(connection, {
      accountId,
      method: "GET",
      url: `/v1/check-groups`,
      params: { limit, page }
    });

    return res?.map((group) => ({
      id: group.id.toString(),
      name: group.name
    }));
  }

  async getCheckGroup(connection: TChecklyConnectionConfig, accountId: string, groupId: string) {
    try {
      type ChecklyGroupResponse = {
        id: number;
        name: string;
        environmentVariables: Array<{
          key: string;
          value: string;
          locked: boolean;
        }>;
      };

      const res = await this.send<ChecklyGroupResponse>(connection, {
        accountId,
        method: "GET",
        url: `/v1/check-groups/${groupId}`
      });

      if (!res) return null;

      return {
        id: res.id.toString(),
        name: res.name,
        environmentVariables: res.environmentVariables
      };
    } catch (error) {
      if (isAxiosError(error) && error.response?.status === HttpStatusCode.NotFound) {
        return null;
      }
      throw error;
    }
  }

  async updateCheckGroupEnvironmentVariables(
    connection: TChecklyConnectionConfig,
    accountId: string,
    groupId: string,
    environmentVariables: Array<{ key: string; value: string; locked?: boolean }>
  ) {
    if (environmentVariables.length > 50) {
      throw new SecretSyncError({
        message: "Checkly does not support syncing more than 50 variables to Check Group",
        shouldRetry: false
      });
    }

    const apiVariables = environmentVariables.map((v) => ({
      key: v.key,
      value: v.value,
      locked: v.locked ?? false,
      secret: true
    }));

    const group = await this.getCheckGroup(connection, accountId, groupId);

    await this.send(connection, {
      accountId,
      method: "PUT",
      url: `/v2/check-groups/${groupId}`,
      data: { name: group?.name, environmentVariables: apiVariables }
    });

    return this.getCheckGroup(connection, accountId, groupId);
  }

  async getCheckGroupEnvironmentVariables(connection: TChecklyConnectionConfig, accountId: string, groupId: string) {
    const group = await this.getCheckGroup(connection, accountId, groupId);
    return group?.environmentVariables || [];
  }

  async upsertCheckGroupEnvironmentVariables(
    connection: TChecklyConnectionConfig,
    accountId: string,
    groupId: string,
    variables: Array<{ key: string; value: string; locked?: boolean }>
  ) {
    const existingVars = await this.getCheckGroupEnvironmentVariables(connection, accountId, groupId);
    const varMap = new Map(existingVars.map((v) => [v.key, v]));

    for (const newVar of variables) {
      varMap.set(newVar.key, {
        key: newVar.key,
        value: newVar.value,
        locked: newVar.locked ?? false
      });
    }

    return this.updateCheckGroupEnvironmentVariables(connection, accountId, groupId, Array.from(varMap.values()));
  }

  async deleteCheckGroupEnvironmentVariable(
    connection: TChecklyConnectionConfig,
    accountId: string,
    groupId: string,
    variableKey: string
  ) {
    const existingVars = await this.getCheckGroupEnvironmentVariables(connection, accountId, groupId);
    const filteredVars = existingVars.filter((v) => v.key !== variableKey);

    return this.updateCheckGroupEnvironmentVariables(connection, accountId, groupId, filteredVars);
  }
}

export const ChecklyPublicAPI = new ChecklyPublicClient();
@@ -24,7 +24,19 @@ export const checklyConnectionService = (getAppConnection: TGetAppConnectionFunc
    }
  };

  const listGroups = async (connectionId: string, accountId: string, actor: OrgServiceActor) => {
    const appConnection = await getAppConnection(AppConnection.Checkly, connectionId, actor);
    try {
      const groups = await ChecklyPublicAPI.getCheckGroups(appConnection, accountId);
      return groups!;
    } catch (error) {
      logger.error(error, "Failed to list accounts on Checkly");
      return [];
    }
  };

  return {
    listAccounts
    listAccounts,
    listGroups
  };
};
@@ -33,3 +33,15 @@ export type TChecklyAccount = {
  name: string;
  runtimeId: string;
};

export type TChecklyGroupEnvironmentVariable = {
  key: string;
  value: string;
  locked: boolean;
};

export type TChecklyGroup = {
  id: string;
  name: string;
  environmentVariables?: TChecklyGroupEnvironmentVariable[];
};
@@ -1,5 +1,3 @@
import { createAppAuth } from "@octokit/auth-app";
import { request } from "@octokit/request";
import { AxiosError, AxiosRequestConfig, AxiosResponse } from "axios";
import https from "https";
import RE2 from "re2";
@@ -8,6 +6,7 @@ import { verifyHostInputValidity } from "@app/ee/services/dynamic-secret/dynamic
import { TGatewayServiceFactory } from "@app/ee/services/gateway/gateway-service";
import { getConfig } from "@app/lib/config/env";
import { request as httpRequest } from "@app/lib/config/request";
import { crypto } from "@app/lib/crypto";
import { BadRequestError, ForbiddenRequestError, InternalServerError } from "@app/lib/errors";
import { GatewayProxyProtocol, withGatewayProxy } from "@app/lib/gateway";
import { logger } from "@app/lib/logger";
@@ -114,10 +113,13 @@ export const requestWithGitHubGateway = async <T>(
  );
};

export const getGitHubAppAuthToken = async (appConnection: TGitHubConnection) => {
export const getGitHubAppAuthToken = async (
  appConnection: TGitHubConnection,
  gatewayService: Pick<TGatewayServiceFactory, "fnGetGatewayClientTlsByGatewayId">
) => {
  const appCfg = getConfig();
  const appId = appCfg.INF_APP_CONNECTION_GITHUB_APP_ID;
  const appPrivateKey = appCfg.INF_APP_CONNECTION_GITHUB_APP_PRIVATE_KEY;
  let appPrivateKey = appCfg.INF_APP_CONNECTION_GITHUB_APP_PRIVATE_KEY;

  if (!appId || !appPrivateKey) {
    throw new InternalServerError({
@@ -125,21 +127,42 @@ export const getGitHubAppAuthToken = async (appConnection: TGitHubConnection) =>
    });
  }

  appPrivateKey = appPrivateKey
    .split("\n")
    .map((line) => line.trim())
    .join("\n");

  if (appConnection.method !== GitHubConnectionMethod.App) {
    throw new InternalServerError({ message: "Cannot generate GitHub App token for non-app connection" });
  }

  const appAuth = createAppAuth({
    appId,
    privateKey: appPrivateKey,
    installationId: appConnection.credentials.installationId,
    request: request.defaults({
      baseUrl: `https://${await getGitHubInstanceApiUrl(appConnection)}`
    })
  });
  const now = Math.floor(Date.now() / 1000);
  const payload = {
    iat: now,
    exp: now + 5 * 60,
    iss: appId
  };

  const { token } = await appAuth({ type: "installation" });
  return token;
  const appJwt = crypto.jwt().sign(payload, appPrivateKey, { algorithm: "RS256" });

  const apiBaseUrl = await getGitHubInstanceApiUrl(appConnection);
  const { installationId } = appConnection.credentials;

  const response = await requestWithGitHubGateway<{ token: string; expires_at: string }>(
    appConnection,
    gatewayService,
    {
      url: `https://${apiBaseUrl}/app/installations/${installationId}/access_tokens`,
      method: "POST",
      headers: {
        Accept: "application/vnd.github+json",
        Authorization: `Bearer ${appJwt}`,
        "X-GitHub-Api-Version": "2022-11-28"
      }
    }
  );

  return response.data.token;
};

const parseGitHubLinkHeader = (linkHeader: string | undefined): Record<string, string> => {
@@ -174,7 +197,9 @@ export const makePaginatedGitHubRequest = async <T, R = T[]>(
  const { credentials, method } = appConnection;

  const token =
    method === GitHubConnectionMethod.OAuth ? credentials.accessToken : await getGitHubAppAuthToken(appConnection);
    method === GitHubConnectionMethod.OAuth
      ? credentials.accessToken
      : await getGitHubAppAuthToken(appConnection, gatewayService);

  const baseUrl = `https://${await getGitHubInstanceApiUrl(appConnection)}${path}`;
  const initialUrlObj = new URL(baseUrl);
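For context, a self-contained sketch of the installation-token exchange the rewritten getGitHubAppAuthToken performs. It swaps the internal crypto.jwt() wrapper and the gateway-aware request helper for plain jsonwebtoken and axios, so it is illustrative only, not the code in the diff:

import axios from "axios";
import jwt from "jsonwebtoken";

const getInstallationToken = async (appId: string, privateKeyPem: string, installationId: string) => {
  const now = Math.floor(Date.now() / 1000);
  // GitHub App JWTs may live at most 10 minutes; the diff uses a 5-minute window.
  const appJwt = jwt.sign({ iat: now, exp: now + 5 * 60, iss: appId }, privateKeyPem, { algorithm: "RS256" });

  // Exchange the app JWT for a short-lived installation access token.
  const res = await axios.post<{ token: string; expires_at: string }>(
    `https://api.github.com/app/installations/${installationId}/access_tokens`,
    undefined,
    {
      headers: {
        Accept: "application/vnd.github+json",
        Authorization: `Bearer ${appJwt}`,
        "X-GitHub-Api-Version": "2022-11-28"
      }
    }
  );

  return res.data.token; // valid until res.data.expires_at
};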
@@ -75,7 +75,7 @@ export const getTokenConfig = (tokenType: TokenType) => {
};

export const tokenServiceFactory = ({ tokenDAL, userDAL, orgMembershipDAL }: TAuthTokenServiceFactoryDep) => {
  const createTokenForUser = async ({ type, userId, orgId }: TCreateTokenForUserDTO) => {
  const createTokenForUser = async ({ type, userId, orgId, aliasId }: TCreateTokenForUserDTO) => {
    const { token, ...tkCfg } = getTokenConfig(type);
    const appCfg = getConfig();
    const tokenHash = await crypto.hashing().createHash(token, appCfg.SALT_ROUNDS);
@@ -88,7 +88,8 @@ export const tokenServiceFactory = ({ tokenDAL, userDAL, orgMembershipDAL }: TAu
        type,
        userId,
        orgId,
        triesLeft: tkCfg?.triesLeft
        triesLeft: tkCfg?.triesLeft,
        aliasId
      },
      tx
    );

@@ -14,6 +14,7 @@ export type TCreateTokenForUserDTO = {
  type: TokenType;
  userId: string;
  orgId?: string;
  aliasId?: string;
};

export type TCreateOrgInviteTokenDTO = {
@@ -453,6 +453,13 @@ export const authLoginServiceFactory = ({

    const selectedOrg = await orgDAL.findById(organizationId);

    // Check if authEnforced is true, if that's the case, throw an error
    if (selectedOrg.authEnforced) {
      throw new BadRequestError({
        message: "Authentication is required by your organization before you can log in."
      });
    }

    if (!selectedOrgMembership) {
      throw new ForbiddenRequestError({
        message: `User does not have access to the organization named ${selectedOrg?.name}`
@@ -64,6 +64,8 @@ type DBConfigurationColumn = {
  directoryUrl: string;
  accountEmail: string;
  hostedZoneId: string;
  eabKid?: string;
  eabHmacKey?: string;
};

export const castDbEntryToAcmeCertificateAuthority = (
@@ -89,7 +91,9 @@ export const castDbEntryToAcmeCertificateAuthority = (
      hostedZoneId: dbConfigurationCol.hostedZoneId
    },
    directoryUrl: dbConfigurationCol.directoryUrl,
    accountEmail: dbConfigurationCol.accountEmail
    accountEmail: dbConfigurationCol.accountEmail,
    eabKid: dbConfigurationCol.eabKid,
    eabHmacKey: dbConfigurationCol.eabHmacKey
  },
  status: ca.status as CaStatus
};
@@ -128,7 +132,7 @@ export const AcmeCertificateAuthorityFns = ({
      });
    }

    const { dnsAppConnectionId, directoryUrl, accountEmail, dnsProviderConfig } = configuration;
    const { dnsAppConnectionId, directoryUrl, accountEmail, dnsProviderConfig, eabKid, eabHmacKey } = configuration;
    const appConnection = await appConnectionDAL.findById(dnsAppConnectionId);

    if (!appConnection) {
@@ -171,7 +175,9 @@ export const AcmeCertificateAuthorityFns = ({
            directoryUrl,
            accountEmail,
            dnsProvider: dnsProviderConfig.provider,
            hostedZoneId: dnsProviderConfig.hostedZoneId
            hostedZoneId: dnsProviderConfig.hostedZoneId,
            eabKid,
            eabHmacKey
          }
        },
        tx
@@ -214,7 +220,7 @@ export const AcmeCertificateAuthorityFns = ({
  }) => {
    const updatedCa = await certificateAuthorityDAL.transaction(async (tx) => {
      if (configuration) {
        const { dnsAppConnectionId, directoryUrl, accountEmail, dnsProviderConfig } = configuration;
        const { dnsAppConnectionId, directoryUrl, accountEmail, dnsProviderConfig, eabKid, eabHmacKey } = configuration;
        const appConnection = await appConnectionDAL.findById(dnsAppConnectionId);

        if (!appConnection) {
@@ -254,7 +260,9 @@ export const AcmeCertificateAuthorityFns = ({
              directoryUrl,
              accountEmail,
              dnsProvider: dnsProviderConfig.provider,
              hostedZoneId: dnsProviderConfig.hostedZoneId
              hostedZoneId: dnsProviderConfig.hostedZoneId,
              eabKid,
              eabHmacKey
            }
          },
          tx
@@ -354,10 +362,19 @@ export const AcmeCertificateAuthorityFns = ({

    await blockLocalAndPrivateIpAddresses(acmeCa.configuration.directoryUrl);

    const acmeClient = new acme.Client({
    const acmeClientOptions: acme.ClientOptions = {
      directoryUrl: acmeCa.configuration.directoryUrl,
      accountKey
    });
    };

    if (acmeCa.configuration.eabKid && acmeCa.configuration.eabHmacKey) {
      acmeClientOptions.externalAccountBinding = {
        kid: acmeCa.configuration.eabKid,
        hmacKey: acmeCa.configuration.eabHmacKey
      };
    }

    const acmeClient = new acme.Client(acmeClientOptions);

    const alg = keyAlgorithmToAlgCfg(CertKeyAlgorithm.RSA_2048);

@@ -18,7 +18,9 @@ export const AcmeCertificateAuthorityConfigurationSchema = z.object({
    hostedZoneId: z.string().trim().min(1).describe(CertificateAuthorities.CONFIGURATIONS.ACME.hostedZoneId)
  }),
  directoryUrl: z.string().url().trim().min(1).describe(CertificateAuthorities.CONFIGURATIONS.ACME.directoryUrl),
  accountEmail: z.string().trim().min(1).describe(CertificateAuthorities.CONFIGURATIONS.ACME.accountEmail)
  accountEmail: z.string().trim().min(1).describe(CertificateAuthorities.CONFIGURATIONS.ACME.accountEmail),
  eabKid: z.string().trim().max(64).optional().describe(CertificateAuthorities.CONFIGURATIONS.ACME.eabKid),
  eabHmacKey: z.string().trim().max(512).optional().describe(CertificateAuthorities.CONFIGURATIONS.ACME.eabHmacKey)
});

export const AcmeCertificateAuthorityCredentialsSchema = z.object({
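A sketch of the external account binding (EAB) path added above, using the same acme-client options shape as the diff; the directory URL and EAB values here are placeholders:

import * as acme from "acme-client";

const makeAcmeClient = (accountKey: Buffer, eabKid?: string, eabHmacKey?: string) => {
  const options: acme.ClientOptions = {
    directoryUrl: "https://acme.example.com/directory",
    accountKey
  };
  // CAs such as ZeroSSL or Google Trust Services require EAB credentials to tie the
  // ACME account to an existing customer account; Let's Encrypt does not, so the
  // binding stays optional exactly as in the diff.
  if (eabKid && eabHmacKey) {
    options.externalAccountBinding = { kid: eabKid, hmacKey: eabHmacKey };
  }
  return new acme.Client(options);
};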
(File diff suppressed because it is too large)
@@ -0,0 +1,29 @@
import { z } from "zod";

import { CaType } from "../certificate-authority-enums";
import {
  BaseCertificateAuthoritySchema,
  GenericCreateCertificateAuthorityFieldsSchema,
  GenericUpdateCertificateAuthorityFieldsSchema
} from "../certificate-authority-schemas";

export const AzureAdCsCertificateAuthorityConfigurationSchema = z.object({
  azureAdcsConnectionId: z.string().uuid().trim().describe("Azure ADCS Connection ID")
});

export const AzureAdCsCertificateAuthoritySchema = BaseCertificateAuthoritySchema.extend({
  type: z.literal(CaType.AZURE_AD_CS),
  configuration: AzureAdCsCertificateAuthorityConfigurationSchema
});

export const CreateAzureAdCsCertificateAuthoritySchema = GenericCreateCertificateAuthorityFieldsSchema(
  CaType.AZURE_AD_CS
).extend({
  configuration: AzureAdCsCertificateAuthorityConfigurationSchema
});

export const UpdateAzureAdCsCertificateAuthoritySchema = GenericUpdateCertificateAuthorityFieldsSchema(
  CaType.AZURE_AD_CS
).extend({
  configuration: AzureAdCsCertificateAuthorityConfigurationSchema.optional()
});
@@ -0,0 +1,13 @@
import { z } from "zod";

import {
  AzureAdCsCertificateAuthoritySchema,
  CreateAzureAdCsCertificateAuthoritySchema,
  UpdateAzureAdCsCertificateAuthoritySchema
} from "./azure-ad-cs-certificate-authority-schemas";

export type TAzureAdCsCertificateAuthority = z.infer<typeof AzureAdCsCertificateAuthoritySchema>;

export type TCreateAzureAdCsCertificateAuthorityDTO = z.infer<typeof CreateAzureAdCsCertificateAuthoritySchema>;

export type TUpdateAzureAdCsCertificateAuthorityDTO = z.infer<typeof UpdateAzureAdCsCertificateAuthoritySchema>;
@@ -1,6 +1,7 @@
export enum CaType {
  INTERNAL = "internal",
  ACME = "acme"
  ACME = "acme",
  AZURE_AD_CS = "azure-ad-cs"
}

export enum InternalCaType {
@@ -17,3 +18,9 @@ export enum CaStatus {
export enum CaRenewalType {
  EXISTING = "existing"
}

export enum CaCapability {
  ISSUE_CERTIFICATES = "issue-certificates",
  REVOKE_CERTIFICATES = "revoke-certificates",
  RENEW_CERTIFICATES = "renew-certificates"
}
@@ -1,6 +1,29 @@
import { CaType } from "./certificate-authority-enums";
import { CaCapability, CaType } from "./certificate-authority-enums";

export const CERTIFICATE_AUTHORITIES_TYPE_MAP: Record<CaType, string> = {
  [CaType.INTERNAL]: "Internal",
  [CaType.ACME]: "ACME"
  [CaType.ACME]: "ACME",
  [CaType.AZURE_AD_CS]: "Azure AD Certificate Service"
};

export const CERTIFICATE_AUTHORITIES_CAPABILITIES_MAP: Record<CaType, CaCapability[]> = {
  [CaType.INTERNAL]: [
    CaCapability.ISSUE_CERTIFICATES,
    CaCapability.REVOKE_CERTIFICATES,
    CaCapability.RENEW_CERTIFICATES
  ],
  [CaType.ACME]: [CaCapability.ISSUE_CERTIFICATES, CaCapability.REVOKE_CERTIFICATES, CaCapability.RENEW_CERTIFICATES],
  [CaType.AZURE_AD_CS]: [
    CaCapability.ISSUE_CERTIFICATES,
    CaCapability.RENEW_CERTIFICATES
    // Note: REVOKE_CERTIFICATES intentionally omitted - not supported by ADCS connector
  ]
};

/**
 * Check if a certificate authority type supports a specific capability
 */
export const caSupportsCapability = (caType: CaType, capability: CaCapability): boolean => {
  const capabilities = CERTIFICATE_AUTHORITIES_CAPABILITIES_MAP[caType] || [];
  return capabilities.includes(capability);
};
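A usage sketch (not part of the diff) of how the capability map lets callers gate operations per CA type instead of special-casing "external" CAs; the certificate service hunk further down follows the same pattern:

import { CaCapability, CaType } from "./certificate-authority-enums";
import { caSupportsCapability } from "./certificate-authority-maps";

const assertCanRevoke = (caType: CaType) => {
  if (!caSupportsCapability(caType, CaCapability.REVOKE_CERTIFICATES)) {
    throw new Error(`Revocation is not supported for ${caType} certificate authorities`);
  }
};

assertCanRevoke(CaType.INTERNAL); // ok
assertCanRevoke(CaType.AZURE_AD_CS); // throws: the ADCS connector does not expose revocation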
@@ -21,6 +21,7 @@ import { TCertificateSecretDALFactory } from "../certificate/certificate-secret-
import { TPkiSubscriberDALFactory } from "../pki-subscriber/pki-subscriber-dal";
import { SubscriberOperationStatus } from "../pki-subscriber/pki-subscriber-types";
import { AcmeCertificateAuthorityFns } from "./acme/acme-certificate-authority-fns";
import { AzureAdCsCertificateAuthorityFns } from "./azure-ad-cs/azure-ad-cs-certificate-authority-fns";
import { TCertificateAuthorityDALFactory } from "./certificate-authority-dal";
import { CaType } from "./certificate-authority-enums";
import { keyAlgorithmToAlgCfg } from "./certificate-authority-fns";
@@ -33,7 +34,7 @@ import {

type TCertificateAuthorityQueueFactoryDep = {
  certificateAuthorityDAL: TCertificateAuthorityDALFactory;
  appConnectionDAL: Pick<TAppConnectionDALFactory, "findById" | "update">;
  appConnectionDAL: Pick<TAppConnectionDALFactory, "findById" | "update" | "updateById">;
  appConnectionService: Pick<TAppConnectionServiceFactory, "connectAppConnectionById">;
  externalCertificateAuthorityDAL: Pick<TExternalCertificateAuthorityDALFactory, "create" | "update">;
  keyStore: Pick<TKeyStoreFactory, "acquireLock" | "setItemWithExpiry" | "getItem">;
@@ -82,6 +83,19 @@ export const certificateAuthorityQueueFactory = ({
    projectDAL
  });

  const azureAdCsFns = AzureAdCsCertificateAuthorityFns({
    appConnectionDAL,
    appConnectionService,
    certificateAuthorityDAL,
    externalCertificateAuthorityDAL,
    certificateDAL,
    certificateBodyDAL,
    certificateSecretDAL,
    kmsService,
    pkiSubscriberDAL,
    projectDAL
  });

  // TODO 1: auto-periodic rotation
  // TODO 2: manual rotation

@@ -158,6 +172,13 @@ export const certificateAuthorityQueueFactory = ({
          lastOperationMessage: "Certificate ordered successfully",
          lastOperationAt: new Date()
        });
      } else if (caType === CaType.AZURE_AD_CS) {
        await azureAdCsFns.orderSubscriberCertificate(subscriberId);
        await pkiSubscriberDAL.updateById(subscriberId, {
          lastOperationStatus: SubscriberOperationStatus.SUCCESS,
          lastOperationMessage: "Certificate ordered successfully",
          lastOperationAt: new Date()
        });
      }
    } catch (e: unknown) {
      if (e instanceof Error) {
@@ -22,6 +22,14 @@ import {
  TCreateAcmeCertificateAuthorityDTO,
  TUpdateAcmeCertificateAuthorityDTO
} from "./acme/acme-certificate-authority-types";
import {
  AzureAdCsCertificateAuthorityFns,
  castDbEntryToAzureAdCsCertificateAuthority
} from "./azure-ad-cs/azure-ad-cs-certificate-authority-fns";
import {
  TCreateAzureAdCsCertificateAuthorityDTO,
  TUpdateAzureAdCsCertificateAuthorityDTO
} from "./azure-ad-cs/azure-ad-cs-certificate-authority-types";
import { TCertificateAuthorityDALFactory } from "./certificate-authority-dal";
import { CaType } from "./certificate-authority-enums";
import {
@@ -34,7 +42,7 @@ import { TInternalCertificateAuthorityServiceFactory } from "./internal/internal
import { TCreateInternalCertificateAuthorityDTO } from "./internal/internal-certificate-authority-types";

type TCertificateAuthorityServiceFactoryDep = {
  appConnectionDAL: Pick<TAppConnectionDALFactory, "findById" | "update">;
  appConnectionDAL: Pick<TAppConnectionDALFactory, "findById" | "update" | "updateById">;
  appConnectionService: Pick<TAppConnectionServiceFactory, "connectAppConnectionById">;
  certificateAuthorityDAL: Pick<
    TCertificateAuthorityDALFactory,
@@ -91,6 +99,19 @@ export const certificateAuthorityServiceFactory = ({
    projectDAL
  });

  const azureAdCsFns = AzureAdCsCertificateAuthorityFns({
    appConnectionDAL,
    appConnectionService,
    certificateAuthorityDAL,
    externalCertificateAuthorityDAL,
    certificateDAL,
    certificateBodyDAL,
    certificateSecretDAL,
    kmsService,
    pkiSubscriberDAL,
    projectDAL
  });

  const createCertificateAuthority = async (
    { type, projectId, name, enableDirectIssuance, configuration, status }: TCreateCertificateAuthorityDTO,
    actor: OrgServiceActor
@@ -146,6 +167,17 @@ export const certificateAuthorityServiceFactory = ({
      });
    }

    if (type === CaType.AZURE_AD_CS) {
      return azureAdCsFns.createCertificateAuthority({
        name,
        projectId,
        configuration: configuration as TCreateAzureAdCsCertificateAuthorityDTO["configuration"],
        enableDirectIssuance,
        status,
        actor
      });
    }

    throw new BadRequestError({ message: "Invalid certificate authority type" });
  };

@@ -205,6 +237,10 @@ export const certificateAuthorityServiceFactory = ({
      return castDbEntryToAcmeCertificateAuthority(certificateAuthority);
    }

    if (type === CaType.AZURE_AD_CS) {
      return castDbEntryToAzureAdCsCertificateAuthority(certificateAuthority);
    }

    throw new BadRequestError({ message: "Invalid certificate authority type" });
  };

@@ -249,6 +285,10 @@ export const certificateAuthorityServiceFactory = ({
      return acmeFns.listCertificateAuthorities({ projectId });
    }

    if (type === CaType.AZURE_AD_CS) {
      return azureAdCsFns.listCertificateAuthorities({ projectId });
    }

    throw new BadRequestError({ message: "Invalid certificate authority type" });
  };

@@ -323,6 +363,17 @@ export const certificateAuthorityServiceFactory = ({
      });
    }

    if (type === CaType.AZURE_AD_CS) {
      return azureAdCsFns.updateCertificateAuthority({
        id: certificateAuthority.id,
        configuration: configuration as TUpdateAzureAdCsCertificateAuthorityDTO["configuration"],
        enableDirectIssuance,
        actor,
        status,
        name
      });
    }

    throw new BadRequestError({ message: "Invalid certificate authority type" });
  };

@@ -384,14 +435,54 @@ export const certificateAuthorityServiceFactory = ({
      return castDbEntryToAcmeCertificateAuthority(certificateAuthority);
    }

    if (type === CaType.AZURE_AD_CS) {
      return castDbEntryToAzureAdCsCertificateAuthority(certificateAuthority);
    }

    throw new BadRequestError({ message: "Invalid certificate authority type" });
  };

  const getAzureAdcsTemplates = async ({
    caId,
    projectId,
    actor,
    actorId,
    actorAuthMethod,
    actorOrgId
  }: {
    caId: string;
    projectId: string;
    actor: OrgServiceActor["type"];
    actorId: string;
    actorAuthMethod: OrgServiceActor["authMethod"];
    actorOrgId?: string;
  }) => {
    const { permission } = await permissionService.getProjectPermission({
      actor,
      actorId,
      projectId,
      actorAuthMethod,
      actorOrgId,
      actionProjectType: ActionProjectType.CertificateManager
    });

    ForbiddenError.from(permission).throwUnlessCan(
      ProjectPermissionActions.Read,
      ProjectPermissionSub.CertificateAuthorities
    );

    return azureAdCsFns.getTemplates({
      caId,
      projectId
    });
  };

  return {
    createCertificateAuthority,
    findCertificateAuthorityByNameAndProjectId,
    listCertificateAuthoritiesByProjectId,
    updateCertificateAuthority,
    deleteCertificateAuthority
    deleteCertificateAuthority,
    getAzureAdcsTemplates
  };
};
@@ -1,13 +1,23 @@
import { TAcmeCertificateAuthority, TAcmeCertificateAuthorityInput } from "./acme/acme-certificate-authority-types";
import {
  TAzureAdCsCertificateAuthority,
  TCreateAzureAdCsCertificateAuthorityDTO
} from "./azure-ad-cs/azure-ad-cs-certificate-authority-types";
import { CaType } from "./certificate-authority-enums";
import {
  TInternalCertificateAuthority,
  TInternalCertificateAuthorityInput
} from "./internal/internal-certificate-authority-types";

export type TCertificateAuthority = TInternalCertificateAuthority | TAcmeCertificateAuthority;
export type TCertificateAuthority =
  | TInternalCertificateAuthority
  | TAcmeCertificateAuthority
  | TAzureAdCsCertificateAuthority;

export type TCertificateAuthorityInput = TInternalCertificateAuthorityInput | TAcmeCertificateAuthorityInput;
export type TCertificateAuthorityInput =
  | TInternalCertificateAuthorityInput
  | TAcmeCertificateAuthorityInput
  | TCreateAzureAdCsCertificateAuthorityDTO;

export type TCreateCertificateAuthorityDTO = Omit<TCertificateAuthority, "id">;
@@ -36,12 +36,18 @@ import { validateAndMapAltNameType } from "../certificate-authority-validators";
import { TIssueCertWithTemplateDTO } from "./internal-certificate-authority-types";

type TInternalCertificateAuthorityFnsDeps = {
  certificateAuthorityDAL: Pick<TCertificateAuthorityDALFactory, "findByIdWithAssociatedCa" | "findById">;
  certificateAuthorityDAL: Pick<
    TCertificateAuthorityDALFactory,
    "findByIdWithAssociatedCa" | "findById" | "create" | "transaction" | "updateById" | "findWithAssociatedCa"
  >;
  certificateAuthorityCertDAL: Pick<TCertificateAuthorityCertDALFactory, "findById">;
  certificateAuthoritySecretDAL: Pick<TCertificateAuthoritySecretDALFactory, "findOne">;
  certificateAuthorityCrlDAL: Pick<TCertificateAuthorityCrlDALFactory, "findOne">;
  projectDAL: Pick<TProjectDALFactory, "findById" | "transaction" | "findOne" | "updateById">;
  kmsService: Pick<TKmsServiceFactory, "decryptWithKmsKey" | "encryptWithKmsKey" | "generateKmsKey">;
  kmsService: Pick<
    TKmsServiceFactory,
    "decryptWithKmsKey" | "encryptWithKmsKey" | "generateKmsKey" | "createCipherPairWithDataKey"
  >;
  certificateDAL: Pick<TCertificateDALFactory, "create" | "transaction">;
  certificateBodyDAL: Pick<TCertificateBodyDALFactory, "create">;
  certificateSecretDAL: Pick<TCertificateSecretDALFactory, "create">;
@@ -14,6 +14,8 @@ import { TCertificateBodyDALFactory } from "@app/services/certificate/certificat
import { TCertificateDALFactory } from "@app/services/certificate/certificate-dal";
import { TCertificateAuthorityCertDALFactory } from "@app/services/certificate-authority/certificate-authority-cert-dal";
import { TCertificateAuthorityDALFactory } from "@app/services/certificate-authority/certificate-authority-dal";
import { CaCapability, CaType } from "@app/services/certificate-authority/certificate-authority-enums";
import { caSupportsCapability } from "@app/services/certificate-authority/certificate-authority-maps";
import { TCertificateAuthoritySecretDALFactory } from "@app/services/certificate-authority/certificate-authority-secret-dal";
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
import { TPkiCollectionDALFactory } from "@app/services/pki-collection/pki-collection-dal";
@@ -184,9 +186,11 @@ export const certificateServiceFactory = ({

    const ca = await certificateAuthorityDAL.findByIdWithAssociatedCa(cert.caId);

    if (ca.externalCa?.id) {
    // Check if the CA type supports revocation
    const caType = (ca.externalCa?.type as CaType) ?? CaType.INTERNAL;
    if (!caSupportsCapability(caType, CaCapability.REVOKE_CERTIFICATES)) {
      throw new BadRequestError({
        message: "Cannot revoke external certificates"
        message: "Certificate revocation is not supported by this certificate authority type"
      });
    }

@@ -218,18 +222,37 @@ export const certificateServiceFactory = ({
      }
    );

    // rebuild CRL (TODO: move to interval-based cron job)
    await rebuildCaCrl({
      caId: ca.id,
      certificateAuthorityDAL,
      certificateAuthorityCrlDAL,
      certificateAuthoritySecretDAL,
      projectDAL,
      certificateDAL,
      kmsService
    });
    // Note: External CA revocation handling would go here for supported CA types
    // Currently, only internal CAs and ACME CAs support revocation

    return { revokedAt, cert, ca: expandInternalCa(ca) };
    // rebuild CRL (TODO: move to interval-based cron job)
    // Only rebuild CRL for internal CAs - external CAs manage their own CRLs
    if (!ca.externalCa?.id) {
      await rebuildCaCrl({
        caId: ca.id,
        certificateAuthorityDAL,
        certificateAuthorityCrlDAL,
        certificateAuthoritySecretDAL,
        projectDAL,
        certificateDAL,
        kmsService
      });
    }

    // Return appropriate CA format based on CA type
    const caResult = ca.externalCa?.id
      ? {
          id: ca.id,
          name: ca.name,
          projectId: ca.projectId,
          status: ca.status,
          enableDirectIssuance: ca.enableDirectIssuance,
          type: ca.externalCa.type,
          externalCa: ca.externalCa
        }
      : expandInternalCa(ca);

    return { revokedAt, cert, ca: caResult };
  };

  /**
@@ -156,6 +156,7 @@ export const groupProjectDALFactory = (db: TDbClient) => {
          `${TableName.GroupProjectMembershipRole}.customRoleId`,
          `${TableName.ProjectRoles}.id`
        )
        .join(TableName.OrgMembership, `${TableName.Users}.id`, `${TableName.OrgMembership}.userId`)
        .select(
          db.ref("id").withSchema(TableName.UserGroupMembership),
          db.ref("createdAt").withSchema(TableName.UserGroupMembership),
@@ -176,7 +177,8 @@ export const groupProjectDALFactory = (db: TDbClient) => {
          db.ref("temporaryRange").withSchema(TableName.GroupProjectMembershipRole),
          db.ref("temporaryAccessStartTime").withSchema(TableName.GroupProjectMembershipRole),
          db.ref("temporaryAccessEndTime").withSchema(TableName.GroupProjectMembershipRole),
          db.ref("name").as("projectName").withSchema(TableName.Project)
          db.ref("name").as("projectName").withSchema(TableName.Project),
          db.ref("isActive").withSchema(TableName.OrgMembership)
        )
        .where({ isGhost: false });

@@ -192,7 +194,8 @@ export const groupProjectDALFactory = (db: TDbClient) => {
        id,
        userId,
        projectName,
        createdAt
        createdAt,
        isActive
      }) => ({
        isGroupMember: true,
        id,
@@ -202,7 +205,7 @@ export const groupProjectDALFactory = (db: TDbClient) => {
          id: projectId,
          name: projectName
        },
        user: { email, username, firstName, lastName, id: userId, publicKey, isGhost },
        user: { email, username, firstName, lastName, id: userId, publicKey, isGhost, isOrgMembershipActive: isActive },
        createdAt
      }),
      key: "id",
@@ -8,6 +8,7 @@ import {
  validatePrivilegeChangeOperation
} from "@app/ee/services/permission/permission-fns";
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service-types";
import { TKeyStoreFactory } from "@app/keystore/keystore";
import { getConfig } from "@app/lib/config/env";
import { crypto } from "@app/lib/crypto/cryptography";
import { BadRequestError, NotFoundError, PermissionBoundaryError, UnauthorizedError } from "@app/lib/errors";
@@ -22,6 +23,7 @@ import { TIdentityUaClientSecretDALFactory } from "./identity-ua-client-secret-d
import { TIdentityUaDALFactory } from "./identity-ua-dal";
import {
  TAttachUaDTO,
  TClearUaLockoutsDTO,
  TCreateUaClientSecretDTO,
  TGetUaClientSecretsDTO,
  TGetUaDTO,
@@ -38,30 +40,30 @@ type TIdentityUaServiceFactoryDep = {
  identityOrgMembershipDAL: TIdentityOrgDALFactory;
  permissionService: Pick<TPermissionServiceFactory, "getOrgPermission">;
  licenseService: Pick<TLicenseServiceFactory, "getPlan">;
  keyStore: Pick<TKeyStoreFactory, "setItemWithExpiry" | "getItem" | "deleteItem" | "getKeysByPattern" | "deleteItems">;
};

export type TIdentityUaServiceFactory = ReturnType<typeof identityUaServiceFactory>;

type LockoutObject = {
  lockedOut: boolean;
  failedAttempts: number;
};

export const identityUaServiceFactory = ({
  identityUaDAL,
  identityUaClientSecretDAL,
  identityAccessTokenDAL,
  identityOrgMembershipDAL,
  permissionService,
  licenseService
  licenseService,
  keyStore
}: TIdentityUaServiceFactoryDep) => {
  const login = async (clientId: string, clientSecret: string, ip: string) => {
    const identityUa = await identityUaDAL.findOne({ clientId });
    if (!identityUa) {
      throw new NotFoundError({
        message: "No identity with specified client ID was found"
      });
    }

    const identityMembershipOrg = await identityOrgMembershipDAL.findOne({ identityId: identityUa.identityId });
    if (!identityMembershipOrg) {
      throw new NotFoundError({
        message: "No identity with the org membership was found"
      throw new UnauthorizedError({
        message: "Invalid credentials"
      });
    }

@@ -69,69 +71,119 @@ export const identityUaServiceFactory = ({
      ipAddress: ip,
      trustedIps: identityUa.clientSecretTrustedIps as TIp[]
    });
    const clientSecretPrefix = clientSecret.slice(0, 4);
    const clientSecrtInfo = await identityUaClientSecretDAL.find({
      identityUAId: identityUa.id,
      isClientSecretRevoked: false,
      clientSecretPrefix
    });

    let validClientSecretInfo: (typeof clientSecrtInfo)[0] | null = null;
    for await (const info of clientSecrtInfo) {
      const isMatch = await crypto.hashing().compareHash(clientSecret, info.clientSecretHash);
    const LOCKOUT_KEY = `lockout:identity:${identityUa.identityId}:${IdentityAuthMethod.UNIVERSAL_AUTH}:${clientId}`;

      if (isMatch) {
        validClientSecretInfo = info;
        break;
      }
    const identityMembershipOrg = await identityOrgMembershipDAL.findOne({ identityId: identityUa.identityId });
    if (!identityMembershipOrg) {
      throw new UnauthorizedError({
        message: "Invalid credentials"
      });
    }

    if (!validClientSecretInfo) throw new UnauthorizedError({ message: "Invalid credentials" });
    const identityTx = await identityUaDAL.transaction(async (tx) => {
      // await tx.raw("SELECT pg_advisory_xact_lock(?)", [PgSqlLock.IdentityLogin(identityUa.identityId, clientId)]);

    const { clientSecretTTL, clientSecretNumUses, clientSecretNumUsesLimit } = validClientSecretInfo;
    if (Number(clientSecretTTL) > 0) {
      const clientSecretCreated = new Date(validClientSecretInfo.createdAt);
      const ttlInMilliseconds = Number(clientSecretTTL) * 1000;
      const currentDate = new Date();
      const expirationTime = new Date(clientSecretCreated.getTime() + ttlInMilliseconds);
      // Lockout Check
      const lockoutRaw = await keyStore.getItem(LOCKOUT_KEY);

      if (currentDate > expirationTime) {
      let lockout: LockoutObject | undefined;
      if (lockoutRaw) {
        lockout = JSON.parse(lockoutRaw) as LockoutObject;
      }

      if (lockout && lockout.lockedOut) {
        throw new UnauthorizedError({
          message: "This identity auth method is temporarily locked, please try again later"
        });
      }

      const clientSecretPrefix = clientSecret.slice(0, 4);
      const clientSecretInfo = await identityUaClientSecretDAL.find({
        identityUAId: identityUa.id,
        isClientSecretRevoked: false,
        clientSecretPrefix
      });

      let validClientSecretInfo: (typeof clientSecretInfo)[0] | null = null;
      for await (const info of clientSecretInfo) {
        const isMatch = await crypto.hashing().compareHash(clientSecret, info.clientSecretHash);

        if (isMatch) {
          validClientSecretInfo = info;
          break;
        }
      }

      if (!validClientSecretInfo) {
        if (identityUa.lockoutEnabled) {
          if (!lockout) {
            lockout = {
              lockedOut: false,
              failedAttempts: 0
            };
          }

          lockout.failedAttempts += 1;
          if (lockout.failedAttempts >= identityUa.lockoutThreshold) {
            lockout.lockedOut = true;
          }

          await keyStore.setItemWithExpiry(
            LOCKOUT_KEY,
            lockout.lockedOut ? identityUa.lockoutDurationSeconds : identityUa.lockoutCounterResetSeconds,
            JSON.stringify(lockout)
          );
        }

        throw new UnauthorizedError({ message: "Invalid credentials" });
      } else if (lockout) {
        await keyStore.deleteItem(LOCKOUT_KEY);
      }

      const { clientSecretTTL, clientSecretNumUses, clientSecretNumUsesLimit } = validClientSecretInfo;
      if (Number(clientSecretTTL) > 0) {
        const clientSecretCreated = new Date(validClientSecretInfo.createdAt);
        const ttlInMilliseconds = Number(clientSecretTTL) * 1000;
        const currentDate = new Date();
        const expirationTime = new Date(clientSecretCreated.getTime() + ttlInMilliseconds);

        if (currentDate > expirationTime) {
          await identityUaClientSecretDAL.updateById(validClientSecretInfo.id, {
            isClientSecretRevoked: true
          });

          throw new UnauthorizedError({
            message: "Access denied due to expired client secret"
          });
        }
      }

      if (clientSecretNumUsesLimit > 0 && clientSecretNumUses === clientSecretNumUsesLimit) {
        // number of times client secret can be used for
        // a login operation reached
        await identityUaClientSecretDAL.updateById(validClientSecretInfo.id, {
          isClientSecretRevoked: true
        });

        throw new UnauthorizedError({
          message: "Access denied due to expired client secret"
          message: "Access denied due to client secret usage limit reached"
        });
      }
    }

    if (clientSecretNumUsesLimit > 0 && clientSecretNumUses === clientSecretNumUsesLimit) {
      // number of times client secret can be used for
      // a login operation reached
      await identityUaClientSecretDAL.updateById(validClientSecretInfo.id, {
        isClientSecretRevoked: true
      });
      throw new UnauthorizedError({
        message: "Access denied due to client secret usage limit reached"
      });
    }
      const accessTokenTTLParams =
        Number(identityUa.accessTokenPeriod) === 0
          ? {
              accessTokenTTL: identityUa.accessTokenTTL,
              accessTokenMaxTTL: identityUa.accessTokenMaxTTL
            }
          : {
              accessTokenTTL: identityUa.accessTokenPeriod,
              // We set a very large Max TTL for periodic tokens to ensure that clients (even outdated ones) can always renew their token
              // without them having to update their SDKs, CLIs, etc. This workaround sets it to 30 years to emulate "forever"
              accessTokenMaxTTL: 1000000000
            };

    const accessTokenTTLParams =
      Number(identityUa.accessTokenPeriod) === 0
        ? {
            accessTokenTTL: identityUa.accessTokenTTL,
            accessTokenMaxTTL: identityUa.accessTokenMaxTTL
          }
        : {
            accessTokenTTL: identityUa.accessTokenPeriod,
            // We set a very large Max TTL for periodic tokens to ensure that clients (even outdated ones) can always renew their token
            // without them having to update their SDKs, CLIs, etc. This workaround sets it to 30 years to emulate "forever"
            accessTokenMaxTTL: 1000000000
          };

    const identityAccessToken = await identityUaDAL.transaction(async (tx) => {
      const uaClientSecretDoc = await identityUaClientSecretDAL.incrementUsage(validClientSecretInfo!.id, tx);
      const uaClientSecretDoc = await identityUaClientSecretDAL.incrementUsage(validClientSecretInfo.id, tx);
      await identityOrgMembershipDAL.updateById(
        identityMembershipOrg.id,
        {
@@ -154,33 +206,33 @@ export const identityUaServiceFactory = ({
        tx
      );

      return newToken;
      return { newToken, validClientSecretInfo, accessTokenTTLParams };
    });

    const appCfg = getConfig();
    const accessToken = crypto.jwt().sign(
      {
        identityId: identityUa.identityId,
        clientSecretId: validClientSecretInfo.id,
        identityAccessTokenId: identityAccessToken.id,
        clientSecretId: identityTx.validClientSecretInfo.id,
        identityAccessTokenId: identityTx.newToken.id,
        authTokenType: AuthTokenType.IDENTITY_ACCESS_TOKEN
      } as TIdentityAccessTokenJwtPayload,
      appCfg.AUTH_SECRET,
      // akhilmhdh: for non-expiry tokens you should not even set the value, including undefined. Even for undefined jsonwebtoken throws error
      Number(identityAccessToken.accessTokenTTL) === 0
      Number(identityTx.newToken.accessTokenTTL) === 0
        ? undefined
        : {
            expiresIn: Number(identityAccessToken.accessTokenTTL)
            expiresIn: Number(identityTx.newToken.accessTokenTTL)
          }
    );

    return {
      accessToken,
      identityUa,
      validClientSecretInfo,
      identityAccessToken,
      validClientSecretInfo: identityTx.validClientSecretInfo,
      identityAccessToken: identityTx.newToken,
      identityMembershipOrg,
      ...accessTokenTTLParams
      ...identityTx.accessTokenTTLParams
    };
  };

@@ -196,7 +248,11 @@ export const identityUaServiceFactory = ({
    actor,
    actorOrgId,
    isActorSuperAdmin,
    accessTokenPeriod
    accessTokenPeriod,
    lockoutEnabled,
    lockoutThreshold,
    lockoutDurationSeconds,
    lockoutCounterResetSeconds
  }: TAttachUaDTO) => {
    await validateIdentityUpdateForSuperAdminPrivileges(identityId, isActorSuperAdmin);

@@ -266,7 +322,11 @@ export const identityUaServiceFactory = ({
        accessTokenTTL,
        accessTokenNumUsesLimit,
        accessTokenTrustedIps: JSON.stringify(reformattedAccessTokenTrustedIps),
        accessTokenPeriod
        accessTokenPeriod,
        lockoutEnabled,
        lockoutThreshold,
        lockoutDurationSeconds,
        lockoutCounterResetSeconds
      },
      tx
    );
@@ -286,7 +346,11 @@ export const identityUaServiceFactory = ({
    actorId,
    actorAuthMethod,
    actor,
    actorOrgId
    actorOrgId,
    lockoutEnabled,
    lockoutThreshold,
    lockoutDurationSeconds,
    lockoutCounterResetSeconds
  }: TUpdateUaDTO) => {
    const identityMembershipOrg = await identityOrgMembershipDAL.findOne({ identityId });
    if (!identityMembershipOrg) throw new NotFoundError({ message: `Failed to find identity with ID ${identityId}` });
@@ -362,7 +426,11 @@ export const identityUaServiceFactory = ({
      accessTokenPeriod,
      accessTokenTrustedIps: reformattedAccessTokenTrustedIps
        ? JSON.stringify(reformattedAccessTokenTrustedIps)
        : undefined
        : undefined,
      lockoutEnabled,
      lockoutThreshold,
      lockoutDurationSeconds,
      lockoutCounterResetSeconds
    });
    return { ...updatedUaAuth, orgId: identityMembershipOrg.orgId };
  };
@@ -713,6 +781,38 @@ export const identityUaServiceFactory = ({
    return { ...updatedClientSecret, identityId, orgId: identityMembershipOrg.orgId };
  };

  const clearUniversalAuthLockouts = async ({
    identityId,
    actorId,
    actor,
    actorOrgId,
    actorAuthMethod
  }: TClearUaLockoutsDTO) => {
    const identityMembershipOrg = await identityOrgMembershipDAL.findOne({ identityId });
    if (!identityMembershipOrg) throw new NotFoundError({ message: `Failed to find identity with ID ${identityId}` });

    if (!identityMembershipOrg.identity.authMethods.includes(IdentityAuthMethod.UNIVERSAL_AUTH)) {
      throw new BadRequestError({
        message: "The identity does not have universal auth"
      });
    }

    const { permission } = await permissionService.getOrgPermission(
      actor,
      actorId,
      identityMembershipOrg.orgId,
      actorAuthMethod,
      actorOrgId
    );
    ForbiddenError.from(permission).throwUnlessCan(OrgPermissionIdentityActions.Edit, OrgPermissionSubjects.Identity);

    const deleted = await keyStore.deleteItems({
      pattern: `lockout:identity:${identityId}:${IdentityAuthMethod.UNIVERSAL_AUTH}:*`
    });

    return { deleted, identityId, orgId: identityMembershipOrg.orgId };
  };

  return {
    login,
    attachUniversalAuth,
@@ -722,6 +822,7 @@ export const identityUaServiceFactory = ({
    createUniversalAuthClientSecret,
    getUniversalAuthClientSecrets,
    revokeUniversalAuthClientSecret,
    getUniversalAuthClientSecretById
    getUniversalAuthClientSecretById,
    clearUniversalAuthLockouts
  };
};
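A minimal sketch of the lockout bookkeeping the hunks above add, pulled out as a pure function for clarity; the function name and shape here are illustrative, not part of the diff:

type LockoutObject = { lockedOut: boolean; failedAttempts: number };

const nextLockoutState = (
  current: LockoutObject | undefined,
  cfg: { lockoutThreshold: number; lockoutDurationSeconds: number; lockoutCounterResetSeconds: number }
): { lockout: LockoutObject; expirySeconds: number } => {
  // Each failed login bumps the counter; crossing the threshold flips lockedOut.
  const lockout = current ?? { lockedOut: false, failedAttempts: 0 };
  lockout.failedAttempts += 1;
  if (lockout.failedAttempts >= cfg.lockoutThreshold) {
    lockout.lockedOut = true;
  }
  // Locked-out entries persist for the full lockout duration; otherwise the failure
  // counter quietly expires after the reset window, matching the keyStore.setItemWithExpiry call.
  return {
    lockout,
    expirySeconds: lockout.lockedOut ? cfg.lockoutDurationSeconds : cfg.lockoutCounterResetSeconds
  };
};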
@@ -9,6 +9,10 @@ export type TAttachUaDTO = {
  clientSecretTrustedIps: { ipAddress: string }[];
  accessTokenTrustedIps: { ipAddress: string }[];
  isActorSuperAdmin?: boolean;
  lockoutEnabled: boolean;
  lockoutThreshold: number;
  lockoutDurationSeconds: number;
  lockoutCounterResetSeconds: number;
} & Omit<TProjectPermission, "projectId">;

export type TUpdateUaDTO = {
@@ -19,6 +23,10 @@ export type TUpdateUaDTO = {
  accessTokenPeriod?: number;
  clientSecretTrustedIps?: { ipAddress: string }[];
  accessTokenTrustedIps?: { ipAddress: string }[];
  lockoutEnabled?: boolean;
  lockoutThreshold?: number;
  lockoutDurationSeconds?: number;
  lockoutCounterResetSeconds?: number;
} & Omit<TProjectPermission, "projectId">;

export type TGetUaDTO = {
@@ -45,6 +53,10 @@ export type TRevokeUaClientSecretDTO = {
  clientSecretId: string;
} & Omit<TProjectPermission, "projectId">;

export type TClearUaLockoutsDTO = {
  identityId: string;
} & Omit<TProjectPermission, "projectId">;

export type TGetUniversalAuthClientSecretByIdDTO = {
  identityId: string;
  clientSecretId: string;
@@ -8,6 +8,7 @@ import {
  validatePrivilegeChangeOperation
} from "@app/ee/services/permission/permission-fns";
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service-types";
import { TKeyStoreFactory } from "@app/keystore/keystore";
import { BadRequestError, NotFoundError, PermissionBoundaryError } from "@app/lib/errors";
import { TIdentityProjectDALFactory } from "@app/services/identity-project/identity-project-dal";

@@ -32,6 +33,7 @@ type TIdentityServiceFactoryDep = {
  identityProjectDAL: Pick<TIdentityProjectDALFactory, "findByIdentityId">;
  permissionService: Pick<TPermissionServiceFactory, "getOrgPermission" | "getOrgPermissionByRole">;
  licenseService: Pick<TLicenseServiceFactory, "getPlan" | "updateSubscriptionOrgMemberCount">;
  keyStore: Pick<TKeyStoreFactory, "getKeysByPattern">;
};

export type TIdentityServiceFactory = ReturnType<typeof identityServiceFactory>;
@@ -42,7 +44,8 @@ export const identityServiceFactory = ({
  identityOrgMembershipDAL,
  identityProjectDAL,
  permissionService,
  licenseService
  licenseService,
  keyStore
}: TIdentityServiceFactoryDep) => {
  const createIdentity = async ({
    name,
@@ -255,7 +258,20 @@ export const identityServiceFactory = ({
    );
    ForbiddenError.from(permission).throwUnlessCan(OrgPermissionIdentityActions.Read, OrgPermissionSubjects.Identity);

    return identity;
    const activeLockouts = await keyStore.getKeysByPattern(`lockout:identity:${id}:*`);

    const activeLockoutAuthMethods = new Set<string>();
    activeLockouts.forEach((key) => {
      const parts = key.split(":");
      if (parts.length > 3) {
        activeLockoutAuthMethods.add(parts[3]);
      }
    });

    return {
      ...identity,
      identity: { ...identity.identity, activeLockoutAuthMethods: Array.from(activeLockoutAuthMethods) }
    };
  };

  const deleteIdentity = async ({
@@ -124,12 +124,12 @@ export const orgMembershipDALFactory = (db: TDbClient) => {
        void qb
          .whereNull(`${TableName.OrgMembership}.lastInvitedAt`)
          .whereBetween(`${TableName.OrgMembership}.createdAt`, [twelveMonthsAgo, oneWeekAgo]);
      })
      .orWhere((qb) => {
        // lastInvitedAt is older than 1 week ago AND createdAt is younger than 1 month ago
        void qb
          .where(`${TableName.OrgMembership}.lastInvitedAt`, "<", oneWeekAgo)
          .where(`${TableName.OrgMembership}.createdAt`, ">", oneMonthAgo);
        void qb.orWhere((qbInner) => {
          void qbInner
            .where(`${TableName.OrgMembership}.lastInvitedAt`, "<", oneWeekAgo)
            .where(`${TableName.OrgMembership}.createdAt`, ">", oneMonthAgo);
        });
      });

    return memberships;
@@ -18,7 +18,8 @@ export const sanitizedPkiSubscriber = PkiSubscribersSchema.pick({
|
||||
lastOperationAt: true,
|
||||
enableAutoRenewal: true,
|
||||
autoRenewalPeriodInDays: true,
|
||||
lastAutoRenewAt: true
|
||||
lastAutoRenewAt: true,
|
||||
properties: true
|
||||
}).extend({
|
||||
supportsImmediateCertIssuance: z.boolean().optional()
|
||||
});
|
||||
|
@@ -109,6 +109,7 @@ export const pkiSubscriberServiceFactory = ({
|
||||
extendedKeyUsages,
|
||||
enableAutoRenewal,
|
||||
autoRenewalPeriodInDays,
|
||||
properties,
|
||||
projectId,
|
||||
actorId,
|
||||
actorAuthMethod,
|
||||
@@ -157,7 +158,8 @@ export const pkiSubscriberServiceFactory = ({
|
||||
keyUsages,
|
||||
extendedKeyUsages,
|
||||
enableAutoRenewal,
|
||||
autoRenewalPeriodInDays
|
||||
autoRenewalPeriodInDays,
|
||||
properties
|
||||
});
|
||||
|
||||
return newSubscriber;
|
||||
@@ -221,6 +223,7 @@ export const pkiSubscriberServiceFactory = ({
|
||||
extendedKeyUsages,
|
||||
enableAutoRenewal,
|
||||
autoRenewalPeriodInDays,
|
||||
properties,
|
||||
actorId,
|
||||
actorAuthMethod,
|
||||
actor,
|
||||
@@ -275,7 +278,8 @@ export const pkiSubscriberServiceFactory = ({
|
||||
keyUsages,
|
||||
extendedKeyUsages,
|
||||
enableAutoRenewal,
|
||||
autoRenewalPeriodInDays
|
||||
autoRenewalPeriodInDays,
|
||||
properties
|
||||
});
|
||||
|
||||
return updatedSubscriber;
|
||||
@@ -360,7 +364,7 @@ export const pkiSubscriberServiceFactory = ({
|
||||
throw new BadRequestError({ message: "CA is disabled" });
|
||||
}
|
||||
|
||||
if (ca.externalCa?.id && ca.externalCa.type === CaType.ACME) {
|
||||
if (ca.externalCa?.id && (ca.externalCa.type === CaType.ACME || ca.externalCa.type === CaType.AZURE_AD_CS)) {
|
||||
await certificateAuthorityQueue.orderCertificateForSubscriber({
|
||||
subscriberId: subscriber.id,
|
||||
caType: ca.externalCa.type
|
||||
|
@@ -18,6 +18,7 @@ export type TCreatePkiSubscriberDTO = {
|
||||
extendedKeyUsages: CertExtendedKeyUsage[];
|
||||
enableAutoRenewal?: boolean;
|
||||
autoRenewalPeriodInDays?: number;
|
||||
properties?: TPkiSubscriberProperties;
|
||||
} & TProjectPermission;
|
||||
|
||||
export type TGetPkiSubscriberDTO = {
|
||||
@@ -36,6 +37,7 @@ export type TUpdatePkiSubscriberDTO = {
|
||||
extendedKeyUsages?: CertExtendedKeyUsage[];
|
||||
enableAutoRenewal?: boolean;
|
||||
autoRenewalPeriodInDays?: number;
|
||||
properties?: TPkiSubscriberProperties;
|
||||
} & TProjectPermission;
|
||||
|
||||
export type TDeletePkiSubscriberDTO = {
|
||||
@@ -69,3 +71,13 @@ export enum SubscriberOperationStatus {
|
||||
SUCCESS = "success",
|
||||
FAILED = "failed"
|
||||
}
|
||||
|
||||
export type TPkiSubscriberProperties = {
|
||||
azureTemplateType?: string;
|
||||
organization?: string;
|
||||
organizationalUnit?: string;
|
||||
country?: string;
|
||||
state?: string;
|
||||
locality?: string;
|
||||
emailAddress?: string;
|
||||
};
|
||||
|
@@ -21,6 +21,14 @@ export const projectMembershipDALFactory = (db: TDbClient) => {
|
||||
.where({ [`${TableName.ProjectMembership}.projectId` as "projectId"]: projectId })
|
||||
.join(TableName.Project, `${TableName.ProjectMembership}.projectId`, `${TableName.Project}.id`)
|
||||
.join(TableName.Users, `${TableName.ProjectMembership}.userId`, `${TableName.Users}.id`)
|
||||
.join(TableName.OrgMembership, (qb) => {
|
||||
qb.on(`${TableName.Users}.id`, "=", `${TableName.OrgMembership}.userId`).andOn(
|
||||
`${TableName.OrgMembership}.orgId`,
|
||||
"=",
|
||||
`${TableName.Project}.orgId`
|
||||
);
|
||||
})
|
||||
|
||||
.where((qb) => {
|
||||
if (filter.usernames) {
|
||||
void qb.whereIn("username", filter.usernames);
|
||||
@@ -90,7 +98,8 @@ export const projectMembershipDALFactory = (db: TDbClient) => {
|
||||
db.ref("temporaryRange").withSchema(TableName.ProjectUserMembershipRole),
|
||||
db.ref("temporaryAccessStartTime").withSchema(TableName.ProjectUserMembershipRole),
|
||||
db.ref("temporaryAccessEndTime").withSchema(TableName.ProjectUserMembershipRole),
|
||||
db.ref("name").as("projectName").withSchema(TableName.Project)
|
||||
db.ref("name").as("projectName").withSchema(TableName.Project),
|
||||
db.ref("isActive").withSchema(TableName.OrgMembership)
|
||||
)
|
||||
.where({ isGhost: false })
|
||||
.orderBy(`${TableName.Users}.username` as "username");
|
||||
@@ -107,12 +116,22 @@ export const projectMembershipDALFactory = (db: TDbClient) => {
|
||||
id,
|
||||
userId,
|
||||
projectName,
|
||||
createdAt
|
||||
createdAt,
|
||||
isActive
|
||||
}) => ({
|
||||
id,
|
||||
userId,
|
||||
projectId,
|
||||
user: { email, username, firstName, lastName, id: userId, publicKey, isGhost },
|
||||
user: {
|
||||
email,
|
||||
username,
|
||||
firstName,
|
||||
lastName,
|
||||
id: userId,
|
||||
publicKey,
|
||||
isGhost,
|
||||
isOrgMembershipActive: isActive
|
||||
},
|
||||
project: {
|
||||
id: projectId,
|
||||
name: projectName
|
||||
|
@@ -97,7 +97,6 @@ export const projectMembershipServiceFactory = ({
|
||||
|
||||
const projectMembers = await projectMembershipDAL.findAllProjectMembers(projectId, { roles });
|
||||
|
||||
// projectMembers[0].project
|
||||
if (includeGroupMembers) {
|
||||
const groupMembers = await groupProjectDAL.findAllProjectGroupMembers(projectId);
|
||||
const allMembers = [
|
||||
|
@@ -30,6 +30,7 @@ import {
|
||||
TDeleteFolderDTO,
|
||||
TDeleteManyFoldersDTO,
|
||||
TGetFolderByIdDTO,
|
||||
TGetFolderByPathDTO,
|
||||
TGetFolderDTO,
|
||||
TGetFoldersDeepByEnvsDTO,
|
||||
TUpdateFolderDTO,
|
||||
@@ -1398,6 +1399,31 @@ export const secretFolderServiceFactory = ({
|
||||
};
|
||||
};
|
||||
|
||||
const getFolderByPath = async (
|
||||
{ projectId, environment, secretPath }: TGetFolderByPathDTO,
|
||||
actor: OrgServiceActor
|
||||
) => {
|
||||
// folder check is allowed to be read by anyone
|
||||
// permission is to check if user has access
|
||||
await permissionService.getProjectPermission({
|
||||
actor: actor.type,
|
||||
actorId: actor.id,
|
||||
projectId,
|
||||
actorAuthMethod: actor.authMethod,
|
||||
actorOrgId: actor.orgId,
|
||||
actionProjectType: ActionProjectType.SecretManager
|
||||
});
|
||||
|
||||
const folder = await folderDAL.findBySecretPath(projectId, environment, secretPath);
|
||||
|
||||
if (!folder)
|
||||
throw new NotFoundError({
|
||||
message: `Could not find folder with path "${secretPath}" in environment "${environment}" for project with ID "${projectId}"`
|
||||
});
|
||||
|
||||
return folder;
|
||||
};
|
||||
|
||||
return {
|
||||
createFolder,
|
||||
updateFolder,
|
||||
@@ -1405,6 +1431,7 @@ export const secretFolderServiceFactory = ({
|
||||
deleteFolder,
|
||||
getFolders,
|
||||
getFolderById,
|
||||
getFolderByPath,
|
||||
getProjectFolderCount,
|
||||
getFoldersMultiEnv,
|
||||
getFoldersDeepByEnvs,
|
||||
|
@@ -91,3 +91,9 @@ export type TDeleteManyFoldersDTO = {
|
||||
idOrName: string;
|
||||
}>;
|
||||
};
|
||||
|
||||
export type TGetFolderByPathDTO = {
|
||||
projectId: string;
|
||||
environment: string;
|
||||
secretPath: string;
|
||||
};
|
||||
|
@@ -23,56 +23,120 @@ export const ChecklySyncFns = {
|
||||
|
||||
const config = secretSync.destinationConfig;
|
||||
|
||||
const variables = await ChecklyPublicAPI.getVariables(secretSync.connection, config.accountId);
|
||||
if (config.groupId) {
|
||||
// Handle group environment variables
|
||||
const groupVars = await ChecklyPublicAPI.getCheckGroupEnvironmentVariables(
|
||||
secretSync.connection,
|
||||
config.accountId,
|
||||
config.groupId
|
||||
);
|
||||
|
||||
const checklySecrets = Object.fromEntries(variables!.map((variable) => [variable.key, variable]));
|
||||
const checklyGroupSecrets = Object.fromEntries(groupVars.map((variable) => [variable.key, variable]));
|
||||
|
||||
for await (const key of Object.keys(secretMap)) {
|
||||
try {
|
||||
// Prepare all variables to update at once
|
||||
const updatedVariables = { ...checklyGroupSecrets };
|
||||
|
||||
for (const key of Object.keys(secretMap)) {
|
||||
const entry = secretMap[key];
|
||||
|
||||
// If value is empty, we skip the upsert - checkly does not allow empty values
|
||||
// If value is empty, we skip adding it - checkly does not allow empty values
|
||||
if (entry.value.trim() === "") {
|
||||
// Delete the secret from Checkly if its empty
|
||||
// Delete the secret from the group if it's empty
|
||||
if (!disableSecretDeletion) {
|
||||
await ChecklyPublicAPI.deleteVariable(secretSync.connection, config.accountId, {
|
||||
key
|
||||
});
|
||||
delete updatedVariables[key];
|
||||
}
|
||||
continue; // Skip empty values
|
||||
}
|
||||
|
||||
await ChecklyPublicAPI.upsertVariable(secretSync.connection, config.accountId, {
|
||||
// Add or update the variable
|
||||
updatedVariables[key] = {
|
||||
key,
|
||||
value: entry.value,
|
||||
secret: true,
|
||||
locked: true
|
||||
});
|
||||
};
|
||||
}
|
||||
|
||||
// Remove secrets that are not in the secretMap if deletion is enabled
|
||||
if (!disableSecretDeletion) {
|
||||
for (const key of Object.keys(checklyGroupSecrets)) {
|
||||
// eslint-disable-next-line no-continue
|
||||
if (!matchesSchema(key, environment?.slug || "", keySchema)) continue;
|
||||
|
||||
if (!secretMap[key]) {
|
||||
delete updatedVariables[key];
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Update all group environment variables at once
|
||||
try {
|
||||
await ChecklyPublicAPI.updateCheckGroupEnvironmentVariables(
|
||||
secretSync.connection,
|
||||
config.accountId,
|
||||
config.groupId,
|
||||
Object.values(updatedVariables)
|
||||
);
|
||||
} catch (error) {
|
||||
if (error instanceof SecretSyncError) throw error;
|
||||
|
||||
throw new SecretSyncError({
|
||||
error,
|
||||
secretKey: key
|
||||
secretKey: "group_update"
|
||||
});
|
||||
}
|
||||
}
|
||||
} else {
|
||||
// Handle global variables (existing logic)
|
||||
const variables = await ChecklyPublicAPI.getVariables(secretSync.connection, config.accountId);
|
||||
|
||||
if (disableSecretDeletion) return;
|
||||
const checklySecrets = Object.fromEntries(variables!.map((variable) => [variable.key, variable]));
|
||||
|
||||
for await (const key of Object.keys(checklySecrets)) {
|
||||
try {
|
||||
// eslint-disable-next-line no-continue
|
||||
if (!matchesSchema(key, environment?.slug || "", keySchema)) continue;
|
||||
for await (const key of Object.keys(secretMap)) {
|
||||
try {
|
||||
const entry = secretMap[key];
|
||||
|
||||
if (!secretMap[key]) {
|
||||
await ChecklyPublicAPI.deleteVariable(secretSync.connection, config.accountId, {
|
||||
key
|
||||
// If value is empty, we skip the upsert - checkly does not allow empty values
|
||||
if (entry.value.trim() === "") {
|
||||
// Delete the secret from Checkly if its empty
|
||||
if (!disableSecretDeletion) {
|
||||
await ChecklyPublicAPI.deleteVariable(secretSync.connection, config.accountId, {
|
||||
key
|
||||
});
|
||||
}
|
||||
continue; // Skip empty values
|
||||
}
|
||||
|
||||
await ChecklyPublicAPI.upsertVariable(secretSync.connection, config.accountId, {
|
||||
key,
|
||||
value: entry.value,
|
||||
secret: true,
|
||||
locked: true
|
||||
});
|
||||
} catch (error) {
|
||||
throw new SecretSyncError({
|
||||
error,
|
||||
secretKey: key
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
if (disableSecretDeletion) return;
|
||||
|
||||
for await (const key of Object.keys(checklySecrets)) {
|
||||
try {
|
||||
// eslint-disable-next-line no-continue
|
||||
if (!matchesSchema(key, environment?.slug || "", keySchema)) continue;
|
||||
|
||||
if (!secretMap[key]) {
|
||||
await ChecklyPublicAPI.deleteVariable(secretSync.connection, config.accountId, {
|
||||
key
|
||||
});
|
||||
}
|
||||
} catch (error) {
|
||||
throw new SecretSyncError({
|
||||
error,
|
||||
secretKey: key
|
||||
});
|
||||
}
|
||||
} catch (error) {
|
||||
throw new SecretSyncError({
|
||||
error,
|
||||
secretKey: key
|
||||
});
|
||||
}
|
||||
}
|
||||
},
|
||||
@@ -80,23 +144,54 @@ export const ChecklySyncFns = {
|
||||
async removeSecrets(secretSync: TChecklySyncWithCredentials, secretMap: TSecretMap) {
|
||||
const config = secretSync.destinationConfig;
|
||||
|
||||
const variables = await ChecklyPublicAPI.getVariables(secretSync.connection, config.accountId);
|
||||
if (config.groupId) {
|
||||
// Handle group environment variables
|
||||
const groupVars = await ChecklyPublicAPI.getCheckGroupEnvironmentVariables(
|
||||
secretSync.connection,
|
||||
config.accountId,
|
||||
config.groupId
|
||||
);
|
||||
|
||||
const checklySecrets = Object.fromEntries(variables!.map((variable) => [variable.key, variable]));
|
||||
const checklyGroupSecrets = Object.fromEntries(groupVars.map((variable) => [variable.key, variable]));
|
||||
|
||||
// Filter out the secrets to remove
|
||||
const remainingVariables = Object.keys(checklyGroupSecrets)
|
||||
.filter((key) => !(key in secretMap))
|
||||
.map((key) => checklyGroupSecrets[key]);
|
||||
|
||||
for await (const secret of Object.keys(checklySecrets)) {
|
||||
try {
|
||||
if (secret in secretMap) {
|
||||
await ChecklyPublicAPI.deleteVariable(secretSync.connection, config.accountId, {
|
||||
key: secret
|
||||
});
|
||||
}
|
||||
await ChecklyPublicAPI.updateCheckGroupEnvironmentVariables(
|
||||
secretSync.connection,
|
||||
config.accountId,
|
||||
config.groupId,
|
||||
remainingVariables
|
||||
);
|
||||
} catch (error) {
|
||||
throw new SecretSyncError({
|
||||
error,
|
||||
secretKey: secret
|
||||
secretKey: "group_remove"
|
||||
});
|
||||
}
|
||||
} else {
|
||||
// Handle global variables (existing logic)
|
||||
const variables = await ChecklyPublicAPI.getVariables(secretSync.connection, config.accountId);
|
||||
|
||||
const checklySecrets = Object.fromEntries(variables!.map((variable) => [variable.key, variable]));
|
||||
|
||||
for await (const secret of Object.keys(checklySecrets)) {
|
||||
try {
|
||||
if (secret in secretMap) {
|
||||
await ChecklyPublicAPI.deleteVariable(secretSync.connection, config.accountId, {
|
||||
key: secret
|
||||
});
|
||||
}
|
||||
} catch (error) {
|
||||
throw new SecretSyncError({
|
||||
error,
|
||||
secretKey: secret
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
|
@@ -11,7 +11,17 @@ import { TSyncOptionsConfig } from "@app/services/secret-sync/secret-sync-types"
|
||||
|
||||
const ChecklySyncDestinationConfigSchema = z.object({
|
||||
accountId: z.string().min(1, "Account ID is required").max(255, "Account ID must be less than 255 characters"),
|
||||
accountName: z.string().min(1, "Account Name is required").max(255, "Account ID must be less than 255 characters")
|
||||
accountName: z
|
||||
.string()
|
||||
.min(1, "Account Name is required")
|
||||
.max(255, "Account Name must be less than 255 characters")
|
||||
.optional(),
|
||||
groupId: z.string().min(1, "Group ID is required").max(255, "Group ID must be less than 255 characters").optional(),
|
||||
groupName: z
|
||||
.string()
|
||||
.min(1, "Group Name is required")
|
||||
.max(255, "Group Name must be less than 255 characters")
|
||||
.optional()
|
||||
});
|
||||
|
||||
const ChecklySyncOptionsConfig: TSyncOptionsConfig = { canImportSecrets: false };
|
||||
|
@@ -207,7 +207,7 @@ export const GithubSyncFns = {
|
||||
const token =
|
||||
connection.method === GitHubConnectionMethod.OAuth
|
||||
? connection.credentials.accessToken
|
||||
: await getGitHubAppAuthToken(connection);
|
||||
: await getGitHubAppAuthToken(connection, gatewayService);
|
||||
|
||||
const encryptedSecrets = await getEncryptedSecrets(secretSync, gatewayService);
|
||||
const publicKey = await getPublicKey(secretSync, gatewayService, token);
|
||||
@@ -264,7 +264,7 @@ export const GithubSyncFns = {
|
||||
const token =
|
||||
connection.method === GitHubConnectionMethod.OAuth
|
||||
? connection.credentials.accessToken
|
||||
: await getGitHubAppAuthToken(connection);
|
||||
: await getGitHubAppAuthToken(connection, gatewayService);
|
||||
|
||||
const encryptedSecrets = await getEncryptedSecrets(secretSync, gatewayService);
|
||||
|
||||
|
@@ -684,9 +684,9 @@ export const secretV2BridgeDALFactory = ({ db, keyStore }: TSecretV2DalArg) => {
|
||||
throw new BadRequestError({ message: "Missing personal user id" });
|
||||
}
|
||||
void bd.orWhere({
|
||||
key: el.key,
|
||||
type: el.type,
|
||||
userId: el.type === SecretType.Personal ? el.userId : null
|
||||
[`${TableName.SecretV2}.key` as "key"]: el.key,
|
||||
[`${TableName.SecretV2}.type` as "type"]: el.type,
|
||||
[`${TableName.SecretV2}.userId` as "userId"]: el.type === SecretType.Personal ? el.userId : null
|
||||
});
|
||||
});
|
||||
})
|
||||
@@ -695,12 +695,60 @@ export const secretV2BridgeDALFactory = ({ db, keyStore }: TSecretV2DalArg) => {
|
||||
`${TableName.SecretV2}.id`,
|
||||
`${TableName.SecretRotationV2SecretMapping}.secretId`
|
||||
)
|
||||
|
||||
.leftJoin(
|
||||
TableName.SecretV2JnTag,
|
||||
`${TableName.SecretV2}.id`,
|
||||
`${TableName.SecretV2JnTag}.${TableName.SecretV2}Id`
|
||||
)
|
||||
.leftJoin(
|
||||
TableName.SecretTag,
|
||||
`${TableName.SecretV2JnTag}.${TableName.SecretTag}Id`,
|
||||
`${TableName.SecretTag}.id`
|
||||
)
|
||||
.leftJoin(TableName.ResourceMetadata, `${TableName.SecretV2}.id`, `${TableName.ResourceMetadata}.secretId`)
|
||||
.select(db.ref("id").withSchema(TableName.SecretTag).as("tagId"))
|
||||
.select(db.ref("color").withSchema(TableName.SecretTag).as("tagColor"))
|
||||
.select(db.ref("slug").withSchema(TableName.SecretTag).as("tagSlug"))
|
||||
.select(
|
||||
db.ref("id").withSchema(TableName.ResourceMetadata).as("metadataId"),
|
||||
db.ref("key").withSchema(TableName.ResourceMetadata).as("metadataKey"),
|
||||
db.ref("value").withSchema(TableName.ResourceMetadata).as("metadataValue")
|
||||
)
|
||||
.select(selectAllTableCols(TableName.SecretV2))
|
||||
.select(db.ref("rotationId").withSchema(TableName.SecretRotationV2SecretMapping));
|
||||
return secrets.map((secret) => ({
|
||||
...secret,
|
||||
isRotatedSecret: Boolean(secret.rotationId)
|
||||
}));
|
||||
|
||||
const docs = sqlNestRelationships({
|
||||
data: secrets,
|
||||
key: "id",
|
||||
parentMapper: (secret) => ({
|
||||
...secret,
|
||||
isRotatedSecret: Boolean(secret.rotationId)
|
||||
}),
|
||||
childrenMapper: [
|
||||
{
|
||||
key: "tagId",
|
||||
label: "tags" as const,
|
||||
mapper: ({ tagId: id, tagColor: color, tagSlug: slug }) => ({
|
||||
id,
|
||||
color,
|
||||
slug,
|
||||
name: slug
|
||||
})
|
||||
},
|
||||
{
|
||||
key: "metadataId",
|
||||
label: "secretMetadata" as const,
|
||||
mapper: ({ metadataKey, metadataValue, metadataId }) => ({
|
||||
id: metadataId,
|
||||
key: metadataKey,
|
||||
value: metadataValue
|
||||
})
|
||||
}
|
||||
]
|
||||
});
|
||||
|
||||
return docs;
|
||||
} catch (error) {
|
||||
throw new DatabaseError({ error, name: "find by secret keys" });
|
||||
}
|
||||
|
@@ -1074,12 +1074,22 @@ export const secretV2BridgeServiceFactory = ({
|
||||
currentPath: path
|
||||
});
|
||||
|
||||
if (!deepPaths) return { secrets: [], imports: [] };
|
||||
if (!deepPaths?.length) {
|
||||
throw new NotFoundError({
|
||||
message: `Folder with path '${path}' in environment '${environment}' was not found. Please ensure the environment slug and secret path are correct.`,
|
||||
name: "SecretPathNotFound"
|
||||
});
|
||||
}
|
||||
|
||||
paths = deepPaths.map(({ folderId, path: p }) => ({ folderId, path: p }));
|
||||
} else {
|
||||
const folder = await folderDAL.findBySecretPath(projectId, environment, path);
|
||||
if (!folder) return { secrets: [], imports: [] };
|
||||
if (!folder) {
|
||||
throw new NotFoundError({
|
||||
message: `Folder with path '${path}' in environment '${environment}' was not found. Please ensure the environment slug and secret path are correct.`,
|
||||
name: "SecretPathNotFound"
|
||||
});
|
||||
}
|
||||
|
||||
paths = [{ folderId: folder.id, path }];
|
||||
}
|
||||
|
@@ -637,7 +637,12 @@ export const secretServiceFactory = ({
|
||||
}
|
||||
});
|
||||
|
||||
if (!deepPaths) return { secrets: [], imports: [] };
|
||||
if (!deepPaths?.length) {
|
||||
throw new NotFoundError({
|
||||
message: `Folder with path '${path}' in environment '${environment}' was not found. Please ensure the environment slug and secret path are correct.`,
|
||||
name: "SecretPathNotFound"
|
||||
});
|
||||
}
|
||||
|
||||
paths = deepPaths.map(({ folderId, path: p }) => ({ folderId, path: p }));
|
||||
} else {
|
||||
@@ -647,7 +652,12 @@ export const secretServiceFactory = ({
|
||||
});
|
||||
|
||||
const folder = await folderDAL.findBySecretPath(projectId, environment, path);
|
||||
if (!folder) return { secrets: [], imports: [] };
|
||||
if (!folder) {
|
||||
throw new NotFoundError({
|
||||
message: `Folder with path '${path}' in environment '${environment}' was not found. Please ensure the environment slug and secret path are correct.`,
|
||||
name: "SecretPathNotFound"
|
||||
});
|
||||
}
|
||||
|
||||
paths = [{ folderId: folder.id, path }];
|
||||
}
|
||||
|
@@ -2,6 +2,7 @@ import { ForbiddenError } from "@casl/ability";
|
||||
|
||||
import { OrgPermissionActions, OrgPermissionSubjects } from "@app/ee/services/permission/org-permission";
|
||||
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service-types";
|
||||
import { crypto } from "@app/lib/crypto";
|
||||
import { BadRequestError, ForbiddenRequestError, NotFoundError } from "@app/lib/errors";
|
||||
import { logger } from "@app/lib/logger";
|
||||
import { TAuthTokenServiceFactory } from "@app/services/auth-token/auth-token-service";
|
||||
@@ -9,9 +10,10 @@ import { TokenType } from "@app/services/auth-token/auth-token-types";
|
||||
import { TOrgMembershipDALFactory } from "@app/services/org-membership/org-membership-dal";
|
||||
import { SmtpTemplates, TSmtpService } from "@app/services/smtp/smtp-service";
|
||||
|
||||
import { AuthMethod } from "../auth/auth-type";
|
||||
import { AuthMethod, AuthTokenType } from "../auth/auth-type";
|
||||
import { TGroupProjectDALFactory } from "../group-project/group-project-dal";
|
||||
import { TProjectMembershipDALFactory } from "../project-membership/project-membership-dal";
|
||||
import { TUserAliasDALFactory } from "../user-alias/user-alias-dal";
|
||||
import { TUserDALFactory } from "./user-dal";
|
||||
import { TListUserGroupsDTO, TUpdateUserMfaDTO } from "./user-types";
|
||||
|
||||
@@ -37,6 +39,7 @@ type TUserServiceFactoryDep = {
|
||||
projectMembershipDAL: Pick<TProjectMembershipDALFactory, "find">;
|
||||
smtpService: Pick<TSmtpService, "sendMail">;
|
||||
permissionService: TPermissionServiceFactory;
|
||||
userAliasDAL: Pick<TUserAliasDALFactory, "findOne" | "find" | "updateById">;
|
||||
};
|
||||
|
||||
export type TUserServiceFactory = ReturnType<typeof userServiceFactory>;
|
||||
@@ -48,22 +51,38 @@ export const userServiceFactory = ({
|
||||
groupProjectDAL,
|
||||
tokenService,
|
||||
smtpService,
|
||||
permissionService
|
||||
permissionService,
|
||||
userAliasDAL
|
||||
}: TUserServiceFactoryDep) => {
|
||||
const sendEmailVerificationCode = async (username: string) => {
|
||||
const sendEmailVerificationCode = async (token: string) => {
|
||||
const { authType, aliasId, username, authTokenType } = crypto.jwt().decode(token) as {
|
||||
authType: string;
|
||||
aliasId?: string;
|
||||
username: string;
|
||||
authTokenType: AuthTokenType;
|
||||
};
|
||||
if (authTokenType !== AuthTokenType.PROVIDER_TOKEN) throw new BadRequestError({ name: "Invalid auth token type" });
|
||||
|
||||
// akhilmhdh: case sensitive email resolution
|
||||
const users = await userDAL.findUserByUsername(username);
|
||||
const user = users?.length > 1 ? users.find((el) => el.username === username) : users?.[0];
|
||||
if (!user) throw new NotFoundError({ name: `User with username '${username}' not found` });
|
||||
let { isEmailVerified } = user;
|
||||
if (aliasId) {
|
||||
const userAlias = await userAliasDAL.findOne({ userId: user.id, aliasType: authType, id: aliasId });
|
||||
if (!userAlias) throw new NotFoundError({ name: `User alias with ID '${aliasId}' not found` });
|
||||
isEmailVerified = userAlias.isEmailVerified;
|
||||
}
|
||||
|
||||
if (!user.email)
|
||||
throw new BadRequestError({ name: "Failed to send email verification code due to no email on user" });
|
||||
if (user.isEmailVerified)
|
||||
if (isEmailVerified)
|
||||
throw new BadRequestError({ name: "Failed to send email verification code due to email already verified" });
|
||||
|
||||
const token = await tokenService.createTokenForUser({
|
||||
const userToken = await tokenService.createTokenForUser({
|
||||
type: TokenType.TOKEN_EMAIL_VERIFICATION,
|
||||
userId: user.id
|
||||
userId: user.id,
|
||||
aliasId
|
||||
});
|
||||
|
||||
await smtpService.sendMail({
|
||||
@@ -71,7 +90,7 @@ export const userServiceFactory = ({
|
||||
subjectLine: "Infisical confirmation code",
|
||||
recipients: [user.email],
|
||||
substitutions: {
|
||||
code: token
|
||||
code: userToken
|
||||
}
|
||||
});
|
||||
};
|
||||
@@ -95,15 +114,21 @@ export const userServiceFactory = ({
|
||||
if (!user) throw new NotFoundError({ name: `User with username '${username}' not found` });
|
||||
if (!user.email)
|
||||
throw new BadRequestError({ name: "Failed to verify email verification code due to no email on user" });
|
||||
if (user.isEmailVerified)
|
||||
throw new BadRequestError({ name: "Failed to verify email verification code due to email already verified" });
|
||||
|
||||
await tokenService.validateTokenForUser({
|
||||
const token = await tokenService.validateTokenForUser({
|
||||
type: TokenType.TOKEN_EMAIL_VERIFICATION,
|
||||
userId: user.id,
|
||||
code
|
||||
});
|
||||
|
||||
if (token?.aliasId) {
|
||||
const userAlias = await userAliasDAL.findOne({ userId: user.id, id: token.aliasId });
|
||||
if (!userAlias) throw new NotFoundError({ name: `User alias with ID '${token.aliasId}' not found` });
|
||||
if (userAlias?.isEmailVerified)
|
||||
throw new BadRequestError({ name: "Failed to verify email verification code due to email already verified" });
|
||||
|
||||
await userAliasDAL.updateById(token.aliasId, { isEmailVerified: true });
|
||||
}
|
||||
const userEmails = user?.email ? await userDAL.find({ email: user.email }) : [];
|
||||
|
||||
await userDAL.updateById(user.id, {
|
||||
|
@@ -0,0 +1,4 @@
---
title: "Available"
openapi: "GET /api/v1/app-connections/azure-adcs/available"
---
@@ -0,0 +1,10 @@
---
title: "Create"
openapi: "POST /api/v1/app-connections/azure-adcs"
---

<Note>
Azure ADCS Connections must be created through the Infisical UI.
Check out the configuration docs for [Azure ADCS Connections](/integrations/app-connections/azure-adcs) for a step-by-step
guide.
</Note>
@@ -0,0 +1,4 @@
---
title: "Delete"
openapi: "DELETE /api/v1/app-connections/azure-adcs/{connectionId}"
---
@@ -0,0 +1,4 @@
---
title: "Get by ID"
openapi: "GET /api/v1/app-connections/azure-adcs/{connectionId}"
---
@@ -0,0 +1,4 @@
---
title: "Get by Name"
openapi: "GET /api/v1/app-connections/azure-adcs/connection-name/{connectionName}"
---
@@ -0,0 +1,4 @@
---
title: "List"
openapi: "GET /api/v1/app-connections/azure-adcs"
---
@@ -0,0 +1,10 @@
---
title: "Update"
openapi: "PATCH /api/v1/app-connections/azure-adcs/{connectionId}"
---

<Note>
Azure ADCS Connections must be updated through the Infisical UI.
Check out the configuration docs for [Azure ADCS Connections](/integrations/app-connections/azure-adcs) for a step-by-step
guide.
</Note>
@@ -26,7 +26,7 @@ The changelog below reflects new product developments and updates on a monthly b
|
||||
- Revamped UI for Access Controls, Access Tree, Policies, and Approval Workflows.
|
||||
- Released [TLS Certificate Authentication method](https://infisical.com/docs/documentation/platform/identities/tls-cert-auth).
|
||||
- Added ability to copy session tokens in the Infisical Dashboard.
|
||||
- Expanded resource support for [Infisical Terraform Provider](https://infisical.com/docs/integrations/frameworks/terraform).
|
||||
- Expanded resource support for [Infisical Terraform Provider](https://registry.terraform.io/providers/Infisical/infisical/latest/docs).
|
||||
|
||||
|
||||
## May 2025
|
||||
@@ -62,7 +62,7 @@ The changelog below reflects new product developments and updates on a monthly b
|
||||
## March 2025
|
||||
|
||||
- Released [Infisical Gateway](https://infisical.com/docs/documentation/platform/gateways/overview) for secure access to private resources without needing direct inbound connections to private networks.
|
||||
- Enhanced [Terraform](https://infisical.com/docs/integrations/frameworks/terraform#terraform) capabilities with token authentication, ability to import existing Infisical secrets as resources, and support for project templates.
|
||||
- Enhanced [Terraform](https://registry.terraform.io/providers/Infisical/infisical/latest/docs) capabilities with token authentication, ability to import existing Infisical secrets as resources, and support for project templates.
|
||||
- Self-hosted improvements: Usage and billing visibility for enabled features, ability to delete users, and support for multiple super admins.
|
||||
- UI and UX updates: Improved secret import interface on the overview page, password reset without backup PDF.
|
||||
- CLI enhancements: Various improvements including multiline secret support and ability to pass headers.
|
||||
@@ -93,7 +93,7 @@ The changelog below reflects new product developments and updates on a monthly b
|
||||
- Added support for OIDC group mapping in [Keycloak](https://infisical.com/docs/documentation/platform/sso/keycloak-oidc/overview), enabling automatic mapping of Keycloak groups to Infisical for role-based access control.
|
||||
- Enhanced [Kubernetes operator](https://infisical.com/docs/integrations/platforms/kubernetes/overview#kubernetes-operator) with namespaced group support, bi-directional secret sync (push to Infisical), [dynamic secrets](https://infisical.com/docs/documentation/platform/dynamic-secrets/overview#dynamic-secrets) capabilities, and support for multiple operator instances.
|
||||
- Restructured navigation with dedicated sections for Secrets Management, [Certificate Management (PKI)](https://infisical.com/docs/documentation/platform/pki/overview), [Key Management (KMS)](https://infisical.com/docs/documentation/platform/kms/overview#key-management-service-kms), and [SSH Key Management](https://infisical.com/docs/documentation/platform/ssh).
|
||||
- Added [ephemeral Terraform resource](https://infisical.com/docs/integrations/frameworks/terraform#terraform-provider) support and improved secret sync architecture.
|
||||
- Added [ephemeral Terraform resource](https://registry.terraform.io/providers/Infisical/infisical/latest/docs) support and improved secret sync architecture.
|
||||
- Released [.NET provider](https://github.com/Infisical/infisical-dotnet-configuration) with first-party Azure authentication support and Azure CLI integration.
|
||||
- Implemented secret Access Visibility allowing users to view all entities with access to specific secrets in the secret side panel.
|
||||
- Added secret filtering by metadata and SSH assigned certificates (Version 1).
|
||||
@@ -212,7 +212,7 @@ The changelog below reflects new product developments and updates on a monthly b
- Completed Postgres migration initiative with restructured Fastify-based backend.
- Reduced size of Infisical Node.js SDK by ≈90%.
- Added secret fallback support to all SDKs.
- Added Machine Identity support to [Terraform Provider](https://github.com/Infisical/terraform-provider-infisical).
- Added Machine Identity support to [Terraform Provider](https://registry.terraform.io/providers/Infisical/infisical/latest/docs).
- Released [.NET SDK](https://infisical.com/docs/sdks/languages/csharp).
- Added symmetric encryption support to all SDKs.
- Fixed a secret reminders bug where reminders were not being updated correctly.
|
||||
@@ -276,7 +276,7 @@ The changelog below reflects new product developments and updates on a monthly b
|
||||
|
||||
## June 2023
|
||||
|
||||
- Released the [Terraform Provider](https://infisical.com/docs/integrations/frameworks/terraform#5-run-terraform).
|
||||
- Released the [Terraform Provider](https://registry.terraform.io/providers/Infisical/infisical/latest/docs).
|
||||
- Updated the usage and billing page. Added the free trial for the professional tier.
|
||||
- Added native integrations with [Checkly](https://infisical.com/docs/integrations/cloud/checkly), [Hashicorp Vault](https://infisical.com/docs/integrations/cloud/hashicorp-vault), and [Cloudflare Pages](https://infisical.com/docs/integrations/cloud/cloudflare-pages).
|
||||
- Completed a penetration test with a `very good` result.
|
||||
|
@@ -1,408 +0,0 @@
|
||||
---
|
||||
title: "Local development"
|
||||
description: "This guide will help you contribute to the Infisical SDK."
|
||||
---
|
||||
|
||||
## Fork and clone the repo
|
||||
|
||||
[Fork](https://docs.github.com/en/get-started/quickstart/fork-a-repo) the [repository](https://github.com/Infisical/sdk) to your own GitHub account and then [clone](https://docs.github.com/en/repositories/creating-and-managing-repositories/cloning-a-repository) it to your local device.
|
||||
|
||||
Once you've done that, create a new branch:
|
||||
|
||||
```console
|
||||
git checkout -b MY_BRANCH_NAME
|
||||
```
|
||||
|
||||
## Set up environment variables
|
||||
|
||||
Start by creating a .env file at the root of the Infisical directory, then copy the contents of the file below into it.
|
||||
|
||||
<Accordion title=".env file content">
|
||||
```env
|
||||
# This is required for running tests locally.
|
||||
# Rename this file to ".env" and fill in the values below.
|
||||
|
||||
# Please make sure that the machine identity has access to the project you are testing in.
|
||||
# https://infisical.com/docs/documentation/platform/identities/universal-auth
|
||||
INFISICAL_UNIVERSAL_CLIENT_ID=MACHINE_IDENTITY_CLIENT_ID
|
||||
INFISICAL_UNIVERSAL_CLIENT_SECRET=MACHINE_IDENTITY_CLIENT_SECRET
|
||||
|
||||
# The ID of the Infisical project where we will create the test secrets.
|
||||
# NOTE: The project must have a dev environment. (This is created by default when you create a project.)
|
||||
INFISICAL_PROJECT_ID=INFISICAL_TEST_PROJECT_ID
|
||||
|
||||
# The Infisical site URL. If you are testing with a local Infisical instance, then this should be set to "http://localhost:8080".
|
||||
INFISICAL_SITE_URL=https://app.infisical.com
|
||||
|
||||
```
|
||||
</Accordion>
|
||||
|
||||
<Warning>
|
||||
The above values are required for running tests locally. Before opening a pull request, make sure to run `cargo test` to ensure that all tests pass.
|
||||
</Warning>
|
||||
|
||||
|
||||
## Guidelines
|
||||
|
||||
### Predictable and consistent
|
||||
When adding new functionality (such as new functions), it's very important that the functionality is added to _all_ of the SDKs. This ensures that the SDKs remain predictable and consistent across all languages.
|
||||
|
||||
### Handling errors
|
||||
Error handling is very important when writing SDKs. We want to make sure that the SDKs are easy to use, and that the user gets a good understanding of what went wrong when something fails. When adding new functionality, please make sure to add proper error handling. [Read more about error handling here](#error-handling).
|
||||
|
||||
### Tests
|
||||
If you add new functionality or modify existing functionality, please write tests that properly cover the new functionality. You can run tests locally by running `cargo test` from the root directory. You must always run tests before opening a pull request.
|
||||
|
||||
### Code style
|
||||
Please follow the default rust styling guide when writing code for the base SDK. [Read more about rust code style here](https://doc.rust-lang.org/nightly/style-guide/#the-default-rust-style).
|
||||
|
||||
|
||||
## Prerequisites for contributing
|
||||
|
||||
### Understanding the terms
|
||||
|
||||
In the guide we use some terms that might be unfamiliar to you. Here's a quick explanation of the terms we use:
|
||||
- **Base SDK**: The base SDK is the SDK that all other SDK's are built on top of. The base SDK is written in Rust, and is responsible for executing commands and parsing the input and output to and from JSON.
|
||||
- **Commands**: Commands are what's being sent from the target language to the command handler. The command handler uses the command to execute the corresponding function in the base SDK. Commands are in reality just a JSON string that tells the command handler what function to execute, and what input to use.
|
||||
- **Command handler**: The command handler is the part of the base SDK that takes care of executing commands. It also takes care of parsing the input and output to and from JSON.
|
||||
- **Target language**: The target language refers to the actual SDK code. For example, the [Node.js SDK](https://www.npmjs.com/package/@infisical/sdk) is a "target language", and so is the [Python SDK](https://pypi.org/project/infisical-python/).
|
||||
|
||||
|
||||
### Understanding the execution flow
|
||||
After the target language SDK is initiated, it uses language-specific bindings to interact with the base SDK.
|
||||
These bindings are instantiated, setting up the interface for command execution. A client within the command handler is created, which issues commands to the base SDK.
|
||||
When a command is executed, it is first validated. If valid, the command handler locates the corresponding command to perform. If the command executes successfully, the command handler returns the output to the target language SDK, where it is parsed and returned to the user.
|
||||
If the command handler fails to validate the input, an error will be returned to the target language SDK.
|
||||
|
||||
|
||||
<Frame caption="Execution flow diagram for the SDK from the target language to the base SDK. The execution flow is the same for all target languages.">
|
||||
<img height="640" width="520" src="/images/sdk-flow.png" />
|
||||
</Frame>
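
To make the flow above concrete, here is a minimal TypeScript sketch of the round trip from a target-language SDK into the base SDK. The `NativeClient` interface, the `getSecret` payload shape, and the use of raw `JSON.stringify`/`JSON.parse` are illustrative assumptions for this sketch; in the real SDKs the serialization is handled by the QuickType-generated `Convert` helpers described further down.

```ts
// A hedged sketch of the execution flow: the target-language SDK serializes a
// command to JSON, hands it to the native bindings, and parses the JSON result.
type CommandEnvelope<T> = {
  success: boolean;
  data: T | null;
  errorMessage: string | null;
};

interface NativeClient {
  // Provided by the language-specific bindings; takes and returns JSON strings.
  runCommand(commandJson: string): Promise<string>;
}

async function getSecretViaBindings(
  client: NativeClient,
  secretName: string,
  projectId: string,
  environment: string
): Promise<CommandEnvelope<{ secret: unknown }>> {
  // 1. The target language builds a command object and serializes it to JSON.
  const commandJson = JSON.stringify({
    getSecret: { secretName, projectId, environment }
  });

  // 2. The command handler validates the command, runs the matching base SDK
  //    function, and returns the output as a JSON string.
  const resultJson = await client.runCommand(commandJson);

  // 3. The target language parses the JSON envelope and returns it to the user.
  return JSON.parse(resultJson) as CommandEnvelope<{ secret: unknown }>;
}
```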
|
||||
|
||||
|
||||
|
||||
### Rust knowledge
|
||||
|
||||
Contributing to the SDK requires intermediate to advanced knowledge of Rust concepts such as lifetimes, traits, generics, and async/await _(futures)_, and more.
|
||||
|
||||
### Rust setup
|
||||
The base SDK is written in rust. Therefore you must have rustc and cargo installed. You can install rustc and cargo by following the instructions [here](https://www.rust-lang.org/tools/install).
|
||||
|
||||
You shouldn't have to use the Rust cross-compilation toolchain, as all compilation is done through a collection of GitHub Actions. However, if you need to test cross compilation, please do so with GitHub Actions.
|
||||
|
||||
### Tests
|
||||
If you add new functionality or modify existing functionality, please write tests that properly cover the new functionality. You can run tests locally by running `cargo test` from the root directory.
|
||||
|
||||
### Language-specific crates
|
||||
The language-specific crates should ideally never have to be modified, as they are simply a wrapper for the `infisical-json` crate, which executes "commands" from the base SDK. If you need to create a new target-language specific crate, please try to create native bindings for the target language. Some languages don't have direct support for native bindings (Java as an example). In those cases we can use the C bindings (`crates/infisical-c`) in the target language.
|
||||
|
||||
|
||||
|
||||
|
||||
## Generate types
|
||||
Having almost seamless type safety from the base SDK to the target language is critical, as writing types for each language by hand has a lot of drawbacks, such as duplicated code and the overhead of keeping the types up-to-date and in sync across a large collection of languages. Therefore we decided to use [QuickType](https://quicktype.io/) and [Serde](https://serde.rs/) to help us generate types for each language. In our Rust base SDK (`crates/infisical`), we define all the inputs/outputs.
|
||||
|
||||
If you are interested in reading about how QuickType works under the hood, you can [read more here](http://blog.quicktype.io/under-the-hood/).
|
||||
|
||||
This is an example of a type defined in Rust (both input and output). For this to become a generated type, you'll need to add it to our schema generator. More on that further down.
|
||||
```rust
|
||||
use schemars::JsonSchema;
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, JsonSchema)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
// Input:
|
||||
pub struct CreateSecretOptions {
|
||||
pub environment: String, // environment
|
||||
pub secret_comment: Option<String>, // secretComment
|
||||
pub path: Option<String>, // secretPath
|
||||
pub secret_value: String, // secretValue
|
||||
pub skip_multiline_encoding: Option<bool>, // skipMultilineEncoding
|
||||
pub r#type: Option<String>, // shared / personal
|
||||
pub project_id: String, // workspaceId
|
||||
pub secret_name: String, // secretName (PASSED AS PARAMETER IN REQUEST)
|
||||
}
|
||||
|
||||
// Output:
|
||||
#[derive(Serialize, Deserialize, Debug, JsonSchema)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct CreateSecretResponse {
|
||||
pub secret: Secret, // "Secret" is defined elsewhere.
|
||||
}
|
||||
```
|
||||
|
||||
### Adding input types to the schema generator
|
||||
|
||||
You will _only_ have to define outputs in our schema generator, then QuickType will take care of the rest behind the scenes. You can find the Rust crate that takes care of type generation here: `crates/sdk-schemas/src/main.rs`.
|
||||
|
||||
Simply add the output _(also called response)_, to the `write_schema_for_response!` macro. This will let QuickType know that it should generate types for the given structs. The main function will look something like this:
|
||||
|
||||
```rust
|
||||
fn main() -> Result<()> {
|
||||
// Input types for new Client
|
||||
write_schema_for!(infisical_json::client::ClientSettings);
|
||||
// Input types for Client::run_command
|
||||
write_schema_for!(infisical_json::command::Command);
|
||||
|
||||
// Output types for Client::run_command
|
||||
// Only add structs which are direct results of SDK commands.
|
||||
write_schema_for_response! {
|
||||
infisical::manager::secrets::GetSecretResponse,
|
||||
infisical::manager::secrets::ListSecretsResponse,
|
||||
infisical::manager::secrets::UpdateSecretResponse,
|
||||
infisical::manager::secrets::DeleteSecretResponse,
|
||||
infisical::manager::secrets::CreateSecretResponse, // <-- This is the output from the above example!
|
||||
infisical::auth::AccessTokenSuccessResponse
|
||||
};
|
||||
|
||||
Ok(())
|
||||
}
|
||||
```
|
||||
|
||||
### Generating the types for the target language
|
||||
|
||||
Once you've added the output to the schema generator, you can generate the types for the target language by running the following command from the root directory:
|
||||
|
||||
```console
|
||||
$ npm install
|
||||
$ npm run schemas
|
||||
```
|
||||
|
||||
<Warning>If you change any of the structs defined in the base SDK, you will need to run this script to re-generate the types.</Warning>
|
||||
|
||||
This command will run the `schemas.ts` file found in the `support/scripts` folder. If you are adding a new language, it's important that you add the language to the code.
|
||||
|
||||
This is an example of how we generate types for Node.js:
|
||||
|
||||
```ts
|
||||
const ts = await quicktype({
|
||||
inputData,
|
||||
lang: "typescript",
|
||||
rendererOptions: {}
|
||||
});
|
||||
await ensureDir("./languages/node/src/infisical_client");
|
||||
writeToFile("./languages/node/src/infisical_client/schemas.ts", ts.lines);
|
||||
```
|
||||
|
||||
## Building bindings
|
||||
We've tried to streamline the building process as much as possible. So you shouldn't have to worry much about building bindings, as it should just be a few commands.
|
||||
|
||||
### Node.js
|
||||
Building bindings for Node.js is very straightforward. The command below will generate NAPI bindings for Node.js and move the bindings to the correct folder. We use [NAPI-RS](https://napi.rs/) to generate the bindings.
|
||||
|
||||
```console
|
||||
$ cd languages/node
|
||||
$ npm run build
|
||||
```
|
||||
|
||||
### Python
|
||||
To generate and use python bindings you will need to run the following commands.
|
||||
The Python SDK is located inside the crates folder. This is a limitation of the maturin tool, forcing us to structure the project in this way.
|
||||
|
||||
```console
|
||||
$ pip install -U pip maturin
|
||||
$ cd crates/infisical-py
|
||||
$ python3 -m venv .venv
|
||||
$ source .venv/bin/activate
|
||||
$ maturin develop
|
||||
```
|
||||
|
||||
<Warning>
|
||||
After running the commands above, it's very important that you rename the generated .so file to `infisical_py.so`. After renaming it you also need to move it into the root of the `crates/infisical-py` folder.
|
||||
</Warning>
|
||||
|
||||
### Java
|
||||
Java uses the C bindings to interact with the base SDK. To build and use the C bindings in Java, please follow the instructions below.
|
||||
|
||||
```console
|
||||
$ cd crates/infisical-c
|
||||
$ cargo build --release
|
||||
$ cd ../../languages/java
|
||||
```
|
||||
<Warning>
|
||||
After generating the C bindings, the generated .so or .dll file will be located in the `/target` directory at the root of the project.
|
||||
You have to manually move the generated file into the `languages/java/src/main/resources` directory.
|
||||
</Warning>
|
||||
|
||||
## Error handling
|
||||
|
||||
### Error handling in the base SDK
|
||||
|
||||
The base SDK should never panic. If an error occurs, we should return a `Result` with an error message. We have a custom Result type defined in the `error.rs` file in the base SDK.
|
||||
|
||||
All our errors are defined in an enum called `Error`. The `Error` enum is defined in the `error.rs` file in the base SDK. The `Error` enum is used in the `Result` type, which is used as the return type for all functions in the base SDK.
|
||||
|
||||
```rust
|
||||
#[derive(Debug, Error)]
|
||||
pub enum Error {
|
||||
// Secret not found
|
||||
#[error("Secret with name '{}' not found.", .secret_name)]
|
||||
SecretNotFound { secret_name: String },
|
||||
|
||||
// .. other errors
|
||||
|
||||
// Errors that are not specific to the base SDK.
|
||||
#[error(transparent)]
|
||||
Reqwest(#[from] reqwest::Error),
|
||||
#[error(transparent)]
|
||||
Serde(#[from] serde_json::Error),
|
||||
#[error(transparent)]
|
||||
Io(#[from] std::io::Error),
|
||||
}
|
||||
```
|
||||
|
||||
### Returning an error
|
||||
|
||||
You can find many examples of how we return errors in the SDK code. A relevant example is for creating secrets, which can be found in `crates/infisical/src/api/secrets/create_secret.rs`. When an error happens due to a failed request to our API, we use an API error handler. This prevents duplicate code and keeps error handling consistent across the SDK. You can find the API error handler in the `error.rs` file.
|
||||
|
||||
### Error handling in the target language SDK's.
|
||||
|
||||
All data sent to the target language SDK has the same format. The format is an object with 3 fields: `success (boolean)`, `data (could be anything or nothing)`, and `errorMessage (string or null)`.
|
||||
|
||||
The `success` field is used to determine if the request was successful or not. The `data` field is used to return data from the SDK. The `errorMessage` field is used to return an error message if the request was not successful.
|
||||
|
||||
This means that if `success` is false or the error message is not null, something went wrong, and we should throw an error at the target-language level with that error message.
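
As a concrete illustration, here is a minimal TypeScript sketch of how a target-language SDK might unwrap this envelope. The `unwrapResponse` helper name is hypothetical, but the `{ success, data, errorMessage }` shape matches the format described above, and the throw-on-failure behavior mirrors the Node.js example later in this guide.

```ts
// Shape of every response returned by the command handler, as described above.
type CommandResponse<T> = {
  success: boolean;
  data: T | null;
  errorMessage: string | null;
};

// Hypothetical helper: throw at the target-language level when the base SDK
// reports a failure, otherwise hand back the typed data.
function unwrapResponse<T>(response: CommandResponse<T>): T {
  if (!response.success || response.data == null) {
    throw new Error(response.errorMessage ?? "Something went wrong");
  }
  return response.data;
}
```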
|
||||
|
||||
## Command handler
|
||||
|
||||
### What is the command handler
|
||||
|
||||
The command handler (the `infisical-json` crate) takes care of executing commands sent from the target language. It also takes care of parsing the input and output to and from JSON. The command handler is the only part of the base SDK that should be aware of JSON; the rest of the base SDK should be completely unaware of JSON and only work with the Rust structs defined in the base SDK.
|
||||
|
||||
The command handler exposes a function called `run_command`, which is what we use in the target language to execute commands. The function takes a JSON string as input and returns a JSON string as output. We use helper functions generated by QuickType to convert the input and output to and from JSON.
|
||||
|
||||
### Creating new SDK methods
|
||||
|
||||
Creating new commands is necessary when adding new methods to the SDKs. Defining a new command is a multi-step process; the steps are outlined below.
|
||||
|
||||
#### 1. Define the input and output structs
|
||||
|
||||
Earlier in this guide, we defined the input and output structs for the `CreateSecret` command. We will use that as an example here as well.
|
||||
|
||||
#### 2. Creating the method in the base SDK
|
||||
|
||||
The first step is to create the method in the base SDK. This step will be different depending on what method you are adding. In this example we're going to assume you're adding a function for creating a new secret.
|
||||
|
||||
After you have created the function for creating the secret, you'll need to add it to the ClientSecrets implementation. We do it this way to keep the code organized and easy to read. The ClientSecrets struct is located in the `crates/infisical/src/manager/secrets.rs` file.
|
||||
|
||||
```rust
|
||||
pub struct ClientSecrets<'a> {
|
||||
pub(crate) client: &'a mut crate::Client,
|
||||
}
|
||||
|
||||
impl<'a> ClientSecrets<'a> {
|
||||
pub async fn create(&mut self, input: &CreateSecretOptions) -> Result<CreateSecretResponse> {
|
||||
create_secret(self.client, input).await // <-- This is the function you created!
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> Client {
|
||||
pub fn secrets(&'a mut self) -> ClientSecrets<'a> {
|
||||
ClientSecrets { client: self }
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
#### 3. Define a new command
|
||||
|
||||
We define new commands in the `crates/infisical-json/src/command.rs` file. The `Command` enum is what we use to define new commands.
|
||||
|
||||
In the code snippet below we define a new command called `CreateSecret`. The `CreateSecret` command takes a `CreateSecretOptions` struct as input. We don't have to define the output, because QuickType's converter helps us with figuring out the return type for each command.
|
||||
|
||||
```rust
|
||||
use schemars::JsonSchema;
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
#[derive(Serialize, Deserialize, JsonSchema, Debug)]
|
||||
#[serde(rename_all = "camelCase", deny_unknown_fields)]
|
||||
pub enum Command {
|
||||
GetSecret(GetSecretOptions),
|
||||
ListSecrets(ListSecretsOptions),
|
||||
CreateSecret(CreateSecretOptions), // <-- The new command!
|
||||
UpdateSecret(UpdateSecretOptions),
|
||||
DeleteSecret(DeleteSecretOptions),
|
||||
}
|
||||
```
|
||||
|
||||
#### 4. Add the command to the command handler
|
||||
|
||||
After defining the command, we need to add it to the command handler itself. This takes place in the `crates/infisical-json/src/client.rs` file. The `run_command` function is what we use to execute commands.
|
||||
|
||||
In the Client implementation we try to parse the JSON string into a `Command` enum. If the parsing is successful, we match the command and execute the corresponding function.
|
||||
|
||||
```rust
|
||||
match cmd {
|
||||
Command::GetSecret(req) => self.0.secrets().get(&req).await.into_string(),
|
||||
Command::ListSecrets(req) => self.0.secrets().list(&req).await.into_string(),
|
||||
Command::UpdateSecret(req) => self.0.secrets().update(&req).await.into_string(),
|
||||
Command::DeleteSecret(req) => self.0.secrets().delete(&req).await.into_string(),
|
||||
|
||||
// This is the new command:
|
||||
Command::CreateSecret(req) => self.0.secrets().create(&req).await.into_string(),
|
||||
}
|
||||
```
|
||||
|
||||
#### 5. Implementing the new command in the target language SDK's
|
||||
|
||||
We did it! We've now added a new command to the base SDK. The last step is to implement the new command in the target language SDK's. The process is a little different from language to language, but in this example we're going to assume that we're adding a new command to the Node.js SDK.
|
||||
|
||||
First, you'll need to generate the new type schemas, since we added a new command, an input struct, and an output struct. [Read more about generating types here](#generating-the-types-for-the-target-language).
|
||||
|
||||
Secondly, you need to build the new Node.js bindings so we can use the new functionality in the Node.js SDK. You can do this by running the following commands from the `languages/node` directory:
|
||||
|
||||
```console
|
||||
$ npm install
|
||||
$ npm run build
|
||||
```
|
||||
|
||||
The build command will execute a build script in the `infisical-napi` crate, and move the generated bindings to the appropriate folder.
|
||||
|
||||
After building the new bindings, you can access the new functionality in the Node.js SDK source.
|
||||
|
||||
```ts
|
||||
// 'binding' is a js file that makes it easier to access the methods in the bindings. (it's auto generated when running npm run build)
|
||||
import * as rust from "../../binding";
|
||||
// We can import the newly generated types from the schemas.ts file. (Generated with QuickType!)
|
||||
import type { CreateSecretOptions, CreateSecretResponse } from "./schemas";
|
||||
// This is the QuickType converter that we use to create commands with! It takes care of all JSON parsing and serialization.
|
||||
import { Convert, ClientSettings } from "./schemas";
|
||||
|
||||
export class InfisicalClient {
|
||||
#client: rust.Client;
|
||||
|
||||
constructor(settings: ClientSettings) {
|
||||
const settingsJson = settings == null ? null : Convert.clientSettingsToJson(settings);
|
||||
this.#client = new rust.InfisicalClient(settingsJson);
|
||||
}
|
||||
|
||||
// ... getSecret
|
||||
// ... listSecrets
|
||||
// ... updateSecret
|
||||
// ... deleteSecret
|
||||
|
||||
async createSecret(options: CreateSecretOptions): Promise<CreateSecretResponse["secret"]> {
|
||||
// The runCommand will return a JSON string, which we can parse into a CreateSecretResponse.
|
||||
const command = await this.#client.runCommand(
|
||||
Convert.commandToJson({
|
||||
createSecret: options
|
||||
})
|
||||
);
|
||||
const response = Convert.toResponseForCreateSecretResponse(command); // <-- This is the QuickType converter in action!
|
||||
|
||||
// If the response is not successful or the data is null, we throw an error.
|
||||
if (!response.success || response.data == null) {
|
||||
throw new Error(response.errorMessage ?? "Something went wrong");
|
||||
}
|
||||
|
||||
// To make it easier to work with the response, we return the secret directly.
|
||||
return response.data.secret;
|
||||
}
|
||||
}
|
||||
```
And that's it! We've now added a new command to the base SDK, and implemented it in the Node.js SDK. The process is very similar for all other languages, but the code will look a little different.

## Conclusion

The SDK has a lot of moving parts, and it can be a little overwhelming at first. But once you get the hang of it, it's actually quite simple. If you have any questions, feel free to reach out to us on [Slack](https://infisical.com/slack), or [open an issue](https://github.com/Infisical/sdk/issues) on GitHub.
@@ -106,6 +106,7 @@
 "integrations/app-connections/auth0",
 "integrations/app-connections/aws",
 "integrations/app-connections/azure-app-configuration",
+"integrations/app-connections/azure-adcs",
 "integrations/app-connections/azure-client-secrets",
 "integrations/app-connections/azure-devops",
 "integrations/app-connections/azure-key-vault",
@@ -342,10 +343,7 @@
 },
 {
 "group": "Architecture",
-"pages": [
-"internals/architecture/components",
-"internals/architecture/cloud"
-]
+"pages": ["internals/architecture/components", "internals/architecture/cloud"]
 },
 "internals/security",
 "internals/service-tokens"
@@ -370,10 +368,6 @@
 "contributing/platform/backend/how-to-create-a-feature",
 "contributing/platform/backend/folder-structure"
 ]
 },
-{
-"group": "Contributing to SDK",
-"pages": ["contributing/sdk/developing"]
-}
 ]
 }
@@ -564,10 +558,7 @@
 "integrations/cloud/gcp-secret-manager",
 {
 "group": "Cloudflare",
-"pages": [
-"integrations/cloud/cloudflare-pages",
-"integrations/cloud/cloudflare-workers"
-]
+"pages": ["integrations/cloud/cloudflare-pages", "integrations/cloud/cloudflare-workers"]
 },
 "integrations/cloud/terraform-cloud",
 "integrations/cloud/databricks",
@@ -661,9 +652,7 @@
 "documentation/platform/secret-scanning/overview",
 {
 "group": "Concepts",
-"pages": [
-"documentation/platform/secret-scanning/concepts/secret-scanning"
-]
+"pages": ["documentation/platform/secret-scanning/concepts/secret-scanning"]
 }
 ]
 },
@@ -690,6 +679,7 @@
 "documentation/platform/pki/subscribers",
 "documentation/platform/pki/certificates",
 "documentation/platform/pki/acme-ca",
+"documentation/platform/pki/azure-adcs",
 "documentation/platform/pki/est",
 "documentation/platform/pki/alerting",
 {
@@ -712,18 +702,13 @@
 "documentation/platform/ssh/overview",
 {
 "group": "Concepts",
-"pages": [
-"documentation/platform/ssh/concepts/ssh-certificates"
-]
+"pages": ["documentation/platform/ssh/concepts/ssh-certificates"]
 }
 ]
 },
 {
 "group": "Platform Reference",
-"pages": [
-"documentation/platform/ssh/usage",
-"documentation/platform/ssh/host-groups"
-]
+"pages": ["documentation/platform/ssh/usage", "documentation/platform/ssh/host-groups"]
 }
 ]
 },
@@ -770,11 +755,7 @@
 "cli/commands/reset",
 {
 "group": "infisical scan",
-"pages": [
-"cli/commands/scan",
-"cli/commands/scan-git-changes",
-"cli/commands/scan-install"
-]
+"pages": ["cli/commands/scan", "cli/commands/scan-git-changes", "cli/commands/scan-install"]
 }
 ]
 },
@@ -1108,9 +1089,7 @@
 "pages": [
 {
 "group": "Kubernetes",
-"pages": [
-"api-reference/endpoints/dynamic-secrets/kubernetes/create-lease"
-]
+"pages": ["api-reference/endpoints/dynamic-secrets/kubernetes/create-lease"]
 },
 "api-reference/endpoints/dynamic-secrets/create",
 "api-reference/endpoints/dynamic-secrets/update",
@@ -1396,6 +1375,18 @@
 "api-reference/endpoints/app-connections/aws/delete"
 ]
 },
+{
+"group": "Azure ADCS",
+"pages": [
+"api-reference/endpoints/app-connections/azure-adcs/list",
+"api-reference/endpoints/app-connections/azure-adcs/available",
+"api-reference/endpoints/app-connections/azure-adcs/get-by-id",
+"api-reference/endpoints/app-connections/azure-adcs/get-by-name",
+"api-reference/endpoints/app-connections/azure-adcs/create",
+"api-reference/endpoints/app-connections/azure-adcs/update",
+"api-reference/endpoints/app-connections/azure-adcs/delete"
+]
+},
 {
 "group": "Azure App Configuration",
 "pages": [
@@ -2453,7 +2444,7 @@
 "sdks/languages/node",
 "sdks/languages/python",
 "sdks/languages/java",
-"sdks/languages/csharp",
+"sdks/languages/dotnet",
 "sdks/languages/cpp",
 "sdks/languages/rust",
 "sdks/languages/go",
@@ -2569,7 +2560,7 @@
 },
 {
 "label": "Terraform",
-"href": "https://infisical.com/docs/integrations/frameworks/terraform"
+"href": "https://registry.terraform.io/providers/Infisical/infisical/latest/docs"
 },
 {
 "label": "Ansible",
@@ -2681,5 +2672,11 @@
 "koala": {
 "publicApiKey": "pk_b50d7184e0e39ddd5cdb43cf6abeadd9b97d"
 }
-}
+},
+"redirects": [
+{
+"source": "/sdks/languages/csharp",
+"destination": "/sdks/languages/dotnet"
+}
+]
 }
@@ -147,6 +147,8 @@ In the following steps, we explore how to set up ACME Certificate Authority inte
 - **Directory URL**: Enter the ACME v2 directory URL for your chosen CA provider (e.g., `https://acme-v02.api.letsencrypt.org/directory` for Let's Encrypt).
 - **Account Email**: Email address to associate with your ACME account. This email will receive important notifications about your certificates.
 - **Enable Direct Issuance**: Toggle on to allow direct certificate issuance without requiring subscribers.
+- **EAB Key Identifier (KID)**: (Optional) The Key Identifier (KID) provided by your ACME CA for External Account Binding (EAB). This is required by some ACME providers (e.g., ZeroSSL, DigiCert) to link your ACME account to an external account you've pre-registered with them.
+- **EAB HMAC Key**: (Optional) The HMAC Key provided by your ACME CA for External Account Binding (EAB). This key is used in conjunction with the KID to prove ownership of the external account during ACME account registration.

 Finally, press **Create** to register the ACME CA with Infisical.
 </Step>
@@ -277,6 +279,19 @@ Let's Encrypt is a free, automated, and open Certificate Authority that provides
 Always test your ACME integration using Let's Encrypt's staging environment first. This allows you to verify your DNS configuration and certificate issuance process without consuming your production rate limits.
 </Note>
+
+## Example: DigiCert Integration
+
+DigiCert is a leading commercial Certificate Authority providing a wide range of trusted SSL/TLS certificates. Infisical can integrate with [DigiCert's ACME](https://docs.digicert.com/en/certcentral/certificate-tools/certificate-lifecycle-automation-guides/third-party-acme-integration/request-and-manage-certificates-with-acme.html) service to automate the provisioning and management of these certificates.
+
+- **Directory URL**: `https://acme.digicert.com/v2/acme/directory`
+- **External Account Binding (EAB)**: Required. You will need a Key Identifier (KID) and HMAC Key from your DigiCert account to register the ACME CA in Infisical.
+- **Certificate Validity**: Typically 90 days, with automatic renewal through Infisical.
+- **Trusted By**: All major browsers and operating systems.
+
+<Note>
+When integrating with DigiCert ACME, ensure you have obtained the necessary External Account Binding (EAB) Key Identifier (KID) and HMAC Key from your DigiCert account.
+</Note>

 ## FAQ

 <AccordionGroup>
docs/documentation/platform/pki/azure-adcs.mdx (new file, 206 lines)
@@ -0,0 +1,206 @@
---
title: "Certificates with Azure ADCS"
description: "Learn how to issue and manage certificates using Microsoft Active Directory Certificate Services (ADCS) with Infisical."
---

Issue and manage certificates using Microsoft Active Directory Certificate Services (ADCS) for enterprise-grade certificate management integrated with your existing Windows infrastructure.

## Prerequisites

Before setting up ADCS integration, ensure you have:

- Microsoft Active Directory Certificate Services (ADCS) server running and accessible
- Domain administrator account with certificate management permissions
- ADCS web enrollment enabled on your server
- Network connectivity from Infisical to the ADCS server
- **IP whitelisting**: Your ADCS server must allow connections from Infisical's IP addresses
  - For Infisical Cloud instances, see [Networking Configuration](/documentation/setup/networking) for the list of IPs to whitelist
  - For self-hosted instances, whitelist your Infisical server's IP address
- Azure ADCS app connection configured (see [Azure ADCS Connection](/integrations/app-connections/azure-adcs))

## Complete Workflow: From Setup to Certificate Issuance

This section walks you through the complete end-to-end process of setting up Azure ADCS integration and issuing your first certificate.

<Steps>
<Step title="Navigate to External Certificate Authorities">
In your Infisical project, go to your **Certificate Project** → **Certificate Authority** to access the external CAs page.

![External CAs page](/images/platform/pki/ca/azure-adcs/external-ca-page.png)
</Step>

<Step title="Create New Azure ADCS Certificate Service CA">
Click **Create CA** and configure:
- **Type**: Choose **Azure AD Certificate Service**
- **Name**: Friendly name for this CA (e.g., "Production ADCS CA")
- **App Connection**: Choose your ADCS connection from the dropdown

![Create ADCS CA](/images/platform/pki/ca/azure-adcs/azure-adcs-create-ca.png)
</Step>

<Step title="Certificate Authority Created">
Once created, your Azure ADCS Certificate Authority will appear in the list and be ready for use.

![CA created successfully](/images/platform/pki/ca/azure-adcs/azure-adcs-ca-created.png)
</Step>

<Step title="Navigate to Subscribers">
Go to **Subscribers** to access the subscribers page.

![Subscribers page](/images/platform/pki/ca/azure-adcs/subscribers-page.png)
</Step>

<Step title="Create New Subscriber">
Click **Add Subscriber** and configure:
- **Name**: Unique subscriber name (e.g., "web-server-certs")
- **Certificate Authority**: Select your ADCS CA
- **Common Name**: Certificate CN (e.g., "api.example.com")
- **Certificate Template**: Select from dynamically loaded ADCS templates
- **Subject Alternative Names**: DNS names, IP addresses, or email addresses
- **TTL**: Certificate validity period (e.g., "1y" for 1 year)
- **Additional Subject Fields**: Organization, OU, locality, state, country, email (if required by template)

![Create subscriber](/images/platform/pki/ca/azure-adcs/azure-adcs-create-subscriber.png)
</Step>

<Step title="Subscriber Created">
Your subscriber is now created and ready to issue certificates.

![Subscriber created](/images/platform/pki/ca/azure-adcs/azure-adcs-subscriber-created.png)
</Step>

<Step title="Issue New Certificate">
Click into your subscriber and click **Order Certificate** to generate a new certificate using your ADCS template.

![Issue certificate](/images/platform/pki/ca/azure-adcs/azure-adcs-issue-cert.png)
</Step>

<Step title="Certificate Created">
Your certificate has been successfully issued by the ADCS server and is ready for use.

![Certificate created](/images/platform/pki/ca/azure-adcs/azure-adcs-cert-created.png)
</Step>

<Step title="View Certificate Details">
Navigate to **Certificates** to view detailed information about all issued certificates, including expiration dates, serial numbers, and certificate chains.

![Certificate details](/images/platform/pki/ca/azure-adcs/azure-adcs-cert-details.png)
</Step>
</Steps>

## Certificate Templates

Infisical automatically retrieves available certificate templates from your ADCS server, ensuring you can only select templates that are properly configured and accessible. The system dynamically discovers templates during the certificate authority setup and certificate issuance process.

### Common Template Types

ADCS templates you might see include:
- **Web Server**: For SSL/TLS certificates with server authentication
- **Computer**: For machine authentication certificates
- **User**: For client authentication certificates
- **Basic EFS**: For Encrypting File System certificates
- **EFS Recovery Agent**: For EFS data recovery
- **Administrator**: For administrative certificates
- **Subordinate Certification Authority**: For issuing CA certificates

### Template Requirements

Ensure your ADCS templates are configured with:
- **Enroll permissions** for your connection account
- **Auto-enroll permissions** if using automated workflows
- **Subject name requirements** matching your certificate requests
- **Key usage extensions** appropriate for your use case

<Info>
**Dynamic Template Discovery**: Infisical queries your ADCS server in real-time to populate available templates. Only templates you have permission to use will be displayed during certificate issuance.
</Info>

## Certificate Issuance Limitations

### Immediate Issuance Only

<Warning>
**Manual Approval Not Supported**: Infisical currently supports only **immediate certificate issuance**. Certificates that require manual approval or are held by ADCS policies cannot be issued through Infisical yet.
</Warning>

For successful certificate issuance, ensure your ADCS templates and policies are configured to:
- **Auto-approve** certificate requests without manual intervention
- **Not require** administrator approval for the templates you plan to use
- **Allow** the connection account to request and receive certificates immediately

### What Happens with Manual Approval

If a certificate request requires manual approval:
1. The request will be submitted to ADCS successfully.
2. Infisical will attempt to retrieve the certificate with exponential backoff (up to 5 retries over ~1 minute; see the sketch after this list).
3. If the certificate is not approved within this timeframe, the request will **fail**.
4. **No background polling**: Currently, Infisical does not check for certificates that might be approved hours or days later.
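
For illustration, the retry behaviour described above amounts to a pattern like the following sketch. This is not Infisical's actual backend code; the function name, delays, and retry count are assumptions chosen to roughly match the ~1 minute window:

```ts
// Illustrative sketch of the retry pattern described above; not Infisical's actual implementation.
// fetchIssuedCertificate is a hypothetical callback that throws while ADCS still reports the request as pending.
async function retrieveCertificateWithBackoff(
    fetchIssuedCertificate: () => Promise<string>,
    maxRetries = 5,
    baseDelayMs = 2000
): Promise<string> {
    let lastError: unknown;
    for (let attempt = 0; attempt < maxRetries; attempt++) {
        try {
            return await fetchIssuedCertificate();
        } catch (err) {
            lastError = err;
            // Exponential backoff between attempts (roughly 2s, 4s, 8s, 16s), skipping the wait after the last try.
            if (attempt < maxRetries - 1) {
                const delayMs = baseDelayMs * 2 ** attempt;
                await new Promise((resolve) => setTimeout(resolve, delayMs));
            }
        }
    }
    // If the certificate is still pending after the final attempt, the request fails.
    throw lastError ?? new Error("Certificate was not issued within the retry window");
}
```
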

<Info>
**Future Enhancement**: Background polling for delayed certificate approvals is planned for future releases.
</Info>

### Certificate Revocation

<Warning>
Certificate revocation is **not supported** by the Azure ADCS connector due to security and complexity considerations.
</Warning>

## Advanced Configuration

### Custom Validity Periods

Enable custom certificate validity periods on your ADCS server:

```cmd
REM Run on ADCS server as Administrator
certutil -setreg policy\EditFlags +EDITF_ATTRIBUTEENDDATE
net stop certsvc
net start certsvc
```

This allows Infisical to control certificate expiration dates directly.

## Troubleshooting

### Common Issues

**Certificate Request Denied**
- Verify ADCS template permissions for your connection account
- Check template subject name requirements
- Ensure template allows the requested key algorithm and size

**Revocation Service Unavailable**
- Verify IIS is running and the revocation endpoint is accessible
- Check IIS application pool permissions
- Test endpoint connectivity from Infisical

**Template Not Found**
- Verify template exists on ADCS server and is published
- Check that your connection account has enrollment permissions for the template
- Ensure the template is properly configured and available in the ADCS web enrollment interface
- Templates are dynamically loaded - refresh the PKI Subscriber form if templates don't appear

**Certificate Request Pending/Timeout**
- Check if your ADCS template requires manual approval - Infisical only supports immediate issuance
- Verify the certificate template is configured for auto-approval
- Ensure your connection account has sufficient permissions to request certificates without approval
- Review ADCS server policies that might be holding the certificate request

**Network Connectivity Issues**
- Verify your ADCS server's firewall allows connections from Infisical
- For Infisical Cloud: Ensure Infisical's IP addresses are whitelisted (see [Networking Configuration](/documentation/setup/networking))
- For self-hosted: Whitelist your Infisical server's IP address on the ADCS server
- Test HTTPS connectivity to the ADCS web enrollment endpoint
- Check for any network security appliances blocking the connection

**Authentication Failures**
- Verify ADCS connection credentials
- Check domain account permissions
- Ensure network connectivity to ADCS server

**SSL/TLS Certificate Errors**
- For ADCS servers with self-signed or private certificates: disable "Reject Unauthorized" in the SSL tab of your Azure ADCS app connection, or provide the certificate in PEM format (see the illustrative snippet after this list)
- Common SSL errors: `UNABLE_TO_VERIFY_LEAF_SIGNATURE`, `SELF_SIGNED_CERT_IN_CHAIN`, `CERT_HAS_EXPIRED`
- The SSL configuration applies to all HTTPS communications between Infisical and your ADCS server
- Only HTTPS URLs are supported - HTTP connections are not allowed for security reasons
@@ -22,7 +22,7 @@ The table below provides a quick overview of which delivery method may be suitab
 | Kubernetes (file-based, with rotation) | [Kubernetes CSI Provider](/integrations/platforms/kubernetes-csi) | Mounted files | Uses CSI driver to mount secrets as files with automatic rotation |
 | Image builds (VMs or containers) | [Packer Plugin](/integrations/frameworks/packer) | Env vars or files | Inject secrets at image build time |
 | Ansible automation | [Ansible Collection](/integrations/platforms/ansible) | Variables | Runtime secret fetching in playbooks using lookup plugin |
-| Terraform / Pulumi | [Terraform Provider](/integrations/frameworks/terraform), [Pulumi](/integrations/frameworks/pulumi) | Inputs / ephemeral resources | Use ephemeral for security; avoids storing secrets in state |
+| Terraform / Pulumi | [Terraform Provider](https://registry.terraform.io/providers/Infisical/infisical/latest/docs), [Pulumi](/integrations/frameworks/pulumi) | Inputs / ephemeral resources | Use ephemeral for security; avoids storing secrets in state |
 | Third-party platforms (GitHub, AWS, etc.) | [Secret Syncs](/integrations/secret-syncs/overview) | Preloaded secrets | Push secrets to platforms that can't fetch directly from Infisical |

 From here, you can explore the delivery method that best matches your environment:
@@ -90,7 +90,7 @@ This is useful when external systems require secrets to be available ahead of ti

 Infisical integrates with common IaC and automation tools to help you securely inject secrets into your infrastructure provisioning workflows:

-- [Terraform](/integrations/frameworks/terraform): Use the official Infisical Terraform provider to fetch secrets either as ephemeral resources (never written to state files) or as traditional data sources. Ideal for managing cloud infrastructure while keeping secrets secure and version-safe.
+- [Terraform](https://registry.terraform.io/providers/Infisical/infisical/latest/docs): Use the official Infisical Terraform provider to fetch secrets either as ephemeral resources (never written to state files) or as traditional data sources. Ideal for managing cloud infrastructure while keeping secrets secure and version-safe.
 - [Pulumi](/integrations/frameworks/pulumi): Integrate Infisical into Pulumi projects using the Terraform Bridge, allowing you to fetch and manage secrets in TypeScript, Go, Python, or C# — without changing your existing workflows.
 - [Ansible](/integrations/platforms/ansible): Retrieve secrets from Infisical at runtime using the official Ansible Collection and lookup plugin. Works well for dynamic configuration during playbook execution.
 - [Packer](/integrations/frameworks/packer): Inject secrets into VM or container images at build time using the Infisical Packer Plugin — useful for provisioning base images that require secure configuration values.
(Binary image file added, 600 KiB; not shown. Some files were not shown because too many files have changed in this diff.)