mirror of
https://github.com/Infisical/infisical.git
synced 2025-09-06 06:00:42 +00:00
Compare commits
114 Commits
project-vi
...
daniel/cus
Author | SHA1 | Date | |
---|---|---|---|
|
a94635e5be | ||
|
912cd5d20a | ||
|
e29a0e487e | ||
|
3c24132e97 | ||
|
38a7cb896b | ||
|
6abd58ee21 | ||
|
c8275f41a3 | ||
|
8467286aa3 | ||
|
cea43d497d | ||
|
3700597ba7 | ||
|
65f0597bd8 | ||
|
5b3cae7255 | ||
|
a4ff6340f8 | ||
|
bfb2486204 | ||
|
c29b5e37f3 | ||
|
e666409026 | ||
|
ecfc8b5f87 | ||
|
a6b4939ea5 | ||
|
640dccadb7 | ||
|
3ebd5305c2 | ||
|
8d1c0b432b | ||
|
be588c2653 | ||
|
f7828ed458 | ||
|
b40bb72643 | ||
|
4f1cd69bcc | ||
|
4d4b4c13c3 | ||
|
c8bf9049de | ||
|
ab91863c77 | ||
|
6db4c614af | ||
|
21e2db2963 | ||
|
da0d4a31b1 | ||
|
b7d3ddff21 | ||
|
a3c6b1134b | ||
|
d931725930 | ||
|
6702498028 | ||
|
b650b142f7 | ||
|
19a5f52d20 | ||
|
e51c5256a0 | ||
|
3bb0c9b3ad | ||
|
41404148e1 | ||
|
e04e11f597 | ||
|
5fffa17c30 | ||
|
3fa6154517 | ||
|
1d5cdb4000 | ||
|
a1b53855bb | ||
|
b447ccd3f0 | ||
|
2058afb3e0 | ||
|
dc0a7d3a70 | ||
|
53618a4bd8 | ||
|
d6ca2cdc2e | ||
|
acf3bdc5a3 | ||
|
533d9cea38 | ||
|
82faf3a797 | ||
|
ece0af7787 | ||
|
6bccb1e5eb | ||
|
dc23abdb86 | ||
|
8d3be92d09 | ||
|
1e7f0f8a39 | ||
|
c99a4b7cc8 | ||
|
e3838643e5 | ||
|
5bd961735d | ||
|
1147cfcea4 | ||
|
abb577e4e9 | ||
|
29dd49d696 | ||
|
0f76003f77 | ||
|
1c4dfbe028 | ||
|
65be2e7f7b | ||
|
cf64c89ea3 | ||
|
d934f03597 | ||
|
e051cfd146 | ||
|
be30327dc9 | ||
|
f9784f15ed | ||
|
8e42fdaf5b | ||
|
2a52463585 | ||
|
20287973b1 | ||
|
7f958e6d89 | ||
|
e7138f1be9 | ||
|
01fba20872 | ||
|
696a70577a | ||
|
8ba61e8293 | ||
|
5944642278 | ||
|
f5434b5cba | ||
|
1159b74bdb | ||
|
bc4885b098 | ||
|
97be78a107 | ||
|
4b42f7b1b5 | ||
|
3de7fec650 | ||
|
07a55bb943 | ||
|
7894bd8ae1 | ||
|
5eee99e9ac | ||
|
e8ef0191d6 | ||
|
7d74dce82b | ||
|
43dd45de29 | ||
|
13b20806ba | ||
|
49b5ab8126 | ||
|
c99d5c210c | ||
|
0762de93d6 | ||
|
8d6461b01d | ||
|
f52dbaa2f2 | ||
|
0c92764409 | ||
|
976317e71b | ||
|
7b52d60036 | ||
|
83479a091e | ||
|
4e2592960d | ||
|
8d5b6a17b1 | ||
|
8945bc0dc1 | ||
|
1b22438c46 | ||
|
57c667f0b1 | ||
|
15d3638612 | ||
|
ebd3b5c9d1 | ||
|
5136dbc543 | ||
|
bceddab89f | ||
|
6d5bed756a | ||
|
a7f33d669f |
290
backend/package-lock.json
generated
290
backend/package-lock.json
generated
@@ -63,6 +63,7 @@
|
||||
"argon2": "^0.31.2",
|
||||
"aws-sdk": "^2.1553.0",
|
||||
"axios": "^1.11.0",
|
||||
"axios-ntlm": "^1.4.4",
|
||||
"axios-retry": "^4.0.0",
|
||||
"bcrypt": "^5.1.1",
|
||||
"botbuilder": "^4.23.2",
|
||||
@@ -12956,216 +12957,6 @@
|
||||
"dev": true,
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/@swc/core": {
|
||||
"version": "1.3.107",
|
||||
"resolved": "https://registry.npmjs.org/@swc/core/-/core-1.3.107.tgz",
|
||||
"integrity": "sha512-zKhqDyFcTsyLIYK1iEmavljZnf4CCor5pF52UzLAz4B6Nu/4GLU+2LQVAf+oRHjusG39PTPjd2AlRT3f3QWfsQ==",
|
||||
"dev": true,
|
||||
"hasInstallScript": true,
|
||||
"optional": true,
|
||||
"peer": true,
|
||||
"dependencies": {
|
||||
"@swc/counter": "^0.1.1",
|
||||
"@swc/types": "^0.1.5"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=10"
|
||||
},
|
||||
"funding": {
|
||||
"type": "opencollective",
|
||||
"url": "https://opencollective.com/swc"
|
||||
},
|
||||
"optionalDependencies": {
|
||||
"@swc/core-darwin-arm64": "1.3.107",
|
||||
"@swc/core-darwin-x64": "1.3.107",
|
||||
"@swc/core-linux-arm-gnueabihf": "1.3.107",
|
||||
"@swc/core-linux-arm64-gnu": "1.3.107",
|
||||
"@swc/core-linux-arm64-musl": "1.3.107",
|
||||
"@swc/core-linux-x64-gnu": "1.3.107",
|
||||
"@swc/core-linux-x64-musl": "1.3.107",
|
||||
"@swc/core-win32-arm64-msvc": "1.3.107",
|
||||
"@swc/core-win32-ia32-msvc": "1.3.107",
|
||||
"@swc/core-win32-x64-msvc": "1.3.107"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"@swc/helpers": "^0.5.0"
|
||||
},
|
||||
"peerDependenciesMeta": {
|
||||
"@swc/helpers": {
|
||||
"optional": true
|
||||
}
|
||||
}
|
||||
},
|
||||
"node_modules/@swc/core-darwin-arm64": {
|
||||
"version": "1.3.107",
|
||||
"resolved": "https://registry.npmjs.org/@swc/core-darwin-arm64/-/core-darwin-arm64-1.3.107.tgz",
|
||||
"integrity": "sha512-47tD/5vSXWxPd0j/ZllyQUg4bqalbQTsmqSw0J4dDdS82MWqCAwUErUrAZPRjBkjNQ6Kmrf5rpCWaGTtPw+ngw==",
|
||||
"cpu": [
|
||||
"arm64"
|
||||
],
|
||||
"dev": true,
|
||||
"optional": true,
|
||||
"os": [
|
||||
"darwin"
|
||||
],
|
||||
"peer": true,
|
||||
"engines": {
|
||||
"node": ">=10"
|
||||
}
|
||||
},
|
||||
"node_modules/@swc/core-darwin-x64": {
|
||||
"version": "1.3.107",
|
||||
"resolved": "https://registry.npmjs.org/@swc/core-darwin-x64/-/core-darwin-x64-1.3.107.tgz",
|
||||
"integrity": "sha512-hwiLJ2ulNkBGAh1m1eTfeY1417OAYbRGcb/iGsJ+LuVLvKAhU/itzsl535CvcwAlt2LayeCFfcI8gdeOLeZa9A==",
|
||||
"cpu": [
|
||||
"x64"
|
||||
],
|
||||
"dev": true,
|
||||
"optional": true,
|
||||
"os": [
|
||||
"darwin"
|
||||
],
|
||||
"peer": true,
|
||||
"engines": {
|
||||
"node": ">=10"
|
||||
}
|
||||
},
|
||||
"node_modules/@swc/core-linux-arm-gnueabihf": {
|
||||
"version": "1.3.107",
|
||||
"resolved": "https://registry.npmjs.org/@swc/core-linux-arm-gnueabihf/-/core-linux-arm-gnueabihf-1.3.107.tgz",
|
||||
"integrity": "sha512-I2wzcC0KXqh0OwymCmYwNRgZ9nxX7DWnOOStJXV3pS0uB83TXAkmqd7wvMBuIl9qu4Hfomi9aDM7IlEEn9tumQ==",
|
||||
"cpu": [
|
||||
"arm"
|
||||
],
|
||||
"dev": true,
|
||||
"optional": true,
|
||||
"os": [
|
||||
"linux"
|
||||
],
|
||||
"peer": true,
|
||||
"engines": {
|
||||
"node": ">=10"
|
||||
}
|
||||
},
|
||||
"node_modules/@swc/core-linux-arm64-gnu": {
|
||||
"version": "1.3.107",
|
||||
"resolved": "https://registry.npmjs.org/@swc/core-linux-arm64-gnu/-/core-linux-arm64-gnu-1.3.107.tgz",
|
||||
"integrity": "sha512-HWgnn7JORYlOYnGsdunpSF8A+BCZKPLzLtEUA27/M/ZuANcMZabKL9Zurt7XQXq888uJFAt98Gy+59PU90aHKg==",
|
||||
"cpu": [
|
||||
"arm64"
|
||||
],
|
||||
"dev": true,
|
||||
"optional": true,
|
||||
"os": [
|
||||
"linux"
|
||||
],
|
||||
"peer": true,
|
||||
"engines": {
|
||||
"node": ">=10"
|
||||
}
|
||||
},
|
||||
"node_modules/@swc/core-linux-arm64-musl": {
|
||||
"version": "1.3.107",
|
||||
"resolved": "https://registry.npmjs.org/@swc/core-linux-arm64-musl/-/core-linux-arm64-musl-1.3.107.tgz",
|
||||
"integrity": "sha512-vfPF74cWfAm8hyhS8yvYI94ucMHIo8xIYU+oFOW9uvDlGQRgnUf/6DEVbLyt/3yfX5723Ln57U8uiMALbX5Pyw==",
|
||||
"cpu": [
|
||||
"arm64"
|
||||
],
|
||||
"dev": true,
|
||||
"optional": true,
|
||||
"os": [
|
||||
"linux"
|
||||
],
|
||||
"peer": true,
|
||||
"engines": {
|
||||
"node": ">=10"
|
||||
}
|
||||
},
|
||||
"node_modules/@swc/core-linux-x64-gnu": {
|
||||
"version": "1.3.107",
|
||||
"resolved": "https://registry.npmjs.org/@swc/core-linux-x64-gnu/-/core-linux-x64-gnu-1.3.107.tgz",
|
||||
"integrity": "sha512-uBVNhIg0ip8rH9OnOsCARUFZ3Mq3tbPHxtmWk9uAa5u8jQwGWeBx5+nTHpDOVd3YxKb6+5xDEI/edeeLpha/9g==",
|
||||
"cpu": [
|
||||
"x64"
|
||||
],
|
||||
"dev": true,
|
||||
"optional": true,
|
||||
"os": [
|
||||
"linux"
|
||||
],
|
||||
"peer": true,
|
||||
"engines": {
|
||||
"node": ">=10"
|
||||
}
|
||||
},
|
||||
"node_modules/@swc/core-linux-x64-musl": {
|
||||
"version": "1.3.107",
|
||||
"resolved": "https://registry.npmjs.org/@swc/core-linux-x64-musl/-/core-linux-x64-musl-1.3.107.tgz",
|
||||
"integrity": "sha512-mvACkUvzSIB12q1H5JtabWATbk3AG+pQgXEN95AmEX2ZA5gbP9+B+mijsg7Sd/3tboHr7ZHLz/q3SHTvdFJrEw==",
|
||||
"cpu": [
|
||||
"x64"
|
||||
],
|
||||
"dev": true,
|
||||
"optional": true,
|
||||
"os": [
|
||||
"linux"
|
||||
],
|
||||
"peer": true,
|
||||
"engines": {
|
||||
"node": ">=10"
|
||||
}
|
||||
},
|
||||
"node_modules/@swc/core-win32-arm64-msvc": {
|
||||
"version": "1.3.107",
|
||||
"resolved": "https://registry.npmjs.org/@swc/core-win32-arm64-msvc/-/core-win32-arm64-msvc-1.3.107.tgz",
|
||||
"integrity": "sha512-J3P14Ngy/1qtapzbguEH41kY109t6DFxfbK4Ntz9dOWNuVY3o9/RTB841ctnJk0ZHEG+BjfCJjsD2n8H5HcaOA==",
|
||||
"cpu": [
|
||||
"arm64"
|
||||
],
|
||||
"dev": true,
|
||||
"optional": true,
|
||||
"os": [
|
||||
"win32"
|
||||
],
|
||||
"peer": true,
|
||||
"engines": {
|
||||
"node": ">=10"
|
||||
}
|
||||
},
|
||||
"node_modules/@swc/core-win32-ia32-msvc": {
|
||||
"version": "1.3.107",
|
||||
"resolved": "https://registry.npmjs.org/@swc/core-win32-ia32-msvc/-/core-win32-ia32-msvc-1.3.107.tgz",
|
||||
"integrity": "sha512-ZBUtgyjTHlz8TPJh7kfwwwFma+ktr6OccB1oXC8fMSopD0AxVnQasgun3l3099wIsAB9eEsJDQ/3lDkOLs1gBA==",
|
||||
"cpu": [
|
||||
"ia32"
|
||||
],
|
||||
"dev": true,
|
||||
"optional": true,
|
||||
"os": [
|
||||
"win32"
|
||||
],
|
||||
"peer": true,
|
||||
"engines": {
|
||||
"node": ">=10"
|
||||
}
|
||||
},
|
||||
"node_modules/@swc/core-win32-x64-msvc": {
|
||||
"version": "1.3.107",
|
||||
"resolved": "https://registry.npmjs.org/@swc/core-win32-x64-msvc/-/core-win32-x64-msvc-1.3.107.tgz",
|
||||
"integrity": "sha512-Eyzo2XRqWOxqhE1gk9h7LWmUf4Bp4Xn2Ttb0ayAXFp6YSTxQIThXcT9kipXZqcpxcmDwoq8iWbbf2P8XL743EA==",
|
||||
"cpu": [
|
||||
"x64"
|
||||
],
|
||||
"dev": true,
|
||||
"optional": true,
|
||||
"os": [
|
||||
"win32"
|
||||
],
|
||||
"peer": true,
|
||||
"engines": {
|
||||
"node": ">=10"
|
||||
}
|
||||
},
|
||||
"node_modules/@swc/counter": {
|
||||
"version": "0.1.3",
|
||||
"resolved": "https://registry.npmjs.org/@swc/counter/-/counter-0.1.3.tgz",
|
||||
@@ -13183,14 +12974,6 @@
|
||||
"tslib": "^2.8.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@swc/types": {
|
||||
"version": "0.1.5",
|
||||
"resolved": "https://registry.npmjs.org/@swc/types/-/types-0.1.5.tgz",
|
||||
"integrity": "sha512-myfUej5naTBWnqOCc/MdVOLVjXUXtIA+NpDrDBKJtLLg2shUjBu3cZmB/85RyitKc55+lUUyl7oRfLOvkr2hsw==",
|
||||
"dev": true,
|
||||
"optional": true,
|
||||
"peer": true
|
||||
},
|
||||
"node_modules/@techteamer/ocsp": {
|
||||
"version": "1.0.1",
|
||||
"resolved": "https://registry.npmjs.org/@techteamer/ocsp/-/ocsp-1.0.1.tgz",
|
||||
@@ -15195,6 +14978,18 @@
|
||||
"proxy-from-env": "^1.1.0"
|
||||
}
|
||||
},
|
||||
"node_modules/axios-ntlm": {
|
||||
"version": "1.4.4",
|
||||
"resolved": "https://registry.npmjs.org/axios-ntlm/-/axios-ntlm-1.4.4.tgz",
|
||||
"integrity": "sha512-kpCRdzMfL8gi0Z0o96P3QPAK4XuC8iciGgxGXe+PeQ4oyjI2LZN8WSOKbu0Y9Jo3T/A7pB81n6jYVPIpglEuRA==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"axios": "^1.8.4",
|
||||
"des.js": "^1.1.0",
|
||||
"dev-null": "^0.1.1",
|
||||
"js-md4": "^0.3.2"
|
||||
}
|
||||
},
|
||||
"node_modules/axios-retry": {
|
||||
"version": "4.0.0",
|
||||
"resolved": "https://registry.npmjs.org/axios-retry/-/axios-retry-4.0.0.tgz",
|
||||
@@ -16954,6 +16749,16 @@
|
||||
"resolved": "https://registry.npmjs.org/deprecation/-/deprecation-2.3.1.tgz",
|
||||
"integrity": "sha512-xmHIy4F3scKVwMsQ4WnVaS8bHOx0DmVwRywosKhaILI0ywMDWPtBSku2HNxRvF7jtwDRsoEwYQSfbxj8b7RlJQ=="
|
||||
},
|
||||
"node_modules/des.js": {
|
||||
"version": "1.1.0",
|
||||
"resolved": "https://registry.npmjs.org/des.js/-/des.js-1.1.0.tgz",
|
||||
"integrity": "sha512-r17GxjhUCjSRy8aiJpr8/UadFIzMzJGexI3Nmz4ADi9LYSFx4gTBp80+NaX/YsXWWLhpZ7v/v/ubEc/bCNfKwg==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"inherits": "^2.0.1",
|
||||
"minimalistic-assert": "^1.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/destroy": {
|
||||
"version": "1.2.0",
|
||||
"resolved": "https://registry.npmjs.org/destroy/-/destroy-1.2.0.tgz",
|
||||
@@ -16981,6 +16786,12 @@
|
||||
"node": ">=8"
|
||||
}
|
||||
},
|
||||
"node_modules/dev-null": {
|
||||
"version": "0.1.1",
|
||||
"resolved": "https://registry.npmjs.org/dev-null/-/dev-null-0.1.1.tgz",
|
||||
"integrity": "sha512-nMNZG0zfMgmdv8S5O0TM5cpwNbGKRGPCxVsr0SmA3NZZy9CYBbuNLL0PD3Acx9e5LIUgwONXtM9kM6RlawPxEQ==",
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/diff": {
|
||||
"version": "4.0.2",
|
||||
"resolved": "https://registry.npmjs.org/diff/-/diff-4.0.2.tgz",
|
||||
@@ -19029,49 +18840,6 @@
|
||||
"resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz",
|
||||
"integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w=="
|
||||
},
|
||||
"node_modules/gcp-metadata": {
|
||||
"version": "5.3.0",
|
||||
"resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-5.3.0.tgz",
|
||||
"integrity": "sha512-FNTkdNEnBdlqF2oatizolQqNANMrcqJt6AAYt99B3y1aLLC8Hc5IOBb+ZnnzllodEEf6xMBp6wRcBbc16fa65w==",
|
||||
"optional": true,
|
||||
"peer": true,
|
||||
"dependencies": {
|
||||
"gaxios": "^5.0.0",
|
||||
"json-bigint": "^1.0.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=12"
|
||||
}
|
||||
},
|
||||
"node_modules/gcp-metadata/node_modules/gaxios": {
|
||||
"version": "5.1.3",
|
||||
"resolved": "https://registry.npmjs.org/gaxios/-/gaxios-5.1.3.tgz",
|
||||
"integrity": "sha512-95hVgBRgEIRQQQHIbnxBXeHbW4TqFk4ZDJW7wmVtvYar72FdhRIo1UGOLS2eRAKCPEdPBWu+M7+A33D9CdX9rA==",
|
||||
"optional": true,
|
||||
"peer": true,
|
||||
"dependencies": {
|
||||
"extend": "^3.0.2",
|
||||
"https-proxy-agent": "^5.0.0",
|
||||
"is-stream": "^2.0.0",
|
||||
"node-fetch": "^2.6.9"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=12"
|
||||
}
|
||||
},
|
||||
"node_modules/gcp-metadata/node_modules/is-stream": {
|
||||
"version": "2.0.1",
|
||||
"resolved": "https://registry.npmjs.org/is-stream/-/is-stream-2.0.1.tgz",
|
||||
"integrity": "sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==",
|
||||
"optional": true,
|
||||
"peer": true,
|
||||
"engines": {
|
||||
"node": ">=8"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/sponsors/sindresorhus"
|
||||
}
|
||||
},
|
||||
"node_modules/generate-function": {
|
||||
"version": "2.3.1",
|
||||
"resolved": "https://registry.npmjs.org/generate-function/-/generate-function-2.3.1.tgz",
|
||||
|
@@ -37,7 +37,7 @@
|
||||
"build": "tsup --sourcemap",
|
||||
"build:frontend": "npm run build --prefix ../frontend",
|
||||
"start": "node --enable-source-maps dist/main.mjs",
|
||||
"type:check": "tsc --noEmit",
|
||||
"type:check": "node --max-old-space-size=8192 ./node_modules/.bin/tsc --noEmit",
|
||||
"lint:fix": "node --max-old-space-size=8192 ./node_modules/.bin/eslint --fix --ext js,ts ./src",
|
||||
"lint": "node --max-old-space-size=8192 ./node_modules/.bin/eslint 'src/**/*.ts'",
|
||||
"test:unit": "vitest run -c vitest.unit.config.ts",
|
||||
@@ -183,6 +183,7 @@
|
||||
"argon2": "^0.31.2",
|
||||
"aws-sdk": "^2.1553.0",
|
||||
"axios": "^1.11.0",
|
||||
"axios-ntlm": "^1.4.4",
|
||||
"axios-retry": "^4.0.0",
|
||||
"bcrypt": "^5.1.1",
|
||||
"botbuilder": "^4.23.2",
|
||||
|
@@ -0,0 +1,57 @@
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { TableName } from "../schemas";
|
||||
|
||||
export async function up(knex: Knex): Promise<void> {
|
||||
if (await knex.schema.hasTable(TableName.IdentityUniversalAuth)) {
|
||||
const hasLockoutEnabled = await knex.schema.hasColumn(TableName.IdentityUniversalAuth, "lockoutEnabled");
|
||||
const hasLockoutThreshold = await knex.schema.hasColumn(TableName.IdentityUniversalAuth, "lockoutThreshold");
|
||||
const hasLockoutDuration = await knex.schema.hasColumn(TableName.IdentityUniversalAuth, "lockoutDurationSeconds");
|
||||
const hasLockoutCounterReset = await knex.schema.hasColumn(
|
||||
TableName.IdentityUniversalAuth,
|
||||
"lockoutCounterResetSeconds"
|
||||
);
|
||||
|
||||
await knex.schema.alterTable(TableName.IdentityUniversalAuth, (t) => {
|
||||
if (!hasLockoutEnabled) {
|
||||
t.boolean("lockoutEnabled").notNullable().defaultTo(true);
|
||||
}
|
||||
if (!hasLockoutThreshold) {
|
||||
t.integer("lockoutThreshold").notNullable().defaultTo(3);
|
||||
}
|
||||
if (!hasLockoutDuration) {
|
||||
t.integer("lockoutDurationSeconds").notNullable().defaultTo(300); // 5 minutes
|
||||
}
|
||||
if (!hasLockoutCounterReset) {
|
||||
t.integer("lockoutCounterResetSeconds").notNullable().defaultTo(30); // 30 seconds
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
export async function down(knex: Knex): Promise<void> {
|
||||
if (await knex.schema.hasTable(TableName.IdentityUniversalAuth)) {
|
||||
const hasLockoutEnabled = await knex.schema.hasColumn(TableName.IdentityUniversalAuth, "lockoutEnabled");
|
||||
const hasLockoutThreshold = await knex.schema.hasColumn(TableName.IdentityUniversalAuth, "lockoutThreshold");
|
||||
const hasLockoutDuration = await knex.schema.hasColumn(TableName.IdentityUniversalAuth, "lockoutDurationSeconds");
|
||||
const hasLockoutCounterReset = await knex.schema.hasColumn(
|
||||
TableName.IdentityUniversalAuth,
|
||||
"lockoutCounterResetSeconds"
|
||||
);
|
||||
|
||||
await knex.schema.alterTable(TableName.IdentityUniversalAuth, (t) => {
|
||||
if (hasLockoutEnabled) {
|
||||
t.dropColumn("lockoutEnabled");
|
||||
}
|
||||
if (hasLockoutThreshold) {
|
||||
t.dropColumn("lockoutThreshold");
|
||||
}
|
||||
if (hasLockoutDuration) {
|
||||
t.dropColumn("lockoutDurationSeconds");
|
||||
}
|
||||
if (hasLockoutCounterReset) {
|
||||
t.dropColumn("lockoutCounterResetSeconds");
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
@@ -0,0 +1,23 @@
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { TableName } from "../schemas";
|
||||
|
||||
export async function up(knex: Knex): Promise<void> {
|
||||
const hasPropertiesCol = await knex.schema.hasColumn(TableName.PkiSubscriber, "properties");
|
||||
|
||||
if (!hasPropertiesCol) {
|
||||
await knex.schema.alterTable(TableName.PkiSubscriber, (t) => {
|
||||
t.jsonb("properties").nullable();
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
export async function down(knex: Knex): Promise<void> {
|
||||
const hasPropertiesCol = await knex.schema.hasColumn(TableName.PkiSubscriber, "properties");
|
||||
|
||||
if (hasPropertiesCol) {
|
||||
await knex.schema.alterTable(TableName.PkiSubscriber, (t) => {
|
||||
t.dropColumn("properties");
|
||||
});
|
||||
}
|
||||
}
|
@@ -18,7 +18,11 @@ export const IdentityUniversalAuthsSchema = z.object({
|
||||
createdAt: z.date(),
|
||||
updatedAt: z.date(),
|
||||
identityId: z.string().uuid(),
|
||||
accessTokenPeriod: z.coerce.number().default(0)
|
||||
accessTokenPeriod: z.coerce.number().default(0),
|
||||
lockoutEnabled: z.boolean().default(true),
|
||||
lockoutThreshold: z.number().default(3),
|
||||
lockoutDurationSeconds: z.number().default(300),
|
||||
lockoutCounterResetSeconds: z.number().default(30)
|
||||
});
|
||||
|
||||
export type TIdentityUniversalAuths = z.infer<typeof IdentityUniversalAuthsSchema>;
|
||||
|
@@ -25,7 +25,8 @@ export const PkiSubscribersSchema = z.object({
|
||||
lastAutoRenewAt: z.date().nullable().optional(),
|
||||
lastOperationStatus: z.string().nullable().optional(),
|
||||
lastOperationMessage: z.string().nullable().optional(),
|
||||
lastOperationAt: z.date().nullable().optional()
|
||||
lastOperationAt: z.date().nullable().optional(),
|
||||
properties: z.unknown().nullable().optional()
|
||||
});
|
||||
|
||||
export type TPkiSubscribers = z.infer<typeof PkiSubscribersSchema>;
|
||||
|
@@ -126,4 +126,39 @@ export const registerGithubOrgSyncRouter = async (server: FastifyZodProvider) =>
|
||||
return { githubOrgSyncConfig };
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
url: "/sync-all-teams",
|
||||
method: "POST",
|
||||
config: {
|
||||
rateLimit: writeLimit
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT]),
|
||||
schema: {
|
||||
response: {
|
||||
200: z.object({
|
||||
totalUsers: z.number(),
|
||||
errors: z.array(z.string()),
|
||||
createdTeams: z.array(z.string()),
|
||||
updatedTeams: z.array(z.string()),
|
||||
removedMemberships: z.number(),
|
||||
syncDuration: z.number()
|
||||
})
|
||||
}
|
||||
},
|
||||
handler: async (req) => {
|
||||
const result = await server.services.githubOrgSync.syncAllTeams({
|
||||
orgPermission: req.permission
|
||||
});
|
||||
|
||||
return {
|
||||
totalUsers: result.totalUsers,
|
||||
errors: result.errors,
|
||||
createdTeams: result.createdTeams,
|
||||
updatedTeams: result.updatedTeams,
|
||||
removedMemberships: result.removedMemberships,
|
||||
syncDuration: result.syncDuration
|
||||
};
|
||||
}
|
||||
});
|
||||
};
|
||||
|
@@ -6,9 +6,9 @@ import { getConfig } from "@app/lib/config/env";
|
||||
import { BadRequestError } from "@app/lib/errors";
|
||||
import { ActorType } from "@app/services/auth/auth-type";
|
||||
|
||||
import { OrgPermissionActions, OrgPermissionSubjects } from "../permission/org-permission";
|
||||
import { OrgPermissionAuditLogsActions, OrgPermissionSubjects } from "../permission/org-permission";
|
||||
import { TPermissionServiceFactory } from "../permission/permission-service-types";
|
||||
import { ProjectPermissionActions, ProjectPermissionSub } from "../permission/project-permission";
|
||||
import { ProjectPermissionAuditLogsActions, ProjectPermissionSub } from "../permission/project-permission";
|
||||
import { TAuditLogDALFactory } from "./audit-log-dal";
|
||||
import { TAuditLogQueueServiceFactory } from "./audit-log-queue";
|
||||
import { EventType, TAuditLogServiceFactory } from "./audit-log-types";
|
||||
@@ -41,7 +41,10 @@ export const auditLogServiceFactory = ({
|
||||
actorOrgId,
|
||||
actionProjectType: ActionProjectType.Any
|
||||
});
|
||||
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.AuditLogs);
|
||||
ForbiddenError.from(permission).throwUnlessCan(
|
||||
ProjectPermissionAuditLogsActions.Read,
|
||||
ProjectPermissionSub.AuditLogs
|
||||
);
|
||||
} else {
|
||||
// Organization-wide logs
|
||||
const { permission } = await permissionService.getOrgPermission(
|
||||
@@ -52,7 +55,10 @@ export const auditLogServiceFactory = ({
|
||||
actorOrgId
|
||||
);
|
||||
|
||||
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Read, OrgPermissionSubjects.AuditLogs);
|
||||
ForbiddenError.from(permission).throwUnlessCan(
|
||||
OrgPermissionAuditLogsActions.Read,
|
||||
OrgPermissionSubjects.AuditLogs
|
||||
);
|
||||
}
|
||||
|
||||
// If project ID is not provided, then we need to return all the audit logs for the organization itself.
|
||||
|
@@ -198,6 +198,7 @@ export enum EventType {
|
||||
|
||||
CREATE_IDENTITY_UNIVERSAL_AUTH_CLIENT_SECRET = "create-identity-universal-auth-client-secret",
|
||||
REVOKE_IDENTITY_UNIVERSAL_AUTH_CLIENT_SECRET = "revoke-identity-universal-auth-client-secret",
|
||||
CLEAR_IDENTITY_UNIVERSAL_AUTH_LOCKOUTS = "clear-identity-universal-auth-lockouts",
|
||||
|
||||
GET_IDENTITY_UNIVERSAL_AUTH_CLIENT_SECRETS = "get-identity-universal-auth-client-secret",
|
||||
GET_IDENTITY_UNIVERSAL_AUTH_CLIENT_SECRET_BY_ID = "get-identity-universal-auth-client-secret-by-id",
|
||||
@@ -281,6 +282,7 @@ export enum EventType {
|
||||
UPDATE_SSH_CERTIFICATE_TEMPLATE = "update-ssh-certificate-template",
|
||||
DELETE_SSH_CERTIFICATE_TEMPLATE = "delete-ssh-certificate-template",
|
||||
GET_SSH_CERTIFICATE_TEMPLATE = "get-ssh-certificate-template",
|
||||
GET_AZURE_AD_TEMPLATES = "get-azure-ad-templates",
|
||||
GET_SSH_HOST = "get-ssh-host",
|
||||
CREATE_SSH_HOST = "create-ssh-host",
|
||||
UPDATE_SSH_HOST = "update-ssh-host",
|
||||
@@ -866,6 +868,10 @@ interface AddIdentityUniversalAuthEvent {
|
||||
accessTokenMaxTTL: number;
|
||||
accessTokenNumUsesLimit: number;
|
||||
accessTokenTrustedIps: Array<TIdentityTrustedIp>;
|
||||
lockoutEnabled: boolean;
|
||||
lockoutThreshold: number;
|
||||
lockoutDurationSeconds: number;
|
||||
lockoutCounterResetSeconds: number;
|
||||
};
|
||||
}
|
||||
|
||||
@@ -878,6 +884,10 @@ interface UpdateIdentityUniversalAuthEvent {
|
||||
accessTokenMaxTTL?: number;
|
||||
accessTokenNumUsesLimit?: number;
|
||||
accessTokenTrustedIps?: Array<TIdentityTrustedIp>;
|
||||
lockoutEnabled?: boolean;
|
||||
lockoutThreshold?: number;
|
||||
lockoutDurationSeconds?: number;
|
||||
lockoutCounterResetSeconds?: number;
|
||||
};
|
||||
}
|
||||
|
||||
@@ -1037,6 +1047,13 @@ interface RevokeIdentityUniversalAuthClientSecretEvent {
|
||||
};
|
||||
}
|
||||
|
||||
interface ClearIdentityUniversalAuthLockoutsEvent {
|
||||
type: EventType.CLEAR_IDENTITY_UNIVERSAL_AUTH_LOCKOUTS;
|
||||
metadata: {
|
||||
identityId: string;
|
||||
};
|
||||
}
|
||||
|
||||
interface LoginIdentityGcpAuthEvent {
|
||||
type: EventType.LOGIN_IDENTITY_GCP_AUTH;
|
||||
metadata: {
|
||||
@@ -2497,6 +2514,14 @@ interface CreateCertificateTemplateEstConfig {
|
||||
};
|
||||
}
|
||||
|
||||
interface GetAzureAdCsTemplatesEvent {
|
||||
type: EventType.GET_AZURE_AD_TEMPLATES;
|
||||
metadata: {
|
||||
caId: string;
|
||||
amount: number;
|
||||
};
|
||||
}
|
||||
|
||||
interface UpdateCertificateTemplateEstConfig {
|
||||
type: EventType.UPDATE_CERTIFICATE_TEMPLATE_EST_CONFIG;
|
||||
metadata: {
|
||||
@@ -3491,6 +3516,7 @@ export type Event =
|
||||
| GetIdentityUniversalAuthClientSecretsEvent
|
||||
| GetIdentityUniversalAuthClientSecretByIdEvent
|
||||
| RevokeIdentityUniversalAuthClientSecretEvent
|
||||
| ClearIdentityUniversalAuthLockoutsEvent
|
||||
| LoginIdentityGcpAuthEvent
|
||||
| AddIdentityGcpAuthEvent
|
||||
| DeleteIdentityGcpAuthEvent
|
||||
@@ -3636,6 +3662,7 @@ export type Event =
|
||||
| CreateCertificateTemplateEstConfig
|
||||
| UpdateCertificateTemplateEstConfig
|
||||
| GetCertificateTemplateEstConfig
|
||||
| GetAzureAdCsTemplatesEvent
|
||||
| AttemptCreateSlackIntegration
|
||||
| AttemptReinstallSlackIntegration
|
||||
| UpdateSlackIntegration
|
||||
|
@@ -1,14 +1,19 @@
|
||||
/* eslint-disable @typescript-eslint/return-await */
|
||||
/* eslint-disable no-await-in-loop */
|
||||
import { ForbiddenError } from "@casl/ability";
|
||||
import { Octokit } from "@octokit/core";
|
||||
import { paginateGraphql } from "@octokit/plugin-paginate-graphql";
|
||||
import { Octokit as OctokitRest } from "@octokit/rest";
|
||||
import RE2 from "re2";
|
||||
|
||||
import { OrgMembershipRole } from "@app/db/schemas";
|
||||
import { BadRequestError, NotFoundError } from "@app/lib/errors";
|
||||
import { groupBy } from "@app/lib/fn";
|
||||
import { logger } from "@app/lib/logger";
|
||||
import { retryWithBackoff } from "@app/lib/retry";
|
||||
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
|
||||
import { KmsDataKey } from "@app/services/kms/kms-types";
|
||||
import { TOrgMembershipDALFactory } from "@app/services/org-membership/org-membership-dal";
|
||||
|
||||
import { TGroupDALFactory } from "../group/group-dal";
|
||||
import { TUserGroupMembershipDALFactory } from "../group/user-group-membership-dal";
|
||||
@@ -16,20 +21,67 @@ import { TLicenseServiceFactory } from "../license/license-service";
|
||||
import { OrgPermissionActions, OrgPermissionSubjects } from "../permission/org-permission";
|
||||
import { TPermissionServiceFactory } from "../permission/permission-service-types";
|
||||
import { TGithubOrgSyncDALFactory } from "./github-org-sync-dal";
|
||||
import { TCreateGithubOrgSyncDTO, TDeleteGithubOrgSyncDTO, TUpdateGithubOrgSyncDTO } from "./github-org-sync-types";
|
||||
import {
|
||||
TCreateGithubOrgSyncDTO,
|
||||
TDeleteGithubOrgSyncDTO,
|
||||
TSyncAllTeamsDTO,
|
||||
TSyncResult,
|
||||
TUpdateGithubOrgSyncDTO,
|
||||
TValidateGithubTokenDTO
|
||||
} from "./github-org-sync-types";
|
||||
|
||||
const OctokitWithPlugin = Octokit.plugin(paginateGraphql);
|
||||
|
||||
// Type definitions for GitHub API errors
|
||||
interface GitHubApiError extends Error {
|
||||
status?: number;
|
||||
response?: {
|
||||
status?: number;
|
||||
headers?: {
|
||||
"x-ratelimit-reset"?: string;
|
||||
};
|
||||
};
|
||||
}
|
||||
|
||||
interface OrgMembershipWithUser {
|
||||
id: string;
|
||||
orgId: string;
|
||||
role: string;
|
||||
status: string;
|
||||
isActive: boolean;
|
||||
inviteEmail: string | null;
|
||||
user: {
|
||||
id: string;
|
||||
email: string;
|
||||
username: string | null;
|
||||
firstName: string | null;
|
||||
lastName: string | null;
|
||||
} | null;
|
||||
}
|
||||
|
||||
interface GroupMembership {
|
||||
id: string;
|
||||
groupId: string;
|
||||
groupName: string;
|
||||
orgMembershipId: string;
|
||||
firstName: string | null;
|
||||
lastName: string | null;
|
||||
}
|
||||
|
||||
type TGithubOrgSyncServiceFactoryDep = {
|
||||
githubOrgSyncDAL: TGithubOrgSyncDALFactory;
|
||||
permissionService: Pick<TPermissionServiceFactory, "getOrgPermission">;
|
||||
kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">;
|
||||
userGroupMembershipDAL: Pick<
|
||||
TUserGroupMembershipDALFactory,
|
||||
"findGroupMembershipsByUserIdInOrg" | "insertMany" | "delete"
|
||||
"findGroupMembershipsByUserIdInOrg" | "findGroupMembershipsByGroupIdInOrg" | "insertMany" | "delete"
|
||||
>;
|
||||
groupDAL: Pick<TGroupDALFactory, "insertMany" | "transaction" | "find">;
|
||||
licenseService: Pick<TLicenseServiceFactory, "getPlan">;
|
||||
orgMembershipDAL: Pick<
|
||||
TOrgMembershipDALFactory,
|
||||
"find" | "findOrgMembershipById" | "findOrgMembershipsWithUsersByOrgId"
|
||||
>;
|
||||
};
|
||||
|
||||
export type TGithubOrgSyncServiceFactory = ReturnType<typeof githubOrgSyncServiceFactory>;
|
||||
@@ -40,7 +92,8 @@ export const githubOrgSyncServiceFactory = ({
|
||||
kmsService,
|
||||
userGroupMembershipDAL,
|
||||
groupDAL,
|
||||
licenseService
|
||||
licenseService,
|
||||
orgMembershipDAL
|
||||
}: TGithubOrgSyncServiceFactoryDep) => {
|
||||
const createGithubOrgSync = async ({
|
||||
githubOrgName,
|
||||
@@ -304,8 +357,8 @@ export const githubOrgSyncServiceFactory = ({
|
||||
const removeFromTeams = infisicalUserGroups.filter((el) => !githubUserTeamSet.has(el.groupName));
|
||||
|
||||
if (newTeams.length || updateTeams.length || removeFromTeams.length) {
|
||||
await groupDAL.transaction(async (tx) => {
|
||||
if (newTeams.length) {
|
||||
if (newTeams.length) {
|
||||
await groupDAL.transaction(async (tx) => {
|
||||
const newGroups = await groupDAL.insertMany(
|
||||
newTeams.map((newGroupName) => ({
|
||||
name: newGroupName,
|
||||
@@ -322,9 +375,11 @@ export const githubOrgSyncServiceFactory = ({
|
||||
})),
|
||||
tx
|
||||
);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
if (updateTeams.length) {
|
||||
if (updateTeams.length) {
|
||||
await groupDAL.transaction(async (tx) => {
|
||||
await userGroupMembershipDAL.insertMany(
|
||||
updateTeams.map((el) => ({
|
||||
groupId: githubUserTeamOnInfisicalGroupByName[el][0].id,
|
||||
@@ -332,16 +387,433 @@ export const githubOrgSyncServiceFactory = ({
|
||||
})),
|
||||
tx
|
||||
);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
if (removeFromTeams.length) {
|
||||
if (removeFromTeams.length) {
|
||||
await groupDAL.transaction(async (tx) => {
|
||||
await userGroupMembershipDAL.delete(
|
||||
{ userId, $in: { groupId: removeFromTeams.map((el) => el.groupId) } },
|
||||
tx
|
||||
);
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
const validateGithubToken = async ({ orgPermission, githubOrgAccessToken }: TValidateGithubTokenDTO) => {
|
||||
const { permission } = await permissionService.getOrgPermission(
|
||||
orgPermission.type,
|
||||
orgPermission.id,
|
||||
orgPermission.orgId,
|
||||
orgPermission.authMethod,
|
||||
orgPermission.orgId
|
||||
);
|
||||
|
||||
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Read, OrgPermissionSubjects.GithubOrgSync);
|
||||
|
||||
const plan = await licenseService.getPlan(orgPermission.orgId);
|
||||
if (!plan.githubOrgSync) {
|
||||
throw new BadRequestError({
|
||||
message:
|
||||
"Failed to validate GitHub token due to plan restriction. Upgrade plan to use GitHub organization sync."
|
||||
});
|
||||
}
|
||||
|
||||
const config = await githubOrgSyncDAL.findOne({ orgId: orgPermission.orgId });
|
||||
if (!config) {
|
||||
throw new BadRequestError({ message: "GitHub organization sync is not configured" });
|
||||
}
|
||||
|
||||
try {
|
||||
const testOctokit = new OctokitRest({
|
||||
auth: githubOrgAccessToken,
|
||||
request: {
|
||||
signal: AbortSignal.timeout(10000)
|
||||
}
|
||||
});
|
||||
|
||||
const { data: org } = await testOctokit.rest.orgs.get({
|
||||
org: config.githubOrgName
|
||||
});
|
||||
|
||||
const octokitGraphQL = new OctokitWithPlugin({
|
||||
auth: githubOrgAccessToken,
|
||||
request: {
|
||||
signal: AbortSignal.timeout(10000)
|
||||
}
|
||||
});
|
||||
|
||||
await octokitGraphQL.graphql(`query($org: String!) { organization(login: $org) { id name } }`, {
|
||||
org: config.githubOrgName
|
||||
});
|
||||
|
||||
return {
|
||||
valid: true,
|
||||
organizationInfo: {
|
||||
id: org.id,
|
||||
login: org.login,
|
||||
name: org.name || org.login,
|
||||
publicRepos: org.public_repos,
|
||||
privateRepos: org.owned_private_repos || 0
|
||||
}
|
||||
};
|
||||
} catch (error) {
|
||||
logger.error(error, `GitHub token validation failed for org ${config.githubOrgName}`);
|
||||
|
||||
const gitHubError = error as GitHubApiError;
|
||||
const statusCode = gitHubError.status || gitHubError.response?.status;
|
||||
if (statusCode) {
|
||||
if (statusCode === 401) {
|
||||
throw new BadRequestError({
|
||||
message: "GitHub access token is invalid or expired."
|
||||
});
|
||||
}
|
||||
if (statusCode === 403) {
|
||||
throw new BadRequestError({
|
||||
message:
|
||||
"GitHub access token lacks required permissions. Required: 1) 'read:org' scope for organization teams, 2) Token owner must be an organization member with team visibility access, 3) Organization settings must allow team visibility. Check GitHub token scopes and organization member permissions."
|
||||
});
|
||||
}
|
||||
if (statusCode === 404) {
|
||||
throw new BadRequestError({
|
||||
message: `Organization '${config.githubOrgName}' not found or access token does not have access to it.`
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
throw new BadRequestError({
|
||||
message: `GitHub token validation failed: ${(error as Error).message}`
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
const syncAllTeams = async ({ orgPermission }: TSyncAllTeamsDTO): Promise<TSyncResult> => {
|
||||
const { permission } = await permissionService.getOrgPermission(
|
||||
orgPermission.type,
|
||||
orgPermission.id,
|
||||
orgPermission.orgId,
|
||||
orgPermission.authMethod,
|
||||
orgPermission.orgId
|
||||
);
|
||||
|
||||
ForbiddenError.from(permission).throwUnlessCan(
|
||||
OrgPermissionActions.Edit,
|
||||
OrgPermissionSubjects.GithubOrgSyncManual
|
||||
);
|
||||
|
||||
const plan = await licenseService.getPlan(orgPermission.orgId);
|
||||
if (!plan.githubOrgSync) {
|
||||
throw new BadRequestError({
|
||||
message:
|
||||
"Failed to sync all GitHub teams due to plan restriction. Upgrade plan to use GitHub organization sync."
|
||||
});
|
||||
}
|
||||
|
||||
const config = await githubOrgSyncDAL.findOne({ orgId: orgPermission.orgId });
|
||||
if (!config || !config?.isActive) {
|
||||
throw new BadRequestError({ message: "GitHub organization sync is not configured or not active" });
|
||||
}
|
||||
|
||||
const { decryptor } = await kmsService.createCipherPairWithDataKey({
|
||||
type: KmsDataKey.Organization,
|
||||
orgId: orgPermission.orgId
|
||||
});
|
||||
|
||||
if (!config.encryptedGithubOrgAccessToken) {
|
||||
throw new BadRequestError({
|
||||
message: "GitHub organization access token is required. Please set a token first."
|
||||
});
|
||||
}
|
||||
|
||||
const orgAccessToken = decryptor({ cipherTextBlob: config.encryptedGithubOrgAccessToken }).toString();
|
||||
|
||||
try {
|
||||
const testOctokit = new OctokitRest({
|
||||
auth: orgAccessToken,
|
||||
request: {
|
||||
signal: AbortSignal.timeout(10000)
|
||||
}
|
||||
});
|
||||
|
||||
await testOctokit.rest.orgs.get({
|
||||
org: config.githubOrgName
|
||||
});
|
||||
|
||||
await testOctokit.rest.users.getAuthenticated();
|
||||
} catch (error) {
|
||||
throw new BadRequestError({
|
||||
message: "Stored GitHub access token is invalid or expired. Please set a new token."
|
||||
});
|
||||
}
|
||||
|
||||
const allMembers = await orgMembershipDAL.findOrgMembershipsWithUsersByOrgId(orgPermission.orgId);
|
||||
const activeMembers = allMembers.filter(
|
||||
(member) => member.status === "accepted" && member.isActive
|
||||
) as OrgMembershipWithUser[];
|
||||
|
||||
const startTime = Date.now();
|
||||
const syncErrors: string[] = [];
|
||||
|
||||
const octokit = new OctokitWithPlugin({
|
||||
auth: orgAccessToken,
|
||||
request: {
|
||||
signal: AbortSignal.timeout(30000)
|
||||
}
|
||||
});
|
||||
|
||||
const data = await retryWithBackoff(async () => {
|
||||
return octokit.graphql
|
||||
.paginate<{
|
||||
organization: {
|
||||
teams: {
|
||||
totalCount: number;
|
||||
edges: {
|
||||
node: {
|
||||
name: string;
|
||||
description: string;
|
||||
members: {
|
||||
edges: {
|
||||
node: {
|
||||
login: string;
|
||||
};
|
||||
}[];
|
||||
};
|
||||
};
|
||||
}[];
|
||||
};
|
||||
};
|
||||
}>(
|
||||
`
|
||||
query orgTeams($cursor: String, $org: String!) {
|
||||
organization(login: $org) {
|
||||
teams(first: 100, after: $cursor) {
|
||||
totalCount
|
||||
edges {
|
||||
node {
|
||||
name
|
||||
description
|
||||
members(first: 100) {
|
||||
edges {
|
||||
node {
|
||||
login
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
pageInfo {
|
||||
hasNextPage
|
||||
endCursor
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
`,
|
||||
{
|
||||
org: config.githubOrgName
|
||||
}
|
||||
)
|
||||
.catch((err) => {
|
||||
logger.error(err, "GitHub GraphQL error for batched team sync");
|
||||
|
||||
const gitHubError = err as GitHubApiError;
|
||||
const statusCode = gitHubError.status || gitHubError.response?.status;
|
||||
if (statusCode) {
|
||||
if (statusCode === 401) {
|
||||
throw new BadRequestError({
|
||||
message: "GitHub access token is invalid or expired. Please provide a new token."
|
||||
});
|
||||
}
|
||||
if (statusCode === 403) {
|
||||
throw new BadRequestError({
|
||||
message:
|
||||
"GitHub access token lacks required permissions for organization team sync. Required: 1) 'admin:org' scope, 2) Token owner must be organization owner or have team read permissions, 3) Organization settings must allow team visibility. Check token scopes and user role."
|
||||
});
|
||||
}
|
||||
if (statusCode === 404) {
|
||||
throw new BadRequestError({
|
||||
message: `Organization ${config.githubOrgName} not found or access token does not have sufficient permissions to read it.`
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
if ((err as Error)?.message?.includes("Although you appear to have the correct authorization credential")) {
|
||||
throw new BadRequestError({
|
||||
message:
|
||||
"Organization has restricted OAuth app access. Please check that: 1) Your organization has approved the Infisical OAuth application, 2) The token owner has sufficient organization permissions."
|
||||
});
|
||||
}
|
||||
throw new BadRequestError({ message: `GitHub GraphQL query failed: ${(err as Error)?.message}` });
|
||||
});
|
||||
});
|
||||
|
||||
const {
|
||||
organization: { teams }
|
||||
} = data;
|
||||
|
||||
const userTeamMap = new Map<string, string[]>();
|
||||
const allGithubUsernamesInTeams = new Set<string>();
|
||||
|
||||
teams?.edges?.forEach((teamEdge) => {
|
||||
const teamName = teamEdge.node.name.toLowerCase();
|
||||
|
||||
teamEdge.node.members.edges.forEach((memberEdge) => {
|
||||
const username = memberEdge.node.login.toLowerCase();
|
||||
allGithubUsernamesInTeams.add(username);
|
||||
|
||||
if (!userTeamMap.has(username)) {
|
||||
userTeamMap.set(username, []);
|
||||
}
|
||||
userTeamMap.get(username)!.push(teamName);
|
||||
});
|
||||
});
|
||||
|
||||
const allGithubTeamNames = Array.from(new Set(teams?.edges?.map((edge) => edge.node.name.toLowerCase()) || []));
|
||||
|
||||
const existingTeamsOnInfisical = await groupDAL.find({
|
||||
orgId: orgPermission.orgId,
|
||||
$in: { name: allGithubTeamNames }
|
||||
});
|
||||
const existingTeamsMap = groupBy(existingTeamsOnInfisical, (i) => i.name);
|
||||
|
||||
const teamsToCreate = allGithubTeamNames.filter((teamName) => !(teamName in existingTeamsMap));
|
||||
const createdTeams = new Set<string>();
|
||||
const updatedTeams = new Set<string>();
|
||||
const totalRemovedMemberships = 0;
|
||||
|
||||
await groupDAL.transaction(async (tx) => {
|
||||
if (teamsToCreate.length > 0) {
|
||||
const newGroups = await groupDAL.insertMany(
|
||||
teamsToCreate.map((teamName) => ({
|
||||
name: teamName,
|
||||
role: OrgMembershipRole.Member,
|
||||
slug: teamName,
|
||||
orgId: orgPermission.orgId
|
||||
})),
|
||||
tx
|
||||
);
|
||||
|
||||
newGroups.forEach((group) => {
|
||||
if (!existingTeamsMap[group.name]) {
|
||||
existingTeamsMap[group.name] = [];
|
||||
}
|
||||
existingTeamsMap[group.name].push(group);
|
||||
createdTeams.add(group.name);
|
||||
});
|
||||
}
|
||||
|
||||
const allTeams = [...Object.values(existingTeamsMap).flat()];
|
||||
|
||||
for (const team of allTeams) {
|
||||
const teamName = team.name.toLowerCase();
|
||||
|
||||
const currentMemberships = (await userGroupMembershipDAL.findGroupMembershipsByGroupIdInOrg(
|
||||
team.id,
|
||||
orgPermission.orgId
|
||||
)) as GroupMembership[];
|
||||
|
||||
const expectedUserIds = new Set<string>();
|
||||
teams?.edges?.forEach((teamEdge) => {
|
||||
if (teamEdge.node.name.toLowerCase() === teamName) {
|
||||
teamEdge.node.members.edges.forEach((memberEdge) => {
|
||||
const githubUsername = memberEdge.node.login.toLowerCase();
|
||||
|
||||
const matchingMember = activeMembers.find((member) => {
|
||||
const email = member.user?.email || member.inviteEmail;
|
||||
if (!email) return false;
|
||||
|
||||
const emailPrefix = email.split("@")[0].toLowerCase();
|
||||
const emailDomain = email.split("@")[1].toLowerCase();
|
||||
|
||||
if (emailPrefix === githubUsername) {
|
||||
return true;
|
||||
}
|
||||
const domainName = emailDomain.split(".")[0];
|
||||
if (githubUsername.endsWith(domainName) && githubUsername.length > domainName.length) {
|
||||
const baseUsername = githubUsername.slice(0, -domainName.length);
|
||||
if (emailPrefix === baseUsername) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
const emailSplitRegex = new RE2(/[._-]/);
|
||||
const emailParts = emailPrefix.split(emailSplitRegex);
|
||||
const longestEmailPart = emailParts.reduce((a, b) => (a.length > b.length ? a : b), "");
|
||||
if (longestEmailPart.length >= 4 && githubUsername.includes(longestEmailPart)) {
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
});
|
||||
|
||||
if (matchingMember?.user?.id) {
|
||||
expectedUserIds.add(matchingMember.user.id);
|
||||
logger.info(
|
||||
`Matched GitHub user ${githubUsername} to email ${matchingMember.user?.email || matchingMember.inviteEmail}`
|
||||
);
|
||||
}
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
const currentUserIds = new Set<string>();
|
||||
currentMemberships.forEach((membership) => {
|
||||
const activeMember = activeMembers.find((am) => am.id === membership.orgMembershipId);
|
||||
if (activeMember?.user?.id) {
|
||||
currentUserIds.add(activeMember.user.id);
|
||||
}
|
||||
});
|
||||
|
||||
const usersToAdd = Array.from(expectedUserIds).filter((userId) => !currentUserIds.has(userId));
|
||||
|
||||
const membershipsToRemove = currentMemberships.filter((membership) => {
|
||||
const activeMember = activeMembers.find((am) => am.id === membership.orgMembershipId);
|
||||
return activeMember?.user?.id && !expectedUserIds.has(activeMember.user.id);
|
||||
});
|
||||
|
||||
if (usersToAdd.length > 0) {
|
||||
await userGroupMembershipDAL.insertMany(
|
||||
usersToAdd.map((userId) => ({
|
||||
userId,
|
||||
groupId: team.id
|
||||
})),
|
||||
tx
|
||||
);
|
||||
updatedTeams.add(teamName);
|
||||
}
|
||||
|
||||
if (membershipsToRemove.length > 0) {
|
||||
await userGroupMembershipDAL.delete(
|
||||
{
|
||||
$in: {
|
||||
id: membershipsToRemove.map((m) => m.id)
|
||||
}
|
||||
},
|
||||
tx
|
||||
);
|
||||
updatedTeams.add(teamName);
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
const syncDuration = Date.now() - startTime;
|
||||
|
||||
logger.info(
|
||||
{
|
||||
orgId: orgPermission.orgId,
|
||||
createdTeams: createdTeams.size,
|
||||
syncDuration
|
||||
},
|
||||
"GitHub team sync completed"
|
||||
);
|
||||
|
||||
return {
|
||||
totalUsers: activeMembers.length,
|
||||
errors: syncErrors,
|
||||
createdTeams: Array.from(createdTeams),
|
||||
updatedTeams: Array.from(updatedTeams),
|
||||
removedMemberships: totalRemovedMemberships,
|
||||
syncDuration
|
||||
};
|
||||
};
|
||||
|
||||
return {
|
||||
@@ -349,6 +821,8 @@ export const githubOrgSyncServiceFactory = ({
|
||||
updateGithubOrgSync,
|
||||
deleteGithubOrgSync,
|
||||
getGithubOrgSync,
|
||||
syncUserGroups
|
||||
syncUserGroups,
|
||||
syncAllTeams,
|
||||
validateGithubToken
|
||||
};
|
||||
};
|
||||
|
@@ -21,3 +21,21 @@ export interface TDeleteGithubOrgSyncDTO {
|
||||
export interface TGetGithubOrgSyncDTO {
|
||||
orgPermission: OrgServiceActor;
|
||||
}
|
||||
|
||||
export interface TSyncAllTeamsDTO {
|
||||
orgPermission: OrgServiceActor;
|
||||
}
|
||||
|
||||
export interface TSyncResult {
|
||||
totalUsers: number;
|
||||
errors: string[];
|
||||
createdTeams: string[];
|
||||
updatedTeams: string[];
|
||||
removedMemberships: number;
|
||||
syncDuration: number;
|
||||
}
|
||||
|
||||
export interface TValidateGithubTokenDTO {
|
||||
orgPermission: OrgServiceActor;
|
||||
githubOrgAccessToken: string;
|
||||
}
|
||||
|
@@ -2,6 +2,7 @@ import { AbilityBuilder, createMongoAbility, MongoAbility } from "@casl/ability"
|
||||
|
||||
import {
|
||||
ProjectPermissionActions,
|
||||
ProjectPermissionAuditLogsActions,
|
||||
ProjectPermissionCertificateActions,
|
||||
ProjectPermissionCmekActions,
|
||||
ProjectPermissionCommitsActions,
|
||||
@@ -394,7 +395,7 @@ const buildMemberPermissionRules = () => {
|
||||
);
|
||||
|
||||
can([ProjectPermissionActions.Read], ProjectPermissionSub.Role);
|
||||
can([ProjectPermissionActions.Read], ProjectPermissionSub.AuditLogs);
|
||||
can([ProjectPermissionAuditLogsActions.Read], ProjectPermissionSub.AuditLogs);
|
||||
can([ProjectPermissionActions.Read], ProjectPermissionSub.IpAllowList);
|
||||
|
||||
// double check if all CRUD are needed for CA and Certificates
|
||||
@@ -502,7 +503,7 @@ const buildViewerPermissionRules = () => {
|
||||
can(ProjectPermissionActions.Read, ProjectPermissionSub.Settings);
|
||||
can(ProjectPermissionActions.Read, ProjectPermissionSub.Environments);
|
||||
can(ProjectPermissionActions.Read, ProjectPermissionSub.Tags);
|
||||
can(ProjectPermissionActions.Read, ProjectPermissionSub.AuditLogs);
|
||||
can(ProjectPermissionAuditLogsActions.Read, ProjectPermissionSub.AuditLogs);
|
||||
can(ProjectPermissionActions.Read, ProjectPermissionSub.IpAllowList);
|
||||
can(ProjectPermissionActions.Read, ProjectPermissionSub.CertificateAuthorities);
|
||||
can(ProjectPermissionCertificateActions.Read, ProjectPermissionSub.Certificates);
|
||||
|
@@ -23,6 +23,10 @@ export enum OrgPermissionAppConnectionActions {
|
||||
Connect = "connect"
|
||||
}
|
||||
|
||||
export enum OrgPermissionAuditLogsActions {
|
||||
Read = "read"
|
||||
}
|
||||
|
||||
export enum OrgPermissionKmipActions {
|
||||
Proxy = "proxy",
|
||||
Setup = "setup"
|
||||
@@ -90,6 +94,7 @@ export enum OrgPermissionSubjects {
|
||||
Sso = "sso",
|
||||
Scim = "scim",
|
||||
GithubOrgSync = "github-org-sync",
|
||||
GithubOrgSyncManual = "github-org-sync-manual",
|
||||
Ldap = "ldap",
|
||||
Groups = "groups",
|
||||
Billing = "billing",
|
||||
@@ -119,13 +124,14 @@ export type OrgPermissionSet =
|
||||
| [OrgPermissionActions, OrgPermissionSubjects.Sso]
|
||||
| [OrgPermissionActions, OrgPermissionSubjects.Scim]
|
||||
| [OrgPermissionActions, OrgPermissionSubjects.GithubOrgSync]
|
||||
| [OrgPermissionActions, OrgPermissionSubjects.GithubOrgSyncManual]
|
||||
| [OrgPermissionActions, OrgPermissionSubjects.Ldap]
|
||||
| [OrgPermissionGroupActions, OrgPermissionSubjects.Groups]
|
||||
| [OrgPermissionActions, OrgPermissionSubjects.SecretScanning]
|
||||
| [OrgPermissionBillingActions, OrgPermissionSubjects.Billing]
|
||||
| [OrgPermissionIdentityActions, OrgPermissionSubjects.Identity]
|
||||
| [OrgPermissionActions, OrgPermissionSubjects.Kms]
|
||||
| [OrgPermissionActions, OrgPermissionSubjects.AuditLogs]
|
||||
| [OrgPermissionAuditLogsActions, OrgPermissionSubjects.AuditLogs]
|
||||
| [OrgPermissionActions, OrgPermissionSubjects.ProjectTemplates]
|
||||
| [OrgPermissionGatewayActions, OrgPermissionSubjects.Gateway]
|
||||
| [
|
||||
@@ -188,6 +194,10 @@ export const OrgPermissionSchema = z.discriminatedUnion("subject", [
|
||||
subject: z.literal(OrgPermissionSubjects.GithubOrgSync).describe("The entity this permission pertains to."),
|
||||
action: CASL_ACTION_SCHEMA_NATIVE_ENUM(OrgPermissionActions).describe("Describe what action an entity can take.")
|
||||
}),
|
||||
z.object({
|
||||
subject: z.literal(OrgPermissionSubjects.GithubOrgSyncManual).describe("The entity this permission pertains to."),
|
||||
action: CASL_ACTION_SCHEMA_NATIVE_ENUM(OrgPermissionActions).describe("Describe what action an entity can take.")
|
||||
}),
|
||||
z.object({
|
||||
subject: z.literal(OrgPermissionSubjects.Ldap).describe("The entity this permission pertains to."),
|
||||
action: CASL_ACTION_SCHEMA_NATIVE_ENUM(OrgPermissionActions).describe("Describe what action an entity can take.")
|
||||
@@ -214,7 +224,9 @@ export const OrgPermissionSchema = z.discriminatedUnion("subject", [
|
||||
}),
|
||||
z.object({
|
||||
subject: z.literal(OrgPermissionSubjects.AuditLogs).describe("The entity this permission pertains to."),
|
||||
action: CASL_ACTION_SCHEMA_NATIVE_ENUM(OrgPermissionActions).describe("Describe what action an entity can take.")
|
||||
action: CASL_ACTION_SCHEMA_NATIVE_ENUM(OrgPermissionAuditLogsActions).describe(
|
||||
"Describe what action an entity can take."
|
||||
)
|
||||
}),
|
||||
z.object({
|
||||
subject: z.literal(OrgPermissionSubjects.ProjectTemplates).describe("The entity this permission pertains to."),
|
||||
@@ -309,6 +321,11 @@ const buildAdminPermission = () => {
|
||||
can(OrgPermissionActions.Edit, OrgPermissionSubjects.GithubOrgSync);
|
||||
can(OrgPermissionActions.Delete, OrgPermissionSubjects.GithubOrgSync);
|
||||
|
||||
can(OrgPermissionActions.Read, OrgPermissionSubjects.GithubOrgSyncManual);
|
||||
can(OrgPermissionActions.Create, OrgPermissionSubjects.GithubOrgSyncManual);
|
||||
can(OrgPermissionActions.Edit, OrgPermissionSubjects.GithubOrgSyncManual);
|
||||
can(OrgPermissionActions.Delete, OrgPermissionSubjects.GithubOrgSyncManual);
|
||||
|
||||
can(OrgPermissionActions.Read, OrgPermissionSubjects.Ldap);
|
||||
can(OrgPermissionActions.Create, OrgPermissionSubjects.Ldap);
|
||||
can(OrgPermissionActions.Edit, OrgPermissionSubjects.Ldap);
|
||||
@@ -340,10 +357,7 @@ const buildAdminPermission = () => {
|
||||
can(OrgPermissionActions.Edit, OrgPermissionSubjects.Kms);
|
||||
can(OrgPermissionActions.Delete, OrgPermissionSubjects.Kms);
|
||||
|
||||
can(OrgPermissionActions.Read, OrgPermissionSubjects.AuditLogs);
|
||||
can(OrgPermissionActions.Create, OrgPermissionSubjects.AuditLogs);
|
||||
can(OrgPermissionActions.Edit, OrgPermissionSubjects.AuditLogs);
|
||||
can(OrgPermissionActions.Delete, OrgPermissionSubjects.AuditLogs);
|
||||
can(OrgPermissionAuditLogsActions.Read, OrgPermissionSubjects.AuditLogs);
|
||||
|
||||
can(OrgPermissionActions.Read, OrgPermissionSubjects.ProjectTemplates);
|
||||
can(OrgPermissionActions.Create, OrgPermissionSubjects.ProjectTemplates);
|
||||
@@ -416,7 +430,7 @@ const buildMemberPermission = () => {
|
||||
can(OrgPermissionIdentityActions.Edit, OrgPermissionSubjects.Identity);
|
||||
can(OrgPermissionIdentityActions.Delete, OrgPermissionSubjects.Identity);
|
||||
|
||||
can(OrgPermissionActions.Read, OrgPermissionSubjects.AuditLogs);
|
||||
can(OrgPermissionAuditLogsActions.Read, OrgPermissionSubjects.AuditLogs);
|
||||
|
||||
can(OrgPermissionAppConnectionActions.Connect, OrgPermissionSubjects.AppConnections);
|
||||
can(OrgPermissionGatewayActions.ListGateways, OrgPermissionSubjects.Gateway);
|
||||
|
@@ -164,6 +164,10 @@ export enum ProjectPermissionSecretEventActions {
|
||||
SubscribeImportMutations = "subscribe-on-import-mutations"
|
||||
}
|
||||
|
||||
export enum ProjectPermissionAuditLogsActions {
|
||||
Read = "read"
|
||||
}
|
||||
|
||||
export enum ProjectPermissionSub {
|
||||
Role = "role",
|
||||
Member = "member",
|
||||
@@ -304,7 +308,7 @@ export type ProjectPermissionSet =
|
||||
| [ProjectPermissionGroupActions, ProjectPermissionSub.Groups]
|
||||
| [ProjectPermissionActions, ProjectPermissionSub.Integrations]
|
||||
| [ProjectPermissionActions, ProjectPermissionSub.Webhooks]
|
||||
| [ProjectPermissionActions, ProjectPermissionSub.AuditLogs]
|
||||
| [ProjectPermissionAuditLogsActions, ProjectPermissionSub.AuditLogs]
|
||||
| [ProjectPermissionActions, ProjectPermissionSub.Environments]
|
||||
| [ProjectPermissionActions, ProjectPermissionSub.IpAllowList]
|
||||
| [ProjectPermissionActions, ProjectPermissionSub.Settings]
|
||||
@@ -645,7 +649,7 @@ const GeneralPermissionSchema = [
|
||||
}),
|
||||
z.object({
|
||||
subject: z.literal(ProjectPermissionSub.AuditLogs).describe("The entity this permission pertains to."),
|
||||
action: CASL_ACTION_SCHEMA_NATIVE_ENUM(ProjectPermissionActions).describe(
|
||||
action: CASL_ACTION_SCHEMA_NATIVE_ENUM(ProjectPermissionAuditLogsActions).describe(
|
||||
"Describe what action an entity can take."
|
||||
)
|
||||
}),
|
||||
|
@@ -13,7 +13,8 @@ export const PgSqlLock = {
|
||||
SecretRotationV2Creation: (folderId: string) => pgAdvisoryLockHashText(`secret-rotation-v2-creation:${folderId}`),
|
||||
CreateProject: (orgId: string) => pgAdvisoryLockHashText(`create-project:${orgId}`),
|
||||
CreateFolder: (envId: string, projectId: string) => pgAdvisoryLockHashText(`create-folder:${envId}-${projectId}`),
|
||||
SshInit: (projectId: string) => pgAdvisoryLockHashText(`ssh-bootstrap:${projectId}`)
|
||||
SshInit: (projectId: string) => pgAdvisoryLockHashText(`ssh-bootstrap:${projectId}`),
|
||||
IdentityLogin: (identityId: string, nonce: string) => pgAdvisoryLockHashText(`identity-login:${identityId}:${nonce}`)
|
||||
} as const;
|
||||
|
||||
// all the key prefixes used must be set here to avoid conflict
|
||||
@@ -40,6 +41,7 @@ export const KeyStorePrefixes = {
|
||||
SecretRotationLock: (rotationId: string) => `secret-rotation-v2-mutex-${rotationId}` as const,
|
||||
SecretScanningLock: (dataSourceId: string, resourceExternalId: string) =>
|
||||
`secret-scanning-v2-mutex-${dataSourceId}-${resourceExternalId}` as const,
|
||||
IdentityLockoutLock: (lockoutKey: string) => `identity-lockout-lock-${lockoutKey}` as const,
|
||||
CaOrderCertificateForSubscriberLock: (subscriberId: string) =>
|
||||
`ca-order-certificate-for-subscriber-lock-${subscriberId}` as const,
|
||||
SecretSyncLastRunTimestamp: (syncId: string) => `secret-sync-last-run-${syncId}` as const,
|
||||
|
@@ -166,7 +166,12 @@ export const UNIVERSAL_AUTH = {
|
||||
accessTokenNumUsesLimit:
|
||||
"The maximum number of times that an access token can be used; a value of 0 implies infinite number of uses.",
|
||||
accessTokenPeriod:
|
||||
"The period for an access token in seconds. This value will be referenced at renewal time. Default value is 0."
|
||||
"The period for an access token in seconds. This value will be referenced at renewal time. Default value is 0.",
|
||||
lockoutEnabled: "Whether the lockout feature is enabled.",
|
||||
lockoutThreshold: "The amount of times login must fail before locking the identity auth method.",
|
||||
lockoutDurationSeconds: "How long an identity auth method lockout lasts.",
|
||||
lockoutCounterResetSeconds:
|
||||
"How long to wait from the most recent failed login until resetting the lockout counter."
|
||||
},
|
||||
RETRIEVE: {
|
||||
identityId: "The ID of the identity to retrieve the auth method for."
|
||||
@@ -181,7 +186,12 @@ export const UNIVERSAL_AUTH = {
|
||||
accessTokenTTL: "The new lifetime for an access token in seconds.",
|
||||
accessTokenMaxTTL: "The new maximum lifetime for an access token in seconds.",
|
||||
accessTokenNumUsesLimit: "The new maximum number of times that an access token can be used.",
|
||||
accessTokenPeriod: "The new period for an access token in seconds."
|
||||
accessTokenPeriod: "The new period for an access token in seconds.",
|
||||
lockoutEnabled: "Whether the lockout feature is enabled.",
|
||||
lockoutThreshold: "The amount of times login must fail before locking the identity auth method.",
|
||||
lockoutDurationSeconds: "How long an identity auth method lockout lasts.",
|
||||
lockoutCounterResetSeconds:
|
||||
"How long to wait from the most recent failed login until resetting the lockout counter."
|
||||
},
|
||||
CREATE_CLIENT_SECRET: {
|
||||
identityId: "The ID of the identity to create a client secret for.",
|
||||
@@ -201,6 +211,9 @@ export const UNIVERSAL_AUTH = {
|
||||
identityId: "The ID of the identity to revoke the client secret from.",
|
||||
clientSecretId: "The ID of the client secret to revoke."
|
||||
},
|
||||
CLEAR_CLIENT_LOCKOUTS: {
|
||||
identityId: "The ID of the identity to clear the client lockouts from."
|
||||
},
|
||||
RENEW_ACCESS_TOKEN: {
|
||||
accessToken: "The access token to renew."
|
||||
},
|
||||
@@ -2148,7 +2161,9 @@ export const CertificateAuthorities = {
|
||||
directoryUrl: `The directory URL for the ACME Certificate Authority.`,
|
||||
accountEmail: `The email address for the ACME Certificate Authority.`,
|
||||
provider: `The DNS provider for the ACME Certificate Authority.`,
|
||||
hostedZoneId: `The hosted zone ID for the ACME Certificate Authority.`
|
||||
hostedZoneId: `The hosted zone ID for the ACME Certificate Authority.`,
|
||||
eabKid: `The External Account Binding (EAB) Key ID for the ACME Certificate Authority. Required if the ACME provider uses EAB.`,
|
||||
eabHmacKey: `The External Account Binding (EAB) HMAC key for the ACME Certificate Authority. Required if the ACME provider uses EAB.`
|
||||
},
|
||||
INTERNAL: {
|
||||
type: "The type of CA to create.",
|
||||
@@ -2312,6 +2327,15 @@ export const AppConnections = {
|
||||
OKTA: {
|
||||
instanceUrl: "The URL used to access your Okta organization.",
|
||||
apiToken: "The API token used to authenticate with Okta."
|
||||
},
|
||||
AZURE_ADCS: {
|
||||
adcsUrl:
|
||||
"The HTTPS URL of the Azure ADCS instance to connect with (e.g., 'https://adcs.yourdomain.com/certsrv').",
|
||||
username: "The username used to access Azure ADCS (format: 'DOMAIN\\username' or 'username@domain.com').",
|
||||
password: "The password used to access Azure ADCS.",
|
||||
sslRejectUnauthorized:
|
||||
"Whether or not to reject unauthorized SSL certificates (true/false). Set to false only in test environments with self-signed certificates.",
|
||||
sslCertificate: "The SSL certificate (PEM format) to use for secure connection."
|
||||
}
|
||||
}
|
||||
};
|
||||
|
121
backend/src/lib/ip/index.test.ts
Normal file
121
backend/src/lib/ip/index.test.ts
Normal file
@@ -0,0 +1,121 @@
|
||||
import { extractIPDetails, IPType, isValidCidr, isValidIp, isValidIpOrCidr } from "./index";
|
||||
|
||||
describe("IP Validation", () => {
|
||||
describe("isValidIp", () => {
|
||||
test("should validate IPv4 addresses with ports", () => {
|
||||
expect(isValidIp("192.168.1.1:8080")).toBe(true);
|
||||
expect(isValidIp("10.0.0.1:1234")).toBe(true);
|
||||
expect(isValidIp("172.16.0.1:80")).toBe(true);
|
||||
});
|
||||
|
||||
test("should validate IPv6 addresses with ports", () => {
|
||||
expect(isValidIp("[2001:db8::1]:8080")).toBe(true);
|
||||
expect(isValidIp("[fe80::1ff:fe23:4567:890a]:1234")).toBe(true);
|
||||
expect(isValidIp("[::1]:80")).toBe(true);
|
||||
});
|
||||
|
||||
test("should validate regular IPv4 addresses", () => {
|
||||
expect(isValidIp("192.168.1.1")).toBe(true);
|
||||
expect(isValidIp("10.0.0.1")).toBe(true);
|
||||
expect(isValidIp("172.16.0.1")).toBe(true);
|
||||
});
|
||||
|
||||
test("should validate regular IPv6 addresses", () => {
|
||||
expect(isValidIp("2001:db8::1")).toBe(true);
|
||||
expect(isValidIp("fe80::1ff:fe23:4567:890a")).toBe(true);
|
||||
expect(isValidIp("::1")).toBe(true);
|
||||
});
|
||||
|
||||
test("should reject invalid IP addresses", () => {
|
||||
expect(isValidIp("256.256.256.256")).toBe(false);
|
||||
expect(isValidIp("192.168.1")).toBe(false);
|
||||
expect(isValidIp("192.168.1.1.1")).toBe(false);
|
||||
expect(isValidIp("2001:db8::1::1")).toBe(false);
|
||||
expect(isValidIp("invalid")).toBe(false);
|
||||
});
|
||||
|
||||
test("should reject malformed IP addresses with ports", () => {
|
||||
expect(isValidIp("192.168.1.1:")).toBe(false);
|
||||
expect(isValidIp("192.168.1.1:abc")).toBe(false);
|
||||
expect(isValidIp("[2001:db8::1]")).toBe(false);
|
||||
expect(isValidIp("[2001:db8::1]:")).toBe(false);
|
||||
expect(isValidIp("[2001:db8::1]:abc")).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe("isValidCidr", () => {
|
||||
test("should validate IPv4 CIDR blocks", () => {
|
||||
expect(isValidCidr("192.168.1.0/24")).toBe(true);
|
||||
expect(isValidCidr("10.0.0.0/8")).toBe(true);
|
||||
expect(isValidCidr("172.16.0.0/16")).toBe(true);
|
||||
});
|
||||
|
||||
test("should validate IPv6 CIDR blocks", () => {
|
||||
expect(isValidCidr("2001:db8::/32")).toBe(true);
|
||||
expect(isValidCidr("fe80::/10")).toBe(true);
|
||||
expect(isValidCidr("::/0")).toBe(true);
|
||||
});
|
||||
|
||||
test("should reject invalid CIDR blocks", () => {
|
||||
expect(isValidCidr("192.168.1.0/33")).toBe(false);
|
||||
expect(isValidCidr("2001:db8::/129")).toBe(false);
|
||||
expect(isValidCidr("192.168.1.0/abc")).toBe(false);
|
||||
expect(isValidCidr("invalid/24")).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe("isValidIpOrCidr", () => {
|
||||
test("should validate both IP addresses and CIDR blocks", () => {
|
||||
expect(isValidIpOrCidr("192.168.1.1")).toBe(true);
|
||||
expect(isValidIpOrCidr("2001:db8::1")).toBe(true);
|
||||
expect(isValidIpOrCidr("192.168.1.0/24")).toBe(true);
|
||||
expect(isValidIpOrCidr("2001:db8::/32")).toBe(true);
|
||||
});
|
||||
|
||||
test("should reject invalid inputs", () => {
|
||||
expect(isValidIpOrCidr("invalid")).toBe(false);
|
||||
expect(isValidIpOrCidr("192.168.1.0/33")).toBe(false);
|
||||
expect(isValidIpOrCidr("2001:db8::/129")).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe("extractIPDetails", () => {
|
||||
test("should extract IPv4 address details", () => {
|
||||
const result = extractIPDetails("192.168.1.1");
|
||||
expect(result).toEqual({
|
||||
ipAddress: "192.168.1.1",
|
||||
type: IPType.IPV4
|
||||
});
|
||||
});
|
||||
|
||||
test("should extract IPv6 address details", () => {
|
||||
const result = extractIPDetails("2001:db8::1");
|
||||
expect(result).toEqual({
|
||||
ipAddress: "2001:db8::1",
|
||||
type: IPType.IPV6
|
||||
});
|
||||
});
|
||||
|
||||
test("should extract IPv4 CIDR details", () => {
|
||||
const result = extractIPDetails("192.168.1.0/24");
|
||||
expect(result).toEqual({
|
||||
ipAddress: "192.168.1.0",
|
||||
type: IPType.IPV4,
|
||||
prefix: 24
|
||||
});
|
||||
});
|
||||
|
||||
test("should extract IPv6 CIDR details", () => {
|
||||
const result = extractIPDetails("2001:db8::/32");
|
||||
expect(result).toEqual({
|
||||
ipAddress: "2001:db8::",
|
||||
type: IPType.IPV6,
|
||||
prefix: 32
|
||||
});
|
||||
});
|
||||
|
||||
test("should throw error for invalid IP", () => {
|
||||
expect(() => extractIPDetails("invalid")).toThrow("Failed to extract IP details");
|
||||
});
|
||||
});
|
||||
});
|
@@ -1,5 +1,7 @@
|
||||
import net from "node:net";
|
||||
|
||||
import RE2 from "re2";
|
||||
|
||||
import { ForbiddenRequestError } from "../errors";
|
||||
|
||||
export enum IPType {
|
||||
@@ -7,25 +9,55 @@ export enum IPType {
|
||||
IPV6 = "ipv6"
|
||||
}
|
||||
|
||||
const PORT_REGEX = new RE2(/^\d+$/);
|
||||
|
||||
/**
|
||||
* Strips port from IP address if present.
|
||||
* Handles both IPv4 (e.g. 1.2.3.4:1234) and IPv6 (e.g. [2001:db8::1]:8080) formats.
|
||||
* Returns the IP address without port and a boolean indicating if a port was present.
|
||||
*/
|
||||
const stripPort = (ip: string): { ipAddress: string } => {
|
||||
// Handle IPv6 with port (e.g. [2001:db8::1]:8080)
|
||||
if (ip.startsWith("[") && ip.includes("]:")) {
|
||||
const endBracketIndex = ip.indexOf("]");
|
||||
if (endBracketIndex === -1) return { ipAddress: ip };
|
||||
const ipPart = ip.slice(1, endBracketIndex);
|
||||
const portPart = ip.slice(endBracketIndex + 2);
|
||||
if (!portPart || !PORT_REGEX.test(portPart)) return { ipAddress: ip };
|
||||
return { ipAddress: ipPart };
|
||||
}
|
||||
|
||||
// Handle IPv4 with port (e.g. 1.2.3.4:1234)
|
||||
if (ip.includes(":")) {
|
||||
const [ipPart, portPart] = ip.split(":");
|
||||
if (!portPart || !PORT_REGEX.test(portPart)) return { ipAddress: ip };
|
||||
return { ipAddress: ipPart };
|
||||
}
|
||||
|
||||
return { ipAddress: ip };
|
||||
};
|
||||
|
||||
/**
|
||||
* Return details of IP [ip]:
|
||||
* - If [ip] is a specific IP address then return the IPv4/IPv6 address
|
||||
* - If [ip] is a subnet then return the network IPv4/IPv6 address and prefix
|
||||
*/
|
||||
export const extractIPDetails = (ip: string) => {
|
||||
if (net.isIPv4(ip))
|
||||
const { ipAddress } = stripPort(ip);
|
||||
|
||||
if (net.isIPv4(ipAddress))
|
||||
return {
|
||||
ipAddress: ip,
|
||||
ipAddress,
|
||||
type: IPType.IPV4
|
||||
};
|
||||
|
||||
if (net.isIPv6(ip))
|
||||
if (net.isIPv6(ipAddress))
|
||||
return {
|
||||
ipAddress: ip,
|
||||
ipAddress,
|
||||
type: IPType.IPV6
|
||||
};
|
||||
|
||||
const [ipNet, prefix] = ip.split("/");
|
||||
const [ipNet, prefix] = ipAddress.split("/");
|
||||
|
||||
let type;
|
||||
switch (net.isIP(ipNet)) {
|
||||
@@ -57,7 +89,8 @@ export const extractIPDetails = (ip: string) => {
|
||||
*
|
||||
*/
|
||||
export const isValidCidr = (cidr: string): boolean => {
|
||||
const [ip, prefix] = cidr.split("/");
|
||||
const { ipAddress } = stripPort(cidr);
|
||||
const [ip, prefix] = ipAddress.split("/");
|
||||
|
||||
const prefixNum = parseInt(prefix, 10);
|
||||
|
||||
@@ -90,13 +123,15 @@ export const isValidCidr = (cidr: string): boolean => {
|
||||
*
|
||||
*/
|
||||
export const isValidIpOrCidr = (ip: string): boolean => {
|
||||
const { ipAddress } = stripPort(ip);
|
||||
|
||||
// if the string contains a slash, treat it as a CIDR block
|
||||
if (ip.includes("/")) {
|
||||
return isValidCidr(ip);
|
||||
if (ipAddress.includes("/")) {
|
||||
return isValidCidr(ipAddress);
|
||||
}
|
||||
|
||||
// otherwise, treat it as a standalone IP address
|
||||
if (net.isIPv4(ip) || net.isIPv6(ip)) {
|
||||
if (net.isIPv4(ipAddress) || net.isIPv6(ipAddress)) {
|
||||
return true;
|
||||
}
|
||||
|
||||
@@ -104,7 +139,8 @@ export const isValidIpOrCidr = (ip: string): boolean => {
|
||||
};
|
||||
|
||||
export const isValidIp = (ip: string) => {
|
||||
return net.isIPv4(ip) || net.isIPv6(ip);
|
||||
const { ipAddress } = stripPort(ip);
|
||||
return net.isIPv4(ipAddress) || net.isIPv6(ipAddress);
|
||||
};
|
||||
|
||||
export type TIp = {
|
||||
@@ -112,6 +148,7 @@ export type TIp = {
|
||||
type: IPType;
|
||||
prefix: number;
|
||||
};
|
||||
|
||||
/**
|
||||
* Validates the IP address [ipAddress] against the trusted IPs [trustedIps].
|
||||
*/
|
||||
@@ -126,8 +163,9 @@ export const checkIPAgainstBlocklist = ({ ipAddress, trustedIps }: { ipAddress:
|
||||
}
|
||||
}
|
||||
|
||||
const { type } = extractIPDetails(ipAddress);
|
||||
const check = blockList.check(ipAddress, type);
|
||||
const { type, ipAddress: cleanIpAddress } = extractIPDetails(ipAddress);
|
||||
|
||||
const check = blockList.check(cleanIpAddress, type);
|
||||
|
||||
if (!check)
|
||||
throw new ForbiddenRequestError({
|
||||
|
43
backend/src/lib/retry/index.ts
Normal file
43
backend/src/lib/retry/index.ts
Normal file
@@ -0,0 +1,43 @@
|
||||
/* eslint-disable no-await-in-loop */
|
||||
interface GitHubApiError extends Error {
|
||||
status?: number;
|
||||
response?: {
|
||||
status?: number;
|
||||
headers?: {
|
||||
"x-ratelimit-reset"?: string;
|
||||
};
|
||||
};
|
||||
}
|
||||
|
||||
const delay = (ms: number) =>
|
||||
new Promise<void>((resolve) => {
|
||||
setTimeout(() => resolve(), ms);
|
||||
});
|
||||
|
||||
export const retryWithBackoff = async <T>(fn: () => Promise<T>, maxRetries = 3, baseDelay = 1000): Promise<T> => {
|
||||
let lastError: Error;
|
||||
|
||||
for (let attempt = 0; attempt <= maxRetries; attempt += 1) {
|
||||
try {
|
||||
return await fn();
|
||||
} catch (error) {
|
||||
lastError = error as Error;
|
||||
const gitHubError = error as GitHubApiError;
|
||||
const statusCode = gitHubError.status || gitHubError.response?.status;
|
||||
if (statusCode === 403) {
|
||||
const rateLimitReset = gitHubError.response?.headers?.["x-ratelimit-reset"];
|
||||
if (rateLimitReset) {
|
||||
const resetTime = parseInt(rateLimitReset, 10) * 1000;
|
||||
const waitTime = Math.max(resetTime - Date.now(), baseDelay);
|
||||
await delay(Math.min(waitTime, 60000));
|
||||
} else {
|
||||
await delay(baseDelay * 2 ** attempt);
|
||||
}
|
||||
} else if (attempt < maxRetries) {
|
||||
await delay(baseDelay * 2 ** attempt);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
throw lastError!;
|
||||
};
|
@@ -680,7 +680,8 @@ export const registerRoutes = async (
|
||||
kmsService,
|
||||
permissionService,
|
||||
groupDAL,
|
||||
userGroupMembershipDAL
|
||||
userGroupMembershipDAL,
|
||||
orgMembershipDAL
|
||||
});
|
||||
|
||||
const ldapService = ldapConfigServiceFactory({
|
||||
@@ -1456,7 +1457,8 @@ export const registerRoutes = async (
|
||||
identityOrgMembershipDAL,
|
||||
identityProjectDAL,
|
||||
licenseService,
|
||||
identityMetadataDAL
|
||||
identityMetadataDAL,
|
||||
keyStore
|
||||
});
|
||||
|
||||
const identityAuthTemplateService = identityAuthTemplateServiceFactory({
|
||||
@@ -1510,7 +1512,8 @@ export const registerRoutes = async (
|
||||
identityAccessTokenDAL,
|
||||
identityUaClientSecretDAL,
|
||||
identityUaDAL,
|
||||
licenseService
|
||||
licenseService,
|
||||
keyStore
|
||||
});
|
||||
|
||||
const identityKubernetesAuthService = identityKubernetesAuthServiceFactory({
|
||||
@@ -1744,7 +1747,8 @@ export const registerRoutes = async (
|
||||
const migrationService = externalMigrationServiceFactory({
|
||||
externalMigrationQueue,
|
||||
userDAL,
|
||||
permissionService
|
||||
permissionService,
|
||||
gatewayService
|
||||
});
|
||||
|
||||
const externalGroupOrgRoleMappingService = externalGroupOrgRoleMappingServiceFactory({
|
||||
|
@@ -15,6 +15,10 @@ import {
|
||||
} from "@app/services/app-connection/1password";
|
||||
import { Auth0ConnectionListItemSchema, SanitizedAuth0ConnectionSchema } from "@app/services/app-connection/auth0";
|
||||
import { AwsConnectionListItemSchema, SanitizedAwsConnectionSchema } from "@app/services/app-connection/aws";
|
||||
import {
|
||||
AzureADCSConnectionListItemSchema,
|
||||
SanitizedAzureADCSConnectionSchema
|
||||
} from "@app/services/app-connection/azure-adcs/azure-adcs-connection-schemas";
|
||||
import {
|
||||
AzureAppConfigurationConnectionListItemSchema,
|
||||
SanitizedAzureAppConfigurationConnectionSchema
|
||||
@@ -150,7 +154,8 @@ const SanitizedAppConnectionSchema = z.union([
|
||||
...SanitizedSupabaseConnectionSchema.options,
|
||||
...SanitizedDigitalOceanConnectionSchema.options,
|
||||
...SanitizedNetlifyConnectionSchema.options,
|
||||
...SanitizedOktaConnectionSchema.options
|
||||
...SanitizedOktaConnectionSchema.options,
|
||||
...SanitizedAzureADCSConnectionSchema.options
|
||||
]);
|
||||
|
||||
const AppConnectionOptionsSchema = z.discriminatedUnion("app", [
|
||||
@@ -190,7 +195,8 @@ const AppConnectionOptionsSchema = z.discriminatedUnion("app", [
|
||||
SupabaseConnectionListItemSchema,
|
||||
DigitalOceanConnectionListItemSchema,
|
||||
NetlifyConnectionListItemSchema,
|
||||
OktaConnectionListItemSchema
|
||||
OktaConnectionListItemSchema,
|
||||
AzureADCSConnectionListItemSchema
|
||||
]);
|
||||
|
||||
export const registerAppConnectionRouter = async (server: FastifyZodProvider) => {
|
||||
|
@@ -0,0 +1,18 @@
|
||||
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
|
||||
import {
|
||||
CreateAzureADCSConnectionSchema,
|
||||
SanitizedAzureADCSConnectionSchema,
|
||||
UpdateAzureADCSConnectionSchema
|
||||
} from "@app/services/app-connection/azure-adcs";
|
||||
|
||||
import { registerAppConnectionEndpoints } from "./app-connection-endpoints";
|
||||
|
||||
export const registerAzureADCSConnectionRouter = async (server: FastifyZodProvider) => {
|
||||
registerAppConnectionEndpoints({
|
||||
app: AppConnection.AzureADCS,
|
||||
server,
|
||||
sanitizedResponseSchema: SanitizedAzureADCSConnectionSchema,
|
||||
createSchema: CreateAzureADCSConnectionSchema,
|
||||
updateSchema: UpdateAzureADCSConnectionSchema
|
||||
});
|
||||
};
|
@@ -5,6 +5,7 @@ import { AppConnection } from "@app/services/app-connection/app-connection-enums
|
||||
import { registerOnePassConnectionRouter } from "./1password-connection-router";
|
||||
import { registerAuth0ConnectionRouter } from "./auth0-connection-router";
|
||||
import { registerAwsConnectionRouter } from "./aws-connection-router";
|
||||
import { registerAzureADCSConnectionRouter } from "./azure-adcs-connection-router";
|
||||
import { registerAzureAppConfigurationConnectionRouter } from "./azure-app-configuration-connection-router";
|
||||
import { registerAzureClientSecretsConnectionRouter } from "./azure-client-secrets-connection-router";
|
||||
import { registerAzureDevOpsConnectionRouter } from "./azure-devops-connection-router";
|
||||
@@ -50,6 +51,7 @@ export const APP_CONNECTION_REGISTER_ROUTER_MAP: Record<AppConnection, (server:
|
||||
[AppConnection.AzureAppConfiguration]: registerAzureAppConfigurationConnectionRouter,
|
||||
[AppConnection.AzureClientSecrets]: registerAzureClientSecretsConnectionRouter,
|
||||
[AppConnection.AzureDevOps]: registerAzureDevOpsConnectionRouter,
|
||||
[AppConnection.AzureADCS]: registerAzureADCSConnectionRouter,
|
||||
[AppConnection.Databricks]: registerDatabricksConnectionRouter,
|
||||
[AppConnection.Humanitec]: registerHumanitecConnectionRouter,
|
||||
[AppConnection.TerraformCloud]: registerTerraformCloudConnectionRouter,
|
||||
|
@@ -0,0 +1,78 @@
|
||||
import { z } from "zod";
|
||||
|
||||
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
|
||||
import { readLimit } from "@app/server/config/rateLimiter";
|
||||
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
|
||||
import { AuthMode } from "@app/services/auth/auth-type";
|
||||
import {
|
||||
AzureAdCsCertificateAuthoritySchema,
|
||||
CreateAzureAdCsCertificateAuthoritySchema,
|
||||
UpdateAzureAdCsCertificateAuthoritySchema
|
||||
} from "@app/services/certificate-authority/azure-ad-cs/azure-ad-cs-certificate-authority-schemas";
|
||||
import { CaType } from "@app/services/certificate-authority/certificate-authority-enums";
|
||||
|
||||
import { registerCertificateAuthorityEndpoints } from "./certificate-authority-endpoints";
|
||||
|
||||
export const registerAzureAdCsCertificateAuthorityRouter = async (server: FastifyZodProvider) => {
|
||||
registerCertificateAuthorityEndpoints({
|
||||
caType: CaType.AZURE_AD_CS,
|
||||
server,
|
||||
responseSchema: AzureAdCsCertificateAuthoritySchema,
|
||||
createSchema: CreateAzureAdCsCertificateAuthoritySchema,
|
||||
updateSchema: UpdateAzureAdCsCertificateAuthoritySchema
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "GET",
|
||||
url: "/:caId/templates",
|
||||
config: {
|
||||
rateLimit: readLimit
|
||||
},
|
||||
schema: {
|
||||
hide: false,
|
||||
description: "Get available certificate templates from Azure AD CS CA",
|
||||
params: z.object({
|
||||
caId: z.string().describe("Azure AD CS CA ID")
|
||||
}),
|
||||
querystring: z.object({
|
||||
projectId: z.string().describe("Project ID")
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
templates: z.array(
|
||||
z.object({
|
||||
id: z.string().describe("Template identifier"),
|
||||
name: z.string().describe("Template display name"),
|
||||
description: z.string().optional().describe("Template description")
|
||||
})
|
||||
)
|
||||
})
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
handler: async (req) => {
|
||||
const templates = await server.services.certificateAuthority.getAzureAdcsTemplates({
|
||||
caId: req.params.caId,
|
||||
projectId: req.query.projectId,
|
||||
actor: req.permission.type,
|
||||
actorId: req.permission.id,
|
||||
actorAuthMethod: req.permission.authMethod,
|
||||
actorOrgId: req.permission.orgId
|
||||
});
|
||||
|
||||
await server.services.auditLog.createAuditLog({
|
||||
...req.auditLogInfo,
|
||||
projectId: req.query.projectId,
|
||||
event: {
|
||||
type: EventType.GET_AZURE_AD_TEMPLATES,
|
||||
metadata: {
|
||||
caId: req.params.caId,
|
||||
amount: templates.length
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
return { templates };
|
||||
}
|
||||
});
|
||||
};
|
@@ -1,6 +1,7 @@
|
||||
import { CaType } from "@app/services/certificate-authority/certificate-authority-enums";
|
||||
|
||||
import { registerAcmeCertificateAuthorityRouter } from "./acme-certificate-authority-router";
|
||||
import { registerAzureAdCsCertificateAuthorityRouter } from "./azure-ad-cs-certificate-authority-router";
|
||||
import { registerInternalCertificateAuthorityRouter } from "./internal-certificate-authority-router";
|
||||
|
||||
export * from "./internal-certificate-authority-router";
|
||||
@@ -8,5 +9,6 @@ export * from "./internal-certificate-authority-router";
|
||||
export const CERTIFICATE_AUTHORITY_REGISTER_ROUTER_MAP: Record<CaType, (server: FastifyZodProvider) => Promise<void>> =
|
||||
{
|
||||
[CaType.INTERNAL]: registerInternalCertificateAuthorityRouter,
|
||||
[CaType.ACME]: registerAcmeCertificateAuthorityRouter
|
||||
[CaType.ACME]: registerAcmeCertificateAuthorityRouter,
|
||||
[CaType.AZURE_AD_CS]: registerAzureAdCsCertificateAuthorityRouter
|
||||
};
|
||||
|
@@ -703,6 +703,9 @@ export const registerDashboardRouter = async (server: FastifyZodProvider) => {
|
||||
// prevent older projects from accessing endpoint
|
||||
if (!shouldUseSecretV2Bridge) throw new BadRequestError({ message: "Project version not supported" });
|
||||
|
||||
// verify folder exists and user has project permission
|
||||
await server.services.folder.getFolderByPath({ projectId, environment, secretPath }, req.permission);
|
||||
|
||||
const tags = req.query.tags?.split(",") ?? [];
|
||||
|
||||
let remainingLimit = limit;
|
||||
|
@@ -250,7 +250,8 @@ export const registerIdentityRouter = async (server: FastifyZodProvider) => {
|
||||
description: true
|
||||
}).optional(),
|
||||
identity: IdentitiesSchema.pick({ name: true, id: true, hasDeleteProtection: true }).extend({
|
||||
authMethods: z.array(z.string())
|
||||
authMethods: z.array(z.string()),
|
||||
activeLockoutAuthMethods: z.array(z.string())
|
||||
})
|
||||
})
|
||||
})
|
||||
|
@@ -137,7 +137,21 @@ export const registerIdentityUaRouter = async (server: FastifyZodProvider) => {
|
||||
.min(0)
|
||||
.default(0)
|
||||
.describe(UNIVERSAL_AUTH.ATTACH.accessTokenNumUsesLimit),
|
||||
accessTokenPeriod: z.number().int().min(0).default(0).describe(UNIVERSAL_AUTH.ATTACH.accessTokenPeriod)
|
||||
accessTokenPeriod: z.number().int().min(0).default(0).describe(UNIVERSAL_AUTH.ATTACH.accessTokenPeriod),
|
||||
lockoutEnabled: z.boolean().default(true).describe(UNIVERSAL_AUTH.ATTACH.lockoutEnabled),
|
||||
lockoutThreshold: z.number().min(1).max(30).default(3).describe(UNIVERSAL_AUTH.ATTACH.lockoutThreshold),
|
||||
lockoutDurationSeconds: z
|
||||
.number()
|
||||
.min(30)
|
||||
.max(86400)
|
||||
.default(300)
|
||||
.describe(UNIVERSAL_AUTH.ATTACH.lockoutDurationSeconds),
|
||||
lockoutCounterResetSeconds: z
|
||||
.number()
|
||||
.min(5)
|
||||
.max(3600)
|
||||
.default(30)
|
||||
.describe(UNIVERSAL_AUTH.ATTACH.lockoutCounterResetSeconds)
|
||||
})
|
||||
.refine(
|
||||
(val) => val.accessTokenTTL <= val.accessTokenMaxTTL,
|
||||
@@ -171,7 +185,11 @@ export const registerIdentityUaRouter = async (server: FastifyZodProvider) => {
|
||||
accessTokenMaxTTL: identityUniversalAuth.accessTokenMaxTTL,
|
||||
accessTokenTrustedIps: identityUniversalAuth.accessTokenTrustedIps as TIdentityTrustedIp[],
|
||||
clientSecretTrustedIps: identityUniversalAuth.clientSecretTrustedIps as TIdentityTrustedIp[],
|
||||
accessTokenNumUsesLimit: identityUniversalAuth.accessTokenNumUsesLimit
|
||||
accessTokenNumUsesLimit: identityUniversalAuth.accessTokenNumUsesLimit,
|
||||
lockoutEnabled: identityUniversalAuth.lockoutEnabled,
|
||||
lockoutThreshold: identityUniversalAuth.lockoutThreshold,
|
||||
lockoutDurationSeconds: identityUniversalAuth.lockoutDurationSeconds,
|
||||
lockoutCounterResetSeconds: identityUniversalAuth.lockoutCounterResetSeconds
|
||||
}
|
||||
}
|
||||
});
|
||||
@@ -243,7 +261,21 @@ export const registerIdentityUaRouter = async (server: FastifyZodProvider) => {
|
||||
.min(0)
|
||||
.max(315360000)
|
||||
.optional()
|
||||
.describe(UNIVERSAL_AUTH.UPDATE.accessTokenPeriod)
|
||||
.describe(UNIVERSAL_AUTH.UPDATE.accessTokenPeriod),
|
||||
lockoutEnabled: z.boolean().optional().describe(UNIVERSAL_AUTH.UPDATE.lockoutEnabled),
|
||||
lockoutThreshold: z.number().min(1).max(30).optional().describe(UNIVERSAL_AUTH.UPDATE.lockoutThreshold),
|
||||
lockoutDurationSeconds: z
|
||||
.number()
|
||||
.min(30)
|
||||
.max(86400)
|
||||
.optional()
|
||||
.describe(UNIVERSAL_AUTH.UPDATE.lockoutDurationSeconds),
|
||||
lockoutCounterResetSeconds: z
|
||||
.number()
|
||||
.min(5)
|
||||
.max(3600)
|
||||
.optional()
|
||||
.describe(UNIVERSAL_AUTH.UPDATE.lockoutCounterResetSeconds)
|
||||
})
|
||||
.refine(
|
||||
(val) => (val.accessTokenMaxTTL && val.accessTokenTTL ? val.accessTokenTTL <= val.accessTokenMaxTTL : true),
|
||||
@@ -276,7 +308,11 @@ export const registerIdentityUaRouter = async (server: FastifyZodProvider) => {
|
||||
accessTokenMaxTTL: identityUniversalAuth.accessTokenMaxTTL,
|
||||
accessTokenTrustedIps: identityUniversalAuth.accessTokenTrustedIps as TIdentityTrustedIp[],
|
||||
clientSecretTrustedIps: identityUniversalAuth.clientSecretTrustedIps as TIdentityTrustedIp[],
|
||||
accessTokenNumUsesLimit: identityUniversalAuth.accessTokenNumUsesLimit
|
||||
accessTokenNumUsesLimit: identityUniversalAuth.accessTokenNumUsesLimit,
|
||||
lockoutEnabled: identityUniversalAuth.lockoutEnabled,
|
||||
lockoutThreshold: identityUniversalAuth.lockoutThreshold,
|
||||
lockoutDurationSeconds: identityUniversalAuth.lockoutDurationSeconds,
|
||||
lockoutCounterResetSeconds: identityUniversalAuth.lockoutCounterResetSeconds
|
||||
}
|
||||
}
|
||||
});
|
||||
@@ -594,4 +630,53 @@ export const registerIdentityUaRouter = async (server: FastifyZodProvider) => {
|
||||
return { clientSecretData };
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "POST",
|
||||
url: "/universal-auth/identities/:identityId/clear-lockouts",
|
||||
config: {
|
||||
rateLimit: writeLimit
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
schema: {
|
||||
hide: false,
|
||||
tags: [ApiDocsTags.UniversalAuth],
|
||||
description: "Clear Universal Auth Lockouts for identity",
|
||||
security: [
|
||||
{
|
||||
bearerAuth: []
|
||||
}
|
||||
],
|
||||
params: z.object({
|
||||
identityId: z.string().describe(UNIVERSAL_AUTH.CLEAR_CLIENT_LOCKOUTS.identityId)
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
deleted: z.number()
|
||||
})
|
||||
}
|
||||
},
|
||||
handler: async (req) => {
|
||||
const clearLockoutsData = await server.services.identityUa.clearUniversalAuthLockouts({
|
||||
actor: req.permission.type,
|
||||
actorId: req.permission.id,
|
||||
actorAuthMethod: req.permission.authMethod,
|
||||
actorOrgId: req.permission.orgId,
|
||||
identityId: req.params.identityId
|
||||
});
|
||||
|
||||
await server.services.auditLog.createAuditLog({
|
||||
...req.auditLogInfo,
|
||||
orgId: clearLockoutsData.orgId,
|
||||
event: {
|
||||
type: EventType.CLEAR_IDENTITY_UNIVERSAL_AUTH_LOCKOUTS,
|
||||
metadata: {
|
||||
identityId: clearLockoutsData.identityId
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
return clearLockoutsData;
|
||||
}
|
||||
});
|
||||
};
|
||||
|
@@ -1,3 +1,4 @@
|
||||
import RE2 from "re2";
|
||||
import { z } from "zod";
|
||||
|
||||
import { CertificatesSchema } from "@app/db/schemas";
|
||||
@@ -112,7 +113,88 @@ export const registerPkiSubscriberRouter = async (server: FastifyZodProvider) =>
|
||||
.transform((arr) => Array.from(new Set(arr)))
|
||||
.describe(PKI_SUBSCRIBERS.CREATE.extendedKeyUsages),
|
||||
enableAutoRenewal: z.boolean().optional().describe(PKI_SUBSCRIBERS.CREATE.enableAutoRenewal),
|
||||
autoRenewalPeriodInDays: z.number().min(1).optional().describe(PKI_SUBSCRIBERS.CREATE.autoRenewalPeriodInDays)
|
||||
autoRenewalPeriodInDays: z.number().min(1).optional().describe(PKI_SUBSCRIBERS.CREATE.autoRenewalPeriodInDays),
|
||||
properties: z
|
||||
.object({
|
||||
azureTemplateType: z.string().optional().describe("Azure ADCS Certificate Template Type"),
|
||||
organization: z
|
||||
.string()
|
||||
.trim()
|
||||
.min(1)
|
||||
.max(64, "Organization cannot exceed 64 characters")
|
||||
.regex(
|
||||
new RE2('^[^,=+<>#;\\\\"/\\r\\n\\t]*$'),
|
||||
'Organization contains invalid characters: , = + < > # ; \\ " / \\r \\n \\t'
|
||||
)
|
||||
.regex(
|
||||
new RE2("^[^\\\\s\\\\-_.]+.*[^\\\\s\\\\-_.]+$|^[^\\\\s\\\\-_.]{1}$"),
|
||||
"Organization cannot start or end with spaces, hyphens, underscores, or periods"
|
||||
)
|
||||
.optional()
|
||||
.describe("Organization (O) - Maximum 64 characters, no special DN characters"),
|
||||
organizationalUnit: z
|
||||
.string()
|
||||
.trim()
|
||||
.min(1)
|
||||
.max(64, "Organizational Unit cannot exceed 64 characters")
|
||||
.regex(
|
||||
new RE2('^[^,=+<>#;\\\\"/\\r\\n\\t]*$'),
|
||||
'Organizational Unit contains invalid characters: , = + < > # ; \\ " / \\r \\n \\t'
|
||||
)
|
||||
.regex(
|
||||
new RE2("^[^\\\\s\\\\-_.]+.*[^\\\\s\\\\-_.]+$|^[^\\\\s\\\\-_.]{1}$"),
|
||||
"Organizational Unit cannot start or end with spaces, hyphens, underscores, or periods"
|
||||
)
|
||||
.optional()
|
||||
.describe("Organizational Unit (OU) - Maximum 64 characters, no special DN characters"),
|
||||
country: z
|
||||
.string()
|
||||
.trim()
|
||||
.length(2, "Country must be exactly 2 characters")
|
||||
.regex(new RE2("^[A-Z]{2}$"), "Country must be exactly 2 uppercase letters")
|
||||
.optional()
|
||||
.describe("Country (C) - Two uppercase letter country code (e.g., US, CA, GB)"),
|
||||
state: z
|
||||
.string()
|
||||
.trim()
|
||||
.min(1)
|
||||
.max(64, "State cannot exceed 64 characters")
|
||||
.regex(
|
||||
new RE2('^[^,=+<>#;\\\\"/\\r\\n\\t]*$'),
|
||||
'State contains invalid characters: , = + < > # ; \\ " / \\r \\n \\t'
|
||||
)
|
||||
.regex(
|
||||
new RE2("^[^\\\\s\\\\-_.]+.*[^\\\\s\\\\-_.]+$|^[^\\\\s\\\\-_.]{1}$"),
|
||||
"State cannot start or end with spaces, hyphens, underscores, or periods"
|
||||
)
|
||||
.optional()
|
||||
.describe("State/Province (ST) - Maximum 64 characters, no special DN characters"),
|
||||
locality: z
|
||||
.string()
|
||||
.trim()
|
||||
.min(1)
|
||||
.max(64, "Locality cannot exceed 64 characters")
|
||||
.regex(
|
||||
new RE2('^[^,=+<>#;\\\\"/\\r\\n\\t]*$'),
|
||||
'Locality contains invalid characters: , = + < > # ; \\ " / \\r \\n \\t'
|
||||
)
|
||||
.regex(
|
||||
new RE2("^[^\\\\s\\\\-_.]+.*[^\\\\s\\\\-_.]+$|^[^\\\\s\\\\-_.]{1}$"),
|
||||
"Locality cannot start or end with spaces, hyphens, underscores, or periods"
|
||||
)
|
||||
.optional()
|
||||
.describe("Locality (L) - Maximum 64 characters, no special DN characters"),
|
||||
emailAddress: z
|
||||
.string()
|
||||
.trim()
|
||||
.email("Email Address must be a valid email format")
|
||||
.min(6, "Email Address must be at least 6 characters")
|
||||
.max(64, "Email Address cannot exceed 64 characters")
|
||||
.optional()
|
||||
.describe("Email Address - Valid email format between 6 and 64 characters")
|
||||
})
|
||||
.optional()
|
||||
.describe("Additional subscriber properties and subject fields")
|
||||
}),
|
||||
response: {
|
||||
200: sanitizedPkiSubscriber
|
||||
@@ -199,7 +281,88 @@ export const registerPkiSubscriberRouter = async (server: FastifyZodProvider) =>
|
||||
.optional()
|
||||
.describe(PKI_SUBSCRIBERS.UPDATE.extendedKeyUsages),
|
||||
enableAutoRenewal: z.boolean().optional().describe(PKI_SUBSCRIBERS.UPDATE.enableAutoRenewal),
|
||||
autoRenewalPeriodInDays: z.number().min(1).optional().describe(PKI_SUBSCRIBERS.UPDATE.autoRenewalPeriodInDays)
|
||||
autoRenewalPeriodInDays: z.number().min(1).optional().describe(PKI_SUBSCRIBERS.UPDATE.autoRenewalPeriodInDays),
|
||||
properties: z
|
||||
.object({
|
||||
azureTemplateType: z.string().optional().describe("Azure ADCS Certificate Template Type"),
|
||||
organization: z
|
||||
.string()
|
||||
.trim()
|
||||
.min(1)
|
||||
.max(64, "Organization cannot exceed 64 characters")
|
||||
.regex(
|
||||
new RE2('^[^,=+<>#;\\\\"/\\r\\n\\t]*$'),
|
||||
'Organization contains invalid characters: , = + < > # ; \\ " / \\r \\n \\t'
|
||||
)
|
||||
.regex(
|
||||
new RE2("^[^\\\\s\\\\-_.]+.*[^\\\\s\\\\-_.]+$|^[^\\\\s\\\\-_.]{1}$"),
|
||||
"Organization cannot start or end with spaces, hyphens, underscores, or periods"
|
||||
)
|
||||
.optional()
|
||||
.describe("Organization (O) - Maximum 64 characters, no special DN characters"),
|
||||
organizationalUnit: z
|
||||
.string()
|
||||
.trim()
|
||||
.min(1)
|
||||
.max(64, "Organizational Unit cannot exceed 64 characters")
|
||||
.regex(
|
||||
new RE2('^[^,=+<>#;\\\\"/\\r\\n\\t]*$'),
|
||||
'Organizational Unit contains invalid characters: , = + < > # ; \\ " / \\r \\n \\t'
|
||||
)
|
||||
.regex(
|
||||
new RE2("^[^\\\\s\\\\-_.]+.*[^\\\\s\\\\-_.]+$|^[^\\\\s\\\\-_.]{1}$"),
|
||||
"Organizational Unit cannot start or end with spaces, hyphens, underscores, or periods"
|
||||
)
|
||||
.optional()
|
||||
.describe("Organizational Unit (OU) - Maximum 64 characters, no special DN characters"),
|
||||
country: z
|
||||
.string()
|
||||
.trim()
|
||||
.length(2, "Country must be exactly 2 characters")
|
||||
.regex(new RE2("^[A-Z]{2}$"), "Country must be exactly 2 uppercase letters")
|
||||
.optional()
|
||||
.describe("Country (C) - Two uppercase letter country code (e.g., US, CA, GB)"),
|
||||
state: z
|
||||
.string()
|
||||
.trim()
|
||||
.min(1)
|
||||
.max(64, "State cannot exceed 64 characters")
|
||||
.regex(
|
||||
new RE2('^[^,=+<>#;\\\\"/\\r\\n\\t]*$'),
|
||||
'State contains invalid characters: , = + < > # ; \\ " / \\r \\n \\t'
|
||||
)
|
||||
.regex(
|
||||
new RE2("^[^\\\\s\\\\-_.]+.*[^\\\\s\\\\-_.]+$|^[^\\\\s\\\\-_.]{1}$"),
|
||||
"State cannot start or end with spaces, hyphens, underscores, or periods"
|
||||
)
|
||||
.optional()
|
||||
.describe("State/Province (ST) - Maximum 64 characters, no special DN characters"),
|
||||
locality: z
|
||||
.string()
|
||||
.trim()
|
||||
.min(1)
|
||||
.max(64, "Locality cannot exceed 64 characters")
|
||||
.regex(
|
||||
new RE2('^[^,=+<>#;\\\\"/\\r\\n\\t]*$'),
|
||||
'Locality contains invalid characters: , = + < > # ; \\ " / \\r \\n \\t'
|
||||
)
|
||||
.regex(
|
||||
new RE2("^[^\\\\s\\\\-_.]+.*[^\\\\s\\\\-_.]+$|^[^\\\\s\\\\-_.]{1}$"),
|
||||
"Locality cannot start or end with spaces, hyphens, underscores, or periods"
|
||||
)
|
||||
.optional()
|
||||
.describe("Locality (L) - Maximum 64 characters, no special DN characters"),
|
||||
emailAddress: z
|
||||
.string()
|
||||
.trim()
|
||||
.email("Email Address must be a valid email format")
|
||||
.min(6, "Email Address must be at least 6 characters")
|
||||
.max(64, "Email Address cannot exceed 64 characters")
|
||||
.optional()
|
||||
.describe("Email Address - Valid email format between 6 and 64 characters")
|
||||
})
|
||||
.optional()
|
||||
.describe("Additional subscriber properties and subject fields")
|
||||
}),
|
||||
response: {
|
||||
200: sanitizedPkiSubscriber
|
||||
|
@@ -6,12 +6,14 @@ import { readLimit } from "@app/server/config/rateLimiter";
|
||||
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
|
||||
import { AuthMode } from "@app/services/auth/auth-type";
|
||||
import { AcmeCertificateAuthoritySchema } from "@app/services/certificate-authority/acme/acme-certificate-authority-schemas";
|
||||
import { AzureAdCsCertificateAuthoritySchema } from "@app/services/certificate-authority/azure-ad-cs/azure-ad-cs-certificate-authority-schemas";
|
||||
import { CaType } from "@app/services/certificate-authority/certificate-authority-enums";
|
||||
import { InternalCertificateAuthoritySchema } from "@app/services/certificate-authority/internal/internal-certificate-authority-schemas";
|
||||
|
||||
const CertificateAuthoritySchema = z.discriminatedUnion("type", [
|
||||
InternalCertificateAuthoritySchema,
|
||||
AcmeCertificateAuthoritySchema
|
||||
AcmeCertificateAuthoritySchema,
|
||||
AzureAdCsCertificateAuthoritySchema
|
||||
]);
|
||||
|
||||
export const registerCaRouter = async (server: FastifyZodProvider) => {
|
||||
@@ -52,19 +54,31 @@ export const registerCaRouter = async (server: FastifyZodProvider) => {
|
||||
req.permission
|
||||
);
|
||||
|
||||
const azureAdCsCas = await server.services.certificateAuthority.listCertificateAuthoritiesByProjectId(
|
||||
{
|
||||
projectId: req.query.projectId,
|
||||
type: CaType.AZURE_AD_CS
|
||||
},
|
||||
req.permission
|
||||
);
|
||||
|
||||
await server.services.auditLog.createAuditLog({
|
||||
...req.auditLogInfo,
|
||||
projectId: req.query.projectId,
|
||||
event: {
|
||||
type: EventType.GET_CAS,
|
||||
metadata: {
|
||||
caIds: [...(internalCas ?? []).map((ca) => ca.id), ...(acmeCas ?? []).map((ca) => ca.id)]
|
||||
caIds: [
|
||||
...(internalCas ?? []).map((ca) => ca.id),
|
||||
...(acmeCas ?? []).map((ca) => ca.id),
|
||||
...(azureAdCsCas ?? []).map((ca) => ca.id)
|
||||
]
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
return {
|
||||
certificateAuthorities: [...(internalCas ?? []), ...(acmeCas ?? [])]
|
||||
certificateAuthorities: [...(internalCas ?? []), ...(acmeCas ?? []), ...(azureAdCsCas ?? [])]
|
||||
};
|
||||
}
|
||||
});
|
||||
|
@@ -2,10 +2,13 @@ import fastifyMultipart from "@fastify/multipart";
|
||||
import { z } from "zod";
|
||||
|
||||
import { BadRequestError } from "@app/lib/errors";
|
||||
import { writeLimit } from "@app/server/config/rateLimiter";
|
||||
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
|
||||
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
|
||||
import { AuthMode } from "@app/services/auth/auth-type";
|
||||
import { VaultMappingType } from "@app/services/external-migration/external-migration-types";
|
||||
import {
|
||||
ExternalMigrationProviders,
|
||||
VaultMappingType
|
||||
} from "@app/services/external-migration/external-migration-types";
|
||||
|
||||
const MB25_IN_BYTES = 26214400;
|
||||
|
||||
@@ -66,7 +69,8 @@ export const registerExternalMigrationRouter = async (server: FastifyZodProvider
|
||||
vaultAccessToken: z.string(),
|
||||
vaultNamespace: z.string().trim().optional(),
|
||||
vaultUrl: z.string(),
|
||||
mappingType: z.nativeEnum(VaultMappingType)
|
||||
mappingType: z.nativeEnum(VaultMappingType),
|
||||
gatewayId: z.string().optional()
|
||||
})
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT]),
|
||||
@@ -80,4 +84,33 @@ export const registerExternalMigrationRouter = async (server: FastifyZodProvider
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
// GET /custom-migration-enabled/:provider
// Reports whether a custom Vault migration is enabled for the authenticated
// caller's organization and the given provider (delegates the decision to the
// migration service). JWT auth only; read-rate-limited.
server.route({
  method: "GET",
  url: "/custom-migration-enabled/:provider",
  config: {
    rateLimit: readLimit
  },
  schema: {
    // Provider must be one of the supported external-migration providers
    params: z.object({
      provider: z.nativeEnum(ExternalMigrationProviders)
    }),
    response: {
      200: z.object({
        enabled: z.boolean()
      })
    }
  },
  onRequest: verifyAuth([AuthMode.JWT]),
  handler: async (req) => {
    // NOTE(review): presumably checks an org-level feature/license flag for
    // custom Vault migrations — confirm against the migration service.
    const enabled = await server.services.migration.hasCustomVaultMigration({
      actorId: req.permission.id,
      actor: req.permission.type,
      actorOrgId: req.permission.orgId,
      actorAuthMethod: req.permission.authMethod,
      provider: req.params.provider
    });
    return { enabled };
  }
});
|
||||
};
|
||||
|
@@ -419,6 +419,7 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
|
||||
200: z.object({
|
||||
secret: secretRawSchema.extend({
|
||||
secretValueHidden: z.boolean(),
|
||||
secretPath: z.string(),
|
||||
tags: SanitizedTagSchema.array().optional(),
|
||||
secretMetadata: ResourceMetadataSchema.optional()
|
||||
})
|
||||
|
@@ -8,6 +8,7 @@ export enum AppConnection {
|
||||
AzureAppConfiguration = "azure-app-configuration",
|
||||
AzureClientSecrets = "azure-client-secrets",
|
||||
AzureDevOps = "azure-devops",
|
||||
AzureADCS = "azure-adcs",
|
||||
Humanitec = "humanitec",
|
||||
TerraformCloud = "terraform-cloud",
|
||||
Vercel = "vercel",
|
||||
|
@@ -31,6 +31,11 @@ import {
|
||||
} from "./app-connection-types";
|
||||
import { Auth0ConnectionMethod, getAuth0ConnectionListItem, validateAuth0ConnectionCredentials } from "./auth0";
|
||||
import { AwsConnectionMethod, getAwsConnectionListItem, validateAwsConnectionCredentials } from "./aws";
|
||||
import { AzureADCSConnectionMethod } from "./azure-adcs";
|
||||
import {
|
||||
getAzureADCSConnectionListItem,
|
||||
validateAzureADCSConnectionCredentials
|
||||
} from "./azure-adcs/azure-adcs-connection-fns";
|
||||
import {
|
||||
AzureAppConfigurationConnectionMethod,
|
||||
getAzureAppConfigurationConnectionListItem,
|
||||
@@ -136,6 +141,7 @@ export const listAppConnectionOptions = () => {
|
||||
getAzureKeyVaultConnectionListItem(),
|
||||
getAzureAppConfigurationConnectionListItem(),
|
||||
getAzureDevopsConnectionListItem(),
|
||||
getAzureADCSConnectionListItem(),
|
||||
getDatabricksConnectionListItem(),
|
||||
getHumanitecConnectionListItem(),
|
||||
getTerraformCloudConnectionListItem(),
|
||||
@@ -227,6 +233,7 @@ export const validateAppConnectionCredentials = async (
|
||||
[AppConnection.AzureClientSecrets]:
|
||||
validateAzureClientSecretsConnectionCredentials as TAppConnectionCredentialsValidator,
|
||||
[AppConnection.AzureDevOps]: validateAzureDevOpsConnectionCredentials as TAppConnectionCredentialsValidator,
|
||||
[AppConnection.AzureADCS]: validateAzureADCSConnectionCredentials as TAppConnectionCredentialsValidator,
|
||||
[AppConnection.Humanitec]: validateHumanitecConnectionCredentials as TAppConnectionCredentialsValidator,
|
||||
[AppConnection.Postgres]: validateSqlConnectionCredentials as TAppConnectionCredentialsValidator,
|
||||
[AppConnection.MsSql]: validateSqlConnectionCredentials as TAppConnectionCredentialsValidator,
|
||||
@@ -300,6 +307,7 @@ export const getAppConnectionMethodName = (method: TAppConnection["method"]) =>
|
||||
case MsSqlConnectionMethod.UsernameAndPassword:
|
||||
case MySqlConnectionMethod.UsernameAndPassword:
|
||||
case OracleDBConnectionMethod.UsernameAndPassword:
|
||||
case AzureADCSConnectionMethod.UsernamePassword:
|
||||
return "Username & Password";
|
||||
case WindmillConnectionMethod.AccessToken:
|
||||
case HCVaultConnectionMethod.AccessToken:
|
||||
@@ -357,6 +365,7 @@ export const TRANSITION_CONNECTION_CREDENTIALS_TO_PLATFORM: Record<
|
||||
[AppConnection.AzureKeyVault]: platformManagedCredentialsNotSupported,
|
||||
[AppConnection.AzureAppConfiguration]: platformManagedCredentialsNotSupported,
|
||||
[AppConnection.AzureDevOps]: platformManagedCredentialsNotSupported,
|
||||
[AppConnection.AzureADCS]: platformManagedCredentialsNotSupported,
|
||||
[AppConnection.Humanitec]: platformManagedCredentialsNotSupported,
|
||||
[AppConnection.Postgres]: transferSqlConnectionCredentialsToPlatform as TAppConnectionTransitionCredentialsToPlatform,
|
||||
[AppConnection.MsSql]: transferSqlConnectionCredentialsToPlatform as TAppConnectionTransitionCredentialsToPlatform,
|
||||
|
@@ -9,6 +9,7 @@ export const APP_CONNECTION_NAME_MAP: Record<AppConnection, string> = {
|
||||
[AppConnection.AzureAppConfiguration]: "Azure App Configuration",
|
||||
[AppConnection.AzureClientSecrets]: "Azure Client Secrets",
|
||||
[AppConnection.AzureDevOps]: "Azure DevOps",
|
||||
[AppConnection.AzureADCS]: "Azure ADCS",
|
||||
[AppConnection.Databricks]: "Databricks",
|
||||
[AppConnection.Humanitec]: "Humanitec",
|
||||
[AppConnection.TerraformCloud]: "Terraform Cloud",
|
||||
@@ -49,6 +50,7 @@ export const APP_CONNECTION_PLAN_MAP: Record<AppConnection, AppConnectionPlanTyp
|
||||
[AppConnection.AzureAppConfiguration]: AppConnectionPlanType.Regular,
|
||||
[AppConnection.AzureClientSecrets]: AppConnectionPlanType.Regular,
|
||||
[AppConnection.AzureDevOps]: AppConnectionPlanType.Regular,
|
||||
[AppConnection.AzureADCS]: AppConnectionPlanType.Regular,
|
||||
[AppConnection.Databricks]: AppConnectionPlanType.Regular,
|
||||
[AppConnection.Humanitec]: AppConnectionPlanType.Regular,
|
||||
[AppConnection.TerraformCloud]: AppConnectionPlanType.Regular,
|
||||
|
@@ -45,6 +45,7 @@ import {
|
||||
import { ValidateAuth0ConnectionCredentialsSchema } from "./auth0";
|
||||
import { ValidateAwsConnectionCredentialsSchema } from "./aws";
|
||||
import { awsConnectionService } from "./aws/aws-connection-service";
|
||||
import { ValidateAzureADCSConnectionCredentialsSchema } from "./azure-adcs/azure-adcs-connection-schemas";
|
||||
import { ValidateAzureAppConfigurationConnectionCredentialsSchema } from "./azure-app-configuration";
|
||||
import { ValidateAzureClientSecretsConnectionCredentialsSchema } from "./azure-client-secrets";
|
||||
import { azureClientSecretsConnectionService } from "./azure-client-secrets/azure-client-secrets-service";
|
||||
@@ -122,6 +123,7 @@ const VALIDATE_APP_CONNECTION_CREDENTIALS_MAP: Record<AppConnection, TValidateAp
|
||||
[AppConnection.AzureKeyVault]: ValidateAzureKeyVaultConnectionCredentialsSchema,
|
||||
[AppConnection.AzureAppConfiguration]: ValidateAzureAppConfigurationConnectionCredentialsSchema,
|
||||
[AppConnection.AzureDevOps]: ValidateAzureDevOpsConnectionCredentialsSchema,
|
||||
[AppConnection.AzureADCS]: ValidateAzureADCSConnectionCredentialsSchema,
|
||||
[AppConnection.Databricks]: ValidateDatabricksConnectionCredentialsSchema,
|
||||
[AppConnection.Humanitec]: ValidateHumanitecConnectionCredentialsSchema,
|
||||
[AppConnection.TerraformCloud]: ValidateTerraformCloudConnectionCredentialsSchema,
|
||||
@@ -598,7 +600,7 @@ export const appConnectionServiceFactory = ({
|
||||
azureClientSecrets: azureClientSecretsConnectionService(connectAppConnectionById, appConnectionDAL, kmsService),
|
||||
azureDevOps: azureDevOpsConnectionService(connectAppConnectionById, appConnectionDAL, kmsService),
|
||||
auth0: auth0ConnectionService(connectAppConnectionById, appConnectionDAL, kmsService),
|
||||
hcvault: hcVaultConnectionService(connectAppConnectionById),
|
||||
hcvault: hcVaultConnectionService(connectAppConnectionById, gatewayService),
|
||||
windmill: windmillConnectionService(connectAppConnectionById),
|
||||
teamcity: teamcityConnectionService(connectAppConnectionById),
|
||||
oci: ociConnectionService(connectAppConnectionById, licenseService),
|
||||
|
@@ -33,6 +33,12 @@ import {
|
||||
TAwsConnectionInput,
|
||||
TValidateAwsConnectionCredentialsSchema
|
||||
} from "./aws";
|
||||
import {
|
||||
TAzureADCSConnection,
|
||||
TAzureADCSConnectionConfig,
|
||||
TAzureADCSConnectionInput,
|
||||
TValidateAzureADCSConnectionCredentialsSchema
|
||||
} from "./azure-adcs/azure-adcs-connection-types";
|
||||
import {
|
||||
TAzureAppConfigurationConnection,
|
||||
TAzureAppConfigurationConnectionConfig,
|
||||
@@ -223,6 +229,7 @@ export type TAppConnection = { id: string } & (
|
||||
| TAzureKeyVaultConnection
|
||||
| TAzureAppConfigurationConnection
|
||||
| TAzureDevOpsConnection
|
||||
| TAzureADCSConnection
|
||||
| TDatabricksConnection
|
||||
| THumanitecConnection
|
||||
| TTerraformCloudConnection
|
||||
@@ -267,6 +274,7 @@ export type TAppConnectionInput = { id: string } & (
|
||||
| TAzureKeyVaultConnectionInput
|
||||
| TAzureAppConfigurationConnectionInput
|
||||
| TAzureDevOpsConnectionInput
|
||||
| TAzureADCSConnectionInput
|
||||
| TDatabricksConnectionInput
|
||||
| THumanitecConnectionInput
|
||||
| TTerraformCloudConnectionInput
|
||||
@@ -322,6 +330,7 @@ export type TAppConnectionConfig =
|
||||
| TAzureKeyVaultConnectionConfig
|
||||
| TAzureAppConfigurationConnectionConfig
|
||||
| TAzureDevOpsConnectionConfig
|
||||
| TAzureADCSConnectionConfig
|
||||
| TAzureClientSecretsConnectionConfig
|
||||
| TDatabricksConnectionConfig
|
||||
| THumanitecConnectionConfig
|
||||
@@ -359,6 +368,7 @@ export type TValidateAppConnectionCredentialsSchema =
|
||||
| TValidateAzureAppConfigurationConnectionCredentialsSchema
|
||||
| TValidateAzureClientSecretsConnectionCredentialsSchema
|
||||
| TValidateAzureDevOpsConnectionCredentialsSchema
|
||||
| TValidateAzureADCSConnectionCredentialsSchema
|
||||
| TValidateDatabricksConnectionCredentialsSchema
|
||||
| TValidateHumanitecConnectionCredentialsSchema
|
||||
| TValidatePostgresConnectionCredentialsSchema
|
||||
|
@@ -91,7 +91,7 @@ export const validateAuth0ConnectionCredentials = async ({ credentials }: TAuth0
|
||||
};
|
||||
} catch (e: unknown) {
|
||||
throw new BadRequestError({
|
||||
message: (e as Error).message ?? `Unable to validate connection: verify credentials`
|
||||
message: (e as Error).message ?? "Unable to validate connection: verify credentials"
|
||||
});
|
||||
}
|
||||
};
|
||||
|
@@ -0,0 +1,3 @@
|
||||
// Authentication methods supported by the Azure ADCS app connection.
export enum AzureADCSConnectionMethod {
  // NTLM username/password authentication against the ADCS web-enrollment endpoints
  UsernamePassword = "username-password"
}
|
@@ -0,0 +1,455 @@
|
||||
/* eslint-disable no-case-declarations, @typescript-eslint/no-unsafe-assignment, @typescript-eslint/no-unsafe-call, @typescript-eslint/no-unsafe-member-access, @typescript-eslint/no-var-requires, no-await-in-loop, no-continue */
|
||||
import { NtlmClient } from "axios-ntlm";
|
||||
import https from "https";
|
||||
|
||||
import { BadRequestError, NotFoundError } from "@app/lib/errors";
|
||||
import { blockLocalAndPrivateIpAddresses } from "@app/lib/validator/validate-url";
|
||||
import { decryptAppConnectionCredentials } from "@app/services/app-connection/app-connection-fns";
|
||||
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
|
||||
|
||||
import { TAppConnectionDALFactory } from "../app-connection-dal";
|
||||
import { AppConnection } from "../app-connection-enums";
|
||||
import { AzureADCSConnectionMethod } from "./azure-adcs-connection-enums";
|
||||
import { TAzureADCSConnectionConfig } from "./azure-adcs-connection-types";
|
||||
|
||||
// Type definitions for axios-ntlm

// Request shape accepted by axiosNtlmRequest: target URL plus NTLM
// credentials and optional agent/body/headers.
interface AxiosNtlmConfig {
  ntlm: {
    domain: string;
    username: string;
    password: string;
  };
  httpsAgent?: https.Agent;
  url: string;
  method?: string; // defaults to "GET" when omitted
  data?: string;
  headers?: Record<string, string>;
}

// Minimal response view returned by axiosNtlmRequest.
interface AxiosNtlmResponse {
  status: number;
  data: string;
  headers: unknown;
}

// Types for credential parsing
// Result of parseCredentials: the split domain/username plus the combined form.
interface ParsedCredentials {
  domain: string;
  username: string;
  fullUsername: string; // domain\username format
}
|
||||
|
||||
// Helper function to parse and normalize credentials for Windows authentication
|
||||
const parseCredentials = (inputUsername: string): ParsedCredentials => {
|
||||
// Ensure inputUsername is a string
|
||||
if (typeof inputUsername !== "string" || !inputUsername.trim()) {
|
||||
throw new BadRequestError({
|
||||
message: "Username must be a non-empty string"
|
||||
});
|
||||
}
|
||||
|
||||
let domain = "";
|
||||
let username = "";
|
||||
let fullUsername = "";
|
||||
|
||||
if (inputUsername.includes("\\")) {
|
||||
// Already in domain\username format
|
||||
const parts = inputUsername.split("\\");
|
||||
if (parts.length === 2) {
|
||||
[domain, username] = parts;
|
||||
fullUsername = inputUsername;
|
||||
} else {
|
||||
throw new BadRequestError({
|
||||
message: "Invalid domain\\username format. Expected format: DOMAIN\\username"
|
||||
});
|
||||
}
|
||||
} else if (inputUsername.includes("@")) {
|
||||
// UPN format: user@domain.com
|
||||
const [user, domainPart] = inputUsername.split("@");
|
||||
if (!user || !domainPart) {
|
||||
throw new BadRequestError({
|
||||
message: "Invalid UPN format. Expected format: user@domain.com"
|
||||
});
|
||||
}
|
||||
|
||||
username = user;
|
||||
// Extract NetBIOS name from FQDN
|
||||
domain = domainPart.split(".")[0].toUpperCase();
|
||||
fullUsername = `${domain}\\${username}`;
|
||||
} else {
|
||||
// Plain username - assume local account or current domain
|
||||
username = inputUsername;
|
||||
domain = "";
|
||||
fullUsername = inputUsername;
|
||||
}
|
||||
|
||||
return { domain, username, fullUsername };
|
||||
};
|
||||
|
||||
// Helper to normalize URL
|
||||
const normalizeAdcsUrl = (url: string): string => {
|
||||
let normalizedUrl = url.trim();
|
||||
|
||||
// Remove trailing slash
|
||||
normalizedUrl = normalizedUrl.replace(/\/$/, "");
|
||||
|
||||
// Ensure HTTPS protocol
|
||||
if (normalizedUrl.startsWith("http://")) {
|
||||
normalizedUrl = normalizedUrl.replace("http://", "https://");
|
||||
} else if (!normalizedUrl.startsWith("https://")) {
|
||||
normalizedUrl = `https://${normalizedUrl}`;
|
||||
}
|
||||
|
||||
return normalizedUrl;
|
||||
};
|
||||
|
||||
// NTLM request wrapper
|
||||
const createHttpsAgent = (sslRejectUnauthorized: boolean, sslCertificate?: string): https.Agent => {
|
||||
const agentOptions: https.AgentOptions = {
|
||||
rejectUnauthorized: sslRejectUnauthorized,
|
||||
keepAlive: true, // axios-ntlm needs keepAlive for NTLM handshake
|
||||
ca: sslCertificate ? [sslCertificate.trim()] : undefined,
|
||||
// Disable hostname verification as Microsoft servers by default use local IPs for certificates
|
||||
// which may not match the hostname used to connect
|
||||
checkServerIdentity: () => undefined
|
||||
};
|
||||
|
||||
return new https.Agent(agentOptions);
|
||||
};
|
||||
|
||||
const axiosNtlmRequest = async (config: AxiosNtlmConfig): Promise<AxiosNtlmResponse> => {
|
||||
const method = config.method || "GET";
|
||||
|
||||
const credentials = {
|
||||
username: config.ntlm.username,
|
||||
password: config.ntlm.password,
|
||||
domain: config.ntlm.domain || "",
|
||||
workstation: ""
|
||||
};
|
||||
|
||||
const axiosConfig = {
|
||||
httpsAgent: config.httpsAgent,
|
||||
timeout: 30000
|
||||
};
|
||||
|
||||
const client = NtlmClient(credentials, axiosConfig);
|
||||
|
||||
const requestOptions: { url: string; method: string; data?: string; headers?: Record<string, string> } = {
|
||||
url: config.url,
|
||||
method
|
||||
};
|
||||
|
||||
if (config.data) {
|
||||
requestOptions.data = config.data;
|
||||
}
|
||||
|
||||
if (config.headers) {
|
||||
requestOptions.headers = config.headers;
|
||||
}
|
||||
|
||||
const response = await client(requestOptions);
|
||||
|
||||
return {
|
||||
status: response.status,
|
||||
data: response.data,
|
||||
headers: response.headers
|
||||
};
|
||||
};
|
||||
|
||||
// Test ADCS connectivity and authentication using NTLM
|
||||
const testAdcsConnection = async (
|
||||
credentials: ParsedCredentials,
|
||||
password: string,
|
||||
baseUrl: string,
|
||||
sslRejectUnauthorized: boolean = true,
|
||||
sslCertificate?: string
|
||||
): Promise<boolean> => {
|
||||
// Test endpoints in order of preference
|
||||
const testEndpoints = [
|
||||
"/certsrv/certrqus.asp", // Certificate request status (most reliable)
|
||||
"/certsrv/certfnsh.asp", // Certificate finalization
|
||||
"/certsrv/default.asp", // Main ADCS page
|
||||
"/certsrv/" // Root certsrv
|
||||
];
|
||||
|
||||
for (const endpoint of testEndpoints) {
|
||||
try {
|
||||
const testUrl = `${baseUrl}${endpoint}`;
|
||||
|
||||
const shouldRejectUnauthorized = sslRejectUnauthorized;
|
||||
|
||||
const httpsAgent = createHttpsAgent(shouldRejectUnauthorized, sslCertificate);
|
||||
|
||||
const response = await axiosNtlmRequest({
|
||||
url: testUrl,
|
||||
method: "GET",
|
||||
httpsAgent,
|
||||
ntlm: {
|
||||
domain: credentials.domain,
|
||||
username: credentials.username,
|
||||
password
|
||||
}
|
||||
});
|
||||
|
||||
// Check if we got a successful response
|
||||
if (response.status === 200) {
|
||||
const responseText = response.data;
|
||||
|
||||
// Verify this is actually an ADCS server by checking content
|
||||
const adcsIndicators = [
|
||||
"Microsoft Active Directory Certificate Services",
|
||||
"Certificate Services",
|
||||
"Request a certificate",
|
||||
"certsrv",
|
||||
"Certificate Template",
|
||||
"Web Enrollment"
|
||||
];
|
||||
|
||||
const isAdcsServer = adcsIndicators.some((indicator) =>
|
||||
responseText.toLowerCase().includes(indicator.toLowerCase())
|
||||
);
|
||||
|
||||
if (isAdcsServer) {
|
||||
// Successfully authenticated and confirmed ADCS
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
if (response.status === 401) {
|
||||
throw new BadRequestError({
|
||||
message: "Authentication failed. Please verify your credentials are correct."
|
||||
});
|
||||
}
|
||||
|
||||
if (response.status === 403) {
|
||||
throw new BadRequestError({
|
||||
message: "Access denied. Your account may not have permission to access ADCS web enrollment."
|
||||
});
|
||||
}
|
||||
} catch (error) {
|
||||
if (error instanceof BadRequestError) {
|
||||
throw error;
|
||||
}
|
||||
|
||||
// Handle network and connection errors
|
||||
if (error instanceof Error) {
|
||||
if (error.message.includes("ENOTFOUND")) {
|
||||
throw new BadRequestError({
|
||||
message: "Cannot resolve ADCS server hostname. Please verify the URL is correct."
|
||||
});
|
||||
}
|
||||
if (error.message.includes("ECONNREFUSED")) {
|
||||
throw new BadRequestError({
|
||||
message: "Connection refused by ADCS server. Please verify the server is running and accessible."
|
||||
});
|
||||
}
|
||||
if (error.message.includes("ETIMEDOUT") || error.message.includes("timeout")) {
|
||||
throw new BadRequestError({
|
||||
message: "Connection timeout. Please verify the server is accessible and not blocked by firewall."
|
||||
});
|
||||
}
|
||||
if (error.message.includes("certificate") || error.message.includes("SSL") || error.message.includes("TLS")) {
|
||||
throw new BadRequestError({
|
||||
message: `SSL/TLS certificate error: ${error.message}. This may indicate a certificate verification failure.`
|
||||
});
|
||||
}
|
||||
if (error.message.includes("DEPTH_ZERO_SELF_SIGNED_CERT")) {
|
||||
throw new BadRequestError({
|
||||
message:
|
||||
"Self-signed certificate detected. Either provide the server's certificate or set 'sslRejectUnauthorized' to false."
|
||||
});
|
||||
}
|
||||
if (error.message.includes("UNABLE_TO_VERIFY_LEAF_SIGNATURE")) {
|
||||
throw new BadRequestError({
|
||||
message: "Unable to verify certificate signature. Please provide the correct CA certificate."
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// Continue to next endpoint for other errors
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
// If we get here, no endpoint worked
|
||||
throw new BadRequestError({
|
||||
message: "Could not connect to ADCS server. Please verify the server URL and that Web Enrollment is enabled."
|
||||
});
|
||||
};
|
||||
|
||||
// Create authenticated NTLM client for ADCS operations
|
||||
const createNtlmClient = (
|
||||
username: string,
|
||||
password: string,
|
||||
baseUrl: string,
|
||||
sslRejectUnauthorized: boolean = true,
|
||||
sslCertificate?: string
|
||||
) => {
|
||||
const parsedCredentials = parseCredentials(username);
|
||||
const normalizedUrl = normalizeAdcsUrl(baseUrl);
|
||||
|
||||
return {
|
||||
get: async (endpoint: string, additionalHeaders: Record<string, string> = {}) => {
|
||||
const shouldRejectUnauthorized = sslRejectUnauthorized;
|
||||
|
||||
const httpsAgent = createHttpsAgent(shouldRejectUnauthorized, sslCertificate);
|
||||
|
||||
return axiosNtlmRequest({
|
||||
url: `${normalizedUrl}${endpoint}`,
|
||||
method: "GET",
|
||||
httpsAgent,
|
||||
headers: additionalHeaders,
|
||||
ntlm: {
|
||||
domain: parsedCredentials.domain,
|
||||
username: parsedCredentials.username,
|
||||
password
|
||||
}
|
||||
});
|
||||
},
|
||||
post: async (endpoint: string, body: string, additionalHeaders: Record<string, string> = {}) => {
|
||||
const shouldRejectUnauthorized = sslRejectUnauthorized;
|
||||
|
||||
const httpsAgent = createHttpsAgent(shouldRejectUnauthorized, sslCertificate);
|
||||
|
||||
return axiosNtlmRequest({
|
||||
url: `${normalizedUrl}${endpoint}`,
|
||||
method: "POST",
|
||||
httpsAgent,
|
||||
data: body,
|
||||
headers: {
|
||||
"Content-Type": "application/x-www-form-urlencoded",
|
||||
...additionalHeaders
|
||||
},
|
||||
ntlm: {
|
||||
domain: parsedCredentials.domain,
|
||||
username: parsedCredentials.username,
|
||||
password
|
||||
}
|
||||
});
|
||||
},
|
||||
baseUrl: normalizedUrl,
|
||||
credentials: parsedCredentials
|
||||
};
|
||||
};
|
||||
|
||||
export const getAzureADCSConnectionCredentials = async (
|
||||
connectionId: string,
|
||||
appConnectionDAL: Pick<TAppConnectionDALFactory, "findById">,
|
||||
kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">
|
||||
) => {
|
||||
const appConnection = await appConnectionDAL.findById(connectionId);
|
||||
|
||||
if (!appConnection) {
|
||||
throw new NotFoundError({ message: `Connection with ID '${connectionId}' not found` });
|
||||
}
|
||||
|
||||
if (appConnection.app !== AppConnection.AzureADCS) {
|
||||
throw new BadRequestError({ message: `Connection with ID '${connectionId}' is not an Azure ADCS connection` });
|
||||
}
|
||||
|
||||
switch (appConnection.method) {
|
||||
case AzureADCSConnectionMethod.UsernamePassword:
|
||||
const credentials = (await decryptAppConnectionCredentials({
|
||||
orgId: appConnection.orgId,
|
||||
kmsService,
|
||||
encryptedCredentials: appConnection.encryptedCredentials
|
||||
})) as {
|
||||
username: string;
|
||||
password: string;
|
||||
adcsUrl: string;
|
||||
sslRejectUnauthorized?: boolean;
|
||||
sslCertificate?: string;
|
||||
};
|
||||
|
||||
return {
|
||||
username: credentials.username,
|
||||
password: credentials.password,
|
||||
adcsUrl: credentials.adcsUrl,
|
||||
sslRejectUnauthorized: credentials.sslRejectUnauthorized ?? true,
|
||||
sslCertificate: credentials.sslCertificate
|
||||
};
|
||||
|
||||
default:
|
||||
throw new BadRequestError({
|
||||
message: `Unsupported Azure ADCS connection method: ${appConnection.method}`
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
// Validates Azure ADCS connection credentials by performing a live NTLM
// authentication against the configured ADCS server. Returns the (normalized)
// credential payload on success.
//
// @throws BadRequestError with a user-actionable message on any failure.
export const validateAzureADCSConnectionCredentials = async (appConnection: TAzureADCSConnectionConfig) => {
  const { credentials } = appConnection;

  try {
    // Parse and validate credentials
    const parsedCredentials = parseCredentials(credentials.username);
    const normalizedUrl = normalizeAdcsUrl(credentials.adcsUrl);

    // Validate URL to prevent DNS manipulation attacks and SSRF
    await blockLocalAndPrivateIpAddresses(normalizedUrl);

    // Test the connection using NTLM
    await testAdcsConnection(
      parsedCredentials,
      credentials.password,
      normalizedUrl,
      credentials.sslRejectUnauthorized ?? true,
      credentials.sslCertificate
    );

    // If we get here, authentication was successful
    return {
      username: credentials.username,
      password: credentials.password,
      adcsUrl: credentials.adcsUrl,
      sslRejectUnauthorized: credentials.sslRejectUnauthorized ?? true,
      sslCertificate: credentials.sslCertificate
    };
  } catch (error) {
    // testAdcsConnection already raises user-friendly BadRequestErrors; pass
    // those through unchanged.
    if (error instanceof BadRequestError) {
      throw error;
    }

    // Handle unexpected errors: map remaining raw errors to an actionable
    // summary by inspecting the message contents.
    let errorMessage = "Unable to validate ADCS connection.";
    if (error instanceof Error) {
      if (error.message.includes("401") || error.message.includes("Unauthorized")) {
        errorMessage = "NTLM authentication failed. Please verify your username, password, and domain are correct.";
      } else if (error.message.includes("ENOTFOUND") || error.message.includes("ECONNREFUSED")) {
        errorMessage = "Cannot connect to the ADCS server. Please verify the server URL is correct and accessible.";
      } else if (error.message.includes("timeout")) {
        errorMessage = "Connection to ADCS server timed out. Please verify the server is accessible.";
      } else if (
        error.message.includes("certificate") ||
        error.message.includes("SSL") ||
        error.message.includes("TLS") ||
        error.message.includes("DEPTH_ZERO_SELF_SIGNED_CERT") ||
        error.message.includes("UNABLE_TO_VERIFY_LEAF_SIGNATURE")
      ) {
        errorMessage = `SSL/TLS certificate error: ${error.message}. The server certificate may be self-signed or the CA certificate may be incorrect.`;
      }
    }

    // Wrap with both the mapped summary and the raw message for debugging.
    throw new BadRequestError({
      message: `Failed to validate Azure ADCS connection: ${errorMessage} Details: ${
        error instanceof Error ? error.message : "Unknown error"
      }`
    });
  }
};
|
||||
|
||||
export const getAzureADCSConnectionListItem = () => ({
|
||||
name: "Azure ADCS" as const,
|
||||
app: AppConnection.AzureADCS as const,
|
||||
methods: [AzureADCSConnectionMethod.UsernamePassword] as [AzureADCSConnectionMethod.UsernamePassword]
|
||||
});
|
||||
|
||||
// Export helper functions for use in certificate ordering
|
||||
export const createAdcsHttpClient = (
|
||||
username: string,
|
||||
password: string,
|
||||
baseUrl: string,
|
||||
sslRejectUnauthorized: boolean = true,
|
||||
sslCertificate?: string
|
||||
) => {
|
||||
return createNtlmClient(username, password, baseUrl, sslRejectUnauthorized, sslCertificate);
|
||||
};
|
@@ -0,0 +1,88 @@
|
||||
import z from "zod";
|
||||
|
||||
import { AppConnections } from "@app/lib/api-docs";
|
||||
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
|
||||
import {
|
||||
BaseAppConnectionSchema,
|
||||
GenericCreateAppConnectionFieldsSchema,
|
||||
GenericUpdateAppConnectionFieldsSchema
|
||||
} from "@app/services/app-connection/app-connection-schemas";
|
||||
|
||||
import { AzureADCSConnectionMethod } from "./azure-adcs-connection-enums";
|
||||
|
||||
// Zod schema for the username/password credential payload of an Azure ADCS
// connection. The ADCS URL must use HTTPS; TLS options are optional.
export const AzureADCSUsernamePasswordCredentialsSchema = z.object({
  adcsUrl: z
    .string()
    .trim()
    .min(1, "ADCS URL required")
    .max(255)
    .refine((value) => value.startsWith("https://"), "ADCS URL must use HTTPS")
    .describe(AppConnections.CREDENTIALS.AZURE_ADCS.adcsUrl),
  username: z
    .string()
    .trim()
    .min(1, "Username required")
    .max(255)
    .describe(AppConnections.CREDENTIALS.AZURE_ADCS.username),
  password: z
    .string()
    .trim()
    .min(1, "Password required")
    .max(255)
    .describe(AppConnections.CREDENTIALS.AZURE_ADCS.password),
  sslRejectUnauthorized: z.boolean().optional().describe(AppConnections.CREDENTIALS.AZURE_ADCS.sslRejectUnauthorized),
  sslCertificate: z
    .string()
    .trim()
    .transform((value) => value || undefined) // treat an empty string as "not provided"
    .optional()
    .describe(AppConnections.CREDENTIALS.AZURE_ADCS.sslCertificate)
});
|
||||
|
||||
// Base record schema for an Azure ADCS connection (narrows `app` to AzureADCS).
const BaseAzureADCSConnectionSchema = BaseAppConnectionSchema.extend({ app: z.literal(AppConnection.AzureADCS) });

// Full connection record including the complete credential payload.
export const AzureADCSConnectionSchema = BaseAzureADCSConnectionSchema.extend({
  method: z.literal(AzureADCSConnectionMethod.UsernamePassword),
  credentials: AzureADCSUsernamePasswordCredentialsSchema
});

// API-safe view of a connection: the password is omitted from credentials.
export const SanitizedAzureADCSConnectionSchema = z.discriminatedUnion("method", [
  BaseAzureADCSConnectionSchema.extend({
    method: z.literal(AzureADCSConnectionMethod.UsernamePassword),
    credentials: AzureADCSUsernamePasswordCredentialsSchema.pick({
      username: true,
      adcsUrl: true,
      sslRejectUnauthorized: true,
      sslCertificate: true
    })
  })
]);

// Method + credentials payload accepted when validating connection credentials.
export const ValidateAzureADCSConnectionCredentialsSchema = z.discriminatedUnion("method", [
  z.object({
    method: z
      .literal(AzureADCSConnectionMethod.UsernamePassword)
      .describe(AppConnections.CREATE(AppConnection.AzureADCS).method),
    credentials: AzureADCSUsernamePasswordCredentialsSchema.describe(
      AppConnections.CREATE(AppConnection.AzureADCS).credentials
    )
  })
]);

// Create payload: validated credentials plus the generic connection fields.
export const CreateAzureADCSConnectionSchema = ValidateAzureADCSConnectionCredentialsSchema.and(
  GenericCreateAppConnectionFieldsSchema(AppConnection.AzureADCS)
);

// Update payload: credentials are optional (omit to keep the existing ones).
export const UpdateAzureADCSConnectionSchema = z
  .object({
    credentials: AzureADCSUsernamePasswordCredentialsSchema.optional().describe(
      AppConnections.UPDATE(AppConnection.AzureADCS).credentials
    )
  })
  .and(GenericUpdateAppConnectionFieldsSchema(AppConnection.AzureADCS));

// Shape of this connection's entry in the app-connection options listing.
export const AzureADCSConnectionListItemSchema = z.object({
  name: z.literal("Azure ADCS"),
  app: z.literal(AppConnection.AzureADCS),
  methods: z.nativeEnum(AzureADCSConnectionMethod).array()
});
|
@@ -0,0 +1,23 @@
|
||||
import z from "zod";
|
||||
|
||||
import { DiscriminativePick } from "@app/lib/types";
|
||||
|
||||
import { AppConnection } from "../app-connection-enums";
|
||||
import {
|
||||
AzureADCSConnectionSchema,
|
||||
CreateAzureADCSConnectionSchema,
|
||||
ValidateAzureADCSConnectionCredentialsSchema
|
||||
} from "./azure-adcs-connection-schemas";
|
||||
|
||||
// Full Azure ADCS connection record (method + credentials), inferred from the schema.
export type TAzureADCSConnection = z.infer<typeof AzureADCSConnectionSchema>;

// Input payload for creating an Azure ADCS connection, narrowed to this app.
export type TAzureADCSConnectionInput = z.infer<typeof CreateAzureADCSConnectionSchema> & {
  app: AppConnection.AzureADCS;
};

// Schema type consumed by the generic credential-validation map.
export type TValidateAzureADCSConnectionCredentialsSchema = typeof ValidateAzureADCSConnectionCredentialsSchema;

// Minimal config needed to validate or connect: method, app, and credentials.
export type TAzureADCSConnectionConfig = DiscriminativePick<
  TAzureADCSConnectionInput,
  "method" | "app" | "credentials"
>;
|
4
backend/src/services/app-connection/azure-adcs/index.ts
Normal file
4
backend/src/services/app-connection/azure-adcs/index.ts
Normal file
@@ -0,0 +1,4 @@
|
||||
// Barrel file for the Azure ADCS app-connection module.
export * from "./azure-adcs-connection-enums";
export * from "./azure-adcs-connection-fns";
export * from "./azure-adcs-connection-schemas";
export * from "./azure-adcs-connection-types";
|
@@ -70,7 +70,7 @@ export const validateAzureAppConfigurationConnectionCredentials = async (
|
||||
tokenError = e;
|
||||
} else {
|
||||
throw new BadRequestError({
|
||||
message: `Unable to validate connection: verify credentials`
|
||||
message: "Unable to validate connection: verify credentials"
|
||||
});
|
||||
}
|
||||
}
|
||||
|
@@ -186,7 +186,7 @@ export const validateAzureClientSecretsConnectionCredentials = async (config: TA
|
||||
tokenError = e;
|
||||
} else {
|
||||
throw new BadRequestError({
|
||||
message: `Unable to validate connection: verify credentials`
|
||||
message: "Unable to validate connection: verify credentials"
|
||||
});
|
||||
}
|
||||
}
|
||||
|
@@ -204,7 +204,7 @@ export const validateAzureDevOpsConnectionCredentials = async (config: TAzureDev
|
||||
tokenError = e;
|
||||
} else {
|
||||
throw new BadRequestError({
|
||||
message: `Unable to validate connection: verify credentials`
|
||||
message: "Unable to validate connection: verify credentials"
|
||||
});
|
||||
}
|
||||
}
|
||||
|
@@ -186,7 +186,7 @@ export const validateAzureKeyVaultConnectionCredentials = async (config: TAzureK
|
||||
tokenError = e;
|
||||
} else {
|
||||
throw new BadRequestError({
|
||||
message: `Unable to validate connection: verify credentials`
|
||||
message: "Unable to validate connection: verify credentials"
|
||||
});
|
||||
}
|
||||
}
|
||||
|
@@ -82,7 +82,7 @@ export const validateCamundaConnectionCredentials = async (appConnection: TCamun
|
||||
};
|
||||
} catch (e: unknown) {
|
||||
throw new BadRequestError({
|
||||
message: `Unable to validate connection: verify credentials`
|
||||
message: "Unable to validate connection: verify credentials"
|
||||
});
|
||||
}
|
||||
};
|
||||
|
@@ -89,7 +89,7 @@ export const validateDatabricksConnectionCredentials = async (appConnection: TDa
|
||||
};
|
||||
} catch (e: unknown) {
|
||||
throw new BadRequestError({
|
||||
message: `Unable to validate connection: verify credentials`
|
||||
message: "Unable to validate connection: verify credentials"
|
||||
});
|
||||
}
|
||||
};
|
||||
|
@@ -114,7 +114,7 @@ export const validateGitHubRadarConnectionCredentials = async (config: TGitHubRa
|
||||
}
|
||||
|
||||
throw new BadRequestError({
|
||||
message: `Unable to validate connection: verify credentials`
|
||||
message: "Unable to validate connection: verify credentials"
|
||||
});
|
||||
}
|
||||
|
||||
|
@@ -447,7 +447,7 @@ export const validateGitHubConnectionCredentials = async (
|
||||
}
|
||||
|
||||
throw new BadRequestError({
|
||||
message: `Unable to validate connection: verify credentials`
|
||||
message: "Unable to validate connection: verify credentials"
|
||||
});
|
||||
}
|
||||
|
||||
|
@@ -1,18 +1,18 @@
|
||||
import { AxiosError } from "axios";
|
||||
import { AxiosError, AxiosRequestConfig, AxiosResponse } from "axios";
|
||||
import https from "https";
|
||||
|
||||
import { verifyHostInputValidity } from "@app/ee/services/dynamic-secret/dynamic-secret-fns";
|
||||
import { TGatewayServiceFactory } from "@app/ee/services/gateway/gateway-service";
|
||||
import { request } from "@app/lib/config/request";
|
||||
import { BadRequestError } from "@app/lib/errors";
|
||||
import { removeTrailingSlash } from "@app/lib/fn";
|
||||
import { GatewayProxyProtocol, withGatewayProxy } from "@app/lib/gateway";
|
||||
import { logger } from "@app/lib/logger";
|
||||
import { blockLocalAndPrivateIpAddresses } from "@app/lib/validator";
|
||||
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
|
||||
|
||||
import { HCVaultConnectionMethod } from "./hc-vault-connection-enums";
|
||||
import {
|
||||
THCVaultConnection,
|
||||
THCVaultConnectionConfig,
|
||||
THCVaultMountResponse,
|
||||
TValidateHCVaultConnectionCredentials
|
||||
} from "./hc-vault-connection-types";
|
||||
import { THCVaultConnection, THCVaultConnectionConfig, THCVaultMountResponse } from "./hc-vault-connection-types";
|
||||
|
||||
export const getHCVaultInstanceUrl = async (config: THCVaultConnectionConfig) => {
|
||||
const instanceUrl = removeTrailingSlash(config.credentials.instanceUrl);
|
||||
@@ -37,7 +37,78 @@ type TokenRespData = {
|
||||
};
|
||||
};
|
||||
|
||||
export const getHCVaultAccessToken = async (connection: TValidateHCVaultConnectionCredentials) => {
|
||||
export const requestWithHCVaultGateway = async <T>(
|
||||
appConnection: { gatewayId?: string | null },
|
||||
gatewayService: Pick<TGatewayServiceFactory, "fnGetGatewayClientTlsByGatewayId">,
|
||||
requestConfig: AxiosRequestConfig
|
||||
): Promise<AxiosResponse<T>> => {
|
||||
const { gatewayId } = appConnection;
|
||||
|
||||
// If gateway isn't set up, don't proxy request
|
||||
if (!gatewayId) {
|
||||
return request.request(requestConfig);
|
||||
}
|
||||
|
||||
const url = new URL(requestConfig.url as string);
|
||||
|
||||
await blockLocalAndPrivateIpAddresses(url.toString());
|
||||
|
||||
const [targetHost] = await verifyHostInputValidity(url.hostname, true);
|
||||
const relayDetails = await gatewayService.fnGetGatewayClientTlsByGatewayId(gatewayId);
|
||||
const [relayHost, relayPort] = relayDetails.relayAddress.split(":");
|
||||
|
||||
return withGatewayProxy(
|
||||
async (proxyPort) => {
|
||||
const httpsAgent = new https.Agent({
|
||||
servername: targetHost
|
||||
});
|
||||
|
||||
url.protocol = "https:";
|
||||
url.host = `localhost:${proxyPort}`;
|
||||
|
||||
const finalRequestConfig: AxiosRequestConfig = {
|
||||
...requestConfig,
|
||||
url: url.toString(),
|
||||
httpsAgent,
|
||||
headers: {
|
||||
...requestConfig.headers,
|
||||
Host: targetHost
|
||||
}
|
||||
};
|
||||
|
||||
try {
|
||||
return await request.request(finalRequestConfig);
|
||||
} catch (error) {
|
||||
if (error instanceof AxiosError) {
|
||||
logger.error(
|
||||
{ message: error.message, data: (error.response as undefined | { data: unknown })?.data },
|
||||
"Error during HashiCorp Vault gateway request:"
|
||||
);
|
||||
}
|
||||
throw error;
|
||||
}
|
||||
},
|
||||
{
|
||||
protocol: GatewayProxyProtocol.Tcp,
|
||||
targetHost,
|
||||
targetPort: url.port ? Number(url.port) : 8200, // 8200 is the default port for Vault self-hosted/dedicated
|
||||
relayHost,
|
||||
relayPort: Number(relayPort),
|
||||
identityId: relayDetails.identityId,
|
||||
orgId: relayDetails.orgId,
|
||||
tlsOptions: {
|
||||
ca: relayDetails.certChain,
|
||||
cert: relayDetails.certificate,
|
||||
key: relayDetails.privateKey.toString()
|
||||
}
|
||||
}
|
||||
);
|
||||
};
|
||||
|
||||
export const getHCVaultAccessToken = async (
|
||||
connection: THCVaultConnection,
|
||||
gatewayService: Pick<TGatewayServiceFactory, "fnGetGatewayClientTlsByGatewayId">
|
||||
) => {
|
||||
// Return access token directly if not using AppRole method
|
||||
if (connection.method !== HCVaultConnectionMethod.AppRole) {
|
||||
return connection.credentials.accessToken;
|
||||
@@ -46,16 +117,16 @@ export const getHCVaultAccessToken = async (connection: TValidateHCVaultConnecti
|
||||
// Generate temporary token for AppRole method
|
||||
try {
|
||||
const { instanceUrl, roleId, secretId } = connection.credentials;
|
||||
const tokenResp = await request.post<TokenRespData>(
|
||||
`${removeTrailingSlash(instanceUrl)}/v1/auth/approle/login`,
|
||||
{ role_id: roleId, secret_id: secretId },
|
||||
{
|
||||
headers: {
|
||||
"Content-Type": "application/json",
|
||||
...(connection.credentials.namespace ? { "X-Vault-Namespace": connection.credentials.namespace } : {})
|
||||
}
|
||||
}
|
||||
);
|
||||
|
||||
const tokenResp = await requestWithHCVaultGateway<TokenRespData>(connection, gatewayService, {
|
||||
url: `${removeTrailingSlash(instanceUrl)}/v1/auth/approle/login`,
|
||||
method: "POST",
|
||||
headers: {
|
||||
"Content-Type": "application/json",
|
||||
...(connection.credentials.namespace ? { "X-Vault-Namespace": connection.credentials.namespace } : {})
|
||||
},
|
||||
data: { role_id: roleId, secret_id: secretId }
|
||||
});
|
||||
|
||||
if (tokenResp.status !== 200) {
|
||||
throw new BadRequestError({
|
||||
@@ -71,38 +142,55 @@ export const getHCVaultAccessToken = async (connection: TValidateHCVaultConnecti
|
||||
}
|
||||
};
|
||||
|
||||
export const validateHCVaultConnectionCredentials = async (config: THCVaultConnectionConfig) => {
|
||||
const instanceUrl = await getHCVaultInstanceUrl(config);
|
||||
export const validateHCVaultConnectionCredentials = async (
|
||||
connection: THCVaultConnection,
|
||||
gatewayService: Pick<TGatewayServiceFactory, "fnGetGatewayClientTlsByGatewayId">
|
||||
) => {
|
||||
const instanceUrl = await getHCVaultInstanceUrl(connection);
|
||||
|
||||
try {
|
||||
const accessToken = await getHCVaultAccessToken(config);
|
||||
const accessToken = await getHCVaultAccessToken(connection, gatewayService);
|
||||
|
||||
// Verify token
|
||||
await request.get(`${instanceUrl}/v1/auth/token/lookup-self`, {
|
||||
await requestWithHCVaultGateway(connection, gatewayService, {
|
||||
url: `${instanceUrl}/v1/auth/token/lookup-self`,
|
||||
method: "GET",
|
||||
headers: { "X-Vault-Token": accessToken }
|
||||
});
|
||||
|
||||
return config.credentials;
|
||||
return connection.credentials;
|
||||
} catch (error: unknown) {
|
||||
logger.error(error, "Unable to verify HC Vault connection");
|
||||
|
||||
if (error instanceof AxiosError) {
|
||||
throw new BadRequestError({
|
||||
message: `Failed to validate credentials: ${error.message || "Unknown error"}`
|
||||
});
|
||||
}
|
||||
|
||||
if (error instanceof BadRequestError) {
|
||||
throw error;
|
||||
}
|
||||
|
||||
throw new BadRequestError({
|
||||
message: "Unable to validate connection: verify credentials"
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
export const listHCVaultMounts = async (appConnection: THCVaultConnection) => {
|
||||
const instanceUrl = await getHCVaultInstanceUrl(appConnection);
|
||||
const accessToken = await getHCVaultAccessToken(appConnection);
|
||||
export const listHCVaultMounts = async (
|
||||
connection: THCVaultConnection,
|
||||
gatewayService: Pick<TGatewayServiceFactory, "fnGetGatewayClientTlsByGatewayId">
|
||||
) => {
|
||||
const instanceUrl = await getHCVaultInstanceUrl(connection);
|
||||
const accessToken = await getHCVaultAccessToken(connection, gatewayService);
|
||||
|
||||
const { data } = await request.get<THCVaultMountResponse>(`${instanceUrl}/v1/sys/mounts`, {
|
||||
const { data } = await requestWithHCVaultGateway<THCVaultMountResponse>(connection, gatewayService, {
|
||||
url: `${instanceUrl}/v1/sys/mounts`,
|
||||
method: "GET",
|
||||
headers: {
|
||||
"X-Vault-Token": accessToken,
|
||||
...(appConnection.credentials.namespace ? { "X-Vault-Namespace": appConnection.credentials.namespace } : {})
|
||||
...(connection.credentials.namespace ? { "X-Vault-Namespace": connection.credentials.namespace } : {})
|
||||
}
|
||||
});
|
||||
|
||||
|
@@ -55,11 +55,18 @@ export const HCVaultConnectionSchema = z.intersection(
|
||||
export const SanitizedHCVaultConnectionSchema = z.discriminatedUnion("method", [
|
||||
BaseHCVaultConnectionSchema.extend({
|
||||
method: z.literal(HCVaultConnectionMethod.AccessToken),
|
||||
credentials: HCVaultConnectionAccessTokenCredentialsSchema.pick({})
|
||||
credentials: HCVaultConnectionAccessTokenCredentialsSchema.pick({
|
||||
namespace: true,
|
||||
instanceUrl: true
|
||||
})
|
||||
}),
|
||||
BaseHCVaultConnectionSchema.extend({
|
||||
method: z.literal(HCVaultConnectionMethod.AppRole),
|
||||
credentials: HCVaultConnectionAppRoleCredentialsSchema.pick({})
|
||||
credentials: HCVaultConnectionAppRoleCredentialsSchema.pick({
|
||||
namespace: true,
|
||||
instanceUrl: true,
|
||||
roleId: true
|
||||
})
|
||||
})
|
||||
]);
|
||||
|
||||
@@ -81,7 +88,7 @@ export const ValidateHCVaultConnectionCredentialsSchema = z.discriminatedUnion("
|
||||
]);
|
||||
|
||||
export const CreateHCVaultConnectionSchema = ValidateHCVaultConnectionCredentialsSchema.and(
|
||||
GenericCreateAppConnectionFieldsSchema(AppConnection.HCVault)
|
||||
GenericCreateAppConnectionFieldsSchema(AppConnection.HCVault, { supportsGateways: true })
|
||||
);
|
||||
|
||||
export const UpdateHCVaultConnectionSchema = z
|
||||
@@ -91,7 +98,7 @@ export const UpdateHCVaultConnectionSchema = z
|
||||
.optional()
|
||||
.describe(AppConnections.UPDATE(AppConnection.HCVault).credentials)
|
||||
})
|
||||
.and(GenericUpdateAppConnectionFieldsSchema(AppConnection.HCVault));
|
||||
.and(GenericUpdateAppConnectionFieldsSchema(AppConnection.HCVault, { supportsGateways: true }));
|
||||
|
||||
export const HCVaultConnectionListItemSchema = z.object({
|
||||
name: z.literal("HCVault"),
|
||||
|
@@ -1,3 +1,4 @@
|
||||
import { TGatewayServiceFactory } from "@app/ee/services/gateway/gateway-service";
|
||||
import { logger } from "@app/lib/logger";
|
||||
import { OrgServiceActor } from "@app/lib/types";
|
||||
|
||||
@@ -11,12 +12,15 @@ type TGetAppConnectionFunc = (
|
||||
actor: OrgServiceActor
|
||||
) => Promise<THCVaultConnection>;
|
||||
|
||||
export const hcVaultConnectionService = (getAppConnection: TGetAppConnectionFunc) => {
|
||||
export const hcVaultConnectionService = (
|
||||
getAppConnection: TGetAppConnectionFunc,
|
||||
gatewayService: Pick<TGatewayServiceFactory, "fnGetGatewayClientTlsByGatewayId">
|
||||
) => {
|
||||
const listMounts = async (connectionId: string, actor: OrgServiceActor) => {
|
||||
const appConnection = await getAppConnection(AppConnection.HCVault, connectionId, actor);
|
||||
|
||||
try {
|
||||
const mounts = await listHCVaultMounts(appConnection);
|
||||
const mounts = await listHCVaultMounts(appConnection, gatewayService);
|
||||
return mounts;
|
||||
} catch (error) {
|
||||
logger.error(error, "Failed to establish connection with Hashicorp Vault");
|
||||
|
@@ -453,19 +453,24 @@ export const authLoginServiceFactory = ({
|
||||
|
||||
const selectedOrg = await orgDAL.findById(organizationId);
|
||||
|
||||
// Check if authEnforced is true, if that's the case, throw an error
|
||||
if (selectedOrg.authEnforced) {
|
||||
throw new BadRequestError({
|
||||
message: "Authentication is required by your organization before you can log in."
|
||||
});
|
||||
}
|
||||
|
||||
if (!selectedOrgMembership) {
|
||||
throw new ForbiddenRequestError({
|
||||
message: `User does not have access to the organization named ${selectedOrg?.name}`
|
||||
});
|
||||
}
|
||||
|
||||
// Check if authEnforced is true and the current auth method is not an enforced method
|
||||
if (
|
||||
selectedOrg.authEnforced &&
|
||||
!isAuthMethodSaml(decodedToken.authMethod) &&
|
||||
decodedToken.authMethod !== AuthMethod.OIDC &&
|
||||
!(selectedOrg.bypassOrgAuthEnabled && selectedOrgMembership.userRole === OrgMembershipRole.Admin)
|
||||
) {
|
||||
throw new BadRequestError({
|
||||
message: "Login with the auth method required by your organization."
|
||||
});
|
||||
}
|
||||
|
||||
if (selectedOrg.googleSsoAuthEnforced && decodedToken.authMethod !== AuthMethod.GOOGLE) {
|
||||
const canBypass = selectedOrg.bypassOrgAuthEnabled && selectedOrgMembership.userRole === OrgMembershipRole.Admin;
|
||||
|
||||
|
@@ -64,6 +64,8 @@ type DBConfigurationColumn = {
|
||||
directoryUrl: string;
|
||||
accountEmail: string;
|
||||
hostedZoneId: string;
|
||||
eabKid?: string;
|
||||
eabHmacKey?: string;
|
||||
};
|
||||
|
||||
export const castDbEntryToAcmeCertificateAuthority = (
|
||||
@@ -89,7 +91,9 @@ export const castDbEntryToAcmeCertificateAuthority = (
|
||||
hostedZoneId: dbConfigurationCol.hostedZoneId
|
||||
},
|
||||
directoryUrl: dbConfigurationCol.directoryUrl,
|
||||
accountEmail: dbConfigurationCol.accountEmail
|
||||
accountEmail: dbConfigurationCol.accountEmail,
|
||||
eabKid: dbConfigurationCol.eabKid,
|
||||
eabHmacKey: dbConfigurationCol.eabHmacKey
|
||||
},
|
||||
status: ca.status as CaStatus
|
||||
};
|
||||
@@ -128,7 +132,7 @@ export const AcmeCertificateAuthorityFns = ({
|
||||
});
|
||||
}
|
||||
|
||||
const { dnsAppConnectionId, directoryUrl, accountEmail, dnsProviderConfig } = configuration;
|
||||
const { dnsAppConnectionId, directoryUrl, accountEmail, dnsProviderConfig, eabKid, eabHmacKey } = configuration;
|
||||
const appConnection = await appConnectionDAL.findById(dnsAppConnectionId);
|
||||
|
||||
if (!appConnection) {
|
||||
@@ -171,7 +175,9 @@ export const AcmeCertificateAuthorityFns = ({
|
||||
directoryUrl,
|
||||
accountEmail,
|
||||
dnsProvider: dnsProviderConfig.provider,
|
||||
hostedZoneId: dnsProviderConfig.hostedZoneId
|
||||
hostedZoneId: dnsProviderConfig.hostedZoneId,
|
||||
eabKid,
|
||||
eabHmacKey
|
||||
}
|
||||
},
|
||||
tx
|
||||
@@ -214,7 +220,7 @@ export const AcmeCertificateAuthorityFns = ({
|
||||
}) => {
|
||||
const updatedCa = await certificateAuthorityDAL.transaction(async (tx) => {
|
||||
if (configuration) {
|
||||
const { dnsAppConnectionId, directoryUrl, accountEmail, dnsProviderConfig } = configuration;
|
||||
const { dnsAppConnectionId, directoryUrl, accountEmail, dnsProviderConfig, eabKid, eabHmacKey } = configuration;
|
||||
const appConnection = await appConnectionDAL.findById(dnsAppConnectionId);
|
||||
|
||||
if (!appConnection) {
|
||||
@@ -254,7 +260,9 @@ export const AcmeCertificateAuthorityFns = ({
|
||||
directoryUrl,
|
||||
accountEmail,
|
||||
dnsProvider: dnsProviderConfig.provider,
|
||||
hostedZoneId: dnsProviderConfig.hostedZoneId
|
||||
hostedZoneId: dnsProviderConfig.hostedZoneId,
|
||||
eabKid,
|
||||
eabHmacKey
|
||||
}
|
||||
},
|
||||
tx
|
||||
@@ -354,10 +362,19 @@ export const AcmeCertificateAuthorityFns = ({
|
||||
|
||||
await blockLocalAndPrivateIpAddresses(acmeCa.configuration.directoryUrl);
|
||||
|
||||
const acmeClient = new acme.Client({
|
||||
const acmeClientOptions: acme.ClientOptions = {
|
||||
directoryUrl: acmeCa.configuration.directoryUrl,
|
||||
accountKey
|
||||
});
|
||||
};
|
||||
|
||||
if (acmeCa.configuration.eabKid && acmeCa.configuration.eabHmacKey) {
|
||||
acmeClientOptions.externalAccountBinding = {
|
||||
kid: acmeCa.configuration.eabKid,
|
||||
hmacKey: acmeCa.configuration.eabHmacKey
|
||||
};
|
||||
}
|
||||
|
||||
const acmeClient = new acme.Client(acmeClientOptions);
|
||||
|
||||
const alg = keyAlgorithmToAlgCfg(CertKeyAlgorithm.RSA_2048);
|
||||
|
||||
|
@@ -18,7 +18,9 @@ export const AcmeCertificateAuthorityConfigurationSchema = z.object({
|
||||
hostedZoneId: z.string().trim().min(1).describe(CertificateAuthorities.CONFIGURATIONS.ACME.hostedZoneId)
|
||||
}),
|
||||
directoryUrl: z.string().url().trim().min(1).describe(CertificateAuthorities.CONFIGURATIONS.ACME.directoryUrl),
|
||||
accountEmail: z.string().trim().min(1).describe(CertificateAuthorities.CONFIGURATIONS.ACME.accountEmail)
|
||||
accountEmail: z.string().trim().min(1).describe(CertificateAuthorities.CONFIGURATIONS.ACME.accountEmail),
|
||||
eabKid: z.string().trim().max(64).optional().describe(CertificateAuthorities.CONFIGURATIONS.ACME.eabKid),
|
||||
eabHmacKey: z.string().trim().max(512).optional().describe(CertificateAuthorities.CONFIGURATIONS.ACME.eabHmacKey)
|
||||
});
|
||||
|
||||
export const AcmeCertificateAuthorityCredentialsSchema = z.object({
|
||||
|
File diff suppressed because it is too large
Load Diff
@@ -0,0 +1,29 @@
|
||||
import { z } from "zod";
|
||||
|
||||
import { CaType } from "../certificate-authority-enums";
|
||||
import {
|
||||
BaseCertificateAuthoritySchema,
|
||||
GenericCreateCertificateAuthorityFieldsSchema,
|
||||
GenericUpdateCertificateAuthorityFieldsSchema
|
||||
} from "../certificate-authority-schemas";
|
||||
|
||||
export const AzureAdCsCertificateAuthorityConfigurationSchema = z.object({
|
||||
azureAdcsConnectionId: z.string().uuid().trim().describe("Azure ADCS Connection ID")
|
||||
});
|
||||
|
||||
export const AzureAdCsCertificateAuthoritySchema = BaseCertificateAuthoritySchema.extend({
|
||||
type: z.literal(CaType.AZURE_AD_CS),
|
||||
configuration: AzureAdCsCertificateAuthorityConfigurationSchema
|
||||
});
|
||||
|
||||
export const CreateAzureAdCsCertificateAuthoritySchema = GenericCreateCertificateAuthorityFieldsSchema(
|
||||
CaType.AZURE_AD_CS
|
||||
).extend({
|
||||
configuration: AzureAdCsCertificateAuthorityConfigurationSchema
|
||||
});
|
||||
|
||||
export const UpdateAzureAdCsCertificateAuthoritySchema = GenericUpdateCertificateAuthorityFieldsSchema(
|
||||
CaType.AZURE_AD_CS
|
||||
).extend({
|
||||
configuration: AzureAdCsCertificateAuthorityConfigurationSchema.optional()
|
||||
});
|
@@ -0,0 +1,13 @@
|
||||
import { z } from "zod";
|
||||
|
||||
import {
|
||||
AzureAdCsCertificateAuthoritySchema,
|
||||
CreateAzureAdCsCertificateAuthoritySchema,
|
||||
UpdateAzureAdCsCertificateAuthoritySchema
|
||||
} from "./azure-ad-cs-certificate-authority-schemas";
|
||||
|
||||
export type TAzureAdCsCertificateAuthority = z.infer<typeof AzureAdCsCertificateAuthoritySchema>;
|
||||
|
||||
export type TCreateAzureAdCsCertificateAuthorityDTO = z.infer<typeof CreateAzureAdCsCertificateAuthoritySchema>;
|
||||
|
||||
export type TUpdateAzureAdCsCertificateAuthorityDTO = z.infer<typeof UpdateAzureAdCsCertificateAuthoritySchema>;
|
@@ -1,6 +1,7 @@
|
||||
export enum CaType {
|
||||
INTERNAL = "internal",
|
||||
ACME = "acme"
|
||||
ACME = "acme",
|
||||
AZURE_AD_CS = "azure-ad-cs"
|
||||
}
|
||||
|
||||
export enum InternalCaType {
|
||||
@@ -17,3 +18,9 @@ export enum CaStatus {
|
||||
export enum CaRenewalType {
|
||||
EXISTING = "existing"
|
||||
}
|
||||
|
||||
export enum CaCapability {
|
||||
ISSUE_CERTIFICATES = "issue-certificates",
|
||||
REVOKE_CERTIFICATES = "revoke-certificates",
|
||||
RENEW_CERTIFICATES = "renew-certificates"
|
||||
}
|
||||
|
@@ -1,6 +1,29 @@
|
||||
import { CaType } from "./certificate-authority-enums";
|
||||
import { CaCapability, CaType } from "./certificate-authority-enums";
|
||||
|
||||
export const CERTIFICATE_AUTHORITIES_TYPE_MAP: Record<CaType, string> = {
|
||||
[CaType.INTERNAL]: "Internal",
|
||||
[CaType.ACME]: "ACME"
|
||||
[CaType.ACME]: "ACME",
|
||||
[CaType.AZURE_AD_CS]: "Azure AD Certificate Service"
|
||||
};
|
||||
|
||||
export const CERTIFICATE_AUTHORITIES_CAPABILITIES_MAP: Record<CaType, CaCapability[]> = {
|
||||
[CaType.INTERNAL]: [
|
||||
CaCapability.ISSUE_CERTIFICATES,
|
||||
CaCapability.REVOKE_CERTIFICATES,
|
||||
CaCapability.RENEW_CERTIFICATES
|
||||
],
|
||||
[CaType.ACME]: [CaCapability.ISSUE_CERTIFICATES, CaCapability.REVOKE_CERTIFICATES, CaCapability.RENEW_CERTIFICATES],
|
||||
[CaType.AZURE_AD_CS]: [
|
||||
CaCapability.ISSUE_CERTIFICATES,
|
||||
CaCapability.RENEW_CERTIFICATES
|
||||
// Note: REVOKE_CERTIFICATES intentionally omitted - not supported by ADCS connector
|
||||
]
|
||||
};
|
||||
|
||||
/**
|
||||
* Check if a certificate authority type supports a specific capability
|
||||
*/
|
||||
export const caSupportsCapability = (caType: CaType, capability: CaCapability): boolean => {
|
||||
const capabilities = CERTIFICATE_AUTHORITIES_CAPABILITIES_MAP[caType] || [];
|
||||
return capabilities.includes(capability);
|
||||
};
|
||||
|
@@ -21,6 +21,7 @@ import { TCertificateSecretDALFactory } from "../certificate/certificate-secret-
|
||||
import { TPkiSubscriberDALFactory } from "../pki-subscriber/pki-subscriber-dal";
|
||||
import { SubscriberOperationStatus } from "../pki-subscriber/pki-subscriber-types";
|
||||
import { AcmeCertificateAuthorityFns } from "./acme/acme-certificate-authority-fns";
|
||||
import { AzureAdCsCertificateAuthorityFns } from "./azure-ad-cs/azure-ad-cs-certificate-authority-fns";
|
||||
import { TCertificateAuthorityDALFactory } from "./certificate-authority-dal";
|
||||
import { CaType } from "./certificate-authority-enums";
|
||||
import { keyAlgorithmToAlgCfg } from "./certificate-authority-fns";
|
||||
@@ -33,7 +34,7 @@ import {
|
||||
|
||||
type TCertificateAuthorityQueueFactoryDep = {
|
||||
certificateAuthorityDAL: TCertificateAuthorityDALFactory;
|
||||
appConnectionDAL: Pick<TAppConnectionDALFactory, "findById" | "update">;
|
||||
appConnectionDAL: Pick<TAppConnectionDALFactory, "findById" | "update" | "updateById">;
|
||||
appConnectionService: Pick<TAppConnectionServiceFactory, "connectAppConnectionById">;
|
||||
externalCertificateAuthorityDAL: Pick<TExternalCertificateAuthorityDALFactory, "create" | "update">;
|
||||
keyStore: Pick<TKeyStoreFactory, "acquireLock" | "setItemWithExpiry" | "getItem">;
|
||||
@@ -82,6 +83,19 @@ export const certificateAuthorityQueueFactory = ({
|
||||
projectDAL
|
||||
});
|
||||
|
||||
const azureAdCsFns = AzureAdCsCertificateAuthorityFns({
|
||||
appConnectionDAL,
|
||||
appConnectionService,
|
||||
certificateAuthorityDAL,
|
||||
externalCertificateAuthorityDAL,
|
||||
certificateDAL,
|
||||
certificateBodyDAL,
|
||||
certificateSecretDAL,
|
||||
kmsService,
|
||||
pkiSubscriberDAL,
|
||||
projectDAL
|
||||
});
|
||||
|
||||
// TODO 1: auto-periodic rotation
|
||||
// TODO 2: manual rotation
|
||||
|
||||
@@ -158,6 +172,13 @@ export const certificateAuthorityQueueFactory = ({
|
||||
lastOperationMessage: "Certificate ordered successfully",
|
||||
lastOperationAt: new Date()
|
||||
});
|
||||
} else if (caType === CaType.AZURE_AD_CS) {
|
||||
await azureAdCsFns.orderSubscriberCertificate(subscriberId);
|
||||
await pkiSubscriberDAL.updateById(subscriberId, {
|
||||
lastOperationStatus: SubscriberOperationStatus.SUCCESS,
|
||||
lastOperationMessage: "Certificate ordered successfully",
|
||||
lastOperationAt: new Date()
|
||||
});
|
||||
}
|
||||
} catch (e: unknown) {
|
||||
if (e instanceof Error) {
|
||||
|
@@ -22,6 +22,14 @@ import {
|
||||
TCreateAcmeCertificateAuthorityDTO,
|
||||
TUpdateAcmeCertificateAuthorityDTO
|
||||
} from "./acme/acme-certificate-authority-types";
|
||||
import {
|
||||
AzureAdCsCertificateAuthorityFns,
|
||||
castDbEntryToAzureAdCsCertificateAuthority
|
||||
} from "./azure-ad-cs/azure-ad-cs-certificate-authority-fns";
|
||||
import {
|
||||
TCreateAzureAdCsCertificateAuthorityDTO,
|
||||
TUpdateAzureAdCsCertificateAuthorityDTO
|
||||
} from "./azure-ad-cs/azure-ad-cs-certificate-authority-types";
|
||||
import { TCertificateAuthorityDALFactory } from "./certificate-authority-dal";
|
||||
import { CaType } from "./certificate-authority-enums";
|
||||
import {
|
||||
@@ -34,7 +42,7 @@ import { TInternalCertificateAuthorityServiceFactory } from "./internal/internal
|
||||
import { TCreateInternalCertificateAuthorityDTO } from "./internal/internal-certificate-authority-types";
|
||||
|
||||
type TCertificateAuthorityServiceFactoryDep = {
|
||||
appConnectionDAL: Pick<TAppConnectionDALFactory, "findById" | "update">;
|
||||
appConnectionDAL: Pick<TAppConnectionDALFactory, "findById" | "update" | "updateById">;
|
||||
appConnectionService: Pick<TAppConnectionServiceFactory, "connectAppConnectionById">;
|
||||
certificateAuthorityDAL: Pick<
|
||||
TCertificateAuthorityDALFactory,
|
||||
@@ -91,6 +99,19 @@ export const certificateAuthorityServiceFactory = ({
|
||||
projectDAL
|
||||
});
|
||||
|
||||
const azureAdCsFns = AzureAdCsCertificateAuthorityFns({
|
||||
appConnectionDAL,
|
||||
appConnectionService,
|
||||
certificateAuthorityDAL,
|
||||
externalCertificateAuthorityDAL,
|
||||
certificateDAL,
|
||||
certificateBodyDAL,
|
||||
certificateSecretDAL,
|
||||
kmsService,
|
||||
pkiSubscriberDAL,
|
||||
projectDAL
|
||||
});
|
||||
|
||||
const createCertificateAuthority = async (
|
||||
{ type, projectId, name, enableDirectIssuance, configuration, status }: TCreateCertificateAuthorityDTO,
|
||||
actor: OrgServiceActor
|
||||
@@ -146,6 +167,17 @@ export const certificateAuthorityServiceFactory = ({
|
||||
});
|
||||
}
|
||||
|
||||
if (type === CaType.AZURE_AD_CS) {
|
||||
return azureAdCsFns.createCertificateAuthority({
|
||||
name,
|
||||
projectId,
|
||||
configuration: configuration as TCreateAzureAdCsCertificateAuthorityDTO["configuration"],
|
||||
enableDirectIssuance,
|
||||
status,
|
||||
actor
|
||||
});
|
||||
}
|
||||
|
||||
throw new BadRequestError({ message: "Invalid certificate authority type" });
|
||||
};
|
||||
|
||||
@@ -205,6 +237,10 @@ export const certificateAuthorityServiceFactory = ({
|
||||
return castDbEntryToAcmeCertificateAuthority(certificateAuthority);
|
||||
}
|
||||
|
||||
if (type === CaType.AZURE_AD_CS) {
|
||||
return castDbEntryToAzureAdCsCertificateAuthority(certificateAuthority);
|
||||
}
|
||||
|
||||
throw new BadRequestError({ message: "Invalid certificate authority type" });
|
||||
};
|
||||
|
||||
@@ -249,6 +285,10 @@ export const certificateAuthorityServiceFactory = ({
|
||||
return acmeFns.listCertificateAuthorities({ projectId });
|
||||
}
|
||||
|
||||
if (type === CaType.AZURE_AD_CS) {
|
||||
return azureAdCsFns.listCertificateAuthorities({ projectId });
|
||||
}
|
||||
|
||||
throw new BadRequestError({ message: "Invalid certificate authority type" });
|
||||
};
|
||||
|
||||
@@ -323,6 +363,17 @@ export const certificateAuthorityServiceFactory = ({
|
||||
});
|
||||
}
|
||||
|
||||
if (type === CaType.AZURE_AD_CS) {
|
||||
return azureAdCsFns.updateCertificateAuthority({
|
||||
id: certificateAuthority.id,
|
||||
configuration: configuration as TUpdateAzureAdCsCertificateAuthorityDTO["configuration"],
|
||||
enableDirectIssuance,
|
||||
actor,
|
||||
status,
|
||||
name
|
||||
});
|
||||
}
|
||||
|
||||
throw new BadRequestError({ message: "Invalid certificate authority type" });
|
||||
};
|
||||
|
||||
@@ -384,14 +435,54 @@ export const certificateAuthorityServiceFactory = ({
|
||||
return castDbEntryToAcmeCertificateAuthority(certificateAuthority);
|
||||
}
|
||||
|
||||
if (type === CaType.AZURE_AD_CS) {
|
||||
return castDbEntryToAzureAdCsCertificateAuthority(certificateAuthority);
|
||||
}
|
||||
|
||||
throw new BadRequestError({ message: "Invalid certificate authority type" });
|
||||
};
|
||||
|
||||
const getAzureAdcsTemplates = async ({
|
||||
caId,
|
||||
projectId,
|
||||
actor,
|
||||
actorId,
|
||||
actorAuthMethod,
|
||||
actorOrgId
|
||||
}: {
|
||||
caId: string;
|
||||
projectId: string;
|
||||
actor: OrgServiceActor["type"];
|
||||
actorId: string;
|
||||
actorAuthMethod: OrgServiceActor["authMethod"];
|
||||
actorOrgId?: string;
|
||||
}) => {
|
||||
const { permission } = await permissionService.getProjectPermission({
|
||||
actor,
|
||||
actorId,
|
||||
projectId,
|
||||
actorAuthMethod,
|
||||
actorOrgId,
|
||||
actionProjectType: ActionProjectType.CertificateManager
|
||||
});
|
||||
|
||||
ForbiddenError.from(permission).throwUnlessCan(
|
||||
ProjectPermissionActions.Read,
|
||||
ProjectPermissionSub.CertificateAuthorities
|
||||
);
|
||||
|
||||
return azureAdCsFns.getTemplates({
|
||||
caId,
|
||||
projectId
|
||||
});
|
||||
};
|
||||
|
||||
return {
|
||||
createCertificateAuthority,
|
||||
findCertificateAuthorityByNameAndProjectId,
|
||||
listCertificateAuthoritiesByProjectId,
|
||||
updateCertificateAuthority,
|
||||
deleteCertificateAuthority
|
||||
deleteCertificateAuthority,
|
||||
getAzureAdcsTemplates
|
||||
};
|
||||
};
|
||||
|
@@ -1,13 +1,23 @@
|
||||
import { TAcmeCertificateAuthority, TAcmeCertificateAuthorityInput } from "./acme/acme-certificate-authority-types";
|
||||
import {
|
||||
TAzureAdCsCertificateAuthority,
|
||||
TCreateAzureAdCsCertificateAuthorityDTO
|
||||
} from "./azure-ad-cs/azure-ad-cs-certificate-authority-types";
|
||||
import { CaType } from "./certificate-authority-enums";
|
||||
import {
|
||||
TInternalCertificateAuthority,
|
||||
TInternalCertificateAuthorityInput
|
||||
} from "./internal/internal-certificate-authority-types";
|
||||
|
||||
export type TCertificateAuthority = TInternalCertificateAuthority | TAcmeCertificateAuthority;
|
||||
export type TCertificateAuthority =
|
||||
| TInternalCertificateAuthority
|
||||
| TAcmeCertificateAuthority
|
||||
| TAzureAdCsCertificateAuthority;
|
||||
|
||||
export type TCertificateAuthorityInput = TInternalCertificateAuthorityInput | TAcmeCertificateAuthorityInput;
|
||||
export type TCertificateAuthorityInput =
|
||||
| TInternalCertificateAuthorityInput
|
||||
| TAcmeCertificateAuthorityInput
|
||||
| TCreateAzureAdCsCertificateAuthorityDTO;
|
||||
|
||||
export type TCreateCertificateAuthorityDTO = Omit<TCertificateAuthority, "id">;
|
||||
|
||||
|
@@ -36,12 +36,18 @@ import { validateAndMapAltNameType } from "../certificate-authority-validators";
|
||||
import { TIssueCertWithTemplateDTO } from "./internal-certificate-authority-types";
|
||||
|
||||
type TInternalCertificateAuthorityFnsDeps = {
|
||||
certificateAuthorityDAL: Pick<TCertificateAuthorityDALFactory, "findByIdWithAssociatedCa" | "findById">;
|
||||
certificateAuthorityDAL: Pick<
|
||||
TCertificateAuthorityDALFactory,
|
||||
"findByIdWithAssociatedCa" | "findById" | "create" | "transaction" | "updateById" | "findWithAssociatedCa"
|
||||
>;
|
||||
certificateAuthorityCertDAL: Pick<TCertificateAuthorityCertDALFactory, "findById">;
|
||||
certificateAuthoritySecretDAL: Pick<TCertificateAuthoritySecretDALFactory, "findOne">;
|
||||
certificateAuthorityCrlDAL: Pick<TCertificateAuthorityCrlDALFactory, "findOne">;
|
||||
projectDAL: Pick<TProjectDALFactory, "findById" | "transaction" | "findOne" | "updateById">;
|
||||
kmsService: Pick<TKmsServiceFactory, "decryptWithKmsKey" | "encryptWithKmsKey" | "generateKmsKey">;
|
||||
kmsService: Pick<
|
||||
TKmsServiceFactory,
|
||||
"decryptWithKmsKey" | "encryptWithKmsKey" | "generateKmsKey" | "createCipherPairWithDataKey"
|
||||
>;
|
||||
certificateDAL: Pick<TCertificateDALFactory, "create" | "transaction">;
|
||||
certificateBodyDAL: Pick<TCertificateBodyDALFactory, "create">;
|
||||
certificateSecretDAL: Pick<TCertificateSecretDALFactory, "create">;
|
||||
|
@@ -14,6 +14,8 @@ import { TCertificateBodyDALFactory } from "@app/services/certificate/certificat
|
||||
import { TCertificateDALFactory } from "@app/services/certificate/certificate-dal";
|
||||
import { TCertificateAuthorityCertDALFactory } from "@app/services/certificate-authority/certificate-authority-cert-dal";
|
||||
import { TCertificateAuthorityDALFactory } from "@app/services/certificate-authority/certificate-authority-dal";
|
||||
import { CaCapability, CaType } from "@app/services/certificate-authority/certificate-authority-enums";
|
||||
import { caSupportsCapability } from "@app/services/certificate-authority/certificate-authority-maps";
|
||||
import { TCertificateAuthoritySecretDALFactory } from "@app/services/certificate-authority/certificate-authority-secret-dal";
|
||||
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
|
||||
import { TPkiCollectionDALFactory } from "@app/services/pki-collection/pki-collection-dal";
|
||||
@@ -184,9 +186,11 @@ export const certificateServiceFactory = ({
|
||||
|
||||
const ca = await certificateAuthorityDAL.findByIdWithAssociatedCa(cert.caId);
|
||||
|
||||
if (ca.externalCa?.id) {
|
||||
// Check if the CA type supports revocation
|
||||
const caType = (ca.externalCa?.type as CaType) ?? CaType.INTERNAL;
|
||||
if (!caSupportsCapability(caType, CaCapability.REVOKE_CERTIFICATES)) {
|
||||
throw new BadRequestError({
|
||||
message: "Cannot revoke external certificates"
|
||||
message: "Certificate revocation is not supported by this certificate authority type"
|
||||
});
|
||||
}
|
||||
|
||||
@@ -218,18 +222,37 @@ export const certificateServiceFactory = ({
|
||||
}
|
||||
);
|
||||
|
||||
// rebuild CRL (TODO: move to interval-based cron job)
|
||||
await rebuildCaCrl({
|
||||
caId: ca.id,
|
||||
certificateAuthorityDAL,
|
||||
certificateAuthorityCrlDAL,
|
||||
certificateAuthoritySecretDAL,
|
||||
projectDAL,
|
||||
certificateDAL,
|
||||
kmsService
|
||||
});
|
||||
// Note: External CA revocation handling would go here for supported CA types
|
||||
// Currently, only internal CAs and ACME CAs support revocation
|
||||
|
||||
return { revokedAt, cert, ca: expandInternalCa(ca) };
|
||||
// rebuild CRL (TODO: move to interval-based cron job)
|
||||
// Only rebuild CRL for internal CAs - external CAs manage their own CRLs
|
||||
if (!ca.externalCa?.id) {
|
||||
await rebuildCaCrl({
|
||||
caId: ca.id,
|
||||
certificateAuthorityDAL,
|
||||
certificateAuthorityCrlDAL,
|
||||
certificateAuthoritySecretDAL,
|
||||
projectDAL,
|
||||
certificateDAL,
|
||||
kmsService
|
||||
});
|
||||
}
|
||||
|
||||
// Return appropriate CA format based on CA type
|
||||
const caResult = ca.externalCa?.id
|
||||
? {
|
||||
id: ca.id,
|
||||
name: ca.name,
|
||||
projectId: ca.projectId,
|
||||
status: ca.status,
|
||||
enableDirectIssuance: ca.enableDirectIssuance,
|
||||
type: ca.externalCa.type,
|
||||
externalCa: ca.externalCa
|
||||
}
|
||||
: expandInternalCa(ca);
|
||||
|
||||
return { revokedAt, cert, ca: caResult };
|
||||
};
|
||||
|
||||
/**
|
||||
|
@@ -1,12 +1,21 @@
|
||||
import https from "node:https";
|
||||
|
||||
import axios, { AxiosInstance } from "axios";
|
||||
import { v4 as uuidv4 } from "uuid";
|
||||
|
||||
import { TGatewayServiceFactory } from "@app/ee/services/gateway/gateway-service";
|
||||
import { BadRequestError } from "@app/lib/errors";
|
||||
import { GatewayProxyProtocol, withGatewayProxy } from "@app/lib/gateway";
|
||||
import { logger } from "@app/lib/logger";
|
||||
import { blockLocalAndPrivateIpAddresses } from "@app/lib/validator";
|
||||
|
||||
import { InfisicalImportData, VaultMappingType } from "../external-migration-types";
|
||||
|
||||
enum KvVersion {
|
||||
V1 = "1",
|
||||
V2 = "2"
|
||||
}
|
||||
|
||||
type VaultData = {
|
||||
namespace: string;
|
||||
mount: string;
|
||||
@@ -14,7 +23,42 @@ type VaultData = {
|
||||
secretData: Record<string, string>;
|
||||
};
|
||||
|
||||
const vaultFactory = () => {
|
||||
const vaultFactory = (gatewayService: Pick<TGatewayServiceFactory, "fnGetGatewayClientTlsByGatewayId">) => {
|
||||
const $gatewayProxyWrapper = async <T>(
|
||||
inputs: {
|
||||
gatewayId: string;
|
||||
targetHost?: string;
|
||||
targetPort?: number;
|
||||
},
|
||||
gatewayCallback: (host: string, port: number, httpsAgent?: https.Agent) => Promise<T>
|
||||
): Promise<T> => {
|
||||
const relayDetails = await gatewayService.fnGetGatewayClientTlsByGatewayId(inputs.gatewayId);
|
||||
const [relayHost, relayPort] = relayDetails.relayAddress.split(":");
|
||||
|
||||
const callbackResult = await withGatewayProxy(
|
||||
async (port, httpsAgent) => {
|
||||
const res = await gatewayCallback("http://localhost", port, httpsAgent);
|
||||
return res;
|
||||
},
|
||||
{
|
||||
protocol: GatewayProxyProtocol.Http,
|
||||
targetHost: inputs.targetHost,
|
||||
targetPort: inputs.targetPort,
|
||||
relayHost,
|
||||
relayPort: Number(relayPort),
|
||||
identityId: relayDetails.identityId,
|
||||
orgId: relayDetails.orgId,
|
||||
tlsOptions: {
|
||||
ca: relayDetails.certChain,
|
||||
cert: relayDetails.certificate,
|
||||
key: relayDetails.privateKey.toString()
|
||||
}
|
||||
}
|
||||
);
|
||||
|
||||
return callbackResult;
|
||||
};
|
||||
|
||||
const getMounts = async (request: AxiosInstance) => {
|
||||
const response = await request
|
||||
.get<{
|
||||
@@ -31,11 +75,24 @@ const vaultFactory = () => {
|
||||
|
||||
const getPaths = async (
|
||||
request: AxiosInstance,
|
||||
{ mountPath, secretPath = "" }: { mountPath: string; secretPath?: string }
|
||||
{ mountPath, secretPath = "" }: { mountPath: string; secretPath?: string },
|
||||
kvVersion: KvVersion
|
||||
) => {
|
||||
try {
|
||||
// For KV v2: /v1/{mount}/metadata/{path}?list=true
|
||||
const path = secretPath ? `${mountPath}/metadata/${secretPath}` : `${mountPath}/metadata`;
|
||||
if (kvVersion === KvVersion.V2) {
|
||||
// For KV v2: /v1/{mount}/metadata/{path}?list=true
|
||||
const path = secretPath ? `${mountPath}/metadata/${secretPath}` : `${mountPath}/metadata`;
|
||||
const response = await request.get<{
|
||||
data: {
|
||||
keys: string[];
|
||||
};
|
||||
}>(`/v1/${path}?list=true`);
|
||||
|
||||
return response.data.data.keys;
|
||||
}
|
||||
|
||||
// kv version v1: /v1/{mount}?list=true
|
||||
const path = secretPath ? `${mountPath}/${secretPath}` : mountPath;
|
||||
const response = await request.get<{
|
||||
data: {
|
||||
keys: string[];
|
||||
@@ -56,21 +113,42 @@ const vaultFactory = () => {
|
||||
|
||||
const getSecrets = async (
|
||||
request: AxiosInstance,
|
||||
{ mountPath, secretPath }: { mountPath: string; secretPath: string }
|
||||
{ mountPath, secretPath }: { mountPath: string; secretPath: string },
|
||||
kvVersion: KvVersion
|
||||
) => {
|
||||
// For KV v2: /v1/{mount}/data/{path}
|
||||
if (kvVersion === KvVersion.V2) {
|
||||
// For KV v2: /v1/{mount}/data/{path}
|
||||
const response = await request
|
||||
.get<{
|
||||
data: {
|
||||
data: Record<string, string>; // KV v2 has nested data structure
|
||||
metadata: {
|
||||
created_time: string;
|
||||
deletion_time: string;
|
||||
destroyed: boolean;
|
||||
version: number;
|
||||
};
|
||||
};
|
||||
}>(`/v1/${mountPath}/data/${secretPath}`)
|
||||
.catch((err) => {
|
||||
if (axios.isAxiosError(err)) {
|
||||
logger.error(err.response?.data, "External migration: Failed to get Vault secret");
|
||||
}
|
||||
throw err;
|
||||
});
|
||||
|
||||
return response.data.data.data;
|
||||
}
|
||||
|
||||
// kv version v1
|
||||
|
||||
const response = await request
|
||||
.get<{
|
||||
data: {
|
||||
data: Record<string, string>; // KV v2 has nested data structure
|
||||
metadata: {
|
||||
created_time: string;
|
||||
deletion_time: string;
|
||||
destroyed: boolean;
|
||||
version: number;
|
||||
};
|
||||
};
|
||||
}>(`/v1/${mountPath}/data/${secretPath}`)
|
||||
data: Record<string, string>; // KV v1 has flat data structure
|
||||
lease_duration: number;
|
||||
lease_id: string;
|
||||
renewable: boolean;
|
||||
}>(`/v1/${mountPath}/${secretPath}`)
|
||||
.catch((err) => {
|
||||
if (axios.isAxiosError(err)) {
|
||||
logger.error(err.response?.data, "External migration: Failed to get Vault secret");
|
||||
@@ -78,7 +156,7 @@ const vaultFactory = () => {
|
||||
throw err;
|
||||
});
|
||||
|
||||
return response.data.data.data;
|
||||
return response.data.data;
|
||||
};
|
||||
|
||||
// helper function to check if a mount is KV v2 (will be useful if we add support for Vault KV v1)
|
||||
@@ -89,9 +167,10 @@ const vaultFactory = () => {
|
||||
const recursivelyGetAllPaths = async (
|
||||
request: AxiosInstance,
|
||||
mountPath: string,
|
||||
kvVersion: KvVersion,
|
||||
currentPath: string = ""
|
||||
): Promise<string[]> => {
|
||||
const paths = await getPaths(request, { mountPath, secretPath: currentPath });
|
||||
const paths = await getPaths(request, { mountPath, secretPath: currentPath }, kvVersion);
|
||||
|
||||
if (paths === null || paths.length === 0) {
|
||||
return [];
|
||||
@@ -105,7 +184,7 @@ const vaultFactory = () => {
|
||||
|
||||
if (path.endsWith("/")) {
|
||||
// it's a folder so we recurse into it
|
||||
const subSecrets = await recursivelyGetAllPaths(request, mountPath, fullItemPath);
|
||||
const subSecrets = await recursivelyGetAllPaths(request, mountPath, kvVersion, fullItemPath);
|
||||
allSecrets.push(...subSecrets);
|
||||
} else {
|
||||
// it's a secret so we add it to our results
|
||||
@@ -119,60 +198,93 @@ const vaultFactory = () => {
|
||||
async function collectVaultData({
|
||||
baseUrl,
|
||||
namespace,
|
||||
accessToken
|
||||
accessToken,
|
||||
gatewayId
|
||||
}: {
|
||||
baseUrl: string;
|
||||
namespace?: string;
|
||||
accessToken: string;
|
||||
gatewayId?: string;
|
||||
}): Promise<VaultData[]> {
|
||||
const request = axios.create({
|
||||
baseURL: baseUrl,
|
||||
headers: {
|
||||
"X-Vault-Token": accessToken,
|
||||
...(namespace ? { "X-Vault-Namespace": namespace } : {})
|
||||
const getData = async (host: string, port?: number, httpsAgent?: https.Agent) => {
|
||||
const allData: VaultData[] = [];
|
||||
|
||||
const request = axios.create({
|
||||
baseURL: port ? `${host}:${port}` : host,
|
||||
headers: {
|
||||
"X-Vault-Token": accessToken,
|
||||
...(namespace ? { "X-Vault-Namespace": namespace } : {})
|
||||
},
|
||||
httpsAgent
|
||||
});
|
||||
|
||||
// Get all mounts in this namespace
|
||||
const mounts = await getMounts(request);
|
||||
|
||||
for (const mount of Object.keys(mounts)) {
|
||||
if (!mount.endsWith("/")) {
|
||||
delete mounts[mount];
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
const allData: VaultData[] = [];
|
||||
for await (const [mountPath, mountInfo] of Object.entries(mounts)) {
|
||||
// skip non-KV mounts
|
||||
if (!mountInfo.type.startsWith("kv")) {
|
||||
// eslint-disable-next-line no-continue
|
||||
continue;
|
||||
}
|
||||
|
||||
// Get all mounts in this namespace
|
||||
const mounts = await getMounts(request);
|
||||
const kvVersion = mountInfo.options?.version === "2" ? KvVersion.V2 : KvVersion.V1;
|
||||
|
||||
for (const mount of Object.keys(mounts)) {
|
||||
if (!mount.endsWith("/")) {
|
||||
delete mounts[mount];
|
||||
// get all paths in this mount
|
||||
const paths = await recursivelyGetAllPaths(request, `${mountPath.replace(/\/$/, "")}`, kvVersion);
|
||||
|
||||
const cleanMountPath = mountPath.replace(/\/$/, "");
|
||||
|
||||
for await (const secretPath of paths) {
|
||||
// get the actual secret data
|
||||
const secretData = await getSecrets(
|
||||
request,
|
||||
{
|
||||
mountPath: cleanMountPath,
|
||||
secretPath: secretPath.replace(`${cleanMountPath}/`, "")
|
||||
},
|
||||
kvVersion
|
||||
);
|
||||
|
||||
allData.push({
|
||||
namespace: namespace || "",
|
||||
mount: mountPath.replace(/\/$/, ""),
|
||||
path: secretPath.replace(`${cleanMountPath}/`, ""),
|
||||
secretData
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
return allData;
|
||||
};
|
||||
|
||||
let data;
|
||||
|
||||
if (gatewayId) {
|
||||
const url = new URL(baseUrl);
|
||||
|
||||
const { port, protocol, hostname } = url;
|
||||
const cleanedProtocol = protocol.slice(0, -1);
|
||||
|
||||
data = await $gatewayProxyWrapper(
|
||||
{
|
||||
gatewayId,
|
||||
targetHost: `${cleanedProtocol}://${hostname}`,
|
||||
targetPort: port ? Number(port) : 8200 // 8200, default port for Vault self-hosted/dedicated
|
||||
},
|
||||
getData
|
||||
);
|
||||
} else {
|
||||
data = await getData(baseUrl);
|
||||
}
|
||||
|
||||
for await (const [mountPath, mountInfo] of Object.entries(mounts)) {
|
||||
// skip non-KV mounts
|
||||
if (!mountInfo.type.startsWith("kv")) {
|
||||
// eslint-disable-next-line no-continue
|
||||
continue;
|
||||
}
|
||||
|
||||
// get all paths in this mount
|
||||
const paths = await recursivelyGetAllPaths(request, `${mountPath.replace(/\/$/, "")}`);
|
||||
|
||||
const cleanMountPath = mountPath.replace(/\/$/, "");
|
||||
|
||||
for await (const secretPath of paths) {
|
||||
// get the actual secret data
|
||||
const secretData = await getSecrets(request, {
|
||||
mountPath: cleanMountPath,
|
||||
secretPath: secretPath.replace(`${cleanMountPath}/`, "")
|
||||
});
|
||||
|
||||
allData.push({
|
||||
namespace: namespace || "",
|
||||
mount: mountPath.replace(/\/$/, ""),
|
||||
path: secretPath.replace(`${cleanMountPath}/`, ""),
|
||||
secretData
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
return allData;
|
||||
return data;
|
||||
}
|
||||
|
||||
return {
|
||||
@@ -296,17 +408,126 @@ export const transformToInfisicalFormatNamespaceToProjects = (
|
||||
};
|
||||
};
|
||||
|
||||
export const importVaultDataFn = async ({
|
||||
vaultAccessToken,
|
||||
vaultNamespace,
|
||||
vaultUrl,
|
||||
mappingType
|
||||
}: {
|
||||
vaultAccessToken: string;
|
||||
vaultNamespace?: string;
|
||||
vaultUrl: string;
|
||||
mappingType: VaultMappingType;
|
||||
}) => {
|
||||
export const transformToInfisicalFormatKeyVaultToProjectsCustomC1 = (vaultData: VaultData[]): InfisicalImportData => {
|
||||
const projects: Array<{ name: string; id: string }> = [];
|
||||
const environments: Array<{ name: string; id: string; projectId: string; envParentId?: string }> = [];
|
||||
const folders: Array<{ id: string; name: string; environmentId: string; parentFolderId?: string }> = [];
|
||||
const secrets: Array<{ id: string; name: string; environmentId: string; value: string; folderId?: string }> = [];
|
||||
|
||||
// track created entities to avoid duplicates
|
||||
const projectMap = new Map<string, string>(); // team name -> projectId
|
||||
const environmentMap = new Map<string, string>(); // team-name:envName -> environmentId
|
||||
const folderMap = new Map<string, string>(); // team-name:envName:folderPath -> folderId
|
||||
|
||||
for (const data of vaultData) {
|
||||
const { path, secretData } = data;
|
||||
|
||||
const pathParts = path.split("/").filter(Boolean);
|
||||
if (pathParts.length < 2) {
|
||||
// eslint-disable-next-line no-continue
|
||||
continue;
|
||||
}
|
||||
|
||||
// first level: environment (dev, prod, staging, etc.)
|
||||
const environmentName = pathParts[0];
|
||||
// second level: team name (team1, team2, etc.)
|
||||
const teamName = pathParts[1];
|
||||
// remaining parts: folder structure
|
||||
const folderParts = pathParts.slice(2);
|
||||
|
||||
// create project (team) if if doesn't exist
|
||||
if (!projectMap.has(teamName)) {
|
||||
const projectId = uuidv4();
|
||||
projectMap.set(teamName, projectId);
|
||||
projects.push({
|
||||
name: teamName,
|
||||
id: projectId
|
||||
});
|
||||
}
|
||||
const projectId = projectMap.get(teamName)!;
|
||||
|
||||
// create environment (dev, prod, etc.) for team
|
||||
const envKey = `${teamName}:${environmentName}`;
|
||||
if (!environmentMap.has(envKey)) {
|
||||
const environmentId = uuidv4();
|
||||
environmentMap.set(envKey, environmentId);
|
||||
environments.push({
|
||||
name: environmentName,
|
||||
id: environmentId,
|
||||
projectId
|
||||
});
|
||||
}
|
||||
const environmentId = environmentMap.get(envKey)!;
|
||||
|
||||
// create folder structure for path segments
|
||||
let currentFolderId: string | undefined;
|
||||
let currentPath = "";
|
||||
|
||||
for (const folderName of folderParts) {
|
||||
currentPath = currentPath ? `${currentPath}/${folderName}` : folderName;
|
||||
const folderKey = `${teamName}:${environmentName}:${currentPath}`;
|
||||
|
||||
if (!folderMap.has(folderKey)) {
|
||||
const folderId = uuidv4();
|
||||
folderMap.set(folderKey, folderId);
|
||||
folders.push({
|
||||
id: folderId,
|
||||
name: folderName,
|
||||
environmentId,
|
||||
parentFolderId: currentFolderId || environmentId
|
||||
});
|
||||
currentFolderId = folderId;
|
||||
} else {
|
||||
currentFolderId = folderMap.get(folderKey)!;
|
||||
}
|
||||
}
|
||||
|
||||
for (const [key, value] of Object.entries(secretData)) {
|
||||
secrets.push({
|
||||
id: uuidv4(),
|
||||
name: key,
|
||||
environmentId,
|
||||
value: String(value),
|
||||
folderId: currentFolderId
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
projects,
|
||||
environments,
|
||||
folders,
|
||||
secrets
|
||||
};
|
||||
};
|
||||
|
||||
// refer to internal doc for more details on which ID's belong to which orgs.
|
||||
// when its a custom migration, then it doesn't matter which mapping type is used (as of now).
|
||||
export const vaultMigrationTransformMappings: Record<
|
||||
string,
|
||||
(vaultData: VaultData[], mappingType: VaultMappingType) => InfisicalImportData
|
||||
> = {
|
||||
"68c57ab3-cea5-41fc-ae38-e156b10c14d2": transformToInfisicalFormatKeyVaultToProjectsCustomC1
|
||||
} as const;
|
||||
|
||||
export const importVaultDataFn = async (
|
||||
{
|
||||
vaultAccessToken,
|
||||
vaultNamespace,
|
||||
vaultUrl,
|
||||
mappingType,
|
||||
gatewayId,
|
||||
orgId
|
||||
}: {
|
||||
vaultAccessToken: string;
|
||||
vaultNamespace?: string;
|
||||
vaultUrl: string;
|
||||
mappingType: VaultMappingType;
|
||||
gatewayId?: string;
|
||||
orgId: string;
|
||||
},
|
||||
{ gatewayService }: { gatewayService: Pick<TGatewayServiceFactory, "fnGetGatewayClientTlsByGatewayId"> }
|
||||
) => {
|
||||
await blockLocalAndPrivateIpAddresses(vaultUrl);
|
||||
|
||||
if (mappingType === VaultMappingType.Namespace && !vaultNamespace) {
|
||||
@@ -315,15 +536,33 @@ export const importVaultDataFn = async ({
|
||||
});
|
||||
}
|
||||
|
||||
const vaultApi = vaultFactory();
|
||||
let transformFn: (vaultData: VaultData[], mappingType: VaultMappingType) => InfisicalImportData;
|
||||
|
||||
if (mappingType === VaultMappingType.Custom) {
|
||||
transformFn = vaultMigrationTransformMappings[orgId];
|
||||
|
||||
if (!transformFn) {
|
||||
throw new BadRequestError({
|
||||
message: "Please contact our sales team to enable custom vault migrations."
|
||||
});
|
||||
}
|
||||
} else {
|
||||
transformFn = transformToInfisicalFormatNamespaceToProjects;
|
||||
}
|
||||
|
||||
logger.info(
|
||||
{ orgId, mappingType },
|
||||
`[importVaultDataFn]: Running ${orgId in vaultMigrationTransformMappings ? "custom" : "default"} transform`
|
||||
);
|
||||
|
||||
const vaultApi = vaultFactory(gatewayService);
|
||||
|
||||
const vaultData = await vaultApi.collectVaultData({
|
||||
accessToken: vaultAccessToken,
|
||||
baseUrl: vaultUrl,
|
||||
namespace: vaultNamespace
|
||||
namespace: vaultNamespace,
|
||||
gatewayId
|
||||
});
|
||||
|
||||
const infisicalData = transformToInfisicalFormatNamespaceToProjects(vaultData, mappingType);
|
||||
|
||||
return infisicalData;
|
||||
return transformFn(vaultData, mappingType);
|
||||
};
|
||||
|
@@ -1,17 +1,30 @@
|
||||
import { OrgMembershipRole } from "@app/db/schemas";
|
||||
import { TGatewayServiceFactory } from "@app/ee/services/gateway/gateway-service";
|
||||
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service-types";
|
||||
import { crypto } from "@app/lib/crypto/cryptography";
|
||||
import { BadRequestError, ForbiddenRequestError } from "@app/lib/errors";
|
||||
|
||||
import { TUserDALFactory } from "../user/user-dal";
|
||||
import { decryptEnvKeyDataFn, importVaultDataFn, parseEnvKeyDataFn } from "./external-migration-fns";
|
||||
import {
|
||||
decryptEnvKeyDataFn,
|
||||
importVaultDataFn,
|
||||
parseEnvKeyDataFn,
|
||||
vaultMigrationTransformMappings
|
||||
} from "./external-migration-fns";
|
||||
import { TExternalMigrationQueueFactory } from "./external-migration-queue";
|
||||
import { ExternalPlatforms, TImportEnvKeyDataDTO, TImportVaultDataDTO } from "./external-migration-types";
|
||||
import {
|
||||
ExternalMigrationProviders,
|
||||
ExternalPlatforms,
|
||||
THasCustomVaultMigrationDTO,
|
||||
TImportEnvKeyDataDTO,
|
||||
TImportVaultDataDTO
|
||||
} from "./external-migration-types";
|
||||
|
||||
type TExternalMigrationServiceFactoryDep = {
|
||||
permissionService: TPermissionServiceFactory;
|
||||
externalMigrationQueue: TExternalMigrationQueueFactory;
|
||||
userDAL: Pick<TUserDALFactory, "findById">;
|
||||
gatewayService: Pick<TGatewayServiceFactory, "fnGetGatewayClientTlsByGatewayId">;
|
||||
};
|
||||
|
||||
export type TExternalMigrationServiceFactory = ReturnType<typeof externalMigrationServiceFactory>;
|
||||
@@ -19,7 +32,8 @@ export type TExternalMigrationServiceFactory = ReturnType<typeof externalMigrati
|
||||
export const externalMigrationServiceFactory = ({
|
||||
permissionService,
|
||||
externalMigrationQueue,
|
||||
userDAL
|
||||
userDAL,
|
||||
gatewayService
|
||||
}: TExternalMigrationServiceFactoryDep) => {
|
||||
const importEnvKeyData = async ({
|
||||
decryptionKey,
|
||||
@@ -72,6 +86,7 @@ export const externalMigrationServiceFactory = ({
|
||||
vaultNamespace,
|
||||
mappingType,
|
||||
vaultUrl,
|
||||
gatewayId,
|
||||
actor,
|
||||
actorId,
|
||||
actorOrgId,
|
||||
@@ -91,12 +106,19 @@ export const externalMigrationServiceFactory = ({
|
||||
|
||||
const user = await userDAL.findById(actorId);
|
||||
|
||||
const vaultData = await importVaultDataFn({
|
||||
vaultAccessToken,
|
||||
vaultNamespace,
|
||||
vaultUrl,
|
||||
mappingType
|
||||
});
|
||||
const vaultData = await importVaultDataFn(
|
||||
{
|
||||
vaultAccessToken,
|
||||
vaultNamespace,
|
||||
vaultUrl,
|
||||
mappingType,
|
||||
gatewayId,
|
||||
orgId: actorOrgId
|
||||
},
|
||||
{
|
||||
gatewayService
|
||||
}
|
||||
);
|
||||
|
||||
const stringifiedJson = JSON.stringify({
|
||||
data: vaultData,
|
||||
@@ -117,8 +139,37 @@ export const externalMigrationServiceFactory = ({
|
||||
});
|
||||
};
|
||||
|
||||
const hasCustomVaultMigration = async ({
|
||||
actor,
|
||||
actorId,
|
||||
actorOrgId,
|
||||
actorAuthMethod,
|
||||
provider
|
||||
}: THasCustomVaultMigrationDTO) => {
|
||||
const { membership } = await permissionService.getOrgPermission(
|
||||
actor,
|
||||
actorId,
|
||||
actorOrgId,
|
||||
actorAuthMethod,
|
||||
actorOrgId
|
||||
);
|
||||
|
||||
if (membership.role !== OrgMembershipRole.Admin) {
|
||||
throw new ForbiddenRequestError({ message: "Only admins can check custom migration status" });
|
||||
}
|
||||
|
||||
if (provider !== ExternalMigrationProviders.Vault) {
|
||||
throw new BadRequestError({
|
||||
message: "Invalid provider. Vault is the only supported provider for custom migrations."
|
||||
});
|
||||
}
|
||||
|
||||
return actorOrgId in vaultMigrationTransformMappings;
|
||||
};
|
||||
|
||||
return {
|
||||
importEnvKeyData,
|
||||
importVaultData
|
||||
importVaultData,
|
||||
hasCustomVaultMigration
|
||||
};
|
||||
};
|
||||
|
@@ -4,7 +4,8 @@ import { ActorAuthMethod, ActorType } from "../auth/auth-type";
|
||||
|
||||
export enum VaultMappingType {
|
||||
Namespace = "namespace",
|
||||
KeyVault = "key-vault"
|
||||
KeyVault = "key-vault",
|
||||
Custom = "custom"
|
||||
}
|
||||
|
||||
export type InfisicalImportData = {
|
||||
@@ -26,11 +27,16 @@ export type TImportEnvKeyDataDTO = {
|
||||
encryptedJson: { nonce: string; data: string };
|
||||
} & Omit<TOrgPermission, "orgId">;
|
||||
|
||||
export type THasCustomVaultMigrationDTO = {
|
||||
provider: ExternalMigrationProviders;
|
||||
} & Omit<TOrgPermission, "orgId">;
|
||||
|
||||
export type TImportVaultDataDTO = {
|
||||
vaultAccessToken: string;
|
||||
vaultNamespace?: string;
|
||||
mappingType: VaultMappingType;
|
||||
vaultUrl: string;
|
||||
gatewayId?: string;
|
||||
} & Omit<TOrgPermission, "orgId">;
|
||||
|
||||
export type TImportInfisicalDataCreate = {
|
||||
@@ -110,3 +116,8 @@ export enum ExternalPlatforms {
|
||||
EnvKey = "EnvKey",
|
||||
Vault = "Vault"
|
||||
}
|
||||
|
||||
export enum ExternalMigrationProviders {
|
||||
Vault = "vault",
|
||||
EnvKey = "env-key"
|
||||
}
|
||||
|
@@ -8,10 +8,18 @@ import {
|
||||
validatePrivilegeChangeOperation
|
||||
} from "@app/ee/services/permission/permission-fns";
|
||||
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service-types";
|
||||
import { KeyStorePrefixes, TKeyStoreFactory } from "@app/keystore/keystore";
|
||||
import { getConfig } from "@app/lib/config/env";
|
||||
import { crypto } from "@app/lib/crypto/cryptography";
|
||||
import { BadRequestError, NotFoundError, PermissionBoundaryError, UnauthorizedError } from "@app/lib/errors";
|
||||
import {
|
||||
BadRequestError,
|
||||
NotFoundError,
|
||||
PermissionBoundaryError,
|
||||
RateLimitError,
|
||||
UnauthorizedError
|
||||
} from "@app/lib/errors";
|
||||
import { checkIPAgainstBlocklist, extractIPDetails, isValidIpOrCidr, TIp } from "@app/lib/ip";
|
||||
import { logger } from "@app/lib/logger";
|
||||
|
||||
import { ActorType, AuthTokenType } from "../auth/auth-type";
|
||||
import { TIdentityOrgDALFactory } from "../identity/identity-org-dal";
|
||||
@@ -22,6 +30,7 @@ import { TIdentityUaClientSecretDALFactory } from "./identity-ua-client-secret-d
|
||||
import { TIdentityUaDALFactory } from "./identity-ua-dal";
|
||||
import {
|
||||
TAttachUaDTO,
|
||||
TClearUaLockoutsDTO,
|
||||
TCreateUaClientSecretDTO,
|
||||
TGetUaClientSecretsDTO,
|
||||
TGetUaDTO,
|
||||
@@ -38,30 +47,33 @@ type TIdentityUaServiceFactoryDep = {
|
||||
identityOrgMembershipDAL: TIdentityOrgDALFactory;
|
||||
permissionService: Pick<TPermissionServiceFactory, "getOrgPermission">;
|
||||
licenseService: Pick<TLicenseServiceFactory, "getPlan">;
|
||||
keyStore: Pick<
|
||||
TKeyStoreFactory,
|
||||
"setItemWithExpiry" | "getItem" | "deleteItem" | "getKeysByPattern" | "deleteItems" | "acquireLock"
|
||||
>;
|
||||
};
|
||||
|
||||
export type TIdentityUaServiceFactory = ReturnType<typeof identityUaServiceFactory>;
|
||||
|
||||
type LockoutObject = {
|
||||
lockedOut: boolean;
|
||||
failedAttempts: number;
|
||||
};
|
||||
|
||||
export const identityUaServiceFactory = ({
|
||||
identityUaDAL,
|
||||
identityUaClientSecretDAL,
|
||||
identityAccessTokenDAL,
|
||||
identityOrgMembershipDAL,
|
||||
permissionService,
|
||||
licenseService
|
||||
licenseService,
|
||||
keyStore
|
||||
}: TIdentityUaServiceFactoryDep) => {
|
||||
const login = async (clientId: string, clientSecret: string, ip: string) => {
|
||||
const identityUa = await identityUaDAL.findOne({ clientId });
|
||||
if (!identityUa) {
|
||||
throw new NotFoundError({
|
||||
message: "No identity with specified client ID was found"
|
||||
});
|
||||
}
|
||||
|
||||
const identityMembershipOrg = await identityOrgMembershipDAL.findOne({ identityId: identityUa.identityId });
|
||||
if (!identityMembershipOrg) {
|
||||
throw new NotFoundError({
|
||||
message: "No identity with the org membership was found"
|
||||
throw new UnauthorizedError({
|
||||
message: "Invalid credentials"
|
||||
});
|
||||
}
|
||||
|
||||
@@ -69,119 +81,184 @@ export const identityUaServiceFactory = ({
|
||||
ipAddress: ip,
|
||||
trustedIps: identityUa.clientSecretTrustedIps as TIp[]
|
||||
});
|
||||
const clientSecretPrefix = clientSecret.slice(0, 4);
|
||||
const clientSecrtInfo = await identityUaClientSecretDAL.find({
|
||||
identityUAId: identityUa.id,
|
||||
isClientSecretRevoked: false,
|
||||
clientSecretPrefix
|
||||
});
|
||||
|
||||
let validClientSecretInfo: (typeof clientSecrtInfo)[0] | null = null;
|
||||
for await (const info of clientSecrtInfo) {
|
||||
const isMatch = await crypto.hashing().compareHash(clientSecret, info.clientSecretHash);
|
||||
const LOCKOUT_KEY = `lockout:identity:${identityUa.identityId}:${IdentityAuthMethod.UNIVERSAL_AUTH}:${clientId}`;
|
||||
|
||||
if (isMatch) {
|
||||
validClientSecretInfo = info;
|
||||
break;
|
||||
}
|
||||
let lock: Awaited<ReturnType<typeof keyStore.acquireLock>>;
|
||||
try {
|
||||
lock = await keyStore.acquireLock([KeyStorePrefixes.IdentityLockoutLock(LOCKOUT_KEY)], 500, {
|
||||
retryCount: 3,
|
||||
retryDelay: 300,
|
||||
retryJitter: 100
|
||||
});
|
||||
} catch (e) {
|
||||
logger.info(
|
||||
`identity login failed to acquire lock [identityId=${identityUa.identityId}] [authMethod=${IdentityAuthMethod.UNIVERSAL_AUTH}]`
|
||||
);
|
||||
throw new RateLimitError({ message: "Rate limit exceeded" });
|
||||
}
|
||||
|
||||
if (!validClientSecretInfo) throw new UnauthorizedError({ message: "Invalid credentials" });
|
||||
try {
|
||||
const lockoutRaw = await keyStore.getItem(LOCKOUT_KEY);
|
||||
|
||||
const { clientSecretTTL, clientSecretNumUses, clientSecretNumUsesLimit } = validClientSecretInfo;
|
||||
if (Number(clientSecretTTL) > 0) {
|
||||
const clientSecretCreated = new Date(validClientSecretInfo.createdAt);
|
||||
const ttlInMilliseconds = Number(clientSecretTTL) * 1000;
|
||||
const currentDate = new Date();
|
||||
const expirationTime = new Date(clientSecretCreated.getTime() + ttlInMilliseconds);
|
||||
let lockout: LockoutObject | undefined;
|
||||
if (lockoutRaw) {
|
||||
lockout = JSON.parse(lockoutRaw) as LockoutObject;
|
||||
}
|
||||
|
||||
if (currentDate > expirationTime) {
|
||||
if (lockout && lockout.lockedOut) {
|
||||
throw new UnauthorizedError({
|
||||
message: "This identity auth method is temporarily locked, please try again later"
|
||||
});
|
||||
}
|
||||
|
||||
const identityMembershipOrg = await identityOrgMembershipDAL.findOne({ identityId: identityUa.identityId });
|
||||
if (!identityMembershipOrg) {
|
||||
throw new UnauthorizedError({
|
||||
message: "Invalid credentials"
|
||||
});
|
||||
}
|
||||
|
||||
const clientSecretPrefix = clientSecret.slice(0, 4);
|
||||
const clientSecretInfo = await identityUaClientSecretDAL.find({
|
||||
identityUAId: identityUa.id,
|
||||
isClientSecretRevoked: false,
|
||||
clientSecretPrefix
|
||||
});
|
||||
|
||||
let validClientSecretInfo: (typeof clientSecretInfo)[0] | null = null;
|
||||
for await (const info of clientSecretInfo) {
|
||||
const isMatch = await crypto.hashing().compareHash(clientSecret, info.clientSecretHash);
|
||||
|
||||
if (isMatch) {
|
||||
validClientSecretInfo = info;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if (!validClientSecretInfo) {
|
||||
if (identityUa.lockoutEnabled) {
|
||||
if (!lockout) {
|
||||
lockout = {
|
||||
lockedOut: false,
|
||||
failedAttempts: 0
|
||||
};
|
||||
}
|
||||
|
||||
lockout.failedAttempts += 1;
|
||||
if (lockout.failedAttempts >= identityUa.lockoutThreshold) {
|
||||
lockout.lockedOut = true;
|
||||
}
|
||||
|
||||
await keyStore.setItemWithExpiry(
|
||||
LOCKOUT_KEY,
|
||||
lockout.lockedOut ? identityUa.lockoutDurationSeconds : identityUa.lockoutCounterResetSeconds,
|
||||
JSON.stringify(lockout)
|
||||
);
|
||||
}
|
||||
|
||||
throw new UnauthorizedError({ message: "Invalid credentials" });
|
||||
} else if (lockout) {
|
||||
await keyStore.deleteItem(LOCKOUT_KEY);
|
||||
}
|
||||
|
||||
const { clientSecretTTL, clientSecretNumUses, clientSecretNumUsesLimit } = validClientSecretInfo;
|
||||
if (Number(clientSecretTTL) > 0) {
|
||||
const clientSecretCreated = new Date(validClientSecretInfo.createdAt);
|
||||
const ttlInMilliseconds = Number(clientSecretTTL) * 1000;
|
||||
const currentDate = new Date();
|
||||
const expirationTime = new Date(clientSecretCreated.getTime() + ttlInMilliseconds);
|
||||
|
||||
if (currentDate > expirationTime) {
|
||||
await identityUaClientSecretDAL.updateById(validClientSecretInfo.id, {
|
||||
isClientSecretRevoked: true
|
||||
});
|
||||
|
||||
throw new UnauthorizedError({
|
||||
message: "Access denied due to expired client secret"
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
if (clientSecretNumUsesLimit > 0 && clientSecretNumUses >= clientSecretNumUsesLimit) {
|
||||
// number of times client secret can be used for
|
||||
// a login operation reached
|
||||
await identityUaClientSecretDAL.updateById(validClientSecretInfo.id, {
|
||||
isClientSecretRevoked: true
|
||||
});
|
||||
|
||||
throw new UnauthorizedError({
|
||||
message: "Access denied due to expired client secret"
|
||||
message: "Access denied due to client secret usage limit reached"
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
if (clientSecretNumUsesLimit > 0 && clientSecretNumUses === clientSecretNumUsesLimit) {
|
||||
// number of times client secret can be used for
|
||||
// a login operation reached
|
||||
await identityUaClientSecretDAL.updateById(validClientSecretInfo.id, {
|
||||
isClientSecretRevoked: true
|
||||
const accessTokenTTLParams =
|
||||
Number(identityUa.accessTokenPeriod) === 0
|
||||
? {
|
||||
accessTokenTTL: identityUa.accessTokenTTL,
|
||||
accessTokenMaxTTL: identityUa.accessTokenMaxTTL
|
||||
}
|
||||
: {
|
||||
accessTokenTTL: identityUa.accessTokenPeriod,
|
||||
// We set a very large Max TTL for periodic tokens to ensure that clients (even outdated ones) can always renew their token
|
||||
// without them having to update their SDKs, CLIs, etc. This workaround sets it to 30 years to emulate "forever"
|
||||
accessTokenMaxTTL: 1000000000
|
||||
};
|
||||
|
||||
const identityAccessToken = await identityUaDAL.transaction(async (tx) => {
|
||||
const uaClientSecretDoc = await identityUaClientSecretDAL.incrementUsage(validClientSecretInfo!.id, tx);
|
||||
await identityOrgMembershipDAL.updateById(
|
||||
identityMembershipOrg.id,
|
||||
{
|
||||
lastLoginAuthMethod: IdentityAuthMethod.UNIVERSAL_AUTH,
|
||||
lastLoginTime: new Date()
|
||||
},
|
||||
tx
|
||||
);
|
||||
const newToken = await identityAccessTokenDAL.create(
|
||||
{
|
||||
identityId: identityUa.identityId,
|
||||
isAccessTokenRevoked: false,
|
||||
identityUAClientSecretId: uaClientSecretDoc.id,
|
||||
accessTokenNumUses: 0,
|
||||
accessTokenNumUsesLimit: identityUa.accessTokenNumUsesLimit,
|
||||
accessTokenPeriod: identityUa.accessTokenPeriod,
|
||||
authMethod: IdentityAuthMethod.UNIVERSAL_AUTH,
|
||||
...accessTokenTTLParams
|
||||
},
|
||||
tx
|
||||
);
|
||||
|
||||
return newToken;
|
||||
});
|
||||
throw new UnauthorizedError({
|
||||
message: "Access denied due to client secret usage limit reached"
|
||||
});
|
||||
}
|
||||
|
||||
const accessTokenTTLParams =
|
||||
Number(identityUa.accessTokenPeriod) === 0
|
||||
? {
|
||||
accessTokenTTL: identityUa.accessTokenTTL,
|
||||
accessTokenMaxTTL: identityUa.accessTokenMaxTTL
|
||||
}
|
||||
: {
|
||||
accessTokenTTL: identityUa.accessTokenPeriod,
|
||||
// We set a very large Max TTL for periodic tokens to ensure that clients (even outdated ones) can always renew their token
|
||||
// without them having to update their SDKs, CLIs, etc. This workaround sets it to 30 years to emulate "forever"
|
||||
accessTokenMaxTTL: 1000000000
|
||||
};
|
||||
|
||||
const identityAccessToken = await identityUaDAL.transaction(async (tx) => {
|
||||
const uaClientSecretDoc = await identityUaClientSecretDAL.incrementUsage(validClientSecretInfo!.id, tx);
|
||||
await identityOrgMembershipDAL.updateById(
|
||||
identityMembershipOrg.id,
|
||||
{
|
||||
lastLoginAuthMethod: IdentityAuthMethod.UNIVERSAL_AUTH,
|
||||
lastLoginTime: new Date()
|
||||
},
|
||||
tx
|
||||
);
|
||||
const newToken = await identityAccessTokenDAL.create(
|
||||
const appCfg = getConfig();
|
||||
const accessToken = crypto.jwt().sign(
|
||||
{
|
||||
identityId: identityUa.identityId,
|
||||
isAccessTokenRevoked: false,
|
||||
identityUAClientSecretId: uaClientSecretDoc.id,
|
||||
accessTokenNumUses: 0,
|
||||
accessTokenNumUsesLimit: identityUa.accessTokenNumUsesLimit,
|
||||
accessTokenPeriod: identityUa.accessTokenPeriod,
|
||||
authMethod: IdentityAuthMethod.UNIVERSAL_AUTH,
|
||||
...accessTokenTTLParams
|
||||
},
|
||||
tx
|
||||
clientSecretId: validClientSecretInfo.id,
|
||||
identityAccessTokenId: identityAccessToken.id,
|
||||
authTokenType: AuthTokenType.IDENTITY_ACCESS_TOKEN
|
||||
} as TIdentityAccessTokenJwtPayload,
|
||||
appCfg.AUTH_SECRET,
|
||||
// akhilmhdh: for non-expiry tokens you should not even set the value, including undefined. Even for undefined jsonwebtoken throws error
|
||||
Number(identityAccessToken.accessTokenTTL) === 0
|
||||
? undefined
|
||||
: {
|
||||
expiresIn: Number(identityAccessToken.accessTokenTTL)
|
||||
}
|
||||
);
|
||||
|
||||
return newToken;
|
||||
});
|
||||
|
||||
const appCfg = getConfig();
|
||||
const accessToken = crypto.jwt().sign(
|
||||
{
|
||||
identityId: identityUa.identityId,
|
||||
clientSecretId: validClientSecretInfo.id,
|
||||
identityAccessTokenId: identityAccessToken.id,
|
||||
authTokenType: AuthTokenType.IDENTITY_ACCESS_TOKEN
|
||||
} as TIdentityAccessTokenJwtPayload,
|
||||
appCfg.AUTH_SECRET,
|
||||
// akhilmhdh: for non-expiry tokens you should not even set the value, including undefined. Even for undefined jsonwebtoken throws error
|
||||
Number(identityAccessToken.accessTokenTTL) === 0
|
||||
? undefined
|
||||
: {
|
||||
expiresIn: Number(identityAccessToken.accessTokenTTL)
|
||||
}
|
||||
);
|
||||
|
||||
return {
|
||||
accessToken,
|
||||
identityUa,
|
||||
validClientSecretInfo,
|
||||
identityAccessToken,
|
||||
identityMembershipOrg,
|
||||
...accessTokenTTLParams
|
||||
};
|
||||
return {
|
||||
accessToken,
|
||||
identityUa,
|
||||
validClientSecretInfo,
|
||||
identityAccessToken,
|
||||
identityMembershipOrg,
|
||||
...accessTokenTTLParams
|
||||
};
|
||||
} finally {
|
||||
await lock.release();
|
||||
}
|
||||
};
|
||||
|
||||
const attachUniversalAuth = async ({
|
||||
@@ -196,7 +273,11 @@ export const identityUaServiceFactory = ({
|
||||
actor,
|
||||
actorOrgId,
|
||||
isActorSuperAdmin,
|
||||
accessTokenPeriod
|
||||
accessTokenPeriod,
|
||||
lockoutEnabled,
|
||||
lockoutThreshold,
|
||||
lockoutDurationSeconds,
|
||||
lockoutCounterResetSeconds
|
||||
}: TAttachUaDTO) => {
|
||||
await validateIdentityUpdateForSuperAdminPrivileges(identityId, isActorSuperAdmin);
|
||||
|
||||
@@ -266,7 +347,11 @@ export const identityUaServiceFactory = ({
|
||||
accessTokenTTL,
|
||||
accessTokenNumUsesLimit,
|
||||
accessTokenTrustedIps: JSON.stringify(reformattedAccessTokenTrustedIps),
|
||||
accessTokenPeriod
|
||||
accessTokenPeriod,
|
||||
lockoutEnabled,
|
||||
lockoutThreshold,
|
||||
lockoutDurationSeconds,
|
||||
lockoutCounterResetSeconds
|
||||
},
|
||||
tx
|
||||
);
|
||||
@@ -286,7 +371,11 @@ export const identityUaServiceFactory = ({
|
||||
actorId,
|
||||
actorAuthMethod,
|
||||
actor,
|
||||
actorOrgId
|
||||
actorOrgId,
|
||||
lockoutEnabled,
|
||||
lockoutThreshold,
|
||||
lockoutDurationSeconds,
|
||||
lockoutCounterResetSeconds
|
||||
}: TUpdateUaDTO) => {
|
||||
const identityMembershipOrg = await identityOrgMembershipDAL.findOne({ identityId });
|
||||
if (!identityMembershipOrg) throw new NotFoundError({ message: `Failed to find identity with ID ${identityId}` });
|
||||
@@ -362,7 +451,11 @@ export const identityUaServiceFactory = ({
|
||||
accessTokenPeriod,
|
||||
accessTokenTrustedIps: reformattedAccessTokenTrustedIps
|
||||
? JSON.stringify(reformattedAccessTokenTrustedIps)
|
||||
: undefined
|
||||
: undefined,
|
||||
lockoutEnabled,
|
||||
lockoutThreshold,
|
||||
lockoutDurationSeconds,
|
||||
lockoutCounterResetSeconds
|
||||
});
|
||||
return { ...updatedUaAuth, orgId: identityMembershipOrg.orgId };
|
||||
};
|
||||
@@ -713,6 +806,38 @@ export const identityUaServiceFactory = ({
|
||||
return { ...updatedClientSecret, identityId, orgId: identityMembershipOrg.orgId };
|
||||
};
|
||||
|
||||
const clearUniversalAuthLockouts = async ({
|
||||
identityId,
|
||||
actorId,
|
||||
actor,
|
||||
actorOrgId,
|
||||
actorAuthMethod
|
||||
}: TClearUaLockoutsDTO) => {
|
||||
const identityMembershipOrg = await identityOrgMembershipDAL.findOne({ identityId });
|
||||
if (!identityMembershipOrg) throw new NotFoundError({ message: `Failed to find identity with ID ${identityId}` });
|
||||
|
||||
if (!identityMembershipOrg.identity.authMethods.includes(IdentityAuthMethod.UNIVERSAL_AUTH)) {
|
||||
throw new BadRequestError({
|
||||
message: "The identity does not have universal auth"
|
||||
});
|
||||
}
|
||||
|
||||
const { permission } = await permissionService.getOrgPermission(
|
||||
actor,
|
||||
actorId,
|
||||
identityMembershipOrg.orgId,
|
||||
actorAuthMethod,
|
||||
actorOrgId
|
||||
);
|
||||
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionIdentityActions.Edit, OrgPermissionSubjects.Identity);
|
||||
|
||||
const deleted = await keyStore.deleteItems({
|
||||
pattern: `lockout:identity:${identityId}:${IdentityAuthMethod.UNIVERSAL_AUTH}:*`
|
||||
});
|
||||
|
||||
return { deleted, identityId, orgId: identityMembershipOrg.orgId };
|
||||
};
|
||||
|
||||
return {
|
||||
login,
|
||||
attachUniversalAuth,
|
||||
@@ -722,6 +847,7 @@ export const identityUaServiceFactory = ({
|
||||
createUniversalAuthClientSecret,
|
||||
getUniversalAuthClientSecrets,
|
||||
revokeUniversalAuthClientSecret,
|
||||
getUniversalAuthClientSecretById
|
||||
getUniversalAuthClientSecretById,
|
||||
clearUniversalAuthLockouts
|
||||
};
|
||||
};
|
||||
|
@@ -9,6 +9,10 @@ export type TAttachUaDTO = {
|
||||
clientSecretTrustedIps: { ipAddress: string }[];
|
||||
accessTokenTrustedIps: { ipAddress: string }[];
|
||||
isActorSuperAdmin?: boolean;
|
||||
lockoutEnabled: boolean;
|
||||
lockoutThreshold: number;
|
||||
lockoutDurationSeconds: number;
|
||||
lockoutCounterResetSeconds: number;
|
||||
} & Omit<TProjectPermission, "projectId">;
|
||||
|
||||
export type TUpdateUaDTO = {
|
||||
@@ -19,6 +23,10 @@ export type TUpdateUaDTO = {
|
||||
accessTokenPeriod?: number;
|
||||
clientSecretTrustedIps?: { ipAddress: string }[];
|
||||
accessTokenTrustedIps?: { ipAddress: string }[];
|
||||
lockoutEnabled?: boolean;
|
||||
lockoutThreshold?: number;
|
||||
lockoutDurationSeconds?: number;
|
||||
lockoutCounterResetSeconds?: number;
|
||||
} & Omit<TProjectPermission, "projectId">;
|
||||
|
||||
export type TGetUaDTO = {
|
||||
@@ -45,6 +53,10 @@ export type TRevokeUaClientSecretDTO = {
|
||||
clientSecretId: string;
|
||||
} & Omit<TProjectPermission, "projectId">;
|
||||
|
||||
export type TClearUaLockoutsDTO = {
|
||||
identityId: string;
|
||||
} & Omit<TProjectPermission, "projectId">;
|
||||
|
||||
export type TGetUniversalAuthClientSecretByIdDTO = {
|
||||
identityId: string;
|
||||
clientSecretId: string;
|
||||
|
@@ -8,6 +8,7 @@ import {
|
||||
validatePrivilegeChangeOperation
|
||||
} from "@app/ee/services/permission/permission-fns";
|
||||
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service-types";
|
||||
import { TKeyStoreFactory } from "@app/keystore/keystore";
|
||||
import { BadRequestError, NotFoundError, PermissionBoundaryError } from "@app/lib/errors";
|
||||
import { TIdentityProjectDALFactory } from "@app/services/identity-project/identity-project-dal";
|
||||
|
||||
@@ -32,6 +33,7 @@ type TIdentityServiceFactoryDep = {
|
||||
identityProjectDAL: Pick<TIdentityProjectDALFactory, "findByIdentityId">;
|
||||
permissionService: Pick<TPermissionServiceFactory, "getOrgPermission" | "getOrgPermissionByRole">;
|
||||
licenseService: Pick<TLicenseServiceFactory, "getPlan" | "updateSubscriptionOrgMemberCount">;
|
||||
keyStore: Pick<TKeyStoreFactory, "getKeysByPattern">;
|
||||
};
|
||||
|
||||
export type TIdentityServiceFactory = ReturnType<typeof identityServiceFactory>;
|
||||
@@ -42,7 +44,8 @@ export const identityServiceFactory = ({
|
||||
identityOrgMembershipDAL,
|
||||
identityProjectDAL,
|
||||
permissionService,
|
||||
licenseService
|
||||
licenseService,
|
||||
keyStore
|
||||
}: TIdentityServiceFactoryDep) => {
|
||||
const createIdentity = async ({
|
||||
name,
|
||||
@@ -255,7 +258,20 @@ export const identityServiceFactory = ({
|
||||
);
|
||||
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionIdentityActions.Read, OrgPermissionSubjects.Identity);
|
||||
|
||||
return identity;
|
||||
const activeLockouts = await keyStore.getKeysByPattern(`lockout:identity:${id}:*`);
|
||||
|
||||
const activeLockoutAuthMethods = new Set<string>();
|
||||
activeLockouts.forEach((key) => {
|
||||
const parts = key.split(":");
|
||||
if (parts.length > 3) {
|
||||
activeLockoutAuthMethods.add(parts[3]);
|
||||
}
|
||||
});
|
||||
|
||||
return {
|
||||
...identity,
|
||||
identity: { ...identity.identity, activeLockoutAuthMethods: Array.from(activeLockoutAuthMethods) }
|
||||
};
|
||||
};
|
||||
|
||||
const deleteIdentity = async ({
|
||||
|
@@ -153,10 +153,64 @@ export const orgMembershipDALFactory = (db: TDbClient) => {
|
||||
}
|
||||
};
|
||||
|
||||
const findOrgMembershipsWithUsersByOrgId = async (orgId: string) => {
|
||||
try {
|
||||
const members = await db
|
||||
.replicaNode()(TableName.OrgMembership)
|
||||
.where(`${TableName.OrgMembership}.orgId`, orgId)
|
||||
.join(TableName.Users, `${TableName.OrgMembership}.userId`, `${TableName.Users}.id`)
|
||||
.leftJoin<TUserEncryptionKeys>(
|
||||
TableName.UserEncryptionKey,
|
||||
`${TableName.UserEncryptionKey}.userId`,
|
||||
`${TableName.Users}.id`
|
||||
)
|
||||
.leftJoin(TableName.IdentityMetadata, (queryBuilder) => {
|
||||
void queryBuilder
|
||||
.on(`${TableName.OrgMembership}.userId`, `${TableName.IdentityMetadata}.userId`)
|
||||
.andOn(`${TableName.OrgMembership}.orgId`, `${TableName.IdentityMetadata}.orgId`);
|
||||
})
|
||||
.select(
|
||||
db.ref("id").withSchema(TableName.OrgMembership),
|
||||
db.ref("inviteEmail").withSchema(TableName.OrgMembership),
|
||||
db.ref("orgId").withSchema(TableName.OrgMembership),
|
||||
db.ref("role").withSchema(TableName.OrgMembership),
|
||||
db.ref("roleId").withSchema(TableName.OrgMembership),
|
||||
db.ref("status").withSchema(TableName.OrgMembership),
|
||||
db.ref("isActive").withSchema(TableName.OrgMembership),
|
||||
db.ref("email").withSchema(TableName.Users),
|
||||
db.ref("username").withSchema(TableName.Users),
|
||||
db.ref("firstName").withSchema(TableName.Users),
|
||||
db.ref("lastName").withSchema(TableName.Users),
|
||||
db.ref("isEmailVerified").withSchema(TableName.Users),
|
||||
db.ref("id").withSchema(TableName.Users).as("userId")
|
||||
)
|
||||
.where({ isGhost: false });
|
||||
|
||||
return members.map((member) => ({
|
||||
id: member.id,
|
||||
orgId: member.orgId,
|
||||
role: member.role,
|
||||
status: member.status,
|
||||
isActive: member.isActive,
|
||||
inviteEmail: member.inviteEmail,
|
||||
user: {
|
||||
id: member.userId,
|
||||
email: member.email,
|
||||
username: member.username,
|
||||
firstName: member.firstName,
|
||||
lastName: member.lastName
|
||||
}
|
||||
}));
|
||||
} catch (error) {
|
||||
throw new DatabaseError({ error, name: "Find org memberships with users by org id" });
|
||||
}
|
||||
};
|
||||
|
||||
return {
|
||||
...orgMembershipOrm,
|
||||
findOrgMembershipById,
|
||||
findRecentInvitedMemberships,
|
||||
updateLastInvitedAtByIds
|
||||
updateLastInvitedAtByIds,
|
||||
findOrgMembershipsWithUsersByOrgId
|
||||
};
|
||||
};
|
||||
|
@@ -18,7 +18,8 @@ export const sanitizedPkiSubscriber = PkiSubscribersSchema.pick({
|
||||
lastOperationAt: true,
|
||||
enableAutoRenewal: true,
|
||||
autoRenewalPeriodInDays: true,
|
||||
lastAutoRenewAt: true
|
||||
lastAutoRenewAt: true,
|
||||
properties: true
|
||||
}).extend({
|
||||
supportsImmediateCertIssuance: z.boolean().optional()
|
||||
});
|
||||
|
@@ -109,6 +109,7 @@ export const pkiSubscriberServiceFactory = ({
|
||||
extendedKeyUsages,
|
||||
enableAutoRenewal,
|
||||
autoRenewalPeriodInDays,
|
||||
properties,
|
||||
projectId,
|
||||
actorId,
|
||||
actorAuthMethod,
|
||||
@@ -157,7 +158,8 @@ export const pkiSubscriberServiceFactory = ({
|
||||
keyUsages,
|
||||
extendedKeyUsages,
|
||||
enableAutoRenewal,
|
||||
autoRenewalPeriodInDays
|
||||
autoRenewalPeriodInDays,
|
||||
properties
|
||||
});
|
||||
|
||||
return newSubscriber;
|
||||
@@ -221,6 +223,7 @@ export const pkiSubscriberServiceFactory = ({
|
||||
extendedKeyUsages,
|
||||
enableAutoRenewal,
|
||||
autoRenewalPeriodInDays,
|
||||
properties,
|
||||
actorId,
|
||||
actorAuthMethod,
|
||||
actor,
|
||||
@@ -275,7 +278,8 @@ export const pkiSubscriberServiceFactory = ({
|
||||
keyUsages,
|
||||
extendedKeyUsages,
|
||||
enableAutoRenewal,
|
||||
autoRenewalPeriodInDays
|
||||
autoRenewalPeriodInDays,
|
||||
properties
|
||||
});
|
||||
|
||||
return updatedSubscriber;
|
||||
@@ -360,7 +364,7 @@ export const pkiSubscriberServiceFactory = ({
|
||||
throw new BadRequestError({ message: "CA is disabled" });
|
||||
}
|
||||
|
||||
if (ca.externalCa?.id && ca.externalCa.type === CaType.ACME) {
|
||||
if (ca.externalCa?.id && (ca.externalCa.type === CaType.ACME || ca.externalCa.type === CaType.AZURE_AD_CS)) {
|
||||
await certificateAuthorityQueue.orderCertificateForSubscriber({
|
||||
subscriberId: subscriber.id,
|
||||
caType: ca.externalCa.type
|
||||
|
@@ -18,6 +18,7 @@ export type TCreatePkiSubscriberDTO = {
|
||||
extendedKeyUsages: CertExtendedKeyUsage[];
|
||||
enableAutoRenewal?: boolean;
|
||||
autoRenewalPeriodInDays?: number;
|
||||
properties?: TPkiSubscriberProperties;
|
||||
} & TProjectPermission;
|
||||
|
||||
export type TGetPkiSubscriberDTO = {
|
||||
@@ -36,6 +37,7 @@ export type TUpdatePkiSubscriberDTO = {
|
||||
extendedKeyUsages?: CertExtendedKeyUsage[];
|
||||
enableAutoRenewal?: boolean;
|
||||
autoRenewalPeriodInDays?: number;
|
||||
properties?: TPkiSubscriberProperties;
|
||||
} & TProjectPermission;
|
||||
|
||||
export type TDeletePkiSubscriberDTO = {
|
||||
@@ -69,3 +71,13 @@ export enum SubscriberOperationStatus {
|
||||
SUCCESS = "success",
|
||||
FAILED = "failed"
|
||||
}
|
||||
|
||||
export type TPkiSubscriberProperties = {
|
||||
azureTemplateType?: string;
|
||||
organization?: string;
|
||||
organizationalUnit?: string;
|
||||
country?: string;
|
||||
state?: string;
|
||||
locality?: string;
|
||||
emailAddress?: string;
|
||||
};
|
||||
|
@@ -30,6 +30,7 @@ import {
|
||||
TDeleteFolderDTO,
|
||||
TDeleteManyFoldersDTO,
|
||||
TGetFolderByIdDTO,
|
||||
TGetFolderByPathDTO,
|
||||
TGetFolderDTO,
|
||||
TGetFoldersDeepByEnvsDTO,
|
||||
TUpdateFolderDTO,
|
||||
@@ -1398,6 +1399,31 @@ export const secretFolderServiceFactory = ({
|
||||
};
|
||||
};
|
||||
|
||||
const getFolderByPath = async (
|
||||
{ projectId, environment, secretPath }: TGetFolderByPathDTO,
|
||||
actor: OrgServiceActor
|
||||
) => {
|
||||
// folder check is allowed to be read by anyone
|
||||
// permission is to check if user has access
|
||||
await permissionService.getProjectPermission({
|
||||
actor: actor.type,
|
||||
actorId: actor.id,
|
||||
projectId,
|
||||
actorAuthMethod: actor.authMethod,
|
||||
actorOrgId: actor.orgId,
|
||||
actionProjectType: ActionProjectType.SecretManager
|
||||
});
|
||||
|
||||
const folder = await folderDAL.findBySecretPath(projectId, environment, secretPath);
|
||||
|
||||
if (!folder)
|
||||
throw new NotFoundError({
|
||||
message: `Could not find folder with path "${secretPath}" in environment "${environment}" for project with ID "${projectId}"`
|
||||
});
|
||||
|
||||
return folder;
|
||||
};
|
||||
|
||||
return {
|
||||
createFolder,
|
||||
updateFolder,
|
||||
@@ -1405,6 +1431,7 @@ export const secretFolderServiceFactory = ({
|
||||
deleteFolder,
|
||||
getFolders,
|
||||
getFolderById,
|
||||
getFolderByPath,
|
||||
getProjectFolderCount,
|
||||
getFoldersMultiEnv,
|
||||
getFoldersDeepByEnvs,
|
||||
|
@@ -91,3 +91,9 @@ export type TDeleteManyFoldersDTO = {
|
||||
idOrName: string;
|
||||
}>;
|
||||
};
|
||||
|
||||
export type TGetFolderByPathDTO = {
|
||||
projectId: string;
|
||||
environment: string;
|
||||
secretPath: string;
|
||||
};
|
||||
|
@@ -1,9 +1,13 @@
|
||||
import { isAxiosError } from "axios";
|
||||
|
||||
import { request } from "@app/lib/config/request";
|
||||
import { TGatewayServiceFactory } from "@app/ee/services/gateway/gateway-service";
|
||||
import { removeTrailingSlash } from "@app/lib/fn";
|
||||
import { blockLocalAndPrivateIpAddresses } from "@app/lib/validator";
|
||||
import { getHCVaultAccessToken, getHCVaultInstanceUrl } from "@app/services/app-connection/hc-vault";
|
||||
import {
|
||||
getHCVaultAccessToken,
|
||||
getHCVaultInstanceUrl,
|
||||
requestWithHCVaultGateway,
|
||||
THCVaultConnection
|
||||
} from "@app/services/app-connection/hc-vault";
|
||||
import {
|
||||
THCVaultListVariables,
|
||||
THCVaultListVariablesResponse,
|
||||
@@ -14,19 +18,20 @@ import { SecretSyncError } from "@app/services/secret-sync/secret-sync-errors";
|
||||
import { matchesSchema } from "@app/services/secret-sync/secret-sync-fns";
|
||||
import { TSecretMap } from "@app/services/secret-sync/secret-sync-types";
|
||||
|
||||
const listHCVaultVariables = async ({ instanceUrl, namespace, mount, accessToken, path }: THCVaultListVariables) => {
|
||||
await blockLocalAndPrivateIpAddresses(instanceUrl);
|
||||
|
||||
const listHCVaultVariables = async (
|
||||
{ instanceUrl, namespace, mount, accessToken, path }: THCVaultListVariables,
|
||||
connection: THCVaultConnection,
|
||||
gatewayService: Pick<TGatewayServiceFactory, "fnGetGatewayClientTlsByGatewayId">
|
||||
) => {
|
||||
try {
|
||||
const { data } = await request.get<THCVaultListVariablesResponse>(
|
||||
`${instanceUrl}/v1/${removeTrailingSlash(mount)}/data/${path}`,
|
||||
{
|
||||
headers: {
|
||||
"X-Vault-Token": accessToken,
|
||||
...(namespace ? { "X-Vault-Namespace": namespace } : {})
|
||||
}
|
||||
const { data } = await requestWithHCVaultGateway<THCVaultListVariablesResponse>(connection, gatewayService, {
|
||||
url: `${instanceUrl}/v1/${removeTrailingSlash(mount)}/data/${path}`,
|
||||
method: "GET",
|
||||
headers: {
|
||||
"X-Vault-Token": accessToken,
|
||||
...(namespace ? { "X-Vault-Namespace": namespace } : {})
|
||||
}
|
||||
);
|
||||
});
|
||||
|
||||
return data.data.data;
|
||||
} catch (error: unknown) {
|
||||
@@ -39,33 +44,29 @@ const listHCVaultVariables = async ({ instanceUrl, namespace, mount, accessToken
|
||||
};
|
||||
|
||||
// Hashicorp Vault updates all variables in one batch. This is to respect their versioning
|
||||
const updateHCVaultVariables = async ({
|
||||
path,
|
||||
instanceUrl,
|
||||
namespace,
|
||||
accessToken,
|
||||
mount,
|
||||
data
|
||||
}: TPostHCVaultVariable) => {
|
||||
await blockLocalAndPrivateIpAddresses(instanceUrl);
|
||||
|
||||
return request.post(
|
||||
`${instanceUrl}/v1/${removeTrailingSlash(mount)}/data/${path}`,
|
||||
{
|
||||
data
|
||||
const updateHCVaultVariables = async (
|
||||
{ path, instanceUrl, namespace, accessToken, mount, data }: TPostHCVaultVariable,
|
||||
connection: THCVaultConnection,
|
||||
gatewayService: Pick<TGatewayServiceFactory, "fnGetGatewayClientTlsByGatewayId">
|
||||
) => {
|
||||
return requestWithHCVaultGateway(connection, gatewayService, {
|
||||
url: `${instanceUrl}/v1/${removeTrailingSlash(mount)}/data/${path}`,
|
||||
method: "POST",
|
||||
headers: {
|
||||
"X-Vault-Token": accessToken,
|
||||
...(namespace ? { "X-Vault-Namespace": namespace } : {}),
|
||||
"Content-Type": "application/json"
|
||||
},
|
||||
{
|
||||
headers: {
|
||||
"X-Vault-Token": accessToken,
|
||||
...(namespace ? { "X-Vault-Namespace": namespace } : {}),
|
||||
"Content-Type": "application/json"
|
||||
}
|
||||
}
|
||||
);
|
||||
data: { data }
|
||||
});
|
||||
};
|
||||
|
||||
export const HCVaultSyncFns = {
|
||||
syncSecrets: async (secretSync: THCVaultSyncWithCredentials, secretMap: TSecretMap) => {
|
||||
syncSecrets: async (
|
||||
secretSync: THCVaultSyncWithCredentials,
|
||||
secretMap: TSecretMap,
|
||||
gatewayService: Pick<TGatewayServiceFactory, "fnGetGatewayClientTlsByGatewayId">
|
||||
) => {
|
||||
const {
|
||||
connection,
|
||||
environment,
|
||||
@@ -74,16 +75,20 @@ export const HCVaultSyncFns = {
|
||||
} = secretSync;
|
||||
|
||||
const { namespace } = connection.credentials;
|
||||
const accessToken = await getHCVaultAccessToken(connection);
|
||||
const accessToken = await getHCVaultAccessToken(connection, gatewayService);
|
||||
const instanceUrl = await getHCVaultInstanceUrl(connection);
|
||||
|
||||
const variables = await listHCVaultVariables({
|
||||
instanceUrl,
|
||||
accessToken,
|
||||
namespace,
|
||||
mount,
|
||||
path
|
||||
});
|
||||
const variables = await listHCVaultVariables(
|
||||
{
|
||||
instanceUrl,
|
||||
accessToken,
|
||||
namespace,
|
||||
mount,
|
||||
path
|
||||
},
|
||||
connection,
|
||||
gatewayService
|
||||
);
|
||||
let tainted = false;
|
||||
|
||||
for (const entry of Object.entries(secretMap)) {
|
||||
@@ -110,24 +115,36 @@ export const HCVaultSyncFns = {
|
||||
if (!tainted) return;
|
||||
|
||||
try {
|
||||
await updateHCVaultVariables({ accessToken, instanceUrl, namespace, mount, path, data: variables });
|
||||
await updateHCVaultVariables(
|
||||
{ accessToken, instanceUrl, namespace, mount, path, data: variables },
|
||||
connection,
|
||||
gatewayService
|
||||
);
|
||||
} catch (error) {
|
||||
throw new SecretSyncError({
|
||||
error
|
||||
});
|
||||
}
|
||||
},
|
||||
removeSecrets: async (secretSync: THCVaultSyncWithCredentials, secretMap: TSecretMap) => {
|
||||
removeSecrets: async (
|
||||
secretSync: THCVaultSyncWithCredentials,
|
||||
secretMap: TSecretMap,
|
||||
gatewayService: Pick<TGatewayServiceFactory, "fnGetGatewayClientTlsByGatewayId">
|
||||
) => {
|
||||
const {
|
||||
connection,
|
||||
destinationConfig: { mount, path }
|
||||
} = secretSync;
|
||||
|
||||
const { namespace } = connection.credentials;
|
||||
const accessToken = await getHCVaultAccessToken(connection);
|
||||
const accessToken = await getHCVaultAccessToken(connection, gatewayService);
|
||||
const instanceUrl = await getHCVaultInstanceUrl(connection);
|
||||
|
||||
const variables = await listHCVaultVariables({ instanceUrl, namespace, accessToken, mount, path });
|
||||
const variables = await listHCVaultVariables(
|
||||
{ instanceUrl, namespace, accessToken, mount, path },
|
||||
connection,
|
||||
gatewayService
|
||||
);
|
||||
|
||||
for await (const [key] of Object.entries(variables)) {
|
||||
if (key in secretMap) {
|
||||
@@ -136,30 +153,41 @@ export const HCVaultSyncFns = {
|
||||
}
|
||||
|
||||
try {
|
||||
await updateHCVaultVariables({ accessToken, instanceUrl, namespace, mount, path, data: variables });
|
||||
await updateHCVaultVariables(
|
||||
{ accessToken, instanceUrl, namespace, mount, path, data: variables },
|
||||
connection,
|
||||
gatewayService
|
||||
);
|
||||
} catch (error) {
|
||||
throw new SecretSyncError({
|
||||
error
|
||||
});
|
||||
}
|
||||
},
|
||||
getSecrets: async (secretSync: THCVaultSyncWithCredentials) => {
|
||||
getSecrets: async (
|
||||
secretSync: THCVaultSyncWithCredentials,
|
||||
gatewayService: Pick<TGatewayServiceFactory, "fnGetGatewayClientTlsByGatewayId">
|
||||
) => {
|
||||
const {
|
||||
connection,
|
||||
destinationConfig: { mount, path }
|
||||
} = secretSync;
|
||||
|
||||
const { namespace } = connection.credentials;
|
||||
const accessToken = await getHCVaultAccessToken(connection);
|
||||
const accessToken = await getHCVaultAccessToken(connection, gatewayService);
|
||||
const instanceUrl = await getHCVaultInstanceUrl(connection);
|
||||
|
||||
const variables = await listHCVaultVariables({
|
||||
instanceUrl,
|
||||
namespace,
|
||||
accessToken,
|
||||
mount,
|
||||
path
|
||||
});
|
||||
const variables = await listHCVaultVariables(
|
||||
{
|
||||
instanceUrl,
|
||||
namespace,
|
||||
accessToken,
|
||||
mount,
|
||||
path
|
||||
},
|
||||
connection,
|
||||
gatewayService
|
||||
);
|
||||
|
||||
return Object.fromEntries(Object.entries(variables).map(([key, value]) => [key, { value }]));
|
||||
}
|
||||
|
@@ -244,7 +244,7 @@ export const SecretSyncFns = {
|
||||
case SecretSync.Windmill:
|
||||
return WindmillSyncFns.syncSecrets(secretSync, schemaSecretMap);
|
||||
case SecretSync.HCVault:
|
||||
return HCVaultSyncFns.syncSecrets(secretSync, schemaSecretMap);
|
||||
return HCVaultSyncFns.syncSecrets(secretSync, schemaSecretMap, gatewayService);
|
||||
case SecretSync.TeamCity:
|
||||
return TeamCitySyncFns.syncSecrets(secretSync, schemaSecretMap);
|
||||
case SecretSync.OCIVault:
|
||||
@@ -283,7 +283,7 @@ export const SecretSyncFns = {
|
||||
},
|
||||
getSecrets: async (
|
||||
secretSync: TSecretSyncWithCredentials,
|
||||
{ kmsService, appConnectionDAL }: TSyncSecretDeps
|
||||
{ kmsService, appConnectionDAL, gatewayService }: TSyncSecretDeps
|
||||
): Promise<TSecretMap> => {
|
||||
let secretMap: TSecretMap;
|
||||
switch (secretSync.destination) {
|
||||
@@ -341,7 +341,7 @@ export const SecretSyncFns = {
|
||||
secretMap = await WindmillSyncFns.getSecrets(secretSync);
|
||||
break;
|
||||
case SecretSync.HCVault:
|
||||
secretMap = await HCVaultSyncFns.getSecrets(secretSync);
|
||||
secretMap = await HCVaultSyncFns.getSecrets(secretSync, gatewayService);
|
||||
break;
|
||||
case SecretSync.TeamCity:
|
||||
secretMap = await TeamCitySyncFns.getSecrets(secretSync);
|
||||
@@ -451,7 +451,7 @@ export const SecretSyncFns = {
|
||||
case SecretSync.Windmill:
|
||||
return WindmillSyncFns.removeSecrets(secretSync, schemaSecretMap);
|
||||
case SecretSync.HCVault:
|
||||
return HCVaultSyncFns.removeSecrets(secretSync, schemaSecretMap);
|
||||
return HCVaultSyncFns.removeSecrets(secretSync, schemaSecretMap, gatewayService);
|
||||
case SecretSync.TeamCity:
|
||||
return TeamCitySyncFns.removeSecrets(secretSync, schemaSecretMap);
|
||||
case SecretSync.OCIVault:
|
||||
|
40
docs/.eslintrc.js
Normal file
40
docs/.eslintrc.js
Normal file
@@ -0,0 +1,40 @@
|
||||
module.exports = {
|
||||
env: {
|
||||
browser: true,
|
||||
es2021: true,
|
||||
node: true,
|
||||
},
|
||||
extends: [
|
||||
'eslint:recommended',
|
||||
'plugin:react/recommended',
|
||||
'plugin:react/jsx-runtime',
|
||||
],
|
||||
parser: '@babel/eslint-parser',
|
||||
parserOptions: {
|
||||
ecmaVersion: 2021,
|
||||
sourceType: 'module',
|
||||
ecmaFeatures: {
|
||||
jsx: true,
|
||||
},
|
||||
requireConfigFile: false,
|
||||
babelOptions: {
|
||||
presets: ['@babel/preset-react'],
|
||||
},
|
||||
},
|
||||
plugins: ['react'],
|
||||
rules: {
|
||||
'react/jsx-uses-react': 'error',
|
||||
'react/jsx-uses-vars': 'error',
|
||||
},
|
||||
settings: {
|
||||
react: {
|
||||
version: 'detect',
|
||||
},
|
||||
},
|
||||
ignorePatterns: [
|
||||
'node_modules/',
|
||||
'dist/',
|
||||
'build/',
|
||||
'*.config.js',
|
||||
],
|
||||
};
|
@@ -0,0 +1,4 @@
|
||||
---
|
||||
title: "Available"
|
||||
openapi: "GET /api/v1/app-connections/azure-adcs/available"
|
||||
---
|
@@ -0,0 +1,10 @@
|
||||
---
|
||||
title: "Create"
|
||||
openapi: "POST /api/v1/app-connections/azure-adcs"
|
||||
---
|
||||
|
||||
<Note>
|
||||
Azure ADCS Connections must be created through the Infisical UI.
|
||||
Check out the configuration docs for [Azure ADCS Connections](/integrations/app-connections/azure-adcs) for a step-by-step
|
||||
guide.
|
||||
</Note>
|
@@ -0,0 +1,4 @@
|
||||
---
|
||||
title: "Delete"
|
||||
openapi: "DELETE /api/v1/app-connections/azure-adcs/{connectionId}"
|
||||
---
|
@@ -0,0 +1,4 @@
|
||||
---
|
||||
title: "Get by ID"
|
||||
openapi: "GET /api/v1/app-connections/azure-adcs/{connectionId}"
|
||||
---
|
@@ -0,0 +1,4 @@
|
||||
---
|
||||
title: "Get by Name"
|
||||
openapi: "GET /api/v1/app-connections/azure-adcs/connection-name/{connectionName}"
|
||||
---
|
@@ -0,0 +1,4 @@
|
||||
---
|
||||
title: "List"
|
||||
openapi: "GET /api/v1/app-connections/azure-adcs"
|
||||
---
|
@@ -0,0 +1,10 @@
|
||||
---
|
||||
title: "Update"
|
||||
openapi: "PATCH /api/v1/app-connections/azure-adcs/{connectionId}"
|
||||
---
|
||||
|
||||
<Note>
|
||||
Azure ADCS Connections must be updated through the Infisical UI.
|
||||
Check out the configuration docs for [Azure ADCS Connections](/integrations/app-connections/azure-adcs) for a step-by-step
|
||||
guide.
|
||||
</Note>
|
@@ -26,7 +26,7 @@ The changelog below reflects new product developments and updates on a monthly b
|
||||
- Revamped UI for Access Controls, Access Tree, Policies, and Approval Workflows.
|
||||
- Released [TLS Certificate Authentication method](https://infisical.com/docs/documentation/platform/identities/tls-cert-auth).
|
||||
- Added ability to copy session tokens in the Infisical Dashboard.
|
||||
- Expanded resource support for [Infisical Terraform Provider](https://infisical.com/docs/integrations/frameworks/terraform).
|
||||
- Expanded resource support for [Infisical Terraform Provider](https://registry.terraform.io/providers/Infisical/infisical/latest/docs).
|
||||
|
||||
|
||||
## May 2025
|
||||
@@ -62,7 +62,7 @@ The changelog below reflects new product developments and updates on a monthly b
|
||||
## March 2025
|
||||
|
||||
- Released [Infisical Gateway](https://infisical.com/docs/documentation/platform/gateways/overview) for secure access to private resources without needing direct inbound connections to private networks.
|
||||
- Enhanced [Terraform](https://infisical.com/docs/integrations/frameworks/terraform#terraform) capabilities with token authentication, ability to import existing Infisical secrets as resources, and support for project templates.
|
||||
- Enhanced [Terraform](https://registry.terraform.io/providers/Infisical/infisical/latest/docs) capabilities with token authentication, ability to import existing Infisical secrets as resources, and support for project templates.
|
||||
- Self-hosted improvements: Usage and billing visibility for enabled features, ability to delete users, and support for multiple super admins.
|
||||
- UI and UX updates: Improved secret import interface on the overview page, password reset without backup PDF.
|
||||
- CLI enhancements: Various improvements including multiline secret support and ability to pass headers.
|
||||
@@ -93,7 +93,7 @@ The changelog below reflects new product developments and updates on a monthly b
|
||||
- Added support for OIDC group mapping in [Keycloak](https://infisical.com/docs/documentation/platform/sso/keycloak-oidc/overview), enabling automatic mapping of Keycloak groups to Infisical for role-based access control.
|
||||
- Enhanced [Kubernetes operator](https://infisical.com/docs/integrations/platforms/kubernetes/overview#kubernetes-operator) with namespaced group support, bi-directional secret sync (push to Infisical), [dynamic secrets](https://infisical.com/docs/documentation/platform/dynamic-secrets/overview#dynamic-secrets) capabilities, and support for multiple operator instances.
|
||||
- Restructured navigation with dedicated sections for Secrets Management, [Certificate Management (PKI)](https://infisical.com/docs/documentation/platform/pki/overview), [Key Management (KMS)](https://infisical.com/docs/documentation/platform/kms/overview#key-management-service-kms), and [SSH Key Management](https://infisical.com/docs/documentation/platform/ssh).
|
||||
- Added [ephemeral Terraform resource](https://infisical.com/docs/integrations/frameworks/terraform#terraform-provider) support and improved secret sync architecture.
|
||||
- Added [ephemeral Terraform resource](https://registry.terraform.io/providers/Infisical/infisical/latest/docs) support and improved secret sync architecture.
|
||||
- Released [.NET provider](https://github.com/Infisical/infisical-dotnet-configuration) with first-party Azure authentication support and Azure CLI integration.
|
||||
- Implemented secret Access Visibility allowing users to view all entities with access to specific secrets in the secret side panel.
|
||||
- Added secret filtering by metadata and SSH assigned certificates (Version 1).
|
||||
@@ -212,7 +212,7 @@ The changelog below reflects new product developments and updates on a monthly b
|
||||
- Completed Postgres migration initiative with restructed Fastify-based backend.
|
||||
- Reduced size of Infisical Node.js SDK by ≈90%.
|
||||
- Added secret fallback support to all SDK's.
|
||||
- Added Machine Identity support to [Terraform Provider](https://github.com/Infisical/terraform-provider-infisical).
|
||||
- Added Machine Identity support to [Terraform Provider](https://registry.terraform.io/providers/Infisical/infisical/latest/docs).
|
||||
- Released [.NET SDK](https://infisical.com/docs/sdks/languages/csharp).
|
||||
- Added symmetric encryption support to all SDK's.
|
||||
- Fixed secret reminders bug, where reminders were not being updated correctly.
|
||||
@@ -276,7 +276,7 @@ The changelog below reflects new product developments and updates on a monthly b
|
||||
|
||||
## June 2023
|
||||
|
||||
- Released the [Terraform Provider](https://infisical.com/docs/integrations/frameworks/terraform#5-run-terraform).
|
||||
- Released the [Terraform Provider](https://registry.terraform.io/providers/Infisical/infisical/latest/docs).
|
||||
- Updated the usage and billing page. Added the free trial for the professional tier.
|
||||
- Added native integrations with [Checkly](https://infisical.com/docs/integrations/cloud/checkly), [Hashicorp Vault](https://infisical.com/docs/integrations/cloud/hashicorp-vault), and [Cloudflare Pages](https://infisical.com/docs/integrations/cloud/cloudflare-pages).
|
||||
- Completed a penetration test with a `very good` result.
|
||||
|
@@ -10,7 +10,7 @@ should approach the development and contribution process.
|
||||
Infisical has two major code-bases. One for the platform code, and one for SDKs. The contribution process has some key differences between the two, so we've split the documentation into two sections:
|
||||
|
||||
- The [Infisical Platform](https://github.com/Infisical/infisical), the Infisical platform itself.
|
||||
- The [Infisical SDK](https://github.com/Infisical/sdk), the official Infisical client SDKs.
|
||||
- The [Infisical SDK](https://infisical.com/docs/sdks/overview), the official Infisical client SDKs.
|
||||
|
||||
|
||||
<CardGroup cols={2}>
|
||||
|
@@ -1,408 +0,0 @@
|
||||
---
|
||||
title: "Local development"
|
||||
description: "This guide will help you contribute to the Infisical SDK."
|
||||
---
|
||||
|
||||
## Fork and clone the repo
|
||||
|
||||
[Fork](https://docs.github.com/en/get-started/quickstart/fork-a-repo) the [repository](https://github.com/Infisical/sdk) to your own GitHub account and then [clone](https://docs.github.com/en/repositories/creating-and-managing-repositories/cloning-a-repository) it to your local device.
|
||||
|
||||
Once, you've done that, create a new branch:
|
||||
|
||||
```console
|
||||
git checkout -b MY_BRANCH_NAME
|
||||
```
|
||||
|
||||
## Set up environment variables
|
||||
|
||||
Start by creating a .env file at the root of the Infisical directory then copy the contents of the file below into the .env file.
|
||||
|
||||
<Accordion title=".env file content">
|
||||
```env
|
||||
# This is required for running tests locally.
|
||||
# Rename this file to ".env" and fill in the values below.
|
||||
|
||||
# Please make sure that the machine identity has access to the project you are testing in.
|
||||
# https://infisical.com/docs/documentation/platform/identities/universal-auth
|
||||
INFISICAL_UNIVERSAL_CLIENT_ID=MACHINE_IDENTITY_CLIENT_ID
|
||||
INFISICAL_UNIVERSAL_CLIENT_SECRET=MACHINE_IDENTITY_CLIENT_SECRET
|
||||
|
||||
# The ID of the Infisical project where we will create the test secrets.
|
||||
# NOTE: The project must have a dev environment. (This is created by default when you create a project.)
|
||||
INFISICAL_PROJECT_ID=INFISICAL_TEST_PROJECT_ID
|
||||
|
||||
# The Infisical site URL. If you are testing with a local Infisical instance, then this should be set to "http://localhost:8080".
|
||||
INFISICAL_SITE_URL=https://app.infisical.com
|
||||
|
||||
````
|
||||
</Accordion>
|
||||
|
||||
<Warning>
|
||||
The above values are required for running tests locally. Before opening a pull request, make sure to run `cargo test` to ensure that all tests pass.
|
||||
</Warning>
|
||||
|
||||
|
||||
## Guidelines
|
||||
|
||||
### Predictable and consistent
|
||||
When adding new functionality (such as new functions), it's very important that the functionality is added to _all_ the SDK's. This is to ensure that the SDK's are predictable and consistent across all languages. If you are adding new functionality, please make sure to add it to all the SDK's.
|
||||
|
||||
### Handling errors
|
||||
Error handling is very important when writing SDK's. We want to make sure that the SDK's are easy to use, and that the user gets a good understanding of what went wrong when something fails. When adding new functionality, please make sure to add proper error handling. [Read more about error handling here](#error-handling).
|
||||
|
||||
### Tests
|
||||
If you add new functionality or modify existing functionality, please write tests thats properly cover the new functionality. You can run tests locally by running `cargo test` from the root directory. You must always run tests before opening a pull request.
|
||||
|
||||
### Code style
|
||||
Please follow the default rust styling guide when writing code for the base SDK. [Read more about rust code style here](https://doc.rust-lang.org/nightly/style-guide/#the-default-rust-style).
|
||||
|
||||
|
||||
## Prerequisites for contributing
|
||||
|
||||
### Understanding the terms
|
||||
|
||||
In the guide we use some terms that might be unfamiliar to you. Here's a quick explanation of the terms we use:
|
||||
- **Base SDK**: The base SDK is the SDK that all other SDK's are built on top of. The base SDK is written in Rust, and is responsible for executing commands and parsing the input and output to and from JSON.
|
||||
- **Commands**: Commands are what's being sent from the target language to the command handler. The command handler uses the command to execute the corresponding function in the base SDK. Commands are in reality just a JSON string that tells the command handler what function to execute, and what input to use.
|
||||
- **Command handler**: The command handler is the part of the base SDK that takes care of executing commands. It also takes care of parsing the input and output to and from JSON.
|
||||
- **Target language**: The target language refers to the actual SDK code. For example, the [Node.js SDK](https://www.npmjs.com/package/@infisical/sdk) is a "target language", and so is the [Python SDK](https://pypi.org/project/infisical-python/).
|
||||
|
||||
|
||||
### Understanding the execution flow
|
||||
After the target language SDK is initiated, it uses language-specific bindings to interact with the base SDK.
|
||||
These bindings are instantiated, setting up the interface for command execution. A client within the command handler is created, which issues commands to the base SDK.
|
||||
When a command is executed, it is first validated. If valid, the command handler locates the corresponding command to perform. If the command executes successfully, the command handler returns the output to the target language SDK, where it is parsed and returned to the user.
|
||||
If the command handler fails to validate the input, an error will be returned to the target language SDK.
|
||||
|
||||
|
||||
<Frame caption="Execution flow diagram for the SDK from the target language to the base SDK. The execution flow is the same for all target languages.">
|
||||
<img height="640" width="520" src="/images/sdk-flow.png" />
|
||||
</Frame>
|
||||
|
||||
|
||||
|
||||
### Rust knowledge
|
||||
|
||||
Contributing to the SDK requires intermediate to advanced knowledge of Rust concepts such as lifetimes, traits, generics, and async/await _(futures)_, and more.
|
||||
|
||||
### Rust setup
|
||||
The base SDK is written in rust. Therefore you must have rustc and cargo installed. You can install rustc and cargo by following the instructions [here](https://www.rust-lang.org/tools/install).
|
||||
|
||||
You shouldn't have to use the rust cross compilation toolchain, as all compilation is done through a collection of Github Actions. However. If you need to test cross compilation, please do so with Github Actions.
|
||||
|
||||
### Tests
|
||||
If you add new functionality or modify existing functionality, please write tests thats properly cover the new functionality. You can run tests locally by running `cargo test` from the root directory.
|
||||
|
||||
### Language-specific crates
|
||||
The language-specific crates should ideally never have to be modified, as they are simply a wrapper for the `infisical-json` crate, which executes "commands" from the base SDK. If you need to create a new target-language specific crate, please try to create native bindings for the target language. Some languages don't have direct support for native bindings (Java as an example). In those cases we can use the C bindings (`crates/infisical-c`) in the target language.
|
||||
|
||||
|
||||
|
||||
|
||||
## Generate types
|
||||
Having almost seemless type safety from the base SDK to the target language is critical, as writing types for each language has a lot of drawbacks such as duplicated code, and lots of overhead trying to keep the types up-to-date and in sync across a large collection of languages. Therefore we decided to use [QuickType](https://quicktype.io/) and [Serde](https://serde.rs/) to help us generate types for each language. In our Rust base SDK (`crates/infisical`), we define all the inputs/outputs.
|
||||
|
||||
If you are interested in reading about QuickType works under the hood, you can [read more here](http://blog.quicktype.io/under-the-hood/).
|
||||
|
||||
This is an example of a type defined in Rust (both input and output). For this to become a generated type, you'll need to add it to our schema generator. More on that further down.
|
||||
```rust
|
||||
use schemars::JsonSchema;
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, JsonSchema)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
// Input:
|
||||
pub struct CreateSecretOptions {
|
||||
pub environment: String, // environment
|
||||
pub secret_comment: Option<String>, // secretComment
|
||||
pub path: Option<String>, // secretPath
|
||||
pub secret_value: String, // secretValue
|
||||
pub skip_multiline_encoding: Option<bool>, // skipMultilineEncoding
|
||||
pub r#type: Option<String>, // shared / personal
|
||||
pub project_id: String, // workspaceId
|
||||
pub secret_name: String, // secretName (PASSED AS PARAMETER IN REQUEST)
|
||||
}
|
||||
|
||||
// Output:
|
||||
#[derive(Serialize, Deserialize, Debug, JsonSchema)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct CreateSecretResponse {
|
||||
pub secret: Secret, // "Secret" is defined elsewhere.
|
||||
}
|
||||
````
|
||||
|
||||
### Adding input types to the schema generator
|
||||
|
||||
You will _only_ have to define outputs in our schema generator, then QuickType will take care of the rest behind the scenes. You can find the Rust crate that takes care of type generation here: `crates/sdk-schemas/src/main.rs`.
|
||||
|
||||
Simply add the output _(also called response)_, to the `write_schema_for_response!` macro. This will let QuickType know that it should generate types for the given structs. The main function will look something like this:
|
||||
|
||||
```rust
|
||||
fn main() -> Result<()> {
|
||||
// Input types for new Client
|
||||
write_schema_for!(infisical_json::client::ClientSettings);
|
||||
// Input types for Client::run_command
|
||||
write_schema_for!(infisical_json::command::Command);
|
||||
|
||||
// Output types for Client::run_command
|
||||
// Only add structs which are direct results of SDK commands.
|
||||
write_schema_for_response! {
|
||||
infisical::manager::secrets::GetSecretResponse,
|
||||
infisical::manager::secrets::ListSecretsResponse,
|
||||
infisical::manager::secrets::UpdateSecretResponse,
|
||||
infisical::manager::secrets::DeleteSecretResponse,
|
||||
infisical::manager::secrets::CreateSecretResponse, // <-- This is the output from the above example!
|
||||
infisical::auth::AccessTokenSuccessResponse
|
||||
};
|
||||
|
||||
Ok(())
|
||||
}
|
||||
```
|
||||
|
||||
### Generating the types for the target language
|
||||
|
||||
Once you've added the output to the schema generator, you can generate the types for the target language by running the following command from the root directory:
|
||||
|
||||
```console
|
||||
$ npm install
|
||||
$ npm run schemas
|
||||
```
|
||||
|
||||
<Warning>If you change any of the structs defined in the base SDK, you will need to run this script to re-generate the types.</Warning>
|
||||
|
||||
This command will run the `schemas.ts` file found in the `support/scripts` folder. If you are adding a new language, it's important that you add the language to the code.
|
||||
|
||||
This is an example of how how we generate types for Node.js:
|
||||
|
||||
```ts
|
||||
const ts = await quicktype({
|
||||
inputData,
|
||||
lang: "typescript",
|
||||
rendererOptions: {}
|
||||
});
|
||||
await ensureDir("./languages/node/src/infisical_client");
|
||||
writeToFile("./languages/node/src/infisical_client/schemas.ts", ts.lines);
|
||||
```
|
||||
|
||||
## Building bindings
|
||||
We've tried to streamline the building process as much as possible. So you shouldn't have to worry much about building bindings, as it should just be a few commands.
|
||||
|
||||
### Node.js
|
||||
Building bindings for Node.js is very straight foward. The command below will generate NAPI bindings for Node.js, and move the bindings to the correct folder. We use [NAPI-RS](https://napi.rs/) to generate the bindings.
|
||||
|
||||
```console
|
||||
$ cd languages/node
|
||||
$ npm run build
|
||||
```
|
||||
|
||||
### Python
|
||||
To generate and use python bindings you will need to run the following commands.
|
||||
The Python SDK is located inside the crates folder. This is a limitation of the maturin tool, forcing us to structure the project in this way.
|
||||
|
||||
```console
|
||||
$ pip install -U pip maturin
|
||||
$ cd crates/infisical-py
|
||||
$ python3 -m venv .venv
|
||||
$ source .venv/bin/activate
|
||||
$ maturin develop
|
||||
```
|
||||
|
||||
<Warning>
|
||||
After running the commands above, it's very important that you rename the generated .so file to `infisical_py.so`. After renaming it you also need to move it into the root of the `crates/infisical-py` folder.
|
||||
</Warning>
|
||||
|
||||
### Java
|
||||
Java uses the C bindings to interact with the base SDK. To build and use the C bindings in Java, please follow the instructions below.
|
||||
|
||||
```console
|
||||
$ cd crates/infisical-c
|
||||
$ cargo build --release
|
||||
$ cd ../../languages/java
|
||||
```
|
||||
<Warning>
|
||||
After generating the C bindings, the generated .so or .dll has been created in the `/target` directory at the root of the project.
|
||||
You have to manually move the generated file into the `languages/java/src/main/resources` directory.
|
||||
</Warning>
|
||||
|
||||
## Error handling
|
||||
|
||||
### Error handling in the base SDK
|
||||
|
||||
The base SDK should never panic. If an error occurs, we should return a `Result` with an error message. We have a custom Result type defined in the `error.rs` file in the base SDK.
|
||||
|
||||
All our errors are defined in an enum called `Error`. The `Error` enum is defined in the `error.rs` file in the base SDK. The `Error` enum is used in the `Result` type, which is used as the return type for all functions in the base SDK.
|
||||
|
||||
```rust
|
||||
#[derive(Debug, Error)]
|
||||
pub enum Error {
|
||||
// Secret not found
|
||||
#[error("Secret with name '{}' not found.", .secret_name)]
|
||||
SecretNotFound { secret_name: String },
|
||||
|
||||
// .. other errors
|
||||
|
||||
// Errors that are not specific to the base SDK.
|
||||
#[error(transparent)]
|
||||
Reqwest(#[from] reqwest::Error),
|
||||
#[error(transparent)]
|
||||
Serde(#[from] serde_json::Error),
|
||||
#[error(transparent)]
|
||||
Io(#[from] std::io::Error),
|
||||
}
|
||||
```
|
||||
|
||||
### Returning an error
|
||||
|
||||
You can find many examples of how we return errors in the SDK code. A relevant example is for creating secrets, which can be found in `crates/infisical/src/api/secrets/create_secret.rs`. When the error happened due to a request error to our API, we have an API error handler. This prevents duplicate code and keeps error handling consistent across the SDK. You can find the api error handler in the `error.rs` file.
|
||||
|
||||
### Error handling in the target language SDK's.
|
||||
|
||||
All data sent to the target language SDK has the same format. The format is an object with 3 fields: `success (boolean)`, `data (could be anything or nothing)`, and `errorMessage (string or null)`.
|
||||
|
||||
The `success` field is used to determine if the request was successful or not. The `data` field is used to return data from the SDK. The `errorMessage` field is used to return an error message if the request was not successful.
|
||||
|
||||
This means that if the success if false or if the error message is not null, something went wrong and we should throw an error on the target-language level, with the error message.
|
||||
|
||||
## Command handler
|
||||
|
||||
### What is the command handler
|
||||
|
||||
The command handler (the `infisical-json` crate), takes care of executing commands sent from the target language. It also takes care of parsing the input and output to and from JSON. The command handler is the only part of the base SDK that should be aware of JSON. The rest of the base SDK should be completely unaware of JSON, and only work with the Rust structs defined in the base SDK.
|
||||
|
||||
The command handler exposes a function called `run_command`, which is what we use in the target language to execute commands. The function takes a json string as input, and returns a json string as output. We use helper functions generated by QuickType to convert the input and output to and from JSON.
|
||||
|
||||
### Creating new SDK methods
|
||||
|
||||
Creating new commands is necessary when adding new methods to the SDK's. Defining a new command is a 3-step process in most cases.
|
||||
|
||||
#### 1. Define the input and output structs
|
||||
|
||||
Earlier in this guide, we defined the input and output structs for the `CreateSecret` command. We will use that as an example here as well.
|
||||
|
||||
#### 2. Creating the method in the base SDK
|
||||
|
||||
The first step is to create the method in the base SDK. This step will be different depending on what method you are adding. In this example we're going to assume you're adding a function for creating a new secret.
|
||||
|
||||
After you created the function for creating the secret, you'll need need to add it to the ClientSecrets implementation. We do it this way to keep the code organized and easy to read. The ClientSecrets struct is located in the `crates/infisical/src/manager/secrets.rs` file.
|
||||
|
||||
```rust
|
||||
pub struct ClientSecrets<'a> {
|
||||
pub(crate) client: &'a mut crate::Client,
|
||||
}
|
||||
|
||||
impl<'a> ClientSecrets<'a> {
|
||||
pub async fn create(&mut self, input: &CreateSecretOptions) -> Result<CreateSecretResponse> {
|
||||
create_secret(self.client, input).await // <-- This is the function you created!
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> Client {
|
||||
pub fn secrets(&'a mut self) -> ClientSecrets<'a> {
|
||||
ClientSecrets { client: self }
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
#### 3. Define a new command
|
||||
|
||||
We define new commands in the `crates/infisical-json/src/command.rs` file. The `Command` enum is what we use to define new commands.
|
||||
|
||||
In the codesnippet below we define a new command called `CreateSecret`. The `CreateSecret` command takes a `CreateSecretOptions` struct as input. We don't have to define the output, because QuickType's converter helps us with figuring out the return type for each command.
|
||||
|
||||
````rust
|
||||
```rust
|
||||
use schemars::JsonSchema;
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
#[derive(Serialize, Deserialize, JsonSchema, Debug)]
|
||||
#[serde(rename_all = "camelCase", deny_unknown_fields)]
|
||||
pub enum Command {
|
||||
GetSecret(GetSecretOptions),
|
||||
ListSecrets(ListSecretsOptions),
|
||||
CreateSecret(CreateSecretOptions), // <-- The new command!
|
||||
UpdateSecret(UpdateSecretOptions),
|
||||
DeleteSecret(DeleteSecretOptions),
|
||||
}
|
||||
````
|
||||
|
||||
#### 4. Add the command to the command handler
|
||||
|
||||
After defining the command, we need to add it to the command handler itself. This takes place in the `crates/infisical-json/src/client.rs` file. The `run_command` function is what we use to execute commands.
|
||||
|
||||
In the Client implementation we try to parse the JSON string into a `Command` enum. If the parsing is successful, we match the command and execute the corresponding function.
|
||||
|
||||
```rust
|
||||
match cmd {
|
||||
Command::GetSecret(req) => self.0.secrets().get(&req).await.into_string(),
|
||||
Command::ListSecrets(req) => self.0.secrets().list(&req).await.into_string(),
|
||||
Command::UpdateSecret(req) => self.0.secrets().update(&req).await.into_string(),
|
||||
Command::DeleteSecret(req) => self.0.secrets().delete(&req).await.into_string(),
|
||||
|
||||
// This is the new command:
|
||||
Command::CreateSecret(req) => self.0.secrets().create(&req).await.into_string(),
|
||||
}
|
||||
```
|
||||
|
||||
#### 5. Implementing the new command in the target language SDK's
|
||||
|
||||
We did it! We've now added a new command to the base SDK. The last step is to implement the new command in the target language SDK's. The process is a little different from language to language, but in this example we're going to assume that we're adding a new command to the Node.js SDK.
|
||||
|
||||
First you'll need to generate the new type schemas, we added a new command, input struct, and output struct. [Read more about generating types here](#generating-the-types-for-the-target-language).
|
||||
|
||||
Secondly you need to build the new node bindings so we can use the new functionality in the Node.js SDK. You can do this by running the following command from the `languages/node` directory:
|
||||
|
||||
```console
|
||||
$ npm install
|
||||
$ npm run build
|
||||
```
|
||||
|
||||
The build command will execute a build script in the `infisical-napi` crate, and move the generated bindings to the appropriate folder.
|
||||
|
||||
After building the new bindings, you can access the new functionality in the Node.js SDK source.
|
||||
|
||||
```ts
|
||||
// 'binding' is a js file that makes it easier to access the methods in the bindings. (it's auto generated when running npm run build)
|
||||
import * as rust from "../../binding";
|
||||
// We can import the newly generated types from the schemas.ts file. (Generated with QuickType!)
|
||||
import type { CreateSecretOptions, CreateSecretResponse } from "./schemas";
|
||||
// This is the QuickType converter that we use to create commands with! It takes care of all JSON parsing and serialization.
|
||||
import { Convert, ClientSettings } from "./schemas";
|
||||
|
||||
export class InfisicalClient {
|
||||
#client: rust.Client;
|
||||
|
||||
constructor(settings: ClientSettings) {
|
||||
const settingsJson = settings == null ? null : Convert.clientSettingsToJson(settings);
|
||||
this.#client = new rust.InfisicalClient(settingsJson);
|
||||
}
|
||||
|
||||
// ... getSecret
|
||||
// ... listSecrets
|
||||
// ... updateSecret
|
||||
// ... deleteSecret
|
||||
|
||||
async createSecret(options: CreateSecretOptions): Promise<CreateSecretResponse["secret"]> {
|
||||
// The runCommand will return a JSON string, which we can parse into a CreateSecretResponse.
|
||||
const command = await this.#client.runCommand(
|
||||
Convert.commandToJson({
|
||||
createSecret: options
|
||||
})
|
||||
);
|
||||
const response = Convert.toResponseForCreateSecretResponse(command); // <-- This is the QuickType converter in action!
|
||||
|
||||
// If the response is not successful or the data is null, we throw an error.
|
||||
if (!response.success || response.data == null) {
|
||||
throw new Error(response.errorMessage ?? "Something went wrong");
|
||||
}
|
||||
|
||||
// To make it easier to work with the response, we return the secret directly.
|
||||
return response.data.secret;
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
And that's it! We've now added a new command to the base SDK, and implemented it in the Node.js SDK. The process is very similar for all other languages, but the code will look a little different.
|
||||
|
||||
## Conclusion
|
||||
|
||||
The SDK has a lot of moving parts, and it can be a little overwhelming at first. But once you get the hang of it, it's actually quite simple. If you have any questions, feel free to reach out to us on [Slack](https://infisical.com/slack), or [open an issue](https://github.com/Infisical/sdk/issues) on GitHub.
|
@@ -98,6 +98,7 @@
|
||||
{
|
||||
"group": "App Connections",
|
||||
"pages": [
|
||||
"integrations/app-connections",
|
||||
"integrations/app-connections/overview",
|
||||
{
|
||||
"group": "Connections",
|
||||
@@ -106,6 +107,7 @@
|
||||
"integrations/app-connections/auth0",
|
||||
"integrations/app-connections/aws",
|
||||
"integrations/app-connections/azure-app-configuration",
|
||||
"integrations/app-connections/azure-adcs",
|
||||
"integrations/app-connections/azure-client-secrets",
|
||||
"integrations/app-connections/azure-devops",
|
||||
"integrations/app-connections/azure-key-vault",
|
||||
@@ -183,6 +185,7 @@
|
||||
{
|
||||
"group": "User Authentication",
|
||||
"pages": [
|
||||
"integrations/user-authentication",
|
||||
"documentation/platform/auth-methods/email-password",
|
||||
{
|
||||
"group": "SSO",
|
||||
@@ -242,6 +245,7 @@
|
||||
{
|
||||
"group": "Machine Identities",
|
||||
"pages": [
|
||||
"integrations/machine-authentication",
|
||||
"documentation/platform/identities/alicloud-auth",
|
||||
"documentation/platform/identities/aws-auth",
|
||||
"documentation/platform/identities/azure-auth",
|
||||
@@ -342,10 +346,7 @@
|
||||
},
|
||||
{
|
||||
"group": "Architecture",
|
||||
"pages": [
|
||||
"internals/architecture/components",
|
||||
"internals/architecture/cloud"
|
||||
]
|
||||
"pages": ["internals/architecture/components", "internals/architecture/cloud"]
|
||||
},
|
||||
"internals/security",
|
||||
"internals/service-tokens"
|
||||
@@ -370,10 +371,6 @@
|
||||
"contributing/platform/backend/how-to-create-a-feature",
|
||||
"contributing/platform/backend/folder-structure"
|
||||
]
|
||||
},
|
||||
{
|
||||
"group": "Contributing to SDK",
|
||||
"pages": ["contributing/sdk/developing"]
|
||||
}
|
||||
]
|
||||
}
|
||||
@@ -423,6 +420,7 @@
|
||||
{
|
||||
"group": "Secret Rotation",
|
||||
"pages": [
|
||||
"integrations/secret-rotations",
|
||||
"documentation/platform/secret-rotation/overview",
|
||||
"documentation/platform/secret-rotation/auth0-client-secret",
|
||||
"documentation/platform/secret-rotation/aws-iam-user-secret",
|
||||
@@ -438,6 +436,7 @@
|
||||
{
|
||||
"group": "Dynamic Secrets",
|
||||
"pages": [
|
||||
"integrations/dynamic-secrets",
|
||||
"documentation/platform/dynamic-secrets/overview",
|
||||
"documentation/platform/dynamic-secrets/aws-elasticache",
|
||||
"documentation/platform/dynamic-secrets/aws-iam",
|
||||
@@ -508,6 +507,7 @@
|
||||
{
|
||||
"group": "Secret Syncs",
|
||||
"pages": [
|
||||
"integrations/secret-syncs",
|
||||
"integrations/secret-syncs/overview",
|
||||
{
|
||||
"group": "Syncs",
|
||||
@@ -564,10 +564,7 @@
|
||||
"integrations/cloud/gcp-secret-manager",
|
||||
{
|
||||
"group": "Cloudflare",
|
||||
"pages": [
|
||||
"integrations/cloud/cloudflare-pages",
|
||||
"integrations/cloud/cloudflare-workers"
|
||||
]
|
||||
"pages": ["integrations/cloud/cloudflare-pages", "integrations/cloud/cloudflare-workers"]
|
||||
},
|
||||
"integrations/cloud/terraform-cloud",
|
||||
"integrations/cloud/databricks",
|
||||
@@ -616,6 +613,7 @@
|
||||
{
|
||||
"group": "Framework Integrations",
|
||||
"pages": [
|
||||
"integrations/framework-integrations",
|
||||
"integrations/frameworks/spring-boot-maven",
|
||||
"integrations/frameworks/react",
|
||||
"integrations/frameworks/vue",
|
||||
@@ -661,9 +659,7 @@
|
||||
"documentation/platform/secret-scanning/overview",
|
||||
{
|
||||
"group": "Concepts",
|
||||
"pages": [
|
||||
"documentation/platform/secret-scanning/concepts/secret-scanning"
|
||||
]
|
||||
"pages": ["documentation/platform/secret-scanning/concepts/secret-scanning"]
|
||||
}
|
||||
]
|
||||
},
|
||||
@@ -690,6 +686,7 @@
|
||||
"documentation/platform/pki/subscribers",
|
||||
"documentation/platform/pki/certificates",
|
||||
"documentation/platform/pki/acme-ca",
|
||||
"documentation/platform/pki/azure-adcs",
|
||||
"documentation/platform/pki/est",
|
||||
"documentation/platform/pki/alerting",
|
||||
{
|
||||
@@ -712,18 +709,13 @@
|
||||
"documentation/platform/ssh/overview",
|
||||
{
|
||||
"group": "Concepts",
|
||||
"pages": [
|
||||
"documentation/platform/ssh/concepts/ssh-certificates"
|
||||
]
|
||||
"pages": ["documentation/platform/ssh/concepts/ssh-certificates"]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"group": "Platform Reference",
|
||||
"pages": [
|
||||
"documentation/platform/ssh/usage",
|
||||
"documentation/platform/ssh/host-groups"
|
||||
]
|
||||
"pages": ["documentation/platform/ssh/usage", "documentation/platform/ssh/host-groups"]
|
||||
}
|
||||
]
|
||||
},
|
||||
@@ -770,11 +762,7 @@
|
||||
"cli/commands/reset",
|
||||
{
|
||||
"group": "infisical scan",
|
||||
"pages": [
|
||||
"cli/commands/scan",
|
||||
"cli/commands/scan-git-changes",
|
||||
"cli/commands/scan-install"
|
||||
]
|
||||
"pages": ["cli/commands/scan", "cli/commands/scan-git-changes", "cli/commands/scan-install"]
|
||||
}
|
||||
]
|
||||
},
|
||||
@@ -1108,9 +1096,7 @@
|
||||
"pages": [
|
||||
{
|
||||
"group": "Kubernetes",
|
||||
"pages": [
|
||||
"api-reference/endpoints/dynamic-secrets/kubernetes/create-lease"
|
||||
]
|
||||
"pages": ["api-reference/endpoints/dynamic-secrets/kubernetes/create-lease"]
|
||||
},
|
||||
"api-reference/endpoints/dynamic-secrets/create",
|
||||
"api-reference/endpoints/dynamic-secrets/update",
|
||||
@@ -1396,6 +1382,18 @@
|
||||
"api-reference/endpoints/app-connections/aws/delete"
|
||||
]
|
||||
},
|
||||
{
|
||||
"group": "Azure ADCS",
|
||||
"pages": [
|
||||
"api-reference/endpoints/app-connections/azure-adcs/list",
|
||||
"api-reference/endpoints/app-connections/azure-adcs/available",
|
||||
"api-reference/endpoints/app-connections/azure-adcs/get-by-id",
|
||||
"api-reference/endpoints/app-connections/azure-adcs/get-by-name",
|
||||
"api-reference/endpoints/app-connections/azure-adcs/create",
|
||||
"api-reference/endpoints/app-connections/azure-adcs/update",
|
||||
"api-reference/endpoints/app-connections/azure-adcs/delete"
|
||||
]
|
||||
},
|
||||
{
|
||||
"group": "Azure App Configuration",
|
||||
"pages": [
|
||||
@@ -2453,7 +2451,7 @@
|
||||
"sdks/languages/node",
|
||||
"sdks/languages/python",
|
||||
"sdks/languages/java",
|
||||
"sdks/languages/csharp",
|
||||
"sdks/languages/dotnet",
|
||||
"sdks/languages/cpp",
|
||||
"sdks/languages/rust",
|
||||
"sdks/languages/go",
|
||||
@@ -2569,7 +2567,7 @@
|
||||
},
|
||||
{
|
||||
"label": "Terraform",
|
||||
"href": "https://infisical.com/docs/integrations/frameworks/terraform"
|
||||
"href": "https://registry.terraform.io/providers/Infisical/infisical/latest/docs"
|
||||
},
|
||||
{
|
||||
"label": "Ansible",
|
||||
@@ -2681,5 +2679,11 @@
|
||||
"koala": {
|
||||
"publicApiKey": "pk_b50d7184e0e39ddd5cdb43cf6abeadd9b97d"
|
||||
}
|
||||
}
|
||||
},
|
||||
"redirects": [
|
||||
{
|
||||
"source": "/sdks/languages/csharp",
|
||||
"destination": "/sdks/languages/dotnet"
|
||||
}
|
||||
]
|
||||
}
|
||||
|
@@ -8,12 +8,12 @@ description: "Learn how to migrate secrets from Vault to Infisical."
|
||||
|
||||
Migrating from Vault Self-Hosted or Dedicated Vault is a straight forward process with our inbuilt migration option. In order to migrate from Vault, you'll need to provide Infisical an access token to your Vault instance.
|
||||
|
||||
Currently the Vault migration only supports migrating secrets from the KV v2 secrets engine. If you're using a different secrets engine, please open an issue on our [GitHub repository](https://github.com/infisical/infisical/issues).
|
||||
Currently the Vault migration only supports migrating secrets from the KV V2 and V1 secrets engine. If you're using a different secrets engine, please open an issue on our [GitHub repository](https://github.com/infisical/infisical/issues).
|
||||
|
||||
|
||||
### Prerequisites
|
||||
|
||||
- A Vault instance with the KV v2 secrets engine enabled.
|
||||
- A Vault instance with the KV secret engine enabled.
|
||||
- An access token to your Vault instance.
|
||||
|
||||
|
||||
|
@@ -45,6 +45,64 @@ Once configured, the GitHub Organization Synchronization feature functions as fo
|
||||
|
||||
When a user logs in via the GitHub OAuth flow and selects the configured organization, the system will then automatically synchronize the teams they are a part of in GitHub with corresponding groups in Infisical.
|
||||
|
||||
## Manual Team Sync
|
||||
|
||||
You can manually synchronize GitHub teams for all organization members who have previously logged in with GitHub. This bulk sync operation updates team memberships without requiring users to log in again.
|
||||
|
||||
<Steps>
|
||||
<Step title="Generate a GitHub Access Token">
|
||||
To perform manual syncs, you'll need to create a GitHub Personal Access Token with the appropriate permissions. GitHub offers two types of tokens:
|
||||
|
||||
<Tabs>
|
||||
<Tab title="Classic Token">
|
||||
1. Go to [GitHub Settings → Personal Access Tokens → Tokens (classic)](https://github.com/settings/tokens)
|
||||
2. Click **Generate new token** → **Generate new token (classic)**
|
||||
3. Give your token a descriptive name (e.g., "Infisical GitHub Sync")
|
||||
4. Set an appropriate expiration date
|
||||
5. Select the **read:org** scope - Required to read organization team information
|
||||
6. Click **Generate token**
|
||||
7. Copy the token immediately (you won't be able to see it again)
|
||||
|
||||

|
||||
</Tab>
|
||||
<Tab title="Fine-grained Token">
|
||||
1. Go to [GitHub Settings → Personal Access Tokens → Fine-grained tokens](https://github.com/settings/personal-access-tokens/new)
|
||||
2. Click **Generate new token**
|
||||
3. Give your token a descriptive name (e.g., "Infisical GitHub Sync")
|
||||
4. Set an appropriate expiration date
|
||||
5. Select your organization under **Resource owner**
|
||||
6. Under **Organization permissions**, set **Members** to **Read**
|
||||
7. Click **Generate token**
|
||||
8. Copy the token immediately (you won't be able to see it again)
|
||||
|
||||

|
||||
</Tab>
|
||||
</Tabs>
|
||||
</Step>
|
||||
|
||||
<Step title="Configure the Token in Infisical">
|
||||
1. Navigate to the **Single Sign-On (SSO)** page and select the **Provisioning** tab.
|
||||
2. Click the **Configure** button next to your GitHub Organization configuration.
|
||||
3. In the configuration modal, you'll find an optional **GitHub Access Token** field.
|
||||
4. Paste the token you generated in the previous step.
|
||||
5. Click **Update** to save the configuration.
|
||||
|
||||

|
||||
</Step>
|
||||
|
||||
<Step title="Perform Manual Sync">
|
||||
Once you have configured the GitHub access token:
|
||||
|
||||
1. Navigate to the **Single Sign-On (SSO)** page and select the **Provisioning** tab.
|
||||
2. You'll see a **Sync Now** section with a button to trigger the manual sync.
|
||||
3. Click **Sync Now** to synchronize GitHub teams for all organization members.
|
||||
|
||||

|
||||
|
||||
The sync operation will process all organization members who have previously logged in with GitHub and update their team memberships accordingly.
|
||||
</Step>
|
||||
</Steps>
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
<Accordion title="Please check if your organization has approved the Infisical OAuth application.">
|
||||
|
@@ -147,6 +147,8 @@ In the following steps, we explore how to set up ACME Certificate Authority inte
|
||||
- **Directory URL**: Enter the ACME v2 directory URL for your chosen CA provider (e.g., `https://acme-v02.api.letsencrypt.org/directory` for Let's Encrypt).
|
||||
- **Account Email**: Email address to associate with your ACME account. This email will receive important notifications about your certificates.
|
||||
- **Enable Direct Issuance**: Toggle on to allow direct certificate issuance without requiring subscribers.
|
||||
- **EAB Key Identifier (KID)**: (Optional) The Key Identifier (KID) provided by your ACME CA for External Account Binding (EAB). This is required by some ACME providers (e.g., ZeroSSL, DigiCert) to link your ACME account to an external account you've pre-registered with them.
|
||||
- **EAB HMAC Key**: (Optional) The HMAC Key provided by your ACME CA for External Account Binding (EAB). This key is used in conjunction with the KID to prove ownership of the external account during ACME account registration.
|
||||
|
||||
Finally, press **Create** to register the ACME CA with Infisical.
|
||||
</Step>
|
||||
@@ -277,6 +279,19 @@ Let's Encrypt is a free, automated, and open Certificate Authority that provides
|
||||
Always test your ACME integration using Let's Encrypt's staging environment first. This allows you to verify your DNS configuration and certificate issuance process without consuming your production rate limits.
|
||||
</Note>
|
||||
|
||||
## Example: DigiCert Integration
|
||||
|
||||
DigiCert is a leading commercial Certificate Authority providing a wide range of trusted SSL/TLS certificates. Infisical can integrate with [DigiCert's ACME](https://docs.digicert.com/en/certcentral/certificate-tools/certificate-lifecycle-automation-guides/third-party-acme-integration/request-and-manage-certificates-with-acme.html) service to automate the provisioning and management of these certificates.
|
||||
|
||||
- **Directory URL**: `https://acme.digicert.com/v2/acme/directory`
|
||||
- **External Account Binding (EAB)**: Required. You will need a Key Identifier (KID) and HMAC Key from your DigiCert account to register the ACME CA in Infisical.
|
||||
- **Certificate Validity**: Typically 90 days, with automatic renewal through Infisical.
|
||||
- **Trusted By**: All major browsers and operating systems.
|
||||
|
||||
<Note>
|
||||
When integrating with DigiCert ACME, ensure you have obtained the necessary External Account Binding (EAB) Key Identifier (KID) and HMAC Key from your DigiCert account.
|
||||
</Note>
|
||||
|
||||
## FAQ
|
||||
|
||||
<AccordionGroup>
|
||||
|
206
docs/documentation/platform/pki/azure-adcs.mdx
Normal file
206
docs/documentation/platform/pki/azure-adcs.mdx
Normal file
@@ -0,0 +1,206 @@
|
||||
---
|
||||
title: "Certificates with Azure ADCS"
|
||||
description: "Learn how to issue and manage certificates using Microsoft Active Directory Certificate Services (ADCS) with Infisical."
|
||||
---
|
||||
|
||||
Issue and manage certificates using Microsoft Active Directory Certificate Services (ADCS) for enterprise-grade certificate management integrated with your existing Windows infrastructure.
|
||||
|
||||
## Prerequisites
|
||||
|
||||
Before setting up ADCS integration, ensure you have:
|
||||
|
||||
- Microsoft Active Directory Certificate Services (ADCS) server running and accessible
|
||||
- Domain administrator account with certificate management permissions
|
||||
- ADCS web enrollment enabled on your server
|
||||
- Network connectivity from Infisical to the ADCS server
|
||||
- **IP whitelisting**: Your ADCS server must allow connections from Infisical's IP addresses
|
||||
- For Infisical Cloud instances, see [Networking Configuration](/documentation/setup/networking) for the list of IPs to whitelist
|
||||
- For self-hosted instances, whitelist your Infisical server's IP address
|
||||
- Azure ADCS app connection configured (see [Azure ADCS Connection](/integrations/app-connections/azure-adcs))
|
||||
|
||||
## Complete Workflow: From Setup to Certificate Issuance
|
||||
|
||||
This section walks you through the complete end-to-end process of setting up Azure ADCS integration and issuing your first certificate.
|
||||
|
||||
<Steps>
|
||||
<Step title="Navigate to External Certificate Authorities">
|
||||
In your Infisical project, go to your **Certificate Project** → **Certificate Authority** to access the external CAs page.
|
||||
|
||||

|
||||
</Step>
|
||||
|
||||
<Step title="Create New Azure ADCS Certificate Service CA">
|
||||
Click **Create CA** and configure:
|
||||
- **Type**: Choose **Azure AD Certificate Service**
|
||||
- **Name**: Friendly name for this CA (e.g., "Production ADCS CA")
|
||||
- **App Connection**: Choose your ADCS connection from the dropdown
|
||||
|
||||

|
||||
</Step>
|
||||
|
||||
<Step title="Certificate Authority Created">
|
||||
Once created, your Azure ADCS Certificate Authority will appear in the list and be ready for use.
|
||||
|
||||

|
||||
</Step>
|
||||
|
||||
<Step title="Navigate to Subscribers">
|
||||
Go to **Subscribers** to access the subscribers page.
|
||||
|
||||

|
||||
</Step>
|
||||
|
||||
<Step title="Create New Subscriber">
|
||||
Click **Add Subscriber** and configure:
|
||||
- **Name**: Unique subscriber name (e.g., "web-server-certs")
|
||||
- **Certificate Authority**: Select your ADCS CA
|
||||
- **Common Name**: Certificate CN (e.g., "api.example.com")
|
||||
- **Certificate Template**: Select from dynamically loaded ADCS templates
|
||||
- **Subject Alternative Names**: DNS names, IP addresses, or email addresses
|
||||
- **TTL**: Certificate validity period (e.g., "1y" for 1 year)
|
||||
- **Additional Subject Fields**: Organization, OU, locality, state, country, email (if required by template)
|
||||
|
||||

|
||||
</Step>
|
||||
|
||||
<Step title="Subscriber Created">
|
||||
Your subscriber is now created and ready to issue certificates.
|
||||
|
||||

|
||||
</Step>
|
||||
|
||||
<Step title="Issue New Certificate">
|
||||
Click into your subscriber and click **Order Certificate** to generate a new certificate using your ADCS template.
|
||||
|
||||

|
||||
</Step>
|
||||
|
||||
<Step title="Certificate Created">
|
||||
Your certificate has been successfully issued by the ADCS server and is ready for use.
|
||||
|
||||

|
||||
</Step>
|
||||
|
||||
<Step title="View Certificate Details">
|
||||
Navigate to **Certificates** to view detailed information about all issued certificates, including expiration dates, serial numbers, and certificate chains.
|
||||
|
||||

|
||||
</Step>
|
||||
</Steps>
|
||||
|
||||
## Certificate Templates
|
||||
|
||||
Infisical automatically retrieves available certificate templates from your ADCS server, ensuring you can only select templates that are properly configured and accessible. The system dynamically discovers templates during the certificate authority setup and certificate issuance process.
|
||||
|
||||
### Common Template Types
|
||||
|
||||
ADCS templates you might see include:
|
||||
- **Web Server**: For SSL/TLS certificates with server authentication
|
||||
- **Computer**: For machine authentication certificates
|
||||
- **User**: For client authentication certificates
|
||||
- **Basic EFS**: For Encrypting File System certificates
|
||||
- **EFS Recovery Agent**: For EFS data recovery
|
||||
- **Administrator**: For administrative certificates
|
||||
- **Subordinate Certification Authority**: For issuing CA certificates
|
||||
|
||||
### Template Requirements
|
||||
|
||||
Ensure your ADCS templates are configured with:
|
||||
- **Enroll permissions** for your connection account
|
||||
- **Auto-enroll permissions** if using automated workflows
|
||||
- **Subject name requirements** matching your certificate requests
|
||||
- **Key usage extensions** appropriate for your use case
|
||||
|
||||
<Info>
|
||||
**Dynamic Template Discovery**: Infisical queries your ADCS server in real-time to populate available templates. Only templates you have permission to use will be displayed during certificate issuance.
|
||||
</Info>
|
||||
|
||||
## Certificate Issuance Limitations
|
||||
|
||||
### Immediate Issuance Only
|
||||
|
||||
<Warning>
|
||||
**Manual Approval Not Supported**: Infisical currently supports only **immediate certificate issuance**. Certificates that require manual approval or are held by ADCS policies cannot be issued through Infisical yet.
|
||||
</Warning>
|
||||
|
||||
For successful certificate issuance, ensure your ADCS templates and policies are configured to:
|
||||
- **Auto-approve** certificate requests without manual intervention
|
||||
- **Not require** administrator approval for the templates you plan to use
|
||||
- **Allow** the connection account to request and receive certificates immediately
|
||||
|
||||
### What Happens with Manual Approval
|
||||
|
||||
If a certificate request requires manual approval:
|
||||
1. The request will be submitted to ADCS successfully
|
||||
2. Infisical will attempt to retrieve the certificate with exponential backoff (up to 5 retries over ~1 minute)
|
||||
3. If the certificate is not approved within this timeframe, the request will **fail**
|
||||
4. **No background polling**: Currently, Infisical does not check for certificates that might be approved hours or days later
|
||||
|
||||
<Info>
|
||||
**Future Enhancement**: Background polling for delayed certificate approvals is planned for future releases.
|
||||
</Info>
|
||||
|
||||
### Certificate Revocation
|
||||
|
||||
<Warning>
|
||||
Certificate revocation is **not supported** by the Azure ADCS connector due to security and complexity considerations.
|
||||
</Warning>
|
||||
|
||||
## Advanced Configuration
|
||||
|
||||
### Custom Validity Periods
|
||||
|
||||
Enable custom certificate validity periods on your ADCS server:
|
||||
|
||||
```cmd
|
||||
# Run on ADCS server as Administrator
|
||||
certutil -setreg policy\EditFlags +EDITF_ATTRIBUTEENDDATE
|
||||
net stop certsvc
|
||||
net start certsvc
|
||||
```
|
||||
|
||||
This allows Infisical to control certificate expiration dates directly.
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
### Common Issues
|
||||
|
||||
**Certificate Request Denied**
|
||||
- Verify ADCS template permissions for your connection account
|
||||
- Check template subject name requirements
|
||||
- Ensure template allows the requested key algorithm and size
|
||||
|
||||
**Revocation Service Unavailable**
|
||||
- Verify IIS is running and the revocation endpoint is accessible
|
||||
- Check IIS application pool permissions
|
||||
- Test endpoint connectivity from Infisical
|
||||
|
||||
**Template Not Found**
|
||||
- Verify template exists on ADCS server and is published
|
||||
- Check that your connection account has enrollment permissions for the template
|
||||
- Ensure the template is properly configured and available in the ADCS web enrollment interface
|
||||
- Templates are dynamically loaded - refresh the PKI Subscriber form if templates don't appear
|
||||
|
||||
**Certificate Request Pending/Timeout**
|
||||
- Check if your ADCS template requires manual approval - Infisical only supports immediate issuance
|
||||
- Verify the certificate template is configured for auto-approval
|
||||
- Ensure your connection account has sufficient permissions to request certificates without approval
|
||||
- Review ADCS server policies that might be holding the certificate request
|
||||
|
||||
**Network Connectivity Issues**
|
||||
- Verify your ADCS server's firewall allows connections from Infisical
|
||||
- For Infisical Cloud: Ensure Infisical's IP addresses are whitelisted (see [Networking Configuration](/documentation/setup/networking))
|
||||
- For self-hosted: Whitelist your Infisical server's IP address on the ADCS server
|
||||
- Test HTTPS connectivity to the ADCS web enrollment endpoint
|
||||
- Check for any network security appliances blocking the connection
|
||||
|
||||
**Authentication Failures**
|
||||
- Verify ADCS connection credentials
|
||||
- Check domain account permissions
|
||||
- Ensure network connectivity to ADCS server
|
||||
|
||||
**SSL/TLS Certificate Errors**
|
||||
- For ADCS servers with self-signed or private certificates: disable "Reject Unauthorized" in the SSL tab of your Azure ADCS app connection, or provide the certificate in PEM format
|
||||
- Common SSL errors: `UNABLE_TO_VERIFY_LEAF_SIGNATURE`, `SELF_SIGNED_CERT_IN_CHAIN`, `CERT_HAS_EXPIRED`
|
||||
- The SSL configuration applies to all HTTPS communications between Infisical and your ADCS server
|
||||
- Only HTTPS URLs are supported - HTTP connections are not allowed for security reasons
|
@@ -22,7 +22,7 @@ The table below provides a quick overview of which delivery method may be suitab
|
||||
| Kubernetes (file-based, with rotation) | [Kubernetes CSI Provider](/integrations/platforms/kubernetes-csi) | Mounted files | Uses CSI driver to mount secrets as files with automatic rotation |
|
||||
| Image builds (VMs or containers) | [Packer Plugin](/integrations/frameworks/packer) | Env vars or files | Inject secrets at image build time |
|
||||
| Ansible automation | [Ansible Collection](/integrations/platforms/ansible) | Variables | Runtime secret fetching in playbooks using lookup plugin |
|
||||
| Terraform / Pulumi | [Terraform Provider](/integrations/frameworks/terraform), [Pulumi](/integrations/frameworks/pulumi) | Inputs / ephemeral resources | Use ephemeral for security; avoids storing secrets in state |
|
||||
| Terraform / Pulumi | [Terraform Provider](https://registry.terraform.io/providers/Infisical/infisical/latest/docs), [Pulumi](/integrations/frameworks/pulumi) | Inputs / ephemeral resources | Use ephemeral for security; avoids storing secrets in state |
|
||||
| Third-party platforms (GitHub, AWS, etc.) | [Secret Syncs](/integrations/secret-syncs/overview) | Preloaded secrets | Push secrets to platforms that can't fetch directly from Infisical |
|
||||
|
||||
From here, you can explore the delivery method that best matches your environment:
|
||||
@@ -90,7 +90,7 @@ This is useful when external systems require secrets to be available ahead of ti
|
||||
|
||||
Infisical integrates with common IaC and automation tools to help you securely inject secrets into your infrastructure provisioning workflows:
|
||||
|
||||
- [Terraform](/integrations/frameworks/terraform): Use the official Infisical Terraform provider to fetch secrets either as ephemeral resources (never written to state files) or as traditional data sources. Ideal for managing cloud infrastructure while keeping secrets secure and version-safe.
|
||||
- [Terraform](https://registry.terraform.io/providers/Infisical/infisical/latest/docs): Use the official Infisical Terraform provider to fetch secrets either as ephemeral resources (never written to state files) or as traditional data sources. Ideal for managing cloud infrastructure while keeping secrets secure and version-safe.
|
||||
- [Pulumi](/integrations/frameworks/pulumi): Integrate Infisical into Pulumi projects using the Terraform Bridge, allowing you to fetch and manage secrets in TypeScript, Go, Python, or C# — without changing your existing workflows.
|
||||
- [Ansible](/integrations/platforms/ansible): Retrieve secrets from Infisical at runtime using the official Ansible Collection and lookup plugin. Works well for dynamic configuration during playbook execution.
|
||||
- [Packer](/integrations/frameworks/packer): Inject secrets into VM or container images at build time using the Infisical Packer Plugin — useful for provisioning base images that require secure configuration values.
|
||||
|
Binary file not shown.
After Width: | Height: | Size: 600 KiB |
Binary file not shown.
After Width: | Height: | Size: 448 KiB |
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user