Mirror of https://github.com/Infisical/infisical.git (synced 2025-09-06 06:00:42 +00:00)

Compare commits: misc/add-s... → feat/prima... (199 commits)
Commits in this range (SHA1):
0c26fcbb0f 035156bcc3 c116eb9ed2 839b27d5bf 1909fae076 735ddc1138 3b235e3668 5c2dc32ded
d84572532a 93341ef6e5 3d78984320 4a55500325 3dae165710 a94635e5be 912cd5d20a e29a0e487e
8aa270545d 3c24132e97 38a7cb896b 6abd58ee21 c8275f41a3 a6d8ca5a6b c6b1af5737 8467286aa3
cea43d497d 3700597ba7 65f0597bd8 5b3cae7255 a4ff6340f8 c802b4aa3a b7d202c33a 2fc9725b24
bfb2486204 c29b5e37f3 2b1a36a96d 5a2058d24a e666409026 ecfc8b5f87 435bcd03d3 4d6e12d6b2
a6b4939ea5 640dccadb7 3ebd5305c2 8d1c0b432b be588c2653 88155576a2 394538769b f7828ed458
b40bb72643 4f1cd69bcc 4d4b4c13c3 c8bf9049de ab91863c77 14473c742c 6db4c614af 21e2db2963
4063cf5294 da0d4a31b1 b7d3ddff21 a3c6b1134b d931725930 6702498028 b650b142f7 19a5f52d20
e51c5256a0 3bb0c9b3ad 41404148e1 e04e11f597 5fffa17c30 3fa6154517 1d5cdb4000 a1b53855bb
b447ccd3f0 2058afb3e0 dc0a7d3a70 53618a4bd8 d6ca2cdc2e acf3bdc5a3 533d9cea38 82faf3a797
ece0af7787 6bccb1e5eb dc23abdb86 8d3be92d09 1e7f0f8a39 c99a4b7cc8 e3838643e5 5bd961735d
1147cfcea4 abb577e4e9 29dd49d696 0f76003f77 1c4dfbe028 65be2e7f7b cf64c89ea3 d934f03597
e051cfd146 be30327dc9 f9784f15ed 8e42fdaf5b 2a52463585 20287973b1 7f958e6d89 e7138f1be9
01fba20872 696a70577a 8ba61e8293 5944642278 f5434b5cba 1159b74bdb bc4885b098 97be78a107
4b42f7b1b5 3de7fec650 7bc6697801 34c6d254a0 a0da2f2d4c c7987772e3 07a55bb943 7894bd8ae1
5eee99e9ac 4485d7f757 d3c3f3a17e 999588b06e 37153cd8cf 4547ed7aeb e8ef0191d6 7d74dce82b
aae6a3f9af 43dd45de29 13b20806ba 49b5ab8126 c99d5c210c fc6778dd89 2f68ff1629 cde7673a23
1165b05e8a 8884c0e6bd 0762de93d6 af2f21fe93 dcd588007c 8d6461b01d f52dbaa2f2 0c92764409
976317e71b 7b52d60036 83479a091e 4e2592960d 8d5b6a17b1 8945bc0dc1 bceaac844f 2f375d6b65
8f00bab61c ec12acfcdf 34a8301617 1b22438c46 8ffff7e779 a349dda4bc f63ee39f3d f550a2ae3f
725e55f7e5 f59efc1948 f52e90a5c1 2fda307b67 ff7b530252 10cfbe0c74 4da24bfa39 8123be4c14
9a98192b9b 991ee20ec7 dc48281e6a b3002d784e c782493704 6c7062fa16 5c632db282 de5ad47f77
57c667f0b1 15d3638612 ebd3b5c9d1 52bbe25fc5 5136dbc543 bceddab89f 6d5bed756a bb14231d71
a7f33d669f d985b84577 8a72023e80 41a3ac6bd4 2fb5cc1712 b352428032 914bb3d389 be70bfa33f
7758e5dbfa 22fca374f2 94039ca509 c8f124e4c5 2501c57030 60b3f5c7c6 c2cea8cffc
334 backend/package-lock.json (generated)
@@ -25,6 +25,7 @@
|
||||
"@fastify/multipart": "8.3.1",
|
||||
"@fastify/passport": "^2.4.0",
|
||||
"@fastify/rate-limit": "^9.0.0",
|
||||
"@fastify/reply-from": "^9.8.0",
|
||||
"@fastify/request-context": "^5.1.0",
|
||||
"@fastify/session": "^10.7.0",
|
||||
"@fastify/static": "^7.0.4",
|
||||
@@ -63,6 +64,7 @@
|
||||
"argon2": "^0.31.2",
|
||||
"aws-sdk": "^2.1553.0",
|
||||
"axios": "^1.11.0",
|
||||
"axios-ntlm": "^1.4.4",
|
||||
"axios-retry": "^4.0.0",
|
||||
"bcrypt": "^5.1.1",
|
||||
"botbuilder": "^4.23.2",
|
||||
@@ -8043,6 +8045,42 @@
|
||||
"toad-cache": "^3.3.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@fastify/reply-from": {
|
||||
"version": "9.8.0",
|
||||
"resolved": "https://registry.npmjs.org/@fastify/reply-from/-/reply-from-9.8.0.tgz",
|
||||
"integrity": "sha512-bPNVaFhEeNI0Lyl6404YZaPFokudCplidE3QoOcr78yOy6H9sYw97p5KPYvY/NJNUHfFtvxOaSAHnK+YSiv/Mg==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@fastify/error": "^3.0.0",
|
||||
"end-of-stream": "^1.4.4",
|
||||
"fast-content-type-parse": "^1.1.0",
|
||||
"fast-querystring": "^1.0.0",
|
||||
"fastify-plugin": "^4.0.0",
|
||||
"toad-cache": "^3.7.0",
|
||||
"undici": "^5.19.1"
|
||||
}
|
||||
},
|
||||
"node_modules/@fastify/reply-from/node_modules/@fastify/busboy": {
|
||||
"version": "2.1.1",
|
||||
"resolved": "https://registry.npmjs.org/@fastify/busboy/-/busboy-2.1.1.tgz",
|
||||
"integrity": "sha512-vBZP4NlzfOlerQTnba4aqZoMhE/a9HY7HRqoOPaETQcSQuWEIyZMHGfVu6w9wGtGK5fED5qRs2DteVCjOH60sA==",
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">=14"
|
||||
}
|
||||
},
|
||||
"node_modules/@fastify/reply-from/node_modules/undici": {
|
||||
"version": "5.29.0",
|
||||
"resolved": "https://registry.npmjs.org/undici/-/undici-5.29.0.tgz",
|
||||
"integrity": "sha512-raqeBD6NQK4SkWhQzeYKd1KmIG6dllBOTt55Rmkt4HtI9mwdWtJljnrXjAFUBLTSN67HWrOIZ3EPF4kjUw80Bg==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@fastify/busboy": "^2.0.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=14.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@fastify/request-context": {
|
||||
"version": "5.1.0",
|
||||
"resolved": "https://registry.npmjs.org/@fastify/request-context/-/request-context-5.1.0.tgz",
|
||||
@@ -12956,216 +12994,6 @@
|
||||
"dev": true,
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/@swc/core": {
|
||||
"version": "1.3.107",
|
||||
"resolved": "https://registry.npmjs.org/@swc/core/-/core-1.3.107.tgz",
|
||||
"integrity": "sha512-zKhqDyFcTsyLIYK1iEmavljZnf4CCor5pF52UzLAz4B6Nu/4GLU+2LQVAf+oRHjusG39PTPjd2AlRT3f3QWfsQ==",
|
||||
"dev": true,
|
||||
"hasInstallScript": true,
|
||||
"optional": true,
|
||||
"peer": true,
|
||||
"dependencies": {
|
||||
"@swc/counter": "^0.1.1",
|
||||
"@swc/types": "^0.1.5"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=10"
|
||||
},
|
||||
"funding": {
|
||||
"type": "opencollective",
|
||||
"url": "https://opencollective.com/swc"
|
||||
},
|
||||
"optionalDependencies": {
|
||||
"@swc/core-darwin-arm64": "1.3.107",
|
||||
"@swc/core-darwin-x64": "1.3.107",
|
||||
"@swc/core-linux-arm-gnueabihf": "1.3.107",
|
||||
"@swc/core-linux-arm64-gnu": "1.3.107",
|
||||
"@swc/core-linux-arm64-musl": "1.3.107",
|
||||
"@swc/core-linux-x64-gnu": "1.3.107",
|
||||
"@swc/core-linux-x64-musl": "1.3.107",
|
||||
"@swc/core-win32-arm64-msvc": "1.3.107",
|
||||
"@swc/core-win32-ia32-msvc": "1.3.107",
|
||||
"@swc/core-win32-x64-msvc": "1.3.107"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"@swc/helpers": "^0.5.0"
|
||||
},
|
||||
"peerDependenciesMeta": {
|
||||
"@swc/helpers": {
|
||||
"optional": true
|
||||
}
|
||||
}
|
||||
},
|
||||
"node_modules/@swc/core-darwin-arm64": {
|
||||
"version": "1.3.107",
|
||||
"resolved": "https://registry.npmjs.org/@swc/core-darwin-arm64/-/core-darwin-arm64-1.3.107.tgz",
|
||||
"integrity": "sha512-47tD/5vSXWxPd0j/ZllyQUg4bqalbQTsmqSw0J4dDdS82MWqCAwUErUrAZPRjBkjNQ6Kmrf5rpCWaGTtPw+ngw==",
|
||||
"cpu": [
|
||||
"arm64"
|
||||
],
|
||||
"dev": true,
|
||||
"optional": true,
|
||||
"os": [
|
||||
"darwin"
|
||||
],
|
||||
"peer": true,
|
||||
"engines": {
|
||||
"node": ">=10"
|
||||
}
|
||||
},
|
||||
"node_modules/@swc/core-darwin-x64": {
|
||||
"version": "1.3.107",
|
||||
"resolved": "https://registry.npmjs.org/@swc/core-darwin-x64/-/core-darwin-x64-1.3.107.tgz",
|
||||
"integrity": "sha512-hwiLJ2ulNkBGAh1m1eTfeY1417OAYbRGcb/iGsJ+LuVLvKAhU/itzsl535CvcwAlt2LayeCFfcI8gdeOLeZa9A==",
|
||||
"cpu": [
|
||||
"x64"
|
||||
],
|
||||
"dev": true,
|
||||
"optional": true,
|
||||
"os": [
|
||||
"darwin"
|
||||
],
|
||||
"peer": true,
|
||||
"engines": {
|
||||
"node": ">=10"
|
||||
}
|
||||
},
|
||||
"node_modules/@swc/core-linux-arm-gnueabihf": {
|
||||
"version": "1.3.107",
|
||||
"resolved": "https://registry.npmjs.org/@swc/core-linux-arm-gnueabihf/-/core-linux-arm-gnueabihf-1.3.107.tgz",
|
||||
"integrity": "sha512-I2wzcC0KXqh0OwymCmYwNRgZ9nxX7DWnOOStJXV3pS0uB83TXAkmqd7wvMBuIl9qu4Hfomi9aDM7IlEEn9tumQ==",
|
||||
"cpu": [
|
||||
"arm"
|
||||
],
|
||||
"dev": true,
|
||||
"optional": true,
|
||||
"os": [
|
||||
"linux"
|
||||
],
|
||||
"peer": true,
|
||||
"engines": {
|
||||
"node": ">=10"
|
||||
}
|
||||
},
|
||||
"node_modules/@swc/core-linux-arm64-gnu": {
|
||||
"version": "1.3.107",
|
||||
"resolved": "https://registry.npmjs.org/@swc/core-linux-arm64-gnu/-/core-linux-arm64-gnu-1.3.107.tgz",
|
||||
"integrity": "sha512-HWgnn7JORYlOYnGsdunpSF8A+BCZKPLzLtEUA27/M/ZuANcMZabKL9Zurt7XQXq888uJFAt98Gy+59PU90aHKg==",
|
||||
"cpu": [
|
||||
"arm64"
|
||||
],
|
||||
"dev": true,
|
||||
"optional": true,
|
||||
"os": [
|
||||
"linux"
|
||||
],
|
||||
"peer": true,
|
||||
"engines": {
|
||||
"node": ">=10"
|
||||
}
|
||||
},
|
||||
"node_modules/@swc/core-linux-arm64-musl": {
|
||||
"version": "1.3.107",
|
||||
"resolved": "https://registry.npmjs.org/@swc/core-linux-arm64-musl/-/core-linux-arm64-musl-1.3.107.tgz",
|
||||
"integrity": "sha512-vfPF74cWfAm8hyhS8yvYI94ucMHIo8xIYU+oFOW9uvDlGQRgnUf/6DEVbLyt/3yfX5723Ln57U8uiMALbX5Pyw==",
|
||||
"cpu": [
|
||||
"arm64"
|
||||
],
|
||||
"dev": true,
|
||||
"optional": true,
|
||||
"os": [
|
||||
"linux"
|
||||
],
|
||||
"peer": true,
|
||||
"engines": {
|
||||
"node": ">=10"
|
||||
}
|
||||
},
|
||||
"node_modules/@swc/core-linux-x64-gnu": {
|
||||
"version": "1.3.107",
|
||||
"resolved": "https://registry.npmjs.org/@swc/core-linux-x64-gnu/-/core-linux-x64-gnu-1.3.107.tgz",
|
||||
"integrity": "sha512-uBVNhIg0ip8rH9OnOsCARUFZ3Mq3tbPHxtmWk9uAa5u8jQwGWeBx5+nTHpDOVd3YxKb6+5xDEI/edeeLpha/9g==",
|
||||
"cpu": [
|
||||
"x64"
|
||||
],
|
||||
"dev": true,
|
||||
"optional": true,
|
||||
"os": [
|
||||
"linux"
|
||||
],
|
||||
"peer": true,
|
||||
"engines": {
|
||||
"node": ">=10"
|
||||
}
|
||||
},
|
||||
"node_modules/@swc/core-linux-x64-musl": {
|
||||
"version": "1.3.107",
|
||||
"resolved": "https://registry.npmjs.org/@swc/core-linux-x64-musl/-/core-linux-x64-musl-1.3.107.tgz",
|
||||
"integrity": "sha512-mvACkUvzSIB12q1H5JtabWATbk3AG+pQgXEN95AmEX2ZA5gbP9+B+mijsg7Sd/3tboHr7ZHLz/q3SHTvdFJrEw==",
|
||||
"cpu": [
|
||||
"x64"
|
||||
],
|
||||
"dev": true,
|
||||
"optional": true,
|
||||
"os": [
|
||||
"linux"
|
||||
],
|
||||
"peer": true,
|
||||
"engines": {
|
||||
"node": ">=10"
|
||||
}
|
||||
},
|
||||
"node_modules/@swc/core-win32-arm64-msvc": {
|
||||
"version": "1.3.107",
|
||||
"resolved": "https://registry.npmjs.org/@swc/core-win32-arm64-msvc/-/core-win32-arm64-msvc-1.3.107.tgz",
|
||||
"integrity": "sha512-J3P14Ngy/1qtapzbguEH41kY109t6DFxfbK4Ntz9dOWNuVY3o9/RTB841ctnJk0ZHEG+BjfCJjsD2n8H5HcaOA==",
|
||||
"cpu": [
|
||||
"arm64"
|
||||
],
|
||||
"dev": true,
|
||||
"optional": true,
|
||||
"os": [
|
||||
"win32"
|
||||
],
|
||||
"peer": true,
|
||||
"engines": {
|
||||
"node": ">=10"
|
||||
}
|
||||
},
|
||||
"node_modules/@swc/core-win32-ia32-msvc": {
|
||||
"version": "1.3.107",
|
||||
"resolved": "https://registry.npmjs.org/@swc/core-win32-ia32-msvc/-/core-win32-ia32-msvc-1.3.107.tgz",
|
||||
"integrity": "sha512-ZBUtgyjTHlz8TPJh7kfwwwFma+ktr6OccB1oXC8fMSopD0AxVnQasgun3l3099wIsAB9eEsJDQ/3lDkOLs1gBA==",
|
||||
"cpu": [
|
||||
"ia32"
|
||||
],
|
||||
"dev": true,
|
||||
"optional": true,
|
||||
"os": [
|
||||
"win32"
|
||||
],
|
||||
"peer": true,
|
||||
"engines": {
|
||||
"node": ">=10"
|
||||
}
|
||||
},
|
||||
"node_modules/@swc/core-win32-x64-msvc": {
|
||||
"version": "1.3.107",
|
||||
"resolved": "https://registry.npmjs.org/@swc/core-win32-x64-msvc/-/core-win32-x64-msvc-1.3.107.tgz",
|
||||
"integrity": "sha512-Eyzo2XRqWOxqhE1gk9h7LWmUf4Bp4Xn2Ttb0ayAXFp6YSTxQIThXcT9kipXZqcpxcmDwoq8iWbbf2P8XL743EA==",
|
||||
"cpu": [
|
||||
"x64"
|
||||
],
|
||||
"dev": true,
|
||||
"optional": true,
|
||||
"os": [
|
||||
"win32"
|
||||
],
|
||||
"peer": true,
|
||||
"engines": {
|
||||
"node": ">=10"
|
||||
}
|
||||
},
|
||||
"node_modules/@swc/counter": {
|
||||
"version": "0.1.3",
|
||||
"resolved": "https://registry.npmjs.org/@swc/counter/-/counter-0.1.3.tgz",
|
||||
@@ -13183,14 +13011,6 @@
|
||||
"tslib": "^2.8.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@swc/types": {
|
||||
"version": "0.1.5",
|
||||
"resolved": "https://registry.npmjs.org/@swc/types/-/types-0.1.5.tgz",
|
||||
"integrity": "sha512-myfUej5naTBWnqOCc/MdVOLVjXUXtIA+NpDrDBKJtLLg2shUjBu3cZmB/85RyitKc55+lUUyl7oRfLOvkr2hsw==",
|
||||
"dev": true,
|
||||
"optional": true,
|
||||
"peer": true
|
||||
},
|
||||
"node_modules/@techteamer/ocsp": {
|
||||
"version": "1.0.1",
|
||||
"resolved": "https://registry.npmjs.org/@techteamer/ocsp/-/ocsp-1.0.1.tgz",
|
||||
@@ -15195,6 +15015,18 @@
|
||||
"proxy-from-env": "^1.1.0"
|
||||
}
|
||||
},
|
||||
"node_modules/axios-ntlm": {
|
||||
"version": "1.4.4",
|
||||
"resolved": "https://registry.npmjs.org/axios-ntlm/-/axios-ntlm-1.4.4.tgz",
|
||||
"integrity": "sha512-kpCRdzMfL8gi0Z0o96P3QPAK4XuC8iciGgxGXe+PeQ4oyjI2LZN8WSOKbu0Y9Jo3T/A7pB81n6jYVPIpglEuRA==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"axios": "^1.8.4",
|
||||
"des.js": "^1.1.0",
|
||||
"dev-null": "^0.1.1",
|
||||
"js-md4": "^0.3.2"
|
||||
}
|
||||
},
|
||||
"node_modules/axios-retry": {
|
||||
"version": "4.0.0",
|
||||
"resolved": "https://registry.npmjs.org/axios-retry/-/axios-retry-4.0.0.tgz",
|
||||
@@ -16954,6 +16786,16 @@
|
||||
"resolved": "https://registry.npmjs.org/deprecation/-/deprecation-2.3.1.tgz",
|
||||
"integrity": "sha512-xmHIy4F3scKVwMsQ4WnVaS8bHOx0DmVwRywosKhaILI0ywMDWPtBSku2HNxRvF7jtwDRsoEwYQSfbxj8b7RlJQ=="
|
||||
},
|
||||
"node_modules/des.js": {
|
||||
"version": "1.1.0",
|
||||
"resolved": "https://registry.npmjs.org/des.js/-/des.js-1.1.0.tgz",
|
||||
"integrity": "sha512-r17GxjhUCjSRy8aiJpr8/UadFIzMzJGexI3Nmz4ADi9LYSFx4gTBp80+NaX/YsXWWLhpZ7v/v/ubEc/bCNfKwg==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"inherits": "^2.0.1",
|
||||
"minimalistic-assert": "^1.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/destroy": {
|
||||
"version": "1.2.0",
|
||||
"resolved": "https://registry.npmjs.org/destroy/-/destroy-1.2.0.tgz",
|
||||
@@ -16981,6 +16823,12 @@
|
||||
"node": ">=8"
|
||||
}
|
||||
},
|
||||
"node_modules/dev-null": {
|
||||
"version": "0.1.1",
|
||||
"resolved": "https://registry.npmjs.org/dev-null/-/dev-null-0.1.1.tgz",
|
||||
"integrity": "sha512-nMNZG0zfMgmdv8S5O0TM5cpwNbGKRGPCxVsr0SmA3NZZy9CYBbuNLL0PD3Acx9e5LIUgwONXtM9kM6RlawPxEQ==",
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/diff": {
|
||||
"version": "4.0.2",
|
||||
"resolved": "https://registry.npmjs.org/diff/-/diff-4.0.2.tgz",
|
||||
@@ -19029,49 +18877,6 @@
|
||||
"resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz",
|
||||
"integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w=="
|
||||
},
|
||||
"node_modules/gcp-metadata": {
|
||||
"version": "5.3.0",
|
||||
"resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-5.3.0.tgz",
|
||||
"integrity": "sha512-FNTkdNEnBdlqF2oatizolQqNANMrcqJt6AAYt99B3y1aLLC8Hc5IOBb+ZnnzllodEEf6xMBp6wRcBbc16fa65w==",
|
||||
"optional": true,
|
||||
"peer": true,
|
||||
"dependencies": {
|
||||
"gaxios": "^5.0.0",
|
||||
"json-bigint": "^1.0.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=12"
|
||||
}
|
||||
},
|
||||
"node_modules/gcp-metadata/node_modules/gaxios": {
|
||||
"version": "5.1.3",
|
||||
"resolved": "https://registry.npmjs.org/gaxios/-/gaxios-5.1.3.tgz",
|
||||
"integrity": "sha512-95hVgBRgEIRQQQHIbnxBXeHbW4TqFk4ZDJW7wmVtvYar72FdhRIo1UGOLS2eRAKCPEdPBWu+M7+A33D9CdX9rA==",
|
||||
"optional": true,
|
||||
"peer": true,
|
||||
"dependencies": {
|
||||
"extend": "^3.0.2",
|
||||
"https-proxy-agent": "^5.0.0",
|
||||
"is-stream": "^2.0.0",
|
||||
"node-fetch": "^2.6.9"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=12"
|
||||
}
|
||||
},
|
||||
"node_modules/gcp-metadata/node_modules/is-stream": {
|
||||
"version": "2.0.1",
|
||||
"resolved": "https://registry.npmjs.org/is-stream/-/is-stream-2.0.1.tgz",
|
||||
"integrity": "sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==",
|
||||
"optional": true,
|
||||
"peer": true,
|
||||
"engines": {
|
||||
"node": ">=8"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/sponsors/sindresorhus"
|
||||
}
|
||||
},
|
||||
"node_modules/generate-function": {
|
||||
"version": "2.3.1",
|
||||
"resolved": "https://registry.npmjs.org/generate-function/-/generate-function-2.3.1.tgz",
|
||||
@@ -29562,9 +29367,10 @@
|
||||
}
|
||||
},
|
||||
"node_modules/toad-cache": {
|
||||
"version": "3.3.0",
|
||||
"resolved": "https://registry.npmjs.org/toad-cache/-/toad-cache-3.3.0.tgz",
|
||||
"integrity": "sha512-3oDzcogWGHZdkwrHyvJVpPjA7oNzY6ENOV3PsWJY9XYPZ6INo94Yd47s5may1U+nleBPwDhrRiTPMIvKaa3MQg==",
|
||||
"version": "3.7.0",
|
||||
"resolved": "https://registry.npmjs.org/toad-cache/-/toad-cache-3.7.0.tgz",
|
||||
"integrity": "sha512-/m8M+2BJUpoJdgAHoG+baCwBT+tf2VraSfkBgl0Y00qIWt41DJ8R5B8nsEw0I58YwF5IZH6z24/2TobDKnqSWw==",
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">=12"
|
||||
}
|
||||
|
backend/package.json

@@ -37,7 +37,7 @@
    "build": "tsup --sourcemap",
    "build:frontend": "npm run build --prefix ../frontend",
    "start": "node --enable-source-maps dist/main.mjs",
-   "type:check": "tsc --noEmit",
+   "type:check": "node --max-old-space-size=8192 ./node_modules/.bin/tsc --noEmit",
    "lint:fix": "node --max-old-space-size=8192 ./node_modules/.bin/eslint --fix --ext js,ts ./src",
    "lint": "node --max-old-space-size=8192 ./node_modules/.bin/eslint 'src/**/*.ts'",
    "test:unit": "vitest run -c vitest.unit.config.ts",
@@ -145,6 +145,7 @@
    "@fastify/multipart": "8.3.1",
    "@fastify/passport": "^2.4.0",
    "@fastify/rate-limit": "^9.0.0",
+   "@fastify/reply-from": "^9.8.0",
    "@fastify/request-context": "^5.1.0",
    "@fastify/session": "^10.7.0",
    "@fastify/static": "^7.0.4",
@@ -183,6 +184,7 @@
    "argon2": "^0.31.2",
    "aws-sdk": "^2.1553.0",
    "axios": "^1.11.0",
+   "axios-ntlm": "^1.4.4",
    "axios-retry": "^4.0.0",
    "bcrypt": "^5.1.1",
    "botbuilder": "^4.23.2",
3 backend/src/@types/fastify.d.ts (vendored)

@@ -83,6 +83,7 @@ import { TIdentityUaServiceFactory } from "@app/services/identity-ua/identity-ua
import { TIntegrationServiceFactory } from "@app/services/integration/integration-service";
import { TIntegrationAuthServiceFactory } from "@app/services/integration-auth/integration-auth-service";
import { TMicrosoftTeamsServiceFactory } from "@app/services/microsoft-teams/microsoft-teams-service";
+import { TOfflineUsageReportServiceFactory } from "@app/services/offline-usage-report/offline-usage-report-service";
import { TOrgRoleServiceFactory } from "@app/services/org/org-role-service";
import { TOrgServiceFactory } from "@app/services/org/org-service";
import { TOrgAdminServiceFactory } from "@app/services/org-admin/org-admin-service";
@@ -161,6 +162,7 @@ declare module "fastify" {
  };
  // identity injection. depending on which kinda of token the information is filled in auth
  auth: TAuthMode;
+  shouldForwardWritesToPrimaryInstance: boolean;
  permission: {
    authMethod: ActorAuthMethod;
    type: ActorType;
@@ -303,6 +305,7 @@ declare module "fastify" {
    bus: TEventBusService;
    sse: TServerSentEventsService;
    identityAuthTemplate: TIdentityAuthTemplateServiceFactory;
+   offlineUsageReport: TOfflineUsageReportServiceFactory;
  };
  // this is exclusive use for middlewares in which we need to inject data
  // everywhere else access using service layer
@@ -0,0 +1,49 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

const BATCH_SIZE = 1000;

export async function up(knex: Knex): Promise<void> {
  if (!(await knex.schema.hasColumn(TableName.UserAliases, "isEmailVerified"))) {
    // Add the column
    await knex.schema.alterTable(TableName.UserAliases, (t) => {
      t.boolean("isEmailVerified").defaultTo(false);
    });

    const aliasesToUpdate: { aliasId: string; isEmailVerified: boolean }[] = await knex(TableName.UserAliases)
      .join(TableName.Users, `${TableName.UserAliases}.userId`, `${TableName.Users}.id`)
      .select([`${TableName.UserAliases}.id as aliasId`, `${TableName.Users}.isEmailVerified`]);

    for (let i = 0; i < aliasesToUpdate.length; i += BATCH_SIZE) {
      const batch = aliasesToUpdate.slice(i, i + BATCH_SIZE);

      const trueIds = batch.filter((row) => row.isEmailVerified).map((row) => row.aliasId);

      if (trueIds.length > 0) {
        // eslint-disable-next-line no-await-in-loop
        await knex(TableName.UserAliases).whereIn("id", trueIds).update({ isEmailVerified: true });
      }
    }
  }

  if (!(await knex.schema.hasColumn(TableName.AuthTokens, "aliasId"))) {
    await knex.schema.alterTable(TableName.AuthTokens, (t) => {
      t.string("aliasId").nullable();
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  if (await knex.schema.hasColumn(TableName.UserAliases, "isEmailVerified")) {
    await knex.schema.alterTable(TableName.UserAliases, (t) => {
      t.dropColumn("isEmailVerified");
    });
  }

  if (await knex.schema.hasColumn(TableName.AuthTokens, "aliasId")) {
    await knex.schema.alterTable(TableName.AuthTokens, (t) => {
      t.dropColumn("aliasId");
    });
  }
}
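The migration above backfills isEmailVerified in 1000-row batches so no single statement holds a lock on a large UserAliases table. For comparison, the same backfill could be expressed as a single join update; the sketch below assumes Postgres and is not part of this diff.

```ts
// Sketch only (not in this diff): the same backfill as one UPDATE ... FROM join, assuming Postgres.
// Intended to run inside up(knex) where knex and TableName are in scope.
await knex.raw(
  `UPDATE ?? AS ua
      SET "isEmailVerified" = u."isEmailVerified"
     FROM ?? AS u
    WHERE ua."userId" = u.id`,
  [TableName.UserAliases, TableName.Users]
);
```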
@@ -0,0 +1,57 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  if (await knex.schema.hasTable(TableName.IdentityUniversalAuth)) {
    const hasLockoutEnabled = await knex.schema.hasColumn(TableName.IdentityUniversalAuth, "lockoutEnabled");
    const hasLockoutThreshold = await knex.schema.hasColumn(TableName.IdentityUniversalAuth, "lockoutThreshold");
    const hasLockoutDuration = await knex.schema.hasColumn(TableName.IdentityUniversalAuth, "lockoutDurationSeconds");
    const hasLockoutCounterReset = await knex.schema.hasColumn(
      TableName.IdentityUniversalAuth,
      "lockoutCounterResetSeconds"
    );

    await knex.schema.alterTable(TableName.IdentityUniversalAuth, (t) => {
      if (!hasLockoutEnabled) {
        t.boolean("lockoutEnabled").notNullable().defaultTo(true);
      }
      if (!hasLockoutThreshold) {
        t.integer("lockoutThreshold").notNullable().defaultTo(3);
      }
      if (!hasLockoutDuration) {
        t.integer("lockoutDurationSeconds").notNullable().defaultTo(300); // 5 minutes
      }
      if (!hasLockoutCounterReset) {
        t.integer("lockoutCounterResetSeconds").notNullable().defaultTo(30); // 30 seconds
      }
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  if (await knex.schema.hasTable(TableName.IdentityUniversalAuth)) {
    const hasLockoutEnabled = await knex.schema.hasColumn(TableName.IdentityUniversalAuth, "lockoutEnabled");
    const hasLockoutThreshold = await knex.schema.hasColumn(TableName.IdentityUniversalAuth, "lockoutThreshold");
    const hasLockoutDuration = await knex.schema.hasColumn(TableName.IdentityUniversalAuth, "lockoutDurationSeconds");
    const hasLockoutCounterReset = await knex.schema.hasColumn(
      TableName.IdentityUniversalAuth,
      "lockoutCounterResetSeconds"
    );

    await knex.schema.alterTable(TableName.IdentityUniversalAuth, (t) => {
      if (hasLockoutEnabled) {
        t.dropColumn("lockoutEnabled");
      }
      if (hasLockoutThreshold) {
        t.dropColumn("lockoutThreshold");
      }
      if (hasLockoutDuration) {
        t.dropColumn("lockoutDurationSeconds");
      }
      if (hasLockoutCounterReset) {
        t.dropColumn("lockoutCounterResetSeconds");
      }
    });
  }
}
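This migration only stores the lockout policy for universal-auth identities; the enforcement logic itself is not part of the hunks shown here. A minimal sketch of how a login path might evaluate the four columns, assuming hypothetical failure-tracking fields (failureCount, lastFailureAt) that this diff does not define:

```ts
// Hypothetical enforcement helper; not part of this diff. Assumes the caller
// tracks consecutive failed logins (failureCount) and the last failure time.
type LockoutPolicy = {
  lockoutEnabled: boolean;
  lockoutThreshold: number; // consecutive failures before locking (default 3)
  lockoutDurationSeconds: number; // how long a lockout lasts (default 300)
  lockoutCounterResetSeconds: number; // idle time after which the counter is forgotten (default 30)
};

type LoginFailureState = { failureCount: number; lastFailureAt: Date | null };

export const isLockedOut = (policy: LockoutPolicy, state: LoginFailureState, now = new Date()): boolean => {
  if (!policy.lockoutEnabled || !state.lastFailureAt) return false;

  const secondsSinceLastFailure = (now.getTime() - state.lastFailureAt.getTime()) / 1000;

  if (state.failureCount >= policy.lockoutThreshold) {
    // Threshold reached: locked until the lockout window has elapsed.
    return secondsSinceLastFailure < policy.lockoutDurationSeconds;
  }

  // Below the threshold the failure counter simply ages out after lockoutCounterResetSeconds.
  return false;
};
```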
@@ -0,0 +1,19 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  if (!(await knex.schema.hasColumn(TableName.SecretApprovalPolicy, "shouldCheckSecretPermission"))) {
    await knex.schema.alterTable(TableName.SecretApprovalPolicy, (t) => {
      t.boolean("shouldCheckSecretPermission").nullable();
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  if (await knex.schema.hasColumn(TableName.SecretApprovalPolicy, "shouldCheckSecretPermission")) {
    await knex.schema.alterTable(TableName.SecretApprovalPolicy, (t) => {
      t.dropColumn("shouldCheckSecretPermission");
    });
  }
}
@@ -0,0 +1,29 @@
import { Knex } from "knex";

import { selectAllTableCols } from "@app/lib/knex";

import { TableName } from "../schemas";

const BATCH_SIZE = 100;

export async function up(knex: Knex): Promise<void> {
  if (await knex.schema.hasColumn(TableName.SecretApprovalPolicy, "shouldCheckSecretPermission")) {
    // find all existing SecretApprovalPolicy rows to backfill shouldCheckSecretPermission flag
    const rows = await knex(TableName.SecretApprovalPolicy).select(selectAllTableCols(TableName.SecretApprovalPolicy));

    if (rows.length > 0) {
      for (let i = 0; i < rows.length; i += BATCH_SIZE) {
        const batch = rows.slice(i, i + BATCH_SIZE);
        // eslint-disable-next-line no-await-in-loop
        await knex(TableName.SecretApprovalPolicy)
          .whereIn(
            "id",
            batch.map((row) => row.id)
          )
          .update({ shouldCheckSecretPermission: true });
      }
    }
  }
}

export async function down(): Promise<void> {}
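This backfill uses the same chunked-update pattern as the UserAliases migration earlier: collect the target ids, then update them in fixed-size batches so no single statement touches the whole table. A generic sketch of that pattern, extracted for illustration only (the repository does not define such a helper in this diff):

```ts
import { Knex } from "knex";

// Illustrative only; not a helper that exists in the repository.
export const batchedUpdate = async (
  knex: Knex,
  table: string,
  ids: string[],
  patch: Record<string, unknown>,
  batchSize = 100
): Promise<void> => {
  for (let i = 0; i < ids.length; i += batchSize) {
    const chunk = ids.slice(i, i + batchSize);
    // eslint-disable-next-line no-await-in-loop
    await knex(table).whereIn("id", chunk).update(patch);
  }
};
```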
@@ -0,0 +1,23 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  const hasPropertiesCol = await knex.schema.hasColumn(TableName.PkiSubscriber, "properties");

  if (!hasPropertiesCol) {
    await knex.schema.alterTable(TableName.PkiSubscriber, (t) => {
      t.jsonb("properties").nullable();
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  const hasPropertiesCol = await knex.schema.hasColumn(TableName.PkiSubscriber, "properties");

  if (hasPropertiesCol) {
    await knex.schema.alterTable(TableName.PkiSubscriber, (t) => {
      t.dropColumn("properties");
    });
  }
}
@@ -17,7 +17,8 @@ export const AuthTokensSchema = z.object({
  createdAt: z.date(),
  updatedAt: z.date(),
  userId: z.string().uuid().nullable().optional(),
- orgId: z.string().uuid().nullable().optional()
+ orgId: z.string().uuid().nullable().optional(),
+ aliasId: z.string().nullable().optional()
});

export type TAuthTokens = z.infer<typeof AuthTokensSchema>;
@@ -18,7 +18,11 @@ export const IdentityUniversalAuthsSchema = z.object({
  createdAt: z.date(),
  updatedAt: z.date(),
  identityId: z.string().uuid(),
- accessTokenPeriod: z.coerce.number().default(0)
+ accessTokenPeriod: z.coerce.number().default(0),
+ lockoutEnabled: z.boolean().default(true),
+ lockoutThreshold: z.number().default(3),
+ lockoutDurationSeconds: z.number().default(300),
+ lockoutCounterResetSeconds: z.number().default(30)
});

export type TIdentityUniversalAuths = z.infer<typeof IdentityUniversalAuthsSchema>;
@@ -25,7 +25,8 @@ export const PkiSubscribersSchema = z.object({
  lastAutoRenewAt: z.date().nullable().optional(),
  lastOperationStatus: z.string().nullable().optional(),
  lastOperationMessage: z.string().nullable().optional(),
- lastOperationAt: z.date().nullable().optional()
+ lastOperationAt: z.date().nullable().optional(),
+ properties: z.unknown().nullable().optional()
});

export type TPkiSubscribers = z.infer<typeof PkiSubscribersSchema>;
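The generated schema exposes the new jsonb column as z.unknown(), so callers that rely on a particular shape would narrow it themselves. A sketch of that narrowing with zod; the property names used here are hypothetical and not defined anywhere in this diff:

```ts
import { z } from "zod";

// Hypothetical property shape, for illustration only.
const PkiSubscriberPropertiesSchema = z.object({
  organizationalUnit: z.string().optional(),
  locality: z.string().optional()
});

// Narrow the z.unknown() column value before using it.
export const parseSubscriberProperties = (raw: unknown) => PkiSubscriberPropertiesSchema.nullish().parse(raw);
```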
@@ -17,7 +17,8 @@ export const SecretApprovalPoliciesSchema = z.object({
  updatedAt: z.date(),
  enforcementLevel: z.string().default("hard"),
  deletedAt: z.date().nullable().optional(),
- allowedSelfApprovals: z.boolean().default(true)
+ allowedSelfApprovals: z.boolean().default(true),
+ shouldCheckSecretPermission: z.boolean().nullable().optional()
});

export type TSecretApprovalPolicies = z.infer<typeof SecretApprovalPoliciesSchema>;
@@ -16,7 +16,8 @@ export const UserAliasesSchema = z.object({
  emails: z.string().array().nullable().optional(),
  orgId: z.string().uuid().nullable().optional(),
  createdAt: z.date(),
- updatedAt: z.date()
+ updatedAt: z.date(),
+ isEmailVerified: z.boolean().default(false).nullable().optional()
});

export type TUserAliases = z.infer<typeof UserAliasesSchema>;
@@ -133,6 +133,7 @@ export const registerAccessApprovalRequestRouter = async (server: FastifyZodProv
              approvals: z.number(),
              approvers: z
                .object({
+                 isOrgMembershipActive: z.boolean().nullable().optional(),
                  userId: z.string().nullable().optional(),
                  sequence: z.number().nullable().optional(),
                  approvalsRequired: z.number().nullable().optional(),
@@ -150,6 +151,7 @@ export const registerAccessApprovalRequestRouter = async (server: FastifyZodProv
              }),
              reviewers: z
                .object({
+                 isOrgMembershipActive: z.boolean().nullable().optional(),
                  userId: z.string(),
                  status: z.string()
                })
@@ -126,4 +126,39 @@ export const registerGithubOrgSyncRouter = async (server: FastifyZodProvider) =>
      return { githubOrgSyncConfig };
    }
  });

  server.route({
    url: "/sync-all-teams",
    method: "POST",
    config: {
      rateLimit: writeLimit
    },
    onRequest: verifyAuth([AuthMode.JWT]),
    schema: {
      response: {
        200: z.object({
          totalUsers: z.number(),
          errors: z.array(z.string()),
          createdTeams: z.array(z.string()),
          updatedTeams: z.array(z.string()),
          removedMemberships: z.number(),
          syncDuration: z.number()
        })
      }
    },
    handler: async (req) => {
      const result = await server.services.githubOrgSync.syncAllTeams({
        orgPermission: req.permission
      });

      return {
        totalUsers: result.totalUsers,
        errors: result.errors,
        createdTeams: result.createdTeams,
        updatedTeams: result.updatedTeams,
        removedMemberships: result.removedMemberships,
        syncDuration: result.syncDuration
      };
    }
  });
};
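A sketch of invoking the new endpoint from a script. The /api/v1/github-org-sync prefix is an assumption about how this router is mounted (the diff only shows the relative route), and the request must be authenticated with a user JWT since the route only allows AuthMode.JWT:

```ts
// Sketch only: the mount path below is an assumption, not confirmed by this diff.
export const triggerGithubTeamSync = async (baseUrl: string, jwt: string) => {
  const res = await fetch(`${baseUrl}/api/v1/github-org-sync/sync-all-teams`, {
    method: "POST",
    headers: { Authorization: `Bearer ${jwt}` }
  });
  if (!res.ok) throw new Error(`Sync failed with status ${res.status}`);
  return (await res.json()) as {
    totalUsers: number;
    errors: string[];
    createdTeams: string[];
    updatedTeams: string[];
    removedMemberships: number;
    syncDuration: number;
  };
};
```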
@@ -294,22 +294,30 @@ export const registerSecretApprovalRequestRouter = async (server: FastifyZodProv
          200: z.object({
            approval: SecretApprovalRequestsSchema.merge(
              z.object({
                // secretPath: z.string(),
                policy: z.object({
                  id: z.string(),
                  name: z.string(),
                  approvals: z.number(),
-                 approvers: approvalRequestUser.array(),
+                 approvers: approvalRequestUser
+                   .extend({ isOrgMembershipActive: z.boolean().nullable().optional() })
+                   .array(),
                  bypassers: approvalRequestUser.array(),
                  secretPath: z.string().optional().nullable(),
                  enforcementLevel: z.string(),
                  deletedAt: z.date().nullish(),
-                 allowedSelfApprovals: z.boolean()
+                 allowedSelfApprovals: z.boolean(),
+                 shouldCheckSecretPermission: z.boolean().nullable().optional()
                }),
                environment: z.string(),
                statusChangedByUser: approvalRequestUser.optional(),
                committerUser: approvalRequestUser.nullish(),
-               reviewers: approvalRequestUser.extend({ status: z.string(), comment: z.string().optional() }).array(),
+               reviewers: approvalRequestUser
+                 .extend({
+                   status: z.string(),
+                   comment: z.string().optional(),
+                   isOrgMembershipActive: z.boolean().nullable().optional()
+                 })
+                 .array(),
                secretPath: z.string(),
                commits: secretRawSchema
                  .omit({ _id: true, environment: true, workspace: true, type: true, version: true, secretValue: true })
@@ -5,6 +5,7 @@ import {
|
||||
AccessApprovalRequestsSchema,
|
||||
TableName,
|
||||
TAccessApprovalRequests,
|
||||
TOrgMemberships,
|
||||
TUserGroupMembership,
|
||||
TUsers
|
||||
} from "@app/db/schemas";
|
||||
@@ -144,6 +145,7 @@ export interface TAccessApprovalRequestDALFactory extends Omit<TOrmify<TableName
|
||||
approvalsRequired: number | null | undefined;
|
||||
email: string | null | undefined;
|
||||
username: string;
|
||||
isOrgMembershipActive: boolean;
|
||||
}
|
||||
| {
|
||||
userId: string;
|
||||
@@ -151,6 +153,7 @@ export interface TAccessApprovalRequestDALFactory extends Omit<TOrmify<TableName
|
||||
approvalsRequired: number | null | undefined;
|
||||
email: string | null | undefined;
|
||||
username: string;
|
||||
isOrgMembershipActive: boolean;
|
||||
}
|
||||
)[];
|
||||
bypassers: string[];
|
||||
@@ -202,6 +205,7 @@ export interface TAccessApprovalRequestDALFactory extends Omit<TOrmify<TableName
|
||||
reviewers: {
|
||||
userId: string;
|
||||
status: string;
|
||||
isOrgMembershipActive: boolean;
|
||||
}[];
|
||||
approvers: (
|
||||
| {
|
||||
@@ -210,6 +214,7 @@ export interface TAccessApprovalRequestDALFactory extends Omit<TOrmify<TableName
|
||||
approvalsRequired: number | null | undefined;
|
||||
email: string | null | undefined;
|
||||
username: string;
|
||||
isOrgMembershipActive: boolean;
|
||||
}
|
||||
| {
|
||||
userId: string;
|
||||
@@ -217,6 +222,7 @@ export interface TAccessApprovalRequestDALFactory extends Omit<TOrmify<TableName
|
||||
approvalsRequired: number | null | undefined;
|
||||
email: string | null | undefined;
|
||||
username: string;
|
||||
isOrgMembershipActive: boolean;
|
||||
}
|
||||
)[];
|
||||
bypassers: string[];
|
||||
@@ -288,6 +294,24 @@ export const accessApprovalRequestDALFactory = (db: TDbClient): TAccessApprovalR
|
||||
`requestedByUser.id`
|
||||
)
|
||||
|
||||
.leftJoin<TOrgMemberships>(
|
||||
db(TableName.OrgMembership).as("approverOrgMembership"),
|
||||
`${TableName.AccessApprovalPolicyApprover}.approverUserId`,
|
||||
`approverOrgMembership.userId`
|
||||
)
|
||||
|
||||
.leftJoin<TOrgMemberships>(
|
||||
db(TableName.OrgMembership).as("approverGroupOrgMembership"),
|
||||
`${TableName.Users}.id`,
|
||||
`approverGroupOrgMembership.userId`
|
||||
)
|
||||
|
||||
.leftJoin<TOrgMemberships>(
|
||||
db(TableName.OrgMembership).as("reviewerOrgMembership"),
|
||||
`${TableName.AccessApprovalRequestReviewer}.reviewerUserId`,
|
||||
`reviewerOrgMembership.userId`
|
||||
)
|
||||
|
||||
.leftJoin(TableName.Environment, `${TableName.AccessApprovalPolicy}.envId`, `${TableName.Environment}.id`)
|
||||
|
||||
.select(selectAllTableCols(TableName.AccessApprovalRequest))
|
||||
@@ -300,6 +324,10 @@ export const accessApprovalRequestDALFactory = (db: TDbClient): TAccessApprovalR
|
||||
db.ref("allowedSelfApprovals").withSchema(TableName.AccessApprovalPolicy).as("policyAllowedSelfApprovals"),
|
||||
db.ref("envId").withSchema(TableName.AccessApprovalPolicy).as("policyEnvId"),
|
||||
db.ref("deletedAt").withSchema(TableName.AccessApprovalPolicy).as("policyDeletedAt"),
|
||||
|
||||
db.ref("isActive").withSchema("approverOrgMembership").as("approverIsOrgMembershipActive"),
|
||||
db.ref("isActive").withSchema("approverGroupOrgMembership").as("approverGroupIsOrgMembershipActive"),
|
||||
db.ref("isActive").withSchema("reviewerOrgMembership").as("reviewerIsOrgMembershipActive"),
|
||||
db.ref("maxTimePeriod").withSchema(TableName.AccessApprovalPolicy).as("policyMaxTimePeriod")
|
||||
)
|
||||
.select(db.ref("approverUserId").withSchema(TableName.AccessApprovalPolicyApprover))
|
||||
@@ -396,17 +424,26 @@ export const accessApprovalRequestDALFactory = (db: TDbClient): TAccessApprovalR
        {
          key: "reviewerUserId",
          label: "reviewers" as const,
-         mapper: ({ reviewerUserId: userId, reviewerStatus: status }) => (userId ? { userId, status } : undefined)
+         mapper: ({ reviewerUserId: userId, reviewerStatus: status, reviewerIsOrgMembershipActive }) =>
+           userId ? { userId, status, isOrgMembershipActive: reviewerIsOrgMembershipActive } : undefined
        },
        {
          key: "approverUserId",
          label: "approvers" as const,
-         mapper: ({ approverUserId, approverSequence, approvalsRequired, approverUsername, approverEmail }) => ({
+         mapper: ({
+           approverUserId,
+           approverSequence,
+           approvalsRequired,
+           approverUsername,
+           approverEmail,
+           approverIsOrgMembershipActive
+         }) => ({
            userId: approverUserId,
            sequence: approverSequence,
            approvalsRequired,
            email: approverEmail,
-           username: approverUsername
+           username: approverUsername,
+           isOrgMembershipActive: approverIsOrgMembershipActive
          })
        },
        {
@@ -417,13 +454,15 @@ export const accessApprovalRequestDALFactory = (db: TDbClient): TAccessApprovalR
            approverSequence,
            approvalsRequired,
            approverGroupEmail,
-           approverGroupUsername
+           approverGroupUsername,
+           approverGroupIsOrgMembershipActive
          }) => ({
            userId: approverGroupUserId,
            sequence: approverSequence,
            approvalsRequired,
            email: approverGroupEmail,
-           username: approverGroupUsername
+           username: approverGroupUsername,
+           isOrgMembershipActive: approverGroupIsOrgMembershipActive
          })
        },
        { key: "bypasserUserId", label: "bypassers" as const, mapper: ({ bypasserUserId }) => bypasserUserId },
@@ -87,6 +87,7 @@ export interface TAccessApprovalRequestServiceFactory {
|
||||
approvalsRequired: number | null | undefined;
|
||||
email: string | null | undefined;
|
||||
username: string;
|
||||
isOrgMembershipActive: boolean;
|
||||
}
|
||||
| {
|
||||
userId: string;
|
||||
@@ -94,6 +95,7 @@ export interface TAccessApprovalRequestServiceFactory {
|
||||
approvalsRequired: number | null | undefined;
|
||||
email: string | null | undefined;
|
||||
username: string;
|
||||
isOrgMembershipActive: boolean;
|
||||
}
|
||||
)[];
|
||||
bypassers: string[];
|
||||
@@ -145,6 +147,7 @@ export interface TAccessApprovalRequestServiceFactory {
|
||||
reviewers: {
|
||||
userId: string;
|
||||
status: string;
|
||||
isOrgMembershipActive: boolean;
|
||||
}[];
|
||||
approvers: (
|
||||
| {
|
||||
@@ -153,6 +156,7 @@ export interface TAccessApprovalRequestServiceFactory {
|
||||
approvalsRequired: number | null | undefined;
|
||||
email: string | null | undefined;
|
||||
username: string;
|
||||
isOrgMembershipActive: boolean;
|
||||
}
|
||||
| {
|
||||
userId: string;
|
||||
@@ -160,6 +164,7 @@ export interface TAccessApprovalRequestServiceFactory {
|
||||
approvalsRequired: number | null | undefined;
|
||||
email: string | null | undefined;
|
||||
username: string;
|
||||
isOrgMembershipActive: boolean;
|
||||
}
|
||||
)[];
|
||||
bypassers: string[];
|
||||
|
@@ -14,7 +14,7 @@ import { ActorType } from "@app/services/auth/auth-type";
import { EventType, filterableSecretEvents } from "./audit-log-types";

export interface TAuditLogDALFactory extends Omit<TOrmify<TableName.AuditLog>, "find"> {
-  pruneAuditLog: (tx?: knex.Knex) => Promise<void>;
+  pruneAuditLog: () => Promise<void>;
  find: (
    arg: Omit<TFindQuery, "actor" | "eventType"> & {
      actorId?: string | undefined;
@@ -41,6 +41,10 @@
  offset?: number;
};

+const QUERY_TIMEOUT_MS = 10 * 60 * 1000; // 10 minutes
+const AUDIT_LOG_PRUNE_BATCH_SIZE = 10000;
+const MAX_RETRY_ON_FAILURE = 3;
+
export const auditLogDALFactory = (db: TDbClient) => {
  const auditLogOrm = ormify(db, TableName.AuditLog);

@@ -151,20 +155,20 @@ export const auditLogDALFactory = (db: TDbClient) => {
  };

  // delete all audit log that have expired
-  const pruneAuditLog: TAuditLogDALFactory["pruneAuditLog"] = async (tx) => {
-    const runPrune = async (dbClient: knex.Knex) => {
-      const AUDIT_LOG_PRUNE_BATCH_SIZE = 10000;
-      const MAX_RETRY_ON_FAILURE = 3;
+  const pruneAuditLog: TAuditLogDALFactory["pruneAuditLog"] = async () => {
+    const today = new Date();
+    let deletedAuditLogIds: { id: string }[] = [];
+    let numberOfRetryOnFailure = 0;
+    let isRetrying = false;

-      const today = new Date();
-      let deletedAuditLogIds: { id: string }[] = [];
-      let numberOfRetryOnFailure = 0;
-      let isRetrying = false;
+    logger.info(`${QueueName.DailyResourceCleanUp}: audit log started`);
+    do {
+      try {
+        // eslint-disable-next-line no-await-in-loop
+        deletedAuditLogIds = await db.transaction(async (trx) => {
+          await trx.raw(`SET statement_timeout = ${QUERY_TIMEOUT_MS}`);

-      logger.info(`${QueueName.DailyResourceCleanUp}: audit log started`);
-      do {
-        try {
-          const findExpiredLogSubQuery = dbClient(TableName.AuditLog)
+          const findExpiredLogSubQuery = trx(TableName.AuditLog)
            .where("expiresAt", "<", today)
            .where("createdAt", "<", today) // to use audit log partition
            .orderBy(`${TableName.AuditLog}.createdAt`, "desc")
@@ -172,35 +176,25 @@ export const auditLogDALFactory = (db: TDbClient) => {
            .limit(AUDIT_LOG_PRUNE_BATCH_SIZE);

-          // eslint-disable-next-line no-await-in-loop
-          deletedAuditLogIds = await dbClient(TableName.AuditLog)
-            .whereIn("id", findExpiredLogSubQuery)
-            .del()
-            .returning("id");
-          numberOfRetryOnFailure = 0; // reset
-        } catch (error) {
-          numberOfRetryOnFailure += 1;
-          deletedAuditLogIds = [];
-          logger.error(error, "Failed to delete audit log on pruning");
-        } finally {
-          // eslint-disable-next-line no-await-in-loop
-          await new Promise((resolve) => {
-            setTimeout(resolve, 10); // time to breathe for db
-          });
-        }
-        isRetrying = numberOfRetryOnFailure > 0;
-      } while (deletedAuditLogIds.length > 0 || (isRetrying && numberOfRetryOnFailure < MAX_RETRY_ON_FAILURE));
-      logger.info(`${QueueName.DailyResourceCleanUp}: audit log completed`);
-    };
+          const results = await trx(TableName.AuditLog).whereIn("id", findExpiredLogSubQuery).del().returning("id");

-    if (tx) {
-      await runPrune(tx);
-    } else {
-      const QUERY_TIMEOUT_MS = 10 * 60 * 1000; // 10 minutes
-      await db.transaction(async (trx) => {
-        await trx.raw(`SET statement_timeout = ${QUERY_TIMEOUT_MS}`);
-        await runPrune(trx);
-      });
-    }
+          return results;
+        });

+        numberOfRetryOnFailure = 0; // reset
+      } catch (error) {
+        numberOfRetryOnFailure += 1;
+        deletedAuditLogIds = [];
+        logger.error(error, "Failed to delete audit log on pruning");
+      } finally {
+        // eslint-disable-next-line no-await-in-loop
+        await new Promise((resolve) => {
+          setTimeout(resolve, 10); // time to breathe for db
+        });
+      }
+      isRetrying = numberOfRetryOnFailure > 0;
+    } while (deletedAuditLogIds.length > 0 || (isRetrying && numberOfRetryOnFailure < MAX_RETRY_ON_FAILURE));
+    logger.info(`${QueueName.DailyResourceCleanUp}: audit log completed`);
+  };

  const create: TAuditLogDALFactory["create"] = async (tx) => {
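With this change pruneAuditLog no longer accepts an external transaction: it opens its own transaction per batch, applies the statement timeout inside it, and keeps the retry loop. The log lines reference QueueName.DailyResourceCleanUp, so it is presumably driven by a daily cleanup job; a minimal standalone sketch with a plain timer (not the project's actual queue wiring, which this diff does not show):

```ts
// Sketch only: the real integration runs pruneAuditLog from the daily resource
// cleanup queue job, not from a bare interval like this.
const DAY_MS = 24 * 60 * 60 * 1000;

export const startAuditLogPruneTimer = (auditLogDAL: { pruneAuditLog: () => Promise<void> }) => {
  const timer = setInterval(() => {
    void auditLogDAL.pruneAuditLog().catch((err) => {
      // errors are already logged inside pruneAuditLog; this is a last-resort guard
      console.error("audit log prune failed", err);
    });
  }, DAY_MS);
  return () => clearInterval(timer);
};
```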
@@ -6,9 +6,9 @@ import { getConfig } from "@app/lib/config/env";
import { BadRequestError } from "@app/lib/errors";
import { ActorType } from "@app/services/auth/auth-type";

-import { OrgPermissionActions, OrgPermissionSubjects } from "../permission/org-permission";
+import { OrgPermissionAuditLogsActions, OrgPermissionSubjects } from "../permission/org-permission";
import { TPermissionServiceFactory } from "../permission/permission-service-types";
-import { ProjectPermissionActions, ProjectPermissionSub } from "../permission/project-permission";
+import { ProjectPermissionAuditLogsActions, ProjectPermissionSub } from "../permission/project-permission";
import { TAuditLogDALFactory } from "./audit-log-dal";
import { TAuditLogQueueServiceFactory } from "./audit-log-queue";
import { EventType, TAuditLogServiceFactory } from "./audit-log-types";
@@ -41,7 +41,10 @@ export const auditLogServiceFactory = ({
      actorOrgId,
      actionProjectType: ActionProjectType.Any
    });
-    ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.AuditLogs);
+    ForbiddenError.from(permission).throwUnlessCan(
+      ProjectPermissionAuditLogsActions.Read,
+      ProjectPermissionSub.AuditLogs
+    );
  } else {
    // Organization-wide logs
    const { permission } = await permissionService.getOrgPermission(
@@ -52,7 +55,10 @@ export const auditLogServiceFactory = ({
      actorOrgId
    );

-    ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Read, OrgPermissionSubjects.AuditLogs);
+    ForbiddenError.from(permission).throwUnlessCan(
+      OrgPermissionAuditLogsActions.Read,
+      OrgPermissionSubjects.AuditLogs
+    );
  }

  // If project ID is not provided, then we need to return all the audit logs for the organization itself.
@@ -198,6 +198,7 @@ export enum EventType {
|
||||
|
||||
CREATE_IDENTITY_UNIVERSAL_AUTH_CLIENT_SECRET = "create-identity-universal-auth-client-secret",
|
||||
REVOKE_IDENTITY_UNIVERSAL_AUTH_CLIENT_SECRET = "revoke-identity-universal-auth-client-secret",
|
||||
CLEAR_IDENTITY_UNIVERSAL_AUTH_LOCKOUTS = "clear-identity-universal-auth-lockouts",
|
||||
|
||||
GET_IDENTITY_UNIVERSAL_AUTH_CLIENT_SECRETS = "get-identity-universal-auth-client-secret",
|
||||
GET_IDENTITY_UNIVERSAL_AUTH_CLIENT_SECRET_BY_ID = "get-identity-universal-auth-client-secret-by-id",
|
||||
@@ -281,6 +282,7 @@ export enum EventType {
|
||||
UPDATE_SSH_CERTIFICATE_TEMPLATE = "update-ssh-certificate-template",
|
||||
DELETE_SSH_CERTIFICATE_TEMPLATE = "delete-ssh-certificate-template",
|
||||
GET_SSH_CERTIFICATE_TEMPLATE = "get-ssh-certificate-template",
|
||||
GET_AZURE_AD_TEMPLATES = "get-azure-ad-templates",
|
||||
GET_SSH_HOST = "get-ssh-host",
|
||||
CREATE_SSH_HOST = "create-ssh-host",
|
||||
UPDATE_SSH_HOST = "update-ssh-host",
|
||||
@@ -866,6 +868,10 @@ interface AddIdentityUniversalAuthEvent {
|
||||
accessTokenMaxTTL: number;
|
||||
accessTokenNumUsesLimit: number;
|
||||
accessTokenTrustedIps: Array<TIdentityTrustedIp>;
|
||||
lockoutEnabled: boolean;
|
||||
lockoutThreshold: number;
|
||||
lockoutDurationSeconds: number;
|
||||
lockoutCounterResetSeconds: number;
|
||||
};
|
||||
}
|
||||
|
||||
@@ -878,6 +884,10 @@ interface UpdateIdentityUniversalAuthEvent {
|
||||
accessTokenMaxTTL?: number;
|
||||
accessTokenNumUsesLimit?: number;
|
||||
accessTokenTrustedIps?: Array<TIdentityTrustedIp>;
|
||||
lockoutEnabled?: boolean;
|
||||
lockoutThreshold?: number;
|
||||
lockoutDurationSeconds?: number;
|
||||
lockoutCounterResetSeconds?: number;
|
||||
};
|
||||
}
|
||||
|
||||
@@ -1037,6 +1047,13 @@ interface RevokeIdentityUniversalAuthClientSecretEvent {
|
||||
};
|
||||
}
|
||||
|
||||
interface ClearIdentityUniversalAuthLockoutsEvent {
|
||||
type: EventType.CLEAR_IDENTITY_UNIVERSAL_AUTH_LOCKOUTS;
|
||||
metadata: {
|
||||
identityId: string;
|
||||
};
|
||||
}
|
||||
|
||||
interface LoginIdentityGcpAuthEvent {
|
||||
type: EventType.LOGIN_IDENTITY_GCP_AUTH;
|
||||
metadata: {
|
||||
@@ -2497,6 +2514,14 @@ interface CreateCertificateTemplateEstConfig {
|
||||
};
|
||||
}
|
||||
|
||||
interface GetAzureAdCsTemplatesEvent {
|
||||
type: EventType.GET_AZURE_AD_TEMPLATES;
|
||||
metadata: {
|
||||
caId: string;
|
||||
amount: number;
|
||||
};
|
||||
}
|
||||
|
||||
interface UpdateCertificateTemplateEstConfig {
|
||||
type: EventType.UPDATE_CERTIFICATE_TEMPLATE_EST_CONFIG;
|
||||
metadata: {
|
||||
@@ -3491,6 +3516,7 @@ export type Event =
|
||||
| GetIdentityUniversalAuthClientSecretsEvent
|
||||
| GetIdentityUniversalAuthClientSecretByIdEvent
|
||||
| RevokeIdentityUniversalAuthClientSecretEvent
|
||||
| ClearIdentityUniversalAuthLockoutsEvent
|
||||
| LoginIdentityGcpAuthEvent
|
||||
| AddIdentityGcpAuthEvent
|
||||
| DeleteIdentityGcpAuthEvent
|
||||
@@ -3636,6 +3662,7 @@ export type Event =
|
||||
| CreateCertificateTemplateEstConfig
|
||||
| UpdateCertificateTemplateEstConfig
|
||||
| GetCertificateTemplateEstConfig
|
||||
| GetAzureAdCsTemplatesEvent
|
||||
| AttemptCreateSlackIntegration
|
||||
| AttemptReinstallSlackIntegration
|
||||
| UpdateSlackIntegration
|
||||
|
@@ -1,14 +1,19 @@
|
||||
/* eslint-disable @typescript-eslint/return-await */
|
||||
/* eslint-disable no-await-in-loop */
|
||||
import { ForbiddenError } from "@casl/ability";
|
||||
import { Octokit } from "@octokit/core";
|
||||
import { paginateGraphql } from "@octokit/plugin-paginate-graphql";
|
||||
import { Octokit as OctokitRest } from "@octokit/rest";
|
||||
import RE2 from "re2";
|
||||
|
||||
import { OrgMembershipRole } from "@app/db/schemas";
|
||||
import { BadRequestError, NotFoundError } from "@app/lib/errors";
|
||||
import { groupBy } from "@app/lib/fn";
|
||||
import { logger } from "@app/lib/logger";
|
||||
import { retryWithBackoff } from "@app/lib/retry";
|
||||
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
|
||||
import { KmsDataKey } from "@app/services/kms/kms-types";
|
||||
import { TOrgMembershipDALFactory } from "@app/services/org-membership/org-membership-dal";
|
||||
|
||||
import { TGroupDALFactory } from "../group/group-dal";
|
||||
import { TUserGroupMembershipDALFactory } from "../group/user-group-membership-dal";
|
||||
@@ -16,20 +21,67 @@ import { TLicenseServiceFactory } from "../license/license-service";
|
||||
import { OrgPermissionActions, OrgPermissionSubjects } from "../permission/org-permission";
|
||||
import { TPermissionServiceFactory } from "../permission/permission-service-types";
|
||||
import { TGithubOrgSyncDALFactory } from "./github-org-sync-dal";
|
||||
import { TCreateGithubOrgSyncDTO, TDeleteGithubOrgSyncDTO, TUpdateGithubOrgSyncDTO } from "./github-org-sync-types";
|
||||
import {
|
||||
TCreateGithubOrgSyncDTO,
|
||||
TDeleteGithubOrgSyncDTO,
|
||||
TSyncAllTeamsDTO,
|
||||
TSyncResult,
|
||||
TUpdateGithubOrgSyncDTO,
|
||||
TValidateGithubTokenDTO
|
||||
} from "./github-org-sync-types";
|
||||
|
||||
const OctokitWithPlugin = Octokit.plugin(paginateGraphql);
|
||||
|
||||
// Type definitions for GitHub API errors
|
||||
interface GitHubApiError extends Error {
|
||||
status?: number;
|
||||
response?: {
|
||||
status?: number;
|
||||
headers?: {
|
||||
"x-ratelimit-reset"?: string;
|
||||
};
|
||||
};
|
||||
}
|
||||
|
||||
interface OrgMembershipWithUser {
|
||||
id: string;
|
||||
orgId: string;
|
||||
role: string;
|
||||
status: string;
|
||||
isActive: boolean;
|
||||
inviteEmail: string | null;
|
||||
user: {
|
||||
id: string;
|
||||
email: string;
|
||||
username: string | null;
|
||||
firstName: string | null;
|
||||
lastName: string | null;
|
||||
} | null;
|
||||
}
|
||||
|
||||
interface GroupMembership {
|
||||
id: string;
|
||||
groupId: string;
|
||||
groupName: string;
|
||||
orgMembershipId: string;
|
||||
firstName: string | null;
|
||||
lastName: string | null;
|
||||
}
|
||||
|
||||
type TGithubOrgSyncServiceFactoryDep = {
|
||||
githubOrgSyncDAL: TGithubOrgSyncDALFactory;
|
||||
permissionService: Pick<TPermissionServiceFactory, "getOrgPermission">;
|
||||
kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">;
|
||||
userGroupMembershipDAL: Pick<
|
||||
TUserGroupMembershipDALFactory,
|
||||
"findGroupMembershipsByUserIdInOrg" | "insertMany" | "delete"
|
||||
"findGroupMembershipsByUserIdInOrg" | "findGroupMembershipsByGroupIdInOrg" | "insertMany" | "delete"
|
||||
>;
|
||||
groupDAL: Pick<TGroupDALFactory, "insertMany" | "transaction" | "find">;
|
||||
licenseService: Pick<TLicenseServiceFactory, "getPlan">;
|
||||
orgMembershipDAL: Pick<
|
||||
TOrgMembershipDALFactory,
|
||||
"find" | "findOrgMembershipById" | "findOrgMembershipsWithUsersByOrgId"
|
||||
>;
|
||||
};
|
||||
|
||||
export type TGithubOrgSyncServiceFactory = ReturnType<typeof githubOrgSyncServiceFactory>;
|
||||
@@ -40,7 +92,8 @@ export const githubOrgSyncServiceFactory = ({
|
||||
kmsService,
|
||||
userGroupMembershipDAL,
|
||||
groupDAL,
|
||||
licenseService
|
||||
licenseService,
|
||||
orgMembershipDAL
|
||||
}: TGithubOrgSyncServiceFactoryDep) => {
|
||||
const createGithubOrgSync = async ({
|
||||
githubOrgName,
|
||||
@@ -304,8 +357,8 @@ export const githubOrgSyncServiceFactory = ({
|
||||
const removeFromTeams = infisicalUserGroups.filter((el) => !githubUserTeamSet.has(el.groupName));
|
||||
|
||||
if (newTeams.length || updateTeams.length || removeFromTeams.length) {
|
||||
await groupDAL.transaction(async (tx) => {
|
||||
if (newTeams.length) {
|
||||
if (newTeams.length) {
|
||||
await groupDAL.transaction(async (tx) => {
|
||||
const newGroups = await groupDAL.insertMany(
|
||||
newTeams.map((newGroupName) => ({
|
||||
name: newGroupName,
|
||||
@@ -322,9 +375,11 @@ export const githubOrgSyncServiceFactory = ({
|
||||
})),
|
||||
tx
|
||||
);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
if (updateTeams.length) {
|
||||
if (updateTeams.length) {
|
||||
await groupDAL.transaction(async (tx) => {
|
||||
await userGroupMembershipDAL.insertMany(
|
||||
updateTeams.map((el) => ({
|
||||
groupId: githubUserTeamOnInfisicalGroupByName[el][0].id,
|
||||
@@ -332,16 +387,433 @@ export const githubOrgSyncServiceFactory = ({
|
||||
})),
|
||||
tx
|
||||
);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
if (removeFromTeams.length) {
|
||||
if (removeFromTeams.length) {
|
||||
await groupDAL.transaction(async (tx) => {
|
||||
await userGroupMembershipDAL.delete(
|
||||
{ userId, $in: { groupId: removeFromTeams.map((el) => el.groupId) } },
|
||||
tx
|
||||
);
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
const validateGithubToken = async ({ orgPermission, githubOrgAccessToken }: TValidateGithubTokenDTO) => {
|
||||
const { permission } = await permissionService.getOrgPermission(
|
||||
orgPermission.type,
|
||||
orgPermission.id,
|
||||
orgPermission.orgId,
|
||||
orgPermission.authMethod,
|
||||
orgPermission.orgId
|
||||
);
|
||||
|
||||
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Read, OrgPermissionSubjects.GithubOrgSync);
|
||||
|
||||
const plan = await licenseService.getPlan(orgPermission.orgId);
|
||||
if (!plan.githubOrgSync) {
|
||||
throw new BadRequestError({
|
||||
message:
|
||||
"Failed to validate GitHub token due to plan restriction. Upgrade plan to use GitHub organization sync."
|
||||
});
|
||||
}
|
||||
|
||||
const config = await githubOrgSyncDAL.findOne({ orgId: orgPermission.orgId });
|
||||
if (!config) {
|
||||
throw new BadRequestError({ message: "GitHub organization sync is not configured" });
|
||||
}
|
||||
|
||||
try {
|
||||
const testOctokit = new OctokitRest({
|
||||
auth: githubOrgAccessToken,
|
||||
request: {
|
||||
signal: AbortSignal.timeout(10000)
|
||||
}
|
||||
});
|
||||
|
||||
const { data: org } = await testOctokit.rest.orgs.get({
|
||||
org: config.githubOrgName
|
||||
});
|
||||
|
||||
const octokitGraphQL = new OctokitWithPlugin({
|
||||
auth: githubOrgAccessToken,
|
||||
request: {
|
||||
signal: AbortSignal.timeout(10000)
|
||||
}
|
||||
});
|
||||
|
||||
await octokitGraphQL.graphql(`query($org: String!) { organization(login: $org) { id name } }`, {
|
||||
org: config.githubOrgName
|
||||
});
|
||||
|
||||
return {
|
||||
valid: true,
|
||||
organizationInfo: {
|
||||
id: org.id,
|
||||
login: org.login,
|
||||
name: org.name || org.login,
|
||||
publicRepos: org.public_repos,
|
||||
privateRepos: org.owned_private_repos || 0
|
||||
}
|
||||
};
|
||||
} catch (error) {
|
||||
logger.error(error, `GitHub token validation failed for org ${config.githubOrgName}`);
|
||||
|
||||
const gitHubError = error as GitHubApiError;
|
||||
const statusCode = gitHubError.status || gitHubError.response?.status;
|
||||
if (statusCode) {
|
||||
if (statusCode === 401) {
|
||||
throw new BadRequestError({
|
||||
message: "GitHub access token is invalid or expired."
|
||||
});
|
||||
}
|
||||
if (statusCode === 403) {
|
||||
throw new BadRequestError({
|
||||
message:
|
||||
"GitHub access token lacks required permissions. Required: 1) 'read:org' scope for organization teams, 2) Token owner must be an organization member with team visibility access, 3) Organization settings must allow team visibility. Check GitHub token scopes and organization member permissions."
|
||||
});
|
||||
}
|
||||
if (statusCode === 404) {
|
||||
throw new BadRequestError({
|
||||
message: `Organization '${config.githubOrgName}' not found or access token does not have access to it.`
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
throw new BadRequestError({
|
||||
message: `GitHub token validation failed: ${(error as Error).message}`
|
||||
});
|
||||
}
|
||||
};
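For context, a hedged sketch of how a caller might exercise validateGithubToken; the service instance, actor, and token variables here are illustrative assumptions rather than part of this diff.

// Illustrative only: assumes githubOrgSyncService, an OrgServiceActor and an admin-supplied token are in scope.
const result = await githubOrgSyncService.validateGithubToken({
  orgPermission, // actor resolved from the authenticated request (assumed)
  githubOrgAccessToken: token // token being verified (assumed)
});

if (result.valid) {
  // organizationInfo mirrors the GitHub org lookup above: id, login, name and repo counts
  logger.info(result.organizationInfo, "GitHub org token verified");
}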
|
||||
|
||||
const syncAllTeams = async ({ orgPermission }: TSyncAllTeamsDTO): Promise<TSyncResult> => {
|
||||
const { permission } = await permissionService.getOrgPermission(
|
||||
orgPermission.type,
|
||||
orgPermission.id,
|
||||
orgPermission.orgId,
|
||||
orgPermission.authMethod,
|
||||
orgPermission.orgId
|
||||
);
|
||||
|
||||
ForbiddenError.from(permission).throwUnlessCan(
|
||||
OrgPermissionActions.Edit,
|
||||
OrgPermissionSubjects.GithubOrgSyncManual
|
||||
);
|
||||
|
||||
const plan = await licenseService.getPlan(orgPermission.orgId);
|
||||
if (!plan.githubOrgSync) {
|
||||
throw new BadRequestError({
|
||||
message:
|
||||
"Failed to sync all GitHub teams due to plan restriction. Upgrade plan to use GitHub organization sync."
|
||||
});
|
||||
}
|
||||
|
||||
const config = await githubOrgSyncDAL.findOne({ orgId: orgPermission.orgId });
|
||||
if (!config || !config?.isActive) {
|
||||
throw new BadRequestError({ message: "GitHub organization sync is not configured or not active" });
|
||||
}
|
||||
|
||||
const { decryptor } = await kmsService.createCipherPairWithDataKey({
|
||||
type: KmsDataKey.Organization,
|
||||
orgId: orgPermission.orgId
|
||||
});
|
||||
|
||||
if (!config.encryptedGithubOrgAccessToken) {
|
||||
throw new BadRequestError({
|
||||
message: "GitHub organization access token is required. Please set a token first."
|
||||
});
|
||||
}
|
||||
|
||||
const orgAccessToken = decryptor({ cipherTextBlob: config.encryptedGithubOrgAccessToken }).toString();
|
||||
|
||||
try {
|
||||
const testOctokit = new OctokitRest({
|
||||
auth: orgAccessToken,
|
||||
request: {
|
||||
signal: AbortSignal.timeout(10000)
|
||||
}
|
||||
});
|
||||
|
||||
await testOctokit.rest.orgs.get({
|
||||
org: config.githubOrgName
|
||||
});
|
||||
|
||||
await testOctokit.rest.users.getAuthenticated();
|
||||
} catch (error) {
|
||||
throw new BadRequestError({
|
||||
message: "Stored GitHub access token is invalid or expired. Please set a new token."
|
||||
});
|
||||
}
|
||||
|
||||
const allMembers = await orgMembershipDAL.findOrgMembershipsWithUsersByOrgId(orgPermission.orgId);
|
||||
const activeMembers = allMembers.filter(
|
||||
(member) => member.status === "accepted" && member.isActive
|
||||
) as OrgMembershipWithUser[];
|
||||
|
||||
const startTime = Date.now();
|
||||
const syncErrors: string[] = [];
|
||||
|
||||
const octokit = new OctokitWithPlugin({
|
||||
auth: orgAccessToken,
|
||||
request: {
|
||||
signal: AbortSignal.timeout(30000)
|
||||
}
|
||||
});
|
||||
|
||||
const data = await retryWithBackoff(async () => {
|
||||
return octokit.graphql
|
||||
.paginate<{
|
||||
organization: {
|
||||
teams: {
|
||||
totalCount: number;
|
||||
edges: {
|
||||
node: {
|
||||
name: string;
|
||||
description: string;
|
||||
members: {
|
||||
edges: {
|
||||
node: {
|
||||
login: string;
|
||||
};
|
||||
}[];
|
||||
};
|
||||
};
|
||||
}[];
|
||||
};
|
||||
};
|
||||
}>(
|
||||
`
|
||||
query orgTeams($cursor: String, $org: String!) {
|
||||
organization(login: $org) {
|
||||
teams(first: 100, after: $cursor) {
|
||||
totalCount
|
||||
edges {
|
||||
node {
|
||||
name
|
||||
description
|
||||
members(first: 100) {
|
||||
edges {
|
||||
node {
|
||||
login
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
pageInfo {
|
||||
hasNextPage
|
||||
endCursor
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
`,
|
||||
{
|
||||
org: config.githubOrgName
|
||||
}
|
||||
)
|
||||
.catch((err) => {
|
||||
logger.error(err, "GitHub GraphQL error for batched team sync");
|
||||
|
||||
const gitHubError = err as GitHubApiError;
|
||||
const statusCode = gitHubError.status || gitHubError.response?.status;
|
||||
if (statusCode) {
|
||||
if (statusCode === 401) {
|
||||
throw new BadRequestError({
|
||||
message: "GitHub access token is invalid or expired. Please provide a new token."
|
||||
});
|
||||
}
|
||||
if (statusCode === 403) {
|
||||
throw new BadRequestError({
|
||||
message:
|
||||
"GitHub access token lacks required permissions for organization team sync. Required: 1) 'admin:org' scope, 2) Token owner must be organization owner or have team read permissions, 3) Organization settings must allow team visibility. Check token scopes and user role."
|
||||
});
|
||||
}
|
||||
if (statusCode === 404) {
|
||||
throw new BadRequestError({
|
||||
message: `Organization ${config.githubOrgName} not found or access token does not have sufficient permissions to read it.`
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
if ((err as Error)?.message?.includes("Although you appear to have the correct authorization credential")) {
|
||||
throw new BadRequestError({
|
||||
message:
|
||||
"Organization has restricted OAuth app access. Please check that: 1) Your organization has approved the Infisical OAuth application, 2) The token owner has sufficient organization permissions."
|
||||
});
|
||||
}
|
||||
throw new BadRequestError({ message: `GitHub GraphQL query failed: ${(err as Error)?.message}` });
|
||||
});
|
||||
});
|
||||
|
||||
const {
|
||||
organization: { teams }
|
||||
} = data;
|
||||
|
||||
const userTeamMap = new Map<string, string[]>();
|
||||
const allGithubUsernamesInTeams = new Set<string>();
|
||||
|
||||
teams?.edges?.forEach((teamEdge) => {
|
||||
const teamName = teamEdge.node.name.toLowerCase();
|
||||
|
||||
teamEdge.node.members.edges.forEach((memberEdge) => {
|
||||
const username = memberEdge.node.login.toLowerCase();
|
||||
allGithubUsernamesInTeams.add(username);
|
||||
|
||||
if (!userTeamMap.has(username)) {
|
||||
userTeamMap.set(username, []);
|
||||
}
|
||||
userTeamMap.get(username)!.push(teamName);
|
||||
});
|
||||
});
|
||||
|
||||
const allGithubTeamNames = Array.from(new Set(teams?.edges?.map((edge) => edge.node.name.toLowerCase()) || []));
|
||||
|
||||
const existingTeamsOnInfisical = await groupDAL.find({
|
||||
orgId: orgPermission.orgId,
|
||||
$in: { name: allGithubTeamNames }
|
||||
});
|
||||
const existingTeamsMap = groupBy(existingTeamsOnInfisical, (i) => i.name);
|
||||
|
||||
const teamsToCreate = allGithubTeamNames.filter((teamName) => !(teamName in existingTeamsMap));
|
||||
const createdTeams = new Set<string>();
|
||||
const updatedTeams = new Set<string>();
|
||||
const totalRemovedMemberships = 0;
|
||||
|
||||
await groupDAL.transaction(async (tx) => {
|
||||
if (teamsToCreate.length > 0) {
|
||||
const newGroups = await groupDAL.insertMany(
|
||||
teamsToCreate.map((teamName) => ({
|
||||
name: teamName,
|
||||
role: OrgMembershipRole.Member,
|
||||
slug: teamName,
|
||||
orgId: orgPermission.orgId
|
||||
})),
|
||||
tx
|
||||
);
|
||||
|
||||
newGroups.forEach((group) => {
|
||||
if (!existingTeamsMap[group.name]) {
|
||||
existingTeamsMap[group.name] = [];
|
||||
}
|
||||
existingTeamsMap[group.name].push(group);
|
||||
createdTeams.add(group.name);
|
||||
});
|
||||
}
|
||||
|
||||
const allTeams = [...Object.values(existingTeamsMap).flat()];
|
||||
|
||||
for (const team of allTeams) {
|
||||
const teamName = team.name.toLowerCase();
|
||||
|
||||
const currentMemberships = (await userGroupMembershipDAL.findGroupMembershipsByGroupIdInOrg(
|
||||
team.id,
|
||||
orgPermission.orgId
|
||||
)) as GroupMembership[];
|
||||
|
||||
const expectedUserIds = new Set<string>();
|
||||
teams?.edges?.forEach((teamEdge) => {
|
||||
if (teamEdge.node.name.toLowerCase() === teamName) {
|
||||
teamEdge.node.members.edges.forEach((memberEdge) => {
|
||||
const githubUsername = memberEdge.node.login.toLowerCase();
|
||||
|
||||
const matchingMember = activeMembers.find((member) => {
|
||||
const email = member.user?.email || member.inviteEmail;
|
||||
if (!email) return false;
|
||||
|
||||
const emailPrefix = email.split("@")[0].toLowerCase();
|
||||
const emailDomain = email.split("@")[1].toLowerCase();
|
||||
|
||||
if (emailPrefix === githubUsername) {
|
||||
return true;
|
||||
}
|
||||
const domainName = emailDomain.split(".")[0];
|
||||
if (githubUsername.endsWith(domainName) && githubUsername.length > domainName.length) {
|
||||
const baseUsername = githubUsername.slice(0, -domainName.length);
|
||||
if (emailPrefix === baseUsername) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
const emailSplitRegex = new RE2(/[._-]/);
|
||||
const emailParts = emailPrefix.split(emailSplitRegex);
|
||||
const longestEmailPart = emailParts.reduce((a, b) => (a.length > b.length ? a : b), "");
|
||||
if (longestEmailPart.length >= 4 && githubUsername.includes(longestEmailPart)) {
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
});
|
||||
|
||||
if (matchingMember?.user?.id) {
|
||||
expectedUserIds.add(matchingMember.user.id);
|
||||
logger.info(
|
||||
`Matched GitHub user ${githubUsername} to email ${matchingMember.user?.email || matchingMember.inviteEmail}`
|
||||
);
|
||||
}
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
const currentUserIds = new Set<string>();
|
||||
currentMemberships.forEach((membership) => {
|
||||
const activeMember = activeMembers.find((am) => am.id === membership.orgMembershipId);
|
||||
if (activeMember?.user?.id) {
|
||||
currentUserIds.add(activeMember.user.id);
|
||||
}
|
||||
});
|
||||
|
||||
const usersToAdd = Array.from(expectedUserIds).filter((userId) => !currentUserIds.has(userId));
|
||||
|
||||
const membershipsToRemove = currentMemberships.filter((membership) => {
|
||||
const activeMember = activeMembers.find((am) => am.id === membership.orgMembershipId);
|
||||
return activeMember?.user?.id && !expectedUserIds.has(activeMember.user.id);
|
||||
});
|
||||
|
||||
if (usersToAdd.length > 0) {
|
||||
await userGroupMembershipDAL.insertMany(
|
||||
usersToAdd.map((userId) => ({
|
||||
userId,
|
||||
groupId: team.id
|
||||
})),
|
||||
tx
|
||||
);
|
||||
updatedTeams.add(teamName);
|
||||
}
|
||||
|
||||
if (membershipsToRemove.length > 0) {
|
||||
await userGroupMembershipDAL.delete(
|
||||
{
|
||||
$in: {
|
||||
id: membershipsToRemove.map((m) => m.id)
|
||||
}
|
||||
},
|
||||
tx
|
||||
);
|
||||
updatedTeams.add(teamName);
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
const syncDuration = Date.now() - startTime;
|
||||
|
||||
logger.info(
|
||||
{
|
||||
orgId: orgPermission.orgId,
|
||||
createdTeams: createdTeams.size,
|
||||
syncDuration
|
||||
},
|
||||
"GitHub team sync completed"
|
||||
);
|
||||
|
||||
return {
|
||||
totalUsers: activeMembers.length,
|
||||
errors: syncErrors,
|
||||
createdTeams: Array.from(createdTeams),
|
||||
updatedTeams: Array.from(updatedTeams),
|
||||
removedMemberships: totalRemovedMemberships,
|
||||
syncDuration
|
||||
};
|
||||
};
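As a side note, the username-to-member matching applied in syncAllTeams can be read as a standalone helper. The sketch below restates that heuristic (exact email prefix, domain-suffixed username, or a shared token of four or more characters) under the assumption it is extracted unchanged; a plain RegExp is used here where the service uses RE2.

// Illustrative restatement of the matching rules above (assumed extraction, not part of the diff).
const matchesGithubUsername = (email: string, githubUsername: string): boolean => {
  const [emailPrefix, emailDomain] = email.toLowerCase().split("@");
  const username = githubUsername.toLowerCase();

  if (emailPrefix === username) return true; // jane@acme.com matches "jane"

  const domainName = emailDomain.split(".")[0];
  if (username.endsWith(domainName) && username.length > domainName.length) {
    // jane@acme.com matches "janeacme"
    if (emailPrefix === username.slice(0, -domainName.length)) return true;
  }

  // jane.doe@acme.com matches "janedoe42" via the longest email part ("jane" or "doe", length >= 4)
  const longestEmailPart = emailPrefix.split(/[._-]/).reduce((a, b) => (a.length > b.length ? a : b), "");
  return longestEmailPart.length >= 4 && username.includes(longestEmailPart);
};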
|
||||
|
||||
return {
|
||||
@@ -349,6 +821,8 @@ export const githubOrgSyncServiceFactory = ({
|
||||
updateGithubOrgSync,
|
||||
deleteGithubOrgSync,
|
||||
getGithubOrgSync,
|
||||
syncUserGroups
|
||||
syncUserGroups,
|
||||
syncAllTeams,
|
||||
validateGithubToken
|
||||
};
|
||||
};
|
||||
|
@@ -21,3 +21,21 @@ export interface TDeleteGithubOrgSyncDTO {
export interface TGetGithubOrgSyncDTO {
  orgPermission: OrgServiceActor;
}

export interface TSyncAllTeamsDTO {
  orgPermission: OrgServiceActor;
}

export interface TSyncResult {
  totalUsers: number;
  errors: string[];
  createdTeams: string[];
  updatedTeams: string[];
  removedMemberships: number;
  syncDuration: number;
}

export interface TValidateGithubTokenDTO {
  orgPermission: OrgServiceActor;
  githubOrgAccessToken: string;
}
@@ -400,15 +400,13 @@ export const ldapConfigServiceFactory = ({
|
||||
|
||||
userAlias = await userDAL.transaction(async (tx) => {
|
||||
let newUser: TUsers | undefined;
|
||||
if (serverCfg.trustLdapEmails) {
|
||||
newUser = await userDAL.findOne(
|
||||
{
|
||||
email: email.toLowerCase(),
|
||||
isEmailVerified: true
|
||||
},
|
||||
tx
|
||||
);
|
||||
}
|
||||
newUser = await userDAL.findOne(
|
||||
{
|
||||
email: email.toLowerCase(),
|
||||
isEmailVerified: true
|
||||
},
|
||||
tx
|
||||
);
|
||||
|
||||
if (!newUser) {
|
||||
const uniqueUsername = await normalizeUsername(username, userDAL);
|
||||
@@ -433,7 +431,8 @@ export const ldapConfigServiceFactory = ({
|
||||
aliasType: UserAliasType.LDAP,
|
||||
externalId,
|
||||
emails: [email],
|
||||
orgId
|
||||
orgId,
|
||||
isEmailVerified: serverCfg.trustLdapEmails
|
||||
},
|
||||
tx
|
||||
);
|
||||
@@ -556,15 +555,14 @@ export const ldapConfigServiceFactory = ({
|
||||
return newUser;
|
||||
});
|
||||
|
||||
const isUserCompleted = Boolean(user.isAccepted);
|
||||
|
||||
const isUserCompleted = Boolean(user.isAccepted) && userAlias.isEmailVerified;
|
||||
const providerAuthToken = crypto.jwt().sign(
|
||||
{
|
||||
authTokenType: AuthTokenType.PROVIDER_TOKEN,
|
||||
userId: user.id,
|
||||
username: user.username,
|
||||
hasExchangedPrivateKey: true,
|
||||
...(user.email && { email: user.email, isEmailVerified: user.isEmailVerified }),
|
||||
...(user.email && { email: user.email, isEmailVerified: userAlias.isEmailVerified }),
|
||||
firstName,
|
||||
lastName,
|
||||
organizationName: organization.name,
|
||||
@@ -572,6 +570,7 @@ export const ldapConfigServiceFactory = ({
|
||||
organizationSlug: organization.slug,
|
||||
authMethod: AuthMethod.LDAP,
|
||||
authType: UserAliasType.LDAP,
|
||||
aliasId: userAlias.id,
|
||||
isUserCompleted,
|
||||
...(relayState
|
||||
? {
|
||||
@@ -585,10 +584,11 @@ export const ldapConfigServiceFactory = ({
|
||||
}
|
||||
);
|
||||
|
||||
if (user.email && !user.isEmailVerified) {
|
||||
if (user.email && !userAlias.isEmailVerified) {
|
||||
const token = await tokenService.createTokenForUser({
|
||||
type: TokenType.TOKEN_EMAIL_VERIFICATION,
|
||||
userId: user.id
|
||||
userId: user.id,
|
||||
aliasId: userAlias.id
|
||||
});
|
||||
|
||||
await smtpService.sendMail({
|
||||
|
@@ -722,6 +722,16 @@ export const licenseServiceFactory = ({
    await keyStore.deleteItem(FEATURE_CACHE_KEY(orgId));
  };

  const getCustomerId = () => {
    if (!selfHostedLicense) return "unknown";
    return selfHostedLicense?.customerId;
  };

  const getLicenseId = () => {
    if (!selfHostedLicense) return "unknown";
    return selfHostedLicense?.licenseId;
  };

  return {
    generateOrgCustomerId,
    removeOrgCustomer,
@@ -736,6 +746,8 @@ export const licenseServiceFactory = ({
      return onPremFeatures;
    },
    getPlan,
    getCustomerId,
    getLicenseId,
    invalidateGetPlan,
    updateSubscriptionOrgMemberCount,
    refreshPlan,
@@ -180,7 +180,7 @@ export const oidcConfigServiceFactory = ({
|
||||
}
|
||||
|
||||
const appCfg = getConfig();
|
||||
const userAlias = await userAliasDAL.findOne({
|
||||
let userAlias = await userAliasDAL.findOne({
|
||||
externalId,
|
||||
orgId,
|
||||
aliasType: UserAliasType.OIDC
|
||||
@@ -231,32 +231,29 @@ export const oidcConfigServiceFactory = ({
|
||||
} else {
|
||||
user = await userDAL.transaction(async (tx) => {
|
||||
let newUser: TUsers | undefined;
|
||||
// we prioritize getting the most complete user to create the new alias under
|
||||
newUser = await userDAL.findOne(
|
||||
{
|
||||
email,
|
||||
isEmailVerified: true
|
||||
},
|
||||
tx
|
||||
);
|
||||
|
||||
if (serverCfg.trustOidcEmails) {
|
||||
// we prioritize getting the most complete user to create the new alias under
|
||||
if (!newUser) {
|
||||
// this fetches user entries created via invites
|
||||
newUser = await userDAL.findOne(
|
||||
{
|
||||
email,
|
||||
isEmailVerified: true
|
||||
username: email
|
||||
},
|
||||
tx
|
||||
);
|
||||
|
||||
if (!newUser) {
|
||||
// this fetches user entries created via invites
|
||||
newUser = await userDAL.findOne(
|
||||
{
|
||||
username: email
|
||||
},
|
||||
tx
|
||||
);
|
||||
|
||||
if (newUser && !newUser.isEmailVerified) {
|
||||
// we automatically mark it as email-verified because we've configured trust for OIDC emails
|
||||
newUser = await userDAL.updateById(newUser.id, {
|
||||
isEmailVerified: true
|
||||
});
|
||||
}
|
||||
if (newUser && !newUser.isEmailVerified) {
|
||||
// we automatically mark it as email-verified because we've configured trust for OIDC emails
|
||||
newUser = await userDAL.updateById(newUser.id, {
|
||||
isEmailVerified: serverCfg.trustOidcEmails
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
@@ -276,13 +273,14 @@ export const oidcConfigServiceFactory = ({
|
||||
);
|
||||
}
|
||||
|
||||
await userAliasDAL.create(
|
||||
userAlias = await userAliasDAL.create(
|
||||
{
|
||||
userId: newUser.id,
|
||||
aliasType: UserAliasType.OIDC,
|
||||
externalId,
|
||||
emails: email ? [email] : [],
|
||||
orgId
|
||||
orgId,
|
||||
isEmailVerified: serverCfg.trustOidcEmails
|
||||
},
|
||||
tx
|
||||
);
|
||||
@@ -404,19 +402,20 @@ export const oidcConfigServiceFactory = ({
|
||||
|
||||
await licenseService.updateSubscriptionOrgMemberCount(organization.id);
|
||||
|
||||
const isUserCompleted = Boolean(user.isAccepted);
|
||||
const isUserCompleted = Boolean(user.isAccepted) && userAlias.isEmailVerified;
|
||||
const providerAuthToken = crypto.jwt().sign(
|
||||
{
|
||||
authTokenType: AuthTokenType.PROVIDER_TOKEN,
|
||||
userId: user.id,
|
||||
username: user.username,
|
||||
...(user.email && { email: user.email, isEmailVerified: user.isEmailVerified }),
|
||||
...(user.email && { email: user.email, isEmailVerified: userAlias.isEmailVerified }),
|
||||
firstName,
|
||||
lastName,
|
||||
organizationName: organization.name,
|
||||
organizationId: organization.id,
|
||||
organizationSlug: organization.slug,
|
||||
hasExchangedPrivateKey: true,
|
||||
aliasId: userAlias.id,
|
||||
authMethod: AuthMethod.OIDC,
|
||||
authType: UserAliasType.OIDC,
|
||||
isUserCompleted,
|
||||
@@ -430,10 +429,11 @@ export const oidcConfigServiceFactory = ({
|
||||
|
||||
await oidcConfigDAL.update({ orgId }, { lastUsed: new Date() });
|
||||
|
||||
if (user.email && !user.isEmailVerified) {
|
||||
if (user.email && !userAlias.isEmailVerified) {
|
||||
const token = await tokenService.createTokenForUser({
|
||||
type: TokenType.TOKEN_EMAIL_VERIFICATION,
|
||||
userId: user.id
|
||||
userId: user.id,
|
||||
aliasId: userAlias.id
|
||||
});
|
||||
|
||||
await smtpService
|
||||
|
@@ -2,6 +2,7 @@ import { AbilityBuilder, createMongoAbility, MongoAbility } from "@casl/ability"
|
||||
|
||||
import {
|
||||
ProjectPermissionActions,
|
||||
ProjectPermissionAuditLogsActions,
|
||||
ProjectPermissionCertificateActions,
|
||||
ProjectPermissionCmekActions,
|
||||
ProjectPermissionCommitsActions,
|
||||
@@ -394,7 +395,7 @@ const buildMemberPermissionRules = () => {
|
||||
);
|
||||
|
||||
can([ProjectPermissionActions.Read], ProjectPermissionSub.Role);
|
||||
can([ProjectPermissionActions.Read], ProjectPermissionSub.AuditLogs);
|
||||
can([ProjectPermissionAuditLogsActions.Read], ProjectPermissionSub.AuditLogs);
|
||||
can([ProjectPermissionActions.Read], ProjectPermissionSub.IpAllowList);
|
||||
|
||||
// double check if all CRUD are needed for CA and Certificates
|
||||
@@ -502,7 +503,7 @@ const buildViewerPermissionRules = () => {
|
||||
can(ProjectPermissionActions.Read, ProjectPermissionSub.Settings);
|
||||
can(ProjectPermissionActions.Read, ProjectPermissionSub.Environments);
|
||||
can(ProjectPermissionActions.Read, ProjectPermissionSub.Tags);
|
||||
can(ProjectPermissionActions.Read, ProjectPermissionSub.AuditLogs);
|
||||
can(ProjectPermissionAuditLogsActions.Read, ProjectPermissionSub.AuditLogs);
|
||||
can(ProjectPermissionActions.Read, ProjectPermissionSub.IpAllowList);
|
||||
can(ProjectPermissionActions.Read, ProjectPermissionSub.CertificateAuthorities);
|
||||
can(ProjectPermissionCertificateActions.Read, ProjectPermissionSub.Certificates);
|
||||
|
@@ -23,6 +23,10 @@ export enum OrgPermissionAppConnectionActions {
|
||||
Connect = "connect"
|
||||
}
|
||||
|
||||
export enum OrgPermissionAuditLogsActions {
|
||||
Read = "read"
|
||||
}
|
||||
|
||||
export enum OrgPermissionKmipActions {
|
||||
Proxy = "proxy",
|
||||
Setup = "setup"
|
||||
@@ -90,6 +94,7 @@ export enum OrgPermissionSubjects {
|
||||
Sso = "sso",
|
||||
Scim = "scim",
|
||||
GithubOrgSync = "github-org-sync",
|
||||
GithubOrgSyncManual = "github-org-sync-manual",
|
||||
Ldap = "ldap",
|
||||
Groups = "groups",
|
||||
Billing = "billing",
|
||||
@@ -119,13 +124,14 @@ export type OrgPermissionSet =
|
||||
| [OrgPermissionActions, OrgPermissionSubjects.Sso]
|
||||
| [OrgPermissionActions, OrgPermissionSubjects.Scim]
|
||||
| [OrgPermissionActions, OrgPermissionSubjects.GithubOrgSync]
|
||||
| [OrgPermissionActions, OrgPermissionSubjects.GithubOrgSyncManual]
|
||||
| [OrgPermissionActions, OrgPermissionSubjects.Ldap]
|
||||
| [OrgPermissionGroupActions, OrgPermissionSubjects.Groups]
|
||||
| [OrgPermissionActions, OrgPermissionSubjects.SecretScanning]
|
||||
| [OrgPermissionBillingActions, OrgPermissionSubjects.Billing]
|
||||
| [OrgPermissionIdentityActions, OrgPermissionSubjects.Identity]
|
||||
| [OrgPermissionActions, OrgPermissionSubjects.Kms]
|
||||
| [OrgPermissionActions, OrgPermissionSubjects.AuditLogs]
|
||||
| [OrgPermissionAuditLogsActions, OrgPermissionSubjects.AuditLogs]
|
||||
| [OrgPermissionActions, OrgPermissionSubjects.ProjectTemplates]
|
||||
| [OrgPermissionGatewayActions, OrgPermissionSubjects.Gateway]
|
||||
| [
|
||||
@@ -188,6 +194,10 @@ export const OrgPermissionSchema = z.discriminatedUnion("subject", [
|
||||
subject: z.literal(OrgPermissionSubjects.GithubOrgSync).describe("The entity this permission pertains to."),
|
||||
action: CASL_ACTION_SCHEMA_NATIVE_ENUM(OrgPermissionActions).describe("Describe what action an entity can take.")
|
||||
}),
|
||||
z.object({
|
||||
subject: z.literal(OrgPermissionSubjects.GithubOrgSyncManual).describe("The entity this permission pertains to."),
|
||||
action: CASL_ACTION_SCHEMA_NATIVE_ENUM(OrgPermissionActions).describe("Describe what action an entity can take.")
|
||||
}),
|
||||
z.object({
|
||||
subject: z.literal(OrgPermissionSubjects.Ldap).describe("The entity this permission pertains to."),
|
||||
action: CASL_ACTION_SCHEMA_NATIVE_ENUM(OrgPermissionActions).describe("Describe what action an entity can take.")
|
||||
@@ -214,7 +224,9 @@ export const OrgPermissionSchema = z.discriminatedUnion("subject", [
|
||||
}),
|
||||
z.object({
|
||||
subject: z.literal(OrgPermissionSubjects.AuditLogs).describe("The entity this permission pertains to."),
|
||||
action: CASL_ACTION_SCHEMA_NATIVE_ENUM(OrgPermissionActions).describe("Describe what action an entity can take.")
|
||||
action: CASL_ACTION_SCHEMA_NATIVE_ENUM(OrgPermissionAuditLogsActions).describe(
|
||||
"Describe what action an entity can take."
|
||||
)
|
||||
}),
|
||||
z.object({
|
||||
subject: z.literal(OrgPermissionSubjects.ProjectTemplates).describe("The entity this permission pertains to."),
|
||||
@@ -309,6 +321,11 @@ const buildAdminPermission = () => {
|
||||
can(OrgPermissionActions.Edit, OrgPermissionSubjects.GithubOrgSync);
|
||||
can(OrgPermissionActions.Delete, OrgPermissionSubjects.GithubOrgSync);
|
||||
|
||||
can(OrgPermissionActions.Read, OrgPermissionSubjects.GithubOrgSyncManual);
|
||||
can(OrgPermissionActions.Create, OrgPermissionSubjects.GithubOrgSyncManual);
|
||||
can(OrgPermissionActions.Edit, OrgPermissionSubjects.GithubOrgSyncManual);
|
||||
can(OrgPermissionActions.Delete, OrgPermissionSubjects.GithubOrgSyncManual);
|
||||
|
||||
can(OrgPermissionActions.Read, OrgPermissionSubjects.Ldap);
|
||||
can(OrgPermissionActions.Create, OrgPermissionSubjects.Ldap);
|
||||
can(OrgPermissionActions.Edit, OrgPermissionSubjects.Ldap);
|
||||
@@ -340,10 +357,7 @@ const buildAdminPermission = () => {
|
||||
can(OrgPermissionActions.Edit, OrgPermissionSubjects.Kms);
|
||||
can(OrgPermissionActions.Delete, OrgPermissionSubjects.Kms);
|
||||
|
||||
can(OrgPermissionActions.Read, OrgPermissionSubjects.AuditLogs);
|
||||
can(OrgPermissionActions.Create, OrgPermissionSubjects.AuditLogs);
|
||||
can(OrgPermissionActions.Edit, OrgPermissionSubjects.AuditLogs);
|
||||
can(OrgPermissionActions.Delete, OrgPermissionSubjects.AuditLogs);
|
||||
can(OrgPermissionAuditLogsActions.Read, OrgPermissionSubjects.AuditLogs);
|
||||
|
||||
can(OrgPermissionActions.Read, OrgPermissionSubjects.ProjectTemplates);
|
||||
can(OrgPermissionActions.Create, OrgPermissionSubjects.ProjectTemplates);
|
||||
@@ -416,7 +430,7 @@ const buildMemberPermission = () => {
|
||||
can(OrgPermissionIdentityActions.Edit, OrgPermissionSubjects.Identity);
|
||||
can(OrgPermissionIdentityActions.Delete, OrgPermissionSubjects.Identity);
|
||||
|
||||
can(OrgPermissionActions.Read, OrgPermissionSubjects.AuditLogs);
|
||||
can(OrgPermissionAuditLogsActions.Read, OrgPermissionSubjects.AuditLogs);
|
||||
|
||||
can(OrgPermissionAppConnectionActions.Connect, OrgPermissionSubjects.AppConnections);
|
||||
can(OrgPermissionGatewayActions.ListGateways, OrgPermissionSubjects.Gateway);
|
||||
|
@@ -164,6 +164,10 @@ export enum ProjectPermissionSecretEventActions {
|
||||
SubscribeImportMutations = "subscribe-on-import-mutations"
|
||||
}
|
||||
|
||||
export enum ProjectPermissionAuditLogsActions {
|
||||
Read = "read"
|
||||
}
|
||||
|
||||
export enum ProjectPermissionSub {
|
||||
Role = "role",
|
||||
Member = "member",
|
||||
@@ -304,7 +308,7 @@ export type ProjectPermissionSet =
|
||||
| [ProjectPermissionGroupActions, ProjectPermissionSub.Groups]
|
||||
| [ProjectPermissionActions, ProjectPermissionSub.Integrations]
|
||||
| [ProjectPermissionActions, ProjectPermissionSub.Webhooks]
|
||||
| [ProjectPermissionActions, ProjectPermissionSub.AuditLogs]
|
||||
| [ProjectPermissionAuditLogsActions, ProjectPermissionSub.AuditLogs]
|
||||
| [ProjectPermissionActions, ProjectPermissionSub.Environments]
|
||||
| [ProjectPermissionActions, ProjectPermissionSub.IpAllowList]
|
||||
| [ProjectPermissionActions, ProjectPermissionSub.Settings]
|
||||
@@ -645,7 +649,7 @@ const GeneralPermissionSchema = [
|
||||
}),
|
||||
z.object({
|
||||
subject: z.literal(ProjectPermissionSub.AuditLogs).describe("The entity this permission pertains to."),
|
||||
action: CASL_ACTION_SCHEMA_NATIVE_ENUM(ProjectPermissionActions).describe(
|
||||
action: CASL_ACTION_SCHEMA_NATIVE_ENUM(ProjectPermissionAuditLogsActions).describe(
|
||||
"Describe what action an entity can take."
|
||||
)
|
||||
}),
|
||||
|
@@ -246,7 +246,7 @@ export const samlConfigServiceFactory = ({
|
||||
});
|
||||
}
|
||||
|
||||
const userAlias = await userAliasDAL.findOne({
|
||||
let userAlias = await userAliasDAL.findOne({
|
||||
externalId,
|
||||
orgId,
|
||||
aliasType: UserAliasType.SAML
|
||||
@@ -320,15 +320,13 @@ export const samlConfigServiceFactory = ({
|
||||
|
||||
user = await userDAL.transaction(async (tx) => {
|
||||
let newUser: TUsers | undefined;
|
||||
if (serverCfg.trustSamlEmails) {
|
||||
newUser = await userDAL.findOne(
|
||||
{
|
||||
email,
|
||||
isEmailVerified: true
|
||||
},
|
||||
tx
|
||||
);
|
||||
}
|
||||
newUser = await userDAL.findOne(
|
||||
{
|
||||
email,
|
||||
isEmailVerified: true
|
||||
},
|
||||
tx
|
||||
);
|
||||
|
||||
if (!newUser) {
|
||||
const uniqueUsername = await normalizeUsername(`${firstName ?? ""}-${lastName ?? ""}`, userDAL);
|
||||
@@ -346,13 +344,14 @@ export const samlConfigServiceFactory = ({
|
||||
);
|
||||
}
|
||||
|
||||
await userAliasDAL.create(
|
||||
userAlias = await userAliasDAL.create(
|
||||
{
|
||||
userId: newUser.id,
|
||||
aliasType: UserAliasType.SAML,
|
||||
externalId,
|
||||
emails: email ? [email] : [],
|
||||
orgId
|
||||
orgId,
|
||||
isEmailVerified: serverCfg.trustSamlEmails
|
||||
},
|
||||
tx
|
||||
);
|
||||
@@ -410,13 +409,13 @@ export const samlConfigServiceFactory = ({
|
||||
}
|
||||
await licenseService.updateSubscriptionOrgMemberCount(organization.id);
|
||||
|
||||
const isUserCompleted = Boolean(user.isAccepted && user.isEmailVerified);
|
||||
const isUserCompleted = Boolean(user.isAccepted && user.isEmailVerified && userAlias.isEmailVerified);
|
||||
const providerAuthToken = crypto.jwt().sign(
|
||||
{
|
||||
authTokenType: AuthTokenType.PROVIDER_TOKEN,
|
||||
userId: user.id,
|
||||
username: user.username,
|
||||
...(user.email && { email: user.email, isEmailVerified: user.isEmailVerified }),
|
||||
...(user.email && { email: user.email, isEmailVerified: userAlias.isEmailVerified }),
|
||||
firstName,
|
||||
lastName,
|
||||
organizationName: organization.name,
|
||||
@@ -424,6 +423,7 @@ export const samlConfigServiceFactory = ({
|
||||
organizationSlug: organization.slug,
|
||||
authMethod: authProvider,
|
||||
hasExchangedPrivateKey: true,
|
||||
aliasId: userAlias.id,
|
||||
authType: UserAliasType.SAML,
|
||||
isUserCompleted,
|
||||
...(relayState
|
||||
@@ -440,10 +440,11 @@ export const samlConfigServiceFactory = ({
|
||||
|
||||
await samlConfigDAL.update({ orgId }, { lastUsed: new Date() });
|
||||
|
||||
if (user.email && !user.isEmailVerified) {
|
||||
if (user.email && !userAlias.isEmailVerified) {
|
||||
const token = await tokenService.createTokenForUser({
|
||||
type: TokenType.TOKEN_EMAIL_VERIFICATION,
|
||||
userId: user.id
|
||||
userId: user.id,
|
||||
aliasId: userAlias.id
|
||||
});
|
||||
|
||||
await smtpService.sendMail({
|
||||
|
@@ -4,6 +4,7 @@ import { TDbClient } from "@app/db";
|
||||
import {
|
||||
SecretApprovalRequestsSchema,
|
||||
TableName,
|
||||
TOrgMemberships,
|
||||
TSecretApprovalRequests,
|
||||
TSecretApprovalRequestsSecrets,
|
||||
TUserGroupMembership,
|
||||
@@ -107,11 +108,32 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
|
||||
`${TableName.SecretApprovalRequestReviewer}.reviewerUserId`,
|
||||
`secretApprovalReviewerUser.id`
|
||||
)
|
||||
|
||||
.leftJoin<TOrgMemberships>(
|
||||
db(TableName.OrgMembership).as("approverOrgMembership"),
|
||||
`${TableName.SecretApprovalPolicyApprover}.approverUserId`,
|
||||
`approverOrgMembership.userId`
|
||||
)
|
||||
|
||||
.leftJoin<TOrgMemberships>(
|
||||
db(TableName.OrgMembership).as("approverGroupOrgMembership"),
|
||||
`secretApprovalPolicyGroupApproverUser.id`,
|
||||
`approverGroupOrgMembership.userId`
|
||||
)
|
||||
|
||||
.leftJoin<TOrgMemberships>(
|
||||
db(TableName.OrgMembership).as("reviewerOrgMembership"),
|
||||
`${TableName.SecretApprovalRequestReviewer}.reviewerUserId`,
|
||||
`reviewerOrgMembership.userId`
|
||||
)
|
||||
|
||||
.select(selectAllTableCols(TableName.SecretApprovalRequest))
|
||||
.select(
|
||||
tx.ref("approverUserId").withSchema(TableName.SecretApprovalPolicyApprover),
|
||||
tx.ref("userId").withSchema("approverUserGroupMembership").as("approverGroupUserId"),
|
||||
tx.ref("email").withSchema("secretApprovalPolicyApproverUser").as("approverEmail"),
|
||||
tx.ref("isActive").withSchema("approverOrgMembership").as("approverIsOrgMembershipActive"),
|
||||
tx.ref("isActive").withSchema("approverGroupOrgMembership").as("approverGroupIsOrgMembershipActive"),
|
||||
tx.ref("email").withSchema("secretApprovalPolicyGroupApproverUser").as("approverGroupEmail"),
|
||||
tx.ref("username").withSchema("secretApprovalPolicyApproverUser").as("approverUsername"),
|
||||
tx.ref("username").withSchema("secretApprovalPolicyGroupApproverUser").as("approverGroupUsername"),
|
||||
@@ -148,6 +170,7 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
|
||||
tx.ref("username").withSchema("secretApprovalReviewerUser").as("reviewerUsername"),
|
||||
tx.ref("firstName").withSchema("secretApprovalReviewerUser").as("reviewerFirstName"),
|
||||
tx.ref("lastName").withSchema("secretApprovalReviewerUser").as("reviewerLastName"),
|
||||
tx.ref("isActive").withSchema("reviewerOrgMembership").as("reviewerIsOrgMembershipActive"),
|
||||
tx.ref("id").withSchema(TableName.SecretApprovalPolicy).as("policyId"),
|
||||
tx.ref("name").withSchema(TableName.SecretApprovalPolicy).as("policyName"),
|
||||
tx.ref("projectId").withSchema(TableName.Environment),
|
||||
@@ -157,7 +180,11 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
|
||||
tx.ref("enforcementLevel").withSchema(TableName.SecretApprovalPolicy).as("policyEnforcementLevel"),
|
||||
tx.ref("allowedSelfApprovals").withSchema(TableName.SecretApprovalPolicy).as("policyAllowedSelfApprovals"),
|
||||
tx.ref("approvals").withSchema(TableName.SecretApprovalPolicy).as("policyApprovals"),
|
||||
tx.ref("deletedAt").withSchema(TableName.SecretApprovalPolicy).as("policyDeletedAt")
|
||||
tx.ref("deletedAt").withSchema(TableName.SecretApprovalPolicy).as("policyDeletedAt"),
|
||||
tx
|
||||
.ref("shouldCheckSecretPermission")
|
||||
.withSchema(TableName.SecretApprovalPolicy)
|
||||
.as("policySecretReadAccessCompat")
|
||||
);
|
||||
|
||||
const findById = async (id: string, tx?: Knex) => {
|
||||
@@ -197,7 +224,8 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
|
||||
enforcementLevel: el.policyEnforcementLevel,
|
||||
envId: el.policyEnvId,
|
||||
deletedAt: el.policyDeletedAt,
|
||||
allowedSelfApprovals: el.policyAllowedSelfApprovals
|
||||
allowedSelfApprovals: el.policyAllowedSelfApprovals,
|
||||
shouldCheckSecretPermission: el.policySecretReadAccessCompat
|
||||
}
|
||||
}),
|
||||
childrenMapper: [
|
||||
@@ -211,9 +239,21 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
|
||||
reviewerLastName: lastName,
|
||||
reviewerUsername: username,
|
||||
reviewerFirstName: firstName,
|
||||
reviewerComment: comment
|
||||
reviewerComment: comment,
|
||||
reviewerIsOrgMembershipActive: isOrgMembershipActive
|
||||
}) =>
|
||||
userId ? { userId, status, email, firstName, lastName, username, comment: comment ?? "" } : undefined
|
||||
userId
|
||||
? {
|
||||
userId,
|
||||
status,
|
||||
email,
|
||||
firstName,
|
||||
lastName,
|
||||
username,
|
||||
comment: comment ?? "",
|
||||
isOrgMembershipActive
|
||||
}
|
||||
: undefined
|
||||
},
|
||||
{
|
||||
key: "approverUserId",
|
||||
@@ -223,13 +263,15 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
|
||||
approverEmail: email,
|
||||
approverUsername: username,
|
||||
approverLastName: lastName,
|
||||
approverFirstName: firstName
|
||||
approverFirstName: firstName,
|
||||
approverIsOrgMembershipActive: isOrgMembershipActive
|
||||
}) => ({
|
||||
userId,
|
||||
email,
|
||||
firstName,
|
||||
lastName,
|
||||
username
|
||||
username,
|
||||
isOrgMembershipActive
|
||||
})
|
||||
},
|
||||
{
|
||||
@@ -240,13 +282,15 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
|
||||
approverGroupEmail: email,
|
||||
approverGroupUsername: username,
|
||||
approverGroupLastName: lastName,
|
||||
approverGroupFirstName: firstName
|
||||
approverGroupFirstName: firstName,
|
||||
approverGroupIsOrgMembershipActive: isOrgMembershipActive
|
||||
}) => ({
|
||||
userId,
|
||||
email,
|
||||
firstName,
|
||||
lastName,
|
||||
username
|
||||
username,
|
||||
isOrgMembershipActive
|
||||
})
|
||||
},
|
||||
{
|
||||
@@ -653,14 +697,15 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
|
||||
db.ref("firstName").withSchema("committerUser").as("committerUserFirstName"),
|
||||
db.ref("lastName").withSchema("committerUser").as("committerUserLastName")
|
||||
)
|
||||
.distinctOn(`${TableName.SecretApprovalRequest}.id`)
|
||||
.as("inner");
|
||||
|
||||
const query = (tx || db)
|
||||
.select("*")
|
||||
const countQuery = (await (tx || db)
|
||||
.select(db.raw("count(*) OVER() as total_count"))
|
||||
.from(innerQuery)
|
||||
.orderBy("createdAt", "desc") as typeof innerQuery;
|
||||
.from(innerQuery.clone().distinctOn(`${TableName.SecretApprovalRequest}.id`))) as Array<{
|
||||
total_count: number;
|
||||
}>;
|
||||
|
||||
const query = (tx || db).select("*").from(innerQuery).orderBy("createdAt", "desc") as typeof innerQuery;
|
||||
|
||||
if (search) {
|
||||
void query.where((qb) => {
|
||||
@@ -686,8 +731,7 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
|
||||
.where("w.rank", ">=", rankOffset)
|
||||
.andWhere("w.rank", "<", rankOffset + limit);
|
||||
|
||||
// @ts-expect-error knex does not infer
|
||||
const totalCount = Number(docs[0]?.total_count || 0);
|
||||
const totalCount = Number(countQuery[0]?.total_count || 0);
|
||||
|
||||
const formattedDoc = sqlNestRelationships({
|
||||
data: docs,
|
||||
|
@@ -258,6 +258,7 @@ export const secretApprovalRequestServiceFactory = ({
|
||||
if (actor === ActorType.SERVICE) throw new BadRequestError({ message: "Cannot use service token" });
|
||||
|
||||
const secretApprovalRequest = await secretApprovalRequestDAL.findById(id);
|
||||
|
||||
if (!secretApprovalRequest)
|
||||
throw new NotFoundError({ message: `Secret approval request with ID '${id}' not found` });
|
||||
|
||||
@@ -280,13 +281,22 @@ export const secretApprovalRequestServiceFactory = ({
|
||||
) {
|
||||
throw new ForbiddenRequestError({ message: "User has insufficient privileges" });
|
||||
}
|
||||
const getHasSecretReadAccess = (environment: string, tags: { slug: string }[], secretPath?: string) => {
|
||||
const canRead = hasSecretReadValueOrDescribePermission(permission, ProjectPermissionSecretActions.ReadValue, {
|
||||
environment,
|
||||
secretPath: secretPath || "/",
|
||||
secretTags: tags.map((i) => i.slug)
|
||||
});
|
||||
return canRead;
|
||||
const getHasSecretReadAccess = (
|
||||
shouldCheckSecretPermission: boolean | null | undefined,
|
||||
environment: string,
|
||||
tags: { slug: string }[],
|
||||
secretPath?: string
|
||||
) => {
|
||||
if (shouldCheckSecretPermission) {
|
||||
const canRead = hasSecretReadValueOrDescribePermission(permission, ProjectPermissionSecretActions.ReadValue, {
|
||||
environment,
|
||||
secretPath: secretPath || "/",
|
||||
secretTags: tags.map((i) => i.slug)
|
||||
});
|
||||
return canRead;
|
||||
}
|
||||
|
||||
return true;
|
||||
};
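In effect, the extra flag makes the secret-value permission check opt-in per policy; a hedged illustration of the fallback behaviour, assuming the helper above:

// Policies created before this change have shouldCheckSecretPermission unset (null/undefined),
// so their approval requests keep exposing secret values exactly as before.
getHasSecretReadAccess(null, "dev", []);         // true - legacy policy, no permission check
getHasSecretReadAccess(true, "dev", [], "/api"); // evaluated against the ReadValue permission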
|
||||
|
||||
let secrets;
|
||||
@@ -308,8 +318,18 @@ export const secretApprovalRequestServiceFactory = ({
|
||||
version: el.version,
|
||||
secretMetadata: el.secretMetadata as ResourceMetadataDTO,
|
||||
isRotatedSecret: el.secret?.isRotatedSecret ?? false,
|
||||
secretValueHidden: !getHasSecretReadAccess(secretApprovalRequest.environment, el.tags, secretPath?.[0]?.path),
|
||||
secretValue: !getHasSecretReadAccess(secretApprovalRequest.environment, el.tags, secretPath?.[0]?.path)
|
||||
secretValueHidden: !getHasSecretReadAccess(
|
||||
secretApprovalRequest.policy.shouldCheckSecretPermission,
|
||||
secretApprovalRequest.environment,
|
||||
el.tags,
|
||||
secretPath?.[0]?.path
|
||||
),
|
||||
secretValue: !getHasSecretReadAccess(
|
||||
secretApprovalRequest.policy.shouldCheckSecretPermission,
|
||||
secretApprovalRequest.environment,
|
||||
el.tags,
|
||||
secretPath?.[0]?.path
|
||||
)
|
||||
? INFISICAL_SECRET_VALUE_HIDDEN_MASK
|
||||
: el.secret && el.secret.isRotatedSecret
|
||||
? undefined
|
||||
@@ -325,11 +345,17 @@ export const secretApprovalRequestServiceFactory = ({
|
||||
id: el.secret.id,
|
||||
version: el.secret.version,
|
||||
secretValueHidden: !getHasSecretReadAccess(
|
||||
secretApprovalRequest.policy.shouldCheckSecretPermission,
|
||||
secretApprovalRequest.environment,
|
||||
el.tags,
|
||||
secretPath?.[0]?.path
|
||||
),
|
||||
secretValue: !getHasSecretReadAccess(secretApprovalRequest.environment, el.tags, secretPath?.[0]?.path)
|
||||
secretValue: !getHasSecretReadAccess(
|
||||
secretApprovalRequest.policy.shouldCheckSecretPermission,
|
||||
secretApprovalRequest.environment,
|
||||
el.tags,
|
||||
secretPath?.[0]?.path
|
||||
)
|
||||
? INFISICAL_SECRET_VALUE_HIDDEN_MASK
|
||||
: el.secret.encryptedValue
|
||||
? secretManagerDecryptor({ cipherTextBlob: el.secret.encryptedValue }).toString()
|
||||
@@ -345,11 +371,17 @@ export const secretApprovalRequestServiceFactory = ({
|
||||
id: el.secretVersion.id,
|
||||
version: el.secretVersion.version,
|
||||
secretValueHidden: !getHasSecretReadAccess(
|
||||
secretApprovalRequest.policy.shouldCheckSecretPermission,
|
||||
secretApprovalRequest.environment,
|
||||
el.tags,
|
||||
secretPath?.[0]?.path
|
||||
),
|
||||
secretValue: !getHasSecretReadAccess(secretApprovalRequest.environment, el.tags, secretPath?.[0]?.path)
|
||||
secretValue: !getHasSecretReadAccess(
|
||||
secretApprovalRequest.policy.shouldCheckSecretPermission,
|
||||
secretApprovalRequest.environment,
|
||||
el.tags,
|
||||
secretPath?.[0]?.path
|
||||
)
|
||||
? INFISICAL_SECRET_VALUE_HIDDEN_MASK
|
||||
: el.secretVersion.encryptedValue
|
||||
? secretManagerDecryptor({ cipherTextBlob: el.secretVersion.encryptedValue }).toString()
|
||||
@@ -367,7 +399,12 @@ export const secretApprovalRequestServiceFactory = ({
|
||||
const encryptedSecrets = await secretApprovalRequestSecretDAL.findByRequestId(secretApprovalRequest.id);
|
||||
secrets = encryptedSecrets.map((el) => ({
|
||||
...el,
|
||||
secretValueHidden: !getHasSecretReadAccess(secretApprovalRequest.environment, el.tags, secretPath?.[0]?.path),
|
||||
secretValueHidden: !getHasSecretReadAccess(
|
||||
secretApprovalRequest.policy.shouldCheckSecretPermission,
|
||||
secretApprovalRequest.environment,
|
||||
el.tags,
|
||||
secretPath?.[0]?.path
|
||||
),
|
||||
...decryptSecretWithBot(el, botKey),
|
||||
secret: el.secret
|
||||
? {
|
||||
@@ -1447,6 +1484,7 @@ export const secretApprovalRequestServiceFactory = ({
|
||||
|
||||
const commits: Omit<TSecretApprovalRequestsSecretsV2Insert, "requestId">[] = [];
|
||||
const commitTagIds: Record<string, string[]> = {};
|
||||
const existingTagIds: Record<string, string[]> = {};
|
||||
|
||||
const { encryptor: secretManagerEncryptor } = await kmsService.createCipherPairWithDataKey({
|
||||
type: KmsDataKey.SecretManager,
|
||||
@@ -1512,6 +1550,11 @@ export const secretApprovalRequestServiceFactory = ({
|
||||
type: SecretType.Shared
|
||||
}))
|
||||
);
|
||||
|
||||
secretsToUpdateStoredInDB.forEach((el) => {
|
||||
if (el.tags?.length) existingTagIds[el.key] = el.tags.map((i) => i.id);
|
||||
});
|
||||
|
||||
if (secretsToUpdateStoredInDB.length !== secretsToUpdate.length)
|
||||
throw new NotFoundError({
|
||||
message: `Secret does not exist: ${secretsToUpdateStoredInDB.map((el) => el.key).join(",")}`
|
||||
@@ -1555,7 +1598,10 @@ export const secretApprovalRequestServiceFactory = ({
|
||||
secretMetadata
|
||||
}) => {
|
||||
const secretId = updatingSecretsGroupByKey[secretKey][0].id;
|
||||
if (tagIds?.length) commitTagIds[newSecretName ?? secretKey] = tagIds;
|
||||
if (tagIds?.length || existingTagIds[secretKey]?.length) {
|
||||
commitTagIds[newSecretName ?? secretKey] = tagIds || existingTagIds[secretKey];
|
||||
}
|
||||
|
||||
return {
|
||||
...latestSecretVersions[secretId],
|
||||
secretMetadata,
|
||||
|
@@ -13,7 +13,8 @@ export const PgSqlLock = {
  SecretRotationV2Creation: (folderId: string) => pgAdvisoryLockHashText(`secret-rotation-v2-creation:${folderId}`),
  CreateProject: (orgId: string) => pgAdvisoryLockHashText(`create-project:${orgId}`),
  CreateFolder: (envId: string, projectId: string) => pgAdvisoryLockHashText(`create-folder:${envId}-${projectId}`),
  SshInit: (projectId: string) => pgAdvisoryLockHashText(`ssh-bootstrap:${projectId}`)
  SshInit: (projectId: string) => pgAdvisoryLockHashText(`ssh-bootstrap:${projectId}`),
  IdentityLogin: (identityId: string, nonce: string) => pgAdvisoryLockHashText(`identity-login:${identityId}:${nonce}`)
} as const;

// all the key prefixes used must be set here to avoid conflict
@@ -40,6 +41,7 @@ export const KeyStorePrefixes = {
  SecretRotationLock: (rotationId: string) => `secret-rotation-v2-mutex-${rotationId}` as const,
  SecretScanningLock: (dataSourceId: string, resourceExternalId: string) =>
    `secret-scanning-v2-mutex-${dataSourceId}-${resourceExternalId}` as const,
  IdentityLockoutLock: (lockoutKey: string) => `identity-lockout-lock-${lockoutKey}` as const,
  CaOrderCertificateForSubscriberLock: (subscriberId: string) =>
    `ca-order-certificate-for-subscriber-lock-${subscriberId}` as const,
  SecretSyncLastRunTimestamp: (syncId: string) => `secret-sync-last-run-${syncId}` as const,
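How the new IdentityLogin key is consumed is outside this hunk; a minimal sketch, assuming the usual Knex pattern of taking a transaction-scoped Postgres advisory lock on the hashed key, might look like this.

// Assumption: the lock is taken inside a transaction so it is released automatically on commit or rollback.
await db.transaction(async (tx) => {
  await tx.raw("SELECT pg_advisory_xact_lock(?)", [PgSqlLock.IdentityLogin(identityId, nonce)]);
  // ...perform the login bookkeeping that must not run concurrently for this identity/nonce pair...
});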
|
||||
|
@@ -166,7 +166,12 @@ export const UNIVERSAL_AUTH = {
|
||||
accessTokenNumUsesLimit:
|
||||
"The maximum number of times that an access token can be used; a value of 0 implies infinite number of uses.",
|
||||
accessTokenPeriod:
|
||||
"The period for an access token in seconds. This value will be referenced at renewal time. Default value is 0."
|
||||
"The period for an access token in seconds. This value will be referenced at renewal time. Default value is 0.",
|
||||
lockoutEnabled: "Whether the lockout feature is enabled.",
|
||||
lockoutThreshold: "The amount of times login must fail before locking the identity auth method.",
|
||||
lockoutDurationSeconds: "How long an identity auth method lockout lasts.",
|
||||
lockoutCounterResetSeconds:
|
||||
"How long to wait from the most recent failed login until resetting the lockout counter."
|
||||
},
|
||||
RETRIEVE: {
|
||||
identityId: "The ID of the identity to retrieve the auth method for."
|
||||
@@ -181,7 +186,12 @@ export const UNIVERSAL_AUTH = {
|
||||
accessTokenTTL: "The new lifetime for an access token in seconds.",
|
||||
accessTokenMaxTTL: "The new maximum lifetime for an access token in seconds.",
|
||||
accessTokenNumUsesLimit: "The new maximum number of times that an access token can be used.",
|
||||
accessTokenPeriod: "The new period for an access token in seconds."
|
||||
accessTokenPeriod: "The new period for an access token in seconds.",
|
||||
lockoutEnabled: "Whether the lockout feature is enabled.",
|
||||
lockoutThreshold: "The amount of times login must fail before locking the identity auth method.",
|
||||
lockoutDurationSeconds: "How long an identity auth method lockout lasts.",
|
||||
lockoutCounterResetSeconds:
|
||||
"How long to wait from the most recent failed login until resetting the lockout counter."
|
||||
},
|
||||
CREATE_CLIENT_SECRET: {
|
||||
identityId: "The ID of the identity to create a client secret for.",
|
||||
@@ -201,6 +211,9 @@ export const UNIVERSAL_AUTH = {
|
||||
identityId: "The ID of the identity to revoke the client secret from.",
|
||||
clientSecretId: "The ID of the client secret to revoke."
|
||||
},
|
||||
CLEAR_CLIENT_LOCKOUTS: {
|
||||
identityId: "The ID of the identity to clear the client lockouts from."
|
||||
},
|
||||
RENEW_ACCESS_TOKEN: {
|
||||
accessToken: "The access token to renew."
|
||||
},
|
||||
@@ -2148,7 +2161,9 @@ export const CertificateAuthorities = {
|
||||
directoryUrl: `The directory URL for the ACME Certificate Authority.`,
|
||||
accountEmail: `The email address for the ACME Certificate Authority.`,
|
||||
provider: `The DNS provider for the ACME Certificate Authority.`,
|
||||
hostedZoneId: `The hosted zone ID for the ACME Certificate Authority.`
|
||||
hostedZoneId: `The hosted zone ID for the ACME Certificate Authority.`,
|
||||
eabKid: `The External Account Binding (EAB) Key ID for the ACME Certificate Authority. Required if the ACME provider uses EAB.`,
|
||||
eabHmacKey: `The External Account Binding (EAB) HMAC key for the ACME Certificate Authority. Required if the ACME provider uses EAB.`
|
||||
},
|
||||
INTERNAL: {
|
||||
type: "The type of CA to create.",
|
||||
@@ -2312,6 +2327,15 @@ export const AppConnections = {
|
||||
OKTA: {
|
||||
instanceUrl: "The URL used to access your Okta organization.",
|
||||
apiToken: "The API token used to authenticate with Okta."
|
||||
},
|
||||
AZURE_ADCS: {
|
||||
adcsUrl:
|
||||
"The HTTPS URL of the Azure ADCS instance to connect with (e.g., 'https://adcs.yourdomain.com/certsrv').",
|
||||
username: "The username used to access Azure ADCS (format: 'DOMAIN\\username' or 'username@domain.com').",
|
||||
password: "The password used to access Azure ADCS.",
|
||||
sslRejectUnauthorized:
|
||||
"Whether or not to reject unauthorized SSL certificates (true/false). Set to false only in test environments with self-signed certificates.",
|
||||
sslCertificate: "The SSL certificate (PEM format) to use for secure connection."
|
||||
}
|
||||
}
|
||||
};
|
||||
|
@@ -218,6 +218,8 @@ const envSchema = z
    ),
    PARAMS_FOLDER_SECRET_DETECTION_ENTROPY: z.coerce.number().optional().default(3.7),

    INFISICAL_PRIMARY_INSTANCE_URL: zpStr(z.string().optional()),

    // HSM
    HSM_LIB_PATH: zpStr(z.string().optional()),
    HSM_PIN: zpStr(z.string().optional()),
backend/src/lib/ip/index.test.ts (new file, 121 lines)
@@ -0,0 +1,121 @@
|
||||
import { extractIPDetails, IPType, isValidCidr, isValidIp, isValidIpOrCidr } from "./index";
|
||||
|
||||
describe("IP Validation", () => {
|
||||
describe("isValidIp", () => {
|
||||
test("should validate IPv4 addresses with ports", () => {
|
||||
expect(isValidIp("192.168.1.1:8080")).toBe(true);
|
||||
expect(isValidIp("10.0.0.1:1234")).toBe(true);
|
||||
expect(isValidIp("172.16.0.1:80")).toBe(true);
|
||||
});
|
||||
|
||||
test("should validate IPv6 addresses with ports", () => {
|
||||
expect(isValidIp("[2001:db8::1]:8080")).toBe(true);
|
||||
expect(isValidIp("[fe80::1ff:fe23:4567:890a]:1234")).toBe(true);
|
||||
expect(isValidIp("[::1]:80")).toBe(true);
|
||||
});
|
||||
|
||||
test("should validate regular IPv4 addresses", () => {
|
||||
expect(isValidIp("192.168.1.1")).toBe(true);
|
||||
expect(isValidIp("10.0.0.1")).toBe(true);
|
||||
expect(isValidIp("172.16.0.1")).toBe(true);
|
||||
});
|
||||
|
||||
test("should validate regular IPv6 addresses", () => {
|
||||
expect(isValidIp("2001:db8::1")).toBe(true);
|
||||
expect(isValidIp("fe80::1ff:fe23:4567:890a")).toBe(true);
|
||||
expect(isValidIp("::1")).toBe(true);
|
||||
});
|
||||
|
||||
test("should reject invalid IP addresses", () => {
|
||||
expect(isValidIp("256.256.256.256")).toBe(false);
|
||||
expect(isValidIp("192.168.1")).toBe(false);
|
||||
expect(isValidIp("192.168.1.1.1")).toBe(false);
|
||||
expect(isValidIp("2001:db8::1::1")).toBe(false);
|
||||
expect(isValidIp("invalid")).toBe(false);
|
||||
});
|
||||
|
||||
test("should reject malformed IP addresses with ports", () => {
|
||||
expect(isValidIp("192.168.1.1:")).toBe(false);
|
||||
expect(isValidIp("192.168.1.1:abc")).toBe(false);
|
||||
expect(isValidIp("[2001:db8::1]")).toBe(false);
|
||||
expect(isValidIp("[2001:db8::1]:")).toBe(false);
|
||||
expect(isValidIp("[2001:db8::1]:abc")).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe("isValidCidr", () => {
|
||||
test("should validate IPv4 CIDR blocks", () => {
|
||||
expect(isValidCidr("192.168.1.0/24")).toBe(true);
|
||||
expect(isValidCidr("10.0.0.0/8")).toBe(true);
|
||||
expect(isValidCidr("172.16.0.0/16")).toBe(true);
|
||||
});
|
||||
|
||||
test("should validate IPv6 CIDR blocks", () => {
|
||||
expect(isValidCidr("2001:db8::/32")).toBe(true);
|
||||
expect(isValidCidr("fe80::/10")).toBe(true);
|
||||
expect(isValidCidr("::/0")).toBe(true);
|
||||
});
|
||||
|
||||
test("should reject invalid CIDR blocks", () => {
|
||||
expect(isValidCidr("192.168.1.0/33")).toBe(false);
|
||||
expect(isValidCidr("2001:db8::/129")).toBe(false);
|
||||
expect(isValidCidr("192.168.1.0/abc")).toBe(false);
|
||||
expect(isValidCidr("invalid/24")).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe("isValidIpOrCidr", () => {
|
||||
test("should validate both IP addresses and CIDR blocks", () => {
|
||||
expect(isValidIpOrCidr("192.168.1.1")).toBe(true);
|
||||
expect(isValidIpOrCidr("2001:db8::1")).toBe(true);
|
||||
expect(isValidIpOrCidr("192.168.1.0/24")).toBe(true);
|
||||
expect(isValidIpOrCidr("2001:db8::/32")).toBe(true);
|
||||
});
|
||||
|
||||
test("should reject invalid inputs", () => {
|
||||
expect(isValidIpOrCidr("invalid")).toBe(false);
|
||||
expect(isValidIpOrCidr("192.168.1.0/33")).toBe(false);
|
||||
expect(isValidIpOrCidr("2001:db8::/129")).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe("extractIPDetails", () => {
|
||||
test("should extract IPv4 address details", () => {
|
||||
const result = extractIPDetails("192.168.1.1");
|
||||
expect(result).toEqual({
|
||||
ipAddress: "192.168.1.1",
|
||||
type: IPType.IPV4
|
||||
});
|
||||
});
|
||||
|
||||
test("should extract IPv6 address details", () => {
|
||||
const result = extractIPDetails("2001:db8::1");
|
||||
expect(result).toEqual({
|
||||
ipAddress: "2001:db8::1",
|
||||
type: IPType.IPV6
|
||||
});
|
||||
});
|
||||
|
||||
test("should extract IPv4 CIDR details", () => {
|
||||
const result = extractIPDetails("192.168.1.0/24");
|
||||
expect(result).toEqual({
|
||||
ipAddress: "192.168.1.0",
|
||||
type: IPType.IPV4,
|
||||
prefix: 24
|
||||
});
|
||||
});
|
||||
|
||||
test("should extract IPv6 CIDR details", () => {
|
||||
const result = extractIPDetails("2001:db8::/32");
|
||||
expect(result).toEqual({
|
||||
ipAddress: "2001:db8::",
|
||||
type: IPType.IPV6,
|
||||
prefix: 32
|
||||
});
|
||||
});
|
||||
|
||||
test("should throw error for invalid IP", () => {
|
||||
expect(() => extractIPDetails("invalid")).toThrow("Failed to extract IP details");
|
||||
});
|
||||
});
|
||||
});
|
@@ -1,5 +1,7 @@
import net from "node:net";

import RE2 from "re2";

import { ForbiddenRequestError } from "../errors";

export enum IPType {
@@ -7,25 +9,55 @@ export enum IPType {
  IPV6 = "ipv6"
}

const PORT_REGEX = new RE2(/^\d+$/);

/**
 * Strips the port from an IP address if one is present.
 * Handles both IPv4 (e.g. 1.2.3.4:1234) and IPv6 (e.g. [2001:db8::1]:8080) formats.
 * Returns the IP address without the port.
 */
const stripPort = (ip: string): { ipAddress: string } => {
  // Handle IPv6 with port (e.g. [2001:db8::1]:8080)
  if (ip.startsWith("[") && ip.includes("]:")) {
    const endBracketIndex = ip.indexOf("]");
    if (endBracketIndex === -1) return { ipAddress: ip };
    const ipPart = ip.slice(1, endBracketIndex);
    const portPart = ip.slice(endBracketIndex + 2);
    if (!portPart || !PORT_REGEX.test(portPart)) return { ipAddress: ip };
    return { ipAddress: ipPart };
  }

  // Handle IPv4 with port (e.g. 1.2.3.4:1234)
  if (ip.includes(":")) {
    const [ipPart, portPart] = ip.split(":");
    if (!portPart || !PORT_REGEX.test(portPart)) return { ipAddress: ip };
    return { ipAddress: ipPart };
  }

  return { ipAddress: ip };
};
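A few hedged examples of the intended behaviour (not part of the diff): the helper only strips a suffix that parses as a numeric port, so bare IPv6 colons are left alone.

// Expected results, assuming the implementation above:
stripPort("192.168.1.1:8080");   // { ipAddress: "192.168.1.1" }
stripPort("[2001:db8::1]:443");  // { ipAddress: "2001:db8::1" }
stripPort("2001:db8::1");        // unchanged: bare IPv6 colons are not a port
stripPort("10.0.0.1:abc");       // unchanged: "abc" fails the numeric port check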
|
||||
|
||||
/**
 * Return details of IP [ip]:
 * - If [ip] is a specific IP address then return the IPv4/IPv6 address
 * - If [ip] is a subnet then return the network IPv4/IPv6 address and prefix
 */
export const extractIPDetails = (ip: string) => {
  if (net.isIPv4(ip))
  const { ipAddress } = stripPort(ip);

  if (net.isIPv4(ipAddress))
    return {
      ipAddress: ip,
      ipAddress,
      type: IPType.IPV4
    };

  if (net.isIPv6(ip))
  if (net.isIPv6(ipAddress))
    return {
      ipAddress: ip,
      ipAddress,
      type: IPType.IPV6
    };

  const [ipNet, prefix] = ip.split("/");
  const [ipNet, prefix] = ipAddress.split("/");

  let type;
  switch (net.isIP(ipNet)) {
@@ -57,7 +89,8 @@ export const extractIPDetails = (ip: string) => {
 *
 */
export const isValidCidr = (cidr: string): boolean => {
  const [ip, prefix] = cidr.split("/");
  const { ipAddress } = stripPort(cidr);
  const [ip, prefix] = ipAddress.split("/");

  const prefixNum = parseInt(prefix, 10);

@@ -90,13 +123,15 @@ export const isValidCidr = (cidr: string): boolean => {
 *
 */
export const isValidIpOrCidr = (ip: string): boolean => {
  const { ipAddress } = stripPort(ip);

  // if the string contains a slash, treat it as a CIDR block
  if (ip.includes("/")) {
    return isValidCidr(ip);
  if (ipAddress.includes("/")) {
    return isValidCidr(ipAddress);
  }

  // otherwise, treat it as a standalone IP address
  if (net.isIPv4(ip) || net.isIPv6(ip)) {
  if (net.isIPv4(ipAddress) || net.isIPv6(ipAddress)) {
    return true;
  }

@@ -104,7 +139,8 @@ export const isValidIpOrCidr = (ip: string): boolean => {
};

export const isValidIp = (ip: string) => {
  return net.isIPv4(ip) || net.isIPv6(ip);
  const { ipAddress } = stripPort(ip);
  return net.isIPv4(ipAddress) || net.isIPv6(ipAddress);
};

export type TIp = {
@@ -112,6 +148,7 @@ export type TIp = {
  type: IPType;
  prefix: number;
};

/**
 * Validates the IP address [ipAddress] against the trusted IPs [trustedIps].
 */
@@ -126,8 +163,9 @@ export const checkIPAgainstBlocklist = ({ ipAddress, trustedIps }: { ipAddress:
    }
  }

  const { type } = extractIPDetails(ipAddress);
  const check = blockList.check(ipAddress, type);
  const { type, ipAddress: cleanIpAddress } = extractIPDetails(ipAddress);

  const check = blockList.check(cleanIpAddress, type);

  if (!check)
    throw new ForbiddenRequestError({
||||
|
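Illustrative sketch (not part of the diff above): how the port-stripping change behaves from a caller's point of view, assuming the helpers resolve through the backend's @app/lib/ip alias.

import { extractIPDetails, isValidIp, IPType } from "@app/lib/ip";

// IPv4 with a port: the port is stripped before validation and extraction.
isValidIp("203.0.113.10:8443"); // true
extractIPDetails("203.0.113.10:8443"); // { ipAddress: "203.0.113.10", type: IPType.IPV4 }

// Bracketed IPv6 with a port is handled the same way.
isValidIp("[2001:db8::1]:8080"); // true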
43 backend/src/lib/retry/index.ts Normal file
@@ -0,0 +1,43 @@
/* eslint-disable no-await-in-loop */
interface GitHubApiError extends Error {
  status?: number;
  response?: {
    status?: number;
    headers?: {
      "x-ratelimit-reset"?: string;
    };
  };
}

const delay = (ms: number) =>
  new Promise<void>((resolve) => {
    setTimeout(() => resolve(), ms);
  });

export const retryWithBackoff = async <T>(fn: () => Promise<T>, maxRetries = 3, baseDelay = 1000): Promise<T> => {
  let lastError: Error;

  for (let attempt = 0; attempt <= maxRetries; attempt += 1) {
    try {
      return await fn();
    } catch (error) {
      lastError = error as Error;
      const gitHubError = error as GitHubApiError;
      const statusCode = gitHubError.status || gitHubError.response?.status;
      if (statusCode === 403) {
        const rateLimitReset = gitHubError.response?.headers?.["x-ratelimit-reset"];
        if (rateLimitReset) {
          const resetTime = parseInt(rateLimitReset, 10) * 1000;
          const waitTime = Math.max(resetTime - Date.now(), baseDelay);
          await delay(Math.min(waitTime, 60000));
        } else {
          await delay(baseDelay * 2 ** attempt);
        }
      } else if (attempt < maxRetries) {
        await delay(baseDelay * 2 ** attempt);
      }
    }
  }

  throw lastError!;
};
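Illustrative sketch (not part of the diff above): retryWithBackoff wraps any promise-returning call. The fetch helper below is hypothetical, and the @app/lib/retry import path is an assumption based on the file location.

import { retryWithBackoff } from "@app/lib/retry";

// Illustrative GitHub API call; 403 responses carrying an x-ratelimit-reset
// header wait until the reset time (capped at 60s), other failures back off
// exponentially (1s, 2s, 4s with the defaults) for up to 3 retries.
const fetchRepo = async () => {
  const res = await fetch("https://api.github.com/repos/Infisical/infisical");
  if (!res.ok) throw Object.assign(new Error("GitHub request failed"), { status: res.status });
  return res.json();
};

const repo = await retryWithBackoff(fetchRepo);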
@@ -107,110 +107,117 @@ export const extractAuth = async (req: FastifyRequest, jwtSecret: string) => {
};

// ! Important: You can only 100% count on the `req.permission.orgId` field being present when the auth method is Identity Access Token (Machine Identity).
export const injectIdentity = fp(async (server: FastifyZodProvider) => {
server.decorateRequest("auth", null);
server.addHook("onRequest", async (req) => {
const appCfg = getConfig();
export const injectIdentity = fp(
async (server: FastifyZodProvider, opt: { shouldForwardWritesToPrimaryInstance?: boolean }) => {
server.decorateRequest("auth", null);
server.decorateRequest("shouldForwardWritesToPrimaryInstance", Boolean(opt.shouldForwardWritesToPrimaryInstance));
server.addHook("onRequest", async (req) => {
const appCfg = getConfig();

if (req.url.includes(".well-known/est") || req.url.includes("/api/v3/auth/")) {
return;
}

// Authentication is handled on a route-level here.
if (req.url.includes("/api/v1/workflow-integrations/microsoft-teams/message-endpoint")) {
return;
}

const { authMode, token, actor } = await extractAuth(req, appCfg.AUTH_SECRET);

if (!authMode) return;

switch (authMode) {
case AuthMode.JWT: {
const { user, tokenVersionId, orgId } = await server.services.authToken.fnValidateJwtIdentity(token);
requestContext.set("orgId", orgId);
req.auth = {
authMode: AuthMode.JWT,
user,
userId: user.id,
tokenVersionId,
actor,
orgId: orgId as string,
authMethod: token.authMethod,
isMfaVerified: token.isMfaVerified,
token
};
break;
if (opt.shouldForwardWritesToPrimaryInstance && req.method !== "GET") {
return;
}
case AuthMode.IDENTITY_ACCESS_TOKEN: {
const identity = await server.services.identityAccessToken.fnValidateIdentityAccessToken(token, req.realIp);
const serverCfg = await getServerCfg();
requestContext.set("orgId", identity.orgId);
req.auth = {
authMode: AuthMode.IDENTITY_ACCESS_TOKEN,
actor,
orgId: identity.orgId,
identityId: identity.identityId,
identityName: identity.name,
authMethod: null,
isInstanceAdmin: serverCfg?.adminIdentityIds?.includes(identity.identityId),
token
};
if (token?.identityAuth?.oidc) {
requestContext.set("identityAuthInfo", {
identityId: identity.identityId,
oidc: token?.identityAuth?.oidc
});

if (req.url.includes(".well-known/est") || req.url.includes("/api/v3/auth/")) {
return;
}

// Authentication is handled on a route-level here.
if (req.url.includes("/api/v1/workflow-integrations/microsoft-teams/message-endpoint")) {
return;
}

const { authMode, token, actor } = await extractAuth(req, appCfg.AUTH_SECRET);

if (!authMode) return;

switch (authMode) {
case AuthMode.JWT: {
const { user, tokenVersionId, orgId } = await server.services.authToken.fnValidateJwtIdentity(token);
requestContext.set("orgId", orgId);
req.auth = {
authMode: AuthMode.JWT,
user,
userId: user.id,
tokenVersionId,
actor,
orgId: orgId as string,
authMethod: token.authMethod,
isMfaVerified: token.isMfaVerified,
token
};
break;
}
if (token?.identityAuth?.kubernetes) {
requestContext.set("identityAuthInfo", {
case AuthMode.IDENTITY_ACCESS_TOKEN: {
const identity = await server.services.identityAccessToken.fnValidateIdentityAccessToken(token, req.realIp);
const serverCfg = await getServerCfg();
requestContext.set("orgId", identity.orgId);
req.auth = {
authMode: AuthMode.IDENTITY_ACCESS_TOKEN,
actor,
orgId: identity.orgId,
identityId: identity.identityId,
kubernetes: token?.identityAuth?.kubernetes
});
identityName: identity.name,
authMethod: null,
isInstanceAdmin: serverCfg?.adminIdentityIds?.includes(identity.identityId),
token
};
if (token?.identityAuth?.oidc) {
requestContext.set("identityAuthInfo", {
identityId: identity.identityId,
oidc: token?.identityAuth?.oidc
});
}
if (token?.identityAuth?.kubernetes) {
requestContext.set("identityAuthInfo", {
identityId: identity.identityId,
kubernetes: token?.identityAuth?.kubernetes
});
}
if (token?.identityAuth?.aws) {
requestContext.set("identityAuthInfo", {
identityId: identity.identityId,
aws: token?.identityAuth?.aws
});
}
break;
}
if (token?.identityAuth?.aws) {
requestContext.set("identityAuthInfo", {
identityId: identity.identityId,
aws: token?.identityAuth?.aws
});
case AuthMode.SERVICE_TOKEN: {
const serviceToken = await server.services.serviceToken.fnValidateServiceToken(token);
requestContext.set("orgId", serviceToken.orgId);
req.auth = {
orgId: serviceToken.orgId,
authMode: AuthMode.SERVICE_TOKEN as const,
serviceToken,
serviceTokenId: serviceToken.id,
actor,
authMethod: null,
token
};
break;
}
break;
case AuthMode.API_KEY: {
const user = await server.services.apiKey.fnValidateApiKey(token as string);
req.auth = {
authMode: AuthMode.API_KEY as const,
userId: user.id,
actor,
user,
orgId: "API_KEY", // We set the orgId to an arbitrary value, since we can't link an API key to a specific org. We have to deprecate API keys soon!
authMethod: null,
token: token as string
};
break;
}
case AuthMode.SCIM_TOKEN: {
const { orgId, scimTokenId } = await server.services.scim.fnValidateScimToken(token);
requestContext.set("orgId", orgId);
req.auth = { authMode: AuthMode.SCIM_TOKEN, actor, scimTokenId, orgId, authMethod: null };
break;
}
default:
throw new BadRequestError({ message: "Invalid token strategy provided" });
}
case AuthMode.SERVICE_TOKEN: {
const serviceToken = await server.services.serviceToken.fnValidateServiceToken(token);
requestContext.set("orgId", serviceToken.orgId);
req.auth = {
orgId: serviceToken.orgId,
authMode: AuthMode.SERVICE_TOKEN as const,
serviceToken,
serviceTokenId: serviceToken.id,
actor,
authMethod: null,
token
};
break;
}
case AuthMode.API_KEY: {
const user = await server.services.apiKey.fnValidateApiKey(token as string);
req.auth = {
authMode: AuthMode.API_KEY as const,
userId: user.id,
actor,
user,
orgId: "API_KEY", // We set the orgId to an arbitrary value, since we can't link an API key to a specific org. We have to deprecate API keys soon!
authMethod: null,
token: token as string
};
break;
}
case AuthMode.SCIM_TOKEN: {
const { orgId, scimTokenId } = await server.services.scim.fnValidateScimToken(token);
requestContext.set("orgId", orgId);
req.auth = { authMode: AuthMode.SCIM_TOKEN, actor, scimTokenId, orgId, authMethod: null };
break;
}
default:
throw new BadRequestError({ message: "Invalid token strategy provided" });
}
});
});
});
}
);
@@ -10,6 +10,10 @@ interface TAuthOptions {
export const verifyAuth =
<T extends FastifyRequest>(authStrategies: AuthMode[], options: TAuthOptions = { requireOrg: true }) =>
(req: T, _res: FastifyReply, done: HookHandlerDoneFunction) => {
if (req.shouldForwardWritesToPrimaryInstance && req.method !== "GET") {
return done();
}

if (!Array.isArray(authStrategies)) throw new Error("Auth strategy must be array");
if (!req.auth) throw new UnauthorizedError({ message: "Token missing" });
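Illustrative sketch (not part of the diff above): the req.shouldForwardWritesToPrimaryInstance flag read here is set via server.decorateRequest in inject-identity.ts. For that property to type-check, the request type needs a module augmentation along these lines; the field name comes from the diff, but the file it lives in is an assumption.

declare module "fastify" {
  interface FastifyRequest {
    // true when this instance proxies mutating requests to the primary instance
    shouldForwardWritesToPrimaryInstance: boolean;
  }
}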
14 backend/src/server/plugins/primary-forwarding-mode.ts Normal file
@@ -0,0 +1,14 @@
import replyFrom from "@fastify/reply-from";
import fp from "fastify-plugin";

export const forwardWritesToPrimary = fp(async (server, opt: { primaryUrl: string }) => {
  await server.register(replyFrom, {
    base: opt.primaryUrl
  });

  server.addHook("preValidation", async (request, reply) => {
    if (request.url.startsWith("/api") && ["POST", "PUT", "DELETE", "PATCH"].includes(request.method)) {
      return reply.from(request.url);
    }
  });
});
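Illustrative sketch (not part of the diff above): registering the plugin on a standalone Fastify instance. The server setup and URL are placeholders; the real wiring happens inside registerRoutes later in this diff.

import fastify from "fastify";

import { forwardWritesToPrimary } from "./primary-forwarding-mode";

const server = fastify();

// On a secondary instance, every mutating /api request is proxied to the
// primary via @fastify/reply-from; GET requests keep being served locally.
await server.register(forwardWritesToPrimary, {
  primaryUrl: "https://primary.infisical.internal"
});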
@@ -291,6 +291,8 @@ import { TSmtpService } from "@app/services/smtp/smtp-service";
import { invalidateCacheQueueFactory } from "@app/services/super-admin/invalidate-cache-queue";
import { TSuperAdminDALFactory } from "@app/services/super-admin/super-admin-dal";
import { getServerCfg, superAdminServiceFactory } from "@app/services/super-admin/super-admin-service";
import { offlineUsageReportDALFactory } from "@app/services/offline-usage-report/offline-usage-report-dal";
import { offlineUsageReportServiceFactory } from "@app/services/offline-usage-report/offline-usage-report-service";
import { telemetryDALFactory } from "@app/services/telemetry/telemetry-dal";
import { telemetryQueueServiceFactory } from "@app/services/telemetry/telemetry-queue";
import { telemetryServiceFactory } from "@app/services/telemetry/telemetry-service";
@@ -310,6 +312,7 @@ import { injectAssumePrivilege } from "../plugins/auth/inject-assume-privilege";
import { injectIdentity } from "../plugins/auth/inject-identity";
import { injectPermission } from "../plugins/auth/inject-permission";
import { injectRateLimits } from "../plugins/inject-rate-limits";
import { forwardWritesToPrimary } from "../plugins/primary-forwarding-mode";
import { registerV1Routes } from "./v1";
import { initializeOauthConfigSync } from "./v1/sso-router";
import { registerV2Routes } from "./v2";
@@ -385,6 +388,7 @@ export const registerRoutes = async (
const reminderRecipientDAL = reminderRecipientDALFactory(db);

const integrationDAL = integrationDALFactory(db);
const offlineUsageReportDAL = offlineUsageReportDALFactory(db);
const integrationAuthDAL = integrationAuthDALFactory(db);
const webhookDAL = webhookDALFactory(db);
const serviceTokenDAL = serviceTokenDALFactory(db);
@@ -680,7 +684,8 @@ export const registerRoutes = async (
kmsService,
permissionService,
groupDAL,
userGroupMembershipDAL
userGroupMembershipDAL,
orgMembershipDAL
});

const ldapService = ldapConfigServiceFactory({
@@ -726,7 +731,8 @@ export const registerRoutes = async (
permissionService,
groupProjectDAL,
smtpService,
projectMembershipDAL
projectMembershipDAL,
userAliasDAL
});

const totpService = totpServiceFactory({
@@ -840,7 +846,14 @@ export const registerRoutes = async (
licenseService,
kmsService,
microsoftTeamsService,
invalidateCacheQueue
invalidateCacheQueue,
smtpService,
tokenService
});

const offlineUsageReportService = offlineUsageReportServiceFactory({
offlineUsageReportDAL,
licenseService
});

const orgAdminService = orgAdminServiceFactory({
@@ -1455,7 +1468,8 @@ export const registerRoutes = async (
identityOrgMembershipDAL,
identityProjectDAL,
licenseService,
identityMetadataDAL
identityMetadataDAL,
keyStore
});

const identityAuthTemplateService = identityAuthTemplateServiceFactory({
@@ -1509,7 +1523,8 @@ export const registerRoutes = async (
identityAccessTokenDAL,
identityUaClientSecretDAL,
identityUaDAL,
licenseService
licenseService,
keyStore
});

const identityKubernetesAuthService = identityKubernetesAuthServiceFactory({
@@ -1743,7 +1758,8 @@ export const registerRoutes = async (
const migrationService = externalMigrationServiceFactory({
externalMigrationQueue,
userDAL,
permissionService
permissionService,
gatewayService
});

const externalGroupOrgRoleMappingService = externalGroupOrgRoleMappingServiceFactory({
@@ -1998,6 +2014,7 @@ export const registerRoutes = async (
apiKey: apiKeyService,
authToken: tokenService,
superAdmin: superAdminService,
offlineUsageReport: offlineUsageReportService,
project: projectService,
projectMembership: projectMembershipService,
projectKey: projectKeyService,
@@ -2130,8 +2147,14 @@ export const registerRoutes = async (
user: userDAL,
kmipClient: kmipClientDAL
});
const shouldForwardWritesToPrimaryInstance = Boolean(envConfig.INFISICAL_PRIMARY_INSTANCE_URL);
if (shouldForwardWritesToPrimaryInstance) {
logger.info(`Infisical primary instance is configured: ${envConfig.INFISICAL_PRIMARY_INSTANCE_URL}`);

await server.register(injectIdentity, { userDAL, serviceTokenDAL });
await server.register(forwardWritesToPrimary, { primaryUrl: envConfig.INFISICAL_PRIMARY_INSTANCE_URL as string });
}

await server.register(injectIdentity, { shouldForwardWritesToPrimaryInstance });
await server.register(injectAssumePrivilege);
await server.register(injectPermission);
await server.register(injectRateLimits);
@@ -13,6 +13,7 @@ import { crypto } from "@app/lib/crypto/cryptography";
|
||||
import { BadRequestError } from "@app/lib/errors";
|
||||
import { invalidateCacheLimit, readLimit, writeLimit } from "@app/server/config/rateLimiter";
|
||||
import { addAuthOriginDomainCookie } from "@app/server/lib/cookie";
|
||||
import { GenericResourceNameSchema } from "@app/server/lib/schemas";
|
||||
import { getTelemetryDistinctId } from "@app/server/lib/telemetry";
|
||||
import { verifySuperAdmin } from "@app/server/plugins/auth/superAdmin";
|
||||
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
|
||||
@@ -53,7 +54,8 @@ export const registerAdminRouter = async (server: FastifyZodProvider) => {
|
||||
defaultAuthOrgAuthMethod: z.string().nullish(),
|
||||
isSecretScanningDisabled: z.boolean(),
|
||||
kubernetesAutoFetchServiceAccountToken: z.boolean(),
|
||||
paramsFolderSecretDetectionEnabled: z.boolean()
|
||||
paramsFolderSecretDetectionEnabled: z.boolean(),
|
||||
isOfflineUsageReportsEnabled: z.boolean()
|
||||
})
|
||||
})
|
||||
}
|
||||
@@ -69,7 +71,8 @@ export const registerAdminRouter = async (server: FastifyZodProvider) => {
|
||||
isMigrationModeOn: serverEnvs.MAINTENANCE_MODE,
|
||||
isSecretScanningDisabled: serverEnvs.DISABLE_SECRET_SCANNING,
|
||||
kubernetesAutoFetchServiceAccountToken: serverEnvs.KUBERNETES_AUTO_FETCH_SERVICE_ACCOUNT_TOKEN,
|
||||
paramsFolderSecretDetectionEnabled: serverEnvs.PARAMS_FOLDER_SECRET_DETECTION_ENABLED
|
||||
paramsFolderSecretDetectionEnabled: serverEnvs.PARAMS_FOLDER_SECRET_DETECTION_ENABLED,
|
||||
isOfflineUsageReportsEnabled: !!serverEnvs.LICENSE_KEY_OFFLINE
|
||||
}
|
||||
};
|
||||
}
|
||||
@@ -215,7 +218,8 @@ export const registerAdminRouter = async (server: FastifyZodProvider) => {
|
||||
}),
|
||||
membershipId: z.string(),
|
||||
role: z.string(),
|
||||
roleId: z.string().nullish()
|
||||
roleId: z.string().nullish(),
|
||||
status: z.string().nullish()
|
||||
})
|
||||
.array(),
|
||||
projects: z
|
||||
@@ -838,4 +842,121 @@ export const registerAdminRouter = async (server: FastifyZodProvider) => {
|
||||
};
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "POST",
|
||||
url: "/organization-management/organizations",
|
||||
config: {
|
||||
rateLimit: writeLimit
|
||||
},
|
||||
schema: {
|
||||
body: z.object({
|
||||
name: GenericResourceNameSchema,
|
||||
inviteAdminEmails: z.string().email().array().min(1)
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
organization: OrganizationsSchema
|
||||
})
|
||||
}
|
||||
},
|
||||
onRequest: (req, res, done) => {
|
||||
verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN])(req, res, () => {
|
||||
verifySuperAdmin(req, res, done);
|
||||
});
|
||||
},
|
||||
handler: async (req) => {
|
||||
const organization = await server.services.superAdmin.createOrganization(req.body, req.permission);
|
||||
return { organization };
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "POST",
|
||||
url: "/organization-management/organizations/:organizationId/memberships/:membershipId/resend-invite",
|
||||
config: {
|
||||
rateLimit: writeLimit
|
||||
},
|
||||
schema: {
|
||||
params: z.object({
|
||||
organizationId: z.string(),
|
||||
membershipId: z.string()
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
organizationMembership: OrgMembershipsSchema
|
||||
})
|
||||
}
|
||||
},
|
||||
onRequest: (req, res, done) => {
|
||||
verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN])(req, res, () => {
|
||||
verifySuperAdmin(req, res, done);
|
||||
});
|
||||
},
|
||||
handler: async (req) => {
|
||||
const organizationMembership = await server.services.superAdmin.resendOrgInvite(req.params, req.permission);
|
||||
return { organizationMembership };
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "POST",
|
||||
url: "/organization-management/organizations/:organizationId/access",
|
||||
config: {
|
||||
rateLimit: writeLimit
|
||||
},
|
||||
schema: {
|
||||
params: z.object({
|
||||
organizationId: z.string()
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
organizationMembership: OrgMembershipsSchema
|
||||
})
|
||||
}
|
||||
},
|
||||
onRequest: (req, res, done) => {
|
||||
verifyAuth([AuthMode.JWT])(req, res, () => {
|
||||
verifySuperAdmin(req, res, done);
|
||||
});
|
||||
},
|
||||
handler: async (req) => {
|
||||
const organizationMembership = await server.services.superAdmin.joinOrganization(
|
||||
req.params.organizationId,
|
||||
req.permission
|
||||
);
|
||||
return { organizationMembership };
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "POST",
|
||||
url: "/usage-report/generate",
|
||||
config: {
|
||||
rateLimit: writeLimit
|
||||
},
|
||||
schema: {
|
||||
response: {
|
||||
200: z.object({
|
||||
csvContent: z.string(),
|
||||
signature: z.string(),
|
||||
filename: z.string()
|
||||
})
|
||||
}
|
||||
},
|
||||
onRequest: (req, res, done) => {
|
||||
verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN])(req, res, () => {
|
||||
verifySuperAdmin(req, res, done);
|
||||
});
|
||||
},
|
||||
handler: async () => {
|
||||
const result = await server.services.offlineUsageReport.generateUsageReportCSV();
|
||||
|
||||
return {
|
||||
csvContent: result.csvContent,
|
||||
signature: result.signature,
|
||||
filename: result.filename
|
||||
};
|
||||
}
|
||||
});
|
||||
};
|
||||
|
@@ -15,6 +15,10 @@ import {
|
||||
} from "@app/services/app-connection/1password";
|
||||
import { Auth0ConnectionListItemSchema, SanitizedAuth0ConnectionSchema } from "@app/services/app-connection/auth0";
|
||||
import { AwsConnectionListItemSchema, SanitizedAwsConnectionSchema } from "@app/services/app-connection/aws";
|
||||
import {
|
||||
AzureADCSConnectionListItemSchema,
|
||||
SanitizedAzureADCSConnectionSchema
|
||||
} from "@app/services/app-connection/azure-adcs/azure-adcs-connection-schemas";
|
||||
import {
|
||||
AzureAppConfigurationConnectionListItemSchema,
|
||||
SanitizedAzureAppConfigurationConnectionSchema
|
||||
@@ -150,7 +154,8 @@ const SanitizedAppConnectionSchema = z.union([
|
||||
...SanitizedSupabaseConnectionSchema.options,
|
||||
...SanitizedDigitalOceanConnectionSchema.options,
|
||||
...SanitizedNetlifyConnectionSchema.options,
|
||||
...SanitizedOktaConnectionSchema.options
|
||||
...SanitizedOktaConnectionSchema.options,
|
||||
...SanitizedAzureADCSConnectionSchema.options
|
||||
]);
|
||||
|
||||
const AppConnectionOptionsSchema = z.discriminatedUnion("app", [
|
||||
@@ -190,7 +195,8 @@ const AppConnectionOptionsSchema = z.discriminatedUnion("app", [
|
||||
SupabaseConnectionListItemSchema,
|
||||
DigitalOceanConnectionListItemSchema,
|
||||
NetlifyConnectionListItemSchema,
|
||||
OktaConnectionListItemSchema
|
||||
OktaConnectionListItemSchema,
|
||||
AzureADCSConnectionListItemSchema
|
||||
]);
|
||||
|
||||
export const registerAppConnectionRouter = async (server: FastifyZodProvider) => {
|
||||
|
@@ -0,0 +1,18 @@
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
import {
  CreateAzureADCSConnectionSchema,
  SanitizedAzureADCSConnectionSchema,
  UpdateAzureADCSConnectionSchema
} from "@app/services/app-connection/azure-adcs";

import { registerAppConnectionEndpoints } from "./app-connection-endpoints";

export const registerAzureADCSConnectionRouter = async (server: FastifyZodProvider) => {
  registerAppConnectionEndpoints({
    app: AppConnection.AzureADCS,
    server,
    sanitizedResponseSchema: SanitizedAzureADCSConnectionSchema,
    createSchema: CreateAzureADCSConnectionSchema,
    updateSchema: UpdateAzureADCSConnectionSchema
  });
};
@@ -53,4 +53,36 @@ export const registerChecklyConnectionRouter = async (server: FastifyZodProvider
|
||||
return { accounts };
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "GET",
|
||||
url: `/:connectionId/accounts/:accountId/groups`,
|
||||
config: {
|
||||
rateLimit: readLimit
|
||||
},
|
||||
schema: {
|
||||
params: z.object({
|
||||
connectionId: z.string().uuid(),
|
||||
accountId: z.string()
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
groups: z
|
||||
.object({
|
||||
name: z.string(),
|
||||
id: z.string()
|
||||
})
|
||||
.array()
|
||||
})
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT]),
|
||||
handler: async (req) => {
|
||||
const { connectionId, accountId } = req.params;
|
||||
|
||||
const groups = await server.services.appConnection.checkly.listGroups(connectionId, accountId, req.permission);
|
||||
|
||||
return { groups };
|
||||
}
|
||||
});
|
||||
};
|
||||
|
@@ -5,6 +5,7 @@ import { AppConnection } from "@app/services/app-connection/app-connection-enums
import { registerOnePassConnectionRouter } from "./1password-connection-router";
import { registerAuth0ConnectionRouter } from "./auth0-connection-router";
import { registerAwsConnectionRouter } from "./aws-connection-router";
import { registerAzureADCSConnectionRouter } from "./azure-adcs-connection-router";
import { registerAzureAppConfigurationConnectionRouter } from "./azure-app-configuration-connection-router";
import { registerAzureClientSecretsConnectionRouter } from "./azure-client-secrets-connection-router";
import { registerAzureDevOpsConnectionRouter } from "./azure-devops-connection-router";
@@ -50,6 +51,7 @@ export const APP_CONNECTION_REGISTER_ROUTER_MAP: Record<AppConnection, (server:
[AppConnection.AzureAppConfiguration]: registerAzureAppConfigurationConnectionRouter,
[AppConnection.AzureClientSecrets]: registerAzureClientSecretsConnectionRouter,
[AppConnection.AzureDevOps]: registerAzureDevOpsConnectionRouter,
[AppConnection.AzureADCS]: registerAzureADCSConnectionRouter,
[AppConnection.Databricks]: registerDatabricksConnectionRouter,
[AppConnection.Humanitec]: registerHumanitecConnectionRouter,
[AppConnection.TerraformCloud]: registerTerraformCloudConnectionRouter,
@@ -0,0 +1,78 @@
|
||||
import { z } from "zod";
|
||||
|
||||
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
|
||||
import { readLimit } from "@app/server/config/rateLimiter";
|
||||
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
|
||||
import { AuthMode } from "@app/services/auth/auth-type";
|
||||
import {
|
||||
AzureAdCsCertificateAuthoritySchema,
|
||||
CreateAzureAdCsCertificateAuthoritySchema,
|
||||
UpdateAzureAdCsCertificateAuthoritySchema
|
||||
} from "@app/services/certificate-authority/azure-ad-cs/azure-ad-cs-certificate-authority-schemas";
|
||||
import { CaType } from "@app/services/certificate-authority/certificate-authority-enums";
|
||||
|
||||
import { registerCertificateAuthorityEndpoints } from "./certificate-authority-endpoints";
|
||||
|
||||
export const registerAzureAdCsCertificateAuthorityRouter = async (server: FastifyZodProvider) => {
|
||||
registerCertificateAuthorityEndpoints({
|
||||
caType: CaType.AZURE_AD_CS,
|
||||
server,
|
||||
responseSchema: AzureAdCsCertificateAuthoritySchema,
|
||||
createSchema: CreateAzureAdCsCertificateAuthoritySchema,
|
||||
updateSchema: UpdateAzureAdCsCertificateAuthoritySchema
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "GET",
|
||||
url: "/:caId/templates",
|
||||
config: {
|
||||
rateLimit: readLimit
|
||||
},
|
||||
schema: {
|
||||
hide: false,
|
||||
description: "Get available certificate templates from Azure AD CS CA",
|
||||
params: z.object({
|
||||
caId: z.string().describe("Azure AD CS CA ID")
|
||||
}),
|
||||
querystring: z.object({
|
||||
projectId: z.string().describe("Project ID")
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
templates: z.array(
|
||||
z.object({
|
||||
id: z.string().describe("Template identifier"),
|
||||
name: z.string().describe("Template display name"),
|
||||
description: z.string().optional().describe("Template description")
|
||||
})
|
||||
)
|
||||
})
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
handler: async (req) => {
|
||||
const templates = await server.services.certificateAuthority.getAzureAdcsTemplates({
|
||||
caId: req.params.caId,
|
||||
projectId: req.query.projectId,
|
||||
actor: req.permission.type,
|
||||
actorId: req.permission.id,
|
||||
actorAuthMethod: req.permission.authMethod,
|
||||
actorOrgId: req.permission.orgId
|
||||
});
|
||||
|
||||
await server.services.auditLog.createAuditLog({
|
||||
...req.auditLogInfo,
|
||||
projectId: req.query.projectId,
|
||||
event: {
|
||||
type: EventType.GET_AZURE_AD_TEMPLATES,
|
||||
metadata: {
|
||||
caId: req.params.caId,
|
||||
amount: templates.length
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
return { templates };
|
||||
}
|
||||
});
|
||||
};
|
@@ -1,6 +1,7 @@
import { CaType } from "@app/services/certificate-authority/certificate-authority-enums";

import { registerAcmeCertificateAuthorityRouter } from "./acme-certificate-authority-router";
import { registerAzureAdCsCertificateAuthorityRouter } from "./azure-ad-cs-certificate-authority-router";
import { registerInternalCertificateAuthorityRouter } from "./internal-certificate-authority-router";

export * from "./internal-certificate-authority-router";
@@ -8,5 +9,6 @@ export * from "./internal-certificate-authority-router";
export const CERTIFICATE_AUTHORITY_REGISTER_ROUTER_MAP: Record<CaType, (server: FastifyZodProvider) => Promise<void>> =
{
[CaType.INTERNAL]: registerInternalCertificateAuthorityRouter,
[CaType.ACME]: registerAcmeCertificateAuthorityRouter
[CaType.ACME]: registerAcmeCertificateAuthorityRouter,
[CaType.AZURE_AD_CS]: registerAzureAdCsCertificateAuthorityRouter
};
@@ -703,6 +703,9 @@ export const registerDashboardRouter = async (server: FastifyZodProvider) => {
// prevent older projects from accessing endpoint
if (!shouldUseSecretV2Bridge) throw new BadRequestError({ message: "Project version not supported" });

// verify folder exists and user has project permission
await server.services.folder.getFolderByPath({ projectId, environment, secretPath }, req.permission);

const tags = req.query.tags?.split(",") ?? [];

let remainingLimit = limit;
@@ -250,7 +250,8 @@ export const registerIdentityRouter = async (server: FastifyZodProvider) => {
description: true
}).optional(),
identity: IdentitiesSchema.pick({ name: true, id: true, hasDeleteProtection: true }).extend({
authMethods: z.array(z.string())
authMethods: z.array(z.string()),
activeLockoutAuthMethods: z.array(z.string())
})
})
})
@@ -137,7 +137,21 @@ export const registerIdentityUaRouter = async (server: FastifyZodProvider) => {
|
||||
.min(0)
|
||||
.default(0)
|
||||
.describe(UNIVERSAL_AUTH.ATTACH.accessTokenNumUsesLimit),
|
||||
accessTokenPeriod: z.number().int().min(0).default(0).describe(UNIVERSAL_AUTH.ATTACH.accessTokenPeriod)
|
||||
accessTokenPeriod: z.number().int().min(0).default(0).describe(UNIVERSAL_AUTH.ATTACH.accessTokenPeriod),
|
||||
lockoutEnabled: z.boolean().default(true).describe(UNIVERSAL_AUTH.ATTACH.lockoutEnabled),
|
||||
lockoutThreshold: z.number().min(1).max(30).default(3).describe(UNIVERSAL_AUTH.ATTACH.lockoutThreshold),
|
||||
lockoutDurationSeconds: z
|
||||
.number()
|
||||
.min(30)
|
||||
.max(86400)
|
||||
.default(300)
|
||||
.describe(UNIVERSAL_AUTH.ATTACH.lockoutDurationSeconds),
|
||||
lockoutCounterResetSeconds: z
|
||||
.number()
|
||||
.min(5)
|
||||
.max(3600)
|
||||
.default(30)
|
||||
.describe(UNIVERSAL_AUTH.ATTACH.lockoutCounterResetSeconds)
|
||||
})
|
||||
.refine(
|
||||
(val) => val.accessTokenTTL <= val.accessTokenMaxTTL,
|
||||
@@ -171,7 +185,11 @@ export const registerIdentityUaRouter = async (server: FastifyZodProvider) => {
|
||||
accessTokenMaxTTL: identityUniversalAuth.accessTokenMaxTTL,
|
||||
accessTokenTrustedIps: identityUniversalAuth.accessTokenTrustedIps as TIdentityTrustedIp[],
|
||||
clientSecretTrustedIps: identityUniversalAuth.clientSecretTrustedIps as TIdentityTrustedIp[],
|
||||
accessTokenNumUsesLimit: identityUniversalAuth.accessTokenNumUsesLimit
|
||||
accessTokenNumUsesLimit: identityUniversalAuth.accessTokenNumUsesLimit,
|
||||
lockoutEnabled: identityUniversalAuth.lockoutEnabled,
|
||||
lockoutThreshold: identityUniversalAuth.lockoutThreshold,
|
||||
lockoutDurationSeconds: identityUniversalAuth.lockoutDurationSeconds,
|
||||
lockoutCounterResetSeconds: identityUniversalAuth.lockoutCounterResetSeconds
|
||||
}
|
||||
}
|
||||
});
|
||||
@@ -243,7 +261,21 @@ export const registerIdentityUaRouter = async (server: FastifyZodProvider) => {
|
||||
.min(0)
|
||||
.max(315360000)
|
||||
.optional()
|
||||
.describe(UNIVERSAL_AUTH.UPDATE.accessTokenPeriod)
|
||||
.describe(UNIVERSAL_AUTH.UPDATE.accessTokenPeriod),
|
||||
lockoutEnabled: z.boolean().optional().describe(UNIVERSAL_AUTH.UPDATE.lockoutEnabled),
|
||||
lockoutThreshold: z.number().min(1).max(30).optional().describe(UNIVERSAL_AUTH.UPDATE.lockoutThreshold),
|
||||
lockoutDurationSeconds: z
|
||||
.number()
|
||||
.min(30)
|
||||
.max(86400)
|
||||
.optional()
|
||||
.describe(UNIVERSAL_AUTH.UPDATE.lockoutDurationSeconds),
|
||||
lockoutCounterResetSeconds: z
|
||||
.number()
|
||||
.min(5)
|
||||
.max(3600)
|
||||
.optional()
|
||||
.describe(UNIVERSAL_AUTH.UPDATE.lockoutCounterResetSeconds)
|
||||
})
|
||||
.refine(
|
||||
(val) => (val.accessTokenMaxTTL && val.accessTokenTTL ? val.accessTokenTTL <= val.accessTokenMaxTTL : true),
|
||||
@@ -276,7 +308,11 @@ export const registerIdentityUaRouter = async (server: FastifyZodProvider) => {
|
||||
accessTokenMaxTTL: identityUniversalAuth.accessTokenMaxTTL,
|
||||
accessTokenTrustedIps: identityUniversalAuth.accessTokenTrustedIps as TIdentityTrustedIp[],
|
||||
clientSecretTrustedIps: identityUniversalAuth.clientSecretTrustedIps as TIdentityTrustedIp[],
|
||||
accessTokenNumUsesLimit: identityUniversalAuth.accessTokenNumUsesLimit
|
||||
accessTokenNumUsesLimit: identityUniversalAuth.accessTokenNumUsesLimit,
|
||||
lockoutEnabled: identityUniversalAuth.lockoutEnabled,
|
||||
lockoutThreshold: identityUniversalAuth.lockoutThreshold,
|
||||
lockoutDurationSeconds: identityUniversalAuth.lockoutDurationSeconds,
|
||||
lockoutCounterResetSeconds: identityUniversalAuth.lockoutCounterResetSeconds
|
||||
}
|
||||
}
|
||||
});
|
||||
@@ -594,4 +630,53 @@ export const registerIdentityUaRouter = async (server: FastifyZodProvider) => {
|
||||
return { clientSecretData };
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "POST",
|
||||
url: "/universal-auth/identities/:identityId/clear-lockouts",
|
||||
config: {
|
||||
rateLimit: writeLimit
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
schema: {
|
||||
hide: false,
|
||||
tags: [ApiDocsTags.UniversalAuth],
|
||||
description: "Clear Universal Auth Lockouts for identity",
|
||||
security: [
|
||||
{
|
||||
bearerAuth: []
|
||||
}
|
||||
],
|
||||
params: z.object({
|
||||
identityId: z.string().describe(UNIVERSAL_AUTH.CLEAR_CLIENT_LOCKOUTS.identityId)
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
deleted: z.number()
|
||||
})
|
||||
}
|
||||
},
|
||||
handler: async (req) => {
|
||||
const clearLockoutsData = await server.services.identityUa.clearUniversalAuthLockouts({
|
||||
actor: req.permission.type,
|
||||
actorId: req.permission.id,
|
||||
actorAuthMethod: req.permission.authMethod,
|
||||
actorOrgId: req.permission.orgId,
|
||||
identityId: req.params.identityId
|
||||
});
|
||||
|
||||
await server.services.auditLog.createAuditLog({
|
||||
...req.auditLogInfo,
|
||||
orgId: clearLockoutsData.orgId,
|
||||
event: {
|
||||
type: EventType.CLEAR_IDENTITY_UNIVERSAL_AUTH_LOCKOUTS,
|
||||
metadata: {
|
||||
identityId: clearLockoutsData.identityId
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
return clearLockoutsData;
|
||||
}
|
||||
});
|
||||
};
|
||||
|
@@ -1,3 +1,4 @@
|
||||
import RE2 from "re2";
|
||||
import { z } from "zod";
|
||||
|
||||
import { CertificatesSchema } from "@app/db/schemas";
|
||||
@@ -112,7 +113,88 @@ export const registerPkiSubscriberRouter = async (server: FastifyZodProvider) =>
|
||||
.transform((arr) => Array.from(new Set(arr)))
|
||||
.describe(PKI_SUBSCRIBERS.CREATE.extendedKeyUsages),
|
||||
enableAutoRenewal: z.boolean().optional().describe(PKI_SUBSCRIBERS.CREATE.enableAutoRenewal),
|
||||
autoRenewalPeriodInDays: z.number().min(1).optional().describe(PKI_SUBSCRIBERS.CREATE.autoRenewalPeriodInDays)
|
||||
autoRenewalPeriodInDays: z.number().min(1).optional().describe(PKI_SUBSCRIBERS.CREATE.autoRenewalPeriodInDays),
|
||||
properties: z
|
||||
.object({
|
||||
azureTemplateType: z.string().optional().describe("Azure ADCS Certificate Template Type"),
|
||||
organization: z
|
||||
.string()
|
||||
.trim()
|
||||
.min(1)
|
||||
.max(64, "Organization cannot exceed 64 characters")
|
||||
.regex(
|
||||
new RE2('^[^,=+<>#;\\\\"/\\r\\n\\t]*$'),
|
||||
'Organization contains invalid characters: , = + < > # ; \\ " / \\r \\n \\t'
|
||||
)
|
||||
.regex(
|
||||
new RE2("^[^\\\\s\\\\-_.]+.*[^\\\\s\\\\-_.]+$|^[^\\\\s\\\\-_.]{1}$"),
|
||||
"Organization cannot start or end with spaces, hyphens, underscores, or periods"
|
||||
)
|
||||
.optional()
|
||||
.describe("Organization (O) - Maximum 64 characters, no special DN characters"),
|
||||
organizationalUnit: z
|
||||
.string()
|
||||
.trim()
|
||||
.min(1)
|
||||
.max(64, "Organizational Unit cannot exceed 64 characters")
|
||||
.regex(
|
||||
new RE2('^[^,=+<>#;\\\\"/\\r\\n\\t]*$'),
|
||||
'Organizational Unit contains invalid characters: , = + < > # ; \\ " / \\r \\n \\t'
|
||||
)
|
||||
.regex(
|
||||
new RE2("^[^\\\\s\\\\-_.]+.*[^\\\\s\\\\-_.]+$|^[^\\\\s\\\\-_.]{1}$"),
|
||||
"Organizational Unit cannot start or end with spaces, hyphens, underscores, or periods"
|
||||
)
|
||||
.optional()
|
||||
.describe("Organizational Unit (OU) - Maximum 64 characters, no special DN characters"),
|
||||
country: z
|
||||
.string()
|
||||
.trim()
|
||||
.length(2, "Country must be exactly 2 characters")
|
||||
.regex(new RE2("^[A-Z]{2}$"), "Country must be exactly 2 uppercase letters")
|
||||
.optional()
|
||||
.describe("Country (C) - Two uppercase letter country code (e.g., US, CA, GB)"),
|
||||
state: z
|
||||
.string()
|
||||
.trim()
|
||||
.min(1)
|
||||
.max(64, "State cannot exceed 64 characters")
|
||||
.regex(
|
||||
new RE2('^[^,=+<>#;\\\\"/\\r\\n\\t]*$'),
|
||||
'State contains invalid characters: , = + < > # ; \\ " / \\r \\n \\t'
|
||||
)
|
||||
.regex(
|
||||
new RE2("^[^\\\\s\\\\-_.]+.*[^\\\\s\\\\-_.]+$|^[^\\\\s\\\\-_.]{1}$"),
|
||||
"State cannot start or end with spaces, hyphens, underscores, or periods"
|
||||
)
|
||||
.optional()
|
||||
.describe("State/Province (ST) - Maximum 64 characters, no special DN characters"),
|
||||
locality: z
|
||||
.string()
|
||||
.trim()
|
||||
.min(1)
|
||||
.max(64, "Locality cannot exceed 64 characters")
|
||||
.regex(
|
||||
new RE2('^[^,=+<>#;\\\\"/\\r\\n\\t]*$'),
|
||||
'Locality contains invalid characters: , = + < > # ; \\ " / \\r \\n \\t'
|
||||
)
|
||||
.regex(
|
||||
new RE2("^[^\\\\s\\\\-_.]+.*[^\\\\s\\\\-_.]+$|^[^\\\\s\\\\-_.]{1}$"),
|
||||
"Locality cannot start or end with spaces, hyphens, underscores, or periods"
|
||||
)
|
||||
.optional()
|
||||
.describe("Locality (L) - Maximum 64 characters, no special DN characters"),
|
||||
emailAddress: z
|
||||
.string()
|
||||
.trim()
|
||||
.email("Email Address must be a valid email format")
|
||||
.min(6, "Email Address must be at least 6 characters")
|
||||
.max(64, "Email Address cannot exceed 64 characters")
|
||||
.optional()
|
||||
.describe("Email Address - Valid email format between 6 and 64 characters")
|
||||
})
|
||||
.optional()
|
||||
.describe("Additional subscriber properties and subject fields")
|
||||
}),
|
||||
response: {
|
||||
200: sanitizedPkiSubscriber
|
||||
@@ -199,7 +281,88 @@ export const registerPkiSubscriberRouter = async (server: FastifyZodProvider) =>
|
||||
.optional()
|
||||
.describe(PKI_SUBSCRIBERS.UPDATE.extendedKeyUsages),
|
||||
enableAutoRenewal: z.boolean().optional().describe(PKI_SUBSCRIBERS.UPDATE.enableAutoRenewal),
|
||||
autoRenewalPeriodInDays: z.number().min(1).optional().describe(PKI_SUBSCRIBERS.UPDATE.autoRenewalPeriodInDays)
|
||||
autoRenewalPeriodInDays: z.number().min(1).optional().describe(PKI_SUBSCRIBERS.UPDATE.autoRenewalPeriodInDays),
|
||||
properties: z
|
||||
.object({
|
||||
azureTemplateType: z.string().optional().describe("Azure ADCS Certificate Template Type"),
|
||||
organization: z
|
||||
.string()
|
||||
.trim()
|
||||
.min(1)
|
||||
.max(64, "Organization cannot exceed 64 characters")
|
||||
.regex(
|
||||
new RE2('^[^,=+<>#;\\\\"/\\r\\n\\t]*$'),
|
||||
'Organization contains invalid characters: , = + < > # ; \\ " / \\r \\n \\t'
|
||||
)
|
||||
.regex(
|
||||
new RE2("^[^\\\\s\\\\-_.]+.*[^\\\\s\\\\-_.]+$|^[^\\\\s\\\\-_.]{1}$"),
|
||||
"Organization cannot start or end with spaces, hyphens, underscores, or periods"
|
||||
)
|
||||
.optional()
|
||||
.describe("Organization (O) - Maximum 64 characters, no special DN characters"),
|
||||
organizationalUnit: z
|
||||
.string()
|
||||
.trim()
|
||||
.min(1)
|
||||
.max(64, "Organizational Unit cannot exceed 64 characters")
|
||||
.regex(
|
||||
new RE2('^[^,=+<>#;\\\\"/\\r\\n\\t]*$'),
|
||||
'Organizational Unit contains invalid characters: , = + < > # ; \\ " / \\r \\n \\t'
|
||||
)
|
||||
.regex(
|
||||
new RE2("^[^\\\\s\\\\-_.]+.*[^\\\\s\\\\-_.]+$|^[^\\\\s\\\\-_.]{1}$"),
|
||||
"Organizational Unit cannot start or end with spaces, hyphens, underscores, or periods"
|
||||
)
|
||||
.optional()
|
||||
.describe("Organizational Unit (OU) - Maximum 64 characters, no special DN characters"),
|
||||
country: z
|
||||
.string()
|
||||
.trim()
|
||||
.length(2, "Country must be exactly 2 characters")
|
||||
.regex(new RE2("^[A-Z]{2}$"), "Country must be exactly 2 uppercase letters")
|
||||
.optional()
|
||||
.describe("Country (C) - Two uppercase letter country code (e.g., US, CA, GB)"),
|
||||
state: z
|
||||
.string()
|
||||
.trim()
|
||||
.min(1)
|
||||
.max(64, "State cannot exceed 64 characters")
|
||||
.regex(
|
||||
new RE2('^[^,=+<>#;\\\\"/\\r\\n\\t]*$'),
|
||||
'State contains invalid characters: , = + < > # ; \\ " / \\r \\n \\t'
|
||||
)
|
||||
.regex(
|
||||
new RE2("^[^\\\\s\\\\-_.]+.*[^\\\\s\\\\-_.]+$|^[^\\\\s\\\\-_.]{1}$"),
|
||||
"State cannot start or end with spaces, hyphens, underscores, or periods"
|
||||
)
|
||||
.optional()
|
||||
.describe("State/Province (ST) - Maximum 64 characters, no special DN characters"),
|
||||
locality: z
|
||||
.string()
|
||||
.trim()
|
||||
.min(1)
|
||||
.max(64, "Locality cannot exceed 64 characters")
|
||||
.regex(
|
||||
new RE2('^[^,=+<>#;\\\\"/\\r\\n\\t]*$'),
|
||||
'Locality contains invalid characters: , = + < > # ; \\ " / \\r \\n \\t'
|
||||
)
|
||||
.regex(
|
||||
new RE2("^[^\\\\s\\\\-_.]+.*[^\\\\s\\\\-_.]+$|^[^\\\\s\\\\-_.]{1}$"),
|
||||
"Locality cannot start or end with spaces, hyphens, underscores, or periods"
|
||||
)
|
||||
.optional()
|
||||
.describe("Locality (L) - Maximum 64 characters, no special DN characters"),
|
||||
emailAddress: z
|
||||
.string()
|
||||
.trim()
|
||||
.email("Email Address must be a valid email format")
|
||||
.min(6, "Email Address must be at least 6 characters")
|
||||
.max(64, "Email Address cannot exceed 64 characters")
|
||||
.optional()
|
||||
.describe("Email Address - Valid email format between 6 and 64 characters")
|
||||
})
|
||||
.optional()
|
||||
.describe("Additional subscriber properties and subject fields")
|
||||
}),
|
||||
response: {
|
||||
200: sanitizedPkiSubscriber
|
||||
|
@@ -108,7 +108,11 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
firstName: true,
lastName: true,
id: true
}).merge(UserEncryptionKeysSchema.pick({ publicKey: true })),
})
.merge(UserEncryptionKeysSchema.pick({ publicKey: true }))
.extend({
isOrgMembershipActive: z.boolean()
}),
project: SanitizedProjectSchema.pick({ name: true, id: true }),
roles: z.array(
z.object({
@@ -6,12 +6,14 @@ import { readLimit } from "@app/server/config/rateLimiter";
|
||||
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
|
||||
import { AuthMode } from "@app/services/auth/auth-type";
|
||||
import { AcmeCertificateAuthoritySchema } from "@app/services/certificate-authority/acme/acme-certificate-authority-schemas";
|
||||
import { AzureAdCsCertificateAuthoritySchema } from "@app/services/certificate-authority/azure-ad-cs/azure-ad-cs-certificate-authority-schemas";
|
||||
import { CaType } from "@app/services/certificate-authority/certificate-authority-enums";
|
||||
import { InternalCertificateAuthoritySchema } from "@app/services/certificate-authority/internal/internal-certificate-authority-schemas";
|
||||
|
||||
const CertificateAuthoritySchema = z.discriminatedUnion("type", [
|
||||
InternalCertificateAuthoritySchema,
|
||||
AcmeCertificateAuthoritySchema
|
||||
AcmeCertificateAuthoritySchema,
|
||||
AzureAdCsCertificateAuthoritySchema
|
||||
]);
|
||||
|
||||
export const registerCaRouter = async (server: FastifyZodProvider) => {
|
||||
@@ -52,19 +54,31 @@ export const registerCaRouter = async (server: FastifyZodProvider) => {
|
||||
req.permission
|
||||
);
|
||||
|
||||
const azureAdCsCas = await server.services.certificateAuthority.listCertificateAuthoritiesByProjectId(
|
||||
{
|
||||
projectId: req.query.projectId,
|
||||
type: CaType.AZURE_AD_CS
|
||||
},
|
||||
req.permission
|
||||
);
|
||||
|
||||
await server.services.auditLog.createAuditLog({
|
||||
...req.auditLogInfo,
|
||||
projectId: req.query.projectId,
|
||||
event: {
|
||||
type: EventType.GET_CAS,
|
||||
metadata: {
|
||||
caIds: [...(internalCas ?? []).map((ca) => ca.id), ...(acmeCas ?? []).map((ca) => ca.id)]
|
||||
caIds: [
|
||||
...(internalCas ?? []).map((ca) => ca.id),
|
||||
...(acmeCas ?? []).map((ca) => ca.id),
|
||||
...(azureAdCsCas ?? []).map((ca) => ca.id)
|
||||
]
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
return {
|
||||
certificateAuthorities: [...(internalCas ?? []), ...(acmeCas ?? [])]
|
||||
certificateAuthorities: [...(internalCas ?? []), ...(acmeCas ?? []), ...(azureAdCsCas ?? [])]
|
||||
};
|
||||
}
|
||||
});
|
||||
|
@@ -18,14 +18,14 @@ export const registerUserRouter = async (server: FastifyZodProvider) => {
},
schema: {
body: z.object({
username: z.string().trim()
token: z.string().trim()
}),
response: {
200: z.object({})
}
},
handler: async (req) => {
await server.services.user.sendEmailVerificationCode(req.body.username);
await server.services.user.sendEmailVerificationCode(req.body.token);
return {};
}
});
@@ -2,10 +2,13 @@ import fastifyMultipart from "@fastify/multipart";
|
||||
import { z } from "zod";
|
||||
|
||||
import { BadRequestError } from "@app/lib/errors";
|
||||
import { writeLimit } from "@app/server/config/rateLimiter";
|
||||
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
|
||||
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
|
||||
import { AuthMode } from "@app/services/auth/auth-type";
|
||||
import { VaultMappingType } from "@app/services/external-migration/external-migration-types";
|
||||
import {
|
||||
ExternalMigrationProviders,
|
||||
VaultMappingType
|
||||
} from "@app/services/external-migration/external-migration-types";
|
||||
|
||||
const MB25_IN_BYTES = 26214400;
|
||||
|
||||
@@ -66,7 +69,8 @@ export const registerExternalMigrationRouter = async (server: FastifyZodProvider
|
||||
vaultAccessToken: z.string(),
|
||||
vaultNamespace: z.string().trim().optional(),
|
||||
vaultUrl: z.string(),
|
||||
mappingType: z.nativeEnum(VaultMappingType)
|
||||
mappingType: z.nativeEnum(VaultMappingType),
|
||||
gatewayId: z.string().optional()
|
||||
})
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT]),
|
||||
@@ -80,4 +84,33 @@ export const registerExternalMigrationRouter = async (server: FastifyZodProvider
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "GET",
|
||||
url: "/custom-migration-enabled/:provider",
|
||||
config: {
|
||||
rateLimit: readLimit
|
||||
},
|
||||
schema: {
|
||||
params: z.object({
|
||||
provider: z.nativeEnum(ExternalMigrationProviders)
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
enabled: z.boolean()
|
||||
})
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT]),
|
||||
handler: async (req) => {
|
||||
const enabled = await server.services.migration.hasCustomVaultMigration({
|
||||
actorId: req.permission.id,
|
||||
actor: req.permission.type,
|
||||
actorOrgId: req.permission.orgId,
|
||||
actorAuthMethod: req.permission.authMethod,
|
||||
provider: req.params.provider
|
||||
});
|
||||
return { enabled };
|
||||
}
|
||||
});
|
||||
};
|
||||
|
@@ -419,6 +419,7 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
200: z.object({
secret: secretRawSchema.extend({
secretValueHidden: z.boolean(),
secretPath: z.string(),
tags: SanitizedTagSchema.array().optional(),
secretMetadata: ResourceMetadataSchema.optional()
})
@@ -8,6 +8,7 @@ export enum AppConnection {
AzureAppConfiguration = "azure-app-configuration",
AzureClientSecrets = "azure-client-secrets",
AzureDevOps = "azure-devops",
AzureADCS = "azure-adcs",
Humanitec = "humanitec",
TerraformCloud = "terraform-cloud",
Vercel = "vercel",
@@ -31,6 +31,11 @@ import {
|
||||
} from "./app-connection-types";
|
||||
import { Auth0ConnectionMethod, getAuth0ConnectionListItem, validateAuth0ConnectionCredentials } from "./auth0";
|
||||
import { AwsConnectionMethod, getAwsConnectionListItem, validateAwsConnectionCredentials } from "./aws";
|
||||
import { AzureADCSConnectionMethod } from "./azure-adcs";
|
||||
import {
|
||||
getAzureADCSConnectionListItem,
|
||||
validateAzureADCSConnectionCredentials
|
||||
} from "./azure-adcs/azure-adcs-connection-fns";
|
||||
import {
|
||||
AzureAppConfigurationConnectionMethod,
|
||||
getAzureAppConfigurationConnectionListItem,
|
||||
@@ -136,6 +141,7 @@ export const listAppConnectionOptions = () => {
|
||||
getAzureKeyVaultConnectionListItem(),
|
||||
getAzureAppConfigurationConnectionListItem(),
|
||||
getAzureDevopsConnectionListItem(),
|
||||
getAzureADCSConnectionListItem(),
|
||||
getDatabricksConnectionListItem(),
|
||||
getHumanitecConnectionListItem(),
|
||||
getTerraformCloudConnectionListItem(),
|
||||
@@ -227,6 +233,7 @@ export const validateAppConnectionCredentials = async (
|
||||
[AppConnection.AzureClientSecrets]:
|
||||
validateAzureClientSecretsConnectionCredentials as TAppConnectionCredentialsValidator,
|
||||
[AppConnection.AzureDevOps]: validateAzureDevOpsConnectionCredentials as TAppConnectionCredentialsValidator,
|
||||
[AppConnection.AzureADCS]: validateAzureADCSConnectionCredentials as TAppConnectionCredentialsValidator,
|
||||
[AppConnection.Humanitec]: validateHumanitecConnectionCredentials as TAppConnectionCredentialsValidator,
|
||||
[AppConnection.Postgres]: validateSqlConnectionCredentials as TAppConnectionCredentialsValidator,
|
||||
[AppConnection.MsSql]: validateSqlConnectionCredentials as TAppConnectionCredentialsValidator,
|
||||
@@ -300,6 +307,7 @@ export const getAppConnectionMethodName = (method: TAppConnection["method"]) =>
|
||||
case MsSqlConnectionMethod.UsernameAndPassword:
|
||||
case MySqlConnectionMethod.UsernameAndPassword:
|
||||
case OracleDBConnectionMethod.UsernameAndPassword:
|
||||
case AzureADCSConnectionMethod.UsernamePassword:
|
||||
return "Username & Password";
|
||||
case WindmillConnectionMethod.AccessToken:
|
||||
case HCVaultConnectionMethod.AccessToken:
|
||||
@@ -357,6 +365,7 @@ export const TRANSITION_CONNECTION_CREDENTIALS_TO_PLATFORM: Record<
|
||||
[AppConnection.AzureKeyVault]: platformManagedCredentialsNotSupported,
|
||||
[AppConnection.AzureAppConfiguration]: platformManagedCredentialsNotSupported,
|
||||
[AppConnection.AzureDevOps]: platformManagedCredentialsNotSupported,
|
||||
[AppConnection.AzureADCS]: platformManagedCredentialsNotSupported,
|
||||
[AppConnection.Humanitec]: platformManagedCredentialsNotSupported,
|
||||
[AppConnection.Postgres]: transferSqlConnectionCredentialsToPlatform as TAppConnectionTransitionCredentialsToPlatform,
|
||||
[AppConnection.MsSql]: transferSqlConnectionCredentialsToPlatform as TAppConnectionTransitionCredentialsToPlatform,
|
||||
|
@@ -9,6 +9,7 @@ export const APP_CONNECTION_NAME_MAP: Record<AppConnection, string> = {
[AppConnection.AzureAppConfiguration]: "Azure App Configuration",
[AppConnection.AzureClientSecrets]: "Azure Client Secrets",
[AppConnection.AzureDevOps]: "Azure DevOps",
[AppConnection.AzureADCS]: "Azure ADCS",
[AppConnection.Databricks]: "Databricks",
[AppConnection.Humanitec]: "Humanitec",
[AppConnection.TerraformCloud]: "Terraform Cloud",
@@ -49,6 +50,7 @@ export const APP_CONNECTION_PLAN_MAP: Record<AppConnection, AppConnectionPlanTyp
[AppConnection.AzureAppConfiguration]: AppConnectionPlanType.Regular,
[AppConnection.AzureClientSecrets]: AppConnectionPlanType.Regular,
[AppConnection.AzureDevOps]: AppConnectionPlanType.Regular,
[AppConnection.AzureADCS]: AppConnectionPlanType.Regular,
[AppConnection.Databricks]: AppConnectionPlanType.Regular,
[AppConnection.Humanitec]: AppConnectionPlanType.Regular,
[AppConnection.TerraformCloud]: AppConnectionPlanType.Regular,
@@ -45,6 +45,7 @@ import {
|
||||
import { ValidateAuth0ConnectionCredentialsSchema } from "./auth0";
|
||||
import { ValidateAwsConnectionCredentialsSchema } from "./aws";
|
||||
import { awsConnectionService } from "./aws/aws-connection-service";
|
||||
import { ValidateAzureADCSConnectionCredentialsSchema } from "./azure-adcs/azure-adcs-connection-schemas";
|
||||
import { ValidateAzureAppConfigurationConnectionCredentialsSchema } from "./azure-app-configuration";
|
||||
import { ValidateAzureClientSecretsConnectionCredentialsSchema } from "./azure-client-secrets";
|
||||
import { azureClientSecretsConnectionService } from "./azure-client-secrets/azure-client-secrets-service";
|
||||
@@ -122,6 +123,7 @@ const VALIDATE_APP_CONNECTION_CREDENTIALS_MAP: Record<AppConnection, TValidateAp
|
||||
[AppConnection.AzureKeyVault]: ValidateAzureKeyVaultConnectionCredentialsSchema,
|
||||
[AppConnection.AzureAppConfiguration]: ValidateAzureAppConfigurationConnectionCredentialsSchema,
|
||||
[AppConnection.AzureDevOps]: ValidateAzureDevOpsConnectionCredentialsSchema,
|
||||
[AppConnection.AzureADCS]: ValidateAzureADCSConnectionCredentialsSchema,
|
||||
[AppConnection.Databricks]: ValidateDatabricksConnectionCredentialsSchema,
|
||||
[AppConnection.Humanitec]: ValidateHumanitecConnectionCredentialsSchema,
|
||||
[AppConnection.TerraformCloud]: ValidateTerraformCloudConnectionCredentialsSchema,
|
||||
@@ -598,7 +600,7 @@ export const appConnectionServiceFactory = ({
|
||||
azureClientSecrets: azureClientSecretsConnectionService(connectAppConnectionById, appConnectionDAL, kmsService),
|
||||
azureDevOps: azureDevOpsConnectionService(connectAppConnectionById, appConnectionDAL, kmsService),
|
||||
auth0: auth0ConnectionService(connectAppConnectionById, appConnectionDAL, kmsService),
|
||||
hcvault: hcVaultConnectionService(connectAppConnectionById),
|
||||
hcvault: hcVaultConnectionService(connectAppConnectionById, gatewayService),
|
||||
windmill: windmillConnectionService(connectAppConnectionById),
|
||||
teamcity: teamcityConnectionService(connectAppConnectionById),
|
||||
oci: ociConnectionService(connectAppConnectionById, licenseService),
|
||||
|
@@ -33,6 +33,12 @@ import {
|
||||
TAwsConnectionInput,
|
||||
TValidateAwsConnectionCredentialsSchema
|
||||
} from "./aws";
|
||||
import {
|
||||
TAzureADCSConnection,
|
||||
TAzureADCSConnectionConfig,
|
||||
TAzureADCSConnectionInput,
|
||||
TValidateAzureADCSConnectionCredentialsSchema
|
||||
} from "./azure-adcs/azure-adcs-connection-types";
|
||||
import {
|
||||
TAzureAppConfigurationConnection,
|
||||
TAzureAppConfigurationConnectionConfig,
|
||||
@@ -223,6 +229,7 @@ export type TAppConnection = { id: string } & (
|
||||
| TAzureKeyVaultConnection
|
||||
| TAzureAppConfigurationConnection
|
||||
| TAzureDevOpsConnection
|
||||
| TAzureADCSConnection
|
||||
| TDatabricksConnection
|
||||
| THumanitecConnection
|
||||
| TTerraformCloudConnection
|
||||
@@ -267,6 +274,7 @@ export type TAppConnectionInput = { id: string } & (
|
||||
| TAzureKeyVaultConnectionInput
|
||||
| TAzureAppConfigurationConnectionInput
|
||||
| TAzureDevOpsConnectionInput
|
||||
| TAzureADCSConnectionInput
|
||||
| TDatabricksConnectionInput
|
||||
| THumanitecConnectionInput
|
||||
| TTerraformCloudConnectionInput
|
||||
@@ -322,6 +330,7 @@ export type TAppConnectionConfig =
|
||||
| TAzureKeyVaultConnectionConfig
|
||||
| TAzureAppConfigurationConnectionConfig
|
||||
| TAzureDevOpsConnectionConfig
|
||||
| TAzureADCSConnectionConfig
|
||||
| TAzureClientSecretsConnectionConfig
|
||||
| TDatabricksConnectionConfig
|
||||
| THumanitecConnectionConfig
|
||||
@@ -359,6 +368,7 @@ export type TValidateAppConnectionCredentialsSchema =
|
||||
| TValidateAzureAppConfigurationConnectionCredentialsSchema
|
||||
| TValidateAzureClientSecretsConnectionCredentialsSchema
|
||||
| TValidateAzureDevOpsConnectionCredentialsSchema
|
||||
| TValidateAzureADCSConnectionCredentialsSchema
|
||||
| TValidateDatabricksConnectionCredentialsSchema
|
||||
| TValidateHumanitecConnectionCredentialsSchema
|
||||
| TValidatePostgresConnectionCredentialsSchema
|
||||
|
@@ -91,7 +91,7 @@ export const validateAuth0ConnectionCredentials = async ({ credentials }: TAuth0
|
||||
};
|
||||
} catch (e: unknown) {
|
||||
throw new BadRequestError({
|
||||
message: (e as Error).message ?? `Unable to validate connection: verify credentials`
|
||||
message: (e as Error).message ?? "Unable to validate connection: verify credentials"
|
||||
});
|
||||
}
|
||||
};
|
||||
|
@@ -0,0 +1,3 @@
|
||||
export enum AzureADCSConnectionMethod {
|
||||
UsernamePassword = "username-password"
|
||||
}
|
@@ -0,0 +1,455 @@
|
||||
/* eslint-disable no-case-declarations, @typescript-eslint/no-unsafe-assignment, @typescript-eslint/no-unsafe-call, @typescript-eslint/no-unsafe-member-access, @typescript-eslint/no-var-requires, no-await-in-loop, no-continue */
|
||||
import { NtlmClient } from "axios-ntlm";
|
||||
import https from "https";
|
||||
|
||||
import { BadRequestError, NotFoundError } from "@app/lib/errors";
|
||||
import { blockLocalAndPrivateIpAddresses } from "@app/lib/validator/validate-url";
|
||||
import { decryptAppConnectionCredentials } from "@app/services/app-connection/app-connection-fns";
|
||||
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
|
||||
|
||||
import { TAppConnectionDALFactory } from "../app-connection-dal";
|
||||
import { AppConnection } from "../app-connection-enums";
|
||||
import { AzureADCSConnectionMethod } from "./azure-adcs-connection-enums";
|
||||
import { TAzureADCSConnectionConfig } from "./azure-adcs-connection-types";
|
||||
|
||||
// Type definitions for axios-ntlm
|
||||
interface AxiosNtlmConfig {
|
||||
ntlm: {
|
||||
domain: string;
|
||||
username: string;
|
||||
password: string;
|
||||
};
|
||||
httpsAgent?: https.Agent;
|
||||
url: string;
|
||||
method?: string;
|
||||
data?: string;
|
||||
headers?: Record<string, string>;
|
||||
}
|
||||
|
||||
interface AxiosNtlmResponse {
|
||||
status: number;
|
||||
data: string;
|
||||
headers: unknown;
|
||||
}
|
||||
|
||||
// Types for credential parsing
|
||||
interface ParsedCredentials {
|
||||
domain: string;
|
||||
username: string;
|
||||
fullUsername: string; // domain\username format
|
||||
}
|
||||
|
||||
// Helper function to parse and normalize credentials for Windows authentication
|
||||
const parseCredentials = (inputUsername: string): ParsedCredentials => {
|
||||
// Ensure inputUsername is a string
|
||||
if (typeof inputUsername !== "string" || !inputUsername.trim()) {
|
||||
throw new BadRequestError({
|
||||
message: "Username must be a non-empty string"
|
||||
});
|
||||
}
|
||||
|
||||
let domain = "";
|
||||
let username = "";
|
||||
let fullUsername = "";
|
||||
|
||||
if (inputUsername.includes("\\")) {
|
||||
// Already in domain\username format
|
||||
const parts = inputUsername.split("\\");
|
||||
if (parts.length === 2) {
|
||||
[domain, username] = parts;
|
||||
fullUsername = inputUsername;
|
||||
} else {
|
||||
throw new BadRequestError({
|
||||
message: "Invalid domain\\username format. Expected format: DOMAIN\\username"
|
||||
});
|
||||
}
|
||||
} else if (inputUsername.includes("@")) {
|
||||
// UPN format: user@domain.com
|
||||
const [user, domainPart] = inputUsername.split("@");
|
||||
if (!user || !domainPart) {
|
||||
throw new BadRequestError({
|
||||
message: "Invalid UPN format. Expected format: user@domain.com"
|
||||
});
|
||||
}
|
||||
|
||||
username = user;
|
||||
// Extract NetBIOS name from FQDN
|
||||
domain = domainPart.split(".")[0].toUpperCase();
|
||||
fullUsername = `${domain}\\${username}`;
|
||||
} else {
|
||||
// Plain username - assume local account or current domain
|
||||
username = inputUsername;
|
||||
domain = "";
|
||||
fullUsername = inputUsername;
|
||||
}
|
||||
|
||||
return { domain, username, fullUsername };
|
||||
};
|
||||
|
||||
// Helper to normalize URL
|
||||
const normalizeAdcsUrl = (url: string): string => {
|
||||
let normalizedUrl = url.trim();
|
||||
|
||||
// Remove trailing slash
|
||||
normalizedUrl = normalizedUrl.replace(/\/$/, "");
|
||||
|
||||
// Ensure HTTPS protocol
|
||||
if (normalizedUrl.startsWith("http://")) {
|
||||
normalizedUrl = normalizedUrl.replace("http://", "https://");
|
||||
} else if (!normalizedUrl.startsWith("https://")) {
|
||||
normalizedUrl = `https://${normalizedUrl}`;
|
||||
}
|
||||
|
||||
return normalizedUrl;
|
||||
};
|
||||
|
||||
// NTLM request wrapper
|
||||
const createHttpsAgent = (sslRejectUnauthorized: boolean, sslCertificate?: string): https.Agent => {
|
||||
const agentOptions: https.AgentOptions = {
|
||||
rejectUnauthorized: sslRejectUnauthorized,
|
||||
keepAlive: true, // axios-ntlm needs keepAlive for NTLM handshake
|
||||
ca: sslCertificate ? [sslCertificate.trim()] : undefined,
|
||||
// Disable hostname verification as Microsoft servers by default use local IPs for certificates
|
||||
// which may not match the hostname used to connect
|
||||
checkServerIdentity: () => undefined
|
||||
};
|
||||
|
||||
return new https.Agent(agentOptions);
|
||||
};
|
||||
|
||||
const axiosNtlmRequest = async (config: AxiosNtlmConfig): Promise<AxiosNtlmResponse> => {
|
||||
const method = config.method || "GET";
|
||||
|
||||
const credentials = {
|
||||
username: config.ntlm.username,
|
||||
password: config.ntlm.password,
|
||||
domain: config.ntlm.domain || "",
|
||||
workstation: ""
|
||||
};
|
||||
|
||||
const axiosConfig = {
|
||||
httpsAgent: config.httpsAgent,
|
||||
timeout: 30000
|
||||
};
|
||||
|
||||
const client = NtlmClient(credentials, axiosConfig);
|
||||
|
||||
const requestOptions: { url: string; method: string; data?: string; headers?: Record<string, string> } = {
|
||||
url: config.url,
|
||||
method
|
||||
};
|
||||
|
||||
if (config.data) {
|
||||
requestOptions.data = config.data;
|
||||
}
|
||||
|
||||
if (config.headers) {
|
||||
requestOptions.headers = config.headers;
|
||||
}
|
||||
|
||||
const response = await client(requestOptions);
|
||||
|
||||
return {
|
||||
status: response.status,
|
||||
data: response.data,
|
||||
headers: response.headers
|
||||
};
|
||||
};
|
||||
|
||||
// Test ADCS connectivity and authentication using NTLM
|
||||
const testAdcsConnection = async (
|
||||
credentials: ParsedCredentials,
|
||||
password: string,
|
||||
baseUrl: string,
|
||||
sslRejectUnauthorized: boolean = true,
|
||||
sslCertificate?: string
|
||||
): Promise<boolean> => {
|
||||
// Test endpoints in order of preference
|
||||
const testEndpoints = [
|
||||
"/certsrv/certrqus.asp", // Certificate request status (most reliable)
|
||||
"/certsrv/certfnsh.asp", // Certificate finalization
|
||||
"/certsrv/default.asp", // Main ADCS page
|
||||
"/certsrv/" // Root certsrv
|
||||
];
|
||||
|
||||
for (const endpoint of testEndpoints) {
|
||||
try {
|
||||
const testUrl = `${baseUrl}${endpoint}`;
|
||||
|
||||
const shouldRejectUnauthorized = sslRejectUnauthorized;
|
||||
|
||||
const httpsAgent = createHttpsAgent(shouldRejectUnauthorized, sslCertificate);
|
||||
|
||||
const response = await axiosNtlmRequest({
|
||||
url: testUrl,
|
||||
method: "GET",
|
||||
httpsAgent,
|
||||
ntlm: {
|
||||
domain: credentials.domain,
|
||||
username: credentials.username,
|
||||
password
|
||||
}
|
||||
});
|
||||
|
||||
// Check if we got a successful response
|
||||
if (response.status === 200) {
|
||||
const responseText = response.data;
|
||||
|
||||
// Verify this is actually an ADCS server by checking content
|
||||
const adcsIndicators = [
|
||||
"Microsoft Active Directory Certificate Services",
|
||||
"Certificate Services",
|
||||
"Request a certificate",
|
||||
"certsrv",
|
||||
"Certificate Template",
|
||||
"Web Enrollment"
|
||||
];
|
||||
|
||||
const isAdcsServer = adcsIndicators.some((indicator) =>
|
||||
responseText.toLowerCase().includes(indicator.toLowerCase())
|
||||
);
|
||||
|
||||
if (isAdcsServer) {
|
||||
// Successfully authenticated and confirmed ADCS
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
if (response.status === 401) {
|
||||
throw new BadRequestError({
|
||||
message: "Authentication failed. Please verify your credentials are correct."
|
||||
});
|
||||
}
|
||||
|
||||
if (response.status === 403) {
|
||||
throw new BadRequestError({
|
||||
message: "Access denied. Your account may not have permission to access ADCS web enrollment."
|
||||
});
|
||||
}
|
||||
} catch (error) {
|
||||
if (error instanceof BadRequestError) {
|
||||
throw error;
|
||||
}
|
||||
|
||||
// Handle network and connection errors
|
||||
if (error instanceof Error) {
|
||||
if (error.message.includes("ENOTFOUND")) {
|
||||
throw new BadRequestError({
|
||||
message: "Cannot resolve ADCS server hostname. Please verify the URL is correct."
|
||||
});
|
||||
}
|
||||
if (error.message.includes("ECONNREFUSED")) {
|
||||
throw new BadRequestError({
|
||||
message: "Connection refused by ADCS server. Please verify the server is running and accessible."
|
||||
});
|
||||
}
|
||||
if (error.message.includes("ETIMEDOUT") || error.message.includes("timeout")) {
|
||||
throw new BadRequestError({
|
||||
message: "Connection timeout. Please verify the server is accessible and not blocked by firewall."
|
||||
});
|
||||
}
|
||||
if (error.message.includes("certificate") || error.message.includes("SSL") || error.message.includes("TLS")) {
|
||||
throw new BadRequestError({
|
||||
message: `SSL/TLS certificate error: ${error.message}. This may indicate a certificate verification failure.`
|
||||
});
|
||||
}
|
||||
if (error.message.includes("DEPTH_ZERO_SELF_SIGNED_CERT")) {
|
||||
throw new BadRequestError({
|
||||
message:
|
||||
"Self-signed certificate detected. Either provide the server's certificate or set 'sslRejectUnauthorized' to false."
|
||||
});
|
||||
}
|
||||
if (error.message.includes("UNABLE_TO_VERIFY_LEAF_SIGNATURE")) {
|
||||
throw new BadRequestError({
|
||||
message: "Unable to verify certificate signature. Please provide the correct CA certificate."
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// Continue to next endpoint for other errors
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
// If we get here, no endpoint worked
|
||||
throw new BadRequestError({
|
||||
message: "Could not connect to ADCS server. Please verify the server URL and that Web Enrollment is enabled."
|
||||
});
|
||||
};
|
||||
|
||||
// Create authenticated NTLM client for ADCS operations
|
||||
const createNtlmClient = (
|
||||
username: string,
|
||||
password: string,
|
||||
baseUrl: string,
|
||||
sslRejectUnauthorized: boolean = true,
|
||||
sslCertificate?: string
|
||||
) => {
|
||||
const parsedCredentials = parseCredentials(username);
|
||||
const normalizedUrl = normalizeAdcsUrl(baseUrl);
|
||||
|
||||
return {
|
||||
get: async (endpoint: string, additionalHeaders: Record<string, string> = {}) => {
|
||||
const shouldRejectUnauthorized = sslRejectUnauthorized;
|
||||
|
||||
const httpsAgent = createHttpsAgent(shouldRejectUnauthorized, sslCertificate);
|
||||
|
||||
return axiosNtlmRequest({
|
||||
url: `${normalizedUrl}${endpoint}`,
|
||||
method: "GET",
|
||||
httpsAgent,
|
||||
headers: additionalHeaders,
|
||||
ntlm: {
|
||||
domain: parsedCredentials.domain,
|
||||
username: parsedCredentials.username,
|
||||
password
|
||||
}
|
||||
});
|
||||
},
|
||||
post: async (endpoint: string, body: string, additionalHeaders: Record<string, string> = {}) => {
|
||||
const shouldRejectUnauthorized = sslRejectUnauthorized;
|
||||
|
||||
const httpsAgent = createHttpsAgent(shouldRejectUnauthorized, sslCertificate);
|
||||
|
||||
return axiosNtlmRequest({
|
||||
url: `${normalizedUrl}${endpoint}`,
|
||||
method: "POST",
|
||||
httpsAgent,
|
||||
data: body,
|
||||
headers: {
|
||||
"Content-Type": "application/x-www-form-urlencoded",
|
||||
...additionalHeaders
|
||||
},
|
||||
ntlm: {
|
||||
domain: parsedCredentials.domain,
|
||||
username: parsedCredentials.username,
|
||||
password
|
||||
}
|
||||
});
|
||||
},
|
||||
baseUrl: normalizedUrl,
|
||||
credentials: parsedCredentials
|
||||
};
|
||||
};
|
||||
|
||||
export const getAzureADCSConnectionCredentials = async (
|
||||
connectionId: string,
|
||||
appConnectionDAL: Pick<TAppConnectionDALFactory, "findById">,
|
||||
kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">
|
||||
) => {
|
||||
const appConnection = await appConnectionDAL.findById(connectionId);
|
||||
|
||||
if (!appConnection) {
|
||||
throw new NotFoundError({ message: `Connection with ID '${connectionId}' not found` });
|
||||
}
|
||||
|
||||
if (appConnection.app !== AppConnection.AzureADCS) {
|
||||
throw new BadRequestError({ message: `Connection with ID '${connectionId}' is not an Azure ADCS connection` });
|
||||
}
|
||||
|
||||
switch (appConnection.method) {
|
||||
case AzureADCSConnectionMethod.UsernamePassword:
|
||||
const credentials = (await decryptAppConnectionCredentials({
|
||||
orgId: appConnection.orgId,
|
||||
kmsService,
|
||||
encryptedCredentials: appConnection.encryptedCredentials
|
||||
})) as {
|
||||
username: string;
|
||||
password: string;
|
||||
adcsUrl: string;
|
||||
sslRejectUnauthorized?: boolean;
|
||||
sslCertificate?: string;
|
||||
};
|
||||
|
||||
return {
|
||||
username: credentials.username,
|
||||
password: credentials.password,
|
||||
adcsUrl: credentials.adcsUrl,
|
||||
sslRejectUnauthorized: credentials.sslRejectUnauthorized ?? true,
|
||||
sslCertificate: credentials.sslCertificate
|
||||
};
|
||||
|
||||
default:
|
||||
throw new BadRequestError({
|
||||
message: `Unsupported Azure ADCS connection method: ${appConnection.method}`
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
export const validateAzureADCSConnectionCredentials = async (appConnection: TAzureADCSConnectionConfig) => {
|
||||
const { credentials } = appConnection;
|
||||
|
||||
try {
|
||||
// Parse and validate credentials
|
||||
const parsedCredentials = parseCredentials(credentials.username);
|
||||
const normalizedUrl = normalizeAdcsUrl(credentials.adcsUrl);
|
||||
|
||||
// Validate URL to prevent DNS manipulation attacks and SSRF
|
||||
await blockLocalAndPrivateIpAddresses(normalizedUrl);
|
||||
|
||||
// Test the connection using NTLM
|
||||
await testAdcsConnection(
|
||||
parsedCredentials,
|
||||
credentials.password,
|
||||
normalizedUrl,
|
||||
credentials.sslRejectUnauthorized ?? true,
|
||||
credentials.sslCertificate
|
||||
);
|
||||
|
||||
// If we get here, authentication was successful
|
||||
return {
|
||||
username: credentials.username,
|
||||
password: credentials.password,
|
||||
adcsUrl: credentials.adcsUrl,
|
||||
sslRejectUnauthorized: credentials.sslRejectUnauthorized ?? true,
|
||||
sslCertificate: credentials.sslCertificate
|
||||
};
|
||||
} catch (error) {
|
||||
if (error instanceof BadRequestError) {
|
||||
throw error;
|
||||
}
|
||||
|
||||
// Handle unexpected errors
|
||||
let errorMessage = "Unable to validate ADCS connection.";
|
||||
if (error instanceof Error) {
|
||||
if (error.message.includes("401") || error.message.includes("Unauthorized")) {
|
||||
errorMessage = "NTLM authentication failed. Please verify your username, password, and domain are correct.";
|
||||
} else if (error.message.includes("ENOTFOUND") || error.message.includes("ECONNREFUSED")) {
|
||||
errorMessage = "Cannot connect to the ADCS server. Please verify the server URL is correct and accessible.";
|
||||
} else if (error.message.includes("timeout")) {
|
||||
errorMessage = "Connection to ADCS server timed out. Please verify the server is accessible.";
|
||||
} else if (
|
||||
error.message.includes("certificate") ||
|
||||
error.message.includes("SSL") ||
|
||||
error.message.includes("TLS") ||
|
||||
error.message.includes("DEPTH_ZERO_SELF_SIGNED_CERT") ||
|
||||
error.message.includes("UNABLE_TO_VERIFY_LEAF_SIGNATURE")
|
||||
) {
|
||||
errorMessage = `SSL/TLS certificate error: ${error.message}. The server certificate may be self-signed or the CA certificate may be incorrect.`;
|
||||
}
|
||||
}
|
||||
|
||||
throw new BadRequestError({
|
||||
message: `Failed to validate Azure ADCS connection: ${errorMessage} Details: ${
|
||||
error instanceof Error ? error.message : "Unknown error"
|
||||
}`
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
export const getAzureADCSConnectionListItem = () => ({
|
||||
name: "Azure ADCS" as const,
|
||||
app: AppConnection.AzureADCS as const,
|
||||
methods: [AzureADCSConnectionMethod.UsernamePassword] as [AzureADCSConnectionMethod.UsernamePassword]
|
||||
});
|
||||
|
||||
// Export helper functions for use in certificate ordering
|
||||
export const createAdcsHttpClient = (
|
||||
username: string,
|
||||
password: string,
|
||||
baseUrl: string,
|
||||
sslRejectUnauthorized: boolean = true,
|
||||
sslCertificate?: string
|
||||
) => {
|
||||
return createNtlmClient(username, password, baseUrl, sslRejectUnauthorized, sslCertificate);
|
||||
};
|
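Illustrative only — a minimal sketch of how the ADCS helpers above might be exercised end to end; the server URL and account are placeholders, not values from this changeset.

import { AppConnection } from "../app-connection-enums";
import { AzureADCSConnectionMethod } from "./azure-adcs-connection-enums";
import { createAdcsHttpClient, validateAzureADCSConnectionCredentials } from "./azure-adcs-connection-fns";
import { TAzureADCSConnectionConfig } from "./azure-adcs-connection-types";

// Hypothetical connection config for illustration
const config: TAzureADCSConnectionConfig = {
  app: AppConnection.AzureADCS,
  method: AzureADCSConnectionMethod.UsernamePassword,
  credentials: {
    adcsUrl: "https://adcs.example.internal",
    username: "EXAMPLE\\svc-infisical", // domain\username or UPN are both accepted by parseCredentials
    password: "<redacted>",
    sslRejectUnauthorized: true
  }
};

const probeAdcs = async () => {
  // Throws BadRequestError with a specific message on auth, SSL, or connectivity failures
  const credentials = await validateAzureADCSConnectionCredentials(config);

  // NTLM-authenticated client; the same helper is re-exported for certificate ordering
  const adcs = createAdcsHttpClient(
    credentials.username,
    credentials.password,
    credentials.adcsUrl,
    credentials.sslRejectUnauthorized
  );
  const res = await adcs.get("/certsrv/certrqus.asp");
  return res.status === 200;
};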
@@ -0,0 +1,88 @@
|
||||
import z from "zod";
|
||||
|
||||
import { AppConnections } from "@app/lib/api-docs";
|
||||
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
|
||||
import {
|
||||
BaseAppConnectionSchema,
|
||||
GenericCreateAppConnectionFieldsSchema,
|
||||
GenericUpdateAppConnectionFieldsSchema
|
||||
} from "@app/services/app-connection/app-connection-schemas";
|
||||
|
||||
import { AzureADCSConnectionMethod } from "./azure-adcs-connection-enums";
|
||||
|
||||
export const AzureADCSUsernamePasswordCredentialsSchema = z.object({
|
||||
adcsUrl: z
|
||||
.string()
|
||||
.trim()
|
||||
.min(1, "ADCS URL required")
|
||||
.max(255)
|
||||
.refine((value) => value.startsWith("https://"), "ADCS URL must use HTTPS")
|
||||
.describe(AppConnections.CREDENTIALS.AZURE_ADCS.adcsUrl),
|
||||
username: z
|
||||
.string()
|
||||
.trim()
|
||||
.min(1, "Username required")
|
||||
.max(255)
|
||||
.describe(AppConnections.CREDENTIALS.AZURE_ADCS.username),
|
||||
password: z
|
||||
.string()
|
||||
.trim()
|
||||
.min(1, "Password required")
|
||||
.max(255)
|
||||
.describe(AppConnections.CREDENTIALS.AZURE_ADCS.password),
|
||||
sslRejectUnauthorized: z.boolean().optional().describe(AppConnections.CREDENTIALS.AZURE_ADCS.sslRejectUnauthorized),
|
||||
sslCertificate: z
|
||||
.string()
|
||||
.trim()
|
||||
.transform((value) => value || undefined)
|
||||
.optional()
|
||||
.describe(AppConnections.CREDENTIALS.AZURE_ADCS.sslCertificate)
|
||||
});
|
||||
|
||||
const BaseAzureADCSConnectionSchema = BaseAppConnectionSchema.extend({ app: z.literal(AppConnection.AzureADCS) });
|
||||
|
||||
export const AzureADCSConnectionSchema = BaseAzureADCSConnectionSchema.extend({
|
||||
method: z.literal(AzureADCSConnectionMethod.UsernamePassword),
|
||||
credentials: AzureADCSUsernamePasswordCredentialsSchema
|
||||
});
|
||||
|
||||
export const SanitizedAzureADCSConnectionSchema = z.discriminatedUnion("method", [
|
||||
BaseAzureADCSConnectionSchema.extend({
|
||||
method: z.literal(AzureADCSConnectionMethod.UsernamePassword),
|
||||
credentials: AzureADCSUsernamePasswordCredentialsSchema.pick({
|
||||
username: true,
|
||||
adcsUrl: true,
|
||||
sslRejectUnauthorized: true,
|
||||
sslCertificate: true
|
||||
})
|
||||
})
|
||||
]);
|
||||
|
||||
export const ValidateAzureADCSConnectionCredentialsSchema = z.discriminatedUnion("method", [
|
||||
z.object({
|
||||
method: z
|
||||
.literal(AzureADCSConnectionMethod.UsernamePassword)
|
||||
.describe(AppConnections.CREATE(AppConnection.AzureADCS).method),
|
||||
credentials: AzureADCSUsernamePasswordCredentialsSchema.describe(
|
||||
AppConnections.CREATE(AppConnection.AzureADCS).credentials
|
||||
)
|
||||
})
|
||||
]);
|
||||
|
||||
export const CreateAzureADCSConnectionSchema = ValidateAzureADCSConnectionCredentialsSchema.and(
|
||||
GenericCreateAppConnectionFieldsSchema(AppConnection.AzureADCS)
|
||||
);
|
||||
|
||||
export const UpdateAzureADCSConnectionSchema = z
|
||||
.object({
|
||||
credentials: AzureADCSUsernamePasswordCredentialsSchema.optional().describe(
|
||||
AppConnections.UPDATE(AppConnection.AzureADCS).credentials
|
||||
)
|
||||
})
|
||||
.and(GenericUpdateAppConnectionFieldsSchema(AppConnection.AzureADCS));
|
||||
|
||||
export const AzureADCSConnectionListItemSchema = z.object({
|
||||
name: z.literal("Azure ADCS"),
|
||||
app: z.literal(AppConnection.AzureADCS),
|
||||
methods: z.nativeEnum(AzureADCSConnectionMethod).array()
|
||||
});
|
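For reference, a quick sanity check of the validation schema above (safeParse is standard Zod; the payload is made up):

import { AzureADCSConnectionMethod } from "./azure-adcs-connection-enums";
import { ValidateAzureADCSConnectionCredentialsSchema } from "./azure-adcs-connection-schemas";

const result = ValidateAzureADCSConnectionCredentialsSchema.safeParse({
  method: AzureADCSConnectionMethod.UsernamePassword,
  credentials: {
    adcsUrl: "http://adcs.example.internal", // rejected: the refine above requires an https:// URL
    username: "user@example.internal",
    password: "<redacted>"
  }
});

if (!result.success) {
  // e.g. ["ADCS URL must use HTTPS"]
  console.log(result.error.issues.map((issue) => issue.message));
}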
@@ -0,0 +1,23 @@
|
||||
import z from "zod";
|
||||
|
||||
import { DiscriminativePick } from "@app/lib/types";
|
||||
|
||||
import { AppConnection } from "../app-connection-enums";
|
||||
import {
|
||||
AzureADCSConnectionSchema,
|
||||
CreateAzureADCSConnectionSchema,
|
||||
ValidateAzureADCSConnectionCredentialsSchema
|
||||
} from "./azure-adcs-connection-schemas";
|
||||
|
||||
export type TAzureADCSConnection = z.infer<typeof AzureADCSConnectionSchema>;
|
||||
|
||||
export type TAzureADCSConnectionInput = z.infer<typeof CreateAzureADCSConnectionSchema> & {
|
||||
app: AppConnection.AzureADCS;
|
||||
};
|
||||
|
||||
export type TValidateAzureADCSConnectionCredentialsSchema = typeof ValidateAzureADCSConnectionCredentialsSchema;
|
||||
|
||||
export type TAzureADCSConnectionConfig = DiscriminativePick<
|
||||
TAzureADCSConnectionInput,
|
||||
"method" | "app" | "credentials"
|
||||
>;
|
backend/src/services/app-connection/azure-adcs/index.ts (new file, 4 lines)
@@ -0,0 +1,4 @@
|
||||
export * from "./azure-adcs-connection-enums";
|
||||
export * from "./azure-adcs-connection-fns";
|
||||
export * from "./azure-adcs-connection-schemas";
|
||||
export * from "./azure-adcs-connection-types";
|
@@ -70,7 +70,7 @@ export const validateAzureAppConfigurationConnectionCredentials = async (
|
||||
tokenError = e;
|
||||
} else {
|
||||
throw new BadRequestError({
|
||||
message: `Unable to validate connection: verify credentials`
|
||||
message: "Unable to validate connection: verify credentials"
|
||||
});
|
||||
}
|
||||
}
|
||||
|
@@ -186,7 +186,7 @@ export const validateAzureClientSecretsConnectionCredentials = async (config: TA
|
||||
tokenError = e;
|
||||
} else {
|
||||
throw new BadRequestError({
|
||||
message: `Unable to validate connection: verify credentials`
|
||||
message: "Unable to validate connection: verify credentials"
|
||||
});
|
||||
}
|
||||
}
|
||||
|
@@ -204,7 +204,7 @@ export const validateAzureDevOpsConnectionCredentials = async (config: TAzureDev
|
||||
tokenError = e;
|
||||
} else {
|
||||
throw new BadRequestError({
|
||||
message: `Unable to validate connection: verify credentials`
|
||||
message: "Unable to validate connection: verify credentials"
|
||||
});
|
||||
}
|
||||
}
|
||||
|
@@ -186,7 +186,7 @@ export const validateAzureKeyVaultConnectionCredentials = async (config: TAzureK
|
||||
tokenError = e;
|
||||
} else {
|
||||
throw new BadRequestError({
|
||||
message: `Unable to validate connection: verify credentials`
|
||||
message: "Unable to validate connection: verify credentials"
|
||||
});
|
||||
}
|
||||
}
|
||||
|
@@ -82,7 +82,7 @@ export const validateCamundaConnectionCredentials = async (appConnection: TCamun
|
||||
};
|
||||
} catch (e: unknown) {
|
||||
throw new BadRequestError({
|
||||
message: `Unable to validate connection: verify credentials`
|
||||
message: "Unable to validate connection: verify credentials"
|
||||
});
|
||||
}
|
||||
};
|
||||
|
@@ -4,6 +4,7 @@ import { AxiosInstance, AxiosRequestConfig, AxiosResponse, HttpStatusCode, isAxi
|
||||
|
||||
import { createRequestClient } from "@app/lib/config/request";
|
||||
import { IntegrationUrls } from "@app/services/integration-auth/integration-list";
|
||||
import { SecretSyncError } from "@app/services/secret-sync/secret-sync-errors";
|
||||
|
||||
import { ChecklyConnectionMethod } from "./checkly-connection-constants";
|
||||
import { TChecklyAccount, TChecklyConnectionConfig, TChecklyVariable } from "./checkly-connection-types";
|
||||
@@ -181,6 +182,122 @@ class ChecklyPublicClient {
|
||||
|
||||
return res;
|
||||
}
|
||||
|
||||
async getCheckGroups(connection: TChecklyConnectionConfig, accountId: string, limit = 50, page = 1) {
|
||||
const res = await this.send<{ id: number; name: string }[]>(connection, {
|
||||
accountId,
|
||||
method: "GET",
|
||||
url: `/v1/check-groups`,
|
||||
params: { limit, page }
|
||||
});
|
||||
|
||||
return res?.map((group) => ({
|
||||
id: group.id.toString(),
|
||||
name: group.name
|
||||
}));
|
||||
}
|
||||
|
||||
async getCheckGroup(connection: TChecklyConnectionConfig, accountId: string, groupId: string) {
|
||||
try {
|
||||
type ChecklyGroupResponse = {
|
||||
id: number;
|
||||
name: string;
|
||||
environmentVariables: Array<{
|
||||
key: string;
|
||||
value: string;
|
||||
locked: boolean;
|
||||
}>;
|
||||
};
|
||||
|
||||
const res = await this.send<ChecklyGroupResponse>(connection, {
|
||||
accountId,
|
||||
method: "GET",
|
||||
url: `/v1/check-groups/${groupId}`
|
||||
});
|
||||
|
||||
if (!res) return null;
|
||||
|
||||
return {
|
||||
id: res.id.toString(),
|
||||
name: res.name,
|
||||
environmentVariables: res.environmentVariables
|
||||
};
|
||||
} catch (error) {
|
||||
if (isAxiosError(error) && error.response?.status === HttpStatusCode.NotFound) {
|
||||
return null;
|
||||
}
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
async updateCheckGroupEnvironmentVariables(
|
||||
connection: TChecklyConnectionConfig,
|
||||
accountId: string,
|
||||
groupId: string,
|
||||
environmentVariables: Array<{ key: string; value: string; locked?: boolean }>
|
||||
) {
|
||||
if (environmentVariables.length > 50) {
|
||||
throw new SecretSyncError({
|
||||
message: "Checkly does not support syncing more than 50 variables to Check Group",
|
||||
shouldRetry: false
|
||||
});
|
||||
}
|
||||
|
||||
const apiVariables = environmentVariables.map((v) => ({
|
||||
key: v.key,
|
||||
value: v.value,
|
||||
locked: v.locked ?? false,
|
||||
secret: true
|
||||
}));
|
||||
|
||||
const group = await this.getCheckGroup(connection, accountId, groupId);
|
||||
|
||||
await this.send(connection, {
|
||||
accountId,
|
||||
method: "PUT",
|
||||
url: `/v2/check-groups/${groupId}`,
|
||||
data: { name: group?.name, environmentVariables: apiVariables }
|
||||
});
|
||||
|
||||
return this.getCheckGroup(connection, accountId, groupId);
|
||||
}
|
||||
|
||||
async getCheckGroupEnvironmentVariables(connection: TChecklyConnectionConfig, accountId: string, groupId: string) {
|
||||
const group = await this.getCheckGroup(connection, accountId, groupId);
|
||||
return group?.environmentVariables || [];
|
||||
}
|
||||
|
||||
async upsertCheckGroupEnvironmentVariables(
|
||||
connection: TChecklyConnectionConfig,
|
||||
accountId: string,
|
||||
groupId: string,
|
||||
variables: Array<{ key: string; value: string; locked?: boolean }>
|
||||
) {
|
||||
const existingVars = await this.getCheckGroupEnvironmentVariables(connection, accountId, groupId);
|
||||
const varMap = new Map(existingVars.map((v) => [v.key, v]));
|
||||
|
||||
for (const newVar of variables) {
|
||||
varMap.set(newVar.key, {
|
||||
key: newVar.key,
|
||||
value: newVar.value,
|
||||
locked: newVar.locked ?? false
|
||||
});
|
||||
}
|
||||
|
||||
return this.updateCheckGroupEnvironmentVariables(connection, accountId, groupId, Array.from(varMap.values()));
|
||||
}
|
||||
|
||||
async deleteCheckGroupEnvironmentVariable(
|
||||
connection: TChecklyConnectionConfig,
|
||||
accountId: string,
|
||||
groupId: string,
|
||||
variableKey: string
|
||||
) {
|
||||
const existingVars = await this.getCheckGroupEnvironmentVariables(connection, accountId, groupId);
|
||||
const filteredVars = existingVars.filter((v) => v.key !== variableKey);
|
||||
|
||||
return this.updateCheckGroupEnvironmentVariables(connection, accountId, groupId, filteredVars);
|
||||
}
|
||||
}
|
||||
|
||||
export const ChecklyPublicAPI = new ChecklyPublicClient();
|
||||
|
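Illustrative usage of the new check-group methods (connection, account and group identifiers are placeholders; the import path is assumed):

import { ChecklyPublicAPI } from "./checkly-connection-fns"; // assumed module name for the client above
import { TChecklyConnectionConfig } from "./checkly-connection-types";

const syncGroupSecrets = async (connection: TChecklyConnectionConfig) => {
  const accountId = "acc_123"; // placeholder
  const groupId = "42"; // placeholder

  // Merges by key with the group's existing variables, then PUTs the full set back;
  // updateCheckGroupEnvironmentVariables rejects payloads above Checkly's 50-variable limit
  await ChecklyPublicAPI.upsertCheckGroupEnvironmentVariables(connection, accountId, groupId, [
    { key: "API_TOKEN", value: "<redacted>", locked: true }
  ]);

  // Deleting a key rewrites the remaining variables
  await ChecklyPublicAPI.deleteCheckGroupEnvironmentVariable(connection, accountId, groupId, "LEGACY_TOKEN");
};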
@@ -24,7 +24,19 @@ export const checklyConnectionService = (getAppConnection: TGetAppConnectionFunc
|
||||
}
|
||||
};
|
||||
|
||||
const listGroups = async (connectionId: string, accountId: string, actor: OrgServiceActor) => {
|
||||
const appConnection = await getAppConnection(AppConnection.Checkly, connectionId, actor);
|
||||
try {
|
||||
const groups = await ChecklyPublicAPI.getCheckGroups(appConnection, accountId);
|
||||
return groups!;
|
||||
} catch (error) {
|
||||
logger.error(error, "Failed to list accounts on Checkly");
|
||||
return [];
|
||||
}
|
||||
};
|
||||
|
||||
return {
|
||||
listAccounts
|
||||
listAccounts,
|
||||
listGroups
|
||||
};
|
||||
};
|
||||
|
@@ -33,3 +33,15 @@ export type TChecklyAccount = {
|
||||
name: string;
|
||||
runtimeId: string;
|
||||
};
|
||||
|
||||
export type TChecklyGroupEnvironmentVariable = {
|
||||
key: string;
|
||||
value: string;
|
||||
locked: boolean;
|
||||
};
|
||||
|
||||
export type TChecklyGroup = {
|
||||
id: string;
|
||||
name: string;
|
||||
environmentVariables?: TChecklyGroupEnvironmentVariable[];
|
||||
};
|
||||
|
@@ -89,7 +89,7 @@ export const validateDatabricksConnectionCredentials = async (appConnection: TDa
|
||||
};
|
||||
} catch (e: unknown) {
|
||||
throw new BadRequestError({
|
||||
message: `Unable to validate connection: verify credentials`
|
||||
message: "Unable to validate connection: verify credentials"
|
||||
});
|
||||
}
|
||||
};
|
||||
|
@@ -114,7 +114,7 @@ export const validateGitHubRadarConnectionCredentials = async (config: TGitHubRa
|
||||
}
|
||||
|
||||
throw new BadRequestError({
|
||||
message: `Unable to validate connection: verify credentials`
|
||||
message: "Unable to validate connection: verify credentials"
|
||||
});
|
||||
}
|
||||
|
||||
|
@@ -1,5 +1,3 @@
|
||||
import { createAppAuth } from "@octokit/auth-app";
|
||||
import { request } from "@octokit/request";
|
||||
import { AxiosError, AxiosRequestConfig, AxiosResponse } from "axios";
|
||||
import https from "https";
|
||||
import RE2 from "re2";
|
||||
@@ -8,6 +6,7 @@ import { verifyHostInputValidity } from "@app/ee/services/dynamic-secret/dynamic
|
||||
import { TGatewayServiceFactory } from "@app/ee/services/gateway/gateway-service";
|
||||
import { getConfig } from "@app/lib/config/env";
|
||||
import { request as httpRequest } from "@app/lib/config/request";
|
||||
import { crypto } from "@app/lib/crypto";
|
||||
import { BadRequestError, ForbiddenRequestError, InternalServerError } from "@app/lib/errors";
|
||||
import { GatewayProxyProtocol, withGatewayProxy } from "@app/lib/gateway";
|
||||
import { logger } from "@app/lib/logger";
|
||||
@@ -114,10 +113,13 @@ export const requestWithGitHubGateway = async <T>(
|
||||
);
|
||||
};
|
||||
|
||||
export const getGitHubAppAuthToken = async (appConnection: TGitHubConnection) => {
|
||||
export const getGitHubAppAuthToken = async (
|
||||
appConnection: TGitHubConnection,
|
||||
gatewayService: Pick<TGatewayServiceFactory, "fnGetGatewayClientTlsByGatewayId">
|
||||
) => {
|
||||
const appCfg = getConfig();
|
||||
const appId = appCfg.INF_APP_CONNECTION_GITHUB_APP_ID;
|
||||
const appPrivateKey = appCfg.INF_APP_CONNECTION_GITHUB_APP_PRIVATE_KEY;
|
||||
let appPrivateKey = appCfg.INF_APP_CONNECTION_GITHUB_APP_PRIVATE_KEY;
|
||||
|
||||
if (!appId || !appPrivateKey) {
|
||||
throw new InternalServerError({
|
||||
@@ -125,21 +127,42 @@ export const getGitHubAppAuthToken = async (appConnection: TGitHubConnection) =>
|
||||
});
|
||||
}
|
||||
|
||||
appPrivateKey = appPrivateKey
|
||||
.split("\n")
|
||||
.map((line) => line.trim())
|
||||
.join("\n");
|
||||
|
||||
if (appConnection.method !== GitHubConnectionMethod.App) {
|
||||
throw new InternalServerError({ message: "Cannot generate GitHub App token for non-app connection" });
|
||||
}
|
||||
|
||||
const appAuth = createAppAuth({
|
||||
appId,
|
||||
privateKey: appPrivateKey,
|
||||
installationId: appConnection.credentials.installationId,
|
||||
request: request.defaults({
|
||||
baseUrl: `https://${await getGitHubInstanceApiUrl(appConnection)}`
|
||||
})
|
||||
});
|
||||
const now = Math.floor(Date.now() / 1000);
|
||||
const payload = {
|
||||
iat: now,
|
||||
exp: now + 5 * 60,
|
||||
iss: appId
|
||||
};
|
||||
|
||||
const { token } = await appAuth({ type: "installation" });
|
||||
return token;
|
||||
const appJwt = crypto.jwt().sign(payload, appPrivateKey, { algorithm: "RS256" });
|
||||
|
||||
const apiBaseUrl = await getGitHubInstanceApiUrl(appConnection);
|
||||
const { installationId } = appConnection.credentials;
|
||||
|
||||
const response = await requestWithGitHubGateway<{ token: string; expires_at: string }>(
|
||||
appConnection,
|
||||
gatewayService,
|
||||
{
|
||||
url: `https://${apiBaseUrl}/app/installations/${installationId}/access_tokens`,
|
||||
method: "POST",
|
||||
headers: {
|
||||
Accept: "application/vnd.github+json",
|
||||
Authorization: `Bearer ${appJwt}`,
|
||||
"X-GitHub-Api-Version": "2022-11-28"
|
||||
}
|
||||
}
|
||||
);
|
||||
|
||||
return response.data.token;
|
||||
};
|
||||
|
||||
const parseGitHubLinkHeader = (linkHeader: string | undefined): Record<string, string> => {
|
||||
@@ -174,7 +197,9 @@ export const makePaginatedGitHubRequest = async <T, R = T[]>(
|
||||
const { credentials, method } = appConnection;
|
||||
|
||||
const token =
|
||||
method === GitHubConnectionMethod.OAuth ? credentials.accessToken : await getGitHubAppAuthToken(appConnection);
|
||||
method === GitHubConnectionMethod.OAuth
|
||||
? credentials.accessToken
|
||||
: await getGitHubAppAuthToken(appConnection, gatewayService);
|
||||
|
||||
const baseUrl = `https://${await getGitHubInstanceApiUrl(appConnection)}${path}`;
|
||||
const initialUrlObj = new URL(baseUrl);
|
||||
@@ -422,7 +447,7 @@ export const validateGitHubConnectionCredentials = async (
|
||||
}
|
||||
|
||||
throw new BadRequestError({
|
||||
message: `Unable to validate connection: verify credentials`
|
||||
message: "Unable to validate connection: verify credentials"
|
||||
});
|
||||
}
|
||||
|
||||
|
@@ -1,18 +1,18 @@
|
||||
import { AxiosError } from "axios";
|
||||
import { AxiosError, AxiosRequestConfig, AxiosResponse } from "axios";
|
||||
import https from "https";
|
||||
|
||||
import { verifyHostInputValidity } from "@app/ee/services/dynamic-secret/dynamic-secret-fns";
|
||||
import { TGatewayServiceFactory } from "@app/ee/services/gateway/gateway-service";
|
||||
import { request } from "@app/lib/config/request";
|
||||
import { BadRequestError } from "@app/lib/errors";
|
||||
import { removeTrailingSlash } from "@app/lib/fn";
|
||||
import { GatewayProxyProtocol, withGatewayProxy } from "@app/lib/gateway";
|
||||
import { logger } from "@app/lib/logger";
|
||||
import { blockLocalAndPrivateIpAddresses } from "@app/lib/validator";
|
||||
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
|
||||
|
||||
import { HCVaultConnectionMethod } from "./hc-vault-connection-enums";
|
||||
import {
|
||||
THCVaultConnection,
|
||||
THCVaultConnectionConfig,
|
||||
THCVaultMountResponse,
|
||||
TValidateHCVaultConnectionCredentials
|
||||
} from "./hc-vault-connection-types";
|
||||
import { THCVaultConnection, THCVaultConnectionConfig, THCVaultMountResponse } from "./hc-vault-connection-types";
|
||||
|
||||
export const getHCVaultInstanceUrl = async (config: THCVaultConnectionConfig) => {
|
||||
const instanceUrl = removeTrailingSlash(config.credentials.instanceUrl);
|
||||
@@ -37,7 +37,78 @@ type TokenRespData = {
|
||||
};
|
||||
};
|
||||
|
||||
export const getHCVaultAccessToken = async (connection: TValidateHCVaultConnectionCredentials) => {
|
||||
export const requestWithHCVaultGateway = async <T>(
|
||||
appConnection: { gatewayId?: string | null },
|
||||
gatewayService: Pick<TGatewayServiceFactory, "fnGetGatewayClientTlsByGatewayId">,
|
||||
requestConfig: AxiosRequestConfig
|
||||
): Promise<AxiosResponse<T>> => {
|
||||
const { gatewayId } = appConnection;
|
||||
|
||||
// If gateway isn't set up, don't proxy request
|
||||
if (!gatewayId) {
|
||||
return request.request(requestConfig);
|
||||
}
|
||||
|
||||
const url = new URL(requestConfig.url as string);
|
||||
|
||||
await blockLocalAndPrivateIpAddresses(url.toString());
|
||||
|
||||
const [targetHost] = await verifyHostInputValidity(url.hostname, true);
|
||||
const relayDetails = await gatewayService.fnGetGatewayClientTlsByGatewayId(gatewayId);
|
||||
const [relayHost, relayPort] = relayDetails.relayAddress.split(":");
|
||||
|
||||
return withGatewayProxy(
|
||||
async (proxyPort) => {
|
||||
const httpsAgent = new https.Agent({
|
||||
servername: targetHost
|
||||
});
|
||||
|
||||
url.protocol = "https:";
|
||||
url.host = `localhost:${proxyPort}`;
|
||||
|
||||
const finalRequestConfig: AxiosRequestConfig = {
|
||||
...requestConfig,
|
||||
url: url.toString(),
|
||||
httpsAgent,
|
||||
headers: {
|
||||
...requestConfig.headers,
|
||||
Host: targetHost
|
||||
}
|
||||
};
|
||||
|
||||
try {
|
||||
return await request.request(finalRequestConfig);
|
||||
} catch (error) {
|
||||
if (error instanceof AxiosError) {
|
||||
logger.error(
|
||||
{ message: error.message, data: (error.response as undefined | { data: unknown })?.data },
|
||||
"Error during HashiCorp Vault gateway request:"
|
||||
);
|
||||
}
|
||||
throw error;
|
||||
}
|
||||
},
|
||||
{
|
||||
protocol: GatewayProxyProtocol.Tcp,
|
||||
targetHost,
|
||||
targetPort: url.port ? Number(url.port) : 8200, // 8200 is the default port for Vault self-hosted/dedicated
|
||||
relayHost,
|
||||
relayPort: Number(relayPort),
|
||||
identityId: relayDetails.identityId,
|
||||
orgId: relayDetails.orgId,
|
||||
tlsOptions: {
|
||||
ca: relayDetails.certChain,
|
||||
cert: relayDetails.certificate,
|
||||
key: relayDetails.privateKey.toString()
|
||||
}
|
||||
}
|
||||
);
|
||||
};
|
||||
|
||||
export const getHCVaultAccessToken = async (
|
||||
connection: THCVaultConnection,
|
||||
gatewayService: Pick<TGatewayServiceFactory, "fnGetGatewayClientTlsByGatewayId">
|
||||
) => {
|
||||
// Return access token directly if not using AppRole method
|
||||
if (connection.method !== HCVaultConnectionMethod.AppRole) {
|
||||
return connection.credentials.accessToken;
|
||||
@@ -46,16 +117,16 @@ export const getHCVaultAccessToken = async (connection: TValidateHCVaultConnecti
|
||||
// Generate temporary token for AppRole method
|
||||
try {
|
||||
const { instanceUrl, roleId, secretId } = connection.credentials;
|
||||
const tokenResp = await request.post<TokenRespData>(
|
||||
`${removeTrailingSlash(instanceUrl)}/v1/auth/approle/login`,
|
||||
{ role_id: roleId, secret_id: secretId },
|
||||
{
|
||||
headers: {
|
||||
"Content-Type": "application/json",
|
||||
...(connection.credentials.namespace ? { "X-Vault-Namespace": connection.credentials.namespace } : {})
|
||||
}
|
||||
}
|
||||
);
|
||||
|
||||
const tokenResp = await requestWithHCVaultGateway<TokenRespData>(connection, gatewayService, {
|
||||
url: `${removeTrailingSlash(instanceUrl)}/v1/auth/approle/login`,
|
||||
method: "POST",
|
||||
headers: {
|
||||
"Content-Type": "application/json",
|
||||
...(connection.credentials.namespace ? { "X-Vault-Namespace": connection.credentials.namespace } : {})
|
||||
},
|
||||
data: { role_id: roleId, secret_id: secretId }
|
||||
});
|
||||
|
||||
if (tokenResp.status !== 200) {
|
||||
throw new BadRequestError({
|
||||
@@ -71,38 +142,55 @@ export const getHCVaultAccessToken = async (connection: TValidateHCVaultConnecti
|
||||
}
|
||||
};
|
||||
|
||||
export const validateHCVaultConnectionCredentials = async (config: THCVaultConnectionConfig) => {
|
||||
const instanceUrl = await getHCVaultInstanceUrl(config);
|
||||
export const validateHCVaultConnectionCredentials = async (
|
||||
connection: THCVaultConnection,
|
||||
gatewayService: Pick<TGatewayServiceFactory, "fnGetGatewayClientTlsByGatewayId">
|
||||
) => {
|
||||
const instanceUrl = await getHCVaultInstanceUrl(connection);
|
||||
|
||||
try {
|
||||
const accessToken = await getHCVaultAccessToken(config);
|
||||
const accessToken = await getHCVaultAccessToken(connection, gatewayService);
|
||||
|
||||
// Verify token
|
||||
await request.get(`${instanceUrl}/v1/auth/token/lookup-self`, {
|
||||
await requestWithHCVaultGateway(connection, gatewayService, {
|
||||
url: `${instanceUrl}/v1/auth/token/lookup-self`,
|
||||
method: "GET",
|
||||
headers: { "X-Vault-Token": accessToken }
|
||||
});
|
||||
|
||||
return config.credentials;
|
||||
return connection.credentials;
|
||||
} catch (error: unknown) {
|
||||
logger.error(error, "Unable to verify HC Vault connection");
|
||||
|
||||
if (error instanceof AxiosError) {
|
||||
throw new BadRequestError({
|
||||
message: `Failed to validate credentials: ${error.message || "Unknown error"}`
|
||||
});
|
||||
}
|
||||
|
||||
if (error instanceof BadRequestError) {
|
||||
throw error;
|
||||
}
|
||||
|
||||
throw new BadRequestError({
|
||||
message: "Unable to validate connection: verify credentials"
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
export const listHCVaultMounts = async (appConnection: THCVaultConnection) => {
|
||||
const instanceUrl = await getHCVaultInstanceUrl(appConnection);
|
||||
const accessToken = await getHCVaultAccessToken(appConnection);
|
||||
export const listHCVaultMounts = async (
|
||||
connection: THCVaultConnection,
|
||||
gatewayService: Pick<TGatewayServiceFactory, "fnGetGatewayClientTlsByGatewayId">
|
||||
) => {
|
||||
const instanceUrl = await getHCVaultInstanceUrl(connection);
|
||||
const accessToken = await getHCVaultAccessToken(connection, gatewayService);
|
||||
|
||||
const { data } = await request.get<THCVaultMountResponse>(`${instanceUrl}/v1/sys/mounts`, {
|
||||
const { data } = await requestWithHCVaultGateway<THCVaultMountResponse>(connection, gatewayService, {
|
||||
url: `${instanceUrl}/v1/sys/mounts`,
|
||||
method: "GET",
|
||||
headers: {
|
||||
"X-Vault-Token": accessToken,
|
||||
...(appConnection.credentials.namespace ? { "X-Vault-Namespace": appConnection.credentials.namespace } : {})
|
||||
...(connection.credentials.namespace ? { "X-Vault-Namespace": connection.credentials.namespace } : {})
|
||||
}
|
||||
});
|
||||
|
||||
|
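A sketch of calling Vault through the new gateway-aware helper; the connection and gateway service would come from the surrounding service layer, and the secret path is a placeholder:

import { TGatewayServiceFactory } from "@app/ee/services/gateway/gateway-service";

import { requestWithHCVaultGateway } from "./hc-vault-connection-fns"; // assumed module name for the helpers above
import { THCVaultConnection } from "./hc-vault-connection-types";

const readKvSecret = async (
  connection: THCVaultConnection,
  gatewayService: Pick<TGatewayServiceFactory, "fnGetGatewayClientTlsByGatewayId">,
  accessToken: string
) => {
  // Proxied through the org gateway only when connection.gatewayId is set; otherwise a direct request
  const { data } = await requestWithHCVaultGateway<{ data: { data: Record<string, string> } }>(
    connection,
    gatewayService,
    {
      url: `${connection.credentials.instanceUrl}/v1/secret/data/my-app`, // placeholder KV v2 path
      method: "GET",
      headers: { "X-Vault-Token": accessToken }
    }
  );

  return data.data.data;
};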
@@ -55,11 +55,18 @@ export const HCVaultConnectionSchema = z.intersection(
|
||||
export const SanitizedHCVaultConnectionSchema = z.discriminatedUnion("method", [
|
||||
BaseHCVaultConnectionSchema.extend({
|
||||
method: z.literal(HCVaultConnectionMethod.AccessToken),
|
||||
credentials: HCVaultConnectionAccessTokenCredentialsSchema.pick({})
|
||||
credentials: HCVaultConnectionAccessTokenCredentialsSchema.pick({
|
||||
namespace: true,
|
||||
instanceUrl: true
|
||||
})
|
||||
}),
|
||||
BaseHCVaultConnectionSchema.extend({
|
||||
method: z.literal(HCVaultConnectionMethod.AppRole),
|
||||
credentials: HCVaultConnectionAppRoleCredentialsSchema.pick({})
|
||||
credentials: HCVaultConnectionAppRoleCredentialsSchema.pick({
|
||||
namespace: true,
|
||||
instanceUrl: true,
|
||||
roleId: true
|
||||
})
|
||||
})
|
||||
]);
|
||||
|
||||
@@ -81,7 +88,7 @@ export const ValidateHCVaultConnectionCredentialsSchema = z.discriminatedUnion("
|
||||
]);
|
||||
|
||||
export const CreateHCVaultConnectionSchema = ValidateHCVaultConnectionCredentialsSchema.and(
|
||||
GenericCreateAppConnectionFieldsSchema(AppConnection.HCVault)
|
||||
GenericCreateAppConnectionFieldsSchema(AppConnection.HCVault, { supportsGateways: true })
|
||||
);
|
||||
|
||||
export const UpdateHCVaultConnectionSchema = z
|
||||
@@ -91,7 +98,7 @@ export const UpdateHCVaultConnectionSchema = z
|
||||
.optional()
|
||||
.describe(AppConnections.UPDATE(AppConnection.HCVault).credentials)
|
||||
})
|
||||
.and(GenericUpdateAppConnectionFieldsSchema(AppConnection.HCVault));
|
||||
.and(GenericUpdateAppConnectionFieldsSchema(AppConnection.HCVault, { supportsGateways: true }));
|
||||
|
||||
export const HCVaultConnectionListItemSchema = z.object({
|
||||
name: z.literal("HCVault"),
|
||||
|
@@ -1,3 +1,4 @@
|
||||
import { TGatewayServiceFactory } from "@app/ee/services/gateway/gateway-service";
|
||||
import { logger } from "@app/lib/logger";
|
||||
import { OrgServiceActor } from "@app/lib/types";
|
||||
|
||||
@@ -11,12 +12,15 @@ type TGetAppConnectionFunc = (
|
||||
actor: OrgServiceActor
|
||||
) => Promise<THCVaultConnection>;
|
||||
|
||||
export const hcVaultConnectionService = (getAppConnection: TGetAppConnectionFunc) => {
|
||||
export const hcVaultConnectionService = (
|
||||
getAppConnection: TGetAppConnectionFunc,
|
||||
gatewayService: Pick<TGatewayServiceFactory, "fnGetGatewayClientTlsByGatewayId">
|
||||
) => {
|
||||
const listMounts = async (connectionId: string, actor: OrgServiceActor) => {
|
||||
const appConnection = await getAppConnection(AppConnection.HCVault, connectionId, actor);
|
||||
|
||||
try {
|
||||
const mounts = await listHCVaultMounts(appConnection);
|
||||
const mounts = await listHCVaultMounts(appConnection, gatewayService);
|
||||
return mounts;
|
||||
} catch (error) {
|
||||
logger.error(error, "Failed to establish connection with Hashicorp Vault");
|
||||
|
@@ -75,7 +75,7 @@ export const getTokenConfig = (tokenType: TokenType) => {
|
||||
};
|
||||
|
||||
export const tokenServiceFactory = ({ tokenDAL, userDAL, orgMembershipDAL }: TAuthTokenServiceFactoryDep) => {
|
||||
const createTokenForUser = async ({ type, userId, orgId }: TCreateTokenForUserDTO) => {
|
||||
const createTokenForUser = async ({ type, userId, orgId, aliasId }: TCreateTokenForUserDTO) => {
|
||||
const { token, ...tkCfg } = getTokenConfig(type);
|
||||
const appCfg = getConfig();
|
||||
const tokenHash = await crypto.hashing().createHash(token, appCfg.SALT_ROUNDS);
|
||||
@@ -88,7 +88,8 @@ export const tokenServiceFactory = ({ tokenDAL, userDAL, orgMembershipDAL }: TAu
|
||||
type,
|
||||
userId,
|
||||
orgId,
|
||||
triesLeft: tkCfg?.triesLeft
|
||||
triesLeft: tkCfg?.triesLeft,
|
||||
aliasId
|
||||
},
|
||||
tx
|
||||
);
|
||||
|
@@ -14,6 +14,7 @@ export type TCreateTokenForUserDTO = {
|
||||
type: TokenType;
|
||||
userId: string;
|
||||
orgId?: string;
|
||||
aliasId?: string;
|
||||
};
|
||||
|
||||
export type TCreateOrgInviteTokenDTO = {
|
||||
|
@@ -459,6 +459,18 @@ export const authLoginServiceFactory = ({
|
||||
});
|
||||
}
|
||||
|
||||
// Check if authEnforced is true and the current auth method is not an enforced method
|
||||
if (
|
||||
selectedOrg.authEnforced &&
|
||||
!isAuthMethodSaml(decodedToken.authMethod) &&
|
||||
decodedToken.authMethod !== AuthMethod.OIDC &&
|
||||
!(selectedOrg.bypassOrgAuthEnabled && selectedOrgMembership.userRole === OrgMembershipRole.Admin)
|
||||
) {
|
||||
throw new BadRequestError({
|
||||
message: "Login with the auth method required by your organization."
|
||||
});
|
||||
}
|
||||
|
||||
if (selectedOrg.googleSsoAuthEnforced && decodedToken.authMethod !== AuthMethod.GOOGLE) {
|
||||
const canBypass = selectedOrg.bypassOrgAuthEnabled && selectedOrgMembership.userRole === OrgMembershipRole.Admin;
|
||||
|
||||
|
@@ -64,6 +64,8 @@ type DBConfigurationColumn = {
|
||||
directoryUrl: string;
|
||||
accountEmail: string;
|
||||
hostedZoneId: string;
|
||||
eabKid?: string;
|
||||
eabHmacKey?: string;
|
||||
};
|
||||
|
||||
export const castDbEntryToAcmeCertificateAuthority = (
|
||||
@@ -89,7 +91,9 @@ export const castDbEntryToAcmeCertificateAuthority = (
|
||||
hostedZoneId: dbConfigurationCol.hostedZoneId
|
||||
},
|
||||
directoryUrl: dbConfigurationCol.directoryUrl,
|
||||
accountEmail: dbConfigurationCol.accountEmail
|
||||
accountEmail: dbConfigurationCol.accountEmail,
|
||||
eabKid: dbConfigurationCol.eabKid,
|
||||
eabHmacKey: dbConfigurationCol.eabHmacKey
|
||||
},
|
||||
status: ca.status as CaStatus
|
||||
};
|
||||
@@ -128,7 +132,7 @@ export const AcmeCertificateAuthorityFns = ({
|
||||
});
|
||||
}
|
||||
|
||||
const { dnsAppConnectionId, directoryUrl, accountEmail, dnsProviderConfig } = configuration;
|
||||
const { dnsAppConnectionId, directoryUrl, accountEmail, dnsProviderConfig, eabKid, eabHmacKey } = configuration;
|
||||
const appConnection = await appConnectionDAL.findById(dnsAppConnectionId);
|
||||
|
||||
if (!appConnection) {
|
||||
@@ -171,7 +175,9 @@ export const AcmeCertificateAuthorityFns = ({
|
||||
directoryUrl,
|
||||
accountEmail,
|
||||
dnsProvider: dnsProviderConfig.provider,
|
||||
hostedZoneId: dnsProviderConfig.hostedZoneId
|
||||
hostedZoneId: dnsProviderConfig.hostedZoneId,
|
||||
eabKid,
|
||||
eabHmacKey
|
||||
}
|
||||
},
|
||||
tx
|
||||
@@ -214,7 +220,7 @@ export const AcmeCertificateAuthorityFns = ({
|
||||
}) => {
|
||||
const updatedCa = await certificateAuthorityDAL.transaction(async (tx) => {
|
||||
if (configuration) {
|
||||
const { dnsAppConnectionId, directoryUrl, accountEmail, dnsProviderConfig } = configuration;
|
||||
const { dnsAppConnectionId, directoryUrl, accountEmail, dnsProviderConfig, eabKid, eabHmacKey } = configuration;
|
||||
const appConnection = await appConnectionDAL.findById(dnsAppConnectionId);
|
||||
|
||||
if (!appConnection) {
|
||||
@@ -254,7 +260,9 @@ export const AcmeCertificateAuthorityFns = ({
|
||||
directoryUrl,
|
||||
accountEmail,
|
||||
dnsProvider: dnsProviderConfig.provider,
|
||||
hostedZoneId: dnsProviderConfig.hostedZoneId
|
||||
hostedZoneId: dnsProviderConfig.hostedZoneId,
|
||||
eabKid,
|
||||
eabHmacKey
|
||||
}
|
||||
},
|
||||
tx
|
||||
@@ -354,10 +362,19 @@ export const AcmeCertificateAuthorityFns = ({
|
||||
|
||||
await blockLocalAndPrivateIpAddresses(acmeCa.configuration.directoryUrl);
|
||||
|
||||
const acmeClient = new acme.Client({
|
||||
const acmeClientOptions: acme.ClientOptions = {
|
||||
directoryUrl: acmeCa.configuration.directoryUrl,
|
||||
accountKey
|
||||
});
|
||||
};
|
||||
|
||||
if (acmeCa.configuration.eabKid && acmeCa.configuration.eabHmacKey) {
|
||||
acmeClientOptions.externalAccountBinding = {
|
||||
kid: acmeCa.configuration.eabKid,
|
||||
hmacKey: acmeCa.configuration.eabHmacKey
|
||||
};
|
||||
}
|
||||
|
||||
const acmeClient = new acme.Client(acmeClientOptions);
|
||||
|
||||
const alg = keyAlgorithmToAlgCfg(CertKeyAlgorithm.RSA_2048);
|
||||
|
||||
|
@@ -18,7 +18,9 @@ export const AcmeCertificateAuthorityConfigurationSchema = z.object({
|
||||
hostedZoneId: z.string().trim().min(1).describe(CertificateAuthorities.CONFIGURATIONS.ACME.hostedZoneId)
|
||||
}),
|
||||
directoryUrl: z.string().url().trim().min(1).describe(CertificateAuthorities.CONFIGURATIONS.ACME.directoryUrl),
|
||||
accountEmail: z.string().trim().min(1).describe(CertificateAuthorities.CONFIGURATIONS.ACME.accountEmail)
|
||||
accountEmail: z.string().trim().min(1).describe(CertificateAuthorities.CONFIGURATIONS.ACME.accountEmail),
|
||||
eabKid: z.string().trim().max(64).optional().describe(CertificateAuthorities.CONFIGURATIONS.ACME.eabKid),
|
||||
eabHmacKey: z.string().trim().max(512).optional().describe(CertificateAuthorities.CONFIGURATIONS.ACME.eabHmacKey)
|
||||
});
|
||||
|
||||
export const AcmeCertificateAuthorityCredentialsSchema = z.object({
|
||||
|
File diff suppressed because it is too large
@@ -0,0 +1,29 @@
|
||||
import { z } from "zod";
|
||||
|
||||
import { CaType } from "../certificate-authority-enums";
|
||||
import {
|
||||
BaseCertificateAuthoritySchema,
|
||||
GenericCreateCertificateAuthorityFieldsSchema,
|
||||
GenericUpdateCertificateAuthorityFieldsSchema
|
||||
} from "../certificate-authority-schemas";
|
||||
|
||||
export const AzureAdCsCertificateAuthorityConfigurationSchema = z.object({
|
||||
azureAdcsConnectionId: z.string().uuid().trim().describe("Azure ADCS Connection ID")
|
||||
});
|
||||
|
||||
export const AzureAdCsCertificateAuthoritySchema = BaseCertificateAuthoritySchema.extend({
|
||||
type: z.literal(CaType.AZURE_AD_CS),
|
||||
configuration: AzureAdCsCertificateAuthorityConfigurationSchema
|
||||
});
|
||||
|
||||
export const CreateAzureAdCsCertificateAuthoritySchema = GenericCreateCertificateAuthorityFieldsSchema(
|
||||
CaType.AZURE_AD_CS
|
||||
).extend({
|
||||
configuration: AzureAdCsCertificateAuthorityConfigurationSchema
|
||||
});
|
||||
|
||||
export const UpdateAzureAdCsCertificateAuthoritySchema = GenericUpdateCertificateAuthorityFieldsSchema(
|
||||
CaType.AZURE_AD_CS
|
||||
).extend({
|
||||
configuration: AzureAdCsCertificateAuthorityConfigurationSchema.optional()
|
||||
});
|
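Only the configuration sub-schema is exercised in this illustrative check; the UUID is a placeholder:

import { AzureAdCsCertificateAuthorityConfigurationSchema } from "./azure-ad-cs-certificate-authority-schemas";

const configuration = AzureAdCsCertificateAuthorityConfigurationSchema.parse({
  azureAdcsConnectionId: "3f9c2c64-1b2d-4c2e-9a51-0b6f4f6d8e11" // must be a UUID per the schema above
});

console.log(configuration.azureAdcsConnectionId);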
@@ -0,0 +1,13 @@
|
||||
import { z } from "zod";
|
||||
|
||||
import {
|
||||
AzureAdCsCertificateAuthoritySchema,
|
||||
CreateAzureAdCsCertificateAuthoritySchema,
|
||||
UpdateAzureAdCsCertificateAuthoritySchema
|
||||
} from "./azure-ad-cs-certificate-authority-schemas";
|
||||
|
||||
export type TAzureAdCsCertificateAuthority = z.infer<typeof AzureAdCsCertificateAuthoritySchema>;
|
||||
|
||||
export type TCreateAzureAdCsCertificateAuthorityDTO = z.infer<typeof CreateAzureAdCsCertificateAuthoritySchema>;
|
||||
|
||||
export type TUpdateAzureAdCsCertificateAuthorityDTO = z.infer<typeof UpdateAzureAdCsCertificateAuthoritySchema>;
|
@@ -1,6 +1,7 @@
|
||||
export enum CaType {
|
||||
INTERNAL = "internal",
|
||||
ACME = "acme"
|
||||
ACME = "acme",
|
||||
AZURE_AD_CS = "azure-ad-cs"
|
||||
}
|
||||
|
||||
export enum InternalCaType {
|
||||
@@ -17,3 +18,9 @@ export enum CaStatus {
|
||||
export enum CaRenewalType {
|
||||
EXISTING = "existing"
|
||||
}
|
||||
|
||||
export enum CaCapability {
|
||||
ISSUE_CERTIFICATES = "issue-certificates",
|
||||
REVOKE_CERTIFICATES = "revoke-certificates",
|
||||
RENEW_CERTIFICATES = "renew-certificates"
|
||||
}
|
||||
|
@@ -1,6 +1,29 @@
-import { CaType } from "./certificate-authority-enums";
+import { CaCapability, CaType } from "./certificate-authority-enums";
 
 export const CERTIFICATE_AUTHORITIES_TYPE_MAP: Record<CaType, string> = {
   [CaType.INTERNAL]: "Internal",
-  [CaType.ACME]: "ACME"
+  [CaType.ACME]: "ACME",
+  [CaType.AZURE_AD_CS]: "Azure AD Certificate Service"
 };
+
+export const CERTIFICATE_AUTHORITIES_CAPABILITIES_MAP: Record<CaType, CaCapability[]> = {
+  [CaType.INTERNAL]: [
+    CaCapability.ISSUE_CERTIFICATES,
+    CaCapability.REVOKE_CERTIFICATES,
+    CaCapability.RENEW_CERTIFICATES
+  ],
+  [CaType.ACME]: [CaCapability.ISSUE_CERTIFICATES, CaCapability.REVOKE_CERTIFICATES, CaCapability.RENEW_CERTIFICATES],
+  [CaType.AZURE_AD_CS]: [
+    CaCapability.ISSUE_CERTIFICATES,
+    CaCapability.RENEW_CERTIFICATES
+    // Note: REVOKE_CERTIFICATES intentionally omitted - not supported by ADCS connector
+  ]
+};
+
+/**
+ * Check if a certificate authority type supports a specific capability
+ */
+export const caSupportsCapability = (caType: CaType, capability: CaCapability): boolean => {
+  const capabilities = CERTIFICATE_AUTHORITIES_CAPABILITIES_MAP[caType] || [];
+  return capabilities.includes(capability);
+};
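A short usage sketch, not part of the changeset, of the helper above; the results follow directly from the capability map, and this is the check the certificate service relies on later in this diff before allowing revocation.

// Illustrative sketch only.
caSupportsCapability(CaType.ACME, CaCapability.REVOKE_CERTIFICATES); // true
caSupportsCapability(CaType.AZURE_AD_CS, CaCapability.REVOKE_CERTIFICATES); // false - omitted from the map above
caSupportsCapability(CaType.AZURE_AD_CS, CaCapability.RENEW_CERTIFICATES); // true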
@@ -21,6 +21,7 @@ import { TCertificateSecretDALFactory } from "../certificate/certificate-secret-
 import { TPkiSubscriberDALFactory } from "../pki-subscriber/pki-subscriber-dal";
 import { SubscriberOperationStatus } from "../pki-subscriber/pki-subscriber-types";
 import { AcmeCertificateAuthorityFns } from "./acme/acme-certificate-authority-fns";
+import { AzureAdCsCertificateAuthorityFns } from "./azure-ad-cs/azure-ad-cs-certificate-authority-fns";
 import { TCertificateAuthorityDALFactory } from "./certificate-authority-dal";
 import { CaType } from "./certificate-authority-enums";
 import { keyAlgorithmToAlgCfg } from "./certificate-authority-fns";
@@ -33,7 +34,7 @@ import {
 
 type TCertificateAuthorityQueueFactoryDep = {
   certificateAuthorityDAL: TCertificateAuthorityDALFactory;
-  appConnectionDAL: Pick<TAppConnectionDALFactory, "findById" | "update">;
+  appConnectionDAL: Pick<TAppConnectionDALFactory, "findById" | "update" | "updateById">;
   appConnectionService: Pick<TAppConnectionServiceFactory, "connectAppConnectionById">;
   externalCertificateAuthorityDAL: Pick<TExternalCertificateAuthorityDALFactory, "create" | "update">;
   keyStore: Pick<TKeyStoreFactory, "acquireLock" | "setItemWithExpiry" | "getItem">;
@@ -82,6 +83,19 @@ export const certificateAuthorityQueueFactory = ({
     projectDAL
   });
 
+  const azureAdCsFns = AzureAdCsCertificateAuthorityFns({
+    appConnectionDAL,
+    appConnectionService,
+    certificateAuthorityDAL,
+    externalCertificateAuthorityDAL,
+    certificateDAL,
+    certificateBodyDAL,
+    certificateSecretDAL,
+    kmsService,
+    pkiSubscriberDAL,
+    projectDAL
+  });
+
   // TODO 1: auto-periodic rotation
   // TODO 2: manual rotation
 
@@ -158,6 +172,13 @@ export const certificateAuthorityQueueFactory = ({
         lastOperationMessage: "Certificate ordered successfully",
         lastOperationAt: new Date()
       });
+    } else if (caType === CaType.AZURE_AD_CS) {
+      await azureAdCsFns.orderSubscriberCertificate(subscriberId);
+      await pkiSubscriberDAL.updateById(subscriberId, {
+        lastOperationStatus: SubscriberOperationStatus.SUCCESS,
+        lastOperationMessage: "Certificate ordered successfully",
+        lastOperationAt: new Date()
+      });
     }
   } catch (e: unknown) {
     if (e instanceof Error) {
@@ -22,6 +22,14 @@ import {
   TCreateAcmeCertificateAuthorityDTO,
   TUpdateAcmeCertificateAuthorityDTO
 } from "./acme/acme-certificate-authority-types";
+import {
+  AzureAdCsCertificateAuthorityFns,
+  castDbEntryToAzureAdCsCertificateAuthority
+} from "./azure-ad-cs/azure-ad-cs-certificate-authority-fns";
+import {
+  TCreateAzureAdCsCertificateAuthorityDTO,
+  TUpdateAzureAdCsCertificateAuthorityDTO
+} from "./azure-ad-cs/azure-ad-cs-certificate-authority-types";
 import { TCertificateAuthorityDALFactory } from "./certificate-authority-dal";
 import { CaType } from "./certificate-authority-enums";
 import {
@@ -34,7 +42,7 @@ import { TInternalCertificateAuthorityServiceFactory } from "./internal/internal
 import { TCreateInternalCertificateAuthorityDTO } from "./internal/internal-certificate-authority-types";
 
 type TCertificateAuthorityServiceFactoryDep = {
-  appConnectionDAL: Pick<TAppConnectionDALFactory, "findById" | "update">;
+  appConnectionDAL: Pick<TAppConnectionDALFactory, "findById" | "update" | "updateById">;
   appConnectionService: Pick<TAppConnectionServiceFactory, "connectAppConnectionById">;
   certificateAuthorityDAL: Pick<
     TCertificateAuthorityDALFactory,
@@ -91,6 +99,19 @@ export const certificateAuthorityServiceFactory = ({
     projectDAL
   });
 
+  const azureAdCsFns = AzureAdCsCertificateAuthorityFns({
+    appConnectionDAL,
+    appConnectionService,
+    certificateAuthorityDAL,
+    externalCertificateAuthorityDAL,
+    certificateDAL,
+    certificateBodyDAL,
+    certificateSecretDAL,
+    kmsService,
+    pkiSubscriberDAL,
+    projectDAL
+  });
+
   const createCertificateAuthority = async (
     { type, projectId, name, enableDirectIssuance, configuration, status }: TCreateCertificateAuthorityDTO,
     actor: OrgServiceActor
@@ -146,6 +167,17 @@ export const certificateAuthorityServiceFactory = ({
       });
     }
 
+    if (type === CaType.AZURE_AD_CS) {
+      return azureAdCsFns.createCertificateAuthority({
+        name,
+        projectId,
+        configuration: configuration as TCreateAzureAdCsCertificateAuthorityDTO["configuration"],
+        enableDirectIssuance,
+        status,
+        actor
+      });
+    }
+
     throw new BadRequestError({ message: "Invalid certificate authority type" });
   };
 
@@ -205,6 +237,10 @@ export const certificateAuthorityServiceFactory = ({
       return castDbEntryToAcmeCertificateAuthority(certificateAuthority);
     }
 
+    if (type === CaType.AZURE_AD_CS) {
+      return castDbEntryToAzureAdCsCertificateAuthority(certificateAuthority);
+    }
+
     throw new BadRequestError({ message: "Invalid certificate authority type" });
   };
 
@@ -249,6 +285,10 @@ export const certificateAuthorityServiceFactory = ({
       return acmeFns.listCertificateAuthorities({ projectId });
     }
 
+    if (type === CaType.AZURE_AD_CS) {
+      return azureAdCsFns.listCertificateAuthorities({ projectId });
+    }
+
     throw new BadRequestError({ message: "Invalid certificate authority type" });
   };
 
@@ -323,6 +363,17 @@ export const certificateAuthorityServiceFactory = ({
       });
     }
 
+    if (type === CaType.AZURE_AD_CS) {
+      return azureAdCsFns.updateCertificateAuthority({
+        id: certificateAuthority.id,
+        configuration: configuration as TUpdateAzureAdCsCertificateAuthorityDTO["configuration"],
+        enableDirectIssuance,
+        actor,
+        status,
+        name
+      });
+    }
+
     throw new BadRequestError({ message: "Invalid certificate authority type" });
   };
 
@@ -384,14 +435,54 @@ export const certificateAuthorityServiceFactory = ({
       return castDbEntryToAcmeCertificateAuthority(certificateAuthority);
     }
 
+    if (type === CaType.AZURE_AD_CS) {
+      return castDbEntryToAzureAdCsCertificateAuthority(certificateAuthority);
+    }
+
     throw new BadRequestError({ message: "Invalid certificate authority type" });
   };
 
+  const getAzureAdcsTemplates = async ({
+    caId,
+    projectId,
+    actor,
+    actorId,
+    actorAuthMethod,
+    actorOrgId
+  }: {
+    caId: string;
+    projectId: string;
+    actor: OrgServiceActor["type"];
+    actorId: string;
+    actorAuthMethod: OrgServiceActor["authMethod"];
+    actorOrgId?: string;
+  }) => {
+    const { permission } = await permissionService.getProjectPermission({
+      actor,
+      actorId,
+      projectId,
+      actorAuthMethod,
+      actorOrgId,
+      actionProjectType: ActionProjectType.CertificateManager
+    });
+
+    ForbiddenError.from(permission).throwUnlessCan(
+      ProjectPermissionActions.Read,
+      ProjectPermissionSub.CertificateAuthorities
+    );
+
+    return azureAdCsFns.getTemplates({
+      caId,
+      projectId
+    });
+  };
+
   return {
     createCertificateAuthority,
     findCertificateAuthorityByNameAndProjectId,
     listCertificateAuthoritiesByProjectId,
     updateCertificateAuthority,
-    deleteCertificateAuthority
+    deleteCertificateAuthority,
+    getAzureAdcsTemplates
   };
 };
@@ -1,13 +1,23 @@
 import { TAcmeCertificateAuthority, TAcmeCertificateAuthorityInput } from "./acme/acme-certificate-authority-types";
+import {
+  TAzureAdCsCertificateAuthority,
+  TCreateAzureAdCsCertificateAuthorityDTO
+} from "./azure-ad-cs/azure-ad-cs-certificate-authority-types";
 import { CaType } from "./certificate-authority-enums";
 import {
   TInternalCertificateAuthority,
   TInternalCertificateAuthorityInput
 } from "./internal/internal-certificate-authority-types";
 
-export type TCertificateAuthority = TInternalCertificateAuthority | TAcmeCertificateAuthority;
+export type TCertificateAuthority =
+  | TInternalCertificateAuthority
+  | TAcmeCertificateAuthority
+  | TAzureAdCsCertificateAuthority;
 
-export type TCertificateAuthorityInput = TInternalCertificateAuthorityInput | TAcmeCertificateAuthorityInput;
+export type TCertificateAuthorityInput =
+  | TInternalCertificateAuthorityInput
+  | TAcmeCertificateAuthorityInput
+  | TCreateAzureAdCsCertificateAuthorityDTO;
 
 export type TCreateCertificateAuthorityDTO = Omit<TCertificateAuthority, "id">;
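A small narrowing sketch, not from the changeset, of the widened union above; it assumes the internal and ACME members also expose a `type` discriminant, which their definitions (not shown in this diff) would need to provide.

// Illustrative sketch only (assumes every union member exposes `type`).
const describeCa = (ca: TCertificateAuthority): string => {
  if (ca.type === CaType.AZURE_AD_CS) {
    // narrowed to TAzureAdCsCertificateAuthority here
    return `Azure AD CS CA using connection ${ca.configuration.azureAdcsConnectionId}`;
  }
  return `${ca.type} certificate authority`;
};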
@@ -36,12 +36,18 @@ import { validateAndMapAltNameType } from "../certificate-authority-validators";
 import { TIssueCertWithTemplateDTO } from "./internal-certificate-authority-types";
 
 type TInternalCertificateAuthorityFnsDeps = {
-  certificateAuthorityDAL: Pick<TCertificateAuthorityDALFactory, "findByIdWithAssociatedCa" | "findById">;
+  certificateAuthorityDAL: Pick<
+    TCertificateAuthorityDALFactory,
+    "findByIdWithAssociatedCa" | "findById" | "create" | "transaction" | "updateById" | "findWithAssociatedCa"
+  >;
   certificateAuthorityCertDAL: Pick<TCertificateAuthorityCertDALFactory, "findById">;
   certificateAuthoritySecretDAL: Pick<TCertificateAuthoritySecretDALFactory, "findOne">;
   certificateAuthorityCrlDAL: Pick<TCertificateAuthorityCrlDALFactory, "findOne">;
   projectDAL: Pick<TProjectDALFactory, "findById" | "transaction" | "findOne" | "updateById">;
-  kmsService: Pick<TKmsServiceFactory, "decryptWithKmsKey" | "encryptWithKmsKey" | "generateKmsKey">;
+  kmsService: Pick<
+    TKmsServiceFactory,
+    "decryptWithKmsKey" | "encryptWithKmsKey" | "generateKmsKey" | "createCipherPairWithDataKey"
+  >;
   certificateDAL: Pick<TCertificateDALFactory, "create" | "transaction">;
   certificateBodyDAL: Pick<TCertificateBodyDALFactory, "create">;
   certificateSecretDAL: Pick<TCertificateSecretDALFactory, "create">;
@@ -14,6 +14,8 @@ import { TCertificateBodyDALFactory } from "@app/services/certificate/certificat
 import { TCertificateDALFactory } from "@app/services/certificate/certificate-dal";
 import { TCertificateAuthorityCertDALFactory } from "@app/services/certificate-authority/certificate-authority-cert-dal";
 import { TCertificateAuthorityDALFactory } from "@app/services/certificate-authority/certificate-authority-dal";
+import { CaCapability, CaType } from "@app/services/certificate-authority/certificate-authority-enums";
+import { caSupportsCapability } from "@app/services/certificate-authority/certificate-authority-maps";
 import { TCertificateAuthoritySecretDALFactory } from "@app/services/certificate-authority/certificate-authority-secret-dal";
 import { TKmsServiceFactory } from "@app/services/kms/kms-service";
 import { TPkiCollectionDALFactory } from "@app/services/pki-collection/pki-collection-dal";
@@ -184,9 +186,11 @@ export const certificateServiceFactory = ({
 
     const ca = await certificateAuthorityDAL.findByIdWithAssociatedCa(cert.caId);
 
-    if (ca.externalCa?.id) {
+    // Check if the CA type supports revocation
+    const caType = (ca.externalCa?.type as CaType) ?? CaType.INTERNAL;
+    if (!caSupportsCapability(caType, CaCapability.REVOKE_CERTIFICATES)) {
       throw new BadRequestError({
-        message: "Cannot revoke external certificates"
+        message: "Certificate revocation is not supported by this certificate authority type"
       });
     }
@@ -218,18 +222,37 @@
       }
     );
 
-    // rebuild CRL (TODO: move to interval-based cron job)
-    await rebuildCaCrl({
-      caId: ca.id,
-      certificateAuthorityDAL,
-      certificateAuthorityCrlDAL,
-      certificateAuthoritySecretDAL,
-      projectDAL,
-      certificateDAL,
-      kmsService
-    });
+    // Note: External CA revocation handling would go here for supported CA types
+    // Currently, only internal CAs and ACME CAs support revocation
 
-    return { revokedAt, cert, ca: expandInternalCa(ca) };
+    // rebuild CRL (TODO: move to interval-based cron job)
+    // Only rebuild CRL for internal CAs - external CAs manage their own CRLs
+    if (!ca.externalCa?.id) {
+      await rebuildCaCrl({
+        caId: ca.id,
+        certificateAuthorityDAL,
+        certificateAuthorityCrlDAL,
+        certificateAuthoritySecretDAL,
+        projectDAL,
+        certificateDAL,
+        kmsService
+      });
+    }
+
+    // Return appropriate CA format based on CA type
+    const caResult = ca.externalCa?.id
+      ? {
+          id: ca.id,
+          name: ca.name,
+          projectId: ca.projectId,
+          status: ca.status,
+          enableDirectIssuance: ca.enableDirectIssuance,
+          type: ca.externalCa.type,
+          externalCa: ca.externalCa
+        }
+      : expandInternalCa(ca);
+
+    return { revokedAt, cert, ca: caResult };
   };
 
   /**
@@ -1,12 +1,21 @@
 import https from "node:https";
 
 import axios, { AxiosInstance } from "axios";
 import { v4 as uuidv4 } from "uuid";
 
+import { TGatewayServiceFactory } from "@app/ee/services/gateway/gateway-service";
 import { BadRequestError } from "@app/lib/errors";
+import { GatewayProxyProtocol, withGatewayProxy } from "@app/lib/gateway";
 import { logger } from "@app/lib/logger";
 import { blockLocalAndPrivateIpAddresses } from "@app/lib/validator";
 
 import { InfisicalImportData, VaultMappingType } from "../external-migration-types";
 
+enum KvVersion {
+  V1 = "1",
+  V2 = "2"
+}
+
 type VaultData = {
   namespace: string;
   mount: string;
@@ -14,7 +23,42 @@ type VaultData = {
   secretData: Record<string, string>;
 };
 
-const vaultFactory = () => {
+const vaultFactory = (gatewayService: Pick<TGatewayServiceFactory, "fnGetGatewayClientTlsByGatewayId">) => {
+  const $gatewayProxyWrapper = async <T>(
+    inputs: {
+      gatewayId: string;
+      targetHost?: string;
+      targetPort?: number;
+    },
+    gatewayCallback: (host: string, port: number, httpsAgent?: https.Agent) => Promise<T>
+  ): Promise<T> => {
+    const relayDetails = await gatewayService.fnGetGatewayClientTlsByGatewayId(inputs.gatewayId);
+    const [relayHost, relayPort] = relayDetails.relayAddress.split(":");
+
+    const callbackResult = await withGatewayProxy(
+      async (port, httpsAgent) => {
+        const res = await gatewayCallback("http://localhost", port, httpsAgent);
+        return res;
+      },
+      {
+        protocol: GatewayProxyProtocol.Http,
+        targetHost: inputs.targetHost,
+        targetPort: inputs.targetPort,
+        relayHost,
+        relayPort: Number(relayPort),
+        identityId: relayDetails.identityId,
+        orgId: relayDetails.orgId,
+        tlsOptions: {
+          ca: relayDetails.certChain,
+          cert: relayDetails.certificate,
+          key: relayDetails.privateKey.toString()
+        }
+      }
+    );
+
+    return callbackResult;
+  };
+
   const getMounts = async (request: AxiosInstance) => {
     const response = await request
       .get<{
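A usage sketch, not part of the changeset, of how the wrapper above is meant to be called from inside vaultFactory (collectVaultData wires it up this way further down). The gateway ID, Vault host, and token are placeholders.

// Illustrative sketch only (placeholder values).
const listMountsThroughGateway = async (accessToken: string) =>
  $gatewayProxyWrapper(
    { gatewayId: "gateway-id", targetHost: "https://vault.internal.example", targetPort: 8200 },
    async (host, port, httpsAgent) => {
      // the callback talks to a local proxy endpoint that the gateway relays to the real Vault
      const request = axios.create({
        baseURL: `${host}:${port}`,
        headers: { "X-Vault-Token": accessToken },
        httpsAgent
      });
      return getMounts(request);
    }
  );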
@@ -31,11 +75,24 @@ const vaultFactory = () => {
 
   const getPaths = async (
     request: AxiosInstance,
-    { mountPath, secretPath = "" }: { mountPath: string; secretPath?: string }
+    { mountPath, secretPath = "" }: { mountPath: string; secretPath?: string },
+    kvVersion: KvVersion
   ) => {
     try {
-      // For KV v2: /v1/{mount}/metadata/{path}?list=true
-      const path = secretPath ? `${mountPath}/metadata/${secretPath}` : `${mountPath}/metadata`;
+      if (kvVersion === KvVersion.V2) {
+        // For KV v2: /v1/{mount}/metadata/{path}?list=true
+        const path = secretPath ? `${mountPath}/metadata/${secretPath}` : `${mountPath}/metadata`;
+        const response = await request.get<{
+          data: {
+            keys: string[];
+          };
+        }>(`/v1/${path}?list=true`);
+
+        return response.data.data.keys;
+      }
+
+      // kv version v1: /v1/{mount}?list=true
+      const path = secretPath ? `${mountPath}/${secretPath}` : mountPath;
       const response = await request.get<{
         data: {
           keys: string[];
@@ -56,21 +113,42 @@ const vaultFactory = () => {
 
   const getSecrets = async (
     request: AxiosInstance,
-    { mountPath, secretPath }: { mountPath: string; secretPath: string }
+    { mountPath, secretPath }: { mountPath: string; secretPath: string },
+    kvVersion: KvVersion
   ) => {
-    // For KV v2: /v1/{mount}/data/{path}
-    const response = await request
-      .get<{
-        data: {
-          data: Record<string, string>; // KV v2 has nested data structure
-          metadata: {
-            created_time: string;
-            deletion_time: string;
-            destroyed: boolean;
-            version: number;
-          };
-        };
-      }>(`/v1/${mountPath}/data/${secretPath}`)
+    if (kvVersion === KvVersion.V2) {
+      // For KV v2: /v1/{mount}/data/{path}
+      const response = await request
+        .get<{
+          data: {
+            data: Record<string, string>; // KV v2 has nested data structure
+            metadata: {
+              created_time: string;
+              deletion_time: string;
+              destroyed: boolean;
+              version: number;
+            };
+          };
+        }>(`/v1/${mountPath}/data/${secretPath}`)
+        .catch((err) => {
+          if (axios.isAxiosError(err)) {
+            logger.error(err.response?.data, "External migration: Failed to get Vault secret");
+          }
+          throw err;
+        });
+
+      return response.data.data.data;
+    }
+
+    // kv version v1
+
+    const response = await request
+      .get<{
+        data: Record<string, string>; // KV v1 has flat data structure
+        lease_duration: number;
+        lease_id: string;
+        renewable: boolean;
+      }>(`/v1/${mountPath}/${secretPath}`)
       .catch((err) => {
        if (axios.isAxiosError(err)) {
          logger.error(err.response?.data, "External migration: Failed to get Vault secret");
@@ -78,7 +156,7 @@ const vaultFactory = () => {
        }
        throw err;
      });
 
-    return response.data.data.data;
+    return response.data.data;
   };
 
   // helper function to check if a mount is KV v2 (will be useful if we add support for Vault KV v1)
@@ -89,9 +167,10 @@ const vaultFactory = () => {
   const recursivelyGetAllPaths = async (
     request: AxiosInstance,
     mountPath: string,
+    kvVersion: KvVersion,
     currentPath: string = ""
   ): Promise<string[]> => {
-    const paths = await getPaths(request, { mountPath, secretPath: currentPath });
+    const paths = await getPaths(request, { mountPath, secretPath: currentPath }, kvVersion);
 
     if (paths === null || paths.length === 0) {
       return [];
@@ -105,7 +184,7 @@ const vaultFactory = () => {
 
       if (path.endsWith("/")) {
         // it's a folder so we recurse into it
-        const subSecrets = await recursivelyGetAllPaths(request, mountPath, fullItemPath);
+        const subSecrets = await recursivelyGetAllPaths(request, mountPath, kvVersion, fullItemPath);
         allSecrets.push(...subSecrets);
       } else {
         // it's a secret so we add it to our results
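A brief summary, not from the changeset, of the Vault endpoints the helpers above call depending on the mount's KV engine version.

// List keys    KV v2: GET /v1/{mount}/metadata/{path}?list=true
//              KV v1: GET /v1/{mount}/{path}?list=true
// Read secret  KV v2: GET /v1/{mount}/data/{path}  -> payload nested under response.data.data.data
//              KV v1: GET /v1/{mount}/{path}       -> payload under response.data.data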
@@ -119,60 +198,93 @@ const vaultFactory = () => {
   async function collectVaultData({
     baseUrl,
     namespace,
-    accessToken
+    accessToken,
+    gatewayId
   }: {
     baseUrl: string;
     namespace?: string;
     accessToken: string;
+    gatewayId?: string;
   }): Promise<VaultData[]> {
-    const request = axios.create({
-      baseURL: baseUrl,
-      headers: {
-        "X-Vault-Token": accessToken,
-        ...(namespace ? { "X-Vault-Namespace": namespace } : {})
-      }
-    });
-
-    const allData: VaultData[] = [];
-
-    // Get all mounts in this namespace
-    const mounts = await getMounts(request);
-
-    for (const mount of Object.keys(mounts)) {
-      if (!mount.endsWith("/")) {
-        delete mounts[mount];
-      }
-    }
-
-    for await (const [mountPath, mountInfo] of Object.entries(mounts)) {
-      // skip non-KV mounts
-      if (!mountInfo.type.startsWith("kv")) {
-        // eslint-disable-next-line no-continue
-        continue;
-      }
-
-      // get all paths in this mount
-      const paths = await recursivelyGetAllPaths(request, `${mountPath.replace(/\/$/, "")}`);
-
-      const cleanMountPath = mountPath.replace(/\/$/, "");
-
-      for await (const secretPath of paths) {
-        // get the actual secret data
-        const secretData = await getSecrets(request, {
-          mountPath: cleanMountPath,
-          secretPath: secretPath.replace(`${cleanMountPath}/`, "")
-        });
-
-        allData.push({
-          namespace: namespace || "",
-          mount: mountPath.replace(/\/$/, ""),
-          path: secretPath.replace(`${cleanMountPath}/`, ""),
-          secretData
-        });
-      }
-    }
-
-    return allData;
+    const getData = async (host: string, port?: number, httpsAgent?: https.Agent) => {
+      const allData: VaultData[] = [];
+
+      const request = axios.create({
+        baseURL: port ? `${host}:${port}` : host,
+        headers: {
+          "X-Vault-Token": accessToken,
+          ...(namespace ? { "X-Vault-Namespace": namespace } : {})
+        },
+        httpsAgent
+      });
+
+      // Get all mounts in this namespace
+      const mounts = await getMounts(request);
+
+      for (const mount of Object.keys(mounts)) {
+        if (!mount.endsWith("/")) {
+          delete mounts[mount];
+        }
+      }
+
+      for await (const [mountPath, mountInfo] of Object.entries(mounts)) {
+        // skip non-KV mounts
+        if (!mountInfo.type.startsWith("kv")) {
+          // eslint-disable-next-line no-continue
+          continue;
+        }
+
+        const kvVersion = mountInfo.options?.version === "2" ? KvVersion.V2 : KvVersion.V1;
+
+        // get all paths in this mount
+        const paths = await recursivelyGetAllPaths(request, `${mountPath.replace(/\/$/, "")}`, kvVersion);
+
+        const cleanMountPath = mountPath.replace(/\/$/, "");
+
+        for await (const secretPath of paths) {
+          // get the actual secret data
+          const secretData = await getSecrets(
+            request,
+            {
+              mountPath: cleanMountPath,
+              secretPath: secretPath.replace(`${cleanMountPath}/`, "")
+            },
+            kvVersion
+          );
+
+          allData.push({
+            namespace: namespace || "",
+            mount: mountPath.replace(/\/$/, ""),
+            path: secretPath.replace(`${cleanMountPath}/`, ""),
+            secretData
+          });
+        }
+      }
+
+      return allData;
+    };
+
+    let data;
+
+    if (gatewayId) {
+      const url = new URL(baseUrl);
+
+      const { port, protocol, hostname } = url;
+      const cleanedProtocol = protocol.slice(0, -1);
+
+      data = await $gatewayProxyWrapper(
+        {
+          gatewayId,
+          targetHost: `${cleanedProtocol}://${hostname}`,
+          targetPort: port ? Number(port) : 8200 // 8200, default port for Vault self-hosted/dedicated
+        },
+        getData
+      );
+    } else {
+      data = await getData(baseUrl);
+    }
+
+    return data;
   }
 
   return {
@@ -296,17 +408,126 @@ export const transformToInfisicalFormatNamespaceToProjects = (
   };
 };
 
-export const importVaultDataFn = async ({
-  vaultAccessToken,
-  vaultNamespace,
-  vaultUrl,
-  mappingType
-}: {
-  vaultAccessToken: string;
-  vaultNamespace?: string;
-  vaultUrl: string;
-  mappingType: VaultMappingType;
-}) => {
+export const transformToInfisicalFormatKeyVaultToProjectsCustomC1 = (vaultData: VaultData[]): InfisicalImportData => {
+  const projects: Array<{ name: string; id: string }> = [];
+  const environments: Array<{ name: string; id: string; projectId: string; envParentId?: string }> = [];
+  const folders: Array<{ id: string; name: string; environmentId: string; parentFolderId?: string }> = [];
+  const secrets: Array<{ id: string; name: string; environmentId: string; value: string; folderId?: string }> = [];
+
+  // track created entities to avoid duplicates
+  const projectMap = new Map<string, string>(); // team name -> projectId
+  const environmentMap = new Map<string, string>(); // team-name:envName -> environmentId
+  const folderMap = new Map<string, string>(); // team-name:envName:folderPath -> folderId
+
+  for (const data of vaultData) {
+    const { path, secretData } = data;
+
+    const pathParts = path.split("/").filter(Boolean);
+    if (pathParts.length < 2) {
+      // eslint-disable-next-line no-continue
+      continue;
+    }
+
+    // first level: environment (dev, prod, staging, etc.)
+    const environmentName = pathParts[0];
+    // second level: team name (team1, team2, etc.)
+    const teamName = pathParts[1];
+    // remaining parts: folder structure
+    const folderParts = pathParts.slice(2);
+
+    // create project (team) if it doesn't exist
+    if (!projectMap.has(teamName)) {
+      const projectId = uuidv4();
+      projectMap.set(teamName, projectId);
+      projects.push({
+        name: teamName,
+        id: projectId
+      });
+    }
+    const projectId = projectMap.get(teamName)!;
+
+    // create environment (dev, prod, etc.) for team
+    const envKey = `${teamName}:${environmentName}`;
+    if (!environmentMap.has(envKey)) {
+      const environmentId = uuidv4();
+      environmentMap.set(envKey, environmentId);
+      environments.push({
+        name: environmentName,
+        id: environmentId,
+        projectId
+      });
+    }
+    const environmentId = environmentMap.get(envKey)!;
+
+    // create folder structure for path segments
+    let currentFolderId: string | undefined;
+    let currentPath = "";
+
+    for (const folderName of folderParts) {
+      currentPath = currentPath ? `${currentPath}/${folderName}` : folderName;
+      const folderKey = `${teamName}:${environmentName}:${currentPath}`;
+
+      if (!folderMap.has(folderKey)) {
+        const folderId = uuidv4();
+        folderMap.set(folderKey, folderId);
+        folders.push({
+          id: folderId,
+          name: folderName,
+          environmentId,
+          parentFolderId: currentFolderId || environmentId
+        });
+        currentFolderId = folderId;
+      } else {
+        currentFolderId = folderMap.get(folderKey)!;
+      }
+    }
+
+    for (const [key, value] of Object.entries(secretData)) {
+      secrets.push({
+        id: uuidv4(),
+        name: key,
+        environmentId,
+        value: String(value),
+        folderId: currentFolderId
+      });
+    }
+  }
+
+  return {
+    projects,
+    environments,
+    folders,
+    secrets
+  };
+};
+
+// refer to internal doc for more details on which IDs belong to which orgs.
+// when it's a custom migration, then it doesn't matter which mapping type is used (as of now).
+export const vaultMigrationTransformMappings: Record<
+  string,
+  (vaultData: VaultData[], mappingType: VaultMappingType) => InfisicalImportData
+> = {
+  "68c57ab3-cea5-41fc-ae38-e156b10c14d2": transformToInfisicalFormatKeyVaultToProjectsCustomC1
+} as const;
+
+export const importVaultDataFn = async (
+  {
+    vaultAccessToken,
+    vaultNamespace,
+    vaultUrl,
+    mappingType,
+    gatewayId,
+    orgId
+  }: {
+    vaultAccessToken: string;
+    vaultNamespace?: string;
+    vaultUrl: string;
+    mappingType: VaultMappingType;
+    gatewayId?: string;
+    orgId: string;
+  },
+  { gatewayService }: { gatewayService: Pick<TGatewayServiceFactory, "fnGetGatewayClientTlsByGatewayId"> }
+) => {
   await blockLocalAndPrivateIpAddresses(vaultUrl);
 
   if (mappingType === VaultMappingType.Namespace && !vaultNamespace) {
@@ -315,15 +536,33 @@ export const importVaultDataFn = async ({
     });
   }
 
-  const vaultApi = vaultFactory();
+  let transformFn: (vaultData: VaultData[], mappingType: VaultMappingType) => InfisicalImportData;
+
+  if (mappingType === VaultMappingType.Custom) {
+    transformFn = vaultMigrationTransformMappings[orgId];
+
+    if (!transformFn) {
+      throw new BadRequestError({
+        message: "Please contact our sales team to enable custom vault migrations."
+      });
+    }
+  } else {
+    transformFn = transformToInfisicalFormatNamespaceToProjects;
+  }
+
+  logger.info(
+    { orgId, mappingType },
+    `[importVaultDataFn]: Running ${orgId in vaultMigrationTransformMappings ? "custom" : "default"} transform`
+  );
+
+  const vaultApi = vaultFactory(gatewayService);
 
   const vaultData = await vaultApi.collectVaultData({
     accessToken: vaultAccessToken,
     baseUrl: vaultUrl,
-    namespace: vaultNamespace
+    namespace: vaultNamespace,
+    gatewayId
   });
 
-  const infisicalData = transformToInfisicalFormatNamespaceToProjects(vaultData, mappingType);
-
-  return infisicalData;
+  return transformFn(vaultData, mappingType);
 };
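A worked example, not from the changeset, of how the custom transform above maps one collected Vault secret; the path and key are made up.

// For a Vault secret collected at path "prod/team1/payments/stripe" with data { API_KEY: "..." }:
//   environment: "prod"       (first path segment)
//   project:     "team1"      (second path segment)
//   folders:     "payments" -> "stripe" (remaining segments, chained via parentFolderId)
//   secrets:     one entry named "API_KEY" placed in the deepest folder ("stripe")
// Paths with fewer than two segments are skipped, and this transform only runs for the
// org ID registered in vaultMigrationTransformMappings when mappingType is Custom.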
@@ -1,17 +1,30 @@
 import { OrgMembershipRole } from "@app/db/schemas";
+import { TGatewayServiceFactory } from "@app/ee/services/gateway/gateway-service";
 import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service-types";
 import { crypto } from "@app/lib/crypto/cryptography";
 import { BadRequestError, ForbiddenRequestError } from "@app/lib/errors";
 
 import { TUserDALFactory } from "../user/user-dal";
-import { decryptEnvKeyDataFn, importVaultDataFn, parseEnvKeyDataFn } from "./external-migration-fns";
+import {
+  decryptEnvKeyDataFn,
+  importVaultDataFn,
+  parseEnvKeyDataFn,
+  vaultMigrationTransformMappings
+} from "./external-migration-fns";
 import { TExternalMigrationQueueFactory } from "./external-migration-queue";
-import { ExternalPlatforms, TImportEnvKeyDataDTO, TImportVaultDataDTO } from "./external-migration-types";
+import {
+  ExternalMigrationProviders,
+  ExternalPlatforms,
+  THasCustomVaultMigrationDTO,
+  TImportEnvKeyDataDTO,
+  TImportVaultDataDTO
+} from "./external-migration-types";
 
 type TExternalMigrationServiceFactoryDep = {
   permissionService: TPermissionServiceFactory;
   externalMigrationQueue: TExternalMigrationQueueFactory;
   userDAL: Pick<TUserDALFactory, "findById">;
+  gatewayService: Pick<TGatewayServiceFactory, "fnGetGatewayClientTlsByGatewayId">;
 };
 
 export type TExternalMigrationServiceFactory = ReturnType<typeof externalMigrationServiceFactory>;
@@ -19,7 +32,8 @@ export type TExternalMigrationServiceFactory = ReturnType<typeof externalMigrati
 export const externalMigrationServiceFactory = ({
   permissionService,
   externalMigrationQueue,
-  userDAL
+  userDAL,
+  gatewayService
 }: TExternalMigrationServiceFactoryDep) => {
   const importEnvKeyData = async ({
     decryptionKey,
@@ -72,6 +86,7 @@ export const externalMigrationServiceFactory = ({
     vaultNamespace,
     mappingType,
     vaultUrl,
+    gatewayId,
     actor,
     actorId,
     actorOrgId,
@@ -91,12 +106,19 @@ export const externalMigrationServiceFactory = ({
 
     const user = await userDAL.findById(actorId);
 
-    const vaultData = await importVaultDataFn({
-      vaultAccessToken,
-      vaultNamespace,
-      vaultUrl,
-      mappingType
-    });
+    const vaultData = await importVaultDataFn(
+      {
+        vaultAccessToken,
+        vaultNamespace,
+        vaultUrl,
+        mappingType,
+        gatewayId,
+        orgId: actorOrgId
+      },
+      {
+        gatewayService
+      }
+    );
 
     const stringifiedJson = JSON.stringify({
       data: vaultData,
@@ -117,8 +139,37 @@ export const externalMigrationServiceFactory = ({
     });
   };
 
+  const hasCustomVaultMigration = async ({
+    actor,
+    actorId,
+    actorOrgId,
+    actorAuthMethod,
+    provider
+  }: THasCustomVaultMigrationDTO) => {
+    const { membership } = await permissionService.getOrgPermission(
+      actor,
+      actorId,
+      actorOrgId,
+      actorAuthMethod,
+      actorOrgId
+    );
+
+    if (membership.role !== OrgMembershipRole.Admin) {
+      throw new ForbiddenRequestError({ message: "Only admins can check custom migration status" });
+    }
+
+    if (provider !== ExternalMigrationProviders.Vault) {
+      throw new BadRequestError({
+        message: "Invalid provider. Vault is the only supported provider for custom migrations."
+      });
+    }
+
+    return actorOrgId in vaultMigrationTransformMappings;
+  };
+
   return {
     importEnvKeyData,
-    importVaultData
+    importVaultData,
+    hasCustomVaultMigration
   };
 };
Some files were not shown because too many files have changed in this diff.