Mirror of https://github.com/Infisical/infisical.git (synced 2025-08-16 20:48:26 +00:00)

Compare commits: improve-in...log-github (83 commits)
Commits (SHA1):
5a3aa3d608, 95b327de50, a3c36f82f3, 42612da57d, f63c07d538, 98a08d136e, 6c74b875f3, 793cd4c144, ec0be1166f, 899d01237c,
ff5dbe74fd, 24004084f2, 0e401ece73, c4e1651df7, 514c7596db, 9fbdede82c, e519637e89, ba393b0498, 4150f81d83, a45bba8537,
fe7e8e7240, cf54365022, 4f26365c21, c974df104e, e88fdc957e, 55e5360dd4, 77a8cd9efc, de2c1c5560, 52f773c647, 79de7c5f08,
3877fe524d, 4c5df70790, 5645dd2b8d, 0d55195561, 1c0caab469, ed9dfd2974, 7f72037d77, 9928ca17ea, 2cbd66e804, 7357d377e1,
149cecd805, c80fd55a74, 93e7723b48, 573b990aa3, e15086edc0, 4a55ecbe12, 13ef3809bd, fb49c9250a, 5ced7fa923, 5ffd42378a,
1e29d550be, f995708e44, c266d68993, c7c8107f85, b906fe34a1, bec1fefee8, cd03107a60, 07965de1db, b20ff0f029, 691cbe0a4f,
0787128803, 837158e344, 03bd1471b2, f53c39f65b, 092695089d, 2d80681597, cf23f98170, c4c8e121f0, 0701c996e5, 4ca6f165b7,
b9dd565926, 136b0bdcb5, 7266d1f310, 9c6ec807cb, 5fcae35fae, 359e19f804, 2aa548c7dc, 4f00fc6777, 82b765553c, 8972521716,
81b45b24ec, f2b0e4ae37, b4ed1fa96a
@@ -55,6 +55,8 @@ USER non-root-user
 ##
 FROM base AS backend-build
 
+ENV ChrystokiConfigurationPath=/usr/safenet/lunaclient/
+
 WORKDIR /app
 
 # Install all required dependencies for build
@@ -84,6 +86,8 @@ RUN npm run build
 # Production stage
 FROM base AS backend-runner
 
+ENV ChrystokiConfigurationPath=/usr/safenet/lunaclient/
+
 WORKDIR /app
 
 # Install all required dependencies for runtime
@@ -112,6 +116,11 @@ RUN mkdir frontend-build
 FROM base AS production
 
 RUN apt-get update && apt-get install -y \
+    build-essential \
+    autoconf \
+    automake \
+    libtool \
+    libssl-dev \
     ca-certificates \
     bash \
     curl \
@@ -171,6 +180,7 @@ ENV NODE_ENV production
 ENV STANDALONE_BUILD true
 ENV STANDALONE_MODE true
 ENV NODE_OPTIONS="--max-old-space-size=1024"
+ENV ChrystokiConfigurationPath=/usr/safenet/lunaclient/
 
 WORKDIR /backend
 

backend/package-lock.json (generated, 244 lines changed)

@@ -7,7 +7,6 @@
     "": {
      "name": "backend",
      "version": "1.0.0",
-     "hasInstallScript": true,
      "license": "ISC",
      "dependencies": {
        "@aws-sdk/client-elasticache": "^3.637.0",
@@ -34,11 +33,12 @@
        "@gitbeaker/rest": "^42.5.0",
        "@google-cloud/kms": "^4.5.0",
        "@infisical/quic": "^1.0.8",
-       "@node-saml/passport-saml": "^5.0.1",
+       "@node-saml/passport-saml": "^5.1.0",
        "@octokit/auth-app": "^7.1.1",
        "@octokit/core": "^5.2.1",
        "@octokit/plugin-paginate-graphql": "^4.0.1",
        "@octokit/plugin-retry": "^5.0.5",
+       "@octokit/request": "8.4.1",
        "@octokit/rest": "^20.0.2",
        "@octokit/webhooks-types": "^7.3.1",
        "@octopusdeploy/api-client": "^3.4.1",
@@ -9574,20 +9574,20 @@
      }
    },
    "node_modules/@node-saml/node-saml": {
-     "version": "5.0.1",
+     "version": "5.1.0",
-     "resolved": "https://registry.npmjs.org/@node-saml/node-saml/-/node-saml-5.0.1.tgz",
+     "resolved": "https://registry.npmjs.org/@node-saml/node-saml/-/node-saml-5.1.0.tgz",
-     "integrity": "sha512-YQzFPEC+CnsfO9AFYnwfYZKIzOLx3kITaC1HrjHVLTo6hxcQhc+LgHODOMvW4VCV95Gwrz1MshRUWCPzkDqmnA==",
+     "integrity": "sha512-t3cJnZ4aC7HhPZ6MGylGZULvUtBOZ6FzuUndaHGXjmIZHXnLfC/7L8a57O9Q9V7AxJGKAiRM5zu2wNm9EsvQpw==",
      "license": "MIT",
      "dependencies": {
        "@types/debug": "^4.1.12",
-       "@types/qs": "^6.9.11",
+       "@types/qs": "^6.9.18",
        "@types/xml-encryption": "^1.2.4",
        "@types/xml2js": "^0.4.14",
        "@xmldom/is-dom-node": "^1.0.1",
        "@xmldom/xmldom": "^0.8.10",
-       "debug": "^4.3.4",
+       "debug": "^4.4.0",
-       "xml-crypto": "^6.0.1",
+       "xml-crypto": "^6.1.2",
-       "xml-encryption": "^3.0.2",
+       "xml-encryption": "^3.1.0",
        "xml2js": "^0.6.2",
        "xmlbuilder": "^15.1.1",
        "xpath": "^0.0.34"
@@ -9597,9 +9597,9 @@
      }
    },
    "node_modules/@node-saml/node-saml/node_modules/debug": {
-     "version": "4.4.0",
+     "version": "4.4.1",
-     "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.0.tgz",
+     "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.1.tgz",
-     "integrity": "sha512-6WTZ/IxCY/T6BALoZHaE4ctp9xm+Z5kY/pzYaCHRFeyVhojxlrm+46y68HA6hr0TcwEssoxNiDEUJQjfPZ/RYA==",
+     "integrity": "sha512-KcKCqiftBJcZr++7ykoDIEwSa3XWowTfNPo92BYxjXiyYEVrUQh2aLyhxBCwww+heortUFxEJYcRzosstTEBYQ==",
      "license": "MIT",
      "dependencies": {
        "ms": "^2.1.3"
@@ -9636,14 +9636,14 @@
      }
    },
    "node_modules/@node-saml/passport-saml": {
-     "version": "5.0.1",
+     "version": "5.1.0",
-     "resolved": "https://registry.npmjs.org/@node-saml/passport-saml/-/passport-saml-5.0.1.tgz",
+     "resolved": "https://registry.npmjs.org/@node-saml/passport-saml/-/passport-saml-5.1.0.tgz",
-     "integrity": "sha512-fMztg3zfSnjLEgxvpl6HaDMNeh0xeQX4QHiF9e2Lsie2dc4qFE37XYbQZhVmn8XJ2awPpSWLQ736UskYgGU8lQ==",
+     "integrity": "sha512-pBm+iFjv9eihcgeJuSUs4c0AuX1QEFdHwP8w1iaWCfDzXdeWZxUBU5HT2bY2S4dvNutcy+A9hYsH7ZLBGtgwDg==",
      "license": "MIT",
      "dependencies": {
-       "@node-saml/node-saml": "^5.0.1",
+       "@node-saml/node-saml": "^5.1.0",
-       "@types/express": "^4.17.21",
+       "@types/express": "^4.17.23",
-       "@types/passport": "^1.0.16",
+       "@types/passport": "^1.0.17",
        "@types/passport-strategy": "^0.2.38",
        "passport": "^0.7.0",
        "passport-strategy": "^1.0.0"
@@ -9778,18 +9778,6 @@
        "node": ">= 18"
      }
    },
-   "node_modules/@octokit/auth-app/node_modules/@octokit/endpoint": {
-     "version": "10.1.1",
-     "resolved": "https://registry.npmjs.org/@octokit/endpoint/-/endpoint-10.1.1.tgz",
-     "integrity": "sha512-JYjh5rMOwXMJyUpj028cu0Gbp7qe/ihxfJMLc8VZBMMqSwLgOxDI1911gV4Enl1QSavAQNJcwmwBF9M0VvLh6Q==",
-     "dependencies": {
-       "@octokit/types": "^13.0.0",
-       "universal-user-agent": "^7.0.2"
-     },
-     "engines": {
-       "node": ">= 18"
-     }
-   },
    "node_modules/@octokit/auth-app/node_modules/@octokit/openapi-types": {
      "version": "22.2.0",
      "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-22.2.0.tgz",
@@ -9836,11 +9824,6 @@
        "node": "14 || >=16.14"
      }
    },
-   "node_modules/@octokit/auth-app/node_modules/universal-user-agent": {
-     "version": "7.0.2",
-     "resolved": "https://registry.npmjs.org/universal-user-agent/-/universal-user-agent-7.0.2.tgz",
-     "integrity": "sha512-0JCqzSKnStlRRQfCdowvqy3cy0Dvtlb8xecj/H8JFZuCze4rwjPZQOgvFvn0Ws/usCHQFGpyr+pB9adaGwXn4Q=="
-   },
    "node_modules/@octokit/auth-oauth-app": {
      "version": "8.1.1",
      "resolved": "https://registry.npmjs.org/@octokit/auth-oauth-app/-/auth-oauth-app-8.1.1.tgz",
@@ -9856,18 +9839,6 @@
        "node": ">= 18"
      }
    },
-   "node_modules/@octokit/auth-oauth-app/node_modules/@octokit/endpoint": {
-     "version": "10.1.1",
-     "resolved": "https://registry.npmjs.org/@octokit/endpoint/-/endpoint-10.1.1.tgz",
-     "integrity": "sha512-JYjh5rMOwXMJyUpj028cu0Gbp7qe/ihxfJMLc8VZBMMqSwLgOxDI1911gV4Enl1QSavAQNJcwmwBF9M0VvLh6Q==",
-     "dependencies": {
-       "@octokit/types": "^13.0.0",
-       "universal-user-agent": "^7.0.2"
-     },
-     "engines": {
-       "node": ">= 18"
-     }
-   },
    "node_modules/@octokit/auth-oauth-app/node_modules/@octokit/openapi-types": {
      "version": "22.2.0",
      "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-22.2.0.tgz",
@@ -9906,11 +9877,6 @@
        "@octokit/openapi-types": "^22.2.0"
      }
    },
-   "node_modules/@octokit/auth-oauth-app/node_modules/universal-user-agent": {
-     "version": "7.0.2",
-     "resolved": "https://registry.npmjs.org/universal-user-agent/-/universal-user-agent-7.0.2.tgz",
-     "integrity": "sha512-0JCqzSKnStlRRQfCdowvqy3cy0Dvtlb8xecj/H8JFZuCze4rwjPZQOgvFvn0Ws/usCHQFGpyr+pB9adaGwXn4Q=="
-   },
    "node_modules/@octokit/auth-oauth-device": {
      "version": "7.1.1",
      "resolved": "https://registry.npmjs.org/@octokit/auth-oauth-device/-/auth-oauth-device-7.1.1.tgz",
@@ -9925,18 +9891,6 @@
        "node": ">= 18"
      }
    },
-   "node_modules/@octokit/auth-oauth-device/node_modules/@octokit/endpoint": {
-     "version": "10.1.1",
-     "resolved": "https://registry.npmjs.org/@octokit/endpoint/-/endpoint-10.1.1.tgz",
-     "integrity": "sha512-JYjh5rMOwXMJyUpj028cu0Gbp7qe/ihxfJMLc8VZBMMqSwLgOxDI1911gV4Enl1QSavAQNJcwmwBF9M0VvLh6Q==",
-     "dependencies": {
-       "@octokit/types": "^13.0.0",
-       "universal-user-agent": "^7.0.2"
-     },
-     "engines": {
-       "node": ">= 18"
-     }
-   },
    "node_modules/@octokit/auth-oauth-device/node_modules/@octokit/openapi-types": {
      "version": "22.2.0",
      "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-22.2.0.tgz",
@@ -9975,11 +9929,6 @@
        "@octokit/openapi-types": "^22.2.0"
      }
    },
-   "node_modules/@octokit/auth-oauth-device/node_modules/universal-user-agent": {
-     "version": "7.0.2",
-     "resolved": "https://registry.npmjs.org/universal-user-agent/-/universal-user-agent-7.0.2.tgz",
-     "integrity": "sha512-0JCqzSKnStlRRQfCdowvqy3cy0Dvtlb8xecj/H8JFZuCze4rwjPZQOgvFvn0Ws/usCHQFGpyr+pB9adaGwXn4Q=="
-   },
    "node_modules/@octokit/auth-oauth-user": {
      "version": "5.1.1",
      "resolved": "https://registry.npmjs.org/@octokit/auth-oauth-user/-/auth-oauth-user-5.1.1.tgz",
@@ -9995,18 +9944,6 @@
        "node": ">= 18"
      }
    },
-   "node_modules/@octokit/auth-oauth-user/node_modules/@octokit/endpoint": {
-     "version": "10.1.1",
-     "resolved": "https://registry.npmjs.org/@octokit/endpoint/-/endpoint-10.1.1.tgz",
-     "integrity": "sha512-JYjh5rMOwXMJyUpj028cu0Gbp7qe/ihxfJMLc8VZBMMqSwLgOxDI1911gV4Enl1QSavAQNJcwmwBF9M0VvLh6Q==",
-     "dependencies": {
-       "@octokit/types": "^13.0.0",
-       "universal-user-agent": "^7.0.2"
-     },
-     "engines": {
-       "node": ">= 18"
-     }
-   },
    "node_modules/@octokit/auth-oauth-user/node_modules/@octokit/openapi-types": {
      "version": "22.2.0",
      "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-22.2.0.tgz",
@@ -10045,11 +9982,6 @@
        "@octokit/openapi-types": "^22.2.0"
      }
    },
-   "node_modules/@octokit/auth-oauth-user/node_modules/universal-user-agent": {
-     "version": "7.0.2",
-     "resolved": "https://registry.npmjs.org/universal-user-agent/-/universal-user-agent-7.0.2.tgz",
-     "integrity": "sha512-0JCqzSKnStlRRQfCdowvqy3cy0Dvtlb8xecj/H8JFZuCze4rwjPZQOgvFvn0Ws/usCHQFGpyr+pB9adaGwXn4Q=="
-   },
    "node_modules/@octokit/auth-token": {
      "version": "4.0.0",
      "resolved": "https://registry.npmjs.org/@octokit/auth-token/-/auth-token-4.0.0.tgz",
@@ -10103,32 +10035,38 @@
        "@octokit/openapi-types": "^24.2.0"
      }
    },
+   "node_modules/@octokit/core/node_modules/universal-user-agent": {
+     "version": "6.0.1",
+     "resolved": "https://registry.npmjs.org/universal-user-agent/-/universal-user-agent-6.0.1.tgz",
+     "integrity": "sha512-yCzhz6FN2wU1NiiQRogkTQszlQSlpWaw8SvVegAc+bDxbzHgh1vX8uIe8OYyMH6DwH+sdTJsgMl36+mSMdRJIQ==",
+     "license": "ISC"
+   },
    "node_modules/@octokit/endpoint": {
-     "version": "9.0.6",
+     "version": "10.1.4",
-     "resolved": "https://registry.npmjs.org/@octokit/endpoint/-/endpoint-9.0.6.tgz",
+     "resolved": "https://registry.npmjs.org/@octokit/endpoint/-/endpoint-10.1.4.tgz",
-     "integrity": "sha512-H1fNTMA57HbkFESSt3Y9+FBICv+0jFceJFPWDePYlR/iMGrwM5ph+Dd4XRQs+8X+PUFURLQgX9ChPfhJ/1uNQw==",
+     "integrity": "sha512-OlYOlZIsfEVZm5HCSR8aSg02T2lbUWOsCQoPKfTXJwDzcHQBrVBGdGXb89dv2Kw2ToZaRtudp8O3ZIYoaOjKlA==",
      "license": "MIT",
      "dependencies": {
-       "@octokit/types": "^13.1.0",
+       "@octokit/types": "^14.0.0",
-       "universal-user-agent": "^6.0.0"
+       "universal-user-agent": "^7.0.2"
      },
      "engines": {
        "node": ">= 18"
      }
    },
    "node_modules/@octokit/endpoint/node_modules/@octokit/openapi-types": {
-     "version": "24.2.0",
+     "version": "25.1.0",
-     "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-24.2.0.tgz",
+     "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-25.1.0.tgz",
-     "integrity": "sha512-9sIH3nSUttelJSXUrmGzl7QUBFul0/mB8HRYl3fOlgHbIWG+WnYDXU3v/2zMtAvuzZ/ed00Ei6on975FhBfzrg==",
+     "integrity": "sha512-idsIggNXUKkk0+BExUn1dQ92sfysJrje03Q0bv0e+KPLrvyqZF8MnBpFz8UNfYDwB3Ie7Z0TByjWfzxt7vseaA==",
      "license": "MIT"
    },
    "node_modules/@octokit/endpoint/node_modules/@octokit/types": {
-     "version": "13.10.0",
+     "version": "14.1.0",
-     "resolved": "https://registry.npmjs.org/@octokit/types/-/types-13.10.0.tgz",
+     "resolved": "https://registry.npmjs.org/@octokit/types/-/types-14.1.0.tgz",
-     "integrity": "sha512-ifLaO34EbbPj0Xgro4G5lP5asESjwHracYJvVaPIyXMuiuXLlhic3S47cBdTb+jfODkTE5YtGCLt3Ay3+J97sA==",
+     "integrity": "sha512-1y6DgTy8Jomcpu33N+p5w58l6xyt55Ar2I91RPiIA0xCJBXyUAhXCcmZaDWSANiha7R9a6qJJ2CRomGPZ6f46g==",
      "license": "MIT",
      "dependencies": {
-       "@octokit/openapi-types": "^24.2.0"
+       "@octokit/openapi-types": "^25.1.0"
      }
    },
    "node_modules/@octokit/graphql": {
@@ -10160,6 +10098,12 @@
        "@octokit/openapi-types": "^24.2.0"
      }
    },
+   "node_modules/@octokit/graphql/node_modules/universal-user-agent": {
+     "version": "6.0.1",
+     "resolved": "https://registry.npmjs.org/universal-user-agent/-/universal-user-agent-6.0.1.tgz",
+     "integrity": "sha512-yCzhz6FN2wU1NiiQRogkTQszlQSlpWaw8SvVegAc+bDxbzHgh1vX8uIe8OYyMH6DwH+sdTJsgMl36+mSMdRJIQ==",
+     "license": "ISC"
+   },
    "node_modules/@octokit/oauth-authorization-url": {
      "version": "7.1.1",
      "resolved": "https://registry.npmjs.org/@octokit/oauth-authorization-url/-/oauth-authorization-url-7.1.1.tgz",
@@ -10182,18 +10126,6 @@
        "node": ">= 18"
      }
    },
-   "node_modules/@octokit/oauth-methods/node_modules/@octokit/endpoint": {
-     "version": "10.1.1",
-     "resolved": "https://registry.npmjs.org/@octokit/endpoint/-/endpoint-10.1.1.tgz",
-     "integrity": "sha512-JYjh5rMOwXMJyUpj028cu0Gbp7qe/ihxfJMLc8VZBMMqSwLgOxDI1911gV4Enl1QSavAQNJcwmwBF9M0VvLh6Q==",
-     "dependencies": {
-       "@octokit/types": "^13.0.0",
-       "universal-user-agent": "^7.0.2"
-     },
-     "engines": {
-       "node": ">= 18"
-     }
-   },
    "node_modules/@octokit/oauth-methods/node_modules/@octokit/openapi-types": {
      "version": "22.2.0",
      "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-22.2.0.tgz",
@@ -10232,11 +10164,6 @@
        "@octokit/openapi-types": "^22.2.0"
      }
    },
-   "node_modules/@octokit/oauth-methods/node_modules/universal-user-agent": {
-     "version": "7.0.2",
-     "resolved": "https://registry.npmjs.org/universal-user-agent/-/universal-user-agent-7.0.2.tgz",
-     "integrity": "sha512-0JCqzSKnStlRRQfCdowvqy3cy0Dvtlb8xecj/H8JFZuCze4rwjPZQOgvFvn0Ws/usCHQFGpyr+pB9adaGwXn4Q=="
-   },
    "node_modules/@octokit/openapi-types": {
      "version": "19.1.0",
      "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-19.1.0.tgz",
@@ -10377,31 +10304,54 @@
      }
    },
    "node_modules/@octokit/request-error/node_modules/@octokit/openapi-types": {
-     "version": "22.2.0",
+     "version": "24.2.0",
-     "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-22.2.0.tgz",
+     "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-24.2.0.tgz",
-     "integrity": "sha512-QBhVjcUa9W7Wwhm6DBFu6ZZ+1/t/oYxqc2tp81Pi41YNuJinbFRx8B133qVOrAaBbF7D/m0Et6f9/pZt9Rc+tg=="
+     "integrity": "sha512-9sIH3nSUttelJSXUrmGzl7QUBFul0/mB8HRYl3fOlgHbIWG+WnYDXU3v/2zMtAvuzZ/ed00Ei6on975FhBfzrg==",
+     "license": "MIT"
    },
    "node_modules/@octokit/request-error/node_modules/@octokit/types": {
-     "version": "13.6.1",
+     "version": "13.10.0",
-     "resolved": "https://registry.npmjs.org/@octokit/types/-/types-13.6.1.tgz",
+     "resolved": "https://registry.npmjs.org/@octokit/types/-/types-13.10.0.tgz",
-     "integrity": "sha512-PHZE9Z+kWXb23Ndik8MKPirBPziOc0D2/3KH1P+6jK5nGWe96kadZuE4jev2/Jq7FvIfTlT2Ltg8Fv2x1v0a5g==",
+     "integrity": "sha512-ifLaO34EbbPj0Xgro4G5lP5asESjwHracYJvVaPIyXMuiuXLlhic3S47cBdTb+jfODkTE5YtGCLt3Ay3+J97sA==",
+     "license": "MIT",
      "dependencies": {
-       "@octokit/openapi-types": "^22.2.0"
+       "@octokit/openapi-types": "^24.2.0"
+     }
+   },
+   "node_modules/@octokit/request/node_modules/@octokit/endpoint": {
+     "version": "9.0.6",
+     "resolved": "https://registry.npmjs.org/@octokit/endpoint/-/endpoint-9.0.6.tgz",
+     "integrity": "sha512-H1fNTMA57HbkFESSt3Y9+FBICv+0jFceJFPWDePYlR/iMGrwM5ph+Dd4XRQs+8X+PUFURLQgX9ChPfhJ/1uNQw==",
+     "license": "MIT",
+     "dependencies": {
+       "@octokit/types": "^13.1.0",
+       "universal-user-agent": "^6.0.0"
+     },
+     "engines": {
+       "node": ">= 18"
      }
    },
    "node_modules/@octokit/request/node_modules/@octokit/openapi-types": {
-     "version": "22.2.0",
+     "version": "24.2.0",
-     "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-22.2.0.tgz",
+     "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-24.2.0.tgz",
-     "integrity": "sha512-QBhVjcUa9W7Wwhm6DBFu6ZZ+1/t/oYxqc2tp81Pi41YNuJinbFRx8B133qVOrAaBbF7D/m0Et6f9/pZt9Rc+tg=="
+     "integrity": "sha512-9sIH3nSUttelJSXUrmGzl7QUBFul0/mB8HRYl3fOlgHbIWG+WnYDXU3v/2zMtAvuzZ/ed00Ei6on975FhBfzrg==",
+     "license": "MIT"
    },
    "node_modules/@octokit/request/node_modules/@octokit/types": {
-     "version": "13.6.1",
+     "version": "13.10.0",
-     "resolved": "https://registry.npmjs.org/@octokit/types/-/types-13.6.1.tgz",
+     "resolved": "https://registry.npmjs.org/@octokit/types/-/types-13.10.0.tgz",
-     "integrity": "sha512-PHZE9Z+kWXb23Ndik8MKPirBPziOc0D2/3KH1P+6jK5nGWe96kadZuE4jev2/Jq7FvIfTlT2Ltg8Fv2x1v0a5g==",
+     "integrity": "sha512-ifLaO34EbbPj0Xgro4G5lP5asESjwHracYJvVaPIyXMuiuXLlhic3S47cBdTb+jfODkTE5YtGCLt3Ay3+J97sA==",
+     "license": "MIT",
      "dependencies": {
-       "@octokit/openapi-types": "^22.2.0"
+       "@octokit/openapi-types": "^24.2.0"
      }
    },
+   "node_modules/@octokit/request/node_modules/universal-user-agent": {
+     "version": "6.0.1",
+     "resolved": "https://registry.npmjs.org/universal-user-agent/-/universal-user-agent-6.0.1.tgz",
+     "integrity": "sha512-yCzhz6FN2wU1NiiQRogkTQszlQSlpWaw8SvVegAc+bDxbzHgh1vX8uIe8OYyMH6DwH+sdTJsgMl36+mSMdRJIQ==",
+     "license": "ISC"
+   },
    "node_modules/@octokit/rest": {
      "version": "20.0.2",
      "resolved": "https://registry.npmjs.org/@octokit/rest/-/rest-20.0.2.tgz",
@@ -13351,9 +13301,10 @@
      "license": "MIT"
    },
    "node_modules/@types/express": {
-     "version": "4.17.21",
+     "version": "4.17.23",
-     "resolved": "https://registry.npmjs.org/@types/express/-/express-4.17.21.tgz",
+     "resolved": "https://registry.npmjs.org/@types/express/-/express-4.17.23.tgz",
-     "integrity": "sha512-ejlPM315qwLpaQlQDTjPdsUFSc6ZsP4AN6AlWnogPjQ7CVi7PYF3YVz+CY3jE2pwYf7E/7HlDAN0rV2GxTG0HQ==",
+     "integrity": "sha512-Crp6WY9aTYP3qPi2wGDo9iUe/rceX01UMhnF1jmwDcKCFM6cx7YhGP/Mpr3y9AASpfHixIG0E6azCcL5OcDHsQ==",
+     "license": "MIT",
      "dependencies": {
        "@types/body-parser": "*",
        "@types/express-serve-static-core": "^4.17.33",
@@ -13523,9 +13474,10 @@
      }
    },
    "node_modules/@types/passport": {
-     "version": "1.0.16",
+     "version": "1.0.17",
-     "resolved": "https://registry.npmjs.org/@types/passport/-/passport-1.0.16.tgz",
+     "resolved": "https://registry.npmjs.org/@types/passport/-/passport-1.0.17.tgz",
-     "integrity": "sha512-FD0qD5hbPWQzaM0wHUnJ/T0BBCJBxCeemtnCwc/ThhTg3x9jfrAcRUmj5Dopza+MfFS9acTe3wk7rcVnRIp/0A==",
+     "integrity": "sha512-aciLyx+wDwT2t2/kJGJR2AEeBz0nJU4WuRX04Wu9Dqc5lSUtwu0WERPHYsLhF9PtseiAMPBGNUOtFjxZ56prsg==",
+     "license": "MIT",
      "dependencies": {
        "@types/express": "*"
      }
@@ -18287,7 +18239,8 @@
    "node_modules/fast-content-type-parse": {
      "version": "1.1.0",
      "resolved": "https://registry.npmjs.org/fast-content-type-parse/-/fast-content-type-parse-1.1.0.tgz",
-     "integrity": "sha512-fBHHqSTFLVnR61C+gltJuE5GkVQMV0S2nqUO8TJ+5Z3qAKG8vAx4FKai1s5jq/inV1+sREynIWSuQ6HgoSXpDQ=="
+     "integrity": "sha512-fBHHqSTFLVnR61C+gltJuE5GkVQMV0S2nqUO8TJ+5Z3qAKG8vAx4FKai1s5jq/inV1+sREynIWSuQ6HgoSXpDQ==",
+     "license": "MIT"
    },
    "node_modules/fast-copy": {
      "version": "3.0.1",
@@ -24775,6 +24728,12 @@
        "jsonwebtoken": "^9.0.2"
      }
    },
+   "node_modules/octokit-auth-probot/node_modules/universal-user-agent": {
+     "version": "6.0.1",
+     "resolved": "https://registry.npmjs.org/universal-user-agent/-/universal-user-agent-6.0.1.tgz",
+     "integrity": "sha512-yCzhz6FN2wU1NiiQRogkTQszlQSlpWaw8SvVegAc+bDxbzHgh1vX8uIe8OYyMH6DwH+sdTJsgMl36+mSMdRJIQ==",
+     "license": "ISC"
+   },
    "node_modules/odbc": {
      "version": "2.4.9",
      "resolved": "https://registry.npmjs.org/odbc/-/odbc-2.4.9.tgz",
@@ -30704,9 +30663,10 @@
      "integrity": "sha512-G5o6f95b5BggDGuUfKDApKaCgNYy2x7OdHY0zSMF081O0EJobw+1130VONhrA7ezGSV2FNOGyM+KQpQZAr9bIQ=="
    },
    "node_modules/universal-user-agent": {
-     "version": "6.0.1",
+     "version": "7.0.3",
-     "resolved": "https://registry.npmjs.org/universal-user-agent/-/universal-user-agent-6.0.1.tgz",
+     "resolved": "https://registry.npmjs.org/universal-user-agent/-/universal-user-agent-7.0.3.tgz",
-     "integrity": "sha512-yCzhz6FN2wU1NiiQRogkTQszlQSlpWaw8SvVegAc+bDxbzHgh1vX8uIe8OYyMH6DwH+sdTJsgMl36+mSMdRJIQ=="
+     "integrity": "sha512-TmnEAEAsBJVZM/AADELsK76llnwcf9vMKuPz8JflO1frO8Lchitr0fNaN9d+Ap0BjKtqWqd/J17qeDnXh8CL2A==",
+     "license": "ISC"
    },
    "node_modules/universalify": {
      "version": "2.0.1",
@@ -31953,9 +31913,9 @@
      "license": "MIT"
    },
    "node_modules/xml-crypto": {
-     "version": "6.0.1",
+     "version": "6.1.2",
-     "resolved": "https://registry.npmjs.org/xml-crypto/-/xml-crypto-6.0.1.tgz",
+     "resolved": "https://registry.npmjs.org/xml-crypto/-/xml-crypto-6.1.2.tgz",
-     "integrity": "sha512-v05aU7NS03z4jlZ0iZGRFeZsuKO1UfEbbYiaeRMiATBFs6Jq9+wqKquEMTn4UTrYZ9iGD8yz3KT4L9o2iF682w==",
+     "integrity": "sha512-leBOVQdVi8FvPJrMYoum7Ici9qyxfE4kVi+AkpUoYCSXaQF4IlBm1cneTK9oAxR61LpYxTx7lNcsnBIeRpGW2w==",
      "license": "MIT",
      "dependencies": {
        "@xmldom/is-dom-node": "^1.0.1",

@@ -153,11 +153,12 @@
    "@gitbeaker/rest": "^42.5.0",
    "@google-cloud/kms": "^4.5.0",
    "@infisical/quic": "^1.0.8",
-   "@node-saml/passport-saml": "^5.0.1",
+   "@node-saml/passport-saml": "^5.1.0",
    "@octokit/auth-app": "^7.1.1",
    "@octokit/core": "^5.2.1",
    "@octokit/plugin-paginate-graphql": "^4.0.1",
    "@octokit/plugin-retry": "^5.0.5",
+   "@octokit/request": "8.4.1",
    "@octokit/rest": "^20.0.2",
    "@octokit/webhooks-types": "^7.3.1",
    "@octopusdeploy/api-client": "^3.4.1",

backend/src/@types/fastify.d.ts (vendored, 4 lines changed)

@@ -12,6 +12,8 @@ import { TCertificateAuthorityCrlServiceFactory } from "@app/ee/services/certifi
 import { TCertificateEstServiceFactory } from "@app/ee/services/certificate-est/certificate-est-service";
 import { TDynamicSecretServiceFactory } from "@app/ee/services/dynamic-secret/dynamic-secret-types";
 import { TDynamicSecretLeaseServiceFactory } from "@app/ee/services/dynamic-secret-lease/dynamic-secret-lease-types";
+import { TEventBusService } from "@app/ee/services/event/event-bus-service";
+import { TServerSentEventsService } from "@app/ee/services/event/event-sse-service";
 import { TExternalKmsServiceFactory } from "@app/ee/services/external-kms/external-kms-service";
 import { TGatewayServiceFactory } from "@app/ee/services/gateway/gateway-service";
 import { TGithubOrgSyncServiceFactory } from "@app/ee/services/github-org-sync/github-org-sync-service";
@@ -296,6 +298,8 @@ declare module "fastify" {
      internalCertificateAuthority: TInternalCertificateAuthorityServiceFactory;
      pkiTemplate: TPkiTemplatesServiceFactory;
      reminder: TReminderServiceFactory;
+     bus: TEventBusService;
+     sse: TServerSentEventsService;
    };
    // this is exclusive use for middlewares in which we need to inject data
    // everywhere else access using service layer
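
The two added service entries expose the event bus and the server-sent-events service on the typed Fastify service map. A minimal sketch of how a route plugin might reach them, assuming the repository's usual `server.services` accessor (the route URL and handler below are illustrative and not part of this diff):

// Hypothetical illustration only; the route and handler are not from the diff.
export const registerEventProbeRouter = async (server: FastifyZodProvider) => {
  server.route({
    method: "GET",
    url: "/event-bus/status",
    handler: async () => {
      // `bus` and `sse` are typed via the augmented fastify module above.
      const { bus, sse } = server.services;
      return { busAvailable: Boolean(bus), sseAvailable: Boolean(sse) };
    }
  });
};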

@@ -0,0 +1,19 @@
+import { Knex } from "knex";
+
+import { TableName } from "../schemas";
+
+export async function up(knex: Knex): Promise<void> {
+  if (!(await knex.schema.hasColumn(TableName.Reminder, "fromDate"))) {
+    await knex.schema.alterTable(TableName.Reminder, (t) => {
+      t.timestamp("fromDate", { useTz: true }).nullable();
+    });
+  }
+}
+
+export async function down(knex: Knex): Promise<void> {
+  if (await knex.schema.hasColumn(TableName.Reminder, "fromDate")) {
+    await knex.schema.alterTable(TableName.Reminder, (t) => {
+      t.dropColumn("fromDate");
+    });
+  }
+}

@@ -14,7 +14,8 @@ export const RemindersSchema = z.object({
   repeatDays: z.number().nullable().optional(),
   nextReminderDate: z.date(),
   createdAt: z.date(),
-  updatedAt: z.date()
+  updatedAt: z.date(),
+  fromDate: z.date().nullable().optional()
 });
 
 export type TReminders = z.infer<typeof RemindersSchema>;
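
Because the new `fromDate` column is declared `nullable().optional()`, rows written before the migration above still validate. A small sketch exercising just the new field's validator (illustrative only; the full schema has other required columns not shown in this hunk):

RemindersSchema.shape.fromDate.parse(undefined);  // accepted: optional
RemindersSchema.shape.fromDate.parse(null);       // accepted: nullable
RemindersSchema.shape.fromDate.parse(new Date()); // accepted: date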

@@ -0,0 +1,16 @@
+import { registerSecretScanningEndpoints } from "@app/ee/routes/v2/secret-scanning-v2-routers/secret-scanning-v2-endpoints";
+import {
+  CreateGitLabDataSourceSchema,
+  GitLabDataSourceSchema,
+  UpdateGitLabDataSourceSchema
+} from "@app/ee/services/secret-scanning-v2/gitlab";
+import { SecretScanningDataSource } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
+
+export const registerGitLabSecretScanningRouter = async (server: FastifyZodProvider) =>
+  registerSecretScanningEndpoints({
+    type: SecretScanningDataSource.GitLab,
+    server,
+    responseSchema: GitLabDataSourceSchema,
+    createSchema: CreateGitLabDataSourceSchema,
+    updateSchema: UpdateGitLabDataSourceSchema
+  });

@@ -1,3 +1,4 @@
+import { registerGitLabSecretScanningRouter } from "@app/ee/routes/v2/secret-scanning-v2-routers/gitlab-secret-scanning-router";
 import { SecretScanningDataSource } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
 
 import { registerBitbucketSecretScanningRouter } from "./bitbucket-secret-scanning-router";
@@ -10,5 +11,6 @@ export const SECRET_SCANNING_REGISTER_ROUTER_MAP: Record<
   (server: FastifyZodProvider) => Promise<void>
 > = {
   [SecretScanningDataSource.GitHub]: registerGitHubSecretScanningRouter,
-  [SecretScanningDataSource.Bitbucket]: registerBitbucketSecretScanningRouter
+  [SecretScanningDataSource.Bitbucket]: registerBitbucketSecretScanningRouter,
+  [SecretScanningDataSource.GitLab]: registerGitLabSecretScanningRouter
 };
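
The map keys every `SecretScanningDataSource` to its register function, so adding GitLab is the one-line entry above plus the dedicated router file. A sketch of the typical consumption pattern (the surrounding registration function is an assumption, not shown in this diff):

// Assumed consumer shape, for illustration only.
export const registerSecretScanningV2Routers = async (server: FastifyZodProvider) => {
  for (const [dataSource, registerRouter] of Object.entries(SECRET_SCANNING_REGISTER_ROUTER_MAP)) {
    // Each entry registers the endpoints for one data source, e.g. the new GitLab router.
    await server.register(registerRouter, { prefix: `/data-sources/${dataSource}` });
  }
};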

@@ -4,6 +4,7 @@ import { SecretScanningConfigsSchema } from "@app/db/schemas";
 import { EventType } from "@app/ee/services/audit-log/audit-log-types";
 import { BitbucketDataSourceListItemSchema } from "@app/ee/services/secret-scanning-v2/bitbucket";
 import { GitHubDataSourceListItemSchema } from "@app/ee/services/secret-scanning-v2/github";
+import { GitLabDataSourceListItemSchema } from "@app/ee/services/secret-scanning-v2/gitlab";
 import {
   SecretScanningFindingStatus,
   SecretScanningScanStatus
@@ -24,7 +25,8 @@ import { AuthMode } from "@app/services/auth/auth-type";
 
 const SecretScanningDataSourceOptionsSchema = z.discriminatedUnion("type", [
   GitHubDataSourceListItemSchema,
-  BitbucketDataSourceListItemSchema
+  BitbucketDataSourceListItemSchema,
+  GitLabDataSourceListItemSchema
 ]);
 
 export const registerSecretScanningV2Router = async (server: FastifyZodProvider) => {

@@ -1,8 +1,10 @@
 // weird commonjs-related error in the CI requires us to do the import like this
 import knex from "knex";
+import { v4 as uuidv4 } from "uuid";
 
 import { TDbClient } from "@app/db";
 import { TableName, TAuditLogs } from "@app/db/schemas";
+import { getConfig } from "@app/lib/config/env";
 import { DatabaseError, GatewayTimeoutError } from "@app/lib/errors";
 import { ormify, selectAllTableCols, TOrmify } from "@app/lib/knex";
 import { logger } from "@app/lib/logger";
@@ -150,43 +152,70 @@ export const auditLogDALFactory = (db: TDbClient) => {
 
   // delete all audit log that have expired
   const pruneAuditLog: TAuditLogDALFactory["pruneAuditLog"] = async (tx) => {
-    const AUDIT_LOG_PRUNE_BATCH_SIZE = 10000;
-    const MAX_RETRY_ON_FAILURE = 3;
+    const runPrune = async (dbClient: knex.Knex) => {
+      const AUDIT_LOG_PRUNE_BATCH_SIZE = 10000;
+      const MAX_RETRY_ON_FAILURE = 3;
 
      const today = new Date();
      let deletedAuditLogIds: { id: string }[] = [];
      let numberOfRetryOnFailure = 0;
      let isRetrying = false;
 
      logger.info(`${QueueName.DailyResourceCleanUp}: audit log started`);
      do {
        try {
-         const findExpiredLogSubQuery = (tx || db)(TableName.AuditLog)
+         const findExpiredLogSubQuery = dbClient(TableName.AuditLog)
            .where("expiresAt", "<", today)
            .where("createdAt", "<", today) // to use audit log partition
            .orderBy(`${TableName.AuditLog}.createdAt`, "desc")
            .select("id")
            .limit(AUDIT_LOG_PRUNE_BATCH_SIZE);
 
          // eslint-disable-next-line no-await-in-loop
-         deletedAuditLogIds = await (tx || db)(TableName.AuditLog)
+         deletedAuditLogIds = await dbClient(TableName.AuditLog)
            .whereIn("id", findExpiredLogSubQuery)
            .del()
            .returning("id");
          numberOfRetryOnFailure = 0; // reset
        } catch (error) {
          numberOfRetryOnFailure += 1;
          logger.error(error, "Failed to delete audit log on pruning");
        } finally {
          // eslint-disable-next-line no-await-in-loop
          await new Promise((resolve) => {
            setTimeout(resolve, 10); // time to breathe for db
          });
        }
        isRetrying = numberOfRetryOnFailure > 0;
      } while (deletedAuditLogIds.length > 0 || (isRetrying && numberOfRetryOnFailure < MAX_RETRY_ON_FAILURE));
      logger.info(`${QueueName.DailyResourceCleanUp}: audit log completed`);
+    };
+
+    if (tx) {
+      await runPrune(tx);
+    } else {
+      const QUERY_TIMEOUT_MS = 10 * 60 * 1000; // 10 minutes
+      await db.transaction(async (trx) => {
+        await trx.raw(`SET statement_timeout = ${QUERY_TIMEOUT_MS}`);
+        await runPrune(trx);
+      });
+    }
   };
 
-  return { ...auditLogOrm, pruneAuditLog, find };
+  const create: TAuditLogDALFactory["create"] = async (tx) => {
+    const config = getConfig();
+
+    if (config.DISABLE_AUDIT_LOG_STORAGE) {
+      return {
+        ...tx,
+        id: uuidv4(),
+        createdAt: new Date(),
+        updatedAt: new Date()
+      };
+    }
+
+    return auditLogOrm.create(tx);
+  };
+
+  return { ...auditLogOrm, create, pruneAuditLog, find };
 };
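
With the new `create` override, enabling `DISABLE_AUDIT_LOG_STORAGE` (assumed here to be a boolean flag surfaced through `getConfig()`, as the diff suggests) short-circuits persistence while still handing callers a row-shaped object, so downstream consumers such as the log-stream fan-out keep working. A sketch of the observable contract (not a test from the repository):

const auditLog = await auditLogDAL.create({
  actor: "identity",
  eventType: "secret.read"
  // ...remaining TAuditLogs fields elided for brevity
});
// Storage disabled: no row is written, yet auditLog.id is a fresh UUID and
// createdAt/updatedAt are populated with the current time.
// Storage enabled: the call falls through to auditLogOrm.create as before.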

@@ -1,7 +1,8 @@
 import { AxiosError, RawAxiosRequestHeaders } from "axios";
 
-import { SecretKeyEncoding } from "@app/db/schemas";
-import { getConfig } from "@app/lib/config/env";
+import { ProjectType, SecretKeyEncoding } from "@app/db/schemas";
+import { TEventBusService } from "@app/ee/services/event/event-bus-service";
+import { TopicName, toPublishableEvent } from "@app/ee/services/event/types";
 import { request } from "@app/lib/config/request";
 import { crypto } from "@app/lib/crypto/cryptography";
 import { logger } from "@app/lib/logger";
@@ -21,6 +22,7 @@ type TAuditLogQueueServiceFactoryDep = {
   queueService: TQueueServiceFactory;
   projectDAL: Pick<TProjectDALFactory, "findById">;
   licenseService: Pick<TLicenseServiceFactory, "getPlan">;
+  eventBusService: TEventBusService;
 };
 
 export type TAuditLogQueueServiceFactory = {
@@ -36,133 +38,17 @@ export const auditLogQueueServiceFactory = async ({
   queueService,
   projectDAL,
   licenseService,
-  auditLogStreamDAL
+  auditLogStreamDAL,
+  eventBusService
 }: TAuditLogQueueServiceFactoryDep): Promise<TAuditLogQueueServiceFactory> => {
-  const appCfg = getConfig();
-
   const pushToLog = async (data: TCreateAuditLogDTO) => {
-    if (appCfg.USE_PG_QUEUE && appCfg.SHOULD_INIT_PG_QUEUE) {
-      await queueService.queuePg<QueueName.AuditLog>(QueueJobs.AuditLog, data, {
-        retryLimit: 10,
-        retryBackoff: true
-      });
-    } else {
-      await queueService.queue<QueueName.AuditLog>(QueueName.AuditLog, QueueJobs.AuditLog, data, {
-        removeOnFail: {
-          count: 3
-        },
-        removeOnComplete: true
-      });
-    }
+    await queueService.queue<QueueName.AuditLog>(QueueName.AuditLog, QueueJobs.AuditLog, data, {
+      removeOnFail: {
+        count: 3
+      },
+      removeOnComplete: true
+    });
   };
 
-  if (appCfg.SHOULD_INIT_PG_QUEUE) {
-    await queueService.startPg<QueueName.AuditLog>(
-      QueueJobs.AuditLog,
-      async ([job]) => {
-        const { actor, event, ipAddress, projectId, userAgent, userAgentType } = job.data;
-        let { orgId } = job.data;
-        const MS_IN_DAY = 24 * 60 * 60 * 1000;
-        let project;
-
-        if (!orgId) {
-          // it will never be undefined for both org and project id
-          // TODO(akhilmhdh): use caching here in dal to avoid db calls
-          project = await projectDAL.findById(projectId as string);
-          orgId = project.orgId;
-        }
-
-        const plan = await licenseService.getPlan(orgId);
-        if (plan.auditLogsRetentionDays === 0) {
-          // skip inserting if audit log retention is 0 meaning its not supported
-          return;
-        }
-
-        // For project actions, set TTL to project-level audit log retention config
-        // This condition ensures that the plan's audit log retention days cannot be bypassed
-        const ttlInDays =
-          project?.auditLogsRetentionDays && project.auditLogsRetentionDays < plan.auditLogsRetentionDays
-            ? project.auditLogsRetentionDays
-            : plan.auditLogsRetentionDays;
-
-        const ttl = ttlInDays * MS_IN_DAY;
-
-        const auditLog = await auditLogDAL.create({
-          actor: actor.type,
-          actorMetadata: actor.metadata,
-          userAgent,
-          projectId,
-          projectName: project?.name,
-          ipAddress,
-          orgId,
-          eventType: event.type,
-          expiresAt: new Date(Date.now() + ttl),
-          eventMetadata: event.metadata,
-          userAgentType
-        });
-
-        const logStreams = orgId ? await auditLogStreamDAL.find({ orgId }) : [];
-        await Promise.allSettled(
-          logStreams.map(
-            async ({
-              url,
-              encryptedHeadersTag,
-              encryptedHeadersIV,
-              encryptedHeadersKeyEncoding,
-              encryptedHeadersCiphertext
-            }) => {
-              const streamHeaders =
-                encryptedHeadersIV && encryptedHeadersCiphertext && encryptedHeadersTag
-                  ? (JSON.parse(
-                      crypto
-                        .encryption()
-                        .symmetric()
-                        .decryptWithRootEncryptionKey({
-                          keyEncoding: encryptedHeadersKeyEncoding as SecretKeyEncoding,
-                          iv: encryptedHeadersIV,
-                          tag: encryptedHeadersTag,
-                          ciphertext: encryptedHeadersCiphertext
-                        })
-                    ) as LogStreamHeaders[])
-                  : [];
-
-              const headers: RawAxiosRequestHeaders = { "Content-Type": "application/json" };
-
-              if (streamHeaders.length)
-                streamHeaders.forEach(({ key, value }) => {
-                  headers[key] = value;
-                });
-
-              try {
-                const response = await request.post(
-                  url,
-                  { ...providerSpecificPayload(url), ...auditLog },
-                  {
-                    headers,
-                    // request timeout
-                    timeout: AUDIT_LOG_STREAM_TIMEOUT,
-                    // connection timeout
-                    signal: AbortSignal.timeout(AUDIT_LOG_STREAM_TIMEOUT)
-                  }
-                );
-                return response;
-              } catch (error) {
-                logger.error(
-                  `Failed to stream audit log [url=${url}] for org [orgId=${orgId}] [error=${(error as AxiosError).message}]`
-                );
-                return error;
-              }
-            }
-          )
-        );
-      },
-      {
-        batchSize: 1,
-        workerCount: 30,
-        pollingIntervalSeconds: 0.5
-      }
-    );
-  }
-
   queueService.start(QueueName.AuditLog, async (job) => {
     const { actor, event, ipAddress, projectId, userAgent, userAgentType } = job.data;
@@ -178,88 +64,97 @@ export const auditLogQueueServiceFactory = async ({
     }
 
     const plan = await licenseService.getPlan(orgId);
-    if (plan.auditLogsRetentionDays === 0) {
     // skip inserting if audit log retention is 0 meaning its not supported
-      return;
+    if (plan.auditLogsRetentionDays !== 0) {
+      // For project actions, set TTL to project-level audit log retention config
+      // This condition ensures that the plan's audit log retention days cannot be bypassed
+      const ttlInDays =
+        project?.auditLogsRetentionDays && project.auditLogsRetentionDays < plan.auditLogsRetentionDays
+          ? project.auditLogsRetentionDays
+          : plan.auditLogsRetentionDays;
+
+      const ttl = ttlInDays * MS_IN_DAY;
+
+      const auditLog = await auditLogDAL.create({
+        actor: actor.type,
+        actorMetadata: actor.metadata,
+        userAgent,
+        projectId,
+        projectName: project?.name,
+        ipAddress,
+        orgId,
+        eventType: event.type,
+        expiresAt: new Date(Date.now() + ttl),
+        eventMetadata: event.metadata,
+        userAgentType
+      });
+
+      const logStreams = orgId ? await auditLogStreamDAL.find({ orgId }) : [];
+      await Promise.allSettled(
+        logStreams.map(
+          async ({
+            url,
+            encryptedHeadersTag,
+            encryptedHeadersIV,
+            encryptedHeadersKeyEncoding,
+            encryptedHeadersCiphertext
+          }) => {
+            const streamHeaders =
+              encryptedHeadersIV && encryptedHeadersCiphertext && encryptedHeadersTag
+                ? (JSON.parse(
+                    crypto
+                      .encryption()
+                      .symmetric()
+                      .decryptWithRootEncryptionKey({
+                        keyEncoding: encryptedHeadersKeyEncoding as SecretKeyEncoding,
+                        iv: encryptedHeadersIV,
+                        tag: encryptedHeadersTag,
+                        ciphertext: encryptedHeadersCiphertext
+                      })
+                  ) as LogStreamHeaders[])
+                : [];
+
+            const headers: RawAxiosRequestHeaders = { "Content-Type": "application/json" };
+
+            if (streamHeaders.length)
+              streamHeaders.forEach(({ key, value }) => {
+                headers[key] = value;
+              });
+
+            try {
+              const response = await request.post(
+                url,
+                { ...providerSpecificPayload(url), ...auditLog },
+                {
+                  headers,
+                  // request timeout
+                  timeout: AUDIT_LOG_STREAM_TIMEOUT,
+                  // connection timeout
+                  signal: AbortSignal.timeout(AUDIT_LOG_STREAM_TIMEOUT)
+                }
+              );
+              return response;
+            } catch (error) {
+              logger.error(
+                `Failed to stream audit log [url=${url}] for org [orgId=${orgId}] [error=${(error as AxiosError).message}]`
+              );
+              return error;
+            }
+          }
+        )
+      );
     }
 
-    // For project actions, set TTL to project-level audit log retention config
-    // This condition ensures that the plan's audit log retention days cannot be bypassed
-    const ttlInDays =
-      project?.auditLogsRetentionDays && project.auditLogsRetentionDays < plan.auditLogsRetentionDays
-        ? project.auditLogsRetentionDays
-        : plan.auditLogsRetentionDays;
-
-    const ttl = ttlInDays * MS_IN_DAY;
-
-    const auditLog = await auditLogDAL.create({
-      actor: actor.type,
-      actorMetadata: actor.metadata,
-      userAgent,
-      projectId,
-      projectName: project?.name,
-      ipAddress,
-      orgId,
-      eventType: event.type,
-      expiresAt: new Date(Date.now() + ttl),
-      eventMetadata: event.metadata,
-      userAgentType
-    });
-
-    const logStreams = orgId ? await auditLogStreamDAL.find({ orgId }) : [];
-    await Promise.allSettled(
-      logStreams.map(
-        async ({
-          url,
-          encryptedHeadersTag,
-          encryptedHeadersIV,
-          encryptedHeadersKeyEncoding,
-          encryptedHeadersCiphertext
-        }) => {
-          const streamHeaders =
-            encryptedHeadersIV && encryptedHeadersCiphertext && encryptedHeadersTag
-              ? (JSON.parse(
-                  crypto
-                    .encryption()
-                    .symmetric()
-                    .decryptWithRootEncryptionKey({
-                      keyEncoding: encryptedHeadersKeyEncoding as SecretKeyEncoding,
-                      iv: encryptedHeadersIV,
-                      tag: encryptedHeadersTag,
-                      ciphertext: encryptedHeadersCiphertext
-                    })
-                ) as LogStreamHeaders[])
-              : [];
-
-          const headers: RawAxiosRequestHeaders = { "Content-Type": "application/json" };
-
-          if (streamHeaders.length)
-            streamHeaders.forEach(({ key, value }) => {
-              headers[key] = value;
-            });
-
-          try {
-            const response = await request.post(
-              url,
-              { ...providerSpecificPayload(url), ...auditLog },
-              {
-                headers,
-                // request timeout
-                timeout: AUDIT_LOG_STREAM_TIMEOUT,
-                // connection timeout
-                signal: AbortSignal.timeout(AUDIT_LOG_STREAM_TIMEOUT)
-              }
-            );
-            return response;
-          } catch (error) {
-            logger.error(
-              `Failed to stream audit log [url=${url}] for org [orgId=${orgId}] [error=${(error as AxiosError).message}]`
-            );
-            return error;
-          }
-        }
-      )
-    );
+    const publishable = toPublishableEvent(event);
+
+    if (publishable) {
+      await eventBusService.publish(TopicName.CoreServers, {
+        type: ProjectType.SecretManager,
+        source: "infiscal",
+        data: publishable.data
+      });
+    }
   });
 
   return {
backend/src/ee/services/event/event-bus-service.ts (new file, 83 lines)
@@ -0,0 +1,83 @@
import Redis from "ioredis";
import { z } from "zod";

import { logger } from "@app/lib/logger";

import { EventSchema, TopicName } from "./types";

export const eventBusFactory = (redis: Redis) => {
  const publisher = redis.duplicate();
  // Duplicate the publisher to create a subscriber.
  // This is necessary because Redis does not allow a single connection to both publish and subscribe.
  const subscriber = publisher.duplicate();

  const init = async (topics: TopicName[] = Object.values(TopicName)) => {
    subscriber.on("error", (e) => {
      logger.error(e, "Event Bus subscriber error");
    });

    publisher.on("error", (e) => {
      logger.error(e, "Event Bus publisher error");
    });

    await subscriber.subscribe(...topics);
  };

  /**
   * Publishes an event to the specified topic.
   * @param topic - The topic to publish the event to.
   * @param event - The event data to publish.
   */
  const publish = async <T extends z.input<typeof EventSchema>>(topic: TopicName, event: T) => {
    const json = JSON.stringify(event);

    return publisher.publish(topic, json, (err) => {
      if (err) {
        return logger.error(err, `Error publishing to channel ${topic}`);
      }
    });
  };

  /**
   * @param fn - The function to call when a message is received.
   * It should accept the parsed event data as an argument.
   * @template T - The type of the event data, which should match the schema defined in EventSchema.
   * @returns A function that can be called to unsubscribe from the event bus.
   */
  const subscribe = <T extends z.infer<typeof EventSchema>>(fn: (data: T) => Promise<void> | void) => {
    // Not using async await cause redis client's `on` method does not expect async listeners.
    const listener = (channel: string, message: string) => {
      try {
        const parsed = JSON.parse(message) as T;
        const thenable = fn(parsed);

        // If the function returns a Promise, catch any errors that occur during processing.
        if (thenable instanceof Promise) {
          thenable.catch((error) => {
            logger.error(error, `Error processing message from channel ${channel}`);
          });
        }
      } catch (error) {
        logger.error(error, `Error parsing message data from channel ${channel}`);
      }
    };
    subscriber.on("message", listener);

    return () => {
      subscriber.off("message", listener);
    };
  };

  const close = async () => {
    try {
      await publisher.quit();
      await subscriber.quit();
    } catch (error) {
      logger.error(error, "Error closing event bus connections");
    }
  };

  return { init, publish, subscribe, close };
};

export type TEventBusService = ReturnType<typeof eventBusFactory>;
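Reviewer note, not part of this diff: a minimal usage sketch of the event bus factory above, assuming a locally reachable Redis instance. The enum members used (TopicName.CoreServers, ProjectType.SecretManager, EventType.CREATE_SECRET) all appear elsewhere in this PR; the payload values are illustrative only.

import Redis from "ioredis";

import { ProjectType } from "@app/db/schemas";
import { EventType } from "@app/ee/services/audit-log/audit-log-types";

import { eventBusFactory } from "./event-bus-service";
import { TopicName } from "./types";

const demo = async () => {
  const redis = new Redis("redis://localhost:6379"); // assumed local Redis instance
  const bus = eventBusFactory(redis);

  await bus.init(); // subscribes to every TopicName by default

  // React to any event arriving on the subscribed topics.
  const unsubscribe = bus.subscribe((event) => {
    console.log("received", event.type, event.data.eventType);
  });

  // Publish a single-secret event; the envelope must satisfy EventSchema.
  await bus.publish(TopicName.CoreServers, {
    type: ProjectType.SecretManager,
    source: "infiscal",
    data: {
      eventType: EventType.CREATE_SECRET,
      payload: { secretId: "123", secretKey: "API_KEY", environment: "dev", secretPath: "/" }
    }
  });

  unsubscribe();
  await bus.close();
};

void demo();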
backend/src/ee/services/event/event-sse-service.ts (new file, 164 lines)
@@ -0,0 +1,164 @@
/* eslint-disable no-continue */
import { subject } from "@casl/ability";
import Redis from "ioredis";

import { KeyStorePrefixes } from "@app/keystore/keystore";
import { logger } from "@app/lib/logger";

import { TEventBusService } from "./event-bus-service";
import { createEventStreamClient, EventStreamClient, IEventStreamClientOpts } from "./event-sse-stream";
import { EventData, RegisteredEvent, toBusEventName } from "./types";

const AUTH_REFRESH_INTERVAL = 60 * 1000;
const HEART_BEAT_INTERVAL = 15 * 1000;

export const sseServiceFactory = (bus: TEventBusService, redis: Redis) => {
  let heartbeatInterval: NodeJS.Timeout | null = null;

  const clients = new Set<EventStreamClient>();

  heartbeatInterval = setInterval(() => {
    for (const client of clients) {
      if (client.stream.closed) continue;
      void client.ping();
    }
  }, HEART_BEAT_INTERVAL);

  const refreshInterval = setInterval(() => {
    for (const client of clients) {
      if (client.stream.closed) continue;
      void client.refresh();
    }
  }, AUTH_REFRESH_INTERVAL);

  const removeActiveConnection = async (projectId: string, identityId: string, connectionId: string) => {
    const set = KeyStorePrefixes.ActiveSSEConnectionsSet(projectId, identityId);
    const key = KeyStorePrefixes.ActiveSSEConnections(projectId, identityId, connectionId);

    await Promise.all([redis.lrem(set, 0, connectionId), redis.del(key)]);
  };

  const getActiveConnectionsCount = async (projectId: string, identityId: string) => {
    const set = KeyStorePrefixes.ActiveSSEConnectionsSet(projectId, identityId);
    const connections = await redis.lrange(set, 0, -1);

    if (connections.length === 0) {
      return 0; // No active connections
    }

    const keys = connections.map((c) => KeyStorePrefixes.ActiveSSEConnections(projectId, identityId, c));

    const values = await redis.mget(...keys);

    // eslint-disable-next-line no-plusplus
    for (let i = 0; i < values.length; i++) {
      if (values[i] === null) {
        // eslint-disable-next-line no-await-in-loop
        await removeActiveConnection(projectId, identityId, connections[i]);
      }
    }

    return redis.llen(set);
  };

  const onDisconnect = async (client: EventStreamClient) => {
    try {
      client.close();
      clients.delete(client);
      await removeActiveConnection(client.auth.projectId, client.auth.actorId, client.id);
    } catch (error) {
      logger.error(error, "Error during SSE stream disconnection");
    }
  };

  function filterEventsForClient(client: EventStreamClient, event: EventData, registered: RegisteredEvent[]) {
    const eventType = toBusEventName(event.data.eventType);
    const match = registered.find((r) => r.event === eventType);
    if (!match) return;

    const item = event.data.payload;

    if (Array.isArray(item)) {
      if (item.length === 0) return;

      const baseSubject = {
        eventType,
        environment: undefined as string | undefined,
        secretPath: undefined as string | undefined
      };

      const filtered = item.filter((ev) => {
        baseSubject.secretPath = ev.secretPath ?? "/";
        baseSubject.environment = ev.environment;

        return client.matcher.can("subscribe", subject(event.type, baseSubject));
      });

      if (filtered.length === 0) return;

      return client.send({
        ...event,
        data: {
          ...event.data,
          payload: filtered
        }
      });
    }

    // For single item
    const baseSubject = {
      eventType,
      secretPath: item.secretPath ?? "/",
      environment: item.environment
    };

    if (client.matcher.can("subscribe", subject(event.type, baseSubject))) {
      client.send(event);
    }
  }

  const subscribe = async (
    opts: IEventStreamClientOpts & {
      onClose?: () => void;
    }
  ) => {
    const client = createEventStreamClient(redis, opts);

    // Set up event listener on event bus
    const unsubscribe = bus.subscribe((event) => {
      if (event.type !== opts.type) return;
      filterEventsForClient(client, event, opts.registered);
    });

    client.stream.on("close", () => {
      unsubscribe();
      void onDisconnect(client); // This will never throw
    });

    await client.open();

    clients.add(client);

    return client;
  };

  const close = () => {
    if (heartbeatInterval) {
      clearInterval(heartbeatInterval);
    }

    if (refreshInterval) {
      clearInterval(refreshInterval);
    }

    for (const client of clients) {
      client.close();
    }

    clients.clear();
  };

  return { subscribe, close, getActiveConnectionsCount };
};

export type TServerSentEventsService = ReturnType<typeof sseServiceFactory>;
backend/src/ee/services/event/event-sse-stream.ts (new file, 178 lines)
@@ -0,0 +1,178 @@
/* eslint-disable no-underscore-dangle */
import { Readable } from "node:stream";

import { MongoAbility, PureAbility } from "@casl/ability";
import { MongoQuery } from "@ucast/mongo2js";
import Redis from "ioredis";
import { nanoid } from "nanoid";

import { ProjectType } from "@app/db/schemas";
import { ProjectPermissionSet } from "@app/ee/services/permission/project-permission";
import { KeyStorePrefixes } from "@app/keystore/keystore";
import { conditionsMatcher } from "@app/lib/casl";
import { logger } from "@app/lib/logger";

import { EventData, RegisteredEvent } from "./types";

export const getServerSentEventsHeaders = () =>
  ({
    "Cache-Control": "no-cache",
    "Content-Type": "text/event-stream",
    Connection: "keep-alive",
    "X-Accel-Buffering": "no"
  }) as const;

type TAuthInfo = {
  actorId: string;
  projectId: string;
  permission: MongoAbility<ProjectPermissionSet, MongoQuery>;
};

export interface IEventStreamClientOpts {
  type: ProjectType;
  registered: RegisteredEvent[];
  onAuthRefresh: (info: TAuthInfo) => Promise<void> | void;
  getAuthInfo: () => Promise<TAuthInfo> | TAuthInfo;
}

interface EventMessage {
  time?: string | number;
  type: string;
  data?: unknown;
}

function serializeSseEvent(chunk: EventMessage): string {
  let payload = "";

  if (chunk.time) payload += `id: ${chunk.time}\n`;
  if (chunk.type) payload += `event: ${chunk.type}\n`;
  if (chunk.data) payload += `data: ${JSON.stringify(chunk)}\n`;

  return `${payload}\n`;
}

export type EventStreamClient = {
  id: string;
  stream: Readable;
  open: () => Promise<void>;
  send: (data: EventMessage | EventData) => void;
  ping: () => Promise<void>;
  refresh: () => Promise<void>;
  close: () => void;
  get auth(): TAuthInfo;
  signal: AbortSignal;
  abort: () => void;
  matcher: PureAbility;
};

export function createEventStreamClient(redis: Redis, options: IEventStreamClientOpts): EventStreamClient {
  const rules = options.registered.map((r) => ({
    subject: options.type,
    action: "subscribe",
    conditions: {
      eventType: r.event,
      secretPath: r.conditions?.secretPath ?? "/",
      environment: r.conditions?.environmentSlug
    }
  }));

  const id = `sse-${nanoid()}`;
  const control = new AbortController();
  const matcher = new PureAbility(rules, { conditionsMatcher });

  let auth: TAuthInfo | undefined;

  const stream = new Readable({
    objectMode: true
  });

  // We will manually push data to the stream
  stream._read = () => {};

  const send = (data: EventMessage | EventData) => {
    const chunk = serializeSseEvent(data);
    if (!stream.push(chunk)) {
      logger.debug("Backpressure detected: dropped manual event");
    }
  };

  stream.on("error", (error: Error) => stream.destroy(error));

  const open = async () => {
    auth = await options.getAuthInfo();
    await options.onAuthRefresh(auth);

    const { actorId, projectId } = auth;
    const set = KeyStorePrefixes.ActiveSSEConnectionsSet(projectId, actorId);
    const key = KeyStorePrefixes.ActiveSSEConnections(projectId, actorId, id);

    await Promise.all([redis.rpush(set, id), redis.set(key, "1", "EX", 60)]);
  };

  const ping = async () => {
    if (!auth) return; // Avoid race condition if ping is called before open

    const { actorId, projectId } = auth;
    const key = KeyStorePrefixes.ActiveSSEConnections(projectId, actorId, id);

    await redis.set(key, "1", "EX", 60);

    stream.push("1");
  };

  const close = () => {
    if (stream.closed) return;
    stream.push(null);
    stream.destroy();
  };

  /**
   * Refreshes the connection's auth permissions
   * Must be called atleast once when connection is opened
   */
  const refresh = async () => {
    try {
      auth = await options.getAuthInfo();
      await options.onAuthRefresh(auth);
    } catch (error) {
      if (error instanceof Error) {
        send({
          type: "error",
          data: {
            ...error
          }
        });
        return close();
      }
      stream.emit("error", error);
    }
  };

  const abort = () => {
    try {
      control.abort();
    } catch (error) {
      logger.debug(error, "Error aborting SSE stream");
    }
  };

  return {
    id,
    stream,
    open,
    send,
    ping,
    refresh,
    close,
    signal: control.signal,
    abort,
    matcher,
    get auth() {
      if (!auth) {
        throw new Error("Auth info not set");
      }

      return auth;
    }
  };
}
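Reviewer note, not part of this diff: a hypothetical sketch of how a route handler could consume an EventStreamClient. It only uses the exports defined above plus Node's HTTP types; the actual router integration for this feature is outside this excerpt, and the handler name and parameters below are assumptions.

import { IncomingMessage, ServerResponse } from "node:http";
import Redis from "ioredis";

import { createEventStreamClient, getServerSentEventsHeaders, IEventStreamClientOpts } from "./event-sse-stream";

export const handleSseRequest = async (
  _req: IncomingMessage,
  res: ServerResponse,
  redis: Redis,
  opts: IEventStreamClientOpts // caller supplies project type, registered events and auth loaders
) => {
  const client = createEventStreamClient(redis, opts);

  res.writeHead(200, getServerSentEventsHeaders()); // Cache-Control / Content-Type / keep-alive headers for SSE
  client.stream.pipe(res); // forward serialized `event:` / `data:` frames as they are pushed

  await client.open(); // loads auth info and registers the connection in Redis

  res.on("close", () => client.close()); // end the readable stream when the client disconnects
};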
backend/src/ee/services/event/types.ts (new file, 125 lines)
@@ -0,0 +1,125 @@
import { z } from "zod";

import { ProjectType } from "@app/db/schemas";
import { Event, EventType } from "@app/ee/services/audit-log/audit-log-types";

export enum TopicName {
  CoreServers = "infisical::core-servers"
}

export enum BusEventName {
  CreateSecret = "secret:create",
  UpdateSecret = "secret:update",
  DeleteSecret = "secret:delete"
}

type PublisableEventTypes =
  | EventType.CREATE_SECRET
  | EventType.CREATE_SECRETS
  | EventType.DELETE_SECRET
  | EventType.DELETE_SECRETS
  | EventType.UPDATE_SECRETS
  | EventType.UPDATE_SECRET;

export function toBusEventName(input: EventType) {
  switch (input) {
    case EventType.CREATE_SECRET:
    case EventType.CREATE_SECRETS:
      return BusEventName.CreateSecret;
    case EventType.UPDATE_SECRET:
    case EventType.UPDATE_SECRETS:
      return BusEventName.UpdateSecret;
    case EventType.DELETE_SECRET:
    case EventType.DELETE_SECRETS:
      return BusEventName.DeleteSecret;
    default:
      return null;
  }
}

const isBulkEvent = (event: Event): event is Extract<Event, { metadata: { secrets: Array<unknown> } }> => {
  return event.type.endsWith("-secrets"); // Feels so wrong
};

export const toPublishableEvent = (event: Event) => {
  const name = toBusEventName(event.type);

  if (!name) return null;

  const e = event as Extract<Event, { type: PublisableEventTypes }>;

  if (isBulkEvent(e)) {
    return {
      name,
      isBulk: true,
      data: {
        eventType: e.type,
        payload: e.metadata.secrets.map((s) => ({
          environment: e.metadata.environment,
          secretPath: e.metadata.secretPath,
          ...s
        }))
      }
    } as const;
  }

  return {
    name,
    isBulk: false,
    data: {
      eventType: e.type,
      payload: {
        ...e.metadata,
        environment: e.metadata.environment
      }
    }
  } as const;
};

export const EventName = z.nativeEnum(BusEventName);

const EventSecretPayload = z.object({
  secretPath: z.string().optional(),
  secretId: z.string(),
  secretKey: z.string(),
  environment: z.string()
});

export type EventSecret = z.infer<typeof EventSecretPayload>;

export const EventSchema = z.object({
  datacontenttype: z.literal("application/json").optional().default("application/json"),
  type: z.nativeEnum(ProjectType),
  source: z.string(),
  time: z
    .string()
    .optional()
    .default(() => new Date().toISOString()),
  data: z.discriminatedUnion("eventType", [
    z.object({
      specversion: z.number().optional().default(1),
      eventType: z.enum([EventType.CREATE_SECRET, EventType.UPDATE_SECRET, EventType.DELETE_SECRET]),
      payload: EventSecretPayload
    }),
    z.object({
      specversion: z.number().optional().default(1),
      eventType: z.enum([EventType.CREATE_SECRETS, EventType.UPDATE_SECRETS, EventType.DELETE_SECRETS]),
      payload: EventSecretPayload.array()
    })
    // Add more event types as needed
  ])
});

export type EventData = z.infer<typeof EventSchema>;

export const EventRegisterSchema = z.object({
  event: EventName,
  conditions: z
    .object({
      secretPath: z.string().optional().default("/"),
      environmentSlug: z.string()
    })
    .optional()
});

export type RegisteredEvent = z.infer<typeof EventRegisterSchema>;
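Reviewer note, not part of this diff: a small sketch of validating an envelope with the EventSchema defined above. The field values are illustrative; the point is that the optional fields with defaults (datacontenttype, time, data.specversion) are filled in by the parser.

import { ProjectType } from "@app/db/schemas";
import { EventType } from "@app/ee/services/audit-log/audit-log-types";

import { EventSchema } from "./types";

const envelope = EventSchema.parse({
  type: ProjectType.SecretManager,
  source: "infiscal",
  data: {
    eventType: EventType.UPDATE_SECRET,
    payload: { secretId: "abc", secretKey: "DB_PASSWORD", environment: "prod", secretPath: "/api" }
  }
});

// Defaults are applied by the schema even though they were omitted above.
console.log(envelope.datacontenttype); // "application/json"
console.log(envelope.time); // ISO timestamp generated at parse time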
@@ -59,7 +59,8 @@ export const getDefaultOnPremFeatures = (): TFeatureSet => ({
   secretScanning: false,
   enterpriseSecretSyncs: false,
   enterpriseAppConnections: false,
-  fips: false
+  fips: false,
+  eventSubscriptions: false
 });

 export const setupLicenseRequestWithStore = (
@@ -5,13 +5,14 @@
 // TODO(akhilmhdh): With tony find out the api structure and fill it here

 import { ForbiddenError } from "@casl/ability";
+import { AxiosError } from "axios";
 import { CronJob } from "cron";
 import { Knex } from "knex";

 import { TKeyStoreFactory } from "@app/keystore/keystore";
 import { getConfig } from "@app/lib/config/env";
 import { verifyOfflineLicense } from "@app/lib/crypto";
-import { NotFoundError } from "@app/lib/errors";
+import { BadRequestError, NotFoundError } from "@app/lib/errors";
 import { logger } from "@app/lib/logger";
 import { TIdentityOrgDALFactory } from "@app/services/identity/identity-org-dal";
 import { TOrgDALFactory } from "@app/services/org/org-dal";
@@ -603,10 +604,22 @@ export const licenseServiceFactory = ({
       });
     }

-    const { data } = await licenseServerCloudApi.request.delete(
-      `/api/license-server/v1/customers/${organization.customerId}/billing-details/payment-methods/${pmtMethodId}`
-    );
-    return data;
+    try {
+      const { data } = await licenseServerCloudApi.request.delete(
+        `/api/license-server/v1/customers/${organization.customerId}/billing-details/payment-methods/${pmtMethodId}`
+      );
+      return data;
+    } catch (error) {
+      if (error instanceof AxiosError) {
+        throw new BadRequestError({
+          // eslint-disable-next-line @typescript-eslint/no-unsafe-member-access
+          message: `Failed to remove payment method: ${error.response?.data?.message}`
+        });
+      }
+      throw new BadRequestError({
+        message: "Unable to remove payment method"
+      });
+    }
   };

   const getOrgTaxIds = async ({ orgId, actor, actorId, actorAuthMethod, actorOrgId }: TGetOrgTaxIdDTO) => {
@@ -76,6 +76,7 @@ export type TFeatureSet = {
   enterpriseSecretSyncs: false;
   enterpriseAppConnections: false;
   fips: false;
+  eventSubscriptions: false;
 };

 export type TOrgPlansTableDTO = {
@@ -161,7 +161,8 @@ const buildAdminPermissionRules = () => {
       ProjectPermissionSecretActions.ReadValue,
       ProjectPermissionSecretActions.Create,
       ProjectPermissionSecretActions.Edit,
-      ProjectPermissionSecretActions.Delete
+      ProjectPermissionSecretActions.Delete,
+      ProjectPermissionSecretActions.Subscribe
     ],
     ProjectPermissionSub.Secrets
   );
@@ -265,7 +266,8 @@ const buildMemberPermissionRules = () => {
       ProjectPermissionSecretActions.ReadValue,
       ProjectPermissionSecretActions.Edit,
       ProjectPermissionSecretActions.Create,
-      ProjectPermissionSecretActions.Delete
+      ProjectPermissionSecretActions.Delete,
+      ProjectPermissionSecretActions.Subscribe
     ],
     ProjectPermissionSub.Secrets
   );
@@ -36,7 +36,8 @@ export enum ProjectPermissionSecretActions {
   ReadValue = "readValue",
   Create = "create",
   Edit = "edit",
-  Delete = "delete"
+  Delete = "delete",
+  Subscribe = "subscribe"
 }

 export enum ProjectPermissionCmekActions {
@@ -204,6 +205,7 @@ export type SecretSubjectFields = {
   secretPath: string;
   secretName?: string;
   secretTags?: string[];
+  eventType?: string;
 };

 export type SecretFolderSubjectFields = {
@@ -483,7 +485,17 @@ const SecretConditionV2Schema = z
       .object({
         [PermissionConditionOperators.$IN]: PermissionConditionSchema[PermissionConditionOperators.$IN]
       })
-      .partial()
+      .partial(),
+    eventType: z.union([
+      z.string(),
+      z
+        .object({
+          [PermissionConditionOperators.$EQ]: PermissionConditionSchema[PermissionConditionOperators.$EQ],
+          [PermissionConditionOperators.$NEQ]: PermissionConditionSchema[PermissionConditionOperators.$NEQ],
+          [PermissionConditionOperators.$IN]: PermissionConditionSchema[PermissionConditionOperators.$IN]
+        })
+        .partial()
+    ])
   })
   .partial();
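Reviewer note, not part of this diff: the permission hunks above add a "subscribe" action and an "eventType" condition for the secrets subject. A hedged sketch of what a project role rule could look like once this lands; the subject string, operators and condition values below are illustrative assumptions, not taken from this PR.

// Illustrative rule shape using the new Subscribe action and eventType condition.
const subscribeRule = {
  subject: "secrets", // ProjectPermissionSub.Secrets
  action: ["subscribe"], // ProjectPermissionSecretActions.Subscribe
  conditions: {
    environment: "dev",
    secretPath: { $eq: "/api" },
    eventType: { $in: ["secret:create", "secret:update"] } // BusEventName values
  }
};

console.log(JSON.stringify(subscribeRule, null, 2));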
@@ -65,7 +65,10 @@ import { SmtpTemplates, TSmtpService } from "@app/services/smtp/smtp-service";
 import { TUserDALFactory } from "@app/services/user/user-dal";

 import { TLicenseServiceFactory } from "../license/license-service";
-import { throwIfMissingSecretReadValueOrDescribePermission } from "../permission/permission-fns";
+import {
+  hasSecretReadValueOrDescribePermission,
+  throwIfMissingSecretReadValueOrDescribePermission
+} from "../permission/permission-fns";
 import { TPermissionServiceFactory } from "../permission/permission-service-types";
 import { ProjectPermissionSecretActions, ProjectPermissionSub } from "../permission/project-permission";
 import { TSecretApprovalPolicyDALFactory } from "../secret-approval-policy/secret-approval-policy-dal";
@@ -277,13 +280,19 @@ export const secretApprovalRequestServiceFactory = ({
     ) {
       throw new ForbiddenRequestError({ message: "User has insufficient privileges" });
     }
-    const hasSecretReadAccess = permission.can(
-      ProjectPermissionSecretActions.DescribeAndReadValue,
-      ProjectPermissionSub.Secrets
-    );
+    const getHasSecretReadAccess = (environment: string, tags: { slug: string }[], secretPath?: string) => {
+      const canRead = hasSecretReadValueOrDescribePermission(permission, ProjectPermissionSecretActions.ReadValue, {
+        environment,
+        secretPath: secretPath || "/",
+        secretTags: tags.map((i) => i.slug)
+      });
+      return canRead;
+    };

     let secrets;
+    const secretPath = await folderDAL.findSecretPathByFolderIds(secretApprovalRequest.projectId, [
+      secretApprovalRequest.folderId
+    ]);
     if (shouldUseSecretV2Bridge) {
       const { decryptor: secretManagerDecryptor } = await kmsService.createCipherPairWithDataKey({
         type: KmsDataKey.SecretManager,
@@ -299,8 +308,8 @@ export const secretApprovalRequestServiceFactory = ({
         version: el.version,
         secretMetadata: el.secretMetadata as ResourceMetadataDTO,
         isRotatedSecret: el.secret?.isRotatedSecret ?? false,
-        secretValueHidden: !hasSecretReadAccess,
-        secretValue: !hasSecretReadAccess
+        secretValueHidden: !getHasSecretReadAccess(secretApprovalRequest.environment, el.tags, secretPath?.[0]?.path),
+        secretValue: !getHasSecretReadAccess(secretApprovalRequest.environment, el.tags, secretPath?.[0]?.path)
           ? INFISICAL_SECRET_VALUE_HIDDEN_MASK
           : el.secret && el.secret.isRotatedSecret
             ? undefined
@@ -315,8 +324,12 @@ export const secretApprovalRequestServiceFactory = ({
             secretKey: el.secret.key,
             id: el.secret.id,
             version: el.secret.version,
-            secretValueHidden: !hasSecretReadAccess,
-            secretValue: !hasSecretReadAccess
+            secretValueHidden: !getHasSecretReadAccess(
+              secretApprovalRequest.environment,
+              el.tags,
+              secretPath?.[0]?.path
+            ),
+            secretValue: !getHasSecretReadAccess(secretApprovalRequest.environment, el.tags, secretPath?.[0]?.path)
               ? INFISICAL_SECRET_VALUE_HIDDEN_MASK
               : el.secret.encryptedValue
                 ? secretManagerDecryptor({ cipherTextBlob: el.secret.encryptedValue }).toString()
@@ -331,8 +344,12 @@ export const secretApprovalRequestServiceFactory = ({
             secretKey: el.secretVersion.key,
             id: el.secretVersion.id,
             version: el.secretVersion.version,
-            secretValueHidden: !hasSecretReadAccess,
-            secretValue: !hasSecretReadAccess
+            secretValueHidden: !getHasSecretReadAccess(
+              secretApprovalRequest.environment,
+              el.tags,
+              secretPath?.[0]?.path
+            ),
+            secretValue: !getHasSecretReadAccess(secretApprovalRequest.environment, el.tags, secretPath?.[0]?.path)
              ? INFISICAL_SECRET_VALUE_HIDDEN_MASK
              : el.secretVersion.encryptedValue
                ? secretManagerDecryptor({ cipherTextBlob: el.secretVersion.encryptedValue }).toString()
@@ -350,7 +367,7 @@ export const secretApprovalRequestServiceFactory = ({
     const encryptedSecrets = await secretApprovalRequestSecretDAL.findByRequestId(secretApprovalRequest.id);
     secrets = encryptedSecrets.map((el) => ({
       ...el,
-      secretValueHidden: !hasSecretReadAccess,
+      secretValueHidden: !getHasSecretReadAccess(secretApprovalRequest.environment, el.tags, secretPath?.[0]?.path),
       ...decryptSecretWithBot(el, botKey),
       secret: el.secret
         ? {
@@ -370,9 +387,6 @@ export const secretApprovalRequestServiceFactory = ({
         : undefined
     }));
   }
-  const secretPath = await folderDAL.findSecretPathByFolderIds(secretApprovalRequest.projectId, [
-    secretApprovalRequest.folderId
-  ]);

   return { ...secretApprovalRequest, secretPath: secretPath?.[0]?.path || "/", commits: secrets };
 };
@@ -21,6 +21,8 @@ const GRAPH_API_BASE = "https://graph.microsoft.com/v1.0";

 type AzureErrorResponse = { error: { message: string } };

+const EXPIRY_PADDING_IN_DAYS = 3;
+
 const sleep = async () =>
   new Promise((resolve) => {
     setTimeout(resolve, 1000);
@@ -33,7 +35,8 @@ export const azureClientSecretRotationFactory: TRotationFactory<
   const {
     connection,
     parameters: { objectId, clientId: clientIdParam },
-    secretsMapping
+    secretsMapping,
+    rotationInterval
   } = secretRotation;

   /**
@@ -50,7 +53,7 @@ export const azureClientSecretRotationFactory: TRotationFactory<
   )}-${now.getFullYear()}`;

   const endDateTime = new Date();
-  endDateTime.setFullYear(now.getFullYear() + 5);
+  endDateTime.setDate(now.getDate() + rotationInterval * 2 + EXPIRY_PADDING_IN_DAYS); // give 72 hour buffer

   try {
     const { data } = await request.post<AzureAddPasswordResponse>(
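Reviewer note, not part of this diff: the hunk above replaces the fixed five-year client secret expiry with one derived from the rotation interval. A quick worked sketch of the arithmetic, using an illustrative interval of 30 days.

// Worked example of the new expiry window (illustrative values).
const EXPIRY_PADDING_IN_DAYS = 3;
const rotationInterval = 30; // days between rotations

const now = new Date("2025-01-01T00:00:00.000Z");
const endDateTime = new Date(now);
endDateTime.setDate(now.getDate() + rotationInterval * 2 + EXPIRY_PADDING_IN_DAYS);

// 30 * 2 + 3 = 63 days: the credential stays valid for two rotation periods
// plus a 72-hour buffer, instead of a flat five years.
console.log(endDateTime.toISOString()); // 2025-03-05T00:00:00.000Z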
@@ -195,6 +198,12 @@ export const azureClientSecretRotationFactory: TRotationFactory<
     callback
   ) => {
     const credentials = await $rotateClientSecret();
+
+    // 2.5 years as expiry is set to x2 interval for the inactive period of credential
+    if (rotationInterval > Math.floor(365 * 2.5) - EXPIRY_PADDING_IN_DAYS) {
+      throw new BadRequestError({ message: "Azure does not support token duration over 5 years" });
+    }
+
     return callback(credentials);
   };
@@ -51,6 +51,7 @@ const baseSecretRotationV2Query = ({
     db.ref("encryptedCredentials").withSchema(TableName.AppConnection).as("connectionEncryptedCredentials"),
     db.ref("description").withSchema(TableName.AppConnection).as("connectionDescription"),
     db.ref("version").withSchema(TableName.AppConnection).as("connectionVersion"),
+    db.ref("gatewayId").withSchema(TableName.AppConnection).as("connectionGatewayId"),
     db.ref("createdAt").withSchema(TableName.AppConnection).as("connectionCreatedAt"),
     db.ref("updatedAt").withSchema(TableName.AppConnection).as("connectionUpdatedAt"),
     db
@@ -104,6 +105,7 @@ const expandSecretRotation = <T extends Awaited<ReturnType<typeof baseSecretRotationV2Query>>[number]>(
     connectionCreatedAt,
     connectionUpdatedAt,
     connectionVersion,
+    connectionGatewayId,
     connectionIsPlatformManagedCredentials,
     ...el
   } = secretRotation;
@@ -123,6 +125,7 @@ const expandSecretRotation = <T extends Awaited<ReturnType<typeof baseSecretRotationV2Query>>[number]>(
       createdAt: connectionCreatedAt,
       updatedAt: connectionUpdatedAt,
       version: connectionVersion,
+      gatewayId: connectionGatewayId,
       isPlatformManagedCredentials: connectionIsPlatformManagedCredentials
     },
     folder: {
@@ -18,7 +18,8 @@ import {
   TSecretScanningFactoryInitialize,
   TSecretScanningFactoryListRawResources,
   TSecretScanningFactoryPostInitialization,
-  TSecretScanningFactoryTeardown
+  TSecretScanningFactoryTeardown,
+  TSecretScanningFactoryValidateConfigUpdate
 } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-types";
 import { getConfig } from "@app/lib/config/env";
 import { request } from "@app/lib/config/request";
@@ -302,6 +303,13 @@ export const BitbucketSecretScanningFactory = () => {
     );
   };

+  const validateConfigUpdate: TSecretScanningFactoryValidateConfigUpdate<
+    TBitbucketDataSourceInput["config"],
+    TBitbucketDataSourceWithConnection
+  > = async () => {
+    // no validation required
+  };
+
   return {
     initialize,
     postInitialization,
@@ -309,6 +317,7 @@ export const BitbucketSecretScanningFactory = () => {
     getFullScanPath,
     getDiffScanResourcePayload,
     getDiffScanFindingsPayload,
-    teardown
+    teardown,
+    validateConfigUpdate
   };
 };
@@ -20,7 +20,8 @@ import {
   TSecretScanningFactoryInitialize,
   TSecretScanningFactoryListRawResources,
   TSecretScanningFactoryPostInitialization,
-  TSecretScanningFactoryTeardown
+  TSecretScanningFactoryTeardown,
+  TSecretScanningFactoryValidateConfigUpdate
 } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-types";
 import { getConfig } from "@app/lib/config/env";
 import { BadRequestError } from "@app/lib/errors";
@@ -64,7 +65,14 @@ export const GitHubSecretScanningFactory = () => {
   };

   const teardown: TSecretScanningFactoryTeardown<TGitHubDataSourceWithConnection> = async () => {
-    // no termination required
+    // no teardown required
+  };
+
+  const validateConfigUpdate: TSecretScanningFactoryValidateConfigUpdate<
+    TGitHubDataSourceInput["config"],
+    TGitHubDataSourceWithConnection
+  > = async () => {
+    // no validation required
   };

   const listRawResources: TSecretScanningFactoryListRawResources<TGitHubDataSourceWithConnection> = async (
@@ -238,6 +246,7 @@ export const GitHubSecretScanningFactory = () => {
     getFullScanPath,
     getDiffScanResourcePayload,
     getDiffScanFindingsPayload,
-    teardown
+    teardown,
+    validateConfigUpdate
   };
 };
@@ -0,0 +1,9 @@
import { SecretScanningDataSource } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
import { TSecretScanningDataSourceListItem } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-types";
import { AppConnection } from "@app/services/app-connection/app-connection-enums";

export const GITLAB_SECRET_SCANNING_DATA_SOURCE_LIST_OPTION: TSecretScanningDataSourceListItem = {
  name: "GitLab",
  type: SecretScanningDataSource.GitLab,
  connection: AppConnection.GitLab
};
@@ -0,0 +1,8 @@
export enum GitLabDataSourceScope {
  Project = "project",
  Group = "group"
}

export enum GitLabWebHookEvent {
  Push = "Push Hook"
}
@@ -0,0 +1,409 @@
import { Camelize, GitbeakerRequestError, GroupHookSchema, ProjectHookSchema } from "@gitbeaker/rest";
import { join } from "path";

import { scanContentAndGetFindings } from "@app/ee/services/secret-scanning/secret-scanning-queue/secret-scanning-fns";
import { SecretMatch } from "@app/ee/services/secret-scanning/secret-scanning-queue/secret-scanning-queue-types";
import {
  SecretScanningFindingSeverity,
  SecretScanningResource
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
import {
  cloneRepository,
  convertPatchLineToFileLineNumber,
  replaceNonChangesWithNewlines
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-fns";
import {
  TSecretScanningFactoryGetDiffScanFindingsPayload,
  TSecretScanningFactoryGetDiffScanResourcePayload,
  TSecretScanningFactoryGetFullScanPath,
  TSecretScanningFactoryInitialize,
  TSecretScanningFactoryListRawResources,
  TSecretScanningFactoryParams,
  TSecretScanningFactoryPostInitialization,
  TSecretScanningFactoryTeardown,
  TSecretScanningFactoryValidateConfigUpdate
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-types";
import { getConfig } from "@app/lib/config/env";
import { BadRequestError } from "@app/lib/errors";
import { titleCaseToCamelCase } from "@app/lib/fn";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { GitLabProjectRegex } from "@app/lib/regex";
import {
  getGitLabConnectionClient,
  getGitLabInstanceUrl,
  TGitLabConnection
} from "@app/services/app-connection/gitlab";

import { GitLabDataSourceScope } from "./gitlab-secret-scanning-enums";
import {
  TGitLabDataSourceCredentials,
  TGitLabDataSourceInput,
  TGitLabDataSourceWithConnection,
  TQueueGitLabResourceDiffScan
} from "./gitlab-secret-scanning-types";

const getMainDomain = (instanceUrl: string) => {
  const url = new URL(instanceUrl);
  const { hostname } = url;
  const parts = hostname.split(".");

  if (parts.length >= 2) {
    return parts.slice(-2).join(".");
  }

  return hostname;
};

export const GitLabSecretScanningFactory = ({ appConnectionDAL, kmsService }: TSecretScanningFactoryParams) => {
  const initialize: TSecretScanningFactoryInitialize<
    TGitLabDataSourceInput,
    TGitLabConnection,
    TGitLabDataSourceCredentials
  > = async ({ payload: { config, name }, connection }, callback) => {
    const token = alphaNumericNanoId(64);

    const client = await getGitLabConnectionClient(connection, appConnectionDAL, kmsService);
    const appCfg = getConfig();

    if (config.scope === GitLabDataSourceScope.Project) {
      const { projectId } = config;
      const project = await client.Projects.show(projectId);

      if (!project) {
        throw new BadRequestError({ message: `Could not find project with ID ${projectId}.` });
      }

      let hook: Camelize<ProjectHookSchema>;
      try {
        hook = await client.ProjectHooks.add(projectId, `${appCfg.SITE_URL}/secret-scanning/webhooks/gitlab`, {
          token,
          pushEvents: true,
          enableSslVerification: true,
          // @ts-expect-error gitbeaker is outdated, and the types don't support this field yet
          name: `Infisical Secret Scanning - ${name}`
        });
      } catch (error) {
        if (error instanceof GitbeakerRequestError) {
          throw new BadRequestError({ message: `${error.message}: ${error.cause?.description ?? "Unknown Error"}` });
        }

        throw error;
      }

      try {
        return await callback({
          credentials: {
            token,
            hookId: hook.id
          }
        });
      } catch (error) {
        try {
          await client.ProjectHooks.remove(projectId, hook.id);
        } catch {
          // do nothing, just try to clean up webhook
        }

        throw error;
      }
    }

    // group scope
    const { groupId } = config;

    const group = await client.Groups.show(groupId);

    if (!group) {
      throw new BadRequestError({ message: `Could not find group with ID ${groupId}.` });
    }

    let hook: Camelize<GroupHookSchema>;
    try {
      hook = await client.GroupHooks.add(groupId, `${appCfg.SITE_URL}/secret-scanning/webhooks/gitlab`, {
        token,
        pushEvents: true,
        enableSslVerification: true,
        // @ts-expect-error gitbeaker is outdated, and the types don't support this field yet
        name: `Infisical Secret Scanning - ${name}`
      });
    } catch (error) {
      if (error instanceof GitbeakerRequestError) {
        throw new BadRequestError({ message: `${error.message}: ${error.cause?.description ?? "Unknown Error"}` });
      }

      throw error;
    }

    try {
      return await callback({
        credentials: {
          token,
          hookId: hook.id
        }
      });
    } catch (error) {
      try {
        await client.GroupHooks.remove(groupId, hook.id);
      } catch {
        // do nothing, just try to clean up webhook
      }

      throw error;
    }
  };

  const postInitialization: TSecretScanningFactoryPostInitialization<
    TGitLabDataSourceInput,
    TGitLabConnection,
    TGitLabDataSourceCredentials
  > = async ({ connection, dataSourceId, credentials, payload: { config } }) => {
    const client = await getGitLabConnectionClient(connection, appConnectionDAL, kmsService);
    const appCfg = getConfig();

    const hookUrl = `${appCfg.SITE_URL}/secret-scanning/webhooks/gitlab`;
    const { hookId } = credentials;

    if (config.scope === GitLabDataSourceScope.Project) {
      const { projectId } = config;

      try {
        await client.ProjectHooks.edit(projectId, hookId, hookUrl, {
          // @ts-expect-error gitbeaker is outdated, and the types don't support this field yet
          name: `Infisical Secret Scanning - ${dataSourceId}`,
          custom_headers: [{ key: "x-data-source-id", value: dataSourceId }]
        });
      } catch (error) {
        try {
          await client.ProjectHooks.remove(projectId, hookId);
        } catch {
          // do nothing, just try to clean up webhook
        }

        throw error;
      }

      return;
    }

    // group-scope
    const { groupId } = config;

    try {
      await client.GroupHooks.edit(groupId, hookId, hookUrl, {
        // @ts-expect-error gitbeaker is outdated, and the types don't support this field yet
        name: `Infisical Secret Scanning - ${dataSourceId}`,
        custom_headers: [{ key: "x-data-source-id", value: dataSourceId }]
      });
    } catch (error) {
      try {
        await client.GroupHooks.remove(groupId, hookId);
      } catch {
        // do nothing, just try to clean up webhook
      }

      throw error;
    }
  };

  const listRawResources: TSecretScanningFactoryListRawResources<TGitLabDataSourceWithConnection> = async (
    dataSource
  ) => {
    const { connection, config } = dataSource;

    const client = await getGitLabConnectionClient(connection, appConnectionDAL, kmsService);

    if (config.scope === GitLabDataSourceScope.Project) {
      const { projectId } = config;

      const project = await client.Projects.show(projectId);

      if (!project) {
        throw new BadRequestError({ message: `Could not find project with ID ${projectId}.` });
      }

      // scott: even though we have this data we want to get potentially updated name
      return [
        {
          name: project.pathWithNamespace,
          externalId: project.id.toString(),
          type: SecretScanningResource.Project
        }
      ];
    }

    // group-scope

    const { groupId, includeProjects } = config;

    const projects = await client.Groups.allProjects(groupId, {
      archived: false
    });

    const filteredProjects: typeof projects = [];
    if (!includeProjects || includeProjects.includes("*")) {
      filteredProjects.push(...projects);
    } else {
      filteredProjects.push(...projects.filter((project) => includeProjects.includes(project.pathWithNamespace)));
    }

    return filteredProjects.map(({ id, pathWithNamespace }) => ({
      name: pathWithNamespace,
      externalId: id.toString(),
      type: SecretScanningResource.Project
    }));
  };

  const getFullScanPath: TSecretScanningFactoryGetFullScanPath<TGitLabDataSourceWithConnection> = async ({
    dataSource,
    resourceName,
    tempFolder
  }) => {
    const { connection } = dataSource;

    const instanceUrl = await getGitLabInstanceUrl(connection.credentials.instanceUrl);

    const client = await getGitLabConnectionClient(connection, appConnectionDAL, kmsService);

    const user = await client.Users.showCurrentUser();

    const repoPath = join(tempFolder, "repo.git");

    if (!GitLabProjectRegex.test(resourceName)) {
      throw new Error("Invalid GitLab project name");
    }

    await cloneRepository({
      cloneUrl: `https://${user.username}:${connection.credentials.accessToken}@${getMainDomain(instanceUrl)}/${resourceName}.git`,
      repoPath
    });

    return repoPath;
  };

  const teardown: TSecretScanningFactoryTeardown<
    TGitLabDataSourceWithConnection,
    TGitLabDataSourceCredentials
  > = async ({ dataSource: { connection, config }, credentials: { hookId } }) => {
    const client = await getGitLabConnectionClient(connection, appConnectionDAL, kmsService);

    if (config.scope === GitLabDataSourceScope.Project) {
      const { projectId } = config;
      try {
        await client.ProjectHooks.remove(projectId, hookId);
      } catch (error) {
        // do nothing, just try to clean up webhook
      }
      return;
    }

    const { groupId } = config;
    try {
      await client.GroupHooks.remove(groupId, hookId);
    } catch (error) {
      // do nothing, just try to clean up webhook
    }
  };

  const getDiffScanResourcePayload: TSecretScanningFactoryGetDiffScanResourcePayload<
    TQueueGitLabResourceDiffScan["payload"]
  > = ({ project }) => {
    return {
      name: project.path_with_namespace,
      externalId: project.id.toString(),
      type: SecretScanningResource.Project
    };
  };

  const getDiffScanFindingsPayload: TSecretScanningFactoryGetDiffScanFindingsPayload<
    TGitLabDataSourceWithConnection,
    TQueueGitLabResourceDiffScan["payload"]
  > = async ({ dataSource, payload, resourceName, configPath }) => {
    const { connection } = dataSource;

    const client = await getGitLabConnectionClient(connection, appConnectionDAL, kmsService);

    const { commits, project } = payload;

    const allFindings: SecretMatch[] = [];

    for (const commit of commits) {
      // eslint-disable-next-line no-await-in-loop
      const commitDiffs = await client.Commits.showDiff(project.id, commit.id);

      for (const commitDiff of commitDiffs) {
        // eslint-disable-next-line no-continue
        if (commitDiff.deletedFile) continue;

        // eslint-disable-next-line no-await-in-loop
        const findings = await scanContentAndGetFindings(
          replaceNonChangesWithNewlines(`\n${commitDiff.diff}`),
          configPath
        );

        const adjustedFindings = findings.map((finding) => {
          const startLine = convertPatchLineToFileLineNumber(commitDiff.diff, finding.StartLine);
          const endLine =
            finding.StartLine === finding.EndLine
              ? startLine
              : convertPatchLineToFileLineNumber(commitDiff.diff, finding.EndLine);
          const startColumn = finding.StartColumn - 1; // subtract 1 for +
          const endColumn = finding.EndColumn - 1; // subtract 1 for +
          const authorName = commit.author.name;
          const authorEmail = commit.author.email;

          return {
            ...finding,
            StartLine: startLine,
            EndLine: endLine,
            StartColumn: startColumn,
            EndColumn: endColumn,
            File: commitDiff.newPath,
            Commit: commit.id,
            Author: authorName,
            Email: authorEmail,
            Message: commit.message,
            Fingerprint: `${commit.id}:${commitDiff.newPath}:${finding.RuleID}:${startLine}:${startColumn}`,
            Date: commit.timestamp,
            Link: `https://gitlab.com/${resourceName}/blob/${commit.id}/${commitDiff.newPath}#L${startLine}`
          };
        });

        allFindings.push(...adjustedFindings);
      }
    }

    return allFindings.map(
      ({
        // discard match and secret as we don't want to store
        Match,
        Secret,
        ...finding
      }) => ({
        details: titleCaseToCamelCase(finding),
        fingerprint: finding.Fingerprint,
        severity: SecretScanningFindingSeverity.High,
        rule: finding.RuleID
      })
    );
  };

  const validateConfigUpdate: TSecretScanningFactoryValidateConfigUpdate<
    TGitLabDataSourceInput["config"],
    TGitLabDataSourceWithConnection
  > = async ({ config, dataSource }) => {
    if (dataSource.config.scope !== config.scope) {
      throw new BadRequestError({ message: "Cannot change Data Source scope after creation." });
    }
  };

  return {
    listRawResources,
    getFullScanPath,
    initialize,
    postInitialization,
    teardown,
    getDiffScanResourcePayload,
    getDiffScanFindingsPayload,
    validateConfigUpdate
  };
};
@@ -0,0 +1,101 @@
import { z } from "zod";

import { GitLabDataSourceScope } from "@app/ee/services/secret-scanning-v2/gitlab/gitlab-secret-scanning-enums";
import {
  SecretScanningDataSource,
  SecretScanningResource
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
import {
  BaseCreateSecretScanningDataSourceSchema,
  BaseSecretScanningDataSourceSchema,
  BaseSecretScanningFindingSchema,
  BaseUpdateSecretScanningDataSourceSchema,
  GitRepositoryScanFindingDetailsSchema
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-schemas";
import { SecretScanningDataSources } from "@app/lib/api-docs";
import { GitLabProjectRegex } from "@app/lib/regex";
import { AppConnection } from "@app/services/app-connection/app-connection-enums";

export const GitLabDataSourceConfigSchema = z.discriminatedUnion("scope", [
  z.object({
    scope: z.literal(GitLabDataSourceScope.Group).describe(SecretScanningDataSources.CONFIG.GITLAB.scope),
    groupId: z.number().describe(SecretScanningDataSources.CONFIG.GITLAB.groupId),
    groupName: z.string().trim().max(256).optional().describe(SecretScanningDataSources.CONFIG.GITLAB.groupName),
    includeProjects: z
      .array(
        z
          .string()
          .min(1)
          .max(256)
          .refine((value) => value === "*" || GitLabProjectRegex.test(value), "Invalid project name format")
      )
      .nonempty("One or more projects required")
      .max(100, "Cannot configure more than 100 projects")
      .default(["*"])
      .describe(SecretScanningDataSources.CONFIG.GITLAB.includeProjects)
  }),
  z.object({
    scope: z.literal(GitLabDataSourceScope.Project).describe(SecretScanningDataSources.CONFIG.GITLAB.scope),
    projectName: z.string().trim().max(256).optional().describe(SecretScanningDataSources.CONFIG.GITLAB.projectName),
    projectId: z.number().describe(SecretScanningDataSources.CONFIG.GITLAB.projectId)
  })
]);

export const GitLabDataSourceSchema = BaseSecretScanningDataSourceSchema({
  type: SecretScanningDataSource.GitLab,
  isConnectionRequired: true
})
  .extend({
    config: GitLabDataSourceConfigSchema
  })
  .describe(
    JSON.stringify({
      title: "GitLab"
    })
  );

export const CreateGitLabDataSourceSchema = BaseCreateSecretScanningDataSourceSchema({
  type: SecretScanningDataSource.GitLab,
  isConnectionRequired: true
})
  .extend({
    config: GitLabDataSourceConfigSchema
  })
  .describe(
    JSON.stringify({
      title: "GitLab"
    })
  );

export const UpdateGitLabDataSourceSchema = BaseUpdateSecretScanningDataSourceSchema(SecretScanningDataSource.GitLab)
  .extend({
    config: GitLabDataSourceConfigSchema.optional()
  })
  .describe(
    JSON.stringify({
      title: "GitLab"
    })
  );

export const GitLabDataSourceListItemSchema = z
  .object({
    name: z.literal("GitLab"),
    connection: z.literal(AppConnection.GitLab),
    type: z.literal(SecretScanningDataSource.GitLab)
  })
  .describe(
    JSON.stringify({
      title: "GitLab"
    })
  );

export const GitLabFindingSchema = BaseSecretScanningFindingSchema.extend({
  resourceType: z.literal(SecretScanningResource.Project),
  dataSourceType: z.literal(SecretScanningDataSource.GitLab),
  details: GitRepositoryScanFindingDetailsSchema
});

export const GitLabDataSourceCredentialsSchema = z.object({
  token: z.string(),
  hookId: z.number()
});
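A minimal usage sketch of the config schema above (not part of this PR): it runs a group-scoped and a project-scoped config through the discriminated union, assuming the repo's "@app" path aliases and the GitLabDataSourceScope enum introduced in this diff.

import { GitLabDataSourceScope } from "@app/ee/services/secret-scanning-v2/gitlab/gitlab-secret-scanning-enums";
import { GitLabDataSourceConfigSchema } from "@app/ee/services/secret-scanning-v2/gitlab/gitlab-secret-scanning-schemas";

// Group scope: includeProjects is optional and defaults to ["*"] (scan all projects).
const groupConfig = GitLabDataSourceConfigSchema.parse({
  scope: GitLabDataSourceScope.Group,
  groupId: 123, // hypothetical value
  groupName: "example-group" // hypothetical value
});

// Project scope: only the numeric projectId is required beyond the scope literal.
const projectConfig = GitLabDataSourceConfigSchema.parse({
  scope: GitLabDataSourceScope.Project,
  projectId: 456 // hypothetical value
});

if (groupConfig.scope === GitLabDataSourceScope.Group) {
  console.log(groupConfig.includeProjects); // ["*"] filled in by the zod default
}
console.log(projectConfig.scope === GitLabDataSourceScope.Project); // true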
@@ -0,0 +1,94 @@
import { GitLabDataSourceScope } from "@app/ee/services/secret-scanning-v2/gitlab/gitlab-secret-scanning-enums";
import { TSecretScanningV2DALFactory } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-dal";
import { SecretScanningDataSource } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
import { TSecretScanningV2QueueServiceFactory } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-queue";
import { logger } from "@app/lib/logger";
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
import { KmsDataKey } from "@app/services/kms/kms-types";

import {
  TGitLabDataSource,
  TGitLabDataSourceCredentials,
  THandleGitLabPushEvent
} from "./gitlab-secret-scanning-types";

export const gitlabSecretScanningService = (
  secretScanningV2DAL: TSecretScanningV2DALFactory,
  secretScanningV2Queue: Pick<TSecretScanningV2QueueServiceFactory, "queueResourceDiffScan">,
  kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">
) => {
  const handlePushEvent = async ({ payload, token, dataSourceId }: THandleGitLabPushEvent) => {
    if (!payload.total_commits_count || !payload.project) {
      logger.warn(
        `secretScanningV2PushEvent: GitLab - Insufficient data [changes=${
          payload.total_commits_count ?? 0
        }] [projectName=${payload.project?.path_with_namespace ?? "unknown"}] [projectId=${payload.project?.id ?? "unknown"}]`
      );
      return;
    }

    const dataSource = (await secretScanningV2DAL.dataSources.findOne({
      id: dataSourceId,
      type: SecretScanningDataSource.GitLab
    })) as TGitLabDataSource | undefined;

    if (!dataSource) {
      logger.error(
        `secretScanningV2PushEvent: GitLab - Could not find data source [dataSourceId=${dataSourceId}] [projectId=${payload.project.id}]`
      );
      return;
    }

    const { isAutoScanEnabled, config, encryptedCredentials, projectId } = dataSource;

    if (!encryptedCredentials) {
      logger.info(
        `secretScanningV2PushEvent: GitLab - Could not find encrypted credentials [dataSourceId=${dataSource.id}] [projectId=${payload.project.id}]`
      );
      return;
    }

    const { decryptor } = await kmsService.createCipherPairWithDataKey({
      type: KmsDataKey.SecretManager,
      projectId
    });

    const decryptedCredentials = decryptor({ cipherTextBlob: encryptedCredentials });

    const credentials = JSON.parse(decryptedCredentials.toString()) as TGitLabDataSourceCredentials;

    if (token !== credentials.token) {
      logger.error(
        `secretScanningV2PushEvent: GitLab - Invalid webhook token [dataSourceId=${dataSource.id}] [projectId=${payload.project.id}]`
      );
      return;
    }

    if (!isAutoScanEnabled) {
      logger.info(
        `secretScanningV2PushEvent: GitLab - ignoring due to auto scan disabled [dataSourceId=${dataSource.id}] [projectId=${payload.project.id}]`
      );
      return;
    }

    if (
      config.scope === GitLabDataSourceScope.Project
        ? config.projectId.toString() === payload.project_id.toString()
        : config.includeProjects.includes("*") || config.includeProjects.includes(payload.project.path_with_namespace)
    ) {
      await secretScanningV2Queue.queueResourceDiffScan({
        dataSourceType: SecretScanningDataSource.GitLab,
        payload,
        dataSourceId: dataSource.id
      });
    } else {
      logger.info(
        `secretScanningV2PushEvent: GitLab - ignoring due to repository not being present in config [dataSourceId=${dataSource.id}] [projectId=${payload.project.id}]`
      );
    }
  };

  return {
    handlePushEvent
  };
};
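A hedged sketch of how a webhook endpoint might feed handlePushEvent; the route path and wiring below are assumptions and not the route registration from this PR. GitLab delivers the configured secret in the X-Gitlab-Token header, which handlePushEvent compares against the stored credentials before queueing a diff scan.

import Fastify from "fastify";
import { TGitLabDataSourcePushEventPayload } from "@app/ee/services/secret-scanning-v2/gitlab";

type TGitLabPushHandler = (args: {
  payload: TGitLabDataSourcePushEventPayload;
  dataSourceId: string;
  token: string;
}) => Promise<void>;

// Hypothetical wiring: `handlePushEvent` would come from gitlabSecretScanningService(...).
export const registerGitLabWebhookSketch = (handlePushEvent: TGitLabPushHandler) => {
  const server = Fastify();

  // Hypothetical path; the data source ID identifies which config/credentials to check.
  server.post("/webhooks/gitlab/:dataSourceId", async (req, reply) => {
    const { dataSourceId } = req.params as { dataSourceId: string };
    const token = (req.headers["x-gitlab-token"] as string) ?? "";

    // Token verification and project filtering happen inside handlePushEvent.
    await handlePushEvent({
      payload: req.body as TGitLabDataSourcePushEventPayload,
      dataSourceId,
      token
    });

    return reply.send({ ok: true });
  });

  return server;
};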
@@ -0,0 +1,97 @@
import { z } from "zod";

import { SecretScanningDataSource } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
import { TGitLabConnection } from "@app/services/app-connection/gitlab";

import {
  CreateGitLabDataSourceSchema,
  GitLabDataSourceCredentialsSchema,
  GitLabDataSourceListItemSchema,
  GitLabDataSourceSchema,
  GitLabFindingSchema
} from "./gitlab-secret-scanning-schemas";

export type TGitLabDataSource = z.infer<typeof GitLabDataSourceSchema>;

export type TGitLabDataSourceInput = z.infer<typeof CreateGitLabDataSourceSchema>;

export type TGitLabDataSourceListItem = z.infer<typeof GitLabDataSourceListItemSchema>;

export type TGitLabFinding = z.infer<typeof GitLabFindingSchema>;

export type TGitLabDataSourceWithConnection = TGitLabDataSource & {
  connection: TGitLabConnection;
};

export type TGitLabDataSourceCredentials = z.infer<typeof GitLabDataSourceCredentialsSchema>;

export type TGitLabDataSourcePushEventPayload = {
  object_kind: "push";
  event_name: "push";
  before: string;
  after: string;
  ref: string;
  ref_protected: boolean;
  checkout_sha: string;
  user_id: number;
  user_name: string;
  user_username: string;
  user_email: string;
  user_avatar: string;
  project_id: number;
  project: {
    id: number;
    name: string;
    description: string;
    web_url: string;
    avatar_url: string | null;
    git_ssh_url: string;
    git_http_url: string;
    namespace: string;
    visibility_level: number;
    path_with_namespace: string;
    default_branch: string;
    homepage: string;
    url: string;
    ssh_url: string;
    http_url: string;
  };
  repository: {
    name: string;
    url: string;
    description: string;
    homepage: string;
    git_http_url: string;
    git_ssh_url: string;
    visibility_level: number;
  };
  commits: {
    id: string;
    message: string;
    title: string;
    timestamp: string;
    url: string;
    author: {
      name: string;
      email: string;
    };
    added: string[];
    modified: string[];
    removed: string[];
  }[];
  total_commits_count: number;
};

export type THandleGitLabPushEvent = {
  payload: TGitLabDataSourcePushEventPayload;
  dataSourceId: string;
  token: string;
};

export type TQueueGitLabResourceDiffScan = {
  dataSourceType: SecretScanningDataSource.GitLab;
  payload: TGitLabDataSourcePushEventPayload;
  dataSourceId: string;
  resourceId: string;
  scanId: string;
};
@@ -0,0 +1,3 @@
export * from "./gitlab-secret-scanning-constants";
export * from "./gitlab-secret-scanning-schemas";
export * from "./gitlab-secret-scanning-types";
@@ -49,6 +49,7 @@ const baseSecretScanningDataSourceQuery = ({
    db.ref("encryptedCredentials").withSchema(TableName.AppConnection).as("connectionEncryptedCredentials"),
    db.ref("description").withSchema(TableName.AppConnection).as("connectionDescription"),
    db.ref("version").withSchema(TableName.AppConnection).as("connectionVersion"),
    db.ref("gatewayId").withSchema(TableName.AppConnection).as("connectionGatewayId"),
    db.ref("createdAt").withSchema(TableName.AppConnection).as("connectionCreatedAt"),
    db.ref("updatedAt").withSchema(TableName.AppConnection).as("connectionUpdatedAt"),
    db
@@ -82,6 +83,7 @@ const expandSecretScanningDataSource = <
    connectionUpdatedAt,
    connectionVersion,
    connectionIsPlatformManagedCredentials,
    connectionGatewayId,
    ...el
  } = dataSource;
@@ -100,7 +102,8 @@ const expandSecretScanningDataSource = <
          createdAt: connectionCreatedAt,
          updatedAt: connectionUpdatedAt,
          version: connectionVersion,
          isPlatformManagedCredentials: connectionIsPlatformManagedCredentials,
          gatewayId: connectionGatewayId
        }
      : undefined
  };
@@ -1,6 +1,7 @@
export enum SecretScanningDataSource {
  GitHub = "github",
  Bitbucket = "bitbucket",
  GitLab = "gitlab"
}

export enum SecretScanningScanStatus {
@@ -1,5 +1,6 @@
import { BitbucketSecretScanningFactory } from "@app/ee/services/secret-scanning-v2/bitbucket/bitbucket-secret-scanning-factory";
import { GitHubSecretScanningFactory } from "@app/ee/services/secret-scanning-v2/github/github-secret-scanning-factory";
import { GitLabSecretScanningFactory } from "@app/ee/services/secret-scanning-v2/gitlab/gitlab-secret-scanning-factory";

import { SecretScanningDataSource } from "./secret-scanning-v2-enums";
import {
@@ -19,5 +20,6 @@ type TSecretScanningFactoryImplementation = TSecretScanningFactory<
export const SECRET_SCANNING_FACTORY_MAP: Record<SecretScanningDataSource, TSecretScanningFactoryImplementation> = {
  [SecretScanningDataSource.GitHub]: GitHubSecretScanningFactory as TSecretScanningFactoryImplementation,
  [SecretScanningDataSource.Bitbucket]: BitbucketSecretScanningFactory as TSecretScanningFactoryImplementation,
  [SecretScanningDataSource.GitLab]: GitLabSecretScanningFactory as TSecretScanningFactoryImplementation
};
@@ -13,6 +13,7 @@ import {
import { SecretMatch } from "@app/ee/services/secret-scanning/secret-scanning-queue/secret-scanning-queue-types";
import { BITBUCKET_SECRET_SCANNING_DATA_SOURCE_LIST_OPTION } from "@app/ee/services/secret-scanning-v2/bitbucket";
import { GITHUB_SECRET_SCANNING_DATA_SOURCE_LIST_OPTION } from "@app/ee/services/secret-scanning-v2/github";
import { GITLAB_SECRET_SCANNING_DATA_SOURCE_LIST_OPTION } from "@app/ee/services/secret-scanning-v2/gitlab";
import { getConfig } from "@app/lib/config/env";
import { crypto } from "@app/lib/crypto";
import { BadRequestError } from "@app/lib/errors";
@@ -23,7 +24,8 @@ import { TCloneRepository, TGetFindingsPayload, TSecretScanningDataSourceListIte
const SECRET_SCANNING_SOURCE_LIST_OPTIONS: Record<SecretScanningDataSource, TSecretScanningDataSourceListItem> = {
  [SecretScanningDataSource.GitHub]: GITHUB_SECRET_SCANNING_DATA_SOURCE_LIST_OPTION,
  [SecretScanningDataSource.Bitbucket]: BITBUCKET_SECRET_SCANNING_DATA_SOURCE_LIST_OPTION,
  [SecretScanningDataSource.GitLab]: GITLAB_SECRET_SCANNING_DATA_SOURCE_LIST_OPTION
};

export const listSecretScanningDataSourceOptions = () => {
@@ -3,15 +3,18 @@ import { AppConnection } from "@app/services/app-connection/app-connection-enums
export const SECRET_SCANNING_DATA_SOURCE_NAME_MAP: Record<SecretScanningDataSource, string> = {
  [SecretScanningDataSource.GitHub]: "GitHub",
  [SecretScanningDataSource.Bitbucket]: "Bitbucket",
  [SecretScanningDataSource.GitLab]: "GitLab"
};

export const SECRET_SCANNING_DATA_SOURCE_CONNECTION_MAP: Record<SecretScanningDataSource, AppConnection> = {
  [SecretScanningDataSource.GitHub]: AppConnection.GitHubRadar,
  [SecretScanningDataSource.Bitbucket]: AppConnection.Bitbucket,
  [SecretScanningDataSource.GitLab]: AppConnection.GitLab
};

export const AUTO_SYNC_DESCRIPTION_HELPER: Record<SecretScanningDataSource, { verb: string; noun: string }> = {
  [SecretScanningDataSource.GitHub]: { verb: "push", noun: "repositories" },
  [SecretScanningDataSource.Bitbucket]: { verb: "push", noun: "repositories" },
  [SecretScanningDataSource.GitLab]: { verb: "push", noun: "projects" }
};
@@ -16,6 +16,7 @@ import { getConfig } from "@app/lib/config/env";
import { BadRequestError, InternalServerError } from "@app/lib/errors";
import { logger } from "@app/lib/logger";
import { QueueJobs, QueueName, TQueueServiceFactory } from "@app/queue";
import { TAppConnectionDALFactory } from "@app/services/app-connection/app-connection-dal";
import { decryptAppConnection } from "@app/services/app-connection/app-connection-fns";
import { TAppConnection } from "@app/services/app-connection/app-connection-types";
import { ActorType } from "@app/services/auth/auth-type";
@@ -48,6 +49,7 @@ type TSecretRotationV2QueueServiceFactoryDep = {
  projectMembershipDAL: Pick<TProjectMembershipDALFactory, "findAllProjectMembers">;
  projectDAL: Pick<TProjectDALFactory, "findById">;
  kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">;
  appConnectionDAL: Pick<TAppConnectionDALFactory, "updateById">;
  auditLogService: Pick<TAuditLogServiceFactory, "createAuditLog">;
  keyStore: Pick<TKeyStoreFactory, "acquireLock" | "getItem">;
};
@@ -62,7 +64,8 @@ export const secretScanningV2QueueServiceFactory = async ({
  smtpService,
  kmsService,
  auditLogService,
  keyStore,
  appConnectionDAL
}: TSecretRotationV2QueueServiceFactoryDep) => {
  const queueDataSourceFullScan = async (
    dataSource: TSecretScanningDataSourceWithConnection,
@@ -71,7 +74,10 @@ export const secretScanningV2QueueServiceFactory = async ({
    try {
      const { type } = dataSource;

      const factory = SECRET_SCANNING_FACTORY_MAP[type]({
        kmsService,
        appConnectionDAL
      });

      const rawResources = await factory.listRawResources(dataSource);
@@ -171,7 +177,10 @@ export const secretScanningV2QueueServiceFactory = async ({
      let connection: TAppConnection | null = null;
      if (dataSource.connection) connection = await decryptAppConnection(dataSource.connection, kmsService);

      const factory = SECRET_SCANNING_FACTORY_MAP[dataSource.type as SecretScanningDataSource]({
        kmsService,
        appConnectionDAL
      });

      const findingsPath = join(tempFolder, "findings.json");
@@ -329,7 +338,10 @@ export const secretScanningV2QueueServiceFactory = async ({
    dataSourceId,
    dataSourceType
  }: Pick<TQueueSecretScanningResourceDiffScan, "payload" | "dataSourceId" | "dataSourceType">) => {
    const factory = SECRET_SCANNING_FACTORY_MAP[dataSourceType as SecretScanningDataSource]({
      kmsService,
      appConnectionDAL
    });

    const resourcePayload = factory.getDiffScanResourcePayload(payload);
@@ -391,7 +403,10 @@ export const secretScanningV2QueueServiceFactory = async ({
      if (!resource) throw new Error(`Resource with ID "${resourceId}" not found`);

      const factory = SECRET_SCANNING_FACTORY_MAP[dataSource.type as SecretScanningDataSource]({
        kmsService,
        appConnectionDAL
      });

      const tempFolder = await createTempFolder();
@@ -46,6 +46,7 @@ import {
import { DatabaseErrorCode } from "@app/lib/error-codes";
import { BadRequestError, DatabaseError, NotFoundError } from "@app/lib/errors";
import { OrgServiceActor } from "@app/lib/types";
import { TAppConnectionDALFactory } from "@app/services/app-connection/app-connection-dal";
import { decryptAppConnection } from "@app/services/app-connection/app-connection-fns";
import { TAppConnectionServiceFactory } from "@app/services/app-connection/app-connection-service";
import { TAppConnection } from "@app/services/app-connection/app-connection-types";
@@ -53,12 +54,14 @@ import { TKmsServiceFactory } from "@app/services/kms/kms-service";
import { KmsDataKey } from "@app/services/kms/kms-types";

import { bitbucketSecretScanningService } from "./bitbucket/bitbucket-secret-scanning-service";
import { gitlabSecretScanningService } from "./gitlab/gitlab-secret-scanning-service";
import { TSecretScanningV2DALFactory } from "./secret-scanning-v2-dal";
import { TSecretScanningV2QueueServiceFactory } from "./secret-scanning-v2-queue";

export type TSecretScanningV2ServiceFactoryDep = {
  secretScanningV2DAL: TSecretScanningV2DALFactory;
  appConnectionService: Pick<TAppConnectionServiceFactory, "connectAppConnectionById">;
  appConnectionDAL: Pick<TAppConnectionDALFactory, "updateById">;
  permissionService: Pick<TPermissionServiceFactory, "getProjectPermission" | "getOrgPermission">;
  licenseService: Pick<TLicenseServiceFactory, "getPlan">;
  secretScanningV2Queue: Pick<
@@ -76,6 +79,7 @@ export const secretScanningV2ServiceFactory = ({
  appConnectionService,
  licenseService,
  secretScanningV2Queue,
  appConnectionDAL,
  kmsService
}: TSecretScanningV2ServiceFactoryDep) => {
  const $checkListSecretScanningDataSourcesByProjectIdPermissions = async (
@@ -255,7 +259,10 @@ export const secretScanningV2ServiceFactory = ({
      );
    }

    const factory = SECRET_SCANNING_FACTORY_MAP[payload.type]({
      appConnectionDAL,
      kmsService
    });

    try {
      const createdDataSource = await factory.initialize(
@@ -363,6 +370,31 @@ export const secretScanningV2ServiceFactory = ({
        message: `Secret Scanning Data Source with ID "${dataSourceId}" is not configured for ${SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type]}`
      });

    let connection: TAppConnection | null = null;
    if (dataSource.connectionId) {
      // validates permission to connect and app is valid for data source
      connection = await appConnectionService.connectAppConnectionById(
        SECRET_SCANNING_DATA_SOURCE_CONNECTION_MAP[dataSource.type],
        dataSource.connectionId,
        actor
      );
    }

    const factory = SECRET_SCANNING_FACTORY_MAP[dataSource.type]({
      appConnectionDAL,
      kmsService
    });

    if (payload.config) {
      await factory.validateConfigUpdate({
        dataSource: {
          ...dataSource,
          connection
        } as TSecretScanningDataSourceWithConnection,
        config: payload.config as TSecretScanningDataSourceWithConnection["config"]
      });
    }

    try {
      const updatedDataSource = await secretScanningV2DAL.dataSources.updateById(dataSourceId, payload);
@@ -416,7 +448,10 @@ export const secretScanningV2ServiceFactory = ({
        message: `Secret Scanning Data Source with ID "${dataSourceId}" is not configured for ${SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type]}`
      });

    const factory = SECRET_SCANNING_FACTORY_MAP[type]({
      appConnectionDAL,
      kmsService
    });

    let connection: TAppConnection | null = null;
    if (dataSource.connection) {
@@ -903,6 +938,7 @@ export const secretScanningV2ServiceFactory = ({
    findSecretScanningConfigByProjectId,
    upsertSecretScanningConfig,
    github: githubSecretScanningService(secretScanningV2DAL, secretScanningV2Queue),
    bitbucket: bitbucketSecretScanningService(secretScanningV2DAL, secretScanningV2Queue, kmsService),
    gitlab: gitlabSecretScanningService(secretScanningV2DAL, secretScanningV2Queue, kmsService)
  };
};
@@ -21,14 +21,25 @@ import {
  TGitHubFinding,
  TQueueGitHubResourceDiffScan
} from "@app/ee/services/secret-scanning-v2/github";
import {
  TGitLabDataSource,
  TGitLabDataSourceCredentials,
  TGitLabDataSourceInput,
  TGitLabDataSourceListItem,
  TGitLabDataSourceWithConnection,
  TGitLabFinding,
  TQueueGitLabResourceDiffScan
} from "@app/ee/services/secret-scanning-v2/gitlab";
import { TSecretScanningV2DALFactory } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-dal";
import {
  SecretScanningDataSource,
  SecretScanningFindingStatus,
  SecretScanningScanStatus
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
import { TAppConnectionDALFactory } from "@app/services/app-connection/app-connection-dal";
import { TKmsServiceFactory } from "@app/services/kms/kms-service";

export type TSecretScanningDataSource = TGitHubDataSource | TBitbucketDataSource | TGitLabDataSource;

export type TSecretScanningDataSourceWithDetails = TSecretScanningDataSource & {
  lastScannedAt?: Date | null;
@@ -52,15 +63,25 @@ export type TSecretScanningScanWithDetails = TSecretScanningScans & {
export type TSecretScanningDataSourceWithConnection =
  | TGitHubDataSourceWithConnection
  | TBitbucketDataSourceWithConnection
  | TGitLabDataSourceWithConnection;

export type TSecretScanningDataSourceInput =
  | TGitHubDataSourceInput
  | TBitbucketDataSourceInput
  | TGitLabDataSourceInput;

export type TSecretScanningDataSourceListItem =
  | TGitHubDataSourceListItem
  | TBitbucketDataSourceListItem
  | TGitLabDataSourceListItem;

export type TSecretScanningDataSourceCredentials =
  | TBitbucketDataSourceCredentials
  | TGitLabDataSourceCredentials
  | undefined;

export type TSecretScanningFinding = TGitHubFinding | TBitbucketFinding | TGitLabFinding;

export type TListSecretScanningDataSourcesByProjectId = {
  projectId: string;
@@ -112,7 +133,10 @@ export type TQueueSecretScanningDataSourceFullScan = {
  scanId: string;
};

export type TQueueSecretScanningResourceDiffScan =
  | TQueueGitHubResourceDiffScan
  | TQueueBitbucketResourceDiffScan
  | TQueueGitLabResourceDiffScan;

export type TQueueSecretScanningSendNotification = {
  dataSource: TSecretScanningDataSources;
@@ -170,6 +194,11 @@ export type TSecretScanningFactoryInitialize<
  callback: (parameters: { credentials?: C; externalId?: string }) => Promise<TSecretScanningDataSourceRaw>
) => Promise<TSecretScanningDataSourceRaw>;

export type TSecretScanningFactoryValidateConfigUpdate<
  C extends TSecretScanningDataSourceInput["config"],
  T extends TSecretScanningDataSourceWithConnection
> = (params: { config: C; dataSource: T }) => Promise<void>;

export type TSecretScanningFactoryPostInitialization<
  P extends TSecretScanningDataSourceInput,
  T extends TSecretScanningDataSourceWithConnection["connection"] | undefined = undefined,
@@ -181,17 +210,23 @@ export type TSecretScanningFactoryTeardown<
  C extends TSecretScanningDataSourceCredentials = undefined
> = (params: { dataSource: T; credentials: C }) => Promise<void>;

export type TSecretScanningFactoryParams = {
  appConnectionDAL: Pick<TAppConnectionDALFactory, "updateById">;
  kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">;
};

export type TSecretScanningFactory<
  T extends TSecretScanningDataSourceWithConnection,
  P extends TQueueSecretScanningResourceDiffScan["payload"],
  I extends TSecretScanningDataSourceInput,
  C extends TSecretScanningDataSourceCredentials | undefined = undefined
> = (params: TSecretScanningFactoryParams) => {
  listRawResources: TSecretScanningFactoryListRawResources<T>;
  getFullScanPath: TSecretScanningFactoryGetFullScanPath<T>;
  initialize: TSecretScanningFactoryInitialize<I, T["connection"] | undefined, C>;
  postInitialization: TSecretScanningFactoryPostInitialization<I, T["connection"] | undefined, C>;
  teardown: TSecretScanningFactoryTeardown<T, C>;
  validateConfigUpdate: TSecretScanningFactoryValidateConfigUpdate<I["config"], T>;
  getDiffScanResourcePayload: TSecretScanningFactoryGetDiffScanResourcePayload<P>;
  getDiffScanFindingsPayload: TSecretScanningFactoryGetDiffScanFindingsPayload<T, P>;
};
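With the TSecretScanningFactory signature change above, every entry in SECRET_SCANNING_FACTORY_MAP is now constructed with its shared dependencies instead of with no arguments. A minimal sketch of the new call shape (the import paths are assumed module names for the files shown in this diff):

import { SECRET_SCANNING_FACTORY_MAP } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-factory";
import {
  TSecretScanningDataSourceWithConnection,
  TSecretScanningFactoryParams
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-types";

// Build the factory for a data source with the two injected dependencies and run the
// new validateConfigUpdate hook before persisting a config change, mirroring the
// service call sites in this diff.
export const validateDataSourceConfigChange = async (
  dataSource: TSecretScanningDataSourceWithConnection,
  config: TSecretScanningDataSourceWithConnection["config"],
  { kmsService, appConnectionDAL }: TSecretScanningFactoryParams
) => {
  const factory = SECRET_SCANNING_FACTORY_MAP[dataSource.type]({ kmsService, appConnectionDAL });
  await factory.validateConfigUpdate({ dataSource, config });
};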
@@ -2,10 +2,12 @@ import { z } from "zod";
import { BitbucketDataSourceSchema, BitbucketFindingSchema } from "@app/ee/services/secret-scanning-v2/bitbucket";
import { GitHubDataSourceSchema, GitHubFindingSchema } from "@app/ee/services/secret-scanning-v2/github";
import { GitLabDataSourceSchema, GitLabFindingSchema } from "@app/ee/services/secret-scanning-v2/gitlab";

export const SecretScanningDataSourceSchema = z.discriminatedUnion("type", [
  GitHubDataSourceSchema,
  BitbucketDataSourceSchema,
  GitLabDataSourceSchema
]);

export const SecretScanningFindingSchema = z.discriminatedUnion("dataSourceType", [
@@ -18,5 +20,10 @@ export const SecretScanningFindingSchema = z.discriminatedUnion("dataSourceType"
    JSON.stringify({
      title: "Bitbucket"
    })
  ),
  GitLabFindingSchema.describe(
    JSON.stringify({
      title: "GitLab"
    })
  )
]);
@@ -46,7 +46,11 @@ export const KeyStorePrefixes = {
  IdentityAccessTokenStatusUpdate: (identityAccessTokenId: string) =>
    `identity-access-token-status:${identityAccessTokenId}`,
  ServiceTokenStatusUpdate: (serviceTokenId: string) => `service-token-status:${serviceTokenId}`,
  GatewayIdentityCredential: (identityId: string) => `gateway-credentials:${identityId}`,
  ActiveSSEConnectionsSet: (projectId: string, identityId: string) =>
    `sse-connections:${projectId}:${identityId}` as const,
  ActiveSSEConnections: (projectId: string, identityId: string, connectionId: string) =>
    `sse-connections:${projectId}:${identityId}:${connectionId}` as const
};

export const KeyStoreTtls = {
@@ -664,6 +664,10 @@ export const ORGANIZATIONS = {
    organizationId: "The ID of the organization to delete the membership from.",
    membershipId: "The ID of the membership to delete."
  },
  BULK_DELETE_USER_MEMBERSHIPS: {
    organizationId: "The ID of the organization to delete the memberships from.",
    membershipIds: "The IDs of the memberships to delete."
  },
  LIST_IDENTITY_MEMBERSHIPS: {
    orgId: "The ID of the organization to get identity memberships from.",
    offset: "The offset to start from. If you enter 10, it will start from the 10th identity membership.",
@@ -2253,7 +2257,9 @@ export const AppConnections = {
  AZURE_DEVOPS: {
    code: "The OAuth code to use to connect with Azure DevOps.",
    tenantId: "The Tenant ID to use to connect with Azure DevOps.",
    orgName: "The Organization name to use to connect with Azure DevOps.",
    clientId: "The Client ID to use to connect with Azure Client Secrets.",
    clientSecret: "The Client Secret to use to connect with Azure Client Secrets."
  },
  OCI: {
    userOcid: "The OCID (Oracle Cloud Identifier) of the user making the request.",
@@ -2296,6 +2302,9 @@ export const AppConnections = {
  DIGITAL_OCEAN_APP_PLATFORM: {
    apiToken: "The API token used to authenticate with Digital Ocean App Platform."
  },
  NETLIFY: {
    accessToken: "The Access token used to authenticate with Netlify."
  },
  OKTA: {
    instanceUrl: "The URL used to access your Okta organization.",
    apiToken: "The API token used to authenticate with Okta."
@@ -2400,12 +2409,18 @@ export const SecretSyncs = {
    env: "The name of the GitHub environment."
  },
  AZURE_KEY_VAULT: {
    vaultBaseUrl: "The base URL of the Azure Key Vault to sync secrets to. Example: https://example.vault.azure.net/",
    tenantId: "The Tenant ID to use to connect with Azure Client Secrets.",
    clientId: "The Client ID to use to connect with Azure Client Secrets.",
    clientSecret: "The Client Secret to use to connect with Azure Client Secrets."
  },
  AZURE_APP_CONFIGURATION: {
    configurationUrl:
      "The URL of the Azure App Configuration to sync secrets to. Example: https://example.azconfig.io/",
    label: "An optional label to assign to secrets created in Azure App Configuration.",
    tenantId: "The Tenant ID to use to connect with Azure Client Secrets.",
    clientId: "The Client ID to use to connect with Azure Client Secrets.",
    clientSecret: "The Client Secret to use to connect with Azure Client Secrets."
  },
  AZURE_DEVOPS: {
    devopsProjectId: "The ID of the Azure DevOps project to sync secrets to.",
@@ -2521,6 +2536,13 @@ export const SecretSyncs = {
    workspaceSlug: "The Bitbucket Workspace slug to sync secrets to.",
    repositorySlug: "The Bitbucket Repository slug to sync secrets to.",
    environmentId: "The Bitbucket Deployment Environment uuid to sync secrets to."
  },
  NETLIFY: {
    accountId: "The ID of the Netlify account to sync secrets to.",
    accountName: "The name of the Netlify account to sync secrets to.",
    siteName: "The name of the Netlify site to sync secrets to.",
    siteId: "The ID of the Netlify site to sync secrets to.",
    context: "The Netlify context to sync secrets to."
  }
  }
};
@@ -2702,6 +2724,14 @@ export const SecretScanningDataSources = {
  GITHUB: {
    includeRepos: 'The repositories to include when scanning. Defaults to all repositories (["*"]).'
  },
  GITLAB: {
    includeProjects: 'The projects to include when scanning. Defaults to all projects (["*"]).',
    scope: "The GitLab scope scanning should occur at (project or group level).",
    projectId: "The ID of the project to scan.",
    projectName: "The name of the project to scan.",
    groupId: "The ID of the group to scan projects from.",
    groupName: "The name of the group to scan projects from."
  },
  BITBUCKET: {
    workspaceSlug: "The workspace to scan.",
    includeRepos: 'The repositories to include when scanning. Defaults to all repositories (["*"]).'
@@ -59,6 +59,7 @@ const envSchema = z
    AUDIT_LOGS_DB_ROOT_CERT: zpStr(
      z.string().describe("Postgres database base64-encoded CA cert for Audit logs").optional()
    ),
    DISABLE_AUDIT_LOG_STORAGE: zodStrBool.default("false").optional().describe("Disable audit log storage"),
    MAX_LEASE_LIMIT: z.coerce.number().default(10000),
    DB_ROOT_CERT: zpStr(z.string().describe("Postgres database base64-encoded CA cert").optional()),
    DB_HOST: zpStr(z.string().describe("Postgres database host").optional()),
@@ -482,6 +483,15 @@ export const overwriteSchema: {
    fields: { key: keyof TEnvConfig; description?: string }[];
  };
} = {
  auditLogs: {
    name: "Audit Logs",
    fields: [
      {
        key: "DISABLE_AUDIT_LOG_STORAGE",
        description: "Disable audit log storage"
      }
    ]
  },
  aws: {
    name: "AWS",
    fields: [
@@ -496,7 +506,7 @@ export const overwriteSchema: {
    ]
  },
  azureAppConfiguration: {
    name: "Azure App Connection: App Configuration",
    fields: [
      {
        key: "INF_APP_CONNECTION_AZURE_APP_CONFIGURATION_CLIENT_ID",
@@ -509,7 +519,7 @@ export const overwriteSchema: {
    ]
  },
  azureKeyVault: {
    name: "Azure App Connection: Key Vault",
    fields: [
      {
        key: "INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_ID",
@@ -522,7 +532,7 @@ export const overwriteSchema: {
    ]
  },
  azureClientSecrets: {
    name: "Azure App Connection: Client Secrets",
    fields: [
      {
        key: "INF_APP_CONNECTION_AZURE_CLIENT_SECRETS_CLIENT_ID",
@@ -535,7 +545,7 @@ export const overwriteSchema: {
    ]
  },
  azureDevOps: {
    name: "Azure App Connection: DevOps",
    fields: [
      {
        key: "INF_APP_CONNECTION_AZURE_DEVOPS_CLIENT_ID",
@@ -11,3 +11,5 @@ export const UserPrincipalNameRegex = new RE2(/^[a-zA-Z0-9._%+-]+@[a-zA-Z0-9._-]
export const LdapUrlRegex = new RE2(/^ldaps?:\/\//);

export const BasicRepositoryRegex = new RE2(/^[a-zA-Z0-9._-]+\/[a-zA-Z0-9._-]+$/);

export const GitLabProjectRegex = new RE2(/^[a-zA-Z0-9._-]+(?:\/[a-zA-Z0-9._-]+)+$/);
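A quick sketch of what the new GitLabProjectRegex accepts (sample strings are illustrative): unlike BasicRepositoryRegex, it allows one or more nested path segments, so group/subgroup/project passes while a bare name without a namespace does not. This assumes the repo's existing default import of the re2 package.

import RE2 from "re2";

const GitLabProjectRegex = new RE2(/^[a-zA-Z0-9._-]+(?:\/[a-zA-Z0-9._-]+)+$/);

console.log(GitLabProjectRegex.test("group/project")); // true
console.log(GitLabProjectRegex.test("group/subgroup/project")); // true
console.log(GitLabProjectRegex.test("project-only")); // false: no namespace segment
console.log(GitLabProjectRegex.test("group//project")); // false: empty segment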
@@ -22,6 +22,7 @@ import { crypto } from "@app/lib/crypto";
import { logger } from "@app/lib/logger";
import { QueueWorkerProfile } from "@app/lib/types";
import { CaType } from "@app/services/certificate-authority/certificate-authority-enums";
import { ExternalPlatforms } from "@app/services/external-migration/external-migration-types";
import {
  TFailedIntegrationSyncEmailsPayload,
  TIntegrationSyncPayload,
@@ -228,6 +229,7 @@ export type TQueueJobTypes = {
    name: QueueJobs.ImportSecretsFromExternalSource;
    payload: {
      actorEmail: string;
      importType: ExternalPlatforms;
      data: {
        iv: string;
        tag: string;
@@ -22,6 +22,7 @@ export type TAuthMode =
      orgId: string;
      authMethod: AuthMethod;
      isMfaVerified?: boolean;
      token: AuthModeJwtTokenPayload;
    }
  | {
      authMode: AuthMode.API_KEY;
@@ -30,6 +31,7 @@ export type TAuthMode =
      userId: string;
      user: TUsers;
      orgId: string;
      token: string;
    }
  | {
      authMode: AuthMode.SERVICE_TOKEN;
@@ -38,6 +40,7 @@ export type TAuthMode =
      serviceTokenId: string;
      orgId: string;
      authMethod: null;
      token: string;
    }
  | {
      authMode: AuthMode.IDENTITY_ACCESS_TOKEN;
@@ -47,6 +50,7 @@ export type TAuthMode =
      orgId: string;
      authMethod: null;
      isInstanceAdmin?: boolean;
      token: TIdentityAccessTokenJwtPayload;
    }
  | {
      authMode: AuthMode.SCIM_TOKEN;
@@ -56,7 +60,7 @@ export type TAuthMode =
|
|||||||
authMethod: null;
|
authMethod: null;
|
||||||
};
|
};
|
||||||
|
|
||||||
const extractAuth = async (req: FastifyRequest, jwtSecret: string) => {
|
export const extractAuth = async (req: FastifyRequest, jwtSecret: string) => {
|
||||||
const apiKey = req.headers?.["x-api-key"];
|
const apiKey = req.headers?.["x-api-key"];
|
||||||
if (apiKey) {
|
if (apiKey) {
|
||||||
return { authMode: AuthMode.API_KEY, token: apiKey, actor: ActorType.USER } as const;
|
return { authMode: AuthMode.API_KEY, token: apiKey, actor: ActorType.USER } as const;
|
||||||
@@ -133,7 +137,8 @@ export const injectIdentity = fp(async (server: FastifyZodProvider) => {
|
|||||||
actor,
|
actor,
|
||||||
orgId: orgId as string,
|
orgId: orgId as string,
|
||||||
authMethod: token.authMethod,
|
authMethod: token.authMethod,
|
||||||
isMfaVerified: token.isMfaVerified
|
isMfaVerified: token.isMfaVerified,
|
||||||
|
token
|
||||||
};
|
};
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
@@ -148,7 +153,8 @@ export const injectIdentity = fp(async (server: FastifyZodProvider) => {
|
|||||||
identityId: identity.identityId,
|
identityId: identity.identityId,
|
||||||
identityName: identity.name,
|
identityName: identity.name,
|
||||||
authMethod: null,
|
authMethod: null,
|
||||||
isInstanceAdmin: serverCfg?.adminIdentityIds?.includes(identity.identityId)
|
isInstanceAdmin: serverCfg?.adminIdentityIds?.includes(identity.identityId),
|
||||||
|
token
|
||||||
};
|
};
|
||||||
if (token?.identityAuth?.oidc) {
|
if (token?.identityAuth?.oidc) {
|
||||||
requestContext.set("identityAuthInfo", {
|
requestContext.set("identityAuthInfo", {
|
||||||
@@ -179,7 +185,8 @@ export const injectIdentity = fp(async (server: FastifyZodProvider) => {
|
|||||||
serviceToken,
|
serviceToken,
|
||||||
serviceTokenId: serviceToken.id,
|
serviceTokenId: serviceToken.id,
|
||||||
actor,
|
actor,
|
||||||
authMethod: null
|
authMethod: null,
|
||||||
|
token
|
||||||
};
|
};
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
@@ -191,7 +198,8 @@ export const injectIdentity = fp(async (server: FastifyZodProvider) => {
|
|||||||
actor,
|
actor,
|
||||||
user,
|
user,
|
||||||
orgId: "API_KEY", // We set the orgId to an arbitrary value, since we can't link an API key to a specific org. We have to deprecate API keys soon!
|
orgId: "API_KEY", // We set the orgId to an arbitrary value, since we can't link an API key to a specific org. We have to deprecate API keys soon!
|
||||||
authMethod: null
|
authMethod: null,
|
||||||
|
token: token as string
|
||||||
};
|
};
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
|
@@ -4,6 +4,8 @@ import { Probot } from "probot";
 import { z } from "zod";
 
 import { TBitbucketPushEvent } from "@app/ee/services/secret-scanning-v2/bitbucket/bitbucket-secret-scanning-types";
+import { TGitLabDataSourcePushEventPayload } from "@app/ee/services/secret-scanning-v2/gitlab";
+import { GitLabWebHookEvent } from "@app/ee/services/secret-scanning-v2/gitlab/gitlab-secret-scanning-enums";
 import { getConfig } from "@app/lib/config/env";
 import { logger } from "@app/lib/logger";
 import { writeLimit } from "@app/server/config/rateLimiter";
@@ -113,4 +115,36 @@ export const registerSecretScanningV2Webhooks = async (server: FastifyZodProvide
       return res.send("ok");
     }
   });
+
+  // gitlab push event webhook
+  server.route({
+    method: "POST",
+    url: "/gitlab",
+    config: {
+      rateLimit: writeLimit
+    },
+    handler: async (req, res) => {
+      const event = req.headers["x-gitlab-event"] as GitLabWebHookEvent;
+      const token = req.headers["x-gitlab-token"] as string;
+      const dataSourceId = req.headers["x-data-source-id"] as string;
+
+      if (event !== GitLabWebHookEvent.Push) {
+        return res.status(400).send({ message: `Event type not supported: ${event as string}` });
+      }
+
+      if (!token) {
+        return res.status(401).send({ message: "Unauthorized: Missing token" });
+      }
+
+      if (!dataSourceId) return res.status(400).send({ message: "Data Source ID header is required" });
+
+      await server.services.secretScanningV2.gitlab.handlePushEvent({
+        dataSourceId,
+        payload: req.body as TGitLabDataSourcePushEventPayload,
+        token
+      });
+
+      return res.send("ok");
+    }
+  });
 };
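Illustration, not part of the change set: a hedged sketch of what a delivery to the new GitLab webhook might look like from the sending side. The host, path prefix, event-header value, and payload shape are assumptions; only the three headers and the push-event requirement come from the handler above.

// Hypothetical webhook delivery (host and route prefix are assumptions).
await fetch("https://infisical.example.com/secret-scanning/webhooks/gitlab", {
  method: "POST",
  headers: {
    "Content-Type": "application/json",
    "X-Gitlab-Event": "Push Hook",          // assumed to map to GitLabWebHookEvent.Push; other events get a 400
    "X-Gitlab-Token": "<webhook-token>",    // missing token yields 401
    "X-Data-Source-Id": "<data-source-id>"  // missing header yields 400
  },
  body: JSON.stringify({ ref: "refs/heads/main", commits: [] }) // simplified payload sketch
});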
@@ -31,6 +31,8 @@ import { buildDynamicSecretProviders } from "@app/ee/services/dynamic-secret/pro
 import { dynamicSecretLeaseDALFactory } from "@app/ee/services/dynamic-secret-lease/dynamic-secret-lease-dal";
 import { dynamicSecretLeaseQueueServiceFactory } from "@app/ee/services/dynamic-secret-lease/dynamic-secret-lease-queue";
 import { dynamicSecretLeaseServiceFactory } from "@app/ee/services/dynamic-secret-lease/dynamic-secret-lease-service";
+import { eventBusFactory } from "@app/ee/services/event/event-bus-service";
+import { sseServiceFactory } from "@app/ee/services/event/event-sse-service";
 import { externalKmsDALFactory } from "@app/ee/services/external-kms/external-kms-dal";
 import { externalKmsServiceFactory } from "@app/ee/services/external-kms/external-kms-service";
 import { gatewayDALFactory } from "@app/ee/services/gateway/gateway-dal";
@@ -495,6 +497,9 @@ export const registerRoutes = async (
   const projectMicrosoftTeamsConfigDAL = projectMicrosoftTeamsConfigDALFactory(db);
   const secretScanningV2DAL = secretScanningV2DALFactory(db);
 
+  const eventBusService = eventBusFactory(server.redis);
+  const sseService = sseServiceFactory(eventBusService, server.redis);
+
   const permissionService = permissionServiceFactory({
     permissionDAL,
     orgRoleDAL,
@@ -552,7 +557,8 @@ export const registerRoutes = async (
     queueService,
     projectDAL,
     licenseService,
-    auditLogStreamDAL
+    auditLogStreamDAL,
+    eventBusService
   });
 
   const auditLogService = auditLogServiceFactory({ auditLogDAL, permissionService, auditLogQueue });
@@ -1933,7 +1939,8 @@ export const registerRoutes = async (
     projectMembershipDAL,
     smtpService,
     kmsService,
-    keyStore
+    keyStore,
+    appConnectionDAL
   });
 
   const secretScanningV2Service = secretScanningV2ServiceFactory({
@@ -1942,7 +1949,8 @@ export const registerRoutes = async (
     licenseService,
     secretScanningV2DAL,
     secretScanningV2Queue,
-    kmsService
+    kmsService,
+    appConnectionDAL
   });
 
   // setup the communication with license key server
@@ -1966,6 +1974,7 @@ export const registerRoutes = async (
   await kmsService.startService();
   await microsoftTeamsService.start();
   await dynamicSecretQueueService.init();
+  await eventBusService.init();
 
   // inject all services
   server.decorate<FastifyZodProvider["services"]>("services", {
@@ -2072,7 +2081,9 @@ export const registerRoutes = async (
     githubOrgSync: githubOrgSyncConfigService,
     folderCommit: folderCommitService,
     secretScanningV2: secretScanningV2Service,
-    reminder: reminderService
+    reminder: reminderService,
+    bus: eventBusService,
+    sse: sseService
   });
 
   const cronJobs: CronJob[] = [];
@@ -2133,7 +2144,8 @@ export const registerRoutes = async (
           inviteOnlySignup: z.boolean().optional(),
           redisConfigured: z.boolean().optional(),
           secretScanningConfigured: z.boolean().optional(),
-          samlDefaultOrgSlug: z.string().optional()
+          samlDefaultOrgSlug: z.string().optional(),
+          auditLogStorageDisabled: z.boolean().optional()
         })
       }
     },
@@ -2160,7 +2172,8 @@ export const registerRoutes = async (
        inviteOnlySignup: Boolean(serverCfg.allowSignUp),
        redisConfigured: cfg.isRedisConfigured,
        secretScanningConfigured: cfg.isSecretScanningConfigured,
-        samlDefaultOrgSlug: cfg.samlDefaultOrgSlug
+        samlDefaultOrgSlug: cfg.samlDefaultOrgSlug,
+        auditLogStorageDisabled: Boolean(cfg.DISABLE_AUDIT_LOG_STORAGE)
      };
    }
  });
@@ -2188,5 +2201,7 @@ export const registerRoutes = async (
   server.addHook("onClose", async () => {
     cronJobs.forEach((job) => job.stop());
     await telemetryService.flushAll();
+    await eventBusService.close();
+    sseService.close();
   });
 };
@@ -464,6 +464,42 @@ export const registerAdminRouter = async (server: FastifyZodProvider) => {
     }
   });
 
+  server.route({
+    method: "DELETE",
+    url: "/user-management/users",
+    config: {
+      rateLimit: writeLimit
+    },
+    schema: {
+      body: z.object({
+        userIds: z.string().array()
+      }),
+      response: {
+        200: z.object({
+          users: UsersSchema.pick({
+            username: true,
+            firstName: true,
+            lastName: true,
+            email: true,
+            id: true
+          }).array()
+        })
+      }
+    },
+    onRequest: (req, res, done) => {
+      verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN])(req, res, () => {
+        verifySuperAdmin(req, res, done);
+      });
+    },
+    handler: async (req) => {
+      const users = await server.services.superAdmin.deleteUsers(req.body.userIds);
+
+      return {
+        users
+      };
+    }
+  });
+
   server.route({
     method: "PATCH",
     url: "/user-management/users/:userId/admin-access",
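Illustration, not part of the change set: a small usage sketch for the new bulk-delete route. The /api/v1/admin prefix, host, and adminJwt value are assumptions; the request body and response fields come from the schema above.

const adminJwt = process.env.INFISICAL_ADMIN_JWT ?? ""; // must belong to a server admin (verifySuperAdmin)

const res = await fetch("https://infisical.example.com/api/v1/admin/user-management/users", {
  method: "DELETE",
  headers: { Authorization: `Bearer ${adminJwt}`, "Content-Type": "application/json" },
  body: JSON.stringify({ userIds: ["<user-id-1>", "<user-id-2>"] })
});

const { users } = await res.json(); // id, username, firstName, lastName, email of the deleted users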
@@ -75,6 +75,10 @@ import {
 import { LdapConnectionListItemSchema, SanitizedLdapConnectionSchema } from "@app/services/app-connection/ldap";
 import { MsSqlConnectionListItemSchema, SanitizedMsSqlConnectionSchema } from "@app/services/app-connection/mssql";
 import { MySqlConnectionListItemSchema, SanitizedMySqlConnectionSchema } from "@app/services/app-connection/mysql";
+import {
+  NetlifyConnectionListItemSchema,
+  SanitizedNetlifyConnectionSchema
+} from "@app/services/app-connection/netlify";
 import { OktaConnectionListItemSchema, SanitizedOktaConnectionSchema } from "@app/services/app-connection/okta";
 import {
   PostgresConnectionListItemSchema,
@@ -145,6 +149,7 @@ const SanitizedAppConnectionSchema = z.union([
   ...SanitizedChecklyConnectionSchema.options,
   ...SanitizedSupabaseConnectionSchema.options,
   ...SanitizedDigitalOceanConnectionSchema.options,
+  ...SanitizedNetlifyConnectionSchema.options,
   ...SanitizedOktaConnectionSchema.options
 ]);
 
@@ -184,6 +189,7 @@ const AppConnectionOptionsSchema = z.discriminatedUnion("app", [
   ChecklyConnectionListItemSchema,
   SupabaseConnectionListItemSchema,
   DigitalOceanConnectionListItemSchema,
+  NetlifyConnectionListItemSchema,
   OktaConnectionListItemSchema
 ]);
 
@@ -46,7 +46,6 @@ export const registerCloudflareConnectionRouter = async (server: FastifyZodProvi
       const { connectionId } = req.params;
 
       const projects = await server.services.appConnection.cloudflare.listPagesProjects(connectionId, req.permission);
-
       return projects;
     }
   });
@@ -73,9 +72,36 @@ export const registerCloudflareConnectionRouter = async (server: FastifyZodProvi
     handler: async (req) => {
       const { connectionId } = req.params;
 
-      const projects = await server.services.appConnection.cloudflare.listWorkersScripts(connectionId, req.permission);
+      const scripts = await server.services.appConnection.cloudflare.listWorkersScripts(connectionId, req.permission);
+      return scripts;
+    }
+  });
 
-      return projects;
+  server.route({
+    method: "GET",
+    url: `/:connectionId/cloudflare-zones`,
+    config: {
+      rateLimit: readLimit
+    },
+    schema: {
+      params: z.object({
+        connectionId: z.string().uuid()
+      }),
+      response: {
+        200: z
+          .object({
+            id: z.string(),
+            name: z.string()
+          })
+          .array()
+      }
+    },
+    onRequest: verifyAuth([AuthMode.JWT]),
+    handler: async (req) => {
+      const { connectionId } = req.params;
+
+      const zones = await server.services.appConnection.cloudflare.listZones(connectionId, req.permission);
+      return zones;
     }
   });
 };
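Illustration, not part of the change set: a hypothetical client call against the new cloudflare-zones listing. The route prefix and host are assumptions; the { id, name } item shape follows the response schema above.

declare const connectionId: string; // an existing Cloudflare app connection ID
declare const jwt: string;          // a user JWT (the route only allows AuthMode.JWT)

const zones: { id: string; name: string }[] = await fetch(
  `https://infisical.example.com/api/v1/app-connections/cloudflare/${connectionId}/cloudflare-zones`,
  { headers: { Authorization: `Bearer ${jwt}` } }
).then((r) => r.json());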
@@ -26,6 +26,7 @@ import { registerHumanitecConnectionRouter } from "./humanitec-connection-router
 import { registerLdapConnectionRouter } from "./ldap-connection-router";
 import { registerMsSqlConnectionRouter } from "./mssql-connection-router";
 import { registerMySqlConnectionRouter } from "./mysql-connection-router";
+import { registerNetlifyConnectionRouter } from "./netlify-connection-router";
 import { registerOktaConnectionRouter } from "./okta-connection-router";
 import { registerPostgresConnectionRouter } from "./postgres-connection-router";
 import { registerRailwayConnectionRouter } from "./railway-connection-router";
@@ -76,5 +77,6 @@ export const APP_CONNECTION_REGISTER_ROUTER_MAP: Record<AppConnection, (server:
   [AppConnection.Checkly]: registerChecklyConnectionRouter,
   [AppConnection.Supabase]: registerSupabaseConnectionRouter,
   [AppConnection.DigitalOcean]: registerDigitalOceanConnectionRouter,
+  [AppConnection.Netlify]: registerNetlifyConnectionRouter,
   [AppConnection.Okta]: registerOktaConnectionRouter
 };
@@ -0,0 +1,87 @@
+import { z } from "zod";
+
+import { readLimit } from "@app/server/config/rateLimiter";
+import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
+import { AppConnection } from "@app/services/app-connection/app-connection-enums";
+import {
+  CreateNetlifyConnectionSchema,
+  SanitizedNetlifyConnectionSchema,
+  UpdateNetlifyConnectionSchema
+} from "@app/services/app-connection/netlify";
+import { AuthMode } from "@app/services/auth/auth-type";
+
+import { registerAppConnectionEndpoints } from "./app-connection-endpoints";
+
+export const registerNetlifyConnectionRouter = async (server: FastifyZodProvider) => {
+  registerAppConnectionEndpoints({
+    app: AppConnection.Netlify,
+    server,
+    sanitizedResponseSchema: SanitizedNetlifyConnectionSchema,
+    createSchema: CreateNetlifyConnectionSchema,
+    updateSchema: UpdateNetlifyConnectionSchema
+  });
+
+  // The below endpoints are not exposed and for Infisical App use
+  server.route({
+    method: "GET",
+    url: `/:connectionId/accounts`,
+    config: {
+      rateLimit: readLimit
+    },
+    schema: {
+      params: z.object({
+        connectionId: z.string().uuid()
+      }),
+      response: {
+        200: z.object({
+          accounts: z
+            .object({
+              name: z.string(),
+              id: z.string()
+            })
+            .array()
+        })
+      }
+    },
+    onRequest: verifyAuth([AuthMode.JWT]),
+    handler: async (req) => {
+      const { connectionId } = req.params;
+
+      const accounts = await server.services.appConnection.netlify.listAccounts(connectionId, req.permission);
+
+      return { accounts };
+    }
+  });
+
+  server.route({
+    method: "GET",
+    url: `/:connectionId/accounts/:accountId/sites`,
+    config: {
+      rateLimit: readLimit
+    },
+    schema: {
+      params: z.object({
+        connectionId: z.string().uuid(),
+        accountId: z.string()
+      }),
+      response: {
+        200: z.object({
+          sites: z
+            .object({
+              name: z.string(),
+              id: z.string()
+            })
+            .array()
+        })
+      }
+    },
+    onRequest: verifyAuth([AuthMode.JWT]),
+    handler: async (req) => {
+      const { connectionId, accountId } = req.params;
+
+      const sites = await server.services.appConnection.netlify.listSites(connectionId, req.permission, accountId);
+
+      return { sites };
+    }
+  });
+};

backend/src/server/routes/v1/event-router.ts (new file, 118 lines)
@@ -0,0 +1,118 @@
+/* eslint-disable @typescript-eslint/no-floating-promises */
+import { subject } from "@casl/ability";
+import { pipeline } from "stream/promises";
+import { z } from "zod";
+
+import { ActionProjectType, ProjectType } from "@app/db/schemas";
+import { getServerSentEventsHeaders } from "@app/ee/services/event/event-sse-stream";
+import { EventRegisterSchema } from "@app/ee/services/event/types";
+import { ProjectPermissionSecretActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission";
+import { BadRequestError, ForbiddenRequestError, RateLimitError } from "@app/lib/errors";
+import { readLimit } from "@app/server/config/rateLimiter";
+import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
+import { AuthMode } from "@app/services/auth/auth-type";
+
+export const registerEventRouter = async (server: FastifyZodProvider) => {
+  server.route({
+    method: "POST",
+    url: "/subscribe/project-events",
+    config: {
+      rateLimit: readLimit
+    },
+    schema: {
+      body: z.object({
+        projectId: z.string().trim(),
+        register: z.array(EventRegisterSchema).max(10)
+      })
+    },
+    onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
+    handler: async (req, reply) => {
+      try {
+        const { sse, permission, identityAccessToken, authToken, license } = req.server.services;
+
+        const plan = await license.getPlan(req.auth.orgId);
+
+        if (!plan.eventSubscriptions) {
+          throw new BadRequestError({
+            message:
+              "Failed to use event subscriptions due to plan restriction. Upgrade plan to access enterprise event subscriptions."
+          });
+        }
+
+        const count = await sse.getActiveConnectionsCount(req.body.projectId, req.permission.id);
+
+        if (count >= 5) {
+          throw new RateLimitError({
+            message: `Too many active connections for project ${req.body.projectId}. Please close some connections before opening a new one.`
+          });
+        }
+
+        const client = await sse.subscribe({
+          type: ProjectType.SecretManager,
+          registered: req.body.register,
+          async getAuthInfo() {
+            const ability = await permission.getProjectPermission({
+              actor: req.auth.actor,
+              projectId: req.body.projectId,
+              actionProjectType: ActionProjectType.Any,
+              actorAuthMethod: req.auth.authMethod,
+              actorId: req.permission.id,
+              actorOrgId: req.permission.orgId
+            });
+
+            return { permission: ability.permission, actorId: req.permission.id, projectId: req.body.projectId };
+          },
+          async onAuthRefresh(info) {
+            switch (req.auth.authMode) {
+              case AuthMode.JWT:
+                await authToken.fnValidateJwtIdentity(req.auth.token);
+                break;
+              case AuthMode.IDENTITY_ACCESS_TOKEN:
+                await identityAccessToken.fnValidateIdentityAccessToken(req.auth.token, req.realIp);
+                break;
+              default:
+                throw new Error("Unsupported authentication method");
+            }
+
+            req.body.register.forEach((r) => {
+              const allowed = info.permission.can(
+                ProjectPermissionSecretActions.Subscribe,
+                subject(ProjectPermissionSub.Secrets, {
+                  environment: r.conditions?.environmentSlug ?? "",
+                  secretPath: r.conditions?.secretPath ?? "/",
+                  eventType: r.event
+                })
+              );
+
+              if (!allowed) {
+                throw new ForbiddenRequestError({
+                  name: "PermissionDenied",
+                  message: `You are not allowed to subscribe on secrets`,
+                  details: {
+                    event: r.event,
+                    environmentSlug: r.conditions?.environmentSlug,
+                    secretPath: r.conditions?.secretPath ?? "/"
+                  }
+                });
+              }
+            });
+          }
+        });
+
+        // Switches to manual response and enable SSE streaming
+        reply.hijack();
+        reply.raw.writeHead(200, getServerSentEventsHeaders()).flushHeaders();
+        reply.raw.on("close", client.abort);
+
+        await pipeline(client.stream, reply.raw, { signal: client.signal });
+      } catch (error) {
+        if (error instanceof Error && error.name === "AbortError") {
+          // If the stream is aborted, we don't need to do anything
+          return;
+        }
+
+        throw error;
+      }
+    }
+  });
+};
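Illustration, not part of the change set: because the subscription endpoint is a POST that streams Server-Sent Events over the response body, a plain EventSource cannot be used. A hedged client sketch follows; the host, /api/v1/events prefix, event name, and condition fields are assumptions beyond what the handler above defines.

declare const jwt: string; // user JWT or identity access token

const res = await fetch("https://infisical.example.com/api/v1/events/subscribe/project-events", {
  method: "POST",
  headers: { Authorization: `Bearer ${jwt}`, "Content-Type": "application/json" },
  body: JSON.stringify({
    projectId: "<project-id>",
    // register item shape assumed from EventRegisterSchema
    register: [{ event: "secret:create", conditions: { environmentSlug: "dev", secretPath: "/" } }]
  })
});

// Read the SSE stream as chunks arrive.
const reader = res.body!.getReader();
const decoder = new TextDecoder();
for (;;) {
  const { value, done } = await reader.read();
  if (done) break;
  console.log(decoder.decode(value));
}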
@@ -13,6 +13,7 @@ import { registerCaRouter } from "./certificate-authority-router";
 import { CERTIFICATE_AUTHORITY_REGISTER_ROUTER_MAP } from "./certificate-authority-routers";
 import { registerCertRouter } from "./certificate-router";
 import { registerCertificateTemplateRouter } from "./certificate-template-router";
+import { registerEventRouter } from "./event-router";
 import { registerExternalGroupOrgRoleMappingRouter } from "./external-group-org-role-mapping-router";
 import { registerIdentityAccessTokenRouter } from "./identity-access-token-router";
 import { registerIdentityAliCloudAuthRouter } from "./identity-alicloud-auth-router";
@@ -183,4 +184,6 @@ export const registerV1Routes = async (server: FastifyZodProvider) => {
     },
     { prefix: "/reminders" }
   );
+
+  await server.register(registerEventRouter, { prefix: "/events" });
 };
@@ -22,6 +22,7 @@ export const registerSecretReminderRouter = async (server: FastifyZodProvider) =
           message: z.string().trim().max(1024).optional(),
           repeatDays: z.number().min(1).nullable().optional(),
           nextReminderDate: z.string().datetime().nullable().optional(),
+          fromDate: z.string().datetime().nullable().optional(),
           recipients: z.string().array().optional()
         })
         .refine((data) => {
@@ -45,6 +46,7 @@ export const registerSecretReminderRouter = async (server: FastifyZodProvider) =
           message: req.body.message,
           repeatDays: req.body.repeatDays,
           nextReminderDate: req.body.nextReminderDate,
+          fromDate: req.body.fromDate,
           recipients: req.body.recipients
         }
       });
@@ -21,6 +21,7 @@ import { registerGitLabSyncRouter } from "./gitlab-sync-router";
 import { registerHCVaultSyncRouter } from "./hc-vault-sync-router";
 import { registerHerokuSyncRouter } from "./heroku-sync-router";
 import { registerHumanitecSyncRouter } from "./humanitec-sync-router";
+import { registerNetlifySyncRouter } from "./netlify-sync-router";
 import { registerRailwaySyncRouter } from "./railway-sync-router";
 import { registerRenderSyncRouter } from "./render-sync-router";
 import { registerSupabaseSyncRouter } from "./supabase-sync-router";
@@ -61,5 +62,6 @@ export const SECRET_SYNC_REGISTER_ROUTER_MAP: Record<SecretSync, (server: Fastif
   [SecretSync.Railway]: registerRailwaySyncRouter,
   [SecretSync.Checkly]: registerChecklySyncRouter,
   [SecretSync.DigitalOceanAppPlatform]: registerDigitalOceanAppPlatformSyncRouter,
+  [SecretSync.Netlify]: registerNetlifySyncRouter,
   [SecretSync.Bitbucket]: registerBitbucketSyncRouter
 };
@@ -0,0 +1,17 @@
+import {
+  CreateNetlifySyncSchema,
+  NetlifySyncSchema,
+  UpdateNetlifySyncSchema
+} from "@app/services/secret-sync/netlify/netlify-sync-schemas";
+import { SecretSync } from "@app/services/secret-sync/secret-sync-enums";
+
+import { registerSyncSecretsEndpoints } from "./secret-sync-endpoints";
+
+export const registerNetlifySyncRouter = async (server: FastifyZodProvider) =>
+  registerSyncSecretsEndpoints({
+    destination: SecretSync.Netlify,
+    server,
+    responseSchema: NetlifySyncSchema,
+    createSchema: CreateNetlifySyncSchema,
+    updateSchema: UpdateNetlifySyncSchema
+  });
@@ -44,6 +44,7 @@ import { GitLabSyncListItemSchema, GitLabSyncSchema } from "@app/services/secret
 import { HCVaultSyncListItemSchema, HCVaultSyncSchema } from "@app/services/secret-sync/hc-vault";
 import { HerokuSyncListItemSchema, HerokuSyncSchema } from "@app/services/secret-sync/heroku";
 import { HumanitecSyncListItemSchema, HumanitecSyncSchema } from "@app/services/secret-sync/humanitec";
+import { NetlifySyncListItemSchema, NetlifySyncSchema } from "@app/services/secret-sync/netlify";
 import { RailwaySyncListItemSchema, RailwaySyncSchema } from "@app/services/secret-sync/railway/railway-sync-schemas";
 import { RenderSyncListItemSchema, RenderSyncSchema } from "@app/services/secret-sync/render/render-sync-schemas";
 import { SupabaseSyncListItemSchema, SupabaseSyncSchema } from "@app/services/secret-sync/supabase";
@@ -82,6 +83,7 @@ const SecretSyncSchema = z.discriminatedUnion("destination", [
   RailwaySyncSchema,
   ChecklySyncSchema,
   DigitalOceanAppPlatformSyncSchema,
+  NetlifySyncSchema,
   BitbucketSyncSchema
 ]);
 
@@ -114,6 +116,7 @@ const SecretSyncOptionsSchema = z.discriminatedUnion("destination", [
   RailwaySyncListItemSchema,
   ChecklySyncListItemSchema,
   SupabaseSyncListItemSchema,
+  NetlifySyncListItemSchema,
   BitbucketSyncListItemSchema
 ]);
 
@@ -264,6 +264,48 @@ export const registerOrgRouter = async (server: FastifyZodProvider) => {
     }
   });
 
+  server.route({
+    method: "DELETE",
+    url: "/:organizationId/memberships",
+    config: {
+      rateLimit: writeLimit
+    },
+    schema: {
+      hide: false,
+      tags: [ApiDocsTags.Organizations],
+      description: "Bulk delete organization user memberships",
+      security: [
+        {
+          bearerAuth: []
+        }
+      ],
+      params: z.object({
+        organizationId: z.string().trim().describe(ORGANIZATIONS.BULK_DELETE_USER_MEMBERSHIPS.organizationId)
+      }),
+      body: z.object({
+        membershipIds: z.string().trim().array().describe(ORGANIZATIONS.BULK_DELETE_USER_MEMBERSHIPS.membershipIds)
+      }),
+      response: {
+        200: z.object({
+          memberships: OrgMembershipsSchema.array()
+        })
+      }
+    },
+    onRequest: verifyAuth([AuthMode.JWT, AuthMode.API_KEY, AuthMode.IDENTITY_ACCESS_TOKEN]),
+    handler: async (req) => {
+      if (req.auth.actor !== ActorType.USER) return;
+
+      const memberships = await server.services.org.bulkDeleteOrgMemberships({
+        userId: req.permission.id,
+        actorAuthMethod: req.permission.authMethod,
+        orgId: req.params.organizationId,
+        membershipIds: req.body.membershipIds,
+        actorOrgId: req.permission.orgId
+      });
+      return { memberships };
+    }
+  });
+
   server.route({
     // TODO: re-think endpoint structure in future so users only need to pass in membershipId bc organizationId is redundant
     method: "GET",
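Illustration, not part of the change set: a short usage sketch for the new bulk membership deletion. The mount prefix is an assumption (it depends on where registerOrgRouter is registered); the body and response shape come from the schema above.

declare const orgId: string;
declare const accessToken: string;

// Path relative to wherever registerOrgRouter is mounted (prefix assumed here).
await fetch(`/api/v1/organization/${orgId}/memberships`, {
  method: "DELETE",
  headers: { Authorization: `Bearer ${accessToken}`, "Content-Type": "application/json" },
  body: JSON.stringify({ membershipIds: ["<membership-id-1>", "<membership-id-2>"] })
}); // response body: { memberships: [...] }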
@@ -34,6 +34,7 @@ export enum AppConnection {
   Checkly = "checkly",
   Supabase = "supabase",
   DigitalOcean = "digital-ocean",
+  Netlify = "netlify",
   Okta = "okta"
 }
 
@@ -97,6 +97,7 @@ import { getLdapConnectionListItem, LdapConnectionMethod, validateLdapConnection
 import { getMsSqlConnectionListItem, MsSqlConnectionMethod } from "./mssql";
 import { MySqlConnectionMethod } from "./mysql/mysql-connection-enums";
 import { getMySqlConnectionListItem } from "./mysql/mysql-connection-fns";
+import { getNetlifyConnectionListItem, validateNetlifyConnectionCredentials } from "./netlify";
 import { getOktaConnectionListItem, OktaConnectionMethod, validateOktaConnectionCredentials } from "./okta";
 import { getPostgresConnectionListItem, PostgresConnectionMethod } from "./postgres";
 import { getRailwayConnectionListItem, validateRailwayConnectionCredentials } from "./railway";
@@ -163,6 +164,7 @@ export const listAppConnectionOptions = () => {
     getChecklyConnectionListItem(),
     getSupabaseConnectionListItem(),
     getDigitalOceanConnectionListItem(),
+    getNetlifyConnectionListItem(),
     getOktaConnectionListItem()
   ].sort((a, b) => a.name.localeCompare(b.name));
 };
@@ -251,7 +253,8 @@ export const validateAppConnectionCredentials = async (
     [AppConnection.Checkly]: validateChecklyConnectionCredentials as TAppConnectionCredentialsValidator,
     [AppConnection.Supabase]: validateSupabaseConnectionCredentials as TAppConnectionCredentialsValidator,
     [AppConnection.DigitalOcean]: validateDigitalOceanConnectionCredentials as TAppConnectionCredentialsValidator,
-    [AppConnection.Okta]: validateOktaConnectionCredentials as TAppConnectionCredentialsValidator
+    [AppConnection.Okta]: validateOktaConnectionCredentials as TAppConnectionCredentialsValidator,
+    [AppConnection.Netlify]: validateNetlifyConnectionCredentials as TAppConnectionCredentialsValidator
   };
 
   return VALIDATE_APP_CONNECTION_CREDENTIALS_MAP[appConnection.app](appConnection, gatewayService);
@@ -381,6 +384,7 @@ export const TRANSITION_CONNECTION_CREDENTIALS_TO_PLATFORM: Record<
   [AppConnection.Checkly]: platformManagedCredentialsNotSupported,
   [AppConnection.Supabase]: platformManagedCredentialsNotSupported,
   [AppConnection.DigitalOcean]: platformManagedCredentialsNotSupported,
+  [AppConnection.Netlify]: platformManagedCredentialsNotSupported,
   [AppConnection.Okta]: platformManagedCredentialsNotSupported
 };
 
@@ -36,6 +36,7 @@ export const APP_CONNECTION_NAME_MAP: Record<AppConnection, string> = {
   [AppConnection.Checkly]: "Checkly",
   [AppConnection.Supabase]: "Supabase",
   [AppConnection.DigitalOcean]: "DigitalOcean App Platform",
+  [AppConnection.Netlify]: "Netlify",
   [AppConnection.Okta]: "Okta"
 };
 
@@ -75,5 +76,6 @@ export const APP_CONNECTION_PLAN_MAP: Record<AppConnection, AppConnectionPlanTyp
   [AppConnection.Checkly]: AppConnectionPlanType.Regular,
   [AppConnection.Supabase]: AppConnectionPlanType.Regular,
   [AppConnection.DigitalOcean]: AppConnectionPlanType.Regular,
+  [AppConnection.Netlify]: AppConnectionPlanType.Regular,
   [AppConnection.Okta]: AppConnectionPlanType.Regular
 };
@@ -81,6 +81,8 @@ import { humanitecConnectionService } from "./humanitec/humanitec-connection-ser
 import { ValidateLdapConnectionCredentialsSchema } from "./ldap";
 import { ValidateMsSqlConnectionCredentialsSchema } from "./mssql";
 import { ValidateMySqlConnectionCredentialsSchema } from "./mysql";
+import { ValidateNetlifyConnectionCredentialsSchema } from "./netlify";
+import { netlifyConnectionService } from "./netlify/netlify-connection-service";
 import { ValidateOktaConnectionCredentialsSchema } from "./okta";
 import { oktaConnectionService } from "./okta/okta-connection-service";
 import { ValidatePostgresConnectionCredentialsSchema } from "./postgres";
@@ -148,6 +150,7 @@ const VALIDATE_APP_CONNECTION_CREDENTIALS_MAP: Record<AppConnection, TValidateAp
   [AppConnection.Checkly]: ValidateChecklyConnectionCredentialsSchema,
   [AppConnection.Supabase]: ValidateSupabaseConnectionCredentialsSchema,
   [AppConnection.DigitalOcean]: ValidateDigitalOceanConnectionCredentialsSchema,
+  [AppConnection.Netlify]: ValidateNetlifyConnectionCredentialsSchema,
   [AppConnection.Okta]: ValidateOktaConnectionCredentialsSchema
 };
 
@@ -611,6 +614,7 @@ export const appConnectionServiceFactory = ({
     checkly: checklyConnectionService(connectAppConnectionById),
     supabase: supabaseConnectionService(connectAppConnectionById),
     digitalOcean: digitalOceanAppPlatformConnectionService(connectAppConnectionById),
+    netlify: netlifyConnectionService(connectAppConnectionById),
     okta: oktaConnectionService(connectAppConnectionById)
   };
 };
@@ -149,6 +149,12 @@ import {
 } from "./ldap";
 import { TMsSqlConnection, TMsSqlConnectionInput, TValidateMsSqlConnectionCredentialsSchema } from "./mssql";
 import { TMySqlConnection, TMySqlConnectionInput, TValidateMySqlConnectionCredentialsSchema } from "./mysql";
+import {
+  TNetlifyConnection,
+  TNetlifyConnectionConfig,
+  TNetlifyConnectionInput,
+  TValidateNetlifyConnectionCredentialsSchema
+} from "./netlify";
 import {
   TOktaConnection,
   TOktaConnectionConfig,
@@ -245,6 +251,7 @@ export type TAppConnection = { id: string } & (
   | TChecklyConnection
   | TSupabaseConnection
   | TDigitalOceanConnection
+  | TNetlifyConnection
   | TOktaConnection
 );
 
@@ -288,6 +295,7 @@ export type TAppConnectionInput = { id: string } & (
   | TChecklyConnectionInput
   | TSupabaseConnectionInput
   | TDigitalOceanConnectionInput
+  | TNetlifyConnectionInput
   | TOktaConnectionInput
 );
 
@@ -339,6 +347,7 @@ export type TAppConnectionConfig =
   | TChecklyConnectionConfig
   | TSupabaseConnectionConfig
   | TDigitalOceanConnectionConfig
+  | TNetlifyConnectionConfig
   | TOktaConnectionConfig;
 
 export type TValidateAppConnectionCredentialsSchema =
@@ -377,6 +386,7 @@ export type TValidateAppConnectionCredentialsSchema =
   | TValidateChecklyConnectionCredentialsSchema
   | TValidateSupabaseConnectionCredentialsSchema
   | TValidateDigitalOceanCredentialsSchema
+  | TValidateNetlifyConnectionCredentialsSchema
   | TValidateOktaConnectionCredentialsSchema;
 
 export type TListAwsConnectionKmsKeys = {
@@ -1,3 +1,4 @@
 export enum AzureAppConfigurationConnectionMethod {
-  OAuth = "oauth"
+  OAuth = "oauth",
+  ClientSecret = "client-secret"
 }
@@ -1,3 +1,4 @@
+/* eslint-disable no-case-declarations */
 import { AxiosError, AxiosResponse } from "axios";
 
 import { getConfig } from "@app/lib/config/env";
@@ -19,7 +20,10 @@ export const getAzureAppConfigurationConnectionListItem = () => {
   return {
     name: "Azure App Configuration" as const,
     app: AppConnection.AzureAppConfiguration as const,
-    methods: Object.values(AzureAppConfigurationConnectionMethod) as [AzureAppConfigurationConnectionMethod.OAuth],
+    methods: Object.values(AzureAppConfigurationConnectionMethod) as [
+      AzureAppConfigurationConnectionMethod.OAuth,
+      AzureAppConfigurationConnectionMethod.ClientSecret
+    ],
     oauthClientId: INF_APP_CONNECTION_AZURE_APP_CONFIGURATION_CLIENT_ID
   };
 };
@@ -35,71 +39,111 @@ export const validateAzureAppConfigurationConnectionCredentials = async (
     SITE_URL
   } = getConfig();
 
-  if (
-    !INF_APP_CONNECTION_AZURE_APP_CONFIGURATION_CLIENT_ID ||
-    !INF_APP_CONNECTION_AZURE_APP_CONFIGURATION_CLIENT_SECRET
-  ) {
-    throw new InternalServerError({
-      message: `Azure ${getAppConnectionMethodName(method)} environment variables have not been configured`
-    });
-  }
-
-  let tokenResp: AxiosResponse<ExchangeCodeAzureResponse> | null = null;
-  let tokenError: AxiosError | null = null;
-
-  try {
-    tokenResp = await request.post<ExchangeCodeAzureResponse>(
-      IntegrationUrls.AZURE_TOKEN_URL.replace("common", inputCredentials.tenantId || "common"),
-      new URLSearchParams({
-        grant_type: "authorization_code",
-        code: inputCredentials.code,
-        scope: `openid offline_access https://azconfig.io/.default`,
-        client_id: INF_APP_CONNECTION_AZURE_APP_CONFIGURATION_CLIENT_ID,
-        client_secret: INF_APP_CONNECTION_AZURE_APP_CONFIGURATION_CLIENT_SECRET,
-        redirect_uri: `${SITE_URL}/organization/app-connections/azure/oauth/callback`
-      })
-    );
-  } catch (e: unknown) {
-    if (e instanceof AxiosError) {
-      tokenError = e;
-    } else {
-      throw new BadRequestError({
-        message: `Unable to validate connection: verify credentials`
-      });
-    }
-  }
-
-  if (tokenError) {
-    if (tokenError instanceof AxiosError) {
-      throw new BadRequestError({
-        message: `Failed to get access token: ${
-          (tokenError?.response?.data as { error_description?: string })?.error_description || "Unknown error"
-        }`
-      });
-    } else {
-      throw new InternalServerError({
-        message: "Failed to get access token"
-      });
-    }
-  }
-
-  if (!tokenResp) {
-    throw new InternalServerError({
-      message: `Failed to get access token: Token was empty with no error`
-    });
-  }
-
   switch (method) {
     case AzureAppConfigurationConnectionMethod.OAuth:
+      if (
+        !INF_APP_CONNECTION_AZURE_APP_CONFIGURATION_CLIENT_ID ||
+        !INF_APP_CONNECTION_AZURE_APP_CONFIGURATION_CLIENT_SECRET
+      ) {
+        throw new InternalServerError({
+          message: `Azure ${getAppConnectionMethodName(method)} environment variables have not been configured`
+        });
+      }
+
+      let tokenResp: AxiosResponse<ExchangeCodeAzureResponse> | null = null;
+      let tokenError: AxiosError | null = null;
+      const oauthCredentials = inputCredentials as { code: string; tenantId?: string };
+      try {
+        tokenResp = await request.post<ExchangeCodeAzureResponse>(
+          IntegrationUrls.AZURE_TOKEN_URL.replace("common", oauthCredentials.tenantId || "common"),
+          new URLSearchParams({
+            grant_type: "authorization_code",
+            code: oauthCredentials.code,
+            scope: `openid offline_access https://azconfig.io/.default`,
+            client_id: INF_APP_CONNECTION_AZURE_APP_CONFIGURATION_CLIENT_ID,
+            client_secret: INF_APP_CONNECTION_AZURE_APP_CONFIGURATION_CLIENT_SECRET,
+            redirect_uri: `${SITE_URL}/organization/app-connections/azure/oauth/callback`
+          })
+        );
+      } catch (e: unknown) {
+        if (e instanceof AxiosError) {
+          tokenError = e;
+        } else {
+          throw new BadRequestError({
+            message: `Unable to validate connection: verify credentials`
+          });
+        }
+      }
+
+      if (tokenError) {
+        if (tokenError instanceof AxiosError) {
+          throw new BadRequestError({
+            message: `Failed to get access token: ${
+              (tokenError?.response?.data as { error_description?: string })?.error_description || "Unknown error"
+            }`
+          });
+        } else {
+          throw new InternalServerError({
+            message: "Failed to get access token"
+          });
+        }
+      }
+
+      if (!tokenResp) {
+        throw new InternalServerError({
+          message: `Failed to get access token: Token was empty with no error`
+        });
+      }
+
       return {
-        tenantId: inputCredentials.tenantId,
+        tenantId: oauthCredentials.tenantId,
         accessToken: tokenResp.data.access_token,
         refreshToken: tokenResp.data.refresh_token,
        expiresAt: Date.now() + tokenResp.data.expires_in * 1000
      };
 
+    case AzureAppConfigurationConnectionMethod.ClientSecret:
+      const { tenantId, clientId, clientSecret } = inputCredentials as {
+        tenantId: string;
+        clientId: string;
+        clientSecret: string;
+      };
+
+      try {
+        const { data: clientData } = await request.post<ExchangeCodeAzureResponse>(
+          IntegrationUrls.AZURE_TOKEN_URL.replace("common", tenantId || "common"),
+          new URLSearchParams({
+            grant_type: "client_credentials",
+            scope: `https://azconfig.io/.default`,
+            client_id: clientId,
+            client_secret: clientSecret
+          })
+        );
+
+        return {
+          tenantId,
+          accessToken: clientData.access_token,
+          expiresAt: Date.now() + clientData.expires_in * 1000,
+          clientId,
+          clientSecret
+        };
+      } catch (e: unknown) {
+        if (e instanceof AxiosError) {
+          throw new BadRequestError({
+            message: `Failed to get access token: ${
+              (e?.response?.data as { error_description?: string })?.error_description || "Unknown error"
+            }`
+          });
+        } else {
+          throw new InternalServerError({
+            message: "Failed to get access token"
+          });
+        }
+      }
+
     default:
       throw new InternalServerError({
-        message: `Unhandled Azure connection method: ${method as AzureAppConfigurationConnectionMethod}`
+        message: `Unhandled Azure App Configuration connection method: ${method as AzureAppConfigurationConnectionMethod}`
       });
   }
 };
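Illustration, not part of the change set: the new ClientSecret branch is a standard OAuth2 client-credentials exchange against the Azure AD token endpoint. A standalone sketch of that request follows; the concrete token URL is an assumption (the code above derives it from IntegrationUrls.AZURE_TOKEN_URL with the tenant substituted), and the credential values are placeholders.

import axios from "axios";

const tenantId = "<tenant-id>";
const clientId = "<client-id>";
const clientSecret = "<client-secret>";

// Same grant type and scope the validator uses for the client-secret method.
const { data } = await axios.post<{ access_token: string; expires_in: number }>(
  `https://login.microsoftonline.com/${tenantId}/oauth2/v2.0/token`, // assumed expansion of AZURE_TOKEN_URL
  new URLSearchParams({
    grant_type: "client_credentials",
    scope: "https://azconfig.io/.default",
    client_id: clientId,
    client_secret: clientSecret
  })
);

console.log("token expires at", Date.now() + data.expires_in * 1000);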
|
@@ -22,6 +22,29 @@ export const AzureAppConfigurationConnectionOAuthOutputCredentialsSchema = z.obj
   expiresAt: z.number()
 });

+export const AzureAppConfigurationConnectionClientSecretInputCredentialsSchema = z.object({
+  clientId: z
+    .string()
+    .uuid()
+    .trim()
+    .min(1, "Client ID required")
+    .max(50, "Client ID must be at most 50 characters long"),
+  clientSecret: z
+    .string()
+    .trim()
+    .min(1, "Client Secret required")
+    .max(50, "Client Secret must be at most 50 characters long"),
+  tenantId: z.string().uuid().trim().min(1, "Tenant ID required")
+});
+
+export const AzureAppConfigurationConnectionClientSecretOutputCredentialsSchema = z.object({
+  clientId: z.string(),
+  clientSecret: z.string(),
+  tenantId: z.string(),
+  accessToken: z.string(),
+  expiresAt: z.number()
+});
+
 export const ValidateAzureAppConfigurationConnectionCredentialsSchema = z.discriminatedUnion("method", [
   z.object({
     method: z
@@ -30,6 +53,14 @@ export const ValidateAzureAppConfigurationConnectionCredentialsSchema = z.discri
     credentials: AzureAppConfigurationConnectionOAuthInputCredentialsSchema.describe(
       AppConnections.CREATE(AppConnection.AzureAppConfiguration).credentials
     )
+  }),
+  z.object({
+    method: z
+      .literal(AzureAppConfigurationConnectionMethod.ClientSecret)
+      .describe(AppConnections.CREATE(AppConnection.AzureAppConfiguration).method),
+    credentials: AzureAppConfigurationConnectionClientSecretInputCredentialsSchema.describe(
+      AppConnections.CREATE(AppConnection.AzureAppConfiguration).credentials
+    )
   })
 ]);

@@ -39,9 +70,13 @@ export const CreateAzureAppConfigurationConnectionSchema = ValidateAzureAppConfi

 export const UpdateAzureAppConfigurationConnectionSchema = z
   .object({
-    credentials: AzureAppConfigurationConnectionOAuthInputCredentialsSchema.optional().describe(
-      AppConnections.UPDATE(AppConnection.AzureAppConfiguration).credentials
-    )
+    credentials: z
+      .union([
+        AzureAppConfigurationConnectionOAuthInputCredentialsSchema,
+        AzureAppConfigurationConnectionClientSecretInputCredentialsSchema
+      ])
+      .optional()
+      .describe(AppConnections.UPDATE(AppConnection.AzureAppConfiguration).credentials)
   })
   .and(GenericUpdateAppConnectionFieldsSchema(AppConnection.AzureAppConfiguration));

@@ -55,6 +90,10 @@ export const AzureAppConfigurationConnectionSchema = z.intersection(
     z.object({
       method: z.literal(AzureAppConfigurationConnectionMethod.OAuth),
       credentials: AzureAppConfigurationConnectionOAuthOutputCredentialsSchema
+    }),
+    z.object({
+      method: z.literal(AzureAppConfigurationConnectionMethod.ClientSecret),
+      credentials: AzureAppConfigurationConnectionClientSecretOutputCredentialsSchema
     })
   ])
 );
@@ -65,6 +104,13 @@ export const SanitizedAzureAppConfigurationConnectionSchema = z.discriminatedUni
     credentials: AzureAppConfigurationConnectionOAuthOutputCredentialsSchema.pick({
       tenantId: true
     })
+  }),
+  BaseAzureAppConfigurationConnectionSchema.extend({
+    method: z.literal(AzureAppConfigurationConnectionMethod.ClientSecret),
+    credentials: AzureAppConfigurationConnectionClientSecretOutputCredentialsSchema.pick({
+      clientId: true,
+      tenantId: true
+    })
   })
 ]);

@@ -4,6 +4,7 @@ import { DiscriminativePick } from "@app/lib/types";

 import { AppConnection } from "../app-connection-enums";
 import {
+  AzureAppConfigurationConnectionClientSecretOutputCredentialsSchema,
   AzureAppConfigurationConnectionOAuthOutputCredentialsSchema,
   AzureAppConfigurationConnectionSchema,
   CreateAzureAppConfigurationConnectionSchema,
@@ -39,3 +40,7 @@ export type ExchangeCodeAzureResponse = {
 export type TAzureAppConfigurationConnectionCredentials = z.infer<
   typeof AzureAppConfigurationConnectionOAuthOutputCredentialsSchema
 >;
+
+export type TAzureAppConfigurationConnectionClientSecretCredentials = z.infer<
+  typeof AzureAppConfigurationConnectionClientSecretOutputCredentialsSchema
+>;
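As a quick illustration of how the extended discriminated union behaves, here is a rough self-contained sketch with the schemas inlined and heavily abbreviated; the literal method values "oauth" and "client-secret" are assumed from the connection enums shown further below, and the real schemas are the ones added above:

import { z } from "zod";

// Abbreviated stand-ins for the schemas added above (validation rules trimmed).
const OAuthInputCredentials = z.object({ code: z.string().min(1), tenantId: z.string().uuid().optional() });
const ClientSecretInputCredentials = z.object({
  clientId: z.string().uuid(),
  clientSecret: z.string().min(1).max(50),
  tenantId: z.string().uuid()
});

const ValidateCredentials = z.discriminatedUnion("method", [
  z.object({ method: z.literal("oauth"), credentials: OAuthInputCredentials }),
  z.object({ method: z.literal("client-secret"), credentials: ClientSecretInputCredentials })
]);

// A client-secret payload now parses alongside the existing OAuth shape.
const result = ValidateCredentials.safeParse({
  method: "client-secret",
  credentials: {
    clientId: "3f2504e0-4f89-41d3-9a0c-0305e82c3301",
    clientSecret: "example-secret",
    tenantId: "3f2504e0-4f89-41d3-9a0c-0305e82c3301"
  }
});
console.log(result.success); // true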
@@ -1,4 +1,5 @@
 export enum AzureDevOpsConnectionMethod {
   OAuth = "oauth",
-  AccessToken = "access-token"
+  AccessToken = "access-token",
+  ClientSecret = "client-secret"
 }
@@ -18,6 +18,7 @@ import { AppConnection } from "../app-connection-enums";
 import { AzureDevOpsConnectionMethod } from "./azure-devops-enums";
 import {
   ExchangeCodeAzureResponse,
+  TAzureDevOpsConnectionClientSecretCredentials,
   TAzureDevOpsConnectionConfig,
   TAzureDevOpsConnectionCredentials
 } from "./azure-devops-types";
@@ -30,7 +31,8 @@ export const getAzureDevopsConnectionListItem = () => {
     app: AppConnection.AzureDevOps as const,
     methods: Object.values(AzureDevOpsConnectionMethod) as [
       AzureDevOpsConnectionMethod.OAuth,
-      AzureDevOpsConnectionMethod.AccessToken
+      AzureDevOpsConnectionMethod.AccessToken,
+      AzureDevOpsConnectionMethod.ClientSecret
     ],
     oauthClientId: INF_APP_CONNECTION_AZURE_DEVOPS_CLIENT_ID
   };
 };
@@ -53,11 +55,7 @@ export const getAzureDevopsConnection = async (
     });
   }

-  const credentials = (await decryptAppConnectionCredentials({
-    orgId: appConnection.orgId,
-    kmsService,
-    encryptedCredentials: appConnection.encryptedCredentials
-  })) as TAzureDevOpsConnectionCredentials;
+  const currentTime = Date.now();

   // Handle different connection methods
   switch (appConnection.method) {
@@ -69,12 +67,17 @@ export const getAzureDevopsConnection = async (
         });
       }

-      if (!("refreshToken" in credentials)) {
+      const oauthCredentials = (await decryptAppConnectionCredentials({
+        orgId: appConnection.orgId,
+        kmsService,
+        encryptedCredentials: appConnection.encryptedCredentials
+      })) as TAzureDevOpsConnectionCredentials;
+
+      if (!("refreshToken" in oauthCredentials)) {
         throw new BadRequestError({ message: "Invalid OAuth credentials" });
       }

-      const { refreshToken, tenantId } = credentials;
-      const currentTime = Date.now();
+      const { refreshToken, tenantId } = oauthCredentials;

       const { data } = await request.post<ExchangeCodeAzureResponse>(
         IntegrationUrls.AZURE_TOKEN_URL.replace("common", tenantId || "common"),
@@ -87,29 +90,75 @@ export const getAzureDevopsConnection = async (
|
|||||||
})
|
})
|
||||||
);
|
);
|
||||||
|
|
||||||
const updatedCredentials = {
|
const updatedOAuthCredentials = {
|
||||||
...credentials,
|
...oauthCredentials,
|
||||||
accessToken: data.access_token,
|
accessToken: data.access_token,
|
||||||
expiresAt: currentTime + data.expires_in * 1000,
|
expiresAt: currentTime + data.expires_in * 1000,
|
||||||
refreshToken: data.refresh_token
|
refreshToken: data.refresh_token
|
||||||
};
|
};
|
||||||
|
|
||||||
const encryptedCredentials = await encryptAppConnectionCredentials({
|
const encryptedOAuthCredentials = await encryptAppConnectionCredentials({
|
||||||
credentials: updatedCredentials,
|
credentials: updatedOAuthCredentials,
|
||||||
orgId: appConnection.orgId,
|
orgId: appConnection.orgId,
|
||||||
kmsService
|
kmsService
|
||||||
});
|
});
|
||||||
|
|
||||||
await appConnectionDAL.updateById(appConnection.id, { encryptedCredentials });
|
await appConnectionDAL.updateById(appConnection.id, { encryptedCredentials: encryptedOAuthCredentials });
|
||||||
|
|
||||||
return data.access_token;
|
return data.access_token;
|
||||||
|
|
||||||
case AzureDevOpsConnectionMethod.AccessToken:
|
case AzureDevOpsConnectionMethod.AccessToken:
|
||||||
if (!("accessToken" in credentials)) {
|
const accessTokenCredentials = (await decryptAppConnectionCredentials({
|
||||||
|
orgId: appConnection.orgId,
|
||||||
|
kmsService,
|
||||||
|
encryptedCredentials: appConnection.encryptedCredentials
|
||||||
|
})) as { accessToken: string };
|
||||||
|
|
||||||
|
if (!("accessToken" in accessTokenCredentials)) {
|
||||||
throw new BadRequestError({ message: "Invalid API token credentials" });
|
throw new BadRequestError({ message: "Invalid API token credentials" });
|
||||||
}
|
}
|
||||||
// For access token, return the basic auth token directly
|
// For access token, return the basic auth token directly
|
||||||
return credentials.accessToken;
|
return accessTokenCredentials.accessToken;
|
||||||
|
|
||||||
|
case AzureDevOpsConnectionMethod.ClientSecret:
|
||||||
|
const clientSecretCredentials = (await decryptAppConnectionCredentials({
|
||||||
|
orgId: appConnection.orgId,
|
||||||
|
kmsService,
|
||||||
|
encryptedCredentials: appConnection.encryptedCredentials
|
||||||
|
})) as TAzureDevOpsConnectionClientSecretCredentials;
|
||||||
|
|
||||||
|
const { accessToken, expiresAt, clientId, clientSecret, tenantId: clientTenantId } = clientSecretCredentials;
|
||||||
|
|
||||||
|
// Check if token is still valid (with 5 minute buffer)
|
||||||
|
if (accessToken && expiresAt && expiresAt > currentTime + 300000) {
|
||||||
|
return accessToken;
|
||||||
|
}
|
||||||
|
|
||||||
|
const { data: clientData } = await request.post<ExchangeCodeAzureResponse>(
|
||||||
|
IntegrationUrls.AZURE_TOKEN_URL.replace("common", clientTenantId || "common"),
|
||||||
|
new URLSearchParams({
|
||||||
|
grant_type: "client_credentials",
|
||||||
|
scope: `https://app.vssps.visualstudio.com/.default`,
|
||||||
|
client_id: clientId,
|
||||||
|
client_secret: clientSecret
|
||||||
|
})
|
||||||
|
);
|
||||||
|
|
||||||
|
const updatedClientCredentials = {
|
||||||
|
...clientSecretCredentials,
|
||||||
|
accessToken: clientData.access_token,
|
||||||
|
expiresAt: currentTime + clientData.expires_in * 1000
|
||||||
|
};
|
||||||
|
|
||||||
|
const encryptedClientCredentials = await encryptAppConnectionCredentials({
|
||||||
|
credentials: updatedClientCredentials,
|
||||||
|
orgId: appConnection.orgId,
|
||||||
|
kmsService
|
||||||
|
});
|
||||||
|
|
||||||
|
await appConnectionDAL.updateById(appConnection.id, { encryptedCredentials: encryptedClientCredentials });
|
||||||
|
|
||||||
|
return clientData.access_token;
|
||||||
|
|
||||||
default:
|
default:
|
||||||
throw new BadRequestError({ message: `Unsupported connection method` });
|
throw new BadRequestError({ message: `Unsupported connection method` });
|
||||||
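The client-secret branches above cache the issued token on the connection and only request a new one when it is within five minutes of expiring (the 300000 ms buffer). A tiny sketch of that reuse check in isolation:

// Reuse a cached token only while it is more than five minutes away from expiry.
const FIVE_MINUTES_MS = 5 * 60 * 1000;

function shouldReuseToken(accessToken?: string, expiresAt?: number, now: number = Date.now()): boolean {
  return Boolean(accessToken && expiresAt && expiresAt > now + FIVE_MINUTES_MS);
}

console.log(shouldReuseToken("token", Date.now() + 10 * 60 * 1000)); // true: still fresh
console.log(shouldReuseToken("token", Date.now() + 2 * 60 * 1000)); // false: refresh instead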
@@ -138,7 +187,7 @@ export const validateAzureDevOpsConnectionCredentials = async (config: TAzureDev
       let tokenError: AxiosError | null = null;

       try {
-        const oauthCredentials = inputCredentials as { code: string; tenantId: string };
+        const oauthCredentials = inputCredentials as { code: string; tenantId: string; orgName: string };
         tokenResp = await request.post<ExchangeCodeAzureResponse>(
           IntegrationUrls.AZURE_TOKEN_URL.replace("common", oauthCredentials.tenantId || "common"),
           new URLSearchParams({
@@ -262,9 +311,67 @@ export const validateAzureDevOpsConnectionCredentials = async (config: TAzureDev
|
|||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
|
case AzureDevOpsConnectionMethod.ClientSecret:
|
||||||
|
const { tenantId, clientId, clientSecret, orgName } = inputCredentials as {
|
||||||
|
tenantId: string;
|
||||||
|
clientId: string;
|
||||||
|
clientSecret: string;
|
||||||
|
orgName: string;
|
||||||
|
};
|
||||||
|
|
||||||
|
try {
|
||||||
|
// First, get the access token using client credentials flow
|
||||||
|
const { data: clientData } = await request.post<ExchangeCodeAzureResponse>(
|
||||||
|
IntegrationUrls.AZURE_TOKEN_URL.replace("common", tenantId || "common"),
|
||||||
|
new URLSearchParams({
|
||||||
|
grant_type: "client_credentials",
|
||||||
|
scope: `https://app.vssps.visualstudio.com/.default`,
|
||||||
|
client_id: clientId,
|
||||||
|
client_secret: clientSecret
|
||||||
|
})
|
||||||
|
);
|
||||||
|
|
||||||
|
// Validate access to the specific organization
|
||||||
|
const response = await request.get(
|
||||||
|
`${IntegrationUrls.AZURE_DEVOPS_API_URL}/${encodeURIComponent(orgName)}/_apis/projects?api-version=7.2-preview.2&$top=1`,
|
||||||
|
{
|
||||||
|
headers: {
|
||||||
|
Authorization: `Bearer ${clientData.access_token}`
|
||||||
|
}
|
||||||
|
}
|
||||||
|
);
|
||||||
|
|
||||||
|
if (response.status !== 200) {
|
||||||
|
throw new BadRequestError({
|
||||||
|
message: `Failed to validate connection to organization '${orgName}': ${response.status}`
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
tenantId,
|
||||||
|
clientId,
|
||||||
|
clientSecret,
|
||||||
|
orgName,
|
||||||
|
accessToken: clientData.access_token,
|
||||||
|
expiresAt: Date.now() + clientData.expires_in * 1000
|
||||||
|
};
|
||||||
|
} catch (e: unknown) {
|
||||||
|
if (e instanceof AxiosError) {
|
||||||
|
throw new BadRequestError({
|
||||||
|
message: `Failed to authenticate with Azure DevOps using client credentials: ${
|
||||||
|
(e?.response?.data as { error_description?: string })?.error_description || e.message
|
||||||
|
}`
|
||||||
|
});
|
||||||
|
} else {
|
||||||
|
throw new InternalServerError({
|
||||||
|
message: "Failed to validate Azure DevOps client credentials"
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
default:
|
default:
|
||||||
throw new InternalServerError({
|
throw new InternalServerError({
|
||||||
message: `Unhandled Azure connection method: ${method as AzureDevOpsConnectionMethod}`
|
message: `Unhandled Azure DevOps connection method: ${method as AzureDevOpsConnectionMethod}`
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
@@ -38,6 +38,42 @@ export const AzureDevOpsConnectionAccessTokenOutputCredentialsSchema = z.object(
|
|||||||
orgName: z.string()
|
orgName: z.string()
|
||||||
});
|
});
|
||||||
|
|
||||||
|
export const AzureDevOpsConnectionClientSecretInputCredentialsSchema = z.object({
|
||||||
|
clientId: z
|
||||||
|
.string()
|
||||||
|
.uuid()
|
||||||
|
.trim()
|
||||||
|
.min(1, "Client ID required")
|
||||||
|
.max(50, "Client ID must be at most 50 characters long")
|
||||||
|
.describe(AppConnections.CREDENTIALS.AZURE_DEVOPS.clientId),
|
||||||
|
clientSecret: z
|
||||||
|
.string()
|
||||||
|
.trim()
|
||||||
|
.min(1, "Client Secret required")
|
||||||
|
.max(50, "Client Secret must be at most 50 characters long")
|
||||||
|
.describe(AppConnections.CREDENTIALS.AZURE_DEVOPS.clientSecret),
|
||||||
|
tenantId: z
|
||||||
|
.string()
|
||||||
|
.uuid()
|
||||||
|
.trim()
|
||||||
|
.min(1, "Tenant ID required")
|
||||||
|
.describe(AppConnections.CREDENTIALS.AZURE_DEVOPS.tenantId),
|
||||||
|
orgName: z
|
||||||
|
.string()
|
||||||
|
.trim()
|
||||||
|
.min(1, "Organization name required")
|
||||||
|
.describe(AppConnections.CREDENTIALS.AZURE_DEVOPS.orgName)
|
||||||
|
});
|
||||||
|
|
||||||
|
export const AzureDevOpsConnectionClientSecretOutputCredentialsSchema = z.object({
|
||||||
|
clientId: z.string(),
|
||||||
|
clientSecret: z.string(),
|
||||||
|
tenantId: z.string(),
|
||||||
|
orgName: z.string(),
|
||||||
|
accessToken: z.string(),
|
||||||
|
expiresAt: z.number()
|
||||||
|
});
|
||||||
|
|
||||||
export const ValidateAzureDevOpsConnectionCredentialsSchema = z.discriminatedUnion("method", [
|
export const ValidateAzureDevOpsConnectionCredentialsSchema = z.discriminatedUnion("method", [
|
||||||
z.object({
|
z.object({
|
||||||
method: z
|
method: z
|
||||||
@@ -54,6 +90,14 @@ export const ValidateAzureDevOpsConnectionCredentialsSchema = z.discriminatedUni
|
|||||||
credentials: AzureDevOpsConnectionAccessTokenInputCredentialsSchema.describe(
|
credentials: AzureDevOpsConnectionAccessTokenInputCredentialsSchema.describe(
|
||||||
AppConnections.CREATE(AppConnection.AzureDevOps).credentials
|
AppConnections.CREATE(AppConnection.AzureDevOps).credentials
|
||||||
)
|
)
|
||||||
|
}),
|
||||||
|
z.object({
|
||||||
|
method: z
|
||||||
|
.literal(AzureDevOpsConnectionMethod.ClientSecret)
|
||||||
|
.describe(AppConnections.CREATE(AppConnection.AzureDevOps).method),
|
||||||
|
credentials: AzureDevOpsConnectionClientSecretInputCredentialsSchema.describe(
|
||||||
|
AppConnections.CREATE(AppConnection.AzureDevOps).credentials
|
||||||
|
)
|
||||||
})
|
})
|
||||||
]);
|
]);
|
||||||
|
|
||||||
@@ -64,7 +108,11 @@ export const CreateAzureDevOpsConnectionSchema = ValidateAzureDevOpsConnectionCr
|
|||||||
export const UpdateAzureDevOpsConnectionSchema = z
|
export const UpdateAzureDevOpsConnectionSchema = z
|
||||||
.object({
|
.object({
|
||||||
credentials: z
|
credentials: z
|
||||||
.union([AzureDevOpsConnectionOAuthInputCredentialsSchema, AzureDevOpsConnectionAccessTokenInputCredentialsSchema])
|
.union([
|
||||||
|
AzureDevOpsConnectionOAuthInputCredentialsSchema,
|
||||||
|
AzureDevOpsConnectionAccessTokenInputCredentialsSchema,
|
||||||
|
AzureDevOpsConnectionClientSecretInputCredentialsSchema
|
||||||
|
])
|
||||||
.optional()
|
.optional()
|
||||||
.describe(AppConnections.UPDATE(AppConnection.AzureDevOps).credentials)
|
.describe(AppConnections.UPDATE(AppConnection.AzureDevOps).credentials)
|
||||||
})
|
})
|
||||||
@@ -84,6 +132,10 @@ export const AzureDevOpsConnectionSchema = z.intersection(
|
|||||||
z.object({
|
z.object({
|
||||||
method: z.literal(AzureDevOpsConnectionMethod.AccessToken),
|
method: z.literal(AzureDevOpsConnectionMethod.AccessToken),
|
||||||
credentials: AzureDevOpsConnectionAccessTokenOutputCredentialsSchema
|
credentials: AzureDevOpsConnectionAccessTokenOutputCredentialsSchema
|
||||||
|
}),
|
||||||
|
z.object({
|
||||||
|
method: z.literal(AzureDevOpsConnectionMethod.ClientSecret),
|
||||||
|
credentials: AzureDevOpsConnectionClientSecretOutputCredentialsSchema
|
||||||
})
|
})
|
||||||
])
|
])
|
||||||
);
|
);
|
||||||
@@ -101,6 +153,14 @@ export const SanitizedAzureDevOpsConnectionSchema = z.discriminatedUnion("method
|
|||||||
credentials: AzureDevOpsConnectionAccessTokenOutputCredentialsSchema.pick({
|
credentials: AzureDevOpsConnectionAccessTokenOutputCredentialsSchema.pick({
|
||||||
orgName: true
|
orgName: true
|
||||||
})
|
})
|
||||||
|
}),
|
||||||
|
BaseAzureDevOpsConnectionSchema.extend({
|
||||||
|
method: z.literal(AzureDevOpsConnectionMethod.ClientSecret),
|
||||||
|
credentials: AzureDevOpsConnectionClientSecretOutputCredentialsSchema.pick({
|
||||||
|
clientId: true,
|
||||||
|
tenantId: true,
|
||||||
|
orgName: true
|
||||||
|
})
|
||||||
})
|
})
|
||||||
]);
|
]);
|
||||||
|
|
||||||
|
@@ -52,6 +52,11 @@ const getAuthHeaders = (appConnection: TAzureDevOpsConnection, accessToken: stri
         Authorization: `Basic ${basicAuthToken}`,
         Accept: "application/json"
       };
+    case AzureDevOpsConnectionMethod.ClientSecret:
+      return {
+        Authorization: `Bearer ${accessToken}`,
+        Accept: "application/json"
+      };
     default:
       throw new BadRequestError({ message: "Unsupported connection method" });
   }
@@ -4,6 +4,7 @@ import { DiscriminativePick } from "@app/lib/types";

 import { AppConnection } from "../app-connection-enums";
 import {
+  AzureDevOpsConnectionClientSecretOutputCredentialsSchema,
   AzureDevOpsConnectionOAuthOutputCredentialsSchema,
   AzureDevOpsConnectionSchema,
   CreateAzureDevOpsConnectionSchema,
@@ -27,6 +28,10 @@ export type TAzureDevOpsConnectionConfig = DiscriminativePick<

 export type TAzureDevOpsConnectionCredentials = z.infer<typeof AzureDevOpsConnectionOAuthOutputCredentialsSchema>;

+export type TAzureDevOpsConnectionClientSecretCredentials = z.infer<
+  typeof AzureDevOpsConnectionClientSecretOutputCredentialsSchema
+>;
+
 export interface ExchangeCodeAzureResponse {
   token_type: string;
   scope: string;
@@ -1,3 +1,4 @@
 export enum AzureKeyVaultConnectionMethod {
-  OAuth = "oauth"
+  OAuth = "oauth",
+  ClientSecret = "client-secret"
 }
@@ -1,3 +1,4 @@
+/* eslint-disable no-case-declarations */
 import { AxiosError, AxiosResponse } from "axios";

 import { getConfig } from "@app/lib/config/env";
@@ -16,25 +17,16 @@ import { AppConnection } from "../app-connection-enums";
|
|||||||
import { AzureKeyVaultConnectionMethod } from "./azure-key-vault-connection-enums";
|
import { AzureKeyVaultConnectionMethod } from "./azure-key-vault-connection-enums";
|
||||||
import {
|
import {
|
||||||
ExchangeCodeAzureResponse,
|
ExchangeCodeAzureResponse,
|
||||||
|
TAzureKeyVaultConnectionClientSecretCredentials,
|
||||||
TAzureKeyVaultConnectionConfig,
|
TAzureKeyVaultConnectionConfig,
|
||||||
TAzureKeyVaultConnectionCredentials
|
TAzureKeyVaultConnectionCredentials
|
||||||
} from "./azure-key-vault-connection-types";
|
} from "./azure-key-vault-connection-types";
|
||||||
|
|
||||||
export const getAzureConnectionAccessToken = async (
|
export const getAzureConnectionAccessToken = async (
|
||||||
connectionId: string,
|
connectionId: string,
|
||||||
appConnectionDAL: Pick<TAppConnectionDALFactory, "findById" | "update">,
|
appConnectionDAL: Pick<TAppConnectionDALFactory, "findById" | "updateById">,
|
||||||
kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">
|
kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">
|
||||||
) => {
|
) => {
|
||||||
const appCfg = getConfig();
|
|
||||||
if (
|
|
||||||
!appCfg.INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_ID ||
|
|
||||||
!appCfg.INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_SECRET
|
|
||||||
) {
|
|
||||||
throw new BadRequestError({
|
|
||||||
message: `Azure environment variables have not been configured`
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
const appConnection = await appConnectionDAL.findById(connectionId);
|
const appConnection = await appConnectionDAL.findById(connectionId);
|
||||||
|
|
||||||
if (!appConnection) {
|
if (!appConnection) {
|
||||||
@@ -49,49 +41,101 @@ export const getAzureConnectionAccessToken = async (
|
|||||||
throw new BadRequestError({ message: `Connection with ID '${connectionId}' is not a valid Azure connection` });
|
throw new BadRequestError({ message: `Connection with ID '${connectionId}' is not a valid Azure connection` });
|
||||||
}
|
}
|
||||||
|
|
||||||
const credentials = (await decryptAppConnectionCredentials({
|
const currentTime = Date.now();
|
||||||
orgId: appConnection.orgId,
|
|
||||||
kmsService,
|
|
||||||
encryptedCredentials: appConnection.encryptedCredentials
|
|
||||||
})) as TAzureKeyVaultConnectionCredentials;
|
|
||||||
|
|
||||||
const { data } = await request.post<ExchangeCodeAzureResponse>(
|
switch (appConnection.method) {
|
||||||
IntegrationUrls.AZURE_TOKEN_URL.replace("common", credentials.tenantId || "common"),
|
case AzureKeyVaultConnectionMethod.OAuth:
|
||||||
new URLSearchParams({
|
const appCfg = getConfig();
|
||||||
grant_type: "refresh_token",
|
if (
|
||||||
scope: `openid offline_access`,
|
!appCfg.INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_ID ||
|
||||||
client_id: appCfg.INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_ID,
|
!appCfg.INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_SECRET
|
||||||
client_secret: appCfg.INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_SECRET,
|
) {
|
||||||
refresh_token: credentials.refreshToken
|
throw new BadRequestError({
|
||||||
})
|
message: `Azure environment variables have not been configured`
|
||||||
);
|
});
|
||||||
|
}
|
||||||
|
|
||||||
const accessExpiresAt = new Date();
|
const oauthCredentials = (await decryptAppConnectionCredentials({
|
||||||
accessExpiresAt.setSeconds(accessExpiresAt.getSeconds() + data.expires_in);
|
orgId: appConnection.orgId,
|
||||||
|
kmsService,
|
||||||
|
encryptedCredentials: appConnection.encryptedCredentials
|
||||||
|
})) as TAzureKeyVaultConnectionCredentials;
|
||||||
|
|
||||||
const updatedCredentials = {
|
const { data } = await request.post<ExchangeCodeAzureResponse>(
|
||||||
...credentials,
|
IntegrationUrls.AZURE_TOKEN_URL.replace("common", oauthCredentials.tenantId || "common"),
|
||||||
accessToken: data.access_token,
|
new URLSearchParams({
|
||||||
expiresAt: accessExpiresAt.getTime(),
|
grant_type: "refresh_token",
|
||||||
refreshToken: data.refresh_token
|
scope: `openid offline_access https://vault.azure.net/.default`,
|
||||||
};
|
client_id: appCfg.INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_ID,
|
||||||
|
client_secret: appCfg.INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_SECRET,
|
||||||
|
refresh_token: oauthCredentials.refreshToken
|
||||||
|
})
|
||||||
|
);
|
||||||
|
|
||||||
const encryptedCredentials = await encryptAppConnectionCredentials({
|
const updatedOAuthCredentials = {
|
||||||
credentials: updatedCredentials,
|
...oauthCredentials,
|
||||||
orgId: appConnection.orgId,
|
accessToken: data.access_token,
|
||||||
kmsService
|
expiresAt: currentTime + data.expires_in * 1000,
|
||||||
});
|
refreshToken: data.refresh_token
|
||||||
|
};
|
||||||
|
|
||||||
await appConnectionDAL.update(
|
const encryptedOAuthCredentials = await encryptAppConnectionCredentials({
|
||||||
{ id: connectionId },
|
credentials: updatedOAuthCredentials,
|
||||||
{
|
orgId: appConnection.orgId,
|
||||||
encryptedCredentials
|
kmsService
|
||||||
}
|
});
|
||||||
);
|
|
||||||
|
|
||||||
return {
|
await appConnectionDAL.updateById(appConnection.id, { encryptedCredentials: encryptedOAuthCredentials });
|
||||||
accessToken: data.access_token
|
|
||||||
};
|
return {
|
||||||
|
accessToken: data.access_token
|
||||||
|
};
|
||||||
|
|
||||||
|
case AzureKeyVaultConnectionMethod.ClientSecret:
|
||||||
|
const clientSecretCredentials = (await decryptAppConnectionCredentials({
|
||||||
|
orgId: appConnection.orgId,
|
||||||
|
kmsService,
|
||||||
|
encryptedCredentials: appConnection.encryptedCredentials
|
||||||
|
})) as TAzureKeyVaultConnectionClientSecretCredentials;
|
||||||
|
|
||||||
|
const { accessToken, expiresAt, clientId, clientSecret, tenantId } = clientSecretCredentials;
|
||||||
|
|
||||||
|
// Check if token is still valid (with 5 minute buffer)
|
||||||
|
if (accessToken && expiresAt && expiresAt > currentTime + 300000) {
|
||||||
|
return { accessToken };
|
||||||
|
}
|
||||||
|
|
||||||
|
const { data: clientData } = await request.post<ExchangeCodeAzureResponse>(
|
||||||
|
IntegrationUrls.AZURE_TOKEN_URL.replace("common", tenantId || "common"),
|
||||||
|
new URLSearchParams({
|
||||||
|
grant_type: "client_credentials",
|
||||||
|
scope: `https://vault.azure.net/.default`,
|
||||||
|
client_id: clientId,
|
||||||
|
client_secret: clientSecret
|
||||||
|
})
|
||||||
|
);
|
||||||
|
|
||||||
|
const updatedClientCredentials = {
|
||||||
|
...clientSecretCredentials,
|
||||||
|
accessToken: clientData.access_token,
|
||||||
|
expiresAt: currentTime + clientData.expires_in * 1000
|
||||||
|
};
|
||||||
|
|
||||||
|
const encryptedClientCredentials = await encryptAppConnectionCredentials({
|
||||||
|
credentials: updatedClientCredentials,
|
||||||
|
orgId: appConnection.orgId,
|
||||||
|
kmsService
|
||||||
|
});
|
||||||
|
|
||||||
|
await appConnectionDAL.updateById(appConnection.id, { encryptedCredentials: encryptedClientCredentials });
|
||||||
|
|
||||||
|
return { accessToken: clientData.access_token };
|
||||||
|
|
||||||
|
default:
|
||||||
|
throw new InternalServerError({
|
||||||
|
message: `Unhandled Azure Key Vault connection method: ${appConnection.method as AzureKeyVaultConnectionMethod}`
|
||||||
|
});
|
||||||
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
export const getAzureKeyVaultConnectionListItem = () => {
|
export const getAzureKeyVaultConnectionListItem = () => {
|
||||||
@@ -100,7 +144,10 @@ export const getAzureKeyVaultConnectionListItem = () => {
   return {
     name: "Azure Key Vault" as const,
     app: AppConnection.AzureKeyVault as const,
-    methods: Object.values(AzureKeyVaultConnectionMethod) as [AzureKeyVaultConnectionMethod.OAuth],
+    methods: Object.values(AzureKeyVaultConnectionMethod) as [
+      AzureKeyVaultConnectionMethod.OAuth,
+      AzureKeyVaultConnectionMethod.ClientSecret
+    ],
     oauthClientId: INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_ID
   };
 };
@@ -111,68 +158,108 @@ export const validateAzureKeyVaultConnectionCredentials = async (config: TAzureK
|
|||||||
const { INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_ID, INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_SECRET, SITE_URL } =
|
const { INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_ID, INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_SECRET, SITE_URL } =
|
||||||
getConfig();
|
getConfig();
|
||||||
|
|
||||||
if (!INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_ID || !INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_SECRET) {
|
|
||||||
throw new InternalServerError({
|
|
||||||
message: `Azure ${getAppConnectionMethodName(method)} environment variables have not been configured`
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
let tokenResp: AxiosResponse<ExchangeCodeAzureResponse> | null = null;
|
|
||||||
let tokenError: AxiosError | null = null;
|
|
||||||
|
|
||||||
try {
|
|
||||||
tokenResp = await request.post<ExchangeCodeAzureResponse>(
|
|
||||||
IntegrationUrls.AZURE_TOKEN_URL.replace("common", inputCredentials.tenantId || "common"),
|
|
||||||
new URLSearchParams({
|
|
||||||
grant_type: "authorization_code",
|
|
||||||
code: inputCredentials.code,
|
|
||||||
scope: `openid offline_access https://vault.azure.net/.default`,
|
|
||||||
client_id: INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_ID,
|
|
||||||
client_secret: INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_SECRET,
|
|
||||||
redirect_uri: `${SITE_URL}/organization/app-connections/azure/oauth/callback`
|
|
||||||
})
|
|
||||||
);
|
|
||||||
} catch (e: unknown) {
|
|
||||||
if (e instanceof AxiosError) {
|
|
||||||
tokenError = e;
|
|
||||||
} else {
|
|
||||||
throw new BadRequestError({
|
|
||||||
message: `Unable to validate connection: verify credentials`
|
|
||||||
});
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if (tokenError) {
|
|
||||||
if (tokenError instanceof AxiosError) {
|
|
||||||
throw new BadRequestError({
|
|
||||||
message: `Failed to get access token: ${
|
|
||||||
(tokenError?.response?.data as { error_description?: string })?.error_description || "Unknown error"
|
|
||||||
}`
|
|
||||||
});
|
|
||||||
} else {
|
|
||||||
throw new InternalServerError({
|
|
||||||
message: "Failed to get access token"
|
|
||||||
});
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!tokenResp) {
|
|
||||||
throw new InternalServerError({
|
|
||||||
message: `Failed to get access token: Token was empty with no error`
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
switch (method) {
|
switch (method) {
|
||||||
case AzureKeyVaultConnectionMethod.OAuth:
|
case AzureKeyVaultConnectionMethod.OAuth:
|
||||||
|
if (!INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_ID || !INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_SECRET) {
|
||||||
|
throw new InternalServerError({
|
||||||
|
message: `Azure ${getAppConnectionMethodName(method)} environment variables have not been configured`
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
let tokenResp: AxiosResponse<ExchangeCodeAzureResponse> | null = null;
|
||||||
|
let tokenError: AxiosError | null = null;
|
||||||
|
const oauthCredentials = inputCredentials as { code: string; tenantId?: string };
|
||||||
|
try {
|
||||||
|
tokenResp = await request.post<ExchangeCodeAzureResponse>(
|
||||||
|
IntegrationUrls.AZURE_TOKEN_URL.replace("common", oauthCredentials.tenantId || "common"),
|
||||||
|
new URLSearchParams({
|
||||||
|
grant_type: "authorization_code",
|
||||||
|
code: oauthCredentials.code,
|
||||||
|
scope: `openid offline_access https://vault.azure.net/.default`,
|
||||||
|
client_id: INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_ID,
|
||||||
|
client_secret: INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_SECRET,
|
||||||
|
redirect_uri: `${SITE_URL}/organization/app-connections/azure/oauth/callback`
|
||||||
|
})
|
||||||
|
);
|
||||||
|
} catch (e: unknown) {
|
||||||
|
if (e instanceof AxiosError) {
|
||||||
|
tokenError = e;
|
||||||
|
} else {
|
||||||
|
throw new BadRequestError({
|
||||||
|
message: `Unable to validate connection: verify credentials`
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (tokenError) {
|
||||||
|
if (tokenError instanceof AxiosError) {
|
||||||
|
throw new BadRequestError({
|
||||||
|
message: `Failed to get access token: ${
|
||||||
|
(tokenError?.response?.data as { error_description?: string })?.error_description || "Unknown error"
|
||||||
|
}`
|
||||||
|
});
|
||||||
|
} else {
|
||||||
|
throw new InternalServerError({
|
||||||
|
message: "Failed to get access token"
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!tokenResp) {
|
||||||
|
throw new InternalServerError({
|
||||||
|
message: `Failed to get access token: Token was empty with no error`
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
return {
|
return {
|
||||||
tenantId: inputCredentials.tenantId,
|
tenantId: oauthCredentials.tenantId,
|
||||||
accessToken: tokenResp.data.access_token,
|
accessToken: tokenResp.data.access_token,
|
||||||
refreshToken: tokenResp.data.refresh_token,
|
refreshToken: tokenResp.data.refresh_token,
|
||||||
expiresAt: Date.now() + tokenResp.data.expires_in * 1000
|
expiresAt: Date.now() + tokenResp.data.expires_in * 1000
|
||||||
};
|
};
|
||||||
|
|
||||||
|
case AzureKeyVaultConnectionMethod.ClientSecret:
|
||||||
|
const { tenantId, clientId, clientSecret } = inputCredentials as {
|
||||||
|
tenantId: string;
|
||||||
|
clientId: string;
|
||||||
|
clientSecret: string;
|
||||||
|
};
|
||||||
|
|
||||||
|
try {
|
||||||
|
const { data: clientData } = await request.post<ExchangeCodeAzureResponse>(
|
||||||
|
IntegrationUrls.AZURE_TOKEN_URL.replace("common", tenantId || "common"),
|
||||||
|
new URLSearchParams({
|
||||||
|
grant_type: "client_credentials",
|
||||||
|
scope: `https://vault.azure.net/.default`,
|
||||||
|
client_id: clientId,
|
||||||
|
client_secret: clientSecret
|
||||||
|
})
|
||||||
|
);
|
||||||
|
|
||||||
|
return {
|
||||||
|
tenantId,
|
||||||
|
accessToken: clientData.access_token,
|
||||||
|
expiresAt: Date.now() + clientData.expires_in * 1000,
|
||||||
|
clientId,
|
||||||
|
clientSecret
|
||||||
|
};
|
||||||
|
} catch (e: unknown) {
|
||||||
|
if (e instanceof AxiosError) {
|
||||||
|
throw new BadRequestError({
|
||||||
|
message: `Failed to get access token: ${
|
||||||
|
(e?.response?.data as { error_description?: string })?.error_description || "Unknown error"
|
||||||
|
}`
|
||||||
|
});
|
||||||
|
} else {
|
||||||
|
throw new InternalServerError({
|
||||||
|
message: "Failed to get access token"
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
default:
|
default:
|
||||||
throw new InternalServerError({
|
throw new InternalServerError({
|
||||||
message: `Unhandled Azure connection method: ${method as AzureKeyVaultConnectionMethod}`
|
message: `Unhandled Azure Key Vault connection method: ${method as AzureKeyVaultConnectionMethod}`
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
@@ -22,6 +22,29 @@ export const AzureKeyVaultConnectionOAuthOutputCredentialsSchema = z.object({
|
|||||||
expiresAt: z.number()
|
expiresAt: z.number()
|
||||||
});
|
});
|
||||||
|
|
||||||
|
export const AzureKeyVaultConnectionClientSecretInputCredentialsSchema = z.object({
|
||||||
|
clientId: z
|
||||||
|
.string()
|
||||||
|
.uuid()
|
||||||
|
.trim()
|
||||||
|
.min(1, "Client ID required")
|
||||||
|
.max(50, "Client ID must be at most 50 characters long"),
|
||||||
|
clientSecret: z
|
||||||
|
.string()
|
||||||
|
.trim()
|
||||||
|
.min(1, "Client Secret required")
|
||||||
|
.max(50, "Client Secret must be at most 50 characters long"),
|
||||||
|
tenantId: z.string().uuid().trim().min(1, "Tenant ID required")
|
||||||
|
});
|
||||||
|
|
||||||
|
export const AzureKeyVaultConnectionClientSecretOutputCredentialsSchema = z.object({
|
||||||
|
clientId: z.string(),
|
||||||
|
clientSecret: z.string(),
|
||||||
|
tenantId: z.string(),
|
||||||
|
accessToken: z.string(),
|
||||||
|
expiresAt: z.number()
|
||||||
|
});
|
||||||
|
|
||||||
export const ValidateAzureKeyVaultConnectionCredentialsSchema = z.discriminatedUnion("method", [
|
export const ValidateAzureKeyVaultConnectionCredentialsSchema = z.discriminatedUnion("method", [
|
||||||
z.object({
|
z.object({
|
||||||
method: z
|
method: z
|
||||||
@@ -30,6 +53,14 @@ export const ValidateAzureKeyVaultConnectionCredentialsSchema = z.discriminatedU
|
|||||||
credentials: AzureKeyVaultConnectionOAuthInputCredentialsSchema.describe(
|
credentials: AzureKeyVaultConnectionOAuthInputCredentialsSchema.describe(
|
||||||
AppConnections.CREATE(AppConnection.AzureKeyVault).credentials
|
AppConnections.CREATE(AppConnection.AzureKeyVault).credentials
|
||||||
)
|
)
|
||||||
|
}),
|
||||||
|
z.object({
|
||||||
|
method: z
|
||||||
|
.literal(AzureKeyVaultConnectionMethod.ClientSecret)
|
||||||
|
.describe(AppConnections.CREATE(AppConnection.AzureKeyVault).method),
|
||||||
|
credentials: AzureKeyVaultConnectionClientSecretInputCredentialsSchema.describe(
|
||||||
|
AppConnections.CREATE(AppConnection.AzureKeyVault).credentials
|
||||||
|
)
|
||||||
})
|
})
|
||||||
]);
|
]);
|
||||||
|
|
||||||
@@ -39,9 +70,13 @@ export const CreateAzureKeyVaultConnectionSchema = ValidateAzureKeyVaultConnecti
|
|||||||
|
|
||||||
export const UpdateAzureKeyVaultConnectionSchema = z
|
export const UpdateAzureKeyVaultConnectionSchema = z
|
||||||
.object({
|
.object({
|
||||||
credentials: AzureKeyVaultConnectionOAuthInputCredentialsSchema.optional().describe(
|
credentials: z
|
||||||
AppConnections.UPDATE(AppConnection.AzureKeyVault).credentials
|
.union([
|
||||||
)
|
AzureKeyVaultConnectionOAuthInputCredentialsSchema,
|
||||||
|
AzureKeyVaultConnectionClientSecretInputCredentialsSchema
|
||||||
|
])
|
||||||
|
.optional()
|
||||||
|
.describe(AppConnections.UPDATE(AppConnection.AzureKeyVault).credentials)
|
||||||
})
|
})
|
||||||
.and(GenericUpdateAppConnectionFieldsSchema(AppConnection.AzureKeyVault));
|
.and(GenericUpdateAppConnectionFieldsSchema(AppConnection.AzureKeyVault));
|
||||||
|
|
||||||
@@ -55,6 +90,10 @@ export const AzureKeyVaultConnectionSchema = z.intersection(
|
|||||||
z.object({
|
z.object({
|
||||||
method: z.literal(AzureKeyVaultConnectionMethod.OAuth),
|
method: z.literal(AzureKeyVaultConnectionMethod.OAuth),
|
||||||
credentials: AzureKeyVaultConnectionOAuthOutputCredentialsSchema
|
credentials: AzureKeyVaultConnectionOAuthOutputCredentialsSchema
|
||||||
|
}),
|
||||||
|
z.object({
|
||||||
|
method: z.literal(AzureKeyVaultConnectionMethod.ClientSecret),
|
||||||
|
credentials: AzureKeyVaultConnectionClientSecretOutputCredentialsSchema
|
||||||
})
|
})
|
||||||
])
|
])
|
||||||
);
|
);
|
||||||
@@ -65,6 +104,13 @@ export const SanitizedAzureKeyVaultConnectionSchema = z.discriminatedUnion("meth
|
|||||||
credentials: AzureKeyVaultConnectionOAuthOutputCredentialsSchema.pick({
|
credentials: AzureKeyVaultConnectionOAuthOutputCredentialsSchema.pick({
|
||||||
tenantId: true
|
tenantId: true
|
||||||
})
|
})
|
||||||
|
}),
|
||||||
|
BaseAzureKeyVaultConnectionSchema.extend({
|
||||||
|
method: z.literal(AzureKeyVaultConnectionMethod.ClientSecret),
|
||||||
|
credentials: AzureKeyVaultConnectionClientSecretOutputCredentialsSchema.pick({
|
||||||
|
clientId: true,
|
||||||
|
tenantId: true
|
||||||
|
})
|
||||||
})
|
})
|
||||||
]);
|
]);
|
||||||
|
|
||||||
|
@@ -4,6 +4,7 @@ import { DiscriminativePick } from "@app/lib/types";
|
|||||||
|
|
||||||
import { AppConnection } from "../app-connection-enums";
|
import { AppConnection } from "../app-connection-enums";
|
||||||
import {
|
import {
|
||||||
|
AzureKeyVaultConnectionClientSecretOutputCredentialsSchema,
|
||||||
AzureKeyVaultConnectionOAuthOutputCredentialsSchema,
|
AzureKeyVaultConnectionOAuthOutputCredentialsSchema,
|
||||||
AzureKeyVaultConnectionSchema,
|
AzureKeyVaultConnectionSchema,
|
||||||
CreateAzureKeyVaultConnectionSchema,
|
CreateAzureKeyVaultConnectionSchema,
|
||||||
@@ -36,3 +37,7 @@ export type ExchangeCodeAzureResponse = {
|
|||||||
};
|
};
|
||||||
|
|
||||||
export type TAzureKeyVaultConnectionCredentials = z.infer<typeof AzureKeyVaultConnectionOAuthOutputCredentialsSchema>;
|
export type TAzureKeyVaultConnectionCredentials = z.infer<typeof AzureKeyVaultConnectionOAuthOutputCredentialsSchema>;
|
||||||
|
|
||||||
|
export type TAzureKeyVaultConnectionClientSecretCredentials = z.infer<
|
||||||
|
typeof AzureKeyVaultConnectionClientSecretOutputCredentialsSchema
|
||||||
|
>;
|
||||||
|
@@ -10,7 +10,8 @@ import {
   TCloudflareConnection,
   TCloudflareConnectionConfig,
   TCloudflarePagesProject,
-  TCloudflareWorkersScript
+  TCloudflareWorkersScript,
+  TCloudflareZone
 } from "./cloudflare-connection-types";

 export const getCloudflareConnectionListItem = () => {
@@ -66,6 +67,27 @@ export const listCloudflareWorkersScripts = async (
   }));
 };

+export const listCloudflareZones = async (appConnection: TCloudflareConnection): Promise<TCloudflareZone[]> => {
+  const {
+    credentials: { apiToken }
+  } = appConnection;
+
+  const { data } = await request.get<{ result: { name: string; id: string }[] }>(
+    `${IntegrationUrls.CLOUDFLARE_API_URL}/client/v4/zones`,
+    {
+      headers: {
+        Authorization: `Bearer ${apiToken}`,
+        Accept: "application/json"
+      }
+    }
+  );
+
+  return data.result.map((a) => ({
+    name: a.name,
+    id: a.id
+  }));
+};
+
 export const validateCloudflareConnectionCredentials = async (config: TCloudflareConnectionConfig) => {
   const { apiToken, accountId } = config.credentials;

@@ -2,7 +2,11 @@ import { logger } from "@app/lib/logger";
 import { OrgServiceActor } from "@app/lib/types";

 import { AppConnection } from "../app-connection-enums";
-import { listCloudflarePagesProjects, listCloudflareWorkersScripts } from "./cloudflare-connection-fns";
+import {
+  listCloudflarePagesProjects,
+  listCloudflareWorkersScripts,
+  listCloudflareZones
+} from "./cloudflare-connection-fns";
 import { TCloudflareConnection } from "./cloudflare-connection-types";

 type TGetAppConnectionFunc = (
@@ -16,7 +20,6 @@ export const cloudflareConnectionService = (getAppConnection: TGetAppConnectionF
     const appConnection = await getAppConnection(AppConnection.Cloudflare, connectionId, actor);
     try {
       const projects = await listCloudflarePagesProjects(appConnection);
-
       return projects;
     } catch (error) {
       logger.error(
@@ -30,9 +33,8 @@ export const cloudflareConnectionService = (getAppConnection: TGetAppConnectionF
   const listWorkersScripts = async (connectionId: string, actor: OrgServiceActor) => {
     const appConnection = await getAppConnection(AppConnection.Cloudflare, connectionId, actor);
     try {
-      const projects = await listCloudflareWorkersScripts(appConnection);
-
-      return projects;
+      const scripts = await listCloudflareWorkersScripts(appConnection);
+      return scripts;
     } catch (error) {
       logger.error(
         error,
@@ -42,8 +44,20 @@ export const cloudflareConnectionService = (getAppConnection: TGetAppConnectionF
     }
   };

+  const listZones = async (connectionId: string, actor: OrgServiceActor) => {
+    const appConnection = await getAppConnection(AppConnection.Cloudflare, connectionId, actor);
+    try {
+      const zones = await listCloudflareZones(appConnection);
+      return zones;
+    } catch (error) {
+      logger.error(error, `Failed to list Cloudflare Zones for Cloudflare connection [connectionId=${connectionId}]`);
+      return [];
+    }
+  };
+
   return {
     listPagesProjects,
-    listWorkersScripts
+    listWorkersScripts,
+    listZones
   };
 };
@@ -32,3 +32,8 @@ export type TCloudflarePagesProject = {
 export type TCloudflareWorkersScript = {
   id: string;
 };
+
+export type TCloudflareZone = {
+  id: string;
+  name: string;
+};
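For reference, the zone listing added above maps onto Cloudflare's public GET /client/v4/zones endpoint. A minimal standalone sketch using axios directly, assuming IntegrationUrls.CLOUDFLARE_API_URL resolves to https://api.cloudflare.com (error handling omitted):

import axios from "axios";

type TCloudflareZone = { id: string; name: string };

// List the zones visible to an API token, keeping only the id and name fields.
async function listZones(apiToken: string): Promise<TCloudflareZone[]> {
  const { data } = await axios.get<{ result: { id: string; name: string }[] }>(
    "https://api.cloudflare.com/client/v4/zones",
    { headers: { Authorization: `Bearer ${apiToken}`, Accept: "application/json" } }
  );

  return data.result.map((zone) => ({ id: zone.id, name: zone.name }));
}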
@@ -1,4 +1,5 @@
 import { createAppAuth } from "@octokit/auth-app";
+import { request } from "@octokit/request";
 import { AxiosError, AxiosRequestConfig, AxiosResponse } from "axios";
 import https from "https";
 import RE2 from "re2";
@@ -12,7 +13,6 @@ import { GatewayProxyProtocol, withGatewayProxy } from "@app/lib/gateway";
 import { logger } from "@app/lib/logger";
 import { blockLocalAndPrivateIpAddresses } from "@app/lib/validator";
 import { getAppConnectionMethodName } from "@app/services/app-connection/app-connection-fns";
-import { IntegrationUrls } from "@app/services/integration-auth/integration-list";

 import { AppConnection } from "../app-connection-enums";
 import { GitHubConnectionMethod } from "./github-connection-enums";
@@ -30,6 +30,23 @@ export const getGitHubConnectionListItem = () => {
   };
 };

+export const getGitHubInstanceApiUrl = async (config: {
+  credentials: Pick<TGitHubConnectionConfig["credentials"], "host" | "instanceType">;
+}) => {
+  const host = config.credentials.host || "github.com";
+
+  await blockLocalAndPrivateIpAddresses(host);
+
+  let apiBase: string;
+  if (config.credentials.instanceType === "server") {
+    apiBase = `${host}/api/v3`;
+  } else {
+    apiBase = `api.${host}`;
+  }
+
+  return apiBase;
+};
+
 export const requestWithGitHubGateway = async <T>(
   appConnection: { gatewayId?: string | null },
   gatewayService: Pick<TGatewayServiceFactory, "fnGetGatewayClientTlsByGatewayId">,
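The helper added in this hunk centralizes how the API base is derived from the connection's host and instanceType: GitHub Enterprise Server serves its REST API under <host>/api/v3, while cloud uses api.<host>. A condensed sketch of that selection (host validation omitted):

// Condensed version of the URL selection above.
function githubApiBase(host = "github.com", instanceType?: "cloud" | "server"): string {
  return instanceType === "server" ? `${host}/api/v3` : `api.${host}`;
}

console.log(githubApiBase()); // "api.github.com"
console.log(githubApiBase("github.example.com", "server")); // "github.example.com/api/v3"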
@@ -73,7 +90,10 @@ export const requestWithGitHubGateway = async <T>(
         return await httpRequest.request(finalRequestConfig);
       } catch (error) {
         const axiosError = error as AxiosError;
-        logger.error("Error during GitHub gateway request:", axiosError.message, axiosError.response?.data);
+        logger.error(
+          { message: axiosError.message, data: axiosError.response?.data },
+          "Error during GitHub gateway request:"
+        );
         throw error;
       }
     },
@@ -112,7 +132,10 @@ export const getGitHubAppAuthToken = async (appConnection: TGitHubConnection) =>
   const appAuth = createAppAuth({
     appId,
     privateKey: appPrivateKey,
-    installationId: appConnection.credentials.installationId
+    installationId: appConnection.credentials.installationId,
+    request: request.defaults({
+      baseUrl: `https://${await getGitHubInstanceApiUrl(appConnection)}`
+    })
   });

   const { token } = await appAuth({ type: "installation" });
@@ -141,7 +164,7 @@ export const makePaginatedGitHubRequest = async <T, R = T[]>(

   const token =
     method === GitHubConnectionMethod.OAuth ? credentials.accessToken : await getGitHubAppAuthToken(appConnection);
-  let url: string | null = `https://api.${credentials.host || "github.com"}${path}`;
+  let url: string | null = `https://${await getGitHubInstanceApiUrl(appConnection)}${path}`;
   let results: T[] = [];
   let i = 0;

@@ -325,6 +348,8 @@ export const validateGitHubConnectionCredentials = async (
       });
     }
   } catch (e: unknown) {
+    logger.error(e, "Unable to verify GitHub connection");
+
     if (e instanceof BadRequestError) {
       throw e;
     }
@@ -355,7 +380,7 @@ export const validateGitHubConnectionCredentials = async (
       };
     }[];
   }>(config, gatewayService, {
-    url: IntegrationUrls.GITHUB_USER_INSTALLATIONS.replace("api.github.com", `api.${host}`),
+    url: `https://${await getGitHubInstanceApiUrl(config)}/user/installations`,
     headers: {
       Accept: "application/json",
       Authorization: `Bearer ${tokenResp.data.access_token}`,
@@ -377,11 +402,15 @@ export const validateGitHubConnectionCredentials = async (
   switch (method) {
     case GitHubConnectionMethod.App:
       return {
-        installationId: credentials.installationId
+        installationId: credentials.installationId,
+        instanceType: credentials.instanceType,
+        host: credentials.host
       };
     case GitHubConnectionMethod.OAuth:
       return {
-        accessToken: tokenResp.data.access_token
+        accessToken: tokenResp.data.access_token,
+        instanceType: credentials.instanceType,
+        host: credentials.host
       };
     default:
       throw new InternalServerError({
@@ -10,26 +10,59 @@ import {

 import { GitHubConnectionMethod } from "./github-connection-enums";

-export const GitHubConnectionOAuthInputCredentialsSchema = z.object({
-  code: z.string().trim().min(1, "OAuth code required"),
-  host: z.string().trim().optional()
-});
+export const GitHubConnectionOAuthInputCredentialsSchema = z.union([
+  z.object({
+    code: z.string().trim().min(1, "OAuth code required"),
+    instanceType: z.literal("server"),
+    host: z.string().trim().min(1, "Host is required for server instance type")
+  }),
+  z.object({
+    code: z.string().trim().min(1, "OAuth code required"),
+    instanceType: z.literal("cloud").optional(),
+    host: z.string().trim().optional()
+  })
+]);

-export const GitHubConnectionAppInputCredentialsSchema = z.object({
-  code: z.string().trim().min(1, "GitHub App code required"),
-  installationId: z.string().min(1, "GitHub App Installation ID required"),
-  host: z.string().trim().optional()
-});
+export const GitHubConnectionAppInputCredentialsSchema = z.union([
+  z.object({
+    code: z.string().trim().min(1, "GitHub App code required"),
+    installationId: z.string().min(1, "GitHub App Installation ID required"),
+    instanceType: z.literal("server"),
+    host: z.string().trim().min(1, "Host is required for server instance type")
+  }),
+  z.object({
+    code: z.string().trim().min(1, "GitHub App code required"),
+    installationId: z.string().min(1, "GitHub App Installation ID required"),
+    instanceType: z.literal("cloud").optional(),
+    host: z.string().trim().optional()
+  })
+]);

-export const GitHubConnectionOAuthOutputCredentialsSchema = z.object({
-  accessToken: z.string(),
-  host: z.string().trim().optional()
-});
+export const GitHubConnectionOAuthOutputCredentialsSchema = z.union([
+  z.object({
+    accessToken: z.string(),
+    instanceType: z.literal("server"),
+    host: z.string().trim().min(1)
+  }),
+  z.object({
+    accessToken: z.string(),
+    instanceType: z.literal("cloud").optional(),
+    host: z.string().trim().optional()
+  })
+]);

-export const GitHubConnectionAppOutputCredentialsSchema = z.object({
-  installationId: z.string(),
-  host: z.string().trim().optional()
-});
+export const GitHubConnectionAppOutputCredentialsSchema = z.union([
+  z.object({
+    installationId: z.string(),
+    instanceType: z.literal("server"),
+    host: z.string().trim().min(1)
+  }),
+  z.object({
+    installationId: z.string(),
+    instanceType: z.literal("cloud").optional(),
+    host: z.string().trim().optional()
+  })
+]);

 export const ValidateGitHubConnectionCredentialsSchema = z.discriminatedUnion("method", [
   z.object({
@@ -84,11 +117,17 @@ export const GitHubConnectionSchema = z.intersection(
 export const SanitizedGitHubConnectionSchema = z.discriminatedUnion("method", [
   BaseGitHubConnectionSchema.extend({
     method: z.literal(GitHubConnectionMethod.App),
-    credentials: GitHubConnectionAppOutputCredentialsSchema.pick({})
+    credentials: z.object({
+      instanceType: z.union([z.literal("server"), z.literal("cloud")]).optional(),
+      host: z.string().optional()
+    })
   }),
   BaseGitHubConnectionSchema.extend({
     method: z.literal(GitHubConnectionMethod.OAuth),
-    credentials: GitHubConnectionOAuthOutputCredentialsSchema.pick({})
+    credentials: z.object({
+      instanceType: z.union([z.literal("server"), z.literal("cloud")]).optional(),
+      host: z.string().optional()
+    })
   })
 ]);
@@ -222,6 +222,37 @@ export const validateGitLabConnectionCredentials = async (config: TGitLabConnect
   return inputCredentials;
 };

+export const getGitLabConnectionClient = async (
+  appConnection: TGitLabConnection,
+  appConnectionDAL: Pick<TAppConnectionDALFactory, "updateById">,
+  kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">
+) => {
+  let { accessToken } = appConnection.credentials;
+
+  if (
+    appConnection.method === GitLabConnectionMethod.OAuth &&
+    appConnection.credentials.refreshToken &&
+    new Date(appConnection.credentials.expiresAt) < new Date()
+  ) {
+    accessToken = await refreshGitLabToken(
+      appConnection.credentials.refreshToken,
+      appConnection.id,
+      appConnection.orgId,
+      appConnectionDAL,
+      kmsService,
+      appConnection.credentials.instanceUrl
+    );
+  }
+
+  const client = await getGitLabClient(
+    accessToken,
+    appConnection.credentials.instanceUrl,
+    appConnection.method === GitLabConnectionMethod.OAuth
+  );
+
+  return client;
+};
+
 export const listGitLabProjects = async ({
   appConnection,
   appConnectionDAL,
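A minimal usage sketch for the getGitLabConnectionClient helper added above. This is not part of the diff; the appConnection, DAL, and KMS instances are assumed to be supplied by the existing app-connection service wiring.

// Hypothetical caller (sketch): the helper transparently refreshes an expired
// OAuth access token before handing back a ready-to-use GitLab client.
const withGitLabClient = async (
  appConnection: TGitLabConnection,
  appConnectionDAL: Pick<TAppConnectionDALFactory, "updateById">,
  kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">
) => {
  // Same call listGitLabProjects performs internally; token refresh is invisible to the caller.
  const client = await getGitLabConnectionClient(appConnection, appConnectionDAL, kmsService);
  return client;
};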
backend/src/services/app-connection/netlify/index.ts (new file, 4 lines)
@@ -0,0 +1,4 @@
+export * from "./netlify-connection-constants";
+export * from "./netlify-connection-fns";
+export * from "./netlify-connection-schemas";
+export * from "./netlify-connection-types";
@@ -0,0 +1,3 @@
+export enum NetlifyConnectionMethod {
+  AccessToken = "access-token"
+}
@@ -0,0 +1,35 @@
+/* eslint-disable no-await-in-loop */
+import { AxiosError } from "axios";
+
+import { BadRequestError } from "@app/lib/errors";
+import { AppConnection } from "@app/services/app-connection/app-connection-enums";
+
+import { NetlifyConnectionMethod } from "./netlify-connection-constants";
+import { NetlifyPublicAPI } from "./netlify-connection-public-client";
+import { TNetlifyConnectionConfig } from "./netlify-connection-types";
+
+export const getNetlifyConnectionListItem = () => {
+  return {
+    name: "Netlify" as const,
+    app: AppConnection.Netlify as const,
+    methods: Object.values(NetlifyConnectionMethod)
+  };
+};
+
+export const validateNetlifyConnectionCredentials = async (config: TNetlifyConnectionConfig) => {
+  try {
+    await NetlifyPublicAPI.healthcheck(config);
+  } catch (error: unknown) {
+    if (error instanceof AxiosError) {
+      throw new BadRequestError({
+        message: `Failed to validate credentials: ${error.message || "Unknown error"}`
+      });
+    }
+
+    throw new BadRequestError({
+      message: "Unable to validate connection - verify credentials"
+    });
+  }
+
+  return config.credentials;
+};
@@ -0,0 +1,261 @@
+/* eslint-disable @typescript-eslint/no-unsafe-call */
+/* eslint-disable no-await-in-loop */
+/* eslint-disable class-methods-use-this */
+import { AxiosInstance, AxiosRequestConfig, AxiosResponse, HttpStatusCode, isAxiosError } from "axios";
+
+import { createRequestClient } from "@app/lib/config/request";
+import { IntegrationUrls } from "@app/services/integration-auth/integration-list";
+
+import { NetlifyConnectionMethod } from "./netlify-connection-constants";
+import { TNetlifyAccount, TNetlifyConnectionConfig, TNetlifySite, TNetlifyVariable } from "./netlify-connection-types";
+
+export function getNetlifyAuthHeaders(connection: TNetlifyConnectionConfig): Record<string, string> {
+  switch (connection.method) {
+    case NetlifyConnectionMethod.AccessToken:
+      return {
+        Authorization: `Bearer ${connection.credentials.accessToken}`
+      };
+    default:
+      throw new Error(`Unsupported Netlify connection method`);
+  }
+}
+
+export function getNetlifyRatelimiter(response: AxiosResponse): {
+  maxAttempts: number;
+  isRatelimited: boolean;
+  wait: () => Promise<void>;
+} {
+  const wait = (seconds: number = 60) => {
+    return new Promise<void>((res) => {
+      setTimeout(res, seconds * 1000);
+    });
+  };
+
+  let remaining = parseInt(response.headers["X-RateLimit-Remaining"] as string, 10);
+  let isRatelimited = response.status === HttpStatusCode.TooManyRequests;
+
+  if (isRatelimited) {
+    if (Math.round(remaining) > 0) {
+      isRatelimited = true;
+      remaining += 1; // Jitter to ensure we wait at least 1 second
+    } else {
+      remaining = 60;
+    }
+  }
+
+  return {
+    isRatelimited,
+    wait: () => wait(remaining),
+    maxAttempts: 3
+  };
+}
+
+type NetlifyParams = {
+  account_id: string;
+  context_name?: string;
+  site_id?: string;
+};
+
+class NetlifyPublicClient {
+  private client: AxiosInstance;
+
+  constructor() {
+    this.client = createRequestClient({
+      baseURL: `${IntegrationUrls.NETLIFY_API_URL}/api/v1`,
+      headers: {
+        "Content-Type": "application/json"
+      }
+    });
+  }
+
+  async send<T>(connection: TNetlifyConnectionConfig, config: AxiosRequestConfig, retryAttempt = 0): Promise<T> {
+    const response = await this.client.request<T>({
+      ...config,
+      timeout: 1000 * 60, // 60 seconds timeout
+      validateStatus: (status) => (status >= 200 && status < 300) || status === HttpStatusCode.TooManyRequests,
+      headers: getNetlifyAuthHeaders(connection)
+    });
+    const limiter = getNetlifyRatelimiter(response);
+
+    if (limiter.isRatelimited && retryAttempt <= limiter.maxAttempts) {
+      await limiter.wait();
+      return this.send(connection, config, retryAttempt + 1);
+    }
+
+    return response.data;
+  }
+
+  healthcheck(connection: TNetlifyConnectionConfig) {
+    switch (connection.method) {
+      case NetlifyConnectionMethod.AccessToken:
+        return this.getNetlifyAccounts(connection);
+      default:
+        throw new Error(`Unsupported Netlify connection method`);
+    }
+  }
+
+  async getVariables(
+    connection: TNetlifyConnectionConfig,
+    { account_id, ...params }: NetlifyParams,
+    limit: number = 50,
+    page: number = 1
+  ) {
+    const res = await this.send<TNetlifyVariable[]>(connection, {
+      method: "GET",
+      url: `/accounts/${account_id}/env`,
+      params: {
+        ...params,
+        limit,
+        page
+      }
+    });
+
+    return res;
+  }
+
+  async createVariable(
+    connection: TNetlifyConnectionConfig,
+    { account_id, ...params }: NetlifyParams,
+    ...variables: TNetlifyVariable[]
+  ) {
+    const res = await this.send<TNetlifyVariable>(connection, {
+      method: "POST",
+      url: `/accounts/${account_id}/env`,
+      data: variables,
+      params
+    });
+
+    return res;
+  }
+
+  async updateVariableValue(
+    connection: TNetlifyConnectionConfig,
+    { account_id, ...params }: NetlifyParams,
+    variable: TNetlifyVariable
+  ) {
+    const res = await this.send<TNetlifyVariable>(connection, {
+      method: "PATCH",
+      url: `/accounts/${account_id}/env/${variable.key}`,
+      data: variable,
+      params
+    });
+
+    return res;
+  }
+
+  async updateVariable(
+    connection: TNetlifyConnectionConfig,
+    { account_id, ...params }: NetlifyParams,
+    variable: TNetlifyVariable
+  ) {
+    const res = await this.send<TNetlifyVariable>(connection, {
+      method: "PUT",
+      url: `/accounts/${account_id}/env/${variable.key}`,
+      data: variable,
+      params
+    });
+
+    return res;
+  }
+
+  async getVariable(
+    connection: TNetlifyConnectionConfig,
+    { account_id, ...params }: NetlifyParams,
+    variable: Pick<TNetlifyVariable, "key">
+  ) {
+    try {
+      const res = await this.send<TNetlifyVariable>(connection, {
+        method: "GET",
+        url: `/accounts/${account_id}/env/${variable.key}`,
+        params
+      });
+
+      return res;
+    } catch (error) {
+      if (isAxiosError(error) && error.response?.status === HttpStatusCode.NotFound) {
+        return null;
+      }
+
+      throw error;
+    }
+  }
+
+  async upsertVariable(connection: TNetlifyConnectionConfig, params: NetlifyParams, variable: TNetlifyVariable) {
+    const res = await this.getVariable(connection, params, variable);
+
+    if (!res) {
+      return this.createVariable(connection, params, variable);
+    }
+
+    if (res.is_secret) {
+      await this.deleteVariable(connection, params, variable);
+      return this.createVariable(connection, params, variable);
+    }
+
+    return this.updateVariable(connection, params, variable);
+  }
+
+  async deleteVariable(
+    connection: TNetlifyConnectionConfig,
+    { account_id, ...params }: NetlifyParams,
+    variable: Pick<TNetlifyVariable, "key">
+  ) {
+    try {
+      const res = await this.send<TNetlifyVariable>(connection, {
+        method: "DELETE",
+        url: `/accounts/${account_id}/env/${variable.key}`,
+        params
+      });
+
+      return res;
+    } catch (error) {
+      if (isAxiosError(error) && error.response?.status === HttpStatusCode.NotFound) {
+        return null;
+      }
+
+      throw error;
+    }
+  }
+
+  async deleteVariableValue(
+    connection: TNetlifyConnectionConfig,
+    { account_id, value_id, ...params }: NetlifyParams & { value_id: string },
+    variable: Pick<TNetlifyVariable, "key" | "id">
+  ) {
+    try {
+      const res = await this.send<TNetlifyVariable>(connection, {
+        method: "DELETE",
+        url: `/accounts/${account_id}/${variable.key}/value/${value_id}`,
+        params
+      });
+
+      return res;
+    } catch (error) {
+      if (isAxiosError(error) && error.response?.status === HttpStatusCode.NotFound) {
+        return null;
+      }
+
+      throw error;
+    }
+  }
+
+  async getSites(connection: TNetlifyConnectionConfig, accountId: string) {
+    const res = await this.send<TNetlifySite[]>(connection, {
+      method: "GET",
+      url: `/${accountId}/sites`
+    });
+
+    return res;
+  }
+
+  async getNetlifyAccounts(connection: TNetlifyConnectionConfig) {
+    const res = await this.send<TNetlifyAccount[]>(connection, {
+      method: "GET",
+      url: `/accounts`
+    });
+
+    return res;
+  }
+}
+
+export const NetlifyPublicAPI = new NetlifyPublicClient();
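A short usage sketch of the NetlifyPublicAPI singleton defined above. This is not part of the diff; the account, site, and token values are placeholders, and the connection object mirrors the TNetlifyConnectionConfig shape declared later in this diff.

// Sketch: create-or-update an environment variable scoped to one site.
// Secret values are deleted and recreated rather than patched, per upsertVariable above.
const connection = {
  app: AppConnection.Netlify,
  method: NetlifyConnectionMethod.AccessToken,
  credentials: { accessToken: "<netlify-personal-access-token>" },
  orgId: "<org-id>"
} as TNetlifyConnectionConfig;

await NetlifyPublicAPI.upsertVariable(
  connection,
  { account_id: "<account-id>", site_id: "<site-id>" },
  { key: "DATABASE_URL", values: [{ context: "all", value: "postgres://..." }] }
);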
@@ -0,0 +1,67 @@
+import z from "zod";
+
+import { AppConnections } from "@app/lib/api-docs";
+import { AppConnection } from "@app/services/app-connection/app-connection-enums";
+import {
+  BaseAppConnectionSchema,
+  GenericCreateAppConnectionFieldsSchema,
+  GenericUpdateAppConnectionFieldsSchema
+} from "@app/services/app-connection/app-connection-schemas";
+
+import { NetlifyConnectionMethod } from "./netlify-connection-constants";
+
+export const NetlifyConnectionMethodSchema = z
+  .nativeEnum(NetlifyConnectionMethod)
+  .describe(AppConnections.CREATE(AppConnection.Netlify).method);
+
+export const NetlifyConnectionAccessTokenCredentialsSchema = z.object({
+  accessToken: z
+    .string()
+    .trim()
+    .min(1, "Access Token required")
+    .max(255)
+    .describe(AppConnections.CREDENTIALS.NETLIFY.accessToken)
+});
+
+const BaseNetlifyConnectionSchema = BaseAppConnectionSchema.extend({
+  app: z.literal(AppConnection.Netlify)
+});
+
+export const NetlifyConnectionSchema = BaseNetlifyConnectionSchema.extend({
+  method: NetlifyConnectionMethodSchema,
+  credentials: NetlifyConnectionAccessTokenCredentialsSchema
+});
+
+export const SanitizedNetlifyConnectionSchema = z.discriminatedUnion("method", [
+  BaseNetlifyConnectionSchema.extend({
+    method: NetlifyConnectionMethodSchema,
+    credentials: NetlifyConnectionAccessTokenCredentialsSchema.pick({})
+  })
+]);
+
+export const ValidateNetlifyConnectionCredentialsSchema = z.discriminatedUnion("method", [
+  z.object({
+    method: NetlifyConnectionMethodSchema,
+    credentials: NetlifyConnectionAccessTokenCredentialsSchema.describe(
+      AppConnections.CREATE(AppConnection.Netlify).credentials
+    )
+  })
+]);
+
+export const CreateNetlifyConnectionSchema = ValidateNetlifyConnectionCredentialsSchema.and(
+  GenericCreateAppConnectionFieldsSchema(AppConnection.Netlify)
+);
+
+export const UpdateNetlifyConnectionSchema = z
+  .object({
+    credentials: NetlifyConnectionAccessTokenCredentialsSchema.optional().describe(
+      AppConnections.UPDATE(AppConnection.Netlify).credentials
+    )
+  })
+  .and(GenericUpdateAppConnectionFieldsSchema(AppConnection.Netlify));
+
+export const NetlifyConnectionListItemSchema = z.object({
+  name: z.literal("Netlify"),
+  app: z.literal(AppConnection.Netlify),
+  methods: z.nativeEnum(NetlifyConnectionMethod).array()
+});
@@ -0,0 +1,42 @@
+import { logger } from "@app/lib/logger";
+import { OrgServiceActor } from "@app/lib/types";
+
+import { AppConnection } from "../app-connection-enums";
+import { NetlifyPublicAPI } from "./netlify-connection-public-client";
+import { TNetlifyConnection } from "./netlify-connection-types";
+
+type TGetAppConnectionFunc = (
+  app: AppConnection,
+  connectionId: string,
+  actor: OrgServiceActor
+) => Promise<TNetlifyConnection>;
+
+// eslint-disable-next-line @typescript-eslint/no-unused-vars
+export const netlifyConnectionService = (getAppConnection: TGetAppConnectionFunc) => {
+  const listAccounts = async (connectionId: string, actor: OrgServiceActor) => {
+    const appConnection = await getAppConnection(AppConnection.Netlify, connectionId, actor);
+    try {
+      const accounts = await NetlifyPublicAPI.getNetlifyAccounts(appConnection);
+      return accounts;
+    } catch (error) {
+      logger.error(error, "Failed to list accounts on Netlify");
+      return [];
+    }
+  };
+
+  const listSites = async (connectionId: string, actor: OrgServiceActor, accountId: string) => {
+    const appConnection = await getAppConnection(AppConnection.Netlify, connectionId, actor);
+    try {
+      const sites = await NetlifyPublicAPI.getSites(appConnection, accountId);
+      return sites;
+    } catch (error) {
+      logger.error(error, "Failed to list sites on Netlify");
+      return [];
+    }
+  };
+
+  return {
+    listAccounts,
+    listSites
+  };
+};
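A sketch of how the factory above might be wired; getAppConnection, connectionId, and actor are assumed to be provided by the surrounding app-connection service layer and are not part of this diff.

// Sketch: list the sites of the first Netlify account reachable through a connection.
const netlify = netlifyConnectionService(getAppConnection);

const listSitesForFirstAccount = async (connectionId: string, actor: OrgServiceActor) => {
  const accounts = await netlify.listAccounts(connectionId, actor); // returns [] on failure, per the catch above
  if (!accounts.length) return [];
  return netlify.listSites(connectionId, actor, accounts[0].id);
};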
@@ -0,0 +1,51 @@
+import z from "zod";
+
+import { DiscriminativePick } from "@app/lib/types";
+
+import { AppConnection } from "../app-connection-enums";
+import {
+  CreateNetlifyConnectionSchema,
+  NetlifyConnectionSchema,
+  ValidateNetlifyConnectionCredentialsSchema
+} from "./netlify-connection-schemas";
+
+export type TNetlifyConnection = z.infer<typeof NetlifyConnectionSchema>;
+
+export type TNetlifyConnectionInput = z.infer<typeof CreateNetlifyConnectionSchema> & {
+  app: AppConnection.Netlify;
+};
+
+export type TValidateNetlifyConnectionCredentialsSchema = typeof ValidateNetlifyConnectionCredentialsSchema;
+
+export type TNetlifyConnectionConfig = DiscriminativePick<TNetlifyConnection, "method" | "app" | "credentials"> & {
+  orgId: string;
+};
+
+export type TNetlifyVariable = {
+  key: string;
+  id?: string; // ID of the variable (present in responses)
+  created_at?: string;
+  updated_at?: string;
+  is_secret?: boolean;
+  scopes?: ("builds" | "functions" | "runtime" | "post_processing")[];
+  values: TNetlifyVariableValue[];
+};
+
+export type TNetlifyVariableValue = {
+  id?: string;
+  context?: string; // "all", "dev", "branch-deploy", etc.
+  value?: string; // Omitted in response if `is_secret` is true
+  site_id?: string; // Optional: overrides at site-level
+  created_at?: string;
+  updated_at?: string;
+};
+
+export type TNetlifyAccount = {
+  id: string;
+  name: string;
+};
+
+export type TNetlifySite = {
+  id: string;
+  name: string;
+};
@@ -1,3 +1,4 @@
 export enum AcmeDnsProvider {
-  Route53 = "route53"
+  Route53 = "route53",
+  Cloudflare = "cloudflare"
 }
@@ -1,19 +1,17 @@
-import { ChangeResourceRecordSetsCommand, Route53Client } from "@aws-sdk/client-route-53";
 import * as x509 from "@peculiar/x509";
 import acme from "acme-client";

 import { TableName } from "@app/db/schemas";
-import { CustomAWSHasher } from "@app/lib/aws/hashing";
 import { crypto } from "@app/lib/crypto/cryptography";
 import { BadRequestError, CryptographyError, NotFoundError } from "@app/lib/errors";
 import { OrgServiceActor } from "@app/lib/types";
 import { blockLocalAndPrivateIpAddresses } from "@app/lib/validator";
 import { TAppConnectionDALFactory } from "@app/services/app-connection/app-connection-dal";
-import { AppConnection, AWSRegion } from "@app/services/app-connection/app-connection-enums";
+import { AppConnection } from "@app/services/app-connection/app-connection-enums";
 import { decryptAppConnection } from "@app/services/app-connection/app-connection-fns";
 import { TAppConnectionServiceFactory } from "@app/services/app-connection/app-connection-service";
-import { getAwsConnectionConfig } from "@app/services/app-connection/aws/aws-connection-fns";
-import { TAwsConnection, TAwsConnectionConfig } from "@app/services/app-connection/aws/aws-connection-types";
+import { TAwsConnection } from "@app/services/app-connection/aws/aws-connection-types";
+import { TCloudflareConnection } from "@app/services/app-connection/cloudflare/cloudflare-connection-types";
 import { TCertificateBodyDALFactory } from "@app/services/certificate/certificate-body-dal";
 import { TCertificateDALFactory } from "@app/services/certificate/certificate-dal";
 import { TCertificateSecretDALFactory } from "@app/services/certificate/certificate-secret-dal";
@@ -39,6 +37,8 @@ import {
   TCreateAcmeCertificateAuthorityDTO,
   TUpdateAcmeCertificateAuthorityDTO
 } from "./acme-certificate-authority-types";
+import { cloudflareDeleteTxtRecord, cloudflareInsertTxtRecord } from "./dns-providers/cloudflare";
+import { route53DeleteTxtRecord, route53InsertTxtRecord } from "./dns-providers/route54";

 type TAcmeCertificateAuthorityFnsDeps = {
   appConnectionDAL: Pick<TAppConnectionDALFactory, "findById">;
@@ -95,74 +95,6 @@ export const castDbEntryToAcmeCertificateAuthority = (
   };
 };

-export const route53InsertTxtRecord = async (
-  connection: TAwsConnectionConfig,
-  hostedZoneId: string,
-  domain: string,
-  value: string
-) => {
-  const config = await getAwsConnectionConfig(connection, AWSRegion.US_WEST_1); // REGION is irrelevant because Route53 is global
-  const route53Client = new Route53Client({
-    sha256: CustomAWSHasher,
-    useFipsEndpoint: crypto.isFipsModeEnabled(),
-    credentials: config.credentials!,
-    region: config.region
-  });
-
-  const command = new ChangeResourceRecordSetsCommand({
-    HostedZoneId: hostedZoneId,
-    ChangeBatch: {
-      Comment: "Set ACME challenge TXT record",
-      Changes: [
-        {
-          Action: "UPSERT",
-          ResourceRecordSet: {
-            Name: domain,
-            Type: "TXT",
-            TTL: 30,
-            ResourceRecords: [{ Value: value }]
-          }
-        }
-      ]
-    }
-  });
-
-  await route53Client.send(command);
-};
-
-export const route53DeleteTxtRecord = async (
-  connection: TAwsConnectionConfig,
-  hostedZoneId: string,
-  domain: string,
-  value: string
-) => {
-  const config = await getAwsConnectionConfig(connection, AWSRegion.US_WEST_1); // REGION is irrelevant because Route53 is global
-  const route53Client = new Route53Client({
-    credentials: config.credentials!,
-    region: config.region
-  });
-
-  const command = new ChangeResourceRecordSetsCommand({
-    HostedZoneId: hostedZoneId,
-    ChangeBatch: {
-      Comment: "Delete ACME challenge TXT record",
-      Changes: [
-        {
-          Action: "DELETE",
-          ResourceRecordSet: {
-            Name: domain,
-            Type: "TXT",
-            TTL: 30,
-            ResourceRecords: [{ Value: value }]
-          }
-        }
-      ]
-    }
-  });
-
-  await route53Client.send(command);
-};
-
 export const AcmeCertificateAuthorityFns = ({
   appConnectionDAL,
   appConnectionService,
@@ -209,6 +141,12 @@ export const AcmeCertificateAuthorityFns = ({
     });
   }

+  if (dnsProviderConfig.provider === AcmeDnsProvider.Cloudflare && appConnection.app !== AppConnection.Cloudflare) {
+    throw new BadRequestError({
+      message: `App connection with ID '${dnsAppConnectionId}' is not a Cloudflare connection`
+    });
+  }
+
   // validates permission to connect
   await appConnectionService.connectAppConnectionById(appConnection.app as AppConnection, dnsAppConnectionId, actor);

@@ -289,6 +227,15 @@ export const AcmeCertificateAuthorityFns = ({
     });
   }

+  if (
+    dnsProviderConfig.provider === AcmeDnsProvider.Cloudflare &&
+    appConnection.app !== AppConnection.Cloudflare
+  ) {
+    throw new BadRequestError({
+      message: `App connection with ID '${dnsAppConnectionId}' is not a Cloudflare connection`
+    });
+  }
+
   // validates permission to connect
   await appConnectionService.connectAppConnectionById(
     appConnection.app as AppConnection,
@@ -443,26 +390,56 @@ export const AcmeCertificateAuthorityFns = ({
       const recordName = `_acme-challenge.${authz.identifier.value}`; // e.g., "_acme-challenge.example.com"
       const recordValue = `"${keyAuthorization}"`; // must be double quoted

-      if (acmeCa.configuration.dnsProviderConfig.provider === AcmeDnsProvider.Route53) {
-        await route53InsertTxtRecord(
-          connection as TAwsConnection,
-          acmeCa.configuration.dnsProviderConfig.hostedZoneId,
-          recordName,
-          recordValue
-        );
-      }
+      switch (acmeCa.configuration.dnsProviderConfig.provider) {
+        case AcmeDnsProvider.Route53: {
+          await route53InsertTxtRecord(
+            connection as TAwsConnection,
+            acmeCa.configuration.dnsProviderConfig.hostedZoneId,
+            recordName,
+            recordValue
+          );
+          break;
+        }
+        case AcmeDnsProvider.Cloudflare: {
+          await cloudflareInsertTxtRecord(
+            connection as TCloudflareConnection,
+            acmeCa.configuration.dnsProviderConfig.hostedZoneId,
+            recordName,
+            recordValue
+          );
+          break;
+        }
+        default: {
+          throw new Error(`Unsupported DNS provider: ${acmeCa.configuration.dnsProviderConfig.provider as string}`);
+        }
+      }
     },
     challengeRemoveFn: async (authz, challenge, keyAuthorization) => {
       const recordName = `_acme-challenge.${authz.identifier.value}`; // e.g., "_acme-challenge.example.com"
       const recordValue = `"${keyAuthorization}"`; // must be double quoted

-      if (acmeCa.configuration.dnsProviderConfig.provider === AcmeDnsProvider.Route53) {
-        await route53DeleteTxtRecord(
-          connection as TAwsConnection,
-          acmeCa.configuration.dnsProviderConfig.hostedZoneId,
-          recordName,
-          recordValue
-        );
-      }
+      switch (acmeCa.configuration.dnsProviderConfig.provider) {
+        case AcmeDnsProvider.Route53: {
+          await route53DeleteTxtRecord(
+            connection as TAwsConnection,
+            acmeCa.configuration.dnsProviderConfig.hostedZoneId,
+            recordName,
+            recordValue
+          );
+          break;
+        }
+        case AcmeDnsProvider.Cloudflare: {
+          await cloudflareDeleteTxtRecord(
+            connection as TCloudflareConnection,
+            acmeCa.configuration.dnsProviderConfig.hostedZoneId,
+            recordName,
+            recordValue
+          );
+          break;
+        }
+        default: {
+          throw new Error(`Unsupported DNS provider: ${acmeCa.configuration.dnsProviderConfig.provider as string}`);
+        }
+      }
     }
   });
@@ -0,0 +1,109 @@
+import axios from "axios";
+
+import { request } from "@app/lib/config/request";
+import { TCloudflareConnectionConfig } from "@app/services/app-connection/cloudflare/cloudflare-connection-types";
+import { IntegrationUrls } from "@app/services/integration-auth/integration-list";
+
+export const cloudflareInsertTxtRecord = async (
+  connection: TCloudflareConnectionConfig,
+  hostedZoneId: string,
+  domain: string,
+  value: string
+) => {
+  const {
+    credentials: { apiToken }
+  } = connection;
+
+  try {
+    await request.post(
+      `${IntegrationUrls.CLOUDFLARE_API_URL}/client/v4/zones/${encodeURIComponent(hostedZoneId)}/dns_records`,
+      {
+        type: "TXT",
+        name: domain,
+        content: value,
+        ttl: 60,
+        proxied: false
+      },
+      {
+        headers: {
+          Authorization: `Bearer ${apiToken}`,
+          "Content-Type": "application/json",
+          Accept: "application/json"
+        }
+      }
+    );
+  } catch (error) {
+    if (axios.isAxiosError(error)) {
+      const firstErrorMessage = (
+        error.response?.data as {
+          errors?: { message: string }[];
+        }
+      )?.errors?.[0]?.message;
+      if (firstErrorMessage) {
+        throw new Error(firstErrorMessage);
+      }
+    }
+    throw error;
+  }
+};
+
+export const cloudflareDeleteTxtRecord = async (
+  connection: TCloudflareConnectionConfig,
+  hostedZoneId: string,
+  domain: string,
+  value: string
+) => {
+  const {
+    credentials: { apiToken }
+  } = connection;
+
+  try {
+    const listRecordsResponse = await request.get<{
+      result: { id: string; type: string; name: string; content: string }[];
+    }>(`${IntegrationUrls.CLOUDFLARE_API_URL}/client/v4/zones/${encodeURIComponent(hostedZoneId)}/dns_records`, {
+      headers: {
+        Authorization: `Bearer ${apiToken}`,
+        "Content-Type": "application/json",
+        Accept: "application/json"
+      },
+      params: {
+        type: "TXT",
+        name: domain,
+        content: value
+      }
+    });
+
+    const dnsRecords = listRecordsResponse.data?.result;
+
+    if (Array.isArray(dnsRecords) && dnsRecords.length > 0) {
+      const recordToDelete = dnsRecords.find(
+        (record) => record.type === "TXT" && record.name === domain && record.content === value
+      );
+
+      if (recordToDelete) {
+        await request.delete(
+          `${IntegrationUrls.CLOUDFLARE_API_URL}/client/v4/zones/${encodeURIComponent(hostedZoneId)}/dns_records/${recordToDelete.id}`,
+          {
+            headers: {
+              Authorization: `Bearer ${apiToken}`,
+              "Content-Type": "application/json",
+              Accept: "application/json"
+            }
+          }
+        );
+      }
+    }
+  } catch (error) {
+    if (axios.isAxiosError(error)) {
+      const firstErrorMessage = (
+        error.response?.data as {
+          errors?: { message: string }[];
+        }
+      )?.errors?.[0]?.message;
+      if (firstErrorMessage) {
+        throw new Error(firstErrorMessage);
+      }
+    }
+    throw error;
+  }
+};
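A hedged usage sketch of the two Cloudflare helpers above as they are used for an ACME DNS-01 challenge; the zone ID, token, and key authorization are placeholders, not values from this diff.

// Sketch: publish the challenge TXT record, then clean it up after validation.
const cfConnection = {
  credentials: { apiToken: "<cloudflare-api-token>" }
} as TCloudflareConnectionConfig;

await cloudflareInsertTxtRecord(cfConnection, "<zone-id>", "_acme-challenge.example.com", '"<key-authorization>"');
// ...ACME order validation happens here...
await cloudflareDeleteTxtRecord(cfConnection, "<zone-id>", "_acme-challenge.example.com", '"<key-authorization>"');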
@@ -0,0 +1,75 @@
+import { ChangeResourceRecordSetsCommand, Route53Client } from "@aws-sdk/client-route-53";
+
+import { CustomAWSHasher } from "@app/lib/aws/hashing";
+import { crypto } from "@app/lib/crypto/cryptography";
+import { AWSRegion } from "@app/services/app-connection/app-connection-enums";
+import { getAwsConnectionConfig } from "@app/services/app-connection/aws/aws-connection-fns";
+import { TAwsConnectionConfig } from "@app/services/app-connection/aws/aws-connection-types";
+
+export const route53InsertTxtRecord = async (
+  connection: TAwsConnectionConfig,
+  hostedZoneId: string,
+  domain: string,
+  value: string
+) => {
+  const config = await getAwsConnectionConfig(connection, AWSRegion.US_WEST_1); // REGION is irrelevant because Route53 is global
+  const route53Client = new Route53Client({
+    sha256: CustomAWSHasher,
+    useFipsEndpoint: crypto.isFipsModeEnabled(),
+    credentials: config.credentials!,
+    region: config.region
+  });
+
+  const command = new ChangeResourceRecordSetsCommand({
+    HostedZoneId: hostedZoneId,
+    ChangeBatch: {
+      Comment: "Set ACME challenge TXT record",
+      Changes: [
+        {
+          Action: "UPSERT",
+          ResourceRecordSet: {
+            Name: domain,
+            Type: "TXT",
+            TTL: 30,
+            ResourceRecords: [{ Value: value }]
+          }
+        }
+      ]
+    }
+  });
+
+  await route53Client.send(command);
+};
+
+export const route53DeleteTxtRecord = async (
+  connection: TAwsConnectionConfig,
+  hostedZoneId: string,
+  domain: string,
+  value: string
+) => {
+  const config = await getAwsConnectionConfig(connection, AWSRegion.US_WEST_1); // REGION is irrelevant because Route53 is global
+  const route53Client = new Route53Client({
+    credentials: config.credentials!,
+    region: config.region
+  });
+
+  const command = new ChangeResourceRecordSetsCommand({
+    HostedZoneId: hostedZoneId,
+    ChangeBatch: {
+      Comment: "Delete ACME challenge TXT record",
+      Changes: [
+        {
+          Action: "DELETE",
+          ResourceRecordSet: {
+            Name: domain,
+            Type: "TXT",
+            TTL: 30,
+            ResourceRecords: [{ Value: value }]
+          }
+        }
+      ]
+    }
+  });
+
+  await route53Client.send(command);
+};
@@ -15,10 +15,15 @@ export const validateAltNameField = z
   .trim()
   .refine(
     (name) => {
-      return isFQDN(name, { allow_wildcard: true }) || z.string().email().safeParse(name).success || isValidIp(name);
+      return (
+        isFQDN(name, { allow_wildcard: true, require_tld: false }) ||
+        z.string().url().safeParse(name).success ||
+        z.string().email().safeParse(name).success ||
+        isValidIp(name)
+      );
     },
     {
-      message: "SAN must be a valid hostname, email address, or IP address"
+      message: "SAN must be a valid hostname, email address, IP address or URL"
     }
   );
@@ -39,10 +44,15 @@ export const validateAltNamesField
       if (data === "") return true;
       // Split and validate each alt name
       return data.split(", ").every((name) => {
-        return isFQDN(name, { allow_wildcard: true }) || z.string().email().safeParse(name).success || isValidIp(name);
+        return (
+          isFQDN(name, { allow_wildcard: true, require_tld: false }) ||
+          z.string().url().safeParse(name).success ||
+          z.string().email().safeParse(name).success ||
+          isValidIp(name)
+        );
       });
     },
     {
-      message: "Each alt name must be a valid hostname or email address"
+      message: "Each alt name must be a valid hostname, email address, IP address or URL"
     }
   );
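For reference, a few values the widened refinement above should now accept (a sketch; note that isFQDN with require_tld: false also admits single-label internal hostnames):

// Each of these is expected to pass validateAltNameField after this change:
const sanExamples = [
  "*.example.com",                      // wildcard FQDN
  "internal-service",                   // single-label hostname (require_tld: false)
  "https://app.example.com/callback",   // URL
  "admin@example.com",                  // email address
  "10.0.0.5"                            // IP address
];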
@@ -152,7 +152,7 @@ export const InternalCertificateAuthorityFns = ({
       extensions.push(extendedKeyUsagesExtension);
     }

-    let altNamesArray: { type: "email" | "dns"; value: string }[] = [];
+    let altNamesArray: { type: "email" | "dns" | "ip" | "url"; value: string }[] = [];

     if (subscriber.subjectAlternativeNames?.length) {
       altNamesArray = subscriber.subjectAlternativeNames.map((altName) => {
@@ -160,10 +160,18 @@ export const InternalCertificateAuthorityFns = ({
           return { type: "email", value: altName };
         }

-        if (isFQDN(altName, { allow_wildcard: true })) {
+        if (isFQDN(altName, { allow_wildcard: true, require_tld: false })) {
           return { type: "dns", value: altName };
         }

+        if (z.string().url().safeParse(altName).success) {
+          return { type: "url", value: altName };
+        }
+
+        if (z.string().ip().safeParse(altName).success) {
+          return { type: "ip", value: altName };
+        }
+
         throw new BadRequestError({ message: `Invalid SAN entry: ${altName}` });
       });
@@ -418,7 +426,7 @@ export const InternalCertificateAuthorityFns = ({
       );
     }

-    let altNamesArray: { type: "email" | "dns"; value: string }[] = [];
+    let altNamesArray: { type: "email" | "dns" | "ip" | "url"; value: string }[] = [];

     if (altNames) {
       altNamesArray = altNames.split(",").map((altName) => {
@@ -426,10 +434,18 @@ export const InternalCertificateAuthorityFns = ({
           return { type: "email", value: altName };
         }

-        if (isFQDN(altName, { allow_wildcard: true })) {
+        if (isFQDN(altName, { allow_wildcard: true, require_tld: false })) {
           return { type: "dns", value: altName };
         }

+        if (z.string().url().safeParse(altName).success) {
+          return { type: "url", value: altName };
+        }
+
+        if (z.string().ip().safeParse(altName).success) {
+          return { type: "ip", value: altName };
+        }
+
         throw new BadRequestError({ message: `Invalid SAN entry: ${altName}` });
       });
@@ -17,25 +17,16 @@ type VaultData = {
 const vaultFactory = () => {
   const getMounts = async (request: AxiosInstance) => {
     const response = await request
-      .get<
-        Record<
-          string,
-          {
-            accessor: string;
-            options: {
-              version?: string;
-            } | null;
-            type: string;
-          }
-        >
-      >("/v1/sys/mounts")
+      .get<{
+        data: Record<string, { accessor: string; options: { version?: string } | null; type: string }>;
+      }>("/v1/sys/mounts")
       .catch((err) => {
         if (axios.isAxiosError(err)) {
           logger.error(err.response?.data, "External migration: Failed to get Vault mounts");
         }
         throw err;
       });
-    return response.data;
+    return response.data.data;
   };

   const getPaths = async (
@@ -19,7 +19,7 @@ import { TSecretVersionV2DALFactory } from "../secret-v2-bridge/secret-version-d
 import { TSecretVersionV2TagDALFactory } from "../secret-v2-bridge/secret-version-tag-dal";
 import { SmtpTemplates, TSmtpService } from "../smtp/smtp-service";
 import { importDataIntoInfisicalFn } from "./external-migration-fns";
-import { ExternalPlatforms, ImportType, TImportInfisicalDataCreate } from "./external-migration-types";
+import { ExternalPlatforms, TImportInfisicalDataCreate } from "./external-migration-types";

 export type TExternalMigrationQueueFactoryDep = {
   smtpService: TSmtpService;
@@ -66,8 +66,8 @@ export const externalMigrationQueueFactory = ({
 }: TExternalMigrationQueueFactoryDep) => {
   const startImport = async (dto: {
     actorEmail: string;
+    importType: ExternalPlatforms;
     data: {
-      importType: ImportType;
       iv: string;
       tag: string;
       ciphertext: string;
@@ -87,14 +87,14 @@ export const externalMigrationQueueFactory = ({
   };

   queueService.start(QueueName.ImportSecretsFromExternalSource, async (job) => {
-    try {
-      const { data, actorEmail } = job.data;
+    const { data, actorEmail, importType } = job.data;

+    try {
       await smtpService.sendMail({
         recipients: [actorEmail],
         subjectLine: "Infisical import started",
         substitutions: {
-          provider: ExternalPlatforms.EnvKey
+          provider: importType
         },
         template: SmtpTemplates.ExternalImportStarted
       });
@@ -141,7 +141,7 @@ export const externalMigrationQueueFactory = ({
         recipients: [actorEmail],
         subjectLine: "Infisical import successful",
         substitutions: {
-          provider: ExternalPlatforms.EnvKey
+          provider: importType
         },
         template: SmtpTemplates.ExternalImportSuccessful
       });
@@ -150,7 +150,7 @@ export const externalMigrationQueueFactory = ({
         recipients: [job.data.actorEmail],
         subjectLine: "Infisical import failed",
         substitutions: {
-          provider: ExternalPlatforms.EnvKey,
+          provider: importType,
           // eslint-disable-next-line @typescript-eslint/no-unsafe-member-access, @typescript-eslint/no-explicit-any, @typescript-eslint/no-unsafe-assignment
           error: (err as any)?.message || "Unknown error"
         },
Some files were not shown because too many files have changed in this diff.