Mirror of https://github.com/Infisical/infisical.git (synced 2025-08-09 06:02:53 +00:00)
Compare commits
214 Commits
improve-in ... fix/migrat
Author | SHA1 | Date
---|---|---
| ec65e0e29c | |
| 1b0ef540fe | |
| 4496241002 | |
| 52e32484ce | |
| 8b497699d4 | |
| be73f62226 | |
| 102620ff09 | |
| 994ee88852 | |
| 770e25b895 | |
| fcf3bdb440 | |
| 89c11b5541 | |
| 5f764904e2 | |
| 1a75384dba | |
| 50f434cd80 | |
| d879cfd90c | |
| ca1f5eaca3 | |
| 04086376ea | |
| 364027a88a | |
| ca110d11b0 | |
| 4e8f404f16 | |
| 22abb78f48 | |
| 24f11406e1 | |
| d5d67c82b2 | |
| 35cfcf1f0f | |
| 2c8cfeb826 | |
| 70d22f90ec | |
| 4f52400887 | |
| 34eb9f475a | |
| 902a0b0c56 | |
| 9e6294786f | |
| 847c50d2d4 | |
| efa043c3d2 | |
| 7e94791635 | |
| eedc5f533e | |
| fc5d42baf0 | |
| b95c35620a | |
| fa867e5068 | |
| 8851faec65 | |
| 47fb666dc7 | |
| 569edd2852 | |
| 676ebaf3c2 | |
| adb3185042 | |
| 8da0a4d846 | |
| eebf080e3c | |
| 97be31f11e | |
| 667cceebc0 | |
| 1ad02e2da6 | |
| 93445d96b3 | |
| e105a5f7da | |
| 72b80e1fd7 | |
| 6429adfaf6 | |
| fd89b3c702 | |
| 50e40e8bcf | |
| 6100086338 | |
| 000dd6c223 | |
| 60dc1d1e00 | |
| 2d68f9aa16 | |
| e694293ebe | |
| ef6f5ecc4b | |
| 035ac0fe8d | |
| c12408eb81 | |
| 13194296c6 | |
| be20a507ac | |
| 63cf36c722 | |
| 4dcd3ed06c | |
| 59cffe8cfb | |
| fa61867a72 | |
| f3694ca730 | |
| 8fcd6d9997 | |
| 45ff9a50b6 | |
| 81cdfb9861 | |
| e1e553ce23 | |
| e7a6f46f56 | |
| b51d997e26 | |
| 23f6fbe9fc | |
| c1fb5d8998 | |
| 0cb21082c7 | |
| 4e3613ac6e | |
| 6be65f7a56 | |
| 63cb484313 | |
| aa3af1672a | |
| 33fe11e0fd | |
| d924a4bccc | |
| 3fc7a71bc7 | |
| 986fe2fe23 | |
| 08f7e530b0 | |
| e9f5055481 | |
| 35055955e2 | |
| c188e7cd2b | |
| 7d2ded6235 | |
| c568f40954 | |
| 28f87b8b27 | |
| aab1a0297e | |
| dd0f5cebd2 | |
| 1b29a4564a | |
| 9e3c0c8583 | |
| 3e803debb4 | |
| 16ebe0f8e7 | |
| e8eb1b5f8b | |
| 6e37b9f969 | |
| 899b7fe024 | |
| 098a8b81be | |
| e852cd8b4a | |
| 830a2f9581 | |
| dc4db40936 | |
| 0beff3cc1c | |
| 5a3325fc53 | |
| 3dde786621 | |
| da6b233db1 | |
| 6958f1cfbd | |
| adf7a88d67 | |
| b8cd836225 | |
| 6826b1c242 | |
| 35012fde03 | |
| 6e14b2f793 | |
| 5a3aa3d608 | |
| 95b327de50 | |
| a3c36f82f3 | |
| 42612da57d | |
| f63c07d538 | |
| 98a08d136e | |
| 6c74b875f3 | |
| 793cd4c144 | |
| dc0cc4c29d | |
| 6dd639be60 | |
| ebe05661d3 | |
| 4f0007faa5 | |
| ec0be1166f | |
| 899d01237c | |
| ff5dbe74fd | |
| 24004084f2 | |
| 0e401ece73 | |
| c4e1651df7 | |
| 514c7596db | |
| 9fbdede82c | |
| 1898c16f1b | |
| e519637e89 | |
| ba393b0498 | |
| 4150f81d83 | |
| a45bba8537 | |
| fe7e8e7240 | |
| cf54365022 | |
| 4b9e57ae61 | |
| eb27983990 | |
| fa311b032c | |
| 71651f85fe | |
| d28d3449de | |
| 14ffa59530 | |
| 4f26365c21 | |
| c974df104e | |
| e88fdc957e | |
| 55e5360dd4 | |
| 77a8cd9efc | |
| de2c1c5560 | |
| 52f773c647 | |
| 79de7c5f08 | |
| 3877fe524d | |
| 4c5df70790 | |
| 5645dd2b8d | |
| 0d55195561 | |
| 1c0caab469 | |
| ed9dfd2974 | |
| 7f72037d77 | |
| 9928ca17ea | |
| 2cbd66e804 | |
| 7357d377e1 | |
| 4704774c63 | |
| 149cecd805 | |
| c80fd55a74 | |
| 93e7723b48 | |
| 573b990aa3 | |
| e15086edc0 | |
| 4a55ecbe12 | |
| 13ef3809bd | |
| fb49c9250a | |
| 5ced7fa923 | |
| 5ffd42378a | |
| 1e29d550be | |
| f995708e44 | |
| c266d68993 | |
| c7c8107f85 | |
| b906fe34a1 | |
| bec1fefee8 | |
| cd03107a60 | |
| 07965de1db | |
| b20ff0f029 | |
| 691cbe0a4f | |
| 0787128803 | |
| 837158e344 | |
| 03bd1471b2 | |
| f53c39f65b | |
| 092695089d | |
| 2d80681597 | |
| cf23f98170 | |
| c4c8e121f0 | |
| 0701c996e5 | |
| 4ca6f165b7 | |
| b9dd565926 | |
| 136b0bdcb5 | |
| 7266d1f310 | |
| 9c6ec807cb | |
| 5fcae35fae | |
| 359e19f804 | |
| 0c98d9187d | |
| e106a6dceb | |
| 2d3b1b18d2 | |
| d5dd2e8bfd | |
| 2aa548c7dc | |
| 4f00fc6777 | |
| 82b765553c | |
| 8972521716 | |
| 81b45b24ec | |
| f2b0e4ae37 | |
| b4ed1fa96a | |
@@ -2,8 +2,7 @@ name: Release production images (frontend, backend)
 on:
   push:
     tags:
-      - "infisical/v*.*.*"
-      - "!infisical/v*.*.*-postgres"
+      - "vv*.*.*"

 jobs:
   backend-image:
@@ -12,7 +11,7 @@ jobs:
     steps:
       - name: Extract version from tag
         id: extract_version
-        run: echo "::set-output name=version::${GITHUB_REF_NAME#infisical/}"
+        run: echo "::set-output name=version::${GITHUB_REF_NAME}"
       - name: ☁️ Checkout source
        uses: actions/checkout@v3
      - name: 📦 Install dependencies to test all dependencies
.github/workflows/nightly-tag-generation.yml (vendored, new file, 82 lines)
@@ -0,0 +1,82 @@
name: Generate Nightly Tag

on:
  schedule:
    - cron: '0 0 * * *' # Run daily at midnight UTC
  workflow_dispatch: # Allow manual triggering for testing

permissions:
  contents: write

jobs:
  create-nightly-tag:
    runs-on: ubuntu-latest

    steps:
      - name: Checkout code
        uses: actions/checkout@v4
        with:
          fetch-depth: 0 # Fetch all history for tags
          token: ${{ secrets.GO_RELEASER_GITHUB_TOKEN }}

      - name: Configure Git
        run: |
          git config user.name "github-actions[bot]"
          git config user.email "github-actions[bot]@users.noreply.github.com"

      - name: Generate nightly tag
        run: |
          # Get the latest infisical production tag
          LATEST_STABLE_TAG=$(git tag --list | grep "^v[0-9].*$" | grep -v "nightly" | sort -V | tail -n1)

          if [ -z "$LATEST_STABLE_TAG" ]; then
            echo "No infisical production tags found, using v0.1.0"
            LATEST_STABLE_TAG="v0.1.0"
          fi

          echo "Latest production tag: $LATEST_STABLE_TAG"

          # Get current date in YYYYMMDD format
          DATE=$(date +%Y%m%d)

          # Base nightly tag name
          BASE_TAG="${LATEST_STABLE_TAG}-nightly-${DATE}"

          # Check if this exact tag already exists
          if git tag --list | grep -q "^${BASE_TAG}$"; then
            echo "Base tag ${BASE_TAG} already exists, finding next increment"

            # Find existing tags for this date and get the highest increment
            EXISTING_TAGS=$(git tag --list | grep "^${BASE_TAG}" | grep -E '\.[0-9]+$' || true)

            if [ -z "$EXISTING_TAGS" ]; then
              # No incremental tags exist, create .1
              NIGHTLY_TAG="${BASE_TAG}.1"
            else
              # Find the highest increment
              HIGHEST_INCREMENT=$(echo "$EXISTING_TAGS" | sed "s|^${BASE_TAG}\.||" | sort -n | tail -n1)
              NEXT_INCREMENT=$((HIGHEST_INCREMENT + 1))
              NIGHTLY_TAG="${BASE_TAG}.${NEXT_INCREMENT}"
            fi
          else
            # Base tag doesn't exist, use it
            NIGHTLY_TAG="$BASE_TAG"
          fi

          echo "Generated nightly tag: $NIGHTLY_TAG"
          echo "NIGHTLY_TAG=$NIGHTLY_TAG" >> $GITHUB_ENV
          echo "LATEST_PRODUCTION_TAG=$LATEST_STABLE_TAG" >> $GITHUB_ENV

          git tag "$NIGHTLY_TAG"
          git push origin "$NIGHTLY_TAG"
          echo "✅ Created and pushed nightly tag: $NIGHTLY_TAG"

      - name: Create GitHub Release
        uses: softprops/action-gh-release@v2
        with:
          tag_name: ${{ env.NIGHTLY_TAG }}
          name: ${{ env.NIGHTLY_TAG }}
          draft: false
          prerelease: true
          generate_release_notes: true
          make_latest: false
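The increment logic above is the heart of the workflow: one base tag per day, then .1, .2, and so on for reruns on the same date. A small TypeScript sketch of the same resolution, for illustration only; the function name and the in-memory tag list are assumptions, not part of the repo:

// Sketch: mirrors the shell logic in the "Generate nightly tag" step.
// `existingTags` stands in for the output of `git tag --list`.
function resolveNightlyTag(latestStableTag: string, existingTags: string[], date: string): string {
  const baseTag = `${latestStableTag}-nightly-${date}`;
  if (!existingTags.includes(baseTag)) {
    return baseTag; // first nightly of the day
  }
  // Base tag taken: find the highest numeric increment among `${baseTag}.N` tags.
  const increments = existingTags
    .filter((tag) => tag.startsWith(`${baseTag}.`))
    .map((tag) => Number(tag.slice(baseTag.length + 1)))
    .filter((n) => Number.isInteger(n) && n > 0);
  const next = increments.length > 0 ? Math.max(...increments) + 1 : 1;
  return `${baseTag}.${next}`;
}

// Example: with v1.0.0-nightly-20250723 and v1.0.0-nightly-20250723.1 already tagged,
// resolveNightlyTag("v1.0.0", tags, "20250723") yields "v1.0.0-nightly-20250723.2".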
@@ -2,7 +2,9 @@ name: Release standalone docker image
 on:
   push:
     tags:
-      - "infisical/v*.*.*-postgres"
+      - "v*.*.*"
+      - "v*.*.*-nightly-*"
+      - "v*.*.*-nightly-*.*"

 jobs:
   infisical-tests:
@@ -17,7 +19,7 @@ jobs:
     steps:
       - name: Extract version from tag
         id: extract_version
-        run: echo "::set-output name=version::${GITHUB_REF_NAME#infisical/}"
+        run: echo "::set-output name=version::${GITHUB_REF_NAME}"
      - name: ☁️ Checkout source
        uses: actions/checkout@v3
        with:
@@ -53,7 +55,7 @@ jobs:
          push: true
          context: .
          tags: |
-            infisical/infisical:latest-postgres
+            infisical/infisical:latest
            infisical/infisical:${{ steps.commit.outputs.short }}
            infisical/infisical:${{ steps.extract_version.outputs.version }}
          platforms: linux/amd64,linux/arm64
@@ -69,7 +71,7 @@ jobs:
     steps:
       - name: Extract version from tag
         id: extract_version
-        run: echo "::set-output name=version::${GITHUB_REF_NAME#infisical/}"
+        run: echo "::set-output name=version::${GITHUB_REF_NAME}"
      - name: ☁️ Checkout source
        uses: actions/checkout@v3
        with:
@@ -105,7 +107,7 @@ jobs:
          push: true
          context: .
          tags: |
-            infisical/infisical-fips:latest-postgres
+            infisical/infisical-fips:latest
            infisical/infisical-fips:${{ steps.commit.outputs.short }}
            infisical/infisical-fips:${{ steps.extract_version.outputs.version }}
          platforms: linux/amd64,linux/arm64
@@ -44,10 +44,7 @@ jobs:

       - name: Generate Helm Chart
         working-directory: k8-operator
-        run: make helm
-
-      - name: Update Helm Chart Version
-        run: ./k8-operator/scripts/update-version.sh ${{ steps.extract_version.outputs.version }}
+        run: make helm VERSION=${{ steps.extract_version.outputs.version }}

       - name: Debug - Check file changes
         run: |
@@ -55,6 +55,8 @@ USER non-root-user
 ##
 FROM base AS backend-build

+ENV ChrystokiConfigurationPath=/usr/safenet/lunaclient/
+
 WORKDIR /app

 # Install all required dependencies for build
@@ -84,6 +86,8 @@ RUN npm run build
 # Production stage
 FROM base AS backend-runner

+ENV ChrystokiConfigurationPath=/usr/safenet/lunaclient/
+
 WORKDIR /app

 # Install all required dependencies for runtime
@@ -112,6 +116,11 @@ RUN mkdir frontend-build
 FROM base AS production

 RUN apt-get update && apt-get install -y \
+    build-essential \
+    autoconf \
+    automake \
+    libtool \
+    libssl-dev \
     ca-certificates \
     bash \
     curl \
@@ -171,6 +180,7 @@ ENV NODE_ENV production
 ENV STANDALONE_BUILD true
 ENV STANDALONE_MODE true
 ENV NODE_OPTIONS="--max-old-space-size=1024"
+ENV ChrystokiConfigurationPath=/usr/safenet/lunaclient/

 WORKDIR /backend
backend/package-lock.json (generated, 244 lines)
@@ -7,7 +7,6 @@
   "": {
     "name": "backend",
     "version": "1.0.0",
-    "hasInstallScript": true,
     "license": "ISC",
     "dependencies": {
       "@aws-sdk/client-elasticache": "^3.637.0",
@@ -34,11 +33,12 @@
       "@gitbeaker/rest": "^42.5.0",
       "@google-cloud/kms": "^4.5.0",
       "@infisical/quic": "^1.0.8",
-      "@node-saml/passport-saml": "^5.0.1",
+      "@node-saml/passport-saml": "^5.1.0",
       "@octokit/auth-app": "^7.1.1",
       "@octokit/core": "^5.2.1",
       "@octokit/plugin-paginate-graphql": "^4.0.1",
       "@octokit/plugin-retry": "^5.0.5",
+      "@octokit/request": "8.4.1",
       "@octokit/rest": "^20.0.2",
       "@octokit/webhooks-types": "^7.3.1",
       "@octopusdeploy/api-client": "^3.4.1",
@@ -9574,20 +9574,20 @@
     }
   },
   "node_modules/@node-saml/node-saml": {
-    "version": "5.0.1",
-    "resolved": "https://registry.npmjs.org/@node-saml/node-saml/-/node-saml-5.0.1.tgz",
-    "integrity": "sha512-YQzFPEC+CnsfO9AFYnwfYZKIzOLx3kITaC1HrjHVLTo6hxcQhc+LgHODOMvW4VCV95Gwrz1MshRUWCPzkDqmnA==",
+    "version": "5.1.0",
+    "resolved": "https://registry.npmjs.org/@node-saml/node-saml/-/node-saml-5.1.0.tgz",
+    "integrity": "sha512-t3cJnZ4aC7HhPZ6MGylGZULvUtBOZ6FzuUndaHGXjmIZHXnLfC/7L8a57O9Q9V7AxJGKAiRM5zu2wNm9EsvQpw==",
     "license": "MIT",
     "dependencies": {
       "@types/debug": "^4.1.12",
-      "@types/qs": "^6.9.11",
+      "@types/qs": "^6.9.18",
       "@types/xml-encryption": "^1.2.4",
       "@types/xml2js": "^0.4.14",
       "@xmldom/is-dom-node": "^1.0.1",
       "@xmldom/xmldom": "^0.8.10",
-      "debug": "^4.3.4",
-      "xml-crypto": "^6.0.1",
-      "xml-encryption": "^3.0.2",
+      "debug": "^4.4.0",
+      "xml-crypto": "^6.1.2",
+      "xml-encryption": "^3.1.0",
       "xml2js": "^0.6.2",
       "xmlbuilder": "^15.1.1",
       "xpath": "^0.0.34"
@@ -9597,9 +9597,9 @@
     }
   },
   "node_modules/@node-saml/node-saml/node_modules/debug": {
-    "version": "4.4.0",
-    "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.0.tgz",
-    "integrity": "sha512-6WTZ/IxCY/T6BALoZHaE4ctp9xm+Z5kY/pzYaCHRFeyVhojxlrm+46y68HA6hr0TcwEssoxNiDEUJQjfPZ/RYA==",
+    "version": "4.4.1",
+    "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.1.tgz",
+    "integrity": "sha512-KcKCqiftBJcZr++7ykoDIEwSa3XWowTfNPo92BYxjXiyYEVrUQh2aLyhxBCwww+heortUFxEJYcRzosstTEBYQ==",
     "license": "MIT",
     "dependencies": {
       "ms": "^2.1.3"
@@ -9636,14 +9636,14 @@
     }
   },
   "node_modules/@node-saml/passport-saml": {
-    "version": "5.0.1",
-    "resolved": "https://registry.npmjs.org/@node-saml/passport-saml/-/passport-saml-5.0.1.tgz",
-    "integrity": "sha512-fMztg3zfSnjLEgxvpl6HaDMNeh0xeQX4QHiF9e2Lsie2dc4qFE37XYbQZhVmn8XJ2awPpSWLQ736UskYgGU8lQ==",
+    "version": "5.1.0",
+    "resolved": "https://registry.npmjs.org/@node-saml/passport-saml/-/passport-saml-5.1.0.tgz",
+    "integrity": "sha512-pBm+iFjv9eihcgeJuSUs4c0AuX1QEFdHwP8w1iaWCfDzXdeWZxUBU5HT2bY2S4dvNutcy+A9hYsH7ZLBGtgwDg==",
     "license": "MIT",
     "dependencies": {
-      "@node-saml/node-saml": "^5.0.1",
-      "@types/express": "^4.17.21",
-      "@types/passport": "^1.0.16",
+      "@node-saml/node-saml": "^5.1.0",
+      "@types/express": "^4.17.23",
+      "@types/passport": "^1.0.17",
       "@types/passport-strategy": "^0.2.38",
       "passport": "^0.7.0",
       "passport-strategy": "^1.0.0"
@@ -9778,18 +9778,6 @@
       "node": ">= 18"
     }
   },
-  "node_modules/@octokit/auth-app/node_modules/@octokit/endpoint": {
-    "version": "10.1.1",
-    "resolved": "https://registry.npmjs.org/@octokit/endpoint/-/endpoint-10.1.1.tgz",
-    "integrity": "sha512-JYjh5rMOwXMJyUpj028cu0Gbp7qe/ihxfJMLc8VZBMMqSwLgOxDI1911gV4Enl1QSavAQNJcwmwBF9M0VvLh6Q==",
-    "dependencies": {
-      "@octokit/types": "^13.0.0",
-      "universal-user-agent": "^7.0.2"
-    },
-    "engines": {
-      "node": ">= 18"
-    }
-  },
   "node_modules/@octokit/auth-app/node_modules/@octokit/openapi-types": {
     "version": "22.2.0",
     "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-22.2.0.tgz",
@@ -9836,11 +9824,6 @@
       "node": "14 || >=16.14"
     }
   },
-  "node_modules/@octokit/auth-app/node_modules/universal-user-agent": {
-    "version": "7.0.2",
-    "resolved": "https://registry.npmjs.org/universal-user-agent/-/universal-user-agent-7.0.2.tgz",
-    "integrity": "sha512-0JCqzSKnStlRRQfCdowvqy3cy0Dvtlb8xecj/H8JFZuCze4rwjPZQOgvFvn0Ws/usCHQFGpyr+pB9adaGwXn4Q=="
-  },
   "node_modules/@octokit/auth-oauth-app": {
     "version": "8.1.1",
     "resolved": "https://registry.npmjs.org/@octokit/auth-oauth-app/-/auth-oauth-app-8.1.1.tgz",
@@ -9856,18 +9839,6 @@
       "node": ">= 18"
     }
   },
-  "node_modules/@octokit/auth-oauth-app/node_modules/@octokit/endpoint": {
-    "version": "10.1.1",
-    "resolved": "https://registry.npmjs.org/@octokit/endpoint/-/endpoint-10.1.1.tgz",
-    "integrity": "sha512-JYjh5rMOwXMJyUpj028cu0Gbp7qe/ihxfJMLc8VZBMMqSwLgOxDI1911gV4Enl1QSavAQNJcwmwBF9M0VvLh6Q==",
-    "dependencies": {
-      "@octokit/types": "^13.0.0",
-      "universal-user-agent": "^7.0.2"
-    },
-    "engines": {
-      "node": ">= 18"
-    }
-  },
   "node_modules/@octokit/auth-oauth-app/node_modules/@octokit/openapi-types": {
     "version": "22.2.0",
     "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-22.2.0.tgz",
@@ -9906,11 +9877,6 @@
       "@octokit/openapi-types": "^22.2.0"
     }
   },
-  "node_modules/@octokit/auth-oauth-app/node_modules/universal-user-agent": {
-    "version": "7.0.2",
-    "resolved": "https://registry.npmjs.org/universal-user-agent/-/universal-user-agent-7.0.2.tgz",
-    "integrity": "sha512-0JCqzSKnStlRRQfCdowvqy3cy0Dvtlb8xecj/H8JFZuCze4rwjPZQOgvFvn0Ws/usCHQFGpyr+pB9adaGwXn4Q=="
-  },
   "node_modules/@octokit/auth-oauth-device": {
     "version": "7.1.1",
     "resolved": "https://registry.npmjs.org/@octokit/auth-oauth-device/-/auth-oauth-device-7.1.1.tgz",
@@ -9925,18 +9891,6 @@
       "node": ">= 18"
     }
   },
-  "node_modules/@octokit/auth-oauth-device/node_modules/@octokit/endpoint": {
-    "version": "10.1.1",
-    "resolved": "https://registry.npmjs.org/@octokit/endpoint/-/endpoint-10.1.1.tgz",
-    "integrity": "sha512-JYjh5rMOwXMJyUpj028cu0Gbp7qe/ihxfJMLc8VZBMMqSwLgOxDI1911gV4Enl1QSavAQNJcwmwBF9M0VvLh6Q==",
-    "dependencies": {
-      "@octokit/types": "^13.0.0",
-      "universal-user-agent": "^7.0.2"
-    },
-    "engines": {
-      "node": ">= 18"
-    }
-  },
   "node_modules/@octokit/auth-oauth-device/node_modules/@octokit/openapi-types": {
     "version": "22.2.0",
     "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-22.2.0.tgz",
@@ -9975,11 +9929,6 @@
       "@octokit/openapi-types": "^22.2.0"
     }
   },
-  "node_modules/@octokit/auth-oauth-device/node_modules/universal-user-agent": {
-    "version": "7.0.2",
-    "resolved": "https://registry.npmjs.org/universal-user-agent/-/universal-user-agent-7.0.2.tgz",
-    "integrity": "sha512-0JCqzSKnStlRRQfCdowvqy3cy0Dvtlb8xecj/H8JFZuCze4rwjPZQOgvFvn0Ws/usCHQFGpyr+pB9adaGwXn4Q=="
-  },
   "node_modules/@octokit/auth-oauth-user": {
     "version": "5.1.1",
     "resolved": "https://registry.npmjs.org/@octokit/auth-oauth-user/-/auth-oauth-user-5.1.1.tgz",
@@ -9995,18 +9944,6 @@
       "node": ">= 18"
     }
   },
-  "node_modules/@octokit/auth-oauth-user/node_modules/@octokit/endpoint": {
-    "version": "10.1.1",
-    "resolved": "https://registry.npmjs.org/@octokit/endpoint/-/endpoint-10.1.1.tgz",
-    "integrity": "sha512-JYjh5rMOwXMJyUpj028cu0Gbp7qe/ihxfJMLc8VZBMMqSwLgOxDI1911gV4Enl1QSavAQNJcwmwBF9M0VvLh6Q==",
-    "dependencies": {
-      "@octokit/types": "^13.0.0",
-      "universal-user-agent": "^7.0.2"
-    },
-    "engines": {
-      "node": ">= 18"
-    }
-  },
   "node_modules/@octokit/auth-oauth-user/node_modules/@octokit/openapi-types": {
     "version": "22.2.0",
     "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-22.2.0.tgz",
@@ -10045,11 +9982,6 @@
       "@octokit/openapi-types": "^22.2.0"
     }
   },
-  "node_modules/@octokit/auth-oauth-user/node_modules/universal-user-agent": {
-    "version": "7.0.2",
-    "resolved": "https://registry.npmjs.org/universal-user-agent/-/universal-user-agent-7.0.2.tgz",
-    "integrity": "sha512-0JCqzSKnStlRRQfCdowvqy3cy0Dvtlb8xecj/H8JFZuCze4rwjPZQOgvFvn0Ws/usCHQFGpyr+pB9adaGwXn4Q=="
-  },
   "node_modules/@octokit/auth-token": {
     "version": "4.0.0",
     "resolved": "https://registry.npmjs.org/@octokit/auth-token/-/auth-token-4.0.0.tgz",
@@ -10103,32 +10035,38 @@
       "@octokit/openapi-types": "^24.2.0"
     }
   },
+  "node_modules/@octokit/core/node_modules/universal-user-agent": {
+    "version": "6.0.1",
+    "resolved": "https://registry.npmjs.org/universal-user-agent/-/universal-user-agent-6.0.1.tgz",
+    "integrity": "sha512-yCzhz6FN2wU1NiiQRogkTQszlQSlpWaw8SvVegAc+bDxbzHgh1vX8uIe8OYyMH6DwH+sdTJsgMl36+mSMdRJIQ==",
+    "license": "ISC"
+  },
   "node_modules/@octokit/endpoint": {
-    "version": "9.0.6",
-    "resolved": "https://registry.npmjs.org/@octokit/endpoint/-/endpoint-9.0.6.tgz",
-    "integrity": "sha512-H1fNTMA57HbkFESSt3Y9+FBICv+0jFceJFPWDePYlR/iMGrwM5ph+Dd4XRQs+8X+PUFURLQgX9ChPfhJ/1uNQw==",
+    "version": "10.1.4",
+    "resolved": "https://registry.npmjs.org/@octokit/endpoint/-/endpoint-10.1.4.tgz",
+    "integrity": "sha512-OlYOlZIsfEVZm5HCSR8aSg02T2lbUWOsCQoPKfTXJwDzcHQBrVBGdGXb89dv2Kw2ToZaRtudp8O3ZIYoaOjKlA==",
     "license": "MIT",
     "dependencies": {
-      "@octokit/types": "^13.1.0",
-      "universal-user-agent": "^6.0.0"
+      "@octokit/types": "^14.0.0",
+      "universal-user-agent": "^7.0.2"
    },
    "engines": {
      "node": ">= 18"
    }
  },
   "node_modules/@octokit/endpoint/node_modules/@octokit/openapi-types": {
-    "version": "24.2.0",
-    "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-24.2.0.tgz",
-    "integrity": "sha512-9sIH3nSUttelJSXUrmGzl7QUBFul0/mB8HRYl3fOlgHbIWG+WnYDXU3v/2zMtAvuzZ/ed00Ei6on975FhBfzrg==",
+    "version": "25.1.0",
+    "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-25.1.0.tgz",
+    "integrity": "sha512-idsIggNXUKkk0+BExUn1dQ92sfysJrje03Q0bv0e+KPLrvyqZF8MnBpFz8UNfYDwB3Ie7Z0TByjWfzxt7vseaA==",
     "license": "MIT"
   },
   "node_modules/@octokit/endpoint/node_modules/@octokit/types": {
-    "version": "13.10.0",
-    "resolved": "https://registry.npmjs.org/@octokit/types/-/types-13.10.0.tgz",
-    "integrity": "sha512-ifLaO34EbbPj0Xgro4G5lP5asESjwHracYJvVaPIyXMuiuXLlhic3S47cBdTb+jfODkTE5YtGCLt3Ay3+J97sA==",
+    "version": "14.1.0",
+    "resolved": "https://registry.npmjs.org/@octokit/types/-/types-14.1.0.tgz",
+    "integrity": "sha512-1y6DgTy8Jomcpu33N+p5w58l6xyt55Ar2I91RPiIA0xCJBXyUAhXCcmZaDWSANiha7R9a6qJJ2CRomGPZ6f46g==",
     "license": "MIT",
     "dependencies": {
-      "@octokit/openapi-types": "^24.2.0"
+      "@octokit/openapi-types": "^25.1.0"
     }
   },
   "node_modules/@octokit/graphql": {
@@ -10160,6 +10098,12 @@
       "@octokit/openapi-types": "^24.2.0"
     }
   },
+  "node_modules/@octokit/graphql/node_modules/universal-user-agent": {
+    "version": "6.0.1",
+    "resolved": "https://registry.npmjs.org/universal-user-agent/-/universal-user-agent-6.0.1.tgz",
+    "integrity": "sha512-yCzhz6FN2wU1NiiQRogkTQszlQSlpWaw8SvVegAc+bDxbzHgh1vX8uIe8OYyMH6DwH+sdTJsgMl36+mSMdRJIQ==",
+    "license": "ISC"
+  },
   "node_modules/@octokit/oauth-authorization-url": {
     "version": "7.1.1",
     "resolved": "https://registry.npmjs.org/@octokit/oauth-authorization-url/-/oauth-authorization-url-7.1.1.tgz",
@@ -10182,18 +10126,6 @@
       "node": ">= 18"
     }
   },
-  "node_modules/@octokit/oauth-methods/node_modules/@octokit/endpoint": {
-    "version": "10.1.1",
-    "resolved": "https://registry.npmjs.org/@octokit/endpoint/-/endpoint-10.1.1.tgz",
-    "integrity": "sha512-JYjh5rMOwXMJyUpj028cu0Gbp7qe/ihxfJMLc8VZBMMqSwLgOxDI1911gV4Enl1QSavAQNJcwmwBF9M0VvLh6Q==",
-    "dependencies": {
-      "@octokit/types": "^13.0.0",
-      "universal-user-agent": "^7.0.2"
-    },
-    "engines": {
-      "node": ">= 18"
-    }
-  },
   "node_modules/@octokit/oauth-methods/node_modules/@octokit/openapi-types": {
     "version": "22.2.0",
     "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-22.2.0.tgz",
@@ -10232,11 +10164,6 @@
       "@octokit/openapi-types": "^22.2.0"
     }
   },
-  "node_modules/@octokit/oauth-methods/node_modules/universal-user-agent": {
-    "version": "7.0.2",
-    "resolved": "https://registry.npmjs.org/universal-user-agent/-/universal-user-agent-7.0.2.tgz",
-    "integrity": "sha512-0JCqzSKnStlRRQfCdowvqy3cy0Dvtlb8xecj/H8JFZuCze4rwjPZQOgvFvn0Ws/usCHQFGpyr+pB9adaGwXn4Q=="
-  },
   "node_modules/@octokit/openapi-types": {
     "version": "19.1.0",
     "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-19.1.0.tgz",
@@ -10377,31 +10304,54 @@
     }
   },
   "node_modules/@octokit/request-error/node_modules/@octokit/openapi-types": {
-    "version": "22.2.0",
-    "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-22.2.0.tgz",
-    "integrity": "sha512-QBhVjcUa9W7Wwhm6DBFu6ZZ+1/t/oYxqc2tp81Pi41YNuJinbFRx8B133qVOrAaBbF7D/m0Et6f9/pZt9Rc+tg=="
+    "version": "24.2.0",
+    "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-24.2.0.tgz",
+    "integrity": "sha512-9sIH3nSUttelJSXUrmGzl7QUBFul0/mB8HRYl3fOlgHbIWG+WnYDXU3v/2zMtAvuzZ/ed00Ei6on975FhBfzrg==",
+    "license": "MIT"
   },
   "node_modules/@octokit/request-error/node_modules/@octokit/types": {
-    "version": "13.6.1",
-    "resolved": "https://registry.npmjs.org/@octokit/types/-/types-13.6.1.tgz",
-    "integrity": "sha512-PHZE9Z+kWXb23Ndik8MKPirBPziOc0D2/3KH1P+6jK5nGWe96kadZuE4jev2/Jq7FvIfTlT2Ltg8Fv2x1v0a5g==",
+    "version": "13.10.0",
+    "resolved": "https://registry.npmjs.org/@octokit/types/-/types-13.10.0.tgz",
+    "integrity": "sha512-ifLaO34EbbPj0Xgro4G5lP5asESjwHracYJvVaPIyXMuiuXLlhic3S47cBdTb+jfODkTE5YtGCLt3Ay3+J97sA==",
+    "license": "MIT",
     "dependencies": {
-      "@octokit/openapi-types": "^22.2.0"
+      "@octokit/openapi-types": "^24.2.0"
     }
   },
+  "node_modules/@octokit/request/node_modules/@octokit/endpoint": {
+    "version": "9.0.6",
+    "resolved": "https://registry.npmjs.org/@octokit/endpoint/-/endpoint-9.0.6.tgz",
+    "integrity": "sha512-H1fNTMA57HbkFESSt3Y9+FBICv+0jFceJFPWDePYlR/iMGrwM5ph+Dd4XRQs+8X+PUFURLQgX9ChPfhJ/1uNQw==",
+    "license": "MIT",
+    "dependencies": {
+      "@octokit/types": "^13.1.0",
+      "universal-user-agent": "^6.0.0"
+    },
+    "engines": {
+      "node": ">= 18"
+    }
+  },
   "node_modules/@octokit/request/node_modules/@octokit/openapi-types": {
-    "version": "22.2.0",
-    "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-22.2.0.tgz",
-    "integrity": "sha512-QBhVjcUa9W7Wwhm6DBFu6ZZ+1/t/oYxqc2tp81Pi41YNuJinbFRx8B133qVOrAaBbF7D/m0Et6f9/pZt9Rc+tg=="
+    "version": "24.2.0",
+    "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-24.2.0.tgz",
+    "integrity": "sha512-9sIH3nSUttelJSXUrmGzl7QUBFul0/mB8HRYl3fOlgHbIWG+WnYDXU3v/2zMtAvuzZ/ed00Ei6on975FhBfzrg==",
+    "license": "MIT"
   },
   "node_modules/@octokit/request/node_modules/@octokit/types": {
-    "version": "13.6.1",
-    "resolved": "https://registry.npmjs.org/@octokit/types/-/types-13.6.1.tgz",
-    "integrity": "sha512-PHZE9Z+kWXb23Ndik8MKPirBPziOc0D2/3KH1P+6jK5nGWe96kadZuE4jev2/Jq7FvIfTlT2Ltg8Fv2x1v0a5g==",
+    "version": "13.10.0",
+    "resolved": "https://registry.npmjs.org/@octokit/types/-/types-13.10.0.tgz",
+    "integrity": "sha512-ifLaO34EbbPj0Xgro4G5lP5asESjwHracYJvVaPIyXMuiuXLlhic3S47cBdTb+jfODkTE5YtGCLt3Ay3+J97sA==",
+    "license": "MIT",
    "dependencies": {
-      "@octokit/openapi-types": "^22.2.0"
+      "@octokit/openapi-types": "^24.2.0"
    }
  },
+  "node_modules/@octokit/request/node_modules/universal-user-agent": {
+    "version": "6.0.1",
+    "resolved": "https://registry.npmjs.org/universal-user-agent/-/universal-user-agent-6.0.1.tgz",
+    "integrity": "sha512-yCzhz6FN2wU1NiiQRogkTQszlQSlpWaw8SvVegAc+bDxbzHgh1vX8uIe8OYyMH6DwH+sdTJsgMl36+mSMdRJIQ==",
+    "license": "ISC"
+  },
   "node_modules/@octokit/rest": {
     "version": "20.0.2",
     "resolved": "https://registry.npmjs.org/@octokit/rest/-/rest-20.0.2.tgz",
@@ -13351,9 +13301,10 @@
     "license": "MIT"
   },
   "node_modules/@types/express": {
-    "version": "4.17.21",
-    "resolved": "https://registry.npmjs.org/@types/express/-/express-4.17.21.tgz",
-    "integrity": "sha512-ejlPM315qwLpaQlQDTjPdsUFSc6ZsP4AN6AlWnogPjQ7CVi7PYF3YVz+CY3jE2pwYf7E/7HlDAN0rV2GxTG0HQ==",
+    "version": "4.17.23",
+    "resolved": "https://registry.npmjs.org/@types/express/-/express-4.17.23.tgz",
+    "integrity": "sha512-Crp6WY9aTYP3qPi2wGDo9iUe/rceX01UMhnF1jmwDcKCFM6cx7YhGP/Mpr3y9AASpfHixIG0E6azCcL5OcDHsQ==",
+    "license": "MIT",
     "dependencies": {
       "@types/body-parser": "*",
       "@types/express-serve-static-core": "^4.17.33",
@@ -13523,9 +13474,10 @@
     }
   },
   "node_modules/@types/passport": {
-    "version": "1.0.16",
-    "resolved": "https://registry.npmjs.org/@types/passport/-/passport-1.0.16.tgz",
-    "integrity": "sha512-FD0qD5hbPWQzaM0wHUnJ/T0BBCJBxCeemtnCwc/ThhTg3x9jfrAcRUmj5Dopza+MfFS9acTe3wk7rcVnRIp/0A==",
+    "version": "1.0.17",
+    "resolved": "https://registry.npmjs.org/@types/passport/-/passport-1.0.17.tgz",
+    "integrity": "sha512-aciLyx+wDwT2t2/kJGJR2AEeBz0nJU4WuRX04Wu9Dqc5lSUtwu0WERPHYsLhF9PtseiAMPBGNUOtFjxZ56prsg==",
+    "license": "MIT",
     "dependencies": {
       "@types/express": "*"
     }
@@ -18287,7 +18239,8 @@
   "node_modules/fast-content-type-parse": {
     "version": "1.1.0",
     "resolved": "https://registry.npmjs.org/fast-content-type-parse/-/fast-content-type-parse-1.1.0.tgz",
-    "integrity": "sha512-fBHHqSTFLVnR61C+gltJuE5GkVQMV0S2nqUO8TJ+5Z3qAKG8vAx4FKai1s5jq/inV1+sREynIWSuQ6HgoSXpDQ=="
+    "integrity": "sha512-fBHHqSTFLVnR61C+gltJuE5GkVQMV0S2nqUO8TJ+5Z3qAKG8vAx4FKai1s5jq/inV1+sREynIWSuQ6HgoSXpDQ==",
+    "license": "MIT"
   },
   "node_modules/fast-copy": {
     "version": "3.0.1",
@@ -24775,6 +24728,12 @@
       "jsonwebtoken": "^9.0.2"
     }
   },
+  "node_modules/octokit-auth-probot/node_modules/universal-user-agent": {
+    "version": "6.0.1",
+    "resolved": "https://registry.npmjs.org/universal-user-agent/-/universal-user-agent-6.0.1.tgz",
+    "integrity": "sha512-yCzhz6FN2wU1NiiQRogkTQszlQSlpWaw8SvVegAc+bDxbzHgh1vX8uIe8OYyMH6DwH+sdTJsgMl36+mSMdRJIQ==",
+    "license": "ISC"
+  },
   "node_modules/odbc": {
     "version": "2.4.9",
     "resolved": "https://registry.npmjs.org/odbc/-/odbc-2.4.9.tgz",
@@ -30704,9 +30663,10 @@
     "integrity": "sha512-G5o6f95b5BggDGuUfKDApKaCgNYy2x7OdHY0zSMF081O0EJobw+1130VONhrA7ezGSV2FNOGyM+KQpQZAr9bIQ=="
   },
   "node_modules/universal-user-agent": {
-    "version": "6.0.1",
-    "resolved": "https://registry.npmjs.org/universal-user-agent/-/universal-user-agent-6.0.1.tgz",
-    "integrity": "sha512-yCzhz6FN2wU1NiiQRogkTQszlQSlpWaw8SvVegAc+bDxbzHgh1vX8uIe8OYyMH6DwH+sdTJsgMl36+mSMdRJIQ=="
+    "version": "7.0.3",
+    "resolved": "https://registry.npmjs.org/universal-user-agent/-/universal-user-agent-7.0.3.tgz",
+    "integrity": "sha512-TmnEAEAsBJVZM/AADELsK76llnwcf9vMKuPz8JflO1frO8Lchitr0fNaN9d+Ap0BjKtqWqd/J17qeDnXh8CL2A==",
+    "license": "ISC"
   },
   "node_modules/universalify": {
     "version": "2.0.1",
@@ -31953,9 +31913,9 @@
     "license": "MIT"
   },
   "node_modules/xml-crypto": {
-    "version": "6.0.1",
-    "resolved": "https://registry.npmjs.org/xml-crypto/-/xml-crypto-6.0.1.tgz",
-    "integrity": "sha512-v05aU7NS03z4jlZ0iZGRFeZsuKO1UfEbbYiaeRMiATBFs6Jq9+wqKquEMTn4UTrYZ9iGD8yz3KT4L9o2iF682w==",
+    "version": "6.1.2",
+    "resolved": "https://registry.npmjs.org/xml-crypto/-/xml-crypto-6.1.2.tgz",
+    "integrity": "sha512-leBOVQdVi8FvPJrMYoum7Ici9qyxfE4kVi+AkpUoYCSXaQF4IlBm1cneTK9oAxR61LpYxTx7lNcsnBIeRpGW2w==",
     "license": "MIT",
     "dependencies": {
       "@xmldom/is-dom-node": "^1.0.1",
@@ -153,11 +153,12 @@
     "@gitbeaker/rest": "^42.5.0",
     "@google-cloud/kms": "^4.5.0",
     "@infisical/quic": "^1.0.8",
-    "@node-saml/passport-saml": "^5.0.1",
+    "@node-saml/passport-saml": "^5.1.0",
     "@octokit/auth-app": "^7.1.1",
     "@octokit/core": "^5.2.1",
     "@octokit/plugin-paginate-graphql": "^4.0.1",
     "@octokit/plugin-retry": "^5.0.5",
+    "@octokit/request": "8.4.1",
     "@octokit/rest": "^20.0.2",
     "@octokit/webhooks-types": "^7.3.1",
     "@octopusdeploy/api-client": "^3.4.1",
@@ -99,6 +99,7 @@ const main = async () => {
       (el) =>
         !el.tableName.includes("_migrations") &&
         !el.tableName.includes("audit_logs_") &&
+        !el.tableName.includes("active_locks") &&
         el.tableName !== "intermediate_audit_logs"
     );
backend/src/@types/fastify.d.ts (vendored, 6 lines)
@@ -12,10 +12,13 @@ import { TCertificateAuthorityCrlServiceFactory } from "@app/ee/services/certifi
 import { TCertificateEstServiceFactory } from "@app/ee/services/certificate-est/certificate-est-service";
 import { TDynamicSecretServiceFactory } from "@app/ee/services/dynamic-secret/dynamic-secret-types";
 import { TDynamicSecretLeaseServiceFactory } from "@app/ee/services/dynamic-secret-lease/dynamic-secret-lease-types";
+import { TEventBusService } from "@app/ee/services/event/event-bus-service";
+import { TServerSentEventsService } from "@app/ee/services/event/event-sse-service";
 import { TExternalKmsServiceFactory } from "@app/ee/services/external-kms/external-kms-service";
 import { TGatewayServiceFactory } from "@app/ee/services/gateway/gateway-service";
 import { TGithubOrgSyncServiceFactory } from "@app/ee/services/github-org-sync/github-org-sync-service";
 import { TGroupServiceFactory } from "@app/ee/services/group/group-service";
+import { TIdentityAuthTemplateServiceFactory } from "@app/ee/services/identity-auth-template";
 import { TIdentityProjectAdditionalPrivilegeServiceFactory } from "@app/ee/services/identity-project-additional-privilege/identity-project-additional-privilege-service";
 import { TIdentityProjectAdditionalPrivilegeV2ServiceFactory } from "@app/ee/services/identity-project-additional-privilege-v2/identity-project-additional-privilege-v2-service";
 import { TKmipClientDALFactory } from "@app/ee/services/kmip/kmip-client-dal";
@@ -296,6 +299,9 @@ declare module "fastify" {
     internalCertificateAuthority: TInternalCertificateAuthorityServiceFactory;
     pkiTemplate: TPkiTemplatesServiceFactory;
     reminder: TReminderServiceFactory;
+    bus: TEventBusService;
+    sse: TServerSentEventsService;
+    identityAuthTemplate: TIdentityAuthTemplateServiceFactory;
   };
   // this is exclusive use for middlewares in which we need to inject data
   // everywhere else access using service layer
backend/src/@types/knex.d.ts (vendored, 10 lines)
@@ -494,6 +494,11 @@ import {
   TAccessApprovalPoliciesEnvironmentsInsert,
   TAccessApprovalPoliciesEnvironmentsUpdate
 } from "@app/db/schemas/access-approval-policies-environments";
+import {
+  TIdentityAuthTemplates,
+  TIdentityAuthTemplatesInsert,
+  TIdentityAuthTemplatesUpdate
+} from "@app/db/schemas/identity-auth-templates";
 import {
   TIdentityLdapAuths,
   TIdentityLdapAuthsInsert,
@@ -878,6 +883,11 @@ declare module "knex/types/tables" {
       TIdentityProjectAdditionalPrivilegeInsert,
       TIdentityProjectAdditionalPrivilegeUpdate
     >;
+    [TableName.IdentityAuthTemplate]: KnexOriginal.CompositeTableType<
+      TIdentityAuthTemplates,
+      TIdentityAuthTemplatesInsert,
+      TIdentityAuthTemplatesUpdate
+    >;

     [TableName.AccessApprovalPolicy]: KnexOriginal.CompositeTableType<
       TAccessApprovalPolicies,
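Registering the table under CompositeTableType is what makes Knex queries against it typed end to end. A minimal usage sketch, assuming a configured Knex instance; the query values are illustrative, not taken from this diff:

import { Knex } from "knex";

import { TableName } from "@app/db/schemas";

// Sketch: selects resolve to TIdentityAuthTemplates, inserts are checked
// against TIdentityAuthTemplatesInsert, and updates against ...Update.
async function listTemplatesForOrg(db: Knex, orgId: string) {
  return db(TableName.IdentityAuthTemplate).where({ orgId }).select("*");
}

async function createLdapTemplate(db: Knex, orgId: string) {
  await db(TableName.IdentityAuthTemplate).insert({
    orgId,
    name: "ldap-default", // illustrative value
    authMethod: "ldap-auth", // illustrative value
    templateFields: Buffer.from(JSON.stringify({ url: "ldaps://example.com" }))
  });
}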
backend/src/db/migrations/20250723220500_remove-srp.ts (new file, 18 lines)
@@ -0,0 +1,18 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  await knex.schema.alterTable(TableName.UserEncryptionKey, (table) => {
    table.text("encryptedPrivateKey").nullable().alter();
    table.text("publicKey").nullable().alter();
    table.text("iv").nullable().alter();
    table.text("tag").nullable().alter();
    table.text("salt").nullable().alter();
    table.text("verifier").nullable().alter();
  });
}

export async function down(): Promise<void> {
  // do nothing for now to avoid breaking down migrations
}
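This file follows the standard Knex migration shape, so it runs through Knex's migrate API (or the knex CLI). A minimal programmatic sketch, assuming a Postgres connection string in an environment variable:

import knex from "knex";

const db = knex({
  client: "pg",
  connection: process.env.DB_CONNECTION_URI, // assumed env var
  migrations: { directory: "./src/db/migrations" }
});

async function run() {
  // migrate.latest() applies every pending migration in timestamp order,
  // including 20250723220500_remove-srp.ts above.
  const [batchNo, applied] = await db.migrate.latest();
  console.log(`batch ${batchNo}:`, applied);
  // Rolling the batch back is a no-op for remove-srp, since its down()
  // intentionally does nothing: the columns stay nullable once applied.
  await db.destroy();
}

run().catch(console.error);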
@@ -0,0 +1,19 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  if (!(await knex.schema.hasColumn(TableName.Reminder, "fromDate"))) {
    await knex.schema.alterTable(TableName.Reminder, (t) => {
      t.timestamp("fromDate", { useTz: true }).nullable();
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  if (await knex.schema.hasColumn(TableName.Reminder, "fromDate")) {
    await knex.schema.alterTable(TableName.Reminder, (t) => {
      t.dropColumn("fromDate");
    });
  }
}
@@ -0,0 +1,36 @@
import { Knex } from "knex";

import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";

export async function up(knex: Knex): Promise<void> {
  if (!(await knex.schema.hasTable(TableName.IdentityAuthTemplate))) {
    await knex.schema.createTable(TableName.IdentityAuthTemplate, (t) => {
      t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
      t.binary("templateFields").notNullable();
      t.uuid("orgId").notNullable();
      t.foreign("orgId").references("id").inTable(TableName.Organization).onDelete("CASCADE");
      t.string("name", 64).notNullable();
      t.string("authMethod").notNullable();
      t.timestamps(true, true, true);
    });
    await createOnUpdateTrigger(knex, TableName.IdentityAuthTemplate);
  }
  if (!(await knex.schema.hasColumn(TableName.IdentityLdapAuth, "templateId"))) {
    await knex.schema.alterTable(TableName.IdentityLdapAuth, (t) => {
      t.uuid("templateId").nullable();
      t.foreign("templateId").references("id").inTable(TableName.IdentityAuthTemplate).onDelete("SET NULL");
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  if (await knex.schema.hasColumn(TableName.IdentityLdapAuth, "templateId")) {
    await knex.schema.alterTable(TableName.IdentityLdapAuth, (t) => {
      t.dropForeign(["templateId"]);
      t.dropColumn("templateId");
    });
  }
  await knex.schema.dropTableIfExists(TableName.IdentityAuthTemplate);
  await dropOnUpdateTrigger(knex, TableName.IdentityAuthTemplate);
}
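Note the two delete behaviors above: deleting an organization cascades away its templates, while deleting a template only nulls out templateId on referencing LDAP auth rows. A small sketch of what that SET NULL semantics means in practice; the helper name and wiring are illustrative:

import { Knex } from "knex";

import { TableName } from "@app/db/schemas";

async function deleteTemplate(db: Knex, templateId: string) {
  // Because the FK was declared with onDelete("SET NULL"), this delete does
  // not remove identity_ldap_auth rows; their templateId simply becomes null.
  await db(TableName.IdentityAuthTemplate).where({ id: templateId }).delete();
}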
@@ -0,0 +1,65 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  const lastUserLoggedInAuthMethod = await knex.schema.hasColumn(TableName.OrgMembership, "lastLoginAuthMethod");
  const lastIdentityLoggedInAuthMethod = await knex.schema.hasColumn(
    TableName.IdentityOrgMembership,
    "lastLoginAuthMethod"
  );
  const lastUserLoggedInTime = await knex.schema.hasColumn(TableName.OrgMembership, "lastLoginTime");
  const lastIdentityLoggedInTime = await knex.schema.hasColumn(TableName.IdentityOrgMembership, "lastLoginTime");
  if (!lastUserLoggedInAuthMethod || !lastUserLoggedInTime) {
    await knex.schema.alterTable(TableName.OrgMembership, (t) => {
      if (!lastUserLoggedInAuthMethod) {
        t.string("lastLoginAuthMethod").nullable();
      }
      if (!lastUserLoggedInTime) {
        t.datetime("lastLoginTime").nullable();
      }
    });
  }

  if (!lastIdentityLoggedInAuthMethod || !lastIdentityLoggedInTime) {
    await knex.schema.alterTable(TableName.IdentityOrgMembership, (t) => {
      if (!lastIdentityLoggedInAuthMethod) {
        t.string("lastLoginAuthMethod").nullable();
      }
      if (!lastIdentityLoggedInTime) {
        t.datetime("lastLoginTime").nullable();
      }
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  const lastUserLoggedInAuthMethod = await knex.schema.hasColumn(TableName.OrgMembership, "lastLoginAuthMethod");
  const lastIdentityLoggedInAuthMethod = await knex.schema.hasColumn(
    TableName.IdentityOrgMembership,
    "lastLoginAuthMethod"
  );
  const lastUserLoggedInTime = await knex.schema.hasColumn(TableName.OrgMembership, "lastLoginTime");
  const lastIdentityLoggedInTime = await knex.schema.hasColumn(TableName.IdentityOrgMembership, "lastLoginTime");
  if (lastUserLoggedInAuthMethod || lastUserLoggedInTime) {
    await knex.schema.alterTable(TableName.OrgMembership, (t) => {
      if (lastUserLoggedInAuthMethod) {
        t.dropColumn("lastLoginAuthMethod");
      }
      if (lastUserLoggedInTime) {
        t.dropColumn("lastLoginTime");
      }
    });
  }

  if (lastIdentityLoggedInAuthMethod || lastIdentityLoggedInTime) {
    await knex.schema.alterTable(TableName.IdentityOrgMembership, (t) => {
      if (lastIdentityLoggedInAuthMethod) {
        t.dropColumn("lastLoginAuthMethod");
      }
      if (lastIdentityLoggedInTime) {
        t.dropColumn("lastLoginTime");
      }
    });
  }
}
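These columns presumably get stamped at authentication time; the diff itself does not show that write, so the following is only a hedged sketch of what such an update could look like (the function name and call site are assumptions):

import { Knex } from "knex";

import { TableName } from "@app/db/schemas";

// Hypothetical: record the method and time of a successful user login on the
// org membership row. Machine identities would use
// TableName.IdentityOrgMembership the same way.
async function recordLastLogin(db: Knex, orgMembershipId: string, authMethod: string) {
  await db(TableName.OrgMembership)
    .where({ id: orgMembershipId })
    .update({ lastLoginAuthMethod: authMethod, lastLoginTime: new Date() });
}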
backend/src/db/schemas/identity-auth-templates.ts (new file, 24 lines)
@@ -0,0 +1,24 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.

import { z } from "zod";

import { zodBuffer } from "@app/lib/zod";

import { TImmutableDBKeys } from "./models";

export const IdentityAuthTemplatesSchema = z.object({
  id: z.string().uuid(),
  templateFields: zodBuffer,
  orgId: z.string().uuid(),
  name: z.string(),
  authMethod: z.string(),
  createdAt: z.date(),
  updatedAt: z.date()
});

export type TIdentityAuthTemplates = z.infer<typeof IdentityAuthTemplatesSchema>;
export type TIdentityAuthTemplatesInsert = Omit<z.input<typeof IdentityAuthTemplatesSchema>, TImmutableDBKeys>;
export type TIdentityAuthTemplatesUpdate = Partial<Omit<z.input<typeof IdentityAuthTemplatesSchema>, TImmutableDBKeys>>;
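Because the schema is generated from the live table, it doubles as a runtime validator for rows coming back from the database. A short usage sketch; the row-fetching helper is hypothetical:

import { IdentityAuthTemplatesSchema } from "@app/db/schemas/identity-auth-templates";

// Validate an untrusted row; parse() throws if a column is missing or mistyped.
// `rawRow` stands in for a row fetched via Knex.
function toTemplate(rawRow: unknown) {
  const template = IdentityAuthTemplatesSchema.parse(rawRow);
  // templateFields passed zodBuffer, so it is a Buffer here:
  const fields = JSON.parse(template.templateFields.toString("utf8"));
  return { ...template, fields };
}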
@@ -25,7 +25,8 @@ export const IdentityLdapAuthsSchema = z.object({
   allowedFields: z.unknown().nullable().optional(),
   createdAt: z.date(),
   updatedAt: z.date(),
-  accessTokenPeriod: z.coerce.number().default(0)
+  accessTokenPeriod: z.coerce.number().default(0),
+  templateId: z.string().uuid().nullable().optional()
 });

 export type TIdentityLdapAuths = z.infer<typeof IdentityLdapAuthsSchema>;
@@ -14,7 +14,9 @@ export const IdentityOrgMembershipsSchema = z.object({
   orgId: z.string().uuid(),
   createdAt: z.date(),
   updatedAt: z.date(),
-  identityId: z.string().uuid()
+  identityId: z.string().uuid(),
+  lastLoginAuthMethod: z.string().nullable().optional(),
+  lastLoginTime: z.date().nullable().optional()
 });

 export type TIdentityOrgMemberships = z.infer<typeof IdentityOrgMembershipsSchema>;
@@ -91,6 +91,7 @@ export enum TableName {
   IdentityProjectMembership = "identity_project_memberships",
   IdentityProjectMembershipRole = "identity_project_membership_role",
   IdentityProjectAdditionalPrivilege = "identity_project_additional_privilege",
+  IdentityAuthTemplate = "identity_auth_templates",
   // used by both identity and users
   IdentityMetadata = "identity_metadata",
   ResourceMetadata = "resource_metadata",
@@ -19,7 +19,9 @@ export const OrgMembershipsSchema = z.object({
   roleId: z.string().uuid().nullable().optional(),
   projectFavorites: z.string().array().nullable().optional(),
   isActive: z.boolean().default(true),
-  lastInvitedAt: z.date().nullable().optional()
+  lastInvitedAt: z.date().nullable().optional(),
+  lastLoginAuthMethod: z.string().nullable().optional(),
+  lastLoginTime: z.date().nullable().optional()
 });

 export type TOrgMemberships = z.infer<typeof OrgMembershipsSchema>;
@@ -14,7 +14,8 @@ export const RemindersSchema = z.object({
   repeatDays: z.number().nullable().optional(),
   nextReminderDate: z.date(),
   createdAt: z.date(),
-  updatedAt: z.date()
+  updatedAt: z.date(),
+  fromDate: z.date().nullable().optional()
 });

 export type TReminders = z.infer<typeof RemindersSchema>;
@@ -15,12 +15,12 @@ export const UserEncryptionKeysSchema = z.object({
   protectedKey: z.string().nullable().optional(),
   protectedKeyIV: z.string().nullable().optional(),
   protectedKeyTag: z.string().nullable().optional(),
-  publicKey: z.string(),
-  encryptedPrivateKey: z.string(),
-  iv: z.string(),
-  tag: z.string(),
-  salt: z.string(),
-  verifier: z.string(),
+  publicKey: z.string().nullable().optional(),
+  encryptedPrivateKey: z.string().nullable().optional(),
+  iv: z.string().nullable().optional(),
+  tag: z.string().nullable().optional(),
+  salt: z.string().nullable().optional(),
+  verifier: z.string().nullable().optional(),
   userId: z.string().uuid(),
   hashedPassword: z.string().nullable().optional(),
   serverEncryptedPrivateKey: z.string().nullable().optional(),
@@ -115,6 +115,10 @@ export const generateUserSrpKeys = async (password: string) => {
 };

 export const getUserPrivateKey = async (password: string, user: TUserEncryptionKeys) => {
+  if (!user.encryptedPrivateKey || !user.iv || !user.tag || !user.salt) {
+    throw new Error("User encrypted private key not found");
+  }
+
   const derivedKey = await argon2.hash(password, {
     salt: Buffer.from(user.salt),
     memoryCost: 65536,
@@ -1,7 +1,7 @@
 import { Knex } from "knex";

-import { crypto } from "@app/lib/crypto";
-import { initLogger } from "@app/lib/logger";
+import { initEnvConfig } from "@app/lib/config/env";
+import { initLogger, logger } from "@app/lib/logger";
 import { superAdminDALFactory } from "@app/services/super-admin/super-admin-dal";

 import { AuthMethod } from "../../services/auth/auth-type";
@@ -17,7 +17,7 @@ export async function seed(knex: Knex): Promise<void> {
   initLogger();

   const superAdminDAL = superAdminDALFactory(knex);
-  await crypto.initialize(superAdminDAL);
+  await initEnvConfig(superAdminDAL, logger);

   await knex(TableName.SuperAdmin).insert([
     // eslint-disable-next-line
@@ -25,6 +25,7 @@ export async function seed(knex: Knex): Promise<void> {
     { id: "00000000-0000-0000-0000-000000000000", initialized: true, allowSignUp: true }
   ]);
   // Inserts seed entries
+
   const [user] = await knex(TableName.Users)
     .insert([
       {
@@ -1,9 +1,28 @@
 import { Knex } from "knex";

+import { initEnvConfig } from "@app/lib/config/env";
+import { crypto, SymmetricKeySize } from "@app/lib/crypto/cryptography";
+import { generateUserSrpKeys } from "@app/lib/crypto/srp";
+import { initLogger, logger } from "@app/lib/logger";
+import { alphaNumericNanoId } from "@app/lib/nanoid";
+import { AuthMethod } from "@app/services/auth/auth-type";
+import { assignWorkspaceKeysToMembers, createProjectKey } from "@app/services/project/project-fns";
+import { projectKeyDALFactory } from "@app/services/project-key/project-key-dal";
+import { projectMembershipDALFactory } from "@app/services/project-membership/project-membership-dal";
+import { projectUserMembershipRoleDALFactory } from "@app/services/project-membership/project-user-membership-role-dal";
+import { superAdminDALFactory } from "@app/services/super-admin/super-admin-dal";
+import { userDALFactory } from "@app/services/user/user-dal";
+
-import { ProjectMembershipRole, ProjectType, SecretEncryptionAlgo, SecretKeyEncoding, TableName } from "../schemas";
-import { buildUserProjectKey, getUserPrivateKey, seedData1 } from "../seed-data";
+import {
+  OrgMembershipRole,
+  OrgMembershipStatus,
+  ProjectMembershipRole,
+  ProjectType,
+  SecretEncryptionAlgo,
+  SecretKeyEncoding,
+  TableName
+} from "../schemas";
+import { seedData1 } from "../seed-data";

 export const DEFAULT_PROJECT_ENVS = [
   { name: "Development", slug: "dev" },
@@ -11,12 +30,159 @@ export const DEFAULT_PROJECT_ENVS = [
   { name: "Production", slug: "prod" }
 ];

+const createUserWithGhostUser = async (
+  orgId: string,
+  projectId: string,
+  userId: string,
+  userOrgMembershipId: string,
+  knex: Knex
+) => {
+  const projectKeyDAL = projectKeyDALFactory(knex);
+  const userDAL = userDALFactory(knex);
+  const projectMembershipDAL = projectMembershipDALFactory(knex);
+  const projectUserMembershipRoleDAL = projectUserMembershipRoleDALFactory(knex);
+
+  const email = `sudo-${alphaNumericNanoId(16)}-${orgId}@infisical.com`; // We add a nanoid because the email is unique. And we have to create a new ghost user each time, so we can have access to the private key.
+
+  const password = crypto.randomBytes(128).toString("hex");
+
+  const [ghostUser] = await knex(TableName.Users)
+    .insert({
+      isGhost: true,
+      authMethods: [AuthMethod.EMAIL],
+      username: email,
+      email,
+      isAccepted: true
+    })
+    .returning("*");
+
+  const encKeys = await generateUserSrpKeys(email, password);
+
+  await knex(TableName.UserEncryptionKey)
+    .insert({ userId: ghostUser.id, encryptionVersion: 2, publicKey: encKeys.publicKey })
+    .onConflict("userId")
+    .merge();
+
+  await knex(TableName.OrgMembership)
+    .insert({
+      orgId,
+      userId: ghostUser.id,
+      role: OrgMembershipRole.Admin,
+      status: OrgMembershipStatus.Accepted,
+      isActive: true
+    })
+    .returning("*");
+
+  const [projectMembership] = await knex(TableName.ProjectMembership)
+    .insert({
+      userId: ghostUser.id,
+      projectId
+    })
+    .returning("*");
+
+  await knex(TableName.ProjectUserMembershipRole).insert({
+    projectMembershipId: projectMembership.id,
+    role: ProjectMembershipRole.Admin
+  });
+
+  const { key: encryptedProjectKey, iv: encryptedProjectKeyIv } = createProjectKey({
+    publicKey: encKeys.publicKey,
+    privateKey: encKeys.plainPrivateKey
+  });
+
+  await knex(TableName.ProjectKeys).insert({
+    projectId,
+    receiverId: ghostUser.id,
+    encryptedKey: encryptedProjectKey,
+    nonce: encryptedProjectKeyIv,
+    senderId: ghostUser.id
+  });
+
+  const { iv, tag, ciphertext, encoding, algorithm } = crypto
+    .encryption()
+    .symmetric()
+    .encryptWithRootEncryptionKey(encKeys.plainPrivateKey);
+
+  await knex(TableName.ProjectBot).insert({
+    name: "Infisical Bot (Ghost)",
+    projectId,
+    tag,
+    iv,
+    encryptedProjectKey,
+    encryptedProjectKeyNonce: encryptedProjectKeyIv,
+    encryptedPrivateKey: ciphertext,
+    isActive: true,
+    publicKey: encKeys.publicKey,
+    senderId: ghostUser.id,
+    algorithm,
+    keyEncoding: encoding
+  });
+
+  const latestKey = await projectKeyDAL.findLatestProjectKey(ghostUser.id, projectId, knex);
+
+  if (!latestKey) {
+    throw new Error("Latest key not found for user");
+  }
+
+  const user = await userDAL.findUserEncKeyByUserId(userId, knex);
+
+  if (!user || !user.publicKey) {
+    throw new Error("User not found");
+  }
+
+  const [projectAdmin] = assignWorkspaceKeysToMembers({
+    decryptKey: latestKey,
+    userPrivateKey: encKeys.plainPrivateKey,
+    members: [
+      {
+        userPublicKey: user.publicKey,
+        orgMembershipId: userOrgMembershipId
+      }
+    ]
+  });
+
+  // Create a membership for the user
+  const userProjectMembership = await projectMembershipDAL.create(
+    {
+      projectId,
+      userId: user.id
+    },
+    knex
+  );
+  await projectUserMembershipRoleDAL.create(
+    { projectMembershipId: userProjectMembership.id, role: ProjectMembershipRole.Admin },
+    knex
+  );
+
+  // Create a project key for the user
+  await projectKeyDAL.create(
+    {
+      encryptedKey: projectAdmin.workspaceEncryptedKey,
+      nonce: projectAdmin.workspaceEncryptedNonce,
+      senderId: ghostUser.id,
+      receiverId: user.id,
+      projectId
+    },
+    knex
+  );
+
+  return {
+    user: ghostUser,
+    keys: encKeys
+  };
+};
+
 export async function seed(knex: Knex): Promise<void> {
   // Deletes ALL existing entries
   await knex(TableName.Project).del();
   await knex(TableName.Environment).del();
   await knex(TableName.SecretFolder).del();

+  initLogger();
+
+  const superAdminDAL = superAdminDALFactory(knex);
+  await initEnvConfig(superAdminDAL, logger);
+
   const [project] = await knex(TableName.Project)
     .insert({
       name: seedData1.project.name,
@@ -29,29 +195,24 @@ export async function seed(knex: Knex): Promise<void> {
     })
     .returning("*");

-  const projectMembership = await knex(TableName.ProjectMembership)
-    .insert({
-      projectId: project.id,
-      userId: seedData1.id
-    })
-    .returning("*");
-  await knex(TableName.ProjectUserMembershipRole).insert({
-    role: ProjectMembershipRole.Admin,
-    projectMembershipId: projectMembership[0].id
-  });
+  const userOrgMembership = await knex(TableName.OrgMembership)
+    .where({
+      orgId: seedData1.organization.id,
+      userId: seedData1.id
+    })
+    .first();
+
+  if (!userOrgMembership) {
+    throw new Error("User org membership not found");
+  }

   const user = await knex(TableName.UserEncryptionKey).where({ userId: seedData1.id }).first();
   if (!user) throw new Error("User not found");

-  const userPrivateKey = await getUserPrivateKey(seedData1.password, user);
-  const projectKey = buildUserProjectKey(userPrivateKey, user.publicKey);
-  await knex(TableName.ProjectKeys).insert({
-    projectId: project.id,
-    nonce: projectKey.nonce,
-    encryptedKey: projectKey.ciphertext,
-    receiverId: seedData1.id,
-    senderId: seedData1.id
-  });
+  if (!user.publicKey) {
+    throw new Error("User public key not found");
+  }
+
+  await createUserWithGhostUser(seedData1.organization.id, project.id, seedData1.id, userOrgMembership.id, knex);

   // create default environments and default folders
   const envs = await knex(TableName.Environment)
@@ -1,6 +1,9 @@
 import { Knex } from "knex";

+import { initEnvConfig } from "@app/lib/config/env";
 import { crypto } from "@app/lib/crypto/cryptography";
+import { initLogger, logger } from "@app/lib/logger";
+import { superAdminDALFactory } from "@app/services/super-admin/super-admin-dal";

 import { IdentityAuthMethod, OrgMembershipRole, ProjectMembershipRole, TableName } from "../schemas";
 import { seedData1 } from "../seed-data";
@@ -10,6 +13,11 @@ export async function seed(knex: Knex): Promise<void> {
   await knex(TableName.Identity).del();
   await knex(TableName.IdentityOrgMembership).del();

+  initLogger();
+
+  const superAdminDAL = superAdminDALFactory(knex);
+  await initEnvConfig(superAdminDAL, logger);
+
   // Inserts seed entries
   await knex(TableName.Identity).insert([
     {
backend/src/ee/routes/v1/identity-template-router.ts (new file, 391 lines)
@@ -0,0 +1,391 @@
import { z } from "zod";

import { IdentityAuthTemplatesSchema } from "@app/db/schemas/identity-auth-templates";
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import {
  IdentityAuthTemplateMethod,
  TEMPLATE_SUCCESS_MESSAGES,
  TEMPLATE_VALIDATION_MESSAGES
} from "@app/ee/services/identity-auth-template/identity-auth-template-enums";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";

const ldapTemplateFieldsSchema = z.object({
  url: z.string().min(1, TEMPLATE_VALIDATION_MESSAGES.LDAP.URL_REQUIRED),
  bindDN: z.string().min(1, TEMPLATE_VALIDATION_MESSAGES.LDAP.BIND_DN_REQUIRED),
  bindPass: z.string().min(1, TEMPLATE_VALIDATION_MESSAGES.LDAP.BIND_PASSWORD_REQUIRED),
  searchBase: z.string().min(1, TEMPLATE_VALIDATION_MESSAGES.LDAP.SEARCH_BASE_REQUIRED),
  ldapCaCertificate: z.string().trim().optional()
});

export const registerIdentityTemplateRouter = async (server: FastifyZodProvider) => {
  server.route({
    method: "POST",
    url: "/",
    config: {
      rateLimit: writeLimit
    },
    onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
    schema: {
      hide: false,
      description: "Create identity auth template",
      security: [
        {
          bearerAuth: []
        }
      ],
      body: z.object({
        name: z
          .string()
          .trim()
          .min(1, TEMPLATE_VALIDATION_MESSAGES.TEMPLATE_NAME_REQUIRED)
          .max(64, TEMPLATE_VALIDATION_MESSAGES.TEMPLATE_NAME_MAX_LENGTH),
        authMethod: z.nativeEnum(IdentityAuthTemplateMethod),
        templateFields: ldapTemplateFieldsSchema
      }),
      response: {
        200: IdentityAuthTemplatesSchema.extend({
          templateFields: z.record(z.string(), z.unknown())
        })
      }
    },
    handler: async (req) => {
      const template = await server.services.identityAuthTemplate.createTemplate({
        actor: req.permission.type,
        actorId: req.permission.id,
        actorAuthMethod: req.permission.authMethod,
        actorOrgId: req.permission.orgId,
        name: req.body.name,
        authMethod: req.body.authMethod,
        templateFields: req.body.templateFields
      });

      await server.services.auditLog.createAuditLog({
        ...req.auditLogInfo,
        orgId: req.permission.orgId,
        event: {
          type: EventType.MACHINE_IDENTITY_AUTH_TEMPLATE_CREATE,
          metadata: {
            templateId: template.id,
            name: template.name
          }
        }
      });

      return template;
    }
  });

  server.route({
    method: "PATCH",
    url: "/:templateId",
    config: {
      rateLimit: writeLimit
    },
    onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
    schema: {
      hide: false,
      description: "Update identity auth template",
      security: [
        {
          bearerAuth: []
        }
      ],
      params: z.object({
        templateId: z.string().min(1, TEMPLATE_VALIDATION_MESSAGES.TEMPLATE_ID_REQUIRED)
      }),
      body: z.object({
        name: z
          .string()
          .trim()
          .min(1, TEMPLATE_VALIDATION_MESSAGES.TEMPLATE_NAME_REQUIRED)
          .max(64, TEMPLATE_VALIDATION_MESSAGES.TEMPLATE_NAME_MAX_LENGTH)
          .optional(),
        templateFields: ldapTemplateFieldsSchema.partial().optional()
      }),
      response: {
        200: IdentityAuthTemplatesSchema.extend({
          templateFields: z.record(z.string(), z.unknown())
        })
      }
    },
    handler: async (req) => {
      const template = await server.services.identityAuthTemplate.updateTemplate({
        templateId: req.params.templateId,
        name: req.body.name,
        templateFields: req.body.templateFields,
        actor: req.permission.type,
        actorId: req.permission.id,
        actorAuthMethod: req.permission.authMethod,
        actorOrgId: req.permission.orgId
      });

      await server.services.auditLog.createAuditLog({
        ...req.auditLogInfo,
        orgId: req.permission.orgId,
        event: {
          type: EventType.MACHINE_IDENTITY_AUTH_TEMPLATE_UPDATE,
          metadata: {
            templateId: template.id,
            name: template.name
          }
        }
      });

      return template;
    }
  });

  server.route({
    method: "DELETE",
    url: "/:templateId",
    config: {
      rateLimit: writeLimit
    },
    onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
    schema: {
      hide: false,
      description: "Delete identity auth template",
      security: [
        {
          bearerAuth: []
        }
      ],
      params: z.object({
        templateId: z.string().min(1, TEMPLATE_VALIDATION_MESSAGES.TEMPLATE_ID_REQUIRED)
      }),
      response: {
        200: z.object({
          message: z.string()
        })
      }
    },
    handler: async (req) => {
      const template = await server.services.identityAuthTemplate.deleteTemplate({
        templateId: req.params.templateId,
        actor: req.permission.type,
        actorId: req.permission.id,
        actorAuthMethod: req.permission.authMethod,
        actorOrgId: req.permission.orgId
      });

      await server.services.auditLog.createAuditLog({
        ...req.auditLogInfo,
        orgId: req.permission.orgId,
        event: {
          type: EventType.MACHINE_IDENTITY_AUTH_TEMPLATE_DELETE,
          metadata: {
            templateId: template.id,
            name: template.name
          }
        }
      });

      return { message: TEMPLATE_SUCCESS_MESSAGES.DELETED };
    }
  });

  server.route({
    method: "GET",
    url: "/:templateId",
    config: {
      rateLimit: readLimit
    },
    onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
    schema: {
      hide: false,
      description: "Get identity auth template by ID",
      security: [
        {
          bearerAuth: []
        }
      ],
      params: z.object({
        templateId: z.string().min(1, TEMPLATE_VALIDATION_MESSAGES.TEMPLATE_ID_REQUIRED)
      }),
      response: {
        200: IdentityAuthTemplatesSchema.extend({
          templateFields: ldapTemplateFieldsSchema
        })
      }
    },
    handler: async (req) => {
      const template = await server.services.identityAuthTemplate.getTemplate({
        templateId: req.params.templateId,
        actor: req.permission.type,
        actorId: req.permission.id,
        actorAuthMethod: req.permission.authMethod,
        actorOrgId: req.permission.orgId
      });

      return template;
    }
  });

  server.route({
    method: "GET",
    url: "/search",
    config: {
      rateLimit: readLimit
    },
    onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
    schema: {
      hide: false,
      description: "List identity auth templates",
      security: [
        {
          bearerAuth: []
        }
      ],
      querystring: z.object({
        limit: z.coerce.number().positive().max(100).default(5).optional(),
        offset: z.coerce.number().min(0).default(0).optional(),
        search: z.string().optional()
      }),
      response: {
        200: z.object({
          templates: IdentityAuthTemplatesSchema.extend({
            templateFields: ldapTemplateFieldsSchema
          }).array(),
          totalCount: z.number()
        })
      }
    },
    handler: async (req) => {
      const { templates, totalCount } = await server.services.identityAuthTemplate.listTemplates({
        limit: req.query.limit,
        offset: req.query.offset,
        search: req.query.search,
        actor: req.permission.type,
        actorId: req.permission.id,
        actorAuthMethod: req.permission.authMethod,
        actorOrgId: req.permission.orgId
      });

      return { templates, totalCount };
    }
  });

  server.route({
    method: "GET",
    url: "/",
    config: {
      rateLimit: readLimit
    },
    onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
    schema: {
      hide: false,
      description: "Get identity auth templates by authentication method",
      security: [
        {
          bearerAuth: []
        }
      ],
      querystring: z.object({
        authMethod: z.nativeEnum(IdentityAuthTemplateMethod)
      }),
      response: {
        200: IdentityAuthTemplatesSchema.extend({
          templateFields: ldapTemplateFieldsSchema
        }).array()
      }
    },
    handler: async (req) => {
      const templates = await server.services.identityAuthTemplate.getTemplatesByAuthMethod({
        authMethod: req.query.authMethod,
        actor: req.permission.type,
        actorId: req.permission.id,
        actorAuthMethod: req.permission.authMethod,
        actorOrgId: req.permission.orgId
      });

      return templates;
    }
  });

  server.route({
    method: "GET",
    url: "/:templateId/usage",
    config: {
      rateLimit: readLimit
    },
    onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
    schema: {
      hide: false,
      description: "Get template usage by template ID",
      security: [
        {
          bearerAuth: []
        }
      ],
      params: z.object({
        templateId: z.string()
      }),
      response: {
        200: z
          .object({
            identityId: z.string(),
            identityName: z.string()
          })
          .array()
      }
    },
    handler: async (req) => {
      const templates = await server.services.identityAuthTemplate.findTemplateUsages({
        templateId: req.params.templateId,
        actor: req.permission.type,
        actorId: req.permission.id,
        actorAuthMethod: req.permission.authMethod,
        actorOrgId: req.permission.orgId
      });

      return templates;
    }
  });

  server.route({
    method: "POST",
    url: "/:templateId/delete-usage",
    config: {
      rateLimit: writeLimit
    },
    onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
    schema: {
      hide: false,
      description: "Unlink identity auth template usage",
      security: [
        {
          bearerAuth: []
        }
      ],
      params: z.object({
        templateId: z.string()
      }),
      body: z.object({
        identityIds: z.string().array()
      }),
      response: {
        200: z
          .object({
            authId: z.string(),
            identityId: z.string(),
            identityName: z.string()
          })
          .array()
      }
    },
    handler: async (req) => {
      const templates = await server.services.identityAuthTemplate.unlinkTemplateUsage({
        templateId: req.params.templateId,
        identityIds: req.body.identityIds,
        actor: req.permission.type,
        actorId: req.permission.id,
        actorAuthMethod: req.permission.authMethod,
        actorOrgId: req.permission.orgId
      });

      return templates;
    }
  });
};
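Note: a minimal sketch of how a client might exercise the POST / route above, assuming the router is mounted at the /identity-templates prefix registered later in this diff; the host, token handling, and the "ldap" serialization of IdentityAuthTemplateMethod are assumptions, not part of the diff:

// Hypothetical client call against the create-template route defined above.
const res = await fetch("https://app.infisical.com/api/v1/identity-templates", {
  method: "POST",
  headers: { "Content-Type": "application/json", Authorization: `Bearer ${token}` },
  body: JSON.stringify({
    name: "ldap-prod",
    authMethod: "ldap", // assumed enum value
    templateFields: {
      url: "ldaps://ldap.example.com",
      bindDN: "cn=admin,dc=example,dc=com",
      bindPass: "********",
      searchBase: "ou=users,dc=example,dc=com"
    }
  })
});
const template = await res.json(); // shape: IdentityAuthTemplatesSchema.extend({ templateFields })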
@@ -13,6 +13,7 @@ import { registerGatewayRouter } from "./gateway-router";
import { registerGithubOrgSyncRouter } from "./github-org-sync-router";
import { registerGroupRouter } from "./group-router";
import { registerIdentityProjectAdditionalPrivilegeRouter } from "./identity-project-additional-privilege-router";
import { registerIdentityTemplateRouter } from "./identity-template-router";
import { registerKmipRouter } from "./kmip-router";
import { registerKmipSpecRouter } from "./kmip-spec-router";
import { registerLdapRouter } from "./ldap-router";
@@ -125,6 +126,7 @@ export const registerV1EERoutes = async (server: FastifyZodProvider) => {
  await server.register(registerExternalKmsRouter, {
    prefix: "/external-kms"
  });
  await server.register(registerIdentityTemplateRouter, { prefix: "/identity-templates" });

  await server.register(registerProjectTemplateRouter, { prefix: "/project-templates" });
@@ -379,14 +379,17 @@ export const registerLdapRouter = async (server: FastifyZodProvider) => {

  server.route({
    method: "POST",
    url: "/config/:configId/test-connection",
    url: "/config/test-connection",
    config: {
      rateLimit: readLimit
    },
    onRequest: verifyAuth([AuthMode.JWT]),
    schema: {
      params: z.object({
        configId: z.string().trim()
      body: z.object({
        url: z.string().trim(),
        bindDN: z.string().trim(),
        bindPass: z.string().trim(),
        caCert: z.string().trim()
      }),
      response: {
        200: z.boolean()
@@ -399,8 +402,9 @@ export const registerLdapRouter = async (server: FastifyZodProvider) => {
        orgId: req.permission.orgId,
        actorAuthMethod: req.permission.authMethod,
        actorOrgId: req.permission.orgId,
        ldapConfigId: req.params.configId
        ...req.body
      });

      return result;
    }
  });
@@ -0,0 +1,16 @@
import { registerSecretScanningEndpoints } from "@app/ee/routes/v2/secret-scanning-v2-routers/secret-scanning-v2-endpoints";
import {
  CreateGitLabDataSourceSchema,
  GitLabDataSourceSchema,
  UpdateGitLabDataSourceSchema
} from "@app/ee/services/secret-scanning-v2/gitlab";
import { SecretScanningDataSource } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";

export const registerGitLabSecretScanningRouter = async (server: FastifyZodProvider) =>
  registerSecretScanningEndpoints({
    type: SecretScanningDataSource.GitLab,
    server,
    responseSchema: GitLabDataSourceSchema,
    createSchema: CreateGitLabDataSourceSchema,
    updateSchema: UpdateGitLabDataSourceSchema
  });
@@ -1,3 +1,4 @@
import { registerGitLabSecretScanningRouter } from "@app/ee/routes/v2/secret-scanning-v2-routers/gitlab-secret-scanning-router";
import { SecretScanningDataSource } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";

import { registerBitbucketSecretScanningRouter } from "./bitbucket-secret-scanning-router";
@@ -10,5 +11,6 @@ export const SECRET_SCANNING_REGISTER_ROUTER_MAP: Record<
  (server: FastifyZodProvider) => Promise<void>
> = {
  [SecretScanningDataSource.GitHub]: registerGitHubSecretScanningRouter,
  [SecretScanningDataSource.Bitbucket]: registerBitbucketSecretScanningRouter
  [SecretScanningDataSource.Bitbucket]: registerBitbucketSecretScanningRouter,
  [SecretScanningDataSource.GitLab]: registerGitLabSecretScanningRouter
};
@@ -4,6 +4,7 @@ import { SecretScanningConfigsSchema } from "@app/db/schemas";
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import { BitbucketDataSourceListItemSchema } from "@app/ee/services/secret-scanning-v2/bitbucket";
import { GitHubDataSourceListItemSchema } from "@app/ee/services/secret-scanning-v2/github";
import { GitLabDataSourceListItemSchema } from "@app/ee/services/secret-scanning-v2/gitlab";
import {
  SecretScanningFindingStatus,
  SecretScanningScanStatus
@@ -24,7 +25,8 @@ import { AuthMode } from "@app/services/auth/auth-type";

const SecretScanningDataSourceOptionsSchema = z.discriminatedUnion("type", [
  GitHubDataSourceListItemSchema,
  BitbucketDataSourceListItemSchema
  BitbucketDataSourceListItemSchema,
  GitLabDataSourceListItemSchema
]);

export const registerSecretScanningV2Router = async (server: FastifyZodProvider) => {
@@ -1,8 +1,10 @@
// weird commonjs-related error in the CI requires us to do the import like this
import knex from "knex";
import { v4 as uuidv4 } from "uuid";

import { TDbClient } from "@app/db";
import { TableName, TAuditLogs } from "@app/db/schemas";
import { getConfig } from "@app/lib/config/env";
import { DatabaseError, GatewayTimeoutError } from "@app/lib/errors";
import { ormify, selectAllTableCols, TOrmify } from "@app/lib/knex";
import { logger } from "@app/lib/logger";
@@ -150,43 +152,70 @@ export const auditLogDALFactory = (db: TDbClient) => {

  // delete all audit log that have expired
  const pruneAuditLog: TAuditLogDALFactory["pruneAuditLog"] = async (tx) => {
    const AUDIT_LOG_PRUNE_BATCH_SIZE = 10000;
    const MAX_RETRY_ON_FAILURE = 3;
    const runPrune = async (dbClient: knex.Knex) => {
      const AUDIT_LOG_PRUNE_BATCH_SIZE = 10000;
      const MAX_RETRY_ON_FAILURE = 3;

    const today = new Date();
    let deletedAuditLogIds: { id: string }[] = [];
    let numberOfRetryOnFailure = 0;
    let isRetrying = false;
      const today = new Date();
      let deletedAuditLogIds: { id: string }[] = [];
      let numberOfRetryOnFailure = 0;
      let isRetrying = false;

    logger.info(`${QueueName.DailyResourceCleanUp}: audit log started`);
    do {
      try {
        const findExpiredLogSubQuery = (tx || db)(TableName.AuditLog)
          .where("expiresAt", "<", today)
          .where("createdAt", "<", today) // to use audit log partition
          .orderBy(`${TableName.AuditLog}.createdAt`, "desc")
          .select("id")
          .limit(AUDIT_LOG_PRUNE_BATCH_SIZE);
      logger.info(`${QueueName.DailyResourceCleanUp}: audit log started`);
      do {
        try {
          const findExpiredLogSubQuery = dbClient(TableName.AuditLog)
            .where("expiresAt", "<", today)
            .where("createdAt", "<", today) // to use audit log partition
            .orderBy(`${TableName.AuditLog}.createdAt`, "desc")
            .select("id")
            .limit(AUDIT_LOG_PRUNE_BATCH_SIZE);

        // eslint-disable-next-line no-await-in-loop
        deletedAuditLogIds = await (tx || db)(TableName.AuditLog)
          .whereIn("id", findExpiredLogSubQuery)
          .del()
          .returning("id");
        numberOfRetryOnFailure = 0; // reset
      } catch (error) {
        numberOfRetryOnFailure += 1;
        logger.error(error, "Failed to delete audit log on pruning");
      } finally {
        // eslint-disable-next-line no-await-in-loop
        await new Promise((resolve) => {
          setTimeout(resolve, 10); // time to breathe for db
        });
      }
      isRetrying = numberOfRetryOnFailure > 0;
    } while (deletedAuditLogIds.length > 0 || (isRetrying && numberOfRetryOnFailure < MAX_RETRY_ON_FAILURE));
    logger.info(`${QueueName.DailyResourceCleanUp}: audit log completed`);
          // eslint-disable-next-line no-await-in-loop
          deletedAuditLogIds = await dbClient(TableName.AuditLog)
            .whereIn("id", findExpiredLogSubQuery)
            .del()
            .returning("id");
          numberOfRetryOnFailure = 0; // reset
        } catch (error) {
          numberOfRetryOnFailure += 1;
          logger.error(error, "Failed to delete audit log on pruning");
        } finally {
          // eslint-disable-next-line no-await-in-loop
          await new Promise((resolve) => {
            setTimeout(resolve, 10); // time to breathe for db
          });
        }
        isRetrying = numberOfRetryOnFailure > 0;
      } while (deletedAuditLogIds.length > 0 || (isRetrying && numberOfRetryOnFailure < MAX_RETRY_ON_FAILURE));
      logger.info(`${QueueName.DailyResourceCleanUp}: audit log completed`);
    };

    if (tx) {
      await runPrune(tx);
    } else {
      const QUERY_TIMEOUT_MS = 10 * 60 * 1000; // 10 minutes
      await db.transaction(async (trx) => {
        await trx.raw(`SET statement_timeout = ${QUERY_TIMEOUT_MS}`);
        await runPrune(trx);
      });
    }
  };

  return { ...auditLogOrm, pruneAuditLog, find };
  const create: TAuditLogDALFactory["create"] = async (tx) => {
    const config = getConfig();

    if (config.DISABLE_AUDIT_LOG_STORAGE) {
      return {
        ...tx,
        id: uuidv4(),
        createdAt: new Date(),
        updatedAt: new Date()
      };
    }

    return auditLogOrm.create(tx);
  };

  return { ...auditLogOrm, create, pruneAuditLog, find };
};
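The pruning change above wraps the batch loop in runPrune so it can run either inside a caller-supplied transaction or inside its own transaction with a raised statement_timeout. A minimal standalone sketch of the same batch-delete-with-retry pattern; the table and column names here are illustrative, not the Infisical schema:

import { Knex } from "knex";

// Deletes expired rows in fixed-size batches, retrying a failed batch a few times.
export const pruneExpired = async (db: Knex, table: string, batchSize = 10000, maxRetries = 3) => {
  let deleted: { id: string }[] = [];
  let failures = 0;
  do {
    try {
      // Subquery keeps each DELETE bounded so locks and WAL stay small.
      const expiredIds = db(table).where("expiresAt", "<", new Date()).select("id").limit(batchSize);
      // eslint-disable-next-line no-await-in-loop
      deleted = await db(table).whereIn("id", expiredIds).del().returning("id");
      failures = 0; // a successful batch resets the retry budget
    } catch {
      failures += 1;
    }
  } while (deleted.length > 0 || (failures > 0 && failures < maxRetries));
};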
@@ -1,7 +1,8 @@
import { AxiosError, RawAxiosRequestHeaders } from "axios";

import { SecretKeyEncoding } from "@app/db/schemas";
import { getConfig } from "@app/lib/config/env";
import { ProjectType, SecretKeyEncoding } from "@app/db/schemas";
import { TEventBusService } from "@app/ee/services/event/event-bus-service";
import { TopicName, toPublishableEvent } from "@app/ee/services/event/types";
import { request } from "@app/lib/config/request";
import { crypto } from "@app/lib/crypto/cryptography";
import { logger } from "@app/lib/logger";
@@ -21,6 +22,7 @@ type TAuditLogQueueServiceFactoryDep = {
  queueService: TQueueServiceFactory;
  projectDAL: Pick<TProjectDALFactory, "findById">;
  licenseService: Pick<TLicenseServiceFactory, "getPlan">;
  eventBusService: TEventBusService;
};

export type TAuditLogQueueServiceFactory = {
@@ -36,133 +38,17 @@ export const auditLogQueueServiceFactory = async ({
  queueService,
  projectDAL,
  licenseService,
  auditLogStreamDAL
  auditLogStreamDAL,
  eventBusService
}: TAuditLogQueueServiceFactoryDep): Promise<TAuditLogQueueServiceFactory> => {
  const appCfg = getConfig();

  const pushToLog = async (data: TCreateAuditLogDTO) => {
    if (appCfg.USE_PG_QUEUE && appCfg.SHOULD_INIT_PG_QUEUE) {
      await queueService.queuePg<QueueName.AuditLog>(QueueJobs.AuditLog, data, {
        retryLimit: 10,
        retryBackoff: true
      });
    } else {
      await queueService.queue<QueueName.AuditLog>(QueueName.AuditLog, QueueJobs.AuditLog, data, {
        removeOnFail: {
          count: 3
        },
        removeOnComplete: true
      });
    }
  };

  if (appCfg.SHOULD_INIT_PG_QUEUE) {
    await queueService.startPg<QueueName.AuditLog>(
      QueueJobs.AuditLog,
      async ([job]) => {
        const { actor, event, ipAddress, projectId, userAgent, userAgentType } = job.data;
        let { orgId } = job.data;
        const MS_IN_DAY = 24 * 60 * 60 * 1000;
        let project;

        if (!orgId) {
          // it will never be undefined for both org and project id
          // TODO(akhilmhdh): use caching here in dal to avoid db calls
          project = await projectDAL.findById(projectId as string);
          orgId = project.orgId;
        }

        const plan = await licenseService.getPlan(orgId);
        if (plan.auditLogsRetentionDays === 0) {
          // skip inserting if audit log retention is 0 meaning its not supported
          return;
        }

        // For project actions, set TTL to project-level audit log retention config
        // This condition ensures that the plan's audit log retention days cannot be bypassed
        const ttlInDays =
          project?.auditLogsRetentionDays && project.auditLogsRetentionDays < plan.auditLogsRetentionDays
            ? project.auditLogsRetentionDays
            : plan.auditLogsRetentionDays;

        const ttl = ttlInDays * MS_IN_DAY;

        const auditLog = await auditLogDAL.create({
          actor: actor.type,
          actorMetadata: actor.metadata,
          userAgent,
          projectId,
          projectName: project?.name,
          ipAddress,
          orgId,
          eventType: event.type,
          expiresAt: new Date(Date.now() + ttl),
          eventMetadata: event.metadata,
          userAgentType
        });

        const logStreams = orgId ? await auditLogStreamDAL.find({ orgId }) : [];
        await Promise.allSettled(
          logStreams.map(
            async ({
              url,
              encryptedHeadersTag,
              encryptedHeadersIV,
              encryptedHeadersKeyEncoding,
              encryptedHeadersCiphertext
            }) => {
              const streamHeaders =
                encryptedHeadersIV && encryptedHeadersCiphertext && encryptedHeadersTag
                  ? (JSON.parse(
                      crypto
                        .encryption()
                        .symmetric()
                        .decryptWithRootEncryptionKey({
                          keyEncoding: encryptedHeadersKeyEncoding as SecretKeyEncoding,
                          iv: encryptedHeadersIV,
                          tag: encryptedHeadersTag,
                          ciphertext: encryptedHeadersCiphertext
                        })
                    ) as LogStreamHeaders[])
                  : [];

              const headers: RawAxiosRequestHeaders = { "Content-Type": "application/json" };

              if (streamHeaders.length)
                streamHeaders.forEach(({ key, value }) => {
                  headers[key] = value;
                });

              try {
                const response = await request.post(
                  url,
                  { ...providerSpecificPayload(url), ...auditLog },
                  {
                    headers,
                    // request timeout
                    timeout: AUDIT_LOG_STREAM_TIMEOUT,
                    // connection timeout
                    signal: AbortSignal.timeout(AUDIT_LOG_STREAM_TIMEOUT)
                  }
                );
                return response;
              } catch (error) {
                logger.error(
                  `Failed to stream audit log [url=${url}] for org [orgId=${orgId}] [error=${(error as AxiosError).message}]`
                );
                return error;
              }
            }
          )
        );
    await queueService.queue<QueueName.AuditLog>(QueueName.AuditLog, QueueJobs.AuditLog, data, {
      removeOnFail: {
        count: 3
      },
      {
        batchSize: 1,
        workerCount: 30,
        pollingIntervalSeconds: 0.5
      }
    );
  }
      removeOnComplete: true
    });
  };

  queueService.start(QueueName.AuditLog, async (job) => {
    const { actor, event, ipAddress, projectId, userAgent, userAgentType } = job.data;
@@ -178,88 +64,97 @@ export const auditLogQueueServiceFactory = async ({
    }

    const plan = await licenseService.getPlan(orgId);
    if (plan.auditLogsRetentionDays === 0) {
      // skip inserting if audit log retention is 0 meaning its not supported
      return;

    // skip inserting if audit log retention is 0 meaning its not supported
    if (plan.auditLogsRetentionDays !== 0) {
      // For project actions, set TTL to project-level audit log retention config
      // This condition ensures that the plan's audit log retention days cannot be bypassed
      const ttlInDays =
        project?.auditLogsRetentionDays && project.auditLogsRetentionDays < plan.auditLogsRetentionDays
          ? project.auditLogsRetentionDays
          : plan.auditLogsRetentionDays;

      const ttl = ttlInDays * MS_IN_DAY;

      const auditLog = await auditLogDAL.create({
        actor: actor.type,
        actorMetadata: actor.metadata,
        userAgent,
        projectId,
        projectName: project?.name,
        ipAddress,
        orgId,
        eventType: event.type,
        expiresAt: new Date(Date.now() + ttl),
        eventMetadata: event.metadata,
        userAgentType
      });

      const logStreams = orgId ? await auditLogStreamDAL.find({ orgId }) : [];
      await Promise.allSettled(
        logStreams.map(
          async ({
            url,
            encryptedHeadersTag,
            encryptedHeadersIV,
            encryptedHeadersKeyEncoding,
            encryptedHeadersCiphertext
          }) => {
            const streamHeaders =
              encryptedHeadersIV && encryptedHeadersCiphertext && encryptedHeadersTag
                ? (JSON.parse(
                    crypto
                      .encryption()
                      .symmetric()
                      .decryptWithRootEncryptionKey({
                        keyEncoding: encryptedHeadersKeyEncoding as SecretKeyEncoding,
                        iv: encryptedHeadersIV,
                        tag: encryptedHeadersTag,
                        ciphertext: encryptedHeadersCiphertext
                      })
                  ) as LogStreamHeaders[])
                : [];

            const headers: RawAxiosRequestHeaders = { "Content-Type": "application/json" };

            if (streamHeaders.length)
              streamHeaders.forEach(({ key, value }) => {
                headers[key] = value;
              });

            try {
              const response = await request.post(
                url,
                { ...providerSpecificPayload(url), ...auditLog },
                {
                  headers,
                  // request timeout
                  timeout: AUDIT_LOG_STREAM_TIMEOUT,
                  // connection timeout
                  signal: AbortSignal.timeout(AUDIT_LOG_STREAM_TIMEOUT)
                }
              );
              return response;
            } catch (error) {
              logger.error(
                `Failed to stream audit log [url=${url}] for org [orgId=${orgId}] [error=${(error as AxiosError).message}]`
              );
              return error;
            }
          }
        )
      );
    }

    // For project actions, set TTL to project-level audit log retention config
    // This condition ensures that the plan's audit log retention days cannot be bypassed
    const ttlInDays =
      project?.auditLogsRetentionDays && project.auditLogsRetentionDays < plan.auditLogsRetentionDays
        ? project.auditLogsRetentionDays
        : plan.auditLogsRetentionDays;
    const publishable = toPublishableEvent(event);

    const ttl = ttlInDays * MS_IN_DAY;

    const auditLog = await auditLogDAL.create({
      actor: actor.type,
      actorMetadata: actor.metadata,
      userAgent,
      projectId,
      projectName: project?.name,
      ipAddress,
      orgId,
      eventType: event.type,
      expiresAt: new Date(Date.now() + ttl),
      eventMetadata: event.metadata,
      userAgentType
    });

    const logStreams = orgId ? await auditLogStreamDAL.find({ orgId }) : [];
    await Promise.allSettled(
      logStreams.map(
        async ({
          url,
          encryptedHeadersTag,
          encryptedHeadersIV,
          encryptedHeadersKeyEncoding,
          encryptedHeadersCiphertext
        }) => {
          const streamHeaders =
            encryptedHeadersIV && encryptedHeadersCiphertext && encryptedHeadersTag
              ? (JSON.parse(
                  crypto
                    .encryption()
                    .symmetric()
                    .decryptWithRootEncryptionKey({
                      keyEncoding: encryptedHeadersKeyEncoding as SecretKeyEncoding,
                      iv: encryptedHeadersIV,
                      tag: encryptedHeadersTag,
                      ciphertext: encryptedHeadersCiphertext
                    })
                ) as LogStreamHeaders[])
              : [];

          const headers: RawAxiosRequestHeaders = { "Content-Type": "application/json" };

          if (streamHeaders.length)
            streamHeaders.forEach(({ key, value }) => {
              headers[key] = value;
            });

          try {
            const response = await request.post(
              url,
              { ...providerSpecificPayload(url), ...auditLog },
              {
                headers,
                // request timeout
                timeout: AUDIT_LOG_STREAM_TIMEOUT,
                // connection timeout
                signal: AbortSignal.timeout(AUDIT_LOG_STREAM_TIMEOUT)
              }
            );
            return response;
          } catch (error) {
            logger.error(
              `Failed to stream audit log [url=${url}] for org [orgId=${orgId}] [error=${(error as AxiosError).message}]`
            );
            return error;
          }
        }
      )
    );
    if (publishable) {
      await eventBusService.publish(TopicName.CoreServers, {
        type: ProjectType.SecretManager,
        source: "infiscal",
        data: publishable.data
      });
    }
  });

  return {
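Both queue paths above compute the log TTL the same way: project-level retention applies only when it is stricter than the plan's retention, so a project cannot keep logs longer than the plan allows. The same condition as a pure function, with illustrative names:

const MS_IN_DAY = 24 * 60 * 60 * 1000;

// Project retention may shorten, but never lengthen, the plan's retention window.
const resolveAuditLogTtlMs = (planRetentionDays: number, projectRetentionDays?: number | null): number => {
  const ttlInDays =
    projectRetentionDays && projectRetentionDays < planRetentionDays ? projectRetentionDays : planRetentionDays;
  return ttlInDays * MS_IN_DAY;
};

// e.g. resolveAuditLogTtlMs(90, 30) -> 30 days in ms; resolveAuditLogTtlMs(30, 90) -> 30 days in ms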
@@ -161,6 +161,9 @@ export enum EventType {
  CREATE_IDENTITY = "create-identity",
  UPDATE_IDENTITY = "update-identity",
  DELETE_IDENTITY = "delete-identity",
  MACHINE_IDENTITY_AUTH_TEMPLATE_CREATE = "machine-identity-auth-template-create",
  MACHINE_IDENTITY_AUTH_TEMPLATE_UPDATE = "machine-identity-auth-template-update",
  MACHINE_IDENTITY_AUTH_TEMPLATE_DELETE = "machine-identity-auth-template-delete",
  LOGIN_IDENTITY_UNIVERSAL_AUTH = "login-identity-universal-auth",
  ADD_IDENTITY_UNIVERSAL_AUTH = "add-identity-universal-auth",
  UPDATE_IDENTITY_UNIVERSAL_AUTH = "update-identity-universal-auth",
@@ -830,6 +833,30 @@ interface LoginIdentityUniversalAuthEvent {
  };
}

interface MachineIdentityAuthTemplateCreateEvent {
  type: EventType.MACHINE_IDENTITY_AUTH_TEMPLATE_CREATE;
  metadata: {
    templateId: string;
    name: string;
  };
}

interface MachineIdentityAuthTemplateUpdateEvent {
  type: EventType.MACHINE_IDENTITY_AUTH_TEMPLATE_UPDATE;
  metadata: {
    templateId: string;
    name: string;
  };
}

interface MachineIdentityAuthTemplateDeleteEvent {
  type: EventType.MACHINE_IDENTITY_AUTH_TEMPLATE_DELETE;
  metadata: {
    templateId: string;
    name: string;
  };
}

interface AddIdentityUniversalAuthEvent {
  type: EventType.ADD_IDENTITY_UNIVERSAL_AUTH;
  metadata: {
@@ -1325,6 +1352,7 @@ interface AddIdentityLdapAuthEvent {
    accessTokenTrustedIps?: Array<TIdentityTrustedIp>;
    allowedFields?: TAllowedFields[];
    url: string;
    templateId?: string | null;
  };
}

@@ -1338,6 +1366,7 @@ interface UpdateIdentityLdapAuthEvent {
    accessTokenTrustedIps?: Array<TIdentityTrustedIp>;
    allowedFields?: TAllowedFields[];
    url?: string;
    templateId?: string | null;
  };
}

@@ -3439,6 +3468,9 @@ export type Event =
  | UpdateIdentityEvent
  | DeleteIdentityEvent
  | LoginIdentityUniversalAuthEvent
  | MachineIdentityAuthTemplateCreateEvent
  | MachineIdentityAuthTemplateUpdateEvent
  | MachineIdentityAuthTemplateDeleteEvent
  | AddIdentityUniversalAuthEvent
  | UpdateIdentityUniversalAuthEvent
  | DeleteIdentityUniversalAuthEvent
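For reference, an event conforming to the new template-create member of the Event union would look like this; the id and name values are hypothetical:

const event: Event = {
  type: EventType.MACHINE_IDENTITY_AUTH_TEMPLATE_CREATE,
  metadata: {
    templateId: "3f1c...", // hypothetical id
    name: "ldap-prod"
  }
};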
@@ -15,6 +15,7 @@ import { z } from "zod";
import { CustomAWSHasher } from "@app/lib/aws/hashing";
import { crypto } from "@app/lib/crypto";
import { BadRequestError } from "@app/lib/errors";
import { sanitizeString } from "@app/lib/fn";
import { validateHandlebarTemplate } from "@app/lib/template/validate-handlebars";

import { DynamicSecretAwsElastiCacheSchema, TDynamicProviderFns } from "./models";
@@ -170,14 +171,29 @@ export const AwsElastiCacheDatabaseProvider = (): TDynamicProviderFns => {
  };
  const validateConnection = async (inputs: unknown) => {
    const providerInputs = await validateProviderInputs(inputs);
    await ElastiCacheUserManager(
      {
        accessKeyId: providerInputs.accessKeyId,
        secretAccessKey: providerInputs.secretAccessKey
      },
      providerInputs.region
    ).verifyCredentials(providerInputs.clusterName);
    return true;
    try {
      await ElastiCacheUserManager(
        {
          accessKeyId: providerInputs.accessKeyId,
          secretAccessKey: providerInputs.secretAccessKey
        },
        providerInputs.region
      ).verifyCredentials(providerInputs.clusterName);
      return true;
    } catch (err) {
      const sanitizedErrorMessage = sanitizeString({
        unsanitizedString: (err as Error)?.message,
        tokens: [
          providerInputs.accessKeyId,
          providerInputs.secretAccessKey,
          providerInputs.clusterName,
          providerInputs.region
        ]
      });
      throw new BadRequestError({
        message: `Failed to connect with provider: ${sanitizedErrorMessage}`
      });
    }
  };

  const create = async (data: {
@@ -206,21 +222,37 @@ export const AwsElastiCacheDatabaseProvider = (): TDynamicProviderFns => {

    const parsedStatement = CreateElastiCacheUserSchema.parse(JSON.parse(creationStatement));

    await ElastiCacheUserManager(
      {
        accessKeyId: providerInputs.accessKeyId,
        secretAccessKey: providerInputs.secretAccessKey
      },
      providerInputs.region
    ).createUser(parsedStatement, providerInputs.clusterName);
    try {
      await ElastiCacheUserManager(
        {
          accessKeyId: providerInputs.accessKeyId,
          secretAccessKey: providerInputs.secretAccessKey
        },
        providerInputs.region
      ).createUser(parsedStatement, providerInputs.clusterName);

    return {
      entityId: leaseUsername,
      data: {
        DB_USERNAME: leaseUsername,
        DB_PASSWORD: leasePassword
      }
    };
      return {
        entityId: leaseUsername,
        data: {
          DB_USERNAME: leaseUsername,
          DB_PASSWORD: leasePassword
        }
      };
    } catch (err) {
      const sanitizedErrorMessage = sanitizeString({
        unsanitizedString: (err as Error)?.message,
        tokens: [
          leaseUsername,
          leasePassword,
          providerInputs.accessKeyId,
          providerInputs.secretAccessKey,
          providerInputs.clusterName
        ]
      });
      throw new BadRequestError({
        message: `Failed to create lease from provider: ${sanitizedErrorMessage}`
      });
    }
  };

  const revoke = async (inputs: unknown, entityId: string) => {
@@ -229,15 +261,25 @@ export const AwsElastiCacheDatabaseProvider = (): TDynamicProviderFns => {
    const revokeStatement = handlebars.compile(providerInputs.revocationStatement)({ username: entityId });
    const parsedStatement = DeleteElasticCacheUserSchema.parse(JSON.parse(revokeStatement));

    await ElastiCacheUserManager(
      {
        accessKeyId: providerInputs.accessKeyId,
        secretAccessKey: providerInputs.secretAccessKey
      },
      providerInputs.region
    ).deleteUser(parsedStatement);
    try {
      await ElastiCacheUserManager(
        {
          accessKeyId: providerInputs.accessKeyId,
          secretAccessKey: providerInputs.secretAccessKey
        },
        providerInputs.region
      ).deleteUser(parsedStatement);

    return { entityId };
      return { entityId };
    } catch (err) {
      const sanitizedErrorMessage = sanitizeString({
        unsanitizedString: (err as Error)?.message,
        tokens: [entityId, providerInputs.accessKeyId, providerInputs.secretAccessKey, providerInputs.clusterName]
      });
      throw new BadRequestError({
        message: `Failed to revoke lease from provider: ${sanitizedErrorMessage}`
      });
    }
  };

  const renew = async (_inputs: unknown, entityId: string) => {
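Every provider touched by this diff now routes error text through sanitizeString before surfacing it, so leaked credentials never reach API responses. The real helper lives in @app/lib/fn; what follows is only a plausible sketch of the contract it appears to satisfy (mask each known secret token inside the message), not the actual implementation:

// Illustrative only — the actual @app/lib/fn implementation may differ.
export const sanitizeString = ({
  unsanitizedString,
  tokens
}: {
  unsanitizedString: string;
  tokens: string[];
}): string =>
  tokens.filter(Boolean).reduce((msg, token) => msg.split(token).join("[REDACTED]"), unsanitizedString ?? "");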
@@ -23,6 +23,7 @@ import { CustomAWSHasher } from "@app/lib/aws/hashing";
import { getConfig } from "@app/lib/config/env";
import { crypto } from "@app/lib/crypto/cryptography";
import { BadRequestError, UnauthorizedError } from "@app/lib/errors";
import { sanitizeString } from "@app/lib/fn";
import { alphaNumericNanoId } from "@app/lib/nanoid";

import { AwsIamAuthType, DynamicSecretAwsIamSchema, TDynamicProviderFns } from "./models";
@@ -118,22 +119,39 @@ export const AwsIamProvider = (): TDynamicProviderFns => {

  const validateConnection = async (inputs: unknown, { projectId }: { projectId: string }) => {
    const providerInputs = await validateProviderInputs(inputs);
    const client = await $getClient(providerInputs, projectId);
    const isConnected = await client
      .send(new GetUserCommand({}))
      .then(() => true)
      .catch((err) => {
        const message = (err as Error)?.message;
        if (
          (providerInputs.method === AwsIamAuthType.AssumeRole || providerInputs.method === AwsIamAuthType.IRSA) &&
          // assume role will throw an error asking to provider username, but if so this has access in aws correctly
          message.includes("Must specify userName when calling with non-User credentials")
        ) {
          return true;
        }
        throw err;
    try {
      const client = await $getClient(providerInputs, projectId);
      const isConnected = await client
        .send(new GetUserCommand({}))
        .then(() => true)
        .catch((err) => {
          const message = (err as Error)?.message;
          if (
            (providerInputs.method === AwsIamAuthType.AssumeRole || providerInputs.method === AwsIamAuthType.IRSA) &&
            // assume role will throw an error asking to provider username, but if so this has access in aws correctly
            message.includes("Must specify userName when calling with non-User credentials")
          ) {
            return true;
          }
          throw err;
        });
      return isConnected;
    } catch (err) {
      const sensitiveTokens = [];
      if (providerInputs.method === AwsIamAuthType.AccessKey) {
        sensitiveTokens.push(providerInputs.accessKey, providerInputs.secretAccessKey);
      }
      if (providerInputs.method === AwsIamAuthType.AssumeRole) {
        sensitiveTokens.push(providerInputs.roleArn);
      }
      const sanitizedErrorMessage = sanitizeString({
        unsanitizedString: (err as Error)?.message,
        tokens: sensitiveTokens
      });
    return isConnected;
      throw new BadRequestError({
        message: `Failed to connect with provider: ${sanitizedErrorMessage}`
      });
    }
  };

  const create = async (data: {
@@ -162,62 +180,81 @@ export const AwsIamProvider = (): TDynamicProviderFns => {
      awsTags.push(...additionalTags);
    }

    const createUserRes = await client.send(
      new CreateUserCommand({
        Path: awsPath,
        PermissionsBoundary: permissionBoundaryPolicyArn || undefined,
        Tags: awsTags,
        UserName: username
      })
    );

    if (!createUserRes.User) throw new BadRequestError({ message: "Failed to create AWS IAM User" });
    if (userGroups) {
      await Promise.all(
        userGroups
          .split(",")
          .filter(Boolean)
          .map((group) =>
            client.send(new AddUserToGroupCommand({ UserName: createUserRes?.User?.UserName, GroupName: group }))
          )
      );
    }
    if (policyArns) {
      await Promise.all(
        policyArns
          .split(",")
          .filter(Boolean)
          .map((policyArn) =>
            client.send(new AttachUserPolicyCommand({ UserName: createUserRes?.User?.UserName, PolicyArn: policyArn }))
          )
      );
    }
    if (policyDocument) {
      await client.send(
        new PutUserPolicyCommand({
          UserName: createUserRes.User.UserName,
          PolicyName: `infisical-dynamic-policy-${alphaNumericNanoId(4)}`,
          PolicyDocument: policyDocument
    try {
      const createUserRes = await client.send(
        new CreateUserCommand({
          Path: awsPath,
          PermissionsBoundary: permissionBoundaryPolicyArn || undefined,
          Tags: awsTags,
          UserName: username
        })
      );
    }

    const createAccessKeyRes = await client.send(
      new CreateAccessKeyCommand({
        UserName: createUserRes.User.UserName
      })
    );
    if (!createAccessKeyRes.AccessKey)
      throw new BadRequestError({ message: "Failed to create AWS IAM User access key" });

    return {
      entityId: username,
      data: {
        ACCESS_KEY: createAccessKeyRes.AccessKey.AccessKeyId,
        SECRET_ACCESS_KEY: createAccessKeyRes.AccessKey.SecretAccessKey,
        USERNAME: username
      if (!createUserRes.User) throw new BadRequestError({ message: "Failed to create AWS IAM User" });
      if (userGroups) {
        await Promise.all(
          userGroups
            .split(",")
            .filter(Boolean)
            .map((group) =>
              client.send(new AddUserToGroupCommand({ UserName: createUserRes?.User?.UserName, GroupName: group }))
            )
        );
      }
    };
      if (policyArns) {
        await Promise.all(
          policyArns
            .split(",")
            .filter(Boolean)
            .map((policyArn) =>
              client.send(
                new AttachUserPolicyCommand({ UserName: createUserRes?.User?.UserName, PolicyArn: policyArn })
              )
            )
        );
      }
      if (policyDocument) {
        await client.send(
          new PutUserPolicyCommand({
            UserName: createUserRes.User.UserName,
            PolicyName: `infisical-dynamic-policy-${alphaNumericNanoId(4)}`,
            PolicyDocument: policyDocument
          })
        );
      }

      const createAccessKeyRes = await client.send(
        new CreateAccessKeyCommand({
          UserName: createUserRes.User.UserName
        })
      );
      if (!createAccessKeyRes.AccessKey)
        throw new BadRequestError({ message: "Failed to create AWS IAM User access key" });

      return {
        entityId: username,
        data: {
          ACCESS_KEY: createAccessKeyRes.AccessKey.AccessKeyId,
          SECRET_ACCESS_KEY: createAccessKeyRes.AccessKey.SecretAccessKey,
          USERNAME: username
        }
      };
    } catch (err) {
      const sensitiveTokens = [username];
      if (providerInputs.method === AwsIamAuthType.AccessKey) {
        sensitiveTokens.push(providerInputs.accessKey, providerInputs.secretAccessKey);
      }
      if (providerInputs.method === AwsIamAuthType.AssumeRole) {
        sensitiveTokens.push(providerInputs.roleArn);
      }
      const sanitizedErrorMessage = sanitizeString({
        unsanitizedString: (err as Error)?.message,
        tokens: sensitiveTokens
      });
      throw new BadRequestError({
        message: `Failed to create lease from provider: ${sanitizedErrorMessage}`
      });
    }
  };

  const revoke = async (inputs: unknown, entityId: string, metadata: { projectId: string }) => {
@@ -278,8 +315,25 @@ export const AwsIamProvider = (): TDynamicProviderFns => {
      )
    );

    await client.send(new DeleteUserCommand({ UserName: username }));
    return { entityId: username };
    try {
      await client.send(new DeleteUserCommand({ UserName: username }));
      return { entityId: username };
    } catch (err) {
      const sensitiveTokens = [username];
      if (providerInputs.method === AwsIamAuthType.AccessKey) {
        sensitiveTokens.push(providerInputs.accessKey, providerInputs.secretAccessKey);
      }
      if (providerInputs.method === AwsIamAuthType.AssumeRole) {
        sensitiveTokens.push(providerInputs.roleArn);
      }
      const sanitizedErrorMessage = sanitizeString({
        unsanitizedString: (err as Error)?.message,
        tokens: sensitiveTokens
      });
      throw new BadRequestError({
        message: `Failed to revoke lease from provider: ${sanitizedErrorMessage}`
      });
    }
  };

  const renew = async (_inputs: unknown, entityId: string) => {
@@ -2,6 +2,7 @@ import axios from "axios";
import { customAlphabet } from "nanoid";

import { BadRequestError } from "@app/lib/errors";
import { sanitizeString } from "@app/lib/fn";

import { AzureEntraIDSchema, TDynamicProviderFns } from "./models";

@@ -51,45 +52,82 @@ export const AzureEntraIDProvider = (): TDynamicProviderFns & {

  const validateConnection = async (inputs: unknown) => {
    const providerInputs = await validateProviderInputs(inputs);
    const data = await $getToken(providerInputs.tenantId, providerInputs.applicationId, providerInputs.clientSecret);
    return data.success;
    try {
      const data = await $getToken(providerInputs.tenantId, providerInputs.applicationId, providerInputs.clientSecret);
      return data.success;
    } catch (err) {
      const sanitizedErrorMessage = sanitizeString({
        unsanitizedString: (err as Error)?.message,
        tokens: [providerInputs.clientSecret, providerInputs.applicationId, providerInputs.tenantId]
      });
      throw new BadRequestError({
        message: `Failed to connect with provider: ${sanitizedErrorMessage}`
      });
    }
  };

  const create = async ({ inputs }: { inputs: unknown }) => {
    const providerInputs = await validateProviderInputs(inputs);
    const data = await $getToken(providerInputs.tenantId, providerInputs.applicationId, providerInputs.clientSecret);
    if (!data.success) {
      throw new BadRequestError({ message: "Failed to authorize to Microsoft Entra ID" });
    }

    const password = generatePassword();

    const response = await axios.patch(
      `${MSFT_GRAPH_API_URL}/users/${providerInputs.userId}`,
      {
        passwordProfile: {
          forceChangePasswordNextSignIn: false,
          password
        }
      },
      {
        headers: {
          "Content-Type": "application/json",
          Authorization: `Bearer ${data.token}`
        }
    try {
      const data = await $getToken(providerInputs.tenantId, providerInputs.applicationId, providerInputs.clientSecret);
      if (!data.success) {
        throw new BadRequestError({ message: "Failed to authorize to Microsoft Entra ID" });
      }
    );
    if (response.status !== 204) {
      throw new BadRequestError({ message: "Failed to update password" });
    }

    return { entityId: providerInputs.userId, data: { email: providerInputs.email, password } };
      const response = await axios.patch(
        `${MSFT_GRAPH_API_URL}/users/${providerInputs.userId}`,
        {
          passwordProfile: {
            forceChangePasswordNextSignIn: false,
            password
          }
        },
        {
          headers: {
            "Content-Type": "application/json",
            Authorization: `Bearer ${data.token}`
          }
        }
      );
      if (response.status !== 204) {
        throw new BadRequestError({ message: "Failed to update password" });
      }

      return { entityId: providerInputs.userId, data: { email: providerInputs.email, password } };
    } catch (err) {
      const sanitizedErrorMessage = sanitizeString({
        unsanitizedString: (err as Error)?.message,
        tokens: [
          providerInputs.clientSecret,
          providerInputs.applicationId,
          providerInputs.userId,
          providerInputs.email,
          password
        ]
      });
      throw new BadRequestError({
        message: `Failed to create lease from provider: ${sanitizedErrorMessage}`
      });
    }
  };

  const revoke = async (inputs: unknown, entityId: string) => {
    // Creates a new password
    await create({ inputs });
    return { entityId };
    const providerInputs = await validateProviderInputs(inputs);
    try {
      // Creates a new password
      await create({ inputs });
      return { entityId };
    } catch (err) {
      const sanitizedErrorMessage = sanitizeString({
        unsanitizedString: (err as Error)?.message,
        tokens: [providerInputs.clientSecret, providerInputs.applicationId, entityId]
      });
      throw new BadRequestError({
        message: `Failed to revoke lease from provider: ${sanitizedErrorMessage}`
      });
    }
  };

  const fetchAzureEntraIdUsers = async (tenantId: string, applicationId: string, clientSecret: string) => {
@@ -3,6 +3,8 @@ import handlebars from "handlebars";
|
||||
import { customAlphabet } from "nanoid";
|
||||
import { z } from "zod";
|
||||
|
||||
import { BadRequestError } from "@app/lib/errors";
|
||||
import { sanitizeString } from "@app/lib/fn";
|
||||
import { alphaNumericNanoId } from "@app/lib/nanoid";
|
||||
import { validateHandlebarTemplate } from "@app/lib/template/validate-handlebars";
|
||||
|
||||
@@ -71,9 +73,24 @@ export const CassandraProvider = (): TDynamicProviderFns => {
|
||||
const providerInputs = await validateProviderInputs(inputs);
|
||||
const client = await $getClient(providerInputs);
|
||||
|
||||
const isConnected = await client.execute("SELECT * FROM system_schema.keyspaces").then(() => true);
|
||||
await client.shutdown();
|
||||
return isConnected;
|
||||
try {
|
||||
const isConnected = await client.execute("SELECT * FROM system_schema.keyspaces").then(() => true);
|
||||
await client.shutdown();
|
||||
return isConnected;
|
||||
} catch (err) {
|
||||
const tokens = [providerInputs.password, providerInputs.username];
|
||||
if (providerInputs.keyspace) {
|
||||
tokens.push(providerInputs.keyspace);
|
||||
}
|
||||
const sanitizedErrorMessage = sanitizeString({
|
||||
unsanitizedString: (err as Error)?.message,
|
||||
tokens
|
||||
});
|
||||
await client.shutdown();
|
||||
throw new BadRequestError({
|
||||
message: `Failed to connect with provider: ${sanitizedErrorMessage}`
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
const create = async (data: {
|
||||
@@ -89,23 +106,39 @@ export const CassandraProvider = (): TDynamicProviderFns => {
|
||||
const username = generateUsername(usernameTemplate, identity);
|
||||
const password = generatePassword();
|
||||
const { keyspace } = providerInputs;
|
||||
const expiration = new Date(expireAt).toISOString();
|
||||
|
||||
-    const creationStatement = handlebars.compile(providerInputs.creationStatement, { noEscape: true })({
-      username,
-      password,
-      expiration,
-      keyspace
-    });
-
-    const queries = creationStatement.toString().split(";").filter(Boolean);
-    for (const query of queries) {
-      // eslint-disable-next-line
-      await client.execute(query);
-    }
-    await client.shutdown();
-
-    return { entityId: username, data: { DB_USERNAME: username, DB_PASSWORD: password } };
+    try {
+      const expiration = new Date(expireAt).toISOString();
+
+      const creationStatement = handlebars.compile(providerInputs.creationStatement, { noEscape: true })({
+        username,
+        password,
+        expiration,
+        keyspace
+      });
+
+      const queries = creationStatement.toString().split(";").filter(Boolean);
+      for (const query of queries) {
+        // eslint-disable-next-line
+        await client.execute(query);
+      }
+      await client.shutdown();
+
+      return { entityId: username, data: { DB_USERNAME: username, DB_PASSWORD: password } };
+    } catch (err) {
+      const tokens = [username, password];
+      if (keyspace) {
+        tokens.push(keyspace);
+      }
+      const sanitizedErrorMessage = sanitizeString({
+        unsanitizedString: (err as Error)?.message,
+        tokens
+      });
+      await client.shutdown();
+      throw new BadRequestError({
+        message: `Failed to create lease from provider: ${sanitizedErrorMessage}`
+      });
+    }
   };
 
   const revoke = async (inputs: unknown, entityId: string) => {
@@ -115,14 +148,29 @@ export const CassandraProvider = (): TDynamicProviderFns => {
     const username = entityId;
     const { keyspace } = providerInputs;
 
-    const revokeStatement = handlebars.compile(providerInputs.revocationStatement)({ username, keyspace });
-    const queries = revokeStatement.toString().split(";").filter(Boolean);
-    for (const query of queries) {
-      // eslint-disable-next-line
-      await client.execute(query);
-    }
-    await client.shutdown();
-    return { entityId: username };
+    try {
+      const revokeStatement = handlebars.compile(providerInputs.revocationStatement)({ username, keyspace });
+      const queries = revokeStatement.toString().split(";").filter(Boolean);
+      for (const query of queries) {
+        // eslint-disable-next-line
+        await client.execute(query);
+      }
+      await client.shutdown();
+      return { entityId: username };
+    } catch (err) {
+      const tokens = [username];
+      if (keyspace) {
+        tokens.push(keyspace);
+      }
+      const sanitizedErrorMessage = sanitizeString({
+        unsanitizedString: (err as Error)?.message,
+        tokens
+      });
+      await client.shutdown();
+      throw new BadRequestError({
+        message: `Failed to revoke lease from provider: ${sanitizedErrorMessage}`
+      });
+    }
   };
 
   const renew = async (inputs: unknown, entityId: string, expireAt: number) => {
@@ -130,21 +178,36 @@ export const CassandraProvider = (): TDynamicProviderFns => {
     if (!providerInputs.renewStatement) return { entityId };
 
     const client = await $getClient(providerInputs);
 
-    const expiration = new Date(expireAt).toISOString();
     const { keyspace } = providerInputs;
 
-    const renewStatement = handlebars.compile(providerInputs.renewStatement)({
-      username: entityId,
-      keyspace,
-      expiration
-    });
-    const queries = renewStatement.toString().split(";").filter(Boolean);
-    for await (const query of queries) {
-      await client.execute(query);
-    }
-    await client.shutdown();
-    return { entityId };
+    try {
+      const expiration = new Date(expireAt).toISOString();
+
+      const renewStatement = handlebars.compile(providerInputs.renewStatement)({
+        username: entityId,
+        keyspace,
+        expiration
+      });
+      const queries = renewStatement.toString().split(";").filter(Boolean);
+      for await (const query of queries) {
+        await client.execute(query);
+      }
+      await client.shutdown();
+      return { entityId };
+    } catch (err) {
+      const tokens = [entityId];
+      if (keyspace) {
+        tokens.push(keyspace);
+      }
+      const sanitizedErrorMessage = sanitizeString({
+        unsanitizedString: (err as Error)?.message,
+        tokens
+      });
+      await client.shutdown();
+      throw new BadRequestError({
+        message: `Failed to renew lease from provider: ${sanitizedErrorMessage}`
+      });
+    }
   };
 
   return {
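Every hunk in this compare follows the same recipe: wrap the provider call in try/catch, pass any credential material as `tokens`, and rethrow a `BadRequestError` whose message has been run through `sanitizeString`. The helper itself is imported from `@app/lib/fn` but its body is not part of this diff; what follows is only a minimal sketch of the assumed behavior, not the actual implementation.

// Hypothetical sketch of sanitizeString: redact every supplied token
// (usernames, passwords, API keys) from a raw driver error message so
// secrets cannot leak into API responses or logs. Assumed shape only;
// the real export in @app/lib/fn may differ.
export const sanitizeString = ({
  unsanitizedString,
  tokens
}: {
  unsanitizedString: string;
  tokens: string[];
}): string => {
  let sanitized = unsanitizedString ?? "";
  for (const token of tokens) {
    if (!token) continue; // an empty token would match between every character
    sanitized = sanitized.split(token).join("[REDACTED]");
  }
  return sanitized;
};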
@@ -2,6 +2,8 @@ import { Client as ElasticSearchClient } from "@elastic/elasticsearch";
 import { customAlphabet } from "nanoid";
 import { z } from "zod";
 
+import { BadRequestError } from "@app/lib/errors";
+import { sanitizeString } from "@app/lib/fn";
 import { alphaNumericNanoId } from "@app/lib/nanoid";
 
 import { verifyHostInputValidity } from "../dynamic-secret-fns";
@@ -63,12 +65,24 @@ export const ElasticSearchProvider = (): TDynamicProviderFns => {
     const providerInputs = await validateProviderInputs(inputs);
     const connection = await $getClient(providerInputs);
 
-    const infoResponse = await connection
-      .info()
-      .then(() => true)
-      .catch(() => false);
-
-    return infoResponse;
+    try {
+      const infoResponse = await connection.info().then(() => true);
+      return infoResponse;
+    } catch (err) {
+      const tokens = [];
+      if (providerInputs.auth.type === ElasticSearchAuthTypes.ApiKey) {
+        tokens.push(providerInputs.auth.apiKey, providerInputs.auth.apiKeyId);
+      } else {
+        tokens.push(providerInputs.auth.username, providerInputs.auth.password);
+      }
+      const sanitizedErrorMessage = sanitizeString({
+        unsanitizedString: (err as Error)?.message,
+        tokens
+      });
+      throw new BadRequestError({
+        message: `Failed to connect with provider: ${sanitizedErrorMessage}`
+      });
+    }
   };
 
   const create = async (data: { inputs: unknown; usernameTemplate?: string | null; identity?: { name: string } }) => {
@@ -79,27 +93,49 @@ export const ElasticSearchProvider = (): TDynamicProviderFns => {
     const username = generateUsername(usernameTemplate, identity);
     const password = generatePassword();
 
-    await connection.security.putUser({
-      username,
-      password,
-      full_name: "Managed by Infisical.com",
-      roles: providerInputs.roles
-    });
+    try {
+      await connection.security.putUser({
+        username,
+        password,
+        full_name: "Managed by Infisical.com",
+        roles: providerInputs.roles
+      });
 
-    await connection.close();
-    return { entityId: username, data: { DB_USERNAME: username, DB_PASSWORD: password } };
+      await connection.close();
+      return { entityId: username, data: { DB_USERNAME: username, DB_PASSWORD: password } };
+    } catch (err) {
+      const sanitizedErrorMessage = sanitizeString({
+        unsanitizedString: (err as Error)?.message,
+        tokens: [username, password]
+      });
+      await connection.close();
+      throw new BadRequestError({
+        message: `Failed to create lease from provider: ${sanitizedErrorMessage}`
+      });
+    }
   };
 
   const revoke = async (inputs: unknown, entityId: string) => {
     const providerInputs = await validateProviderInputs(inputs);
     const connection = await $getClient(providerInputs);
 
-    await connection.security.deleteUser({
-      username: entityId
-    });
+    try {
+      await connection.security.deleteUser({
+        username: entityId
+      });
 
-    await connection.close();
-    return { entityId };
+      await connection.close();
+      return { entityId };
+    } catch (err) {
+      const sanitizedErrorMessage = sanitizeString({
+        unsanitizedString: (err as Error)?.message,
+        tokens: [entityId]
+      });
+      await connection.close();
+      throw new BadRequestError({
+        message: `Failed to revoke lease from provider: ${sanitizedErrorMessage}`
+      });
+    }
   };
 
   const renew = async (_inputs: unknown, entityId: string) => {
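The wrap-sanitize-rethrow shape above repeats almost verbatim across every provider in this compare. Purely as an illustration of the pattern, a helper like the following could factor it out; this helper is not part of the commit, and its name and signature are invented.

import { BadRequestError } from "@app/lib/errors";
import { sanitizeString } from "@app/lib/fn";

// Hypothetical refactor sketch: run a provider operation and, on failure,
// rethrow a BadRequestError whose message has all secret tokens redacted.
const withSanitizedError = async <T>(
  operation: () => Promise<T>,
  tokens: string[],
  failureMessage: string // e.g. "Failed to create lease from provider"
): Promise<T> => {
  try {
    return await operation();
  } catch (err) {
    const sanitizedErrorMessage = sanitizeString({
      unsanitizedString: (err as Error)?.message,
      tokens
    });
    throw new BadRequestError({ message: `${failureMessage}: ${sanitizedErrorMessage}` });
  }
};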
@@ -3,6 +3,7 @@ import { GetAccessTokenResponse } from "google-auth-library/build/src/auth/oauth
 
 import { getConfig } from "@app/lib/config/env";
 import { BadRequestError, InternalServerError } from "@app/lib/errors";
+import { sanitizeString } from "@app/lib/fn";
 import { alphaNumericNanoId } from "@app/lib/nanoid";
 
 import { DynamicSecretGcpIamSchema, TDynamicProviderFns } from "./models";
@@ -65,8 +66,18 @@ export const GcpIamProvider = (): TDynamicProviderFns => {
 
   const validateConnection = async (inputs: unknown) => {
     const providerInputs = await validateProviderInputs(inputs);
-    await $getToken(providerInputs.serviceAccountEmail, 10);
-    return true;
+    try {
+      await $getToken(providerInputs.serviceAccountEmail, 10);
+      return true;
+    } catch (err) {
+      const sanitizedErrorMessage = sanitizeString({
+        unsanitizedString: (err as Error)?.message,
+        tokens: [providerInputs.serviceAccountEmail]
+      });
+      throw new BadRequestError({
+        message: `Failed to connect with provider: ${sanitizedErrorMessage}`
+      });
+    }
   };
 
   const create = async (data: { inputs: unknown; expireAt: number }) => {
@@ -74,13 +85,23 @@ export const GcpIamProvider = (): TDynamicProviderFns => {
 
     const providerInputs = await validateProviderInputs(inputs);
 
-    const now = Math.floor(Date.now() / 1000);
-    const ttl = Math.max(Math.floor(expireAt / 1000) - now, 0);
+    try {
+      const now = Math.floor(Date.now() / 1000);
+      const ttl = Math.max(Math.floor(expireAt / 1000) - now, 0);
 
-    const token = await $getToken(providerInputs.serviceAccountEmail, ttl);
-    const entityId = alphaNumericNanoId(32);
+      const token = await $getToken(providerInputs.serviceAccountEmail, ttl);
+      const entityId = alphaNumericNanoId(32);
 
-    return { entityId, data: { SERVICE_ACCOUNT_EMAIL: providerInputs.serviceAccountEmail, TOKEN: token } };
+      return { entityId, data: { SERVICE_ACCOUNT_EMAIL: providerInputs.serviceAccountEmail, TOKEN: token } };
+    } catch (err) {
+      const sanitizedErrorMessage = sanitizeString({
+        unsanitizedString: (err as Error)?.message,
+        tokens: [providerInputs.serviceAccountEmail]
+      });
+      throw new BadRequestError({
+        message: `Failed to create lease from provider: ${sanitizedErrorMessage}`
+      });
+    }
   };
 
   const revoke = async (_inputs: unknown, entityId: string) => {
@@ -89,10 +110,21 @@ export const GcpIamProvider = (): TDynamicProviderFns => {
   };
 
   const renew = async (inputs: unknown, entityId: string, expireAt: number) => {
-    // To renew a token it must be re-created
-    const data = await create({ inputs, expireAt });
+    try {
+      // To renew a token it must be re-created
+      const data = await create({ inputs, expireAt });
 
-    return { ...data, entityId };
+      return { ...data, entityId };
+    } catch (err) {
+      const providerInputs = await validateProviderInputs(inputs);
+      const sanitizedErrorMessage = sanitizeString({
+        unsanitizedString: (err as Error)?.message,
+        tokens: [providerInputs.serviceAccountEmail]
+      });
+      throw new BadRequestError({
+        message: `Failed to renew lease from provider: ${sanitizedErrorMessage}`
+      });
+    }
   };
 
   return {
@@ -3,6 +3,7 @@ import jwt from "jsonwebtoken";
|
||||
|
||||
import { crypto } from "@app/lib/crypto";
|
||||
import { BadRequestError, InternalServerError } from "@app/lib/errors";
|
||||
import { sanitizeString } from "@app/lib/fn";
|
||||
import { alphaNumericNanoId } from "@app/lib/nanoid";
|
||||
import { IntegrationUrls } from "@app/services/integration-auth/integration-list";
|
||||
|
||||
@@ -89,26 +90,46 @@ export const GithubProvider = (): TDynamicProviderFns => {
|
||||
|
||||
const validateConnection = async (inputs: unknown) => {
|
||||
const providerInputs = await validateProviderInputs(inputs);
|
||||
await $generateGitHubInstallationAccessToken(providerInputs);
|
||||
return true;
|
||||
try {
|
||||
await $generateGitHubInstallationAccessToken(providerInputs);
|
||||
return true;
|
||||
} catch (err) {
|
||||
const sanitizedErrorMessage = sanitizeString({
|
||||
unsanitizedString: (err as Error)?.message,
|
||||
tokens: [providerInputs.privateKey, String(providerInputs.appId), String(providerInputs.installationId)]
|
||||
});
|
||||
throw new BadRequestError({
|
||||
message: `Failed to connect with provider: ${sanitizedErrorMessage}`
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
const create = async (data: { inputs: unknown }) => {
|
||||
const { inputs } = data;
|
||||
const providerInputs = await validateProviderInputs(inputs);
|
||||
|
||||
const ghTokenData = await $generateGitHubInstallationAccessToken(providerInputs);
|
||||
const entityId = alphaNumericNanoId(32);
|
||||
try {
|
||||
const ghTokenData = await $generateGitHubInstallationAccessToken(providerInputs);
|
||||
const entityId = alphaNumericNanoId(32);
|
||||
|
||||
return {
|
||||
entityId,
|
||||
data: {
|
||||
TOKEN: ghTokenData.token,
|
||||
EXPIRES_AT: ghTokenData.expires_at,
|
||||
PERMISSIONS: ghTokenData.permissions,
|
||||
REPOSITORY_SELECTION: ghTokenData.repository_selection
|
||||
}
|
||||
};
|
||||
return {
|
||||
entityId,
|
||||
data: {
|
||||
TOKEN: ghTokenData.token,
|
||||
EXPIRES_AT: ghTokenData.expires_at,
|
||||
PERMISSIONS: ghTokenData.permissions,
|
||||
REPOSITORY_SELECTION: ghTokenData.repository_selection
|
||||
}
|
||||
};
|
||||
} catch (err) {
|
||||
const sanitizedErrorMessage = sanitizeString({
|
||||
unsanitizedString: (err as Error)?.message,
|
||||
tokens: [providerInputs.privateKey, String(providerInputs.appId), String(providerInputs.installationId)]
|
||||
});
|
||||
throw new BadRequestError({
|
||||
message: `Failed to create lease from provider: ${sanitizedErrorMessage}`
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
const revoke = async () => {
|
||||
|
@@ -2,7 +2,8 @@ import axios, { AxiosError } from "axios";
 import handlebars from "handlebars";
 import https from "https";
 
-import { BadRequestError, InternalServerError } from "@app/lib/errors";
+import { BadRequestError } from "@app/lib/errors";
+import { sanitizeString } from "@app/lib/fn";
 import { GatewayHttpProxyActions, GatewayProxyProtocol, withGatewayProxy } from "@app/lib/gateway";
 import { alphaNumericNanoId } from "@app/lib/nanoid";
 import { blockLocalAndPrivateIpAddresses } from "@app/lib/validator";
@@ -356,8 +357,12 @@ export const KubernetesProvider = ({ gatewayService }: TKubernetesProviderDTO):
         errorMessage = (error.response?.data as { message: string }).message;
       }
 
-      throw new InternalServerError({
-        message: `Failed to validate connection: ${errorMessage}`
+      const sanitizedErrorMessage = sanitizeString({
+        unsanitizedString: errorMessage,
+        tokens: [providerInputs.clusterToken || ""]
+      });
+      throw new BadRequestError({
+        message: `Failed to connect with provider: ${sanitizedErrorMessage}`
       });
     }
   };
@@ -602,8 +607,12 @@ export const KubernetesProvider = ({ gatewayService }: TKubernetesProviderDTO):
         errorMessage = (error.response?.data as { message: string }).message;
       }
 
-      throw new InternalServerError({
-        message: `Failed to create dynamic secret: ${errorMessage}`
+      const sanitizedErrorMessage = sanitizeString({
+        unsanitizedString: errorMessage,
+        tokens: [providerInputs.clusterToken || ""]
+      });
+      throw new BadRequestError({
+        message: `Failed to create lease from provider: ${sanitizedErrorMessage}`
      });
    }
  };
@@ -683,50 +692,65 @@ export const KubernetesProvider = ({ gatewayService }: TKubernetesProviderDTO):
     };
 
     if (providerInputs.credentialType === KubernetesCredentialType.Dynamic) {
-      const rawUrl =
-        providerInputs.authMethod === KubernetesAuthMethod.Gateway
-          ? GATEWAY_AUTH_DEFAULT_URL
-          : providerInputs.url || "";
-
-      const url = new URL(rawUrl);
-      const k8sGatewayHost = url.hostname;
-      const k8sPort = url.port ? Number(url.port) : 443;
-      const k8sHost = `${url.protocol}//${url.hostname}`;
-
-      const httpsAgent =
-        providerInputs.ca && providerInputs.sslEnabled
-          ? new https.Agent({
-              ca: providerInputs.ca,
-              rejectUnauthorized: true
-            })
-          : undefined;
-
-      if (providerInputs.gatewayId) {
-        if (providerInputs.authMethod === KubernetesAuthMethod.Gateway) {
-          await $gatewayProxyWrapper(
-            {
-              gatewayId: providerInputs.gatewayId,
-              targetHost: k8sHost,
-              targetPort: k8sPort,
-              httpsAgent,
-              reviewTokenThroughGateway: true
-            },
-            serviceAccountDynamicCallback
-          );
-        } else {
-          await $gatewayProxyWrapper(
-            {
-              gatewayId: providerInputs.gatewayId,
-              targetHost: k8sGatewayHost,
-              targetPort: k8sPort,
-              httpsAgent,
-              reviewTokenThroughGateway: false
-            },
-            serviceAccountDynamicCallback
-          );
-        }
-      } else {
-        await serviceAccountDynamicCallback(k8sHost, k8sPort, httpsAgent);
-      }
+      try {
+        const rawUrl =
+          providerInputs.authMethod === KubernetesAuthMethod.Gateway
+            ? GATEWAY_AUTH_DEFAULT_URL
+            : providerInputs.url || "";
+
+        const url = new URL(rawUrl);
+        const k8sGatewayHost = url.hostname;
+        const k8sPort = url.port ? Number(url.port) : 443;
+        const k8sHost = `${url.protocol}//${url.hostname}`;
+
+        const httpsAgent =
+          providerInputs.ca && providerInputs.sslEnabled
+            ? new https.Agent({
+                ca: providerInputs.ca,
+                rejectUnauthorized: true
+              })
+            : undefined;
+
+        if (providerInputs.gatewayId) {
+          if (providerInputs.authMethod === KubernetesAuthMethod.Gateway) {
+            await $gatewayProxyWrapper(
+              {
+                gatewayId: providerInputs.gatewayId,
+                targetHost: k8sHost,
+                targetPort: k8sPort,
+                httpsAgent,
+                reviewTokenThroughGateway: true
+              },
+              serviceAccountDynamicCallback
+            );
+          } else {
+            await $gatewayProxyWrapper(
+              {
+                gatewayId: providerInputs.gatewayId,
+                targetHost: k8sGatewayHost,
+                targetPort: k8sPort,
+                httpsAgent,
+                reviewTokenThroughGateway: false
+              },
+              serviceAccountDynamicCallback
+            );
+          }
+        } else {
+          await serviceAccountDynamicCallback(k8sHost, k8sPort, httpsAgent);
+        }
+      } catch (error) {
+        let errorMessage = error instanceof Error ? error.message : "Unknown error";
+        if (axios.isAxiosError(error) && (error.response?.data as { message: string })?.message) {
+          errorMessage = (error.response?.data as { message: string }).message;
+        }
+
+        const sanitizedErrorMessage = sanitizeString({
+          unsanitizedString: errorMessage,
+          tokens: [entityId, providerInputs.clusterToken || ""]
+        });
+        throw new BadRequestError({
+          message: `Failed to revoke lease from provider: ${sanitizedErrorMessage}`
+        });
      }
    }
@@ -6,6 +6,7 @@ import RE2 from "re2";
|
||||
import { z } from "zod";
|
||||
|
||||
import { BadRequestError } from "@app/lib/errors";
|
||||
import { sanitizeString } from "@app/lib/fn";
|
||||
import { alphaNumericNanoId } from "@app/lib/nanoid";
|
||||
|
||||
import { LdapCredentialType, LdapSchema, TDynamicProviderFns } from "./models";
|
||||
@@ -91,8 +92,18 @@ export const LdapProvider = (): TDynamicProviderFns => {
|
||||
|
||||
const validateConnection = async (inputs: unknown) => {
|
||||
const providerInputs = await validateProviderInputs(inputs);
|
||||
const client = await $getClient(providerInputs);
|
||||
return client.connected;
|
||||
try {
|
||||
const client = await $getClient(providerInputs);
|
||||
return client.connected;
|
||||
} catch (err) {
|
||||
const sanitizedErrorMessage = sanitizeString({
|
||||
unsanitizedString: (err as Error)?.message,
|
||||
tokens: [providerInputs.bindpass, providerInputs.binddn]
|
||||
});
|
||||
throw new BadRequestError({
|
||||
message: `Failed to connect with provider: ${sanitizedErrorMessage}`
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
const executeLdif = async (client: ldapjs.Client, ldif_file: string) => {
|
||||
@@ -205,11 +216,11 @@ export const LdapProvider = (): TDynamicProviderFns => {
|
||||
if (providerInputs.credentialType === LdapCredentialType.Static) {
|
||||
const dnRegex = new RE2("^dn:\\s*(.+)", "m");
|
||||
const dnMatch = dnRegex.exec(providerInputs.rotationLdif);
|
||||
const username = dnMatch?.[1];
|
||||
if (!username) throw new BadRequestError({ message: "Username not found from Ldif" });
|
||||
const password = generatePassword();
|
||||
|
||||
if (dnMatch) {
|
||||
const username = dnMatch[1];
|
||||
const password = generatePassword();
|
||||
|
||||
const generatedLdif = generateLDIF({ username, password, ldifTemplate: providerInputs.rotationLdif });
|
||||
|
||||
try {
|
||||
@@ -217,7 +228,11 @@ export const LdapProvider = (): TDynamicProviderFns => {
|
||||
|
||||
return { entityId: username, data: { DN_ARRAY: dnArray, USERNAME: username, PASSWORD: password } };
|
||||
} catch (err) {
|
||||
throw new BadRequestError({ message: (err as Error).message });
|
||||
const sanitizedErrorMessage = sanitizeString({
|
||||
unsanitizedString: (err as Error)?.message,
|
||||
tokens: [username, password, providerInputs.binddn, providerInputs.bindpass]
|
||||
});
|
||||
throw new BadRequestError({ message: sanitizedErrorMessage });
|
||||
}
|
||||
} else {
|
||||
throw new BadRequestError({
|
||||
@@ -238,7 +253,11 @@ export const LdapProvider = (): TDynamicProviderFns => {
|
||||
const rollbackLdif = generateLDIF({ username, password, ldifTemplate: providerInputs.rollbackLdif });
|
||||
await executeLdif(client, rollbackLdif);
|
||||
}
|
||||
throw new BadRequestError({ message: (err as Error).message });
|
||||
const sanitizedErrorMessage = sanitizeString({
|
||||
unsanitizedString: (err as Error)?.message,
|
||||
tokens: [username, password, providerInputs.binddn, providerInputs.bindpass]
|
||||
});
|
||||
throw new BadRequestError({ message: sanitizedErrorMessage });
|
||||
}
|
||||
}
|
||||
};
|
||||
@@ -262,7 +281,11 @@ export const LdapProvider = (): TDynamicProviderFns => {
|
||||
|
||||
return { entityId: username, data: { DN_ARRAY: dnArray, USERNAME: username, PASSWORD: password } };
|
||||
} catch (err) {
|
||||
throw new BadRequestError({ message: (err as Error).message });
|
||||
const sanitizedErrorMessage = sanitizeString({
|
||||
unsanitizedString: (err as Error)?.message,
|
||||
tokens: [username, password, providerInputs.binddn, providerInputs.bindpass]
|
||||
});
|
||||
throw new BadRequestError({ message: sanitizedErrorMessage });
|
||||
}
|
||||
} else {
|
||||
throw new BadRequestError({
|
||||
@@ -278,7 +301,7 @@ export const LdapProvider = (): TDynamicProviderFns => {
|
||||
return { entityId };
|
||||
};
|
||||
|
||||
const renew = async (inputs: unknown, entityId: string) => {
|
||||
const renew = async (_inputs: unknown, entityId: string) => {
|
||||
// No renewal necessary
|
||||
return { entityId };
|
||||
};
|
||||
|
@@ -3,6 +3,8 @@ import { customAlphabet } from "nanoid";
 import { z } from "zod";
 
 import { createDigestAuthRequestInterceptor } from "@app/lib/axios/digest-auth";
+import { BadRequestError } from "@app/lib/errors";
+import { sanitizeString } from "@app/lib/fn";
 import { alphaNumericNanoId } from "@app/lib/nanoid";
 
 import { DynamicSecretMongoAtlasSchema, TDynamicProviderFns } from "./models";
@@ -49,19 +51,25 @@ export const MongoAtlasProvider = (): TDynamicProviderFns => {
     const providerInputs = await validateProviderInputs(inputs);
     const client = await $getClient(providerInputs);
 
-    const isConnected = await client({
-      method: "GET",
-      url: `v2/groups/${providerInputs.groupId}/databaseUsers`,
-      params: { itemsPerPage: 1 }
-    })
-      .then(() => true)
-      .catch((error) => {
-        if ((error as AxiosError).response) {
-          throw new Error(JSON.stringify((error as AxiosError).response?.data));
-        }
-        throw error;
-      });
-
-    return isConnected;
+    try {
+      const isConnected = await client({
+        method: "GET",
+        url: `v2/groups/${providerInputs.groupId}/databaseUsers`,
+        params: { itemsPerPage: 1 }
+      }).then(() => true);
+      return isConnected;
+    } catch (error) {
+      const errorMessage = (error as AxiosError).response
+        ? JSON.stringify((error as AxiosError).response?.data)
+        : (error as Error)?.message;
+      const sanitizedErrorMessage = sanitizeString({
+        unsanitizedString: errorMessage,
+        tokens: [providerInputs.adminPublicKey, providerInputs.adminPrivateKey, providerInputs.groupId]
+      });
+      throw new BadRequestError({
+        message: `Failed to connect with provider: ${sanitizedErrorMessage}`
+      });
+    }
   };
 
   const create = async (data: {
@@ -77,25 +85,39 @@ export const MongoAtlasProvider = (): TDynamicProviderFns => {
     const username = generateUsername(usernameTemplate, identity);
     const password = generatePassword();
     const expiration = new Date(expireAt).toISOString();
-    await client({
-      method: "POST",
-      url: `/v2/groups/${providerInputs.groupId}/databaseUsers`,
-      data: {
-        roles: providerInputs.roles,
-        scopes: providerInputs.scopes,
-        deleteAfterDate: expiration,
-        username,
-        password,
-        databaseName: "admin",
-        groupId: providerInputs.groupId
-      }
-    }).catch((error) => {
-      if ((error as AxiosError).response) {
-        throw new Error(JSON.stringify((error as AxiosError).response?.data));
-      }
-      throw error;
-    });
-    return { entityId: username, data: { DB_USERNAME: username, DB_PASSWORD: password } };
+    try {
+      await client({
+        method: "POST",
+        url: `/v2/groups/${providerInputs.groupId}/databaseUsers`,
+        data: {
+          roles: providerInputs.roles,
+          scopes: providerInputs.scopes,
+          deleteAfterDate: expiration,
+          username,
+          password,
+          databaseName: "admin",
+          groupId: providerInputs.groupId
+        }
+      });
+      return { entityId: username, data: { DB_USERNAME: username, DB_PASSWORD: password } };
+    } catch (error) {
+      const errorMessage = (error as AxiosError).response
+        ? JSON.stringify((error as AxiosError).response?.data)
+        : (error as Error)?.message;
+      const sanitizedErrorMessage = sanitizeString({
+        unsanitizedString: errorMessage,
+        tokens: [
+          username,
+          password,
+          providerInputs.adminPublicKey,
+          providerInputs.adminPrivateKey,
+          providerInputs.groupId
+        ]
+      });
+      throw new BadRequestError({
+        message: `Failed to create lease from provider: ${sanitizedErrorMessage}`
+      });
+    }
   };
 
   const revoke = async (inputs: unknown, entityId: string) => {
@@ -111,15 +133,23 @@ export const MongoAtlasProvider = (): TDynamicProviderFns => {
       throw err;
     });
     if (isExisting) {
-      await client({
-        method: "DELETE",
-        url: `/v2/groups/${providerInputs.groupId}/databaseUsers/admin/${username}`
-      }).catch((error) => {
-        if ((error as AxiosError).response) {
-          throw new Error(JSON.stringify((error as AxiosError).response?.data));
-        }
-        throw error;
-      });
+      try {
+        await client({
+          method: "DELETE",
+          url: `/v2/groups/${providerInputs.groupId}/databaseUsers/admin/${username}`
+        });
+      } catch (error) {
+        const errorMessage = (error as AxiosError).response
+          ? JSON.stringify((error as AxiosError).response?.data)
+          : (error as Error)?.message;
+        const sanitizedErrorMessage = sanitizeString({
+          unsanitizedString: errorMessage,
+          tokens: [username, providerInputs.adminPublicKey, providerInputs.adminPrivateKey, providerInputs.groupId]
+        });
+        throw new BadRequestError({
+          message: `Failed to revoke lease from provider: ${sanitizedErrorMessage}`
+        });
+      }
    }
 
    return { entityId: username };
@@ -132,21 +162,29 @@ export const MongoAtlasProvider = (): TDynamicProviderFns => {
     const username = entityId;
     const expiration = new Date(expireAt).toISOString();
 
-    await client({
-      method: "PATCH",
-      url: `/v2/groups/${providerInputs.groupId}/databaseUsers/admin/${username}`,
-      data: {
-        deleteAfterDate: expiration,
-        databaseName: "admin",
-        groupId: providerInputs.groupId
-      }
-    }).catch((error) => {
-      if ((error as AxiosError).response) {
-        throw new Error(JSON.stringify((error as AxiosError).response?.data));
-      }
-      throw error;
-    });
-    return { entityId: username };
+    try {
+      await client({
+        method: "PATCH",
+        url: `/v2/groups/${providerInputs.groupId}/databaseUsers/admin/${username}`,
+        data: {
+          deleteAfterDate: expiration,
+          databaseName: "admin",
+          groupId: providerInputs.groupId
+        }
+      });
+      return { entityId: username };
+    } catch (error) {
+      const errorMessage = (error as AxiosError).response
+        ? JSON.stringify((error as AxiosError).response?.data)
+        : (error as Error)?.message;
+      const sanitizedErrorMessage = sanitizeString({
+        unsanitizedString: errorMessage,
+        tokens: [username, providerInputs.adminPublicKey, providerInputs.adminPrivateKey, providerInputs.groupId]
+      });
+      throw new BadRequestError({
+        message: `Failed to renew lease from provider: ${sanitizedErrorMessage}`
+      });
    }
  };
 
  return {
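As a quick end-to-end illustration of what the MongoDB Atlas changes above buy (the message and key below are invented): if the Atlas API rejects a request and echoes credential material back in its error body, the rethrown message now carries the redacted form.

// Sketch, assuming the sanitizeString behavior outlined earlier.
const raw = 'HTTP 401: invalid digest auth for key "abcd-1234"';
const redacted = sanitizeString({ unsanitizedString: raw, tokens: ["abcd-1234"] });
console.log(redacted); // HTTP 401: invalid digest auth for key "[REDACTED]"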
@@ -2,6 +2,8 @@ import { MongoClient } from "mongodb";
 import { customAlphabet } from "nanoid";
 import { z } from "zod";
 
+import { BadRequestError } from "@app/lib/errors";
+import { sanitizeString } from "@app/lib/fn";
 import { alphaNumericNanoId } from "@app/lib/nanoid";
 
 import { verifyHostInputValidity } from "../dynamic-secret-fns";
@@ -51,13 +53,24 @@ export const MongoDBProvider = (): TDynamicProviderFns => {
     const providerInputs = await validateProviderInputs(inputs);
     const client = await $getClient(providerInputs);
 
-    const isConnected = await client
-      .db(providerInputs.database)
-      .command({ ping: 1 })
-      .then(() => true);
+    try {
+      const isConnected = await client
+        .db(providerInputs.database)
+        .command({ ping: 1 })
+        .then(() => true);
 
-    await client.close();
-    return isConnected;
+      await client.close();
+      return isConnected;
+    } catch (err) {
+      await client.close();
+      const sanitizedErrorMessage = sanitizeString({
+        unsanitizedString: (err as Error)?.message,
+        tokens: [providerInputs.password, providerInputs.username, providerInputs.database, providerInputs.host]
+      });
+      throw new BadRequestError({
+        message: `Failed to connect with provider: ${sanitizedErrorMessage}`
+      });
+    }
   };
 
   const create = async (data: { inputs: unknown; usernameTemplate?: string | null; identity?: { name: string } }) => {
@@ -68,16 +81,27 @@ export const MongoDBProvider = (): TDynamicProviderFns => {
     const username = generateUsername(usernameTemplate, identity);
     const password = generatePassword();
 
-    const db = client.db(providerInputs.database);
+    try {
+      const db = client.db(providerInputs.database);
 
-    await db.command({
-      createUser: username,
-      pwd: password,
-      roles: providerInputs.roles
-    });
-    await client.close();
+      await db.command({
+        createUser: username,
+        pwd: password,
+        roles: providerInputs.roles
+      });
+      await client.close();
 
-    return { entityId: username, data: { DB_USERNAME: username, DB_PASSWORD: password } };
+      return { entityId: username, data: { DB_USERNAME: username, DB_PASSWORD: password } };
+    } catch (err) {
+      await client.close();
+      const sanitizedErrorMessage = sanitizeString({
+        unsanitizedString: (err as Error)?.message,
+        tokens: [username, password, providerInputs.password, providerInputs.username, providerInputs.database]
+      });
+      throw new BadRequestError({
+        message: `Failed to create lease from provider: ${sanitizedErrorMessage}`
+      });
+    }
   };
 
   const revoke = async (inputs: unknown, entityId: string) => {
@@ -86,13 +110,24 @@ export const MongoDBProvider = (): TDynamicProviderFns => {
 
     const username = entityId;
 
-    const db = client.db(providerInputs.database);
-    await db.command({
-      dropUser: username
-    });
-    await client.close();
+    try {
+      const db = client.db(providerInputs.database);
+      await db.command({
+        dropUser: username
+      });
+      await client.close();
 
-    return { entityId: username };
+      return { entityId: username };
+    } catch (err) {
+      await client.close();
+      const sanitizedErrorMessage = sanitizeString({
+        unsanitizedString: (err as Error)?.message,
+        tokens: [username, providerInputs.password, providerInputs.username, providerInputs.database]
+      });
+      throw new BadRequestError({
+        message: `Failed to revoke lease from provider: ${sanitizedErrorMessage}`
+      });
+    }
   };
 
   const renew = async (_inputs: unknown, entityId: string) => {
@@ -3,6 +3,8 @@ import https from "https";
|
||||
import { customAlphabet } from "nanoid";
|
||||
import { z } from "zod";
|
||||
|
||||
import { BadRequestError } from "@app/lib/errors";
|
||||
import { sanitizeString } from "@app/lib/fn";
|
||||
import { logger } from "@app/lib/logger";
|
||||
import { alphaNumericNanoId } from "@app/lib/nanoid";
|
||||
|
||||
@@ -110,11 +112,19 @@ export const RabbitMqProvider = (): TDynamicProviderFns => {
|
||||
|
||||
const validateConnection = async (inputs: unknown) => {
|
||||
const providerInputs = await validateProviderInputs(inputs);
|
||||
const connection = await $getClient(providerInputs);
|
||||
|
||||
const infoResponse = await connection.get("/whoami").then(() => true);
|
||||
|
||||
return infoResponse;
|
||||
try {
|
||||
const connection = await $getClient(providerInputs);
|
||||
const infoResponse = await connection.get("/whoami").then(() => true);
|
||||
return infoResponse;
|
||||
} catch (err) {
|
||||
const sanitizedErrorMessage = sanitizeString({
|
||||
unsanitizedString: (err as Error)?.message,
|
||||
tokens: [providerInputs.password, providerInputs.username, providerInputs.host]
|
||||
});
|
||||
throw new BadRequestError({
|
||||
message: `Failed to connect with provider: ${sanitizedErrorMessage}`
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
const create = async (data: { inputs: unknown; usernameTemplate?: string | null; identity?: { name: string } }) => {
|
||||
@@ -125,26 +135,44 @@ export const RabbitMqProvider = (): TDynamicProviderFns => {
|
||||
const username = generateUsername(usernameTemplate, identity);
|
||||
const password = generatePassword();
|
||||
|
||||
await createRabbitMqUser({
|
||||
axiosInstance: connection,
|
||||
virtualHost: providerInputs.virtualHost,
|
||||
createUser: {
|
||||
password,
|
||||
username,
|
||||
tags: [...(providerInputs.tags ?? []), "infisical-user"]
|
||||
}
|
||||
});
|
||||
|
||||
return { entityId: username, data: { DB_USERNAME: username, DB_PASSWORD: password } };
|
||||
try {
|
||||
await createRabbitMqUser({
|
||||
axiosInstance: connection,
|
||||
virtualHost: providerInputs.virtualHost,
|
||||
createUser: {
|
||||
password,
|
||||
username,
|
||||
tags: [...(providerInputs.tags ?? []), "infisical-user"]
|
||||
}
|
||||
});
|
||||
return { entityId: username, data: { DB_USERNAME: username, DB_PASSWORD: password } };
|
||||
} catch (err) {
|
||||
const sanitizedErrorMessage = sanitizeString({
|
||||
unsanitizedString: (err as Error)?.message,
|
||||
tokens: [username, password, providerInputs.password, providerInputs.username]
|
||||
});
|
||||
throw new BadRequestError({
|
||||
message: `Failed to create lease from provider: ${sanitizedErrorMessage}`
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
const revoke = async (inputs: unknown, entityId: string) => {
|
||||
const providerInputs = await validateProviderInputs(inputs);
|
||||
const connection = await $getClient(providerInputs);
|
||||
|
||||
await deleteRabbitMqUser({ axiosInstance: connection, usernameToDelete: entityId });
|
||||
|
||||
return { entityId };
|
||||
try {
|
||||
await deleteRabbitMqUser({ axiosInstance: connection, usernameToDelete: entityId });
|
||||
return { entityId };
|
||||
} catch (err) {
|
||||
const sanitizedErrorMessage = sanitizeString({
|
||||
unsanitizedString: (err as Error)?.message,
|
||||
tokens: [entityId, providerInputs.password, providerInputs.username]
|
||||
});
|
||||
throw new BadRequestError({
|
||||
message: `Failed to revoke lease from provider: ${sanitizedErrorMessage}`
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
const renew = async (_inputs: unknown, entityId: string) => {
|
||||
|
@@ -4,6 +4,7 @@ import { customAlphabet } from "nanoid";
 import { z } from "zod";
 
 import { BadRequestError } from "@app/lib/errors";
+import { sanitizeString } from "@app/lib/fn";
 import { alphaNumericNanoId } from "@app/lib/nanoid";
 import { validateHandlebarTemplate } from "@app/lib/template/validate-handlebars";
 
@@ -112,14 +113,27 @@ export const RedisDatabaseProvider = (): TDynamicProviderFns => {
 
   const validateConnection = async (inputs: unknown) => {
     const providerInputs = await validateProviderInputs(inputs);
-    const connection = await $getClient(providerInputs);
-
-    const pingResponse = await connection
-      .ping()
-      .then(() => true)
-      .catch(() => false);
-
-    return pingResponse;
+    let connection;
+    try {
+      connection = await $getClient(providerInputs);
+      const pingResponse = await connection.ping().then(() => true);
+      await connection.quit();
+      return pingResponse;
+    } catch (err) {
+      if (connection) await connection.quit();
+      const sanitizedErrorMessage = sanitizeString({
+        unsanitizedString: (err as Error)?.message,
+        tokens: [
+          providerInputs.password || "",
+          providerInputs.username,
+          providerInputs.host,
+          String(providerInputs.port)
+        ]
+      });
+      throw new BadRequestError({
+        message: `Failed to connect with provider: ${sanitizedErrorMessage}`
+      });
+    }
   };
 
   const create = async (data: {
@@ -144,10 +158,20 @@ export const RedisDatabaseProvider = (): TDynamicProviderFns => {
 
     const queries = creationStatement.toString().split(";").filter(Boolean);
 
-    await executeTransactions(connection, queries);
-
-    await connection.quit();
-    return { entityId: username, data: { DB_USERNAME: username, DB_PASSWORD: password } };
+    try {
+      await executeTransactions(connection, queries);
+      await connection.quit();
+      return { entityId: username, data: { DB_USERNAME: username, DB_PASSWORD: password } };
+    } catch (err) {
+      await connection.quit();
+      const sanitizedErrorMessage = sanitizeString({
+        unsanitizedString: (err as Error)?.message,
+        tokens: [username, password, providerInputs.password || "", providerInputs.username]
+      });
+      throw new BadRequestError({
+        message: `Failed to create lease from provider: ${sanitizedErrorMessage}`
+      });
+    }
   };
 
   const revoke = async (inputs: unknown, entityId: string) => {
@@ -159,10 +183,20 @@ export const RedisDatabaseProvider = (): TDynamicProviderFns => {
     const revokeStatement = handlebars.compile(providerInputs.revocationStatement)({ username });
     const queries = revokeStatement.toString().split(";").filter(Boolean);
 
-    await executeTransactions(connection, queries);
-
-    await connection.quit();
-    return { entityId: username };
+    try {
+      await executeTransactions(connection, queries);
+      await connection.quit();
+      return { entityId: username };
+    } catch (err) {
+      await connection.quit();
+      const sanitizedErrorMessage = sanitizeString({
+        unsanitizedString: (err as Error)?.message,
+        tokens: [username, providerInputs.password || "", providerInputs.username]
+      });
+      throw new BadRequestError({
+        message: `Failed to revoke lease from provider: ${sanitizedErrorMessage}`
+      });
+    }
   };
 
   const renew = async (inputs: unknown, entityId: string, expireAt: number) => {
@@ -176,13 +210,23 @@ export const RedisDatabaseProvider = (): TDynamicProviderFns => {
 
     const renewStatement = handlebars.compile(providerInputs.renewStatement)({ username, expiration });
 
-    if (renewStatement) {
-      const queries = renewStatement.toString().split(";").filter(Boolean);
-      await executeTransactions(connection, queries);
+    try {
+      if (renewStatement) {
+        const queries = renewStatement.toString().split(";").filter(Boolean);
+        await executeTransactions(connection, queries);
+      }
+      await connection.quit();
+      return { entityId: username };
+    } catch (err) {
+      await connection.quit();
+      const sanitizedErrorMessage = sanitizeString({
+        unsanitizedString: (err as Error)?.message,
+        tokens: [username, providerInputs.password || "", providerInputs.username]
+      });
+      throw new BadRequestError({
+        message: `Failed to renew lease from provider: ${sanitizedErrorMessage}`
+      });
     }
-
-    await connection.quit();
-    return { entityId: username };
   };
 
   return {
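One detail worth noting in the Redis hunks above: `connection` is declared with `let` outside the `try` so the `catch` path can `quit()` a handle that was successfully opened before the failure; the SAP ASE file below extends the same idea to two handles. A generic sketch of that acquire-use-release shape (the helper and its names are invented, not part of the commit):

// Hypothetical helper showing the resource pattern: declare the handle
// outside try so cleanup runs whether opening or using it failed part-way.
type Closeable = { quit: () => Promise<unknown> };

const withConnection = async <C extends Closeable, T>(
  open: () => Promise<C>,
  use: (conn: C) => Promise<T>
): Promise<T> => {
  let conn: C | undefined;
  try {
    conn = await open();
    return await use(conn);
  } finally {
    if (conn) await conn.quit();
  }
};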
@@ -4,6 +4,7 @@ import odbc from "odbc";
|
||||
import { z } from "zod";
|
||||
|
||||
import { BadRequestError } from "@app/lib/errors";
|
||||
import { sanitizeString } from "@app/lib/fn";
|
||||
import { alphaNumericNanoId } from "@app/lib/nanoid";
|
||||
import { validateHandlebarTemplate } from "@app/lib/template/validate-handlebars";
|
||||
|
||||
@@ -67,25 +68,41 @@ export const SapAseProvider = (): TDynamicProviderFns => {
|
||||
|
||||
const validateConnection = async (inputs: unknown) => {
|
||||
const providerInputs = await validateProviderInputs(inputs);
|
||||
const masterClient = await $getClient(providerInputs, true);
|
||||
const client = await $getClient(providerInputs);
|
||||
let masterClient;
|
||||
let client;
|
||||
try {
|
||||
masterClient = await $getClient(providerInputs, true);
|
||||
client = await $getClient(providerInputs);
|
||||
|
||||
const [resultFromMasterDatabase] = await masterClient.query<{ version: string }>("SELECT @@VERSION AS version");
|
||||
const [resultFromSelectedDatabase] = await client.query<{ version: string }>("SELECT @@VERSION AS version");
|
||||
const [resultFromMasterDatabase] = await masterClient.query<{ version: string }>("SELECT @@VERSION AS version");
|
||||
const [resultFromSelectedDatabase] = await client.query<{ version: string }>("SELECT @@VERSION AS version");
|
||||
|
||||
if (!resultFromSelectedDatabase.version) {
|
||||
if (!resultFromSelectedDatabase.version) {
|
||||
throw new BadRequestError({
|
||||
message: "Failed to validate SAP ASE connection, version query failed"
|
||||
});
|
||||
}
|
||||
|
||||
if (resultFromMasterDatabase.version !== resultFromSelectedDatabase.version) {
|
||||
throw new BadRequestError({
|
||||
message: "Failed to validate SAP ASE connection (master), version mismatch"
|
||||
});
|
||||
}
|
||||
|
||||
await masterClient.close();
|
||||
await client.close();
|
||||
return true;
|
||||
} catch (err) {
|
||||
if (masterClient) await masterClient.close();
|
||||
if (client) await client.close();
|
||||
const sanitizedErrorMessage = sanitizeString({
|
||||
unsanitizedString: (err as Error)?.message,
|
||||
tokens: [providerInputs.password, providerInputs.username, providerInputs.host, providerInputs.database]
|
||||
});
|
||||
throw new BadRequestError({
|
||||
message: "Failed to validate SAP ASE connection, version query failed"
|
||||
message: `Failed to connect with provider: ${sanitizedErrorMessage}`
|
||||
});
|
||||
}
|
||||
|
||||
if (resultFromMasterDatabase.version !== resultFromSelectedDatabase.version) {
|
||||
throw new BadRequestError({
|
||||
message: "Failed to validate SAP ASE connection (master), version mismatch"
|
||||
});
|
||||
}
|
||||
|
||||
return true;
|
||||
};
|
||||
|
||||
const create = async (data: { inputs: unknown; usernameTemplate?: string | null; identity?: { name: string } }) => {
|
||||
@@ -105,16 +122,26 @@ export const SapAseProvider = (): TDynamicProviderFns => {
|
||||
|
||||
const queries = creationStatement.trim().replaceAll("\n", "").split(";").filter(Boolean);
|
||||
|
||||
for await (const query of queries) {
|
||||
// If it's an adduser query, we need to first call sp_addlogin on the MASTER database.
|
||||
// If not done, then the newly created user won't be able to authenticate.
|
||||
await (query.startsWith(SapCommands.CreateLogin) ? masterClient : client).query(query);
|
||||
try {
|
||||
for await (const query of queries) {
|
||||
// If it's an adduser query, we need to first call sp_addlogin on the MASTER database.
|
||||
// If not done, then the newly created user won't be able to authenticate.
|
||||
await (query.startsWith(SapCommands.CreateLogin) ? masterClient : client).query(query);
|
||||
}
|
||||
await masterClient.close();
|
||||
await client.close();
|
||||
return { entityId: username, data: { DB_USERNAME: username, DB_PASSWORD: password } };
|
||||
} catch (err) {
|
||||
await masterClient.close();
|
||||
await client.close();
|
||||
const sanitizedErrorMessage = sanitizeString({
|
||||
unsanitizedString: (err as Error)?.message,
|
||||
tokens: [username, password, providerInputs.password, providerInputs.username, providerInputs.database]
|
||||
});
|
||||
throw new BadRequestError({
|
||||
message: `Failed to create lease from provider: ${sanitizedErrorMessage}`
|
||||
});
|
||||
}
|
||||
|
||||
await masterClient.close();
|
||||
await client.close();
|
||||
|
||||
return { entityId: username, data: { DB_USERNAME: username, DB_PASSWORD: password } };
|
||||
};
|
||||
|
||||
const revoke = async (inputs: unknown, username: string) => {
|
||||
@@ -140,14 +167,24 @@ export const SapAseProvider = (): TDynamicProviderFns => {
|
||||
}
|
||||
}
|
||||
|
||||
for await (const query of queries) {
|
||||
await (query.startsWith(SapCommands.DropLogin) ? masterClient : client).query(query);
|
||||
try {
|
||||
for await (const query of queries) {
|
||||
await (query.startsWith(SapCommands.DropLogin) ? masterClient : client).query(query);
|
||||
}
|
||||
await masterClient.close();
|
||||
await client.close();
|
||||
return { entityId: username };
|
||||
} catch (err) {
|
||||
await masterClient.close();
|
||||
await client.close();
|
||||
const sanitizedErrorMessage = sanitizeString({
|
||||
unsanitizedString: (err as Error)?.message,
|
||||
tokens: [username, providerInputs.password, providerInputs.username, providerInputs.database]
|
||||
});
|
||||
throw new BadRequestError({
|
||||
message: `Failed to revoke lease from provider: ${sanitizedErrorMessage}`
|
||||
});
|
||||
}
|
||||
|
||||
await masterClient.close();
|
||||
await client.close();
|
||||
|
||||
return { entityId: username };
|
||||
};
|
||||
|
||||
const renew = async (_: unknown, username: string) => {
|
||||
|
@@ -10,6 +10,7 @@ import { customAlphabet } from "nanoid";
 import { z } from "zod";
 
 import { BadRequestError } from "@app/lib/errors";
+import { sanitizeString } from "@app/lib/fn";
 import { alphaNumericNanoId } from "@app/lib/nanoid";
 import { validateHandlebarTemplate } from "@app/lib/template/validate-handlebars";
 
@@ -83,19 +84,26 @@ export const SapHanaProvider = (): TDynamicProviderFns => {
 
   const validateConnection = async (inputs: unknown) => {
     const providerInputs = await validateProviderInputs(inputs);
-    const client = await $getClient(providerInputs);
-
-    const testResult = await new Promise<boolean>((resolve, reject) => {
-      client.exec("SELECT 1 FROM DUMMY;", (err: any) => {
-        if (err) {
-          reject();
-        }
-
-        resolve(true);
-      });
-    });
+    try {
+      const client = await $getClient(providerInputs);
+      const testResult = await new Promise<boolean>((resolve, reject) => {
+        client.exec("SELECT 1 FROM DUMMY;", (err: any) => {
+          if (err) {
+            return reject(err);
+          }
+          resolve(true);
+        });
+      });
 
-    return testResult;
+      return testResult;
+    } catch (err) {
+      const sanitizedErrorMessage = sanitizeString({
+        unsanitizedString: (err as Error)?.message,
+        tokens: [providerInputs.password, providerInputs.username, providerInputs.host]
+      });
+      throw new BadRequestError({
+        message: `Failed to connect with provider: ${sanitizedErrorMessage}`
+      });
    }
  };
 
  const create = async (data: {
@@ -119,18 +127,22 @@ export const SapHanaProvider = (): TDynamicProviderFns => {
     });
 
     const queries = creationStatement.toString().split(";").filter(Boolean);
-    for await (const query of queries) {
-      await new Promise((resolve, reject) => {
-        client.exec(query, (err: any) => {
-          if (err) {
-            reject(
-              new BadRequestError({
-                message: err.message
-              })
-            );
-          }
-          resolve(true);
-        });
-      });
-    }
+    try {
+      for await (const query of queries) {
+        await new Promise((resolve, reject) => {
+          client.exec(query, (err: any) => {
+            if (err) return reject(err);
+            resolve(true);
+          });
+        });
+      }
+    } catch (err) {
+      const sanitizedErrorMessage = sanitizeString({
+        unsanitizedString: (err as Error)?.message,
+        tokens: [username, password, providerInputs.password, providerInputs.username]
+      });
+      throw new BadRequestError({
+        message: `Failed to create lease from provider: ${sanitizedErrorMessage}`
+      });
+    }
 
@@ -142,18 +154,24 @@
     const client = await $getClient(providerInputs);
     const revokeStatement = handlebars.compile(providerInputs.revocationStatement)({ username });
     const queries = revokeStatement.toString().split(";").filter(Boolean);
-    for await (const query of queries) {
-      await new Promise((resolve, reject) => {
-        client.exec(query, (err: any) => {
-          if (err) {
-            reject(
-              new BadRequestError({
-                message: err.message
-              })
-            );
-          }
-          resolve(true);
-        });
-      });
-    }
+    try {
+      for await (const query of queries) {
+        await new Promise((resolve, reject) => {
+          client.exec(query, (err: any) => {
+            if (err) {
+              reject(err);
+            }
+            resolve(true);
+          });
+        });
+      }
+    } catch (err) {
+      const sanitizedErrorMessage = sanitizeString({
+        unsanitizedString: (err as Error)?.message,
+        tokens: [username, providerInputs.password, providerInputs.username]
+      });
+      throw new BadRequestError({
+        message: `Failed to revoke lease from provider: ${sanitizedErrorMessage}`
+      });
+    }
 
@@ -174,16 +192,20 @@
         await new Promise((resolve, reject) => {
           client.exec(query, (err: any) => {
             if (err) {
-              reject(
-                new BadRequestError({
-                  message: err.message
-                })
-              );
+              reject(err);
             }
             resolve(true);
           });
         });
       }
+    } catch (err) {
+      const sanitizedErrorMessage = sanitizeString({
+        unsanitizedString: (err as Error)?.message,
+        tokens: [entityId, providerInputs.password, providerInputs.username]
+      });
+      throw new BadRequestError({
+        message: `Failed to renew lease from provider: ${sanitizedErrorMessage}`
+      });
     } finally {
       client.disconnect();
     }
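Beyond sanitization, the SAP HANA hunks above fix the callback-to-promise bridge: the old code called `reject()` with no argument and then fell through to `resolve(true)` on the same tick. In isolation, the corrected shape looks like this (a minimal sketch; the client type is narrowed to just what the example needs):

// Minimal illustration of the fix: `return reject(err)` stops execution so
// resolve(true) can no longer run on the error path, and the rejection now
// carries the original error for the surrounding catch block to sanitize.
const execAsync = (client: { exec: (sql: string, cb: (err?: Error) => void) => void }, sql: string) =>
  new Promise<boolean>((resolve, reject) => {
    client.exec(sql, (err?: Error) => {
      if (err) return reject(err);
      resolve(true);
    });
  });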
@@ -4,6 +4,7 @@ import snowflake from "snowflake-sdk";
|
||||
import { z } from "zod";
|
||||
|
||||
import { BadRequestError } from "@app/lib/errors";
|
||||
import { sanitizeString } from "@app/lib/fn";
|
||||
import { alphaNumericNanoId } from "@app/lib/nanoid";
|
||||
import { validateHandlebarTemplate } from "@app/lib/template/validate-handlebars";
|
||||
|
||||
@@ -69,12 +70,10 @@ export const SnowflakeProvider = (): TDynamicProviderFns => {
|
||||
|
||||
const validateConnection = async (inputs: unknown) => {
|
||||
const providerInputs = await validateProviderInputs(inputs);
|
||||
const client = await $getClient(providerInputs);
|
||||
|
||||
let isValidConnection: boolean;
|
||||
|
||||
let client;
|
||||
try {
|
||||
isValidConnection = await Promise.race([
|
||||
client = await $getClient(providerInputs);
|
||||
const isValidConnection = await Promise.race([
|
||||
client.isValidAsync(),
|
||||
new Promise((resolve) => {
|
||||
setTimeout(resolve, 10000);
|
||||
@@ -82,11 +81,18 @@ export const SnowflakeProvider = (): TDynamicProviderFns => {
|
||||
throw new BadRequestError({ message: "Unable to establish connection - verify credentials" });
|
||||
})
|
||||
]);
|
||||
return isValidConnection;
|
||||
} catch (err) {
|
||||
const sanitizedErrorMessage = sanitizeString({
|
||||
unsanitizedString: (err as Error)?.message,
|
||||
tokens: [providerInputs.password, providerInputs.username, providerInputs.accountId, providerInputs.orgId]
|
||||
});
|
||||
throw new BadRequestError({
|
||||
message: `Failed to connect with provider: ${sanitizedErrorMessage}`
|
||||
});
|
||||
} finally {
|
||||
client.destroy(noop);
|
||||
if (client) client.destroy(noop);
|
||||
}
|
||||
|
||||
return isValidConnection;
|
||||
};
|
||||
|
||||
const create = async (data: {
|
||||
@@ -116,13 +122,19 @@ export const SnowflakeProvider = (): TDynamicProviderFns => {
|
||||
sqlText: creationStatement,
|
||||
complete(err) {
|
||||
if (err) {
|
||||
return reject(new BadRequestError({ name: "CreateLease", message: err.message }));
|
||||
return reject(err);
|
||||
}
|
||||
|
||||
return resolve(true);
|
||||
}
|
||||
});
|
||||
});
|
||||
} catch (err) {
|
||||
const sanitizedErrorMessage = sanitizeString({
|
||||
unsanitizedString: (err as Error).message,
|
||||
tokens: [username, password, providerInputs.password, providerInputs.username]
|
||||
});
|
||||
throw new BadRequestError({ message: `Failed to create lease from provider: ${sanitizedErrorMessage}` });
|
||||
} finally {
|
||||
client.destroy(noop);
|
||||
}
|
||||
@@ -143,13 +155,19 @@ export const SnowflakeProvider = (): TDynamicProviderFns => {
|
||||
sqlText: revokeStatement,
|
||||
complete(err) {
|
||||
if (err) {
|
||||
return reject(new BadRequestError({ name: "RevokeLease", message: err.message }));
|
||||
return reject(err);
|
||||
}
|
||||
|
||||
return resolve(true);
|
||||
}
|
||||
});
|
||||
});
|
||||
} catch (err) {
|
||||
const sanitizedErrorMessage = sanitizeString({
|
||||
unsanitizedString: (err as Error).message,
|
||||
tokens: [username, providerInputs.password, providerInputs.username]
|
||||
});
|
||||
throw new BadRequestError({ message: `Failed to revoke lease from provider: ${sanitizedErrorMessage}` });
|
||||
} finally {
|
||||
client.destroy(noop);
|
||||
}
|
||||
@@ -175,13 +193,19 @@ export const SnowflakeProvider = (): TDynamicProviderFns => {
|
||||
sqlText: renewStatement,
|
||||
complete(err) {
|
||||
if (err) {
|
||||
return reject(new BadRequestError({ name: "RenewLease", message: err.message }));
|
||||
return reject(err);
|
||||
}
|
||||
|
||||
return resolve(true);
|
||||
}
|
||||
});
|
||||
});
|
||||
} catch (err) {
|
||||
const sanitizedErrorMessage = sanitizeString({
|
||||
unsanitizedString: (err as Error).message,
|
||||
tokens: [entityId, providerInputs.password, providerInputs.username]
|
||||
});
|
||||
throw new BadRequestError({ message: `Failed to renew lease from provider: ${sanitizedErrorMessage}` });
|
||||
} finally {
|
||||
client.destroy(noop);
|
||||
}
|
||||
|
@@ -3,6 +3,8 @@ import knex from "knex";
 import { z } from "zod";

 import { crypto } from "@app/lib/crypto/cryptography";
+import { BadRequestError } from "@app/lib/errors";
+import { sanitizeString } from "@app/lib/fn";
 import { GatewayProxyProtocol, withGatewayProxy } from "@app/lib/gateway";
 import { alphaNumericNanoId } from "@app/lib/nanoid";
 import { validateHandlebarTemplate } from "@app/lib/template/validate-handlebars";
@@ -212,8 +214,19 @@ export const SqlDatabaseProvider = ({ gatewayService }: TSqlDatabaseProviderDTO)
     // oracle needs from keyword
     const testStatement = providerInputs.client === SqlProviders.Oracle ? "SELECT 1 FROM DUAL" : "SELECT 1";

-    isConnected = await db.raw(testStatement).then(() => true);
-    await db.destroy();
+    try {
+      isConnected = await db.raw(testStatement).then(() => true);
+    } catch (err) {
+      const sanitizedErrorMessage = sanitizeString({
+        unsanitizedString: (err as Error)?.message,
+        tokens: [providerInputs.username]
+      });
+      throw new BadRequestError({
+        message: `Failed to connect with provider: ${sanitizedErrorMessage}`
+      });
+    } finally {
+      await db.destroy();
+    }
   };

   if (providerInputs.gatewayId) {
@@ -233,13 +246,13 @@ export const SqlDatabaseProvider = ({ gatewayService }: TSqlDatabaseProviderDTO)
     const { inputs, expireAt, usernameTemplate, identity } = data;

     const providerInputs = await validateProviderInputs(inputs);
+    const { database } = providerInputs;
     const username = generateUsername(providerInputs.client, usernameTemplate, identity);

     const password = generatePassword(providerInputs.client, providerInputs.passwordRequirements);
     const gatewayCallback = async (host = providerInputs.host, port = providerInputs.port) => {
       const db = await $getClient({ ...providerInputs, port, host });
       try {
-        const { database } = providerInputs;
         const expiration = new Date(expireAt).toISOString();

         const creationStatement = handlebars.compile(providerInputs.creationStatement, { noEscape: true })({
@@ -256,6 +269,14 @@ export const SqlDatabaseProvider = ({ gatewayService }: TSqlDatabaseProviderDTO)
             await tx.raw(query);
           }
         });
+      } catch (err) {
+        const sanitizedErrorMessage = sanitizeString({
+          unsanitizedString: (err as Error)?.message,
+          tokens: [username, password, database]
+        });
+        throw new BadRequestError({
+          message: `Failed to create lease from provider: ${sanitizedErrorMessage}`
+        });
       } finally {
         await db.destroy();
       }
@@ -283,6 +304,14 @@ export const SqlDatabaseProvider = ({ gatewayService }: TSqlDatabaseProviderDTO)
             await tx.raw(query);
           }
         });
+      } catch (err) {
+        const sanitizedErrorMessage = sanitizeString({
+          unsanitizedString: (err as Error)?.message,
+          tokens: [username, database]
+        });
+        throw new BadRequestError({
+          message: `Failed to revoke lease from provider: ${sanitizedErrorMessage}`
+        });
       } finally {
         await db.destroy();
       }
@@ -319,6 +348,14 @@ export const SqlDatabaseProvider = ({ gatewayService }: TSqlDatabaseProviderDTO)
           }
         });
       }
+    } catch (err) {
+      const sanitizedErrorMessage = sanitizeString({
+        unsanitizedString: (err as Error)?.message,
+        tokens: [database]
+      });
+      throw new BadRequestError({
+        message: `Failed to renew lease from provider: ${sanitizedErrorMessage}`
+      });
     } finally {
       await db.destroy();
     }
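The hunks above (and the provider hunks that follow) all apply the same pattern: wrap the provider call in try/catch, strip connection secrets out of the error text with sanitizeString, and rethrow a BadRequestError. A minimal standalone sketch of that redaction idea; the [REDACTED] marker and this toy sanitizeString are illustrative assumptions, not the actual implementation in @app/lib/fn:

// Sketch only: replaces each secret token with a placeholder so the error
// message can be surfaced to the caller without leaking credentials.
const sanitizeString = ({ unsanitizedString, tokens }: { unsanitizedString: string; tokens: string[] }) =>
  tokens.filter(Boolean).reduce((msg, token) => msg.split(token).join("[REDACTED]"), unsanitizedString);

try {
  throw new Error("auth failed for user admin with password hunter2");
} catch (err) {
  const sanitized = sanitizeString({
    unsanitizedString: (err as Error).message,
    tokens: ["admin", "hunter2"]
  });
  console.log(sanitized); // "auth failed for user [REDACTED] with password [REDACTED]"
}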
@@ -1,6 +1,8 @@
 import { authenticator } from "otplib";
 import { HashAlgorithms } from "otplib/core";

+import { BadRequestError } from "@app/lib/errors";
+import { sanitizeString } from "@app/lib/fn";
 import { alphaNumericNanoId } from "@app/lib/nanoid";

 import { DynamicSecretTotpSchema, TDynamicProviderFns, TotpConfigType } from "./models";
@@ -12,62 +14,84 @@ export const TotpProvider = (): TDynamicProviderFns => {
     return providerInputs;
   };

-  const validateConnection = async () => {
-    return true;
+  const validateConnection = async (inputs: unknown) => {
+    try {
+      await validateProviderInputs(inputs);
+      return true;
+    } catch (err) {
+      const sanitizedErrorMessage = sanitizeString({
+        unsanitizedString: (err as Error)?.message,
+        tokens: []
+      });
+      throw new BadRequestError({
+        message: `Failed to connect with provider: ${sanitizedErrorMessage}`
+      });
+    }
   };

-  const create = async (inputs: unknown) => {
-    const providerInputs = await validateProviderInputs(inputs);
+  const create = async (data: { inputs: unknown }) => {
+    const { inputs } = data;
+    try {
+      const providerInputs = await validateProviderInputs(inputs);

-    const entityId = alphaNumericNanoId(32);
-    const authenticatorInstance = authenticator.clone();
+      const entityId = alphaNumericNanoId(32);
+      const authenticatorInstance = authenticator.clone();

-    let secret: string;
-    let period: number | null | undefined;
-    let digits: number | null | undefined;
-    let algorithm: HashAlgorithms | null | undefined;
+      let secret: string;
+      let period: number | null | undefined;
+      let digits: number | null | undefined;
+      let algorithm: HashAlgorithms | null | undefined;

-    if (providerInputs.configType === TotpConfigType.URL) {
-      const urlObj = new URL(providerInputs.url);
-      secret = urlObj.searchParams.get("secret") as string;
-      const periodFromUrl = urlObj.searchParams.get("period");
-      const digitsFromUrl = urlObj.searchParams.get("digits");
-      const algorithmFromUrl = urlObj.searchParams.get("algorithm");
+      if (providerInputs.configType === TotpConfigType.URL) {
+        const urlObj = new URL(providerInputs.url);
+        secret = urlObj.searchParams.get("secret") as string;
+        const periodFromUrl = urlObj.searchParams.get("period");
+        const digitsFromUrl = urlObj.searchParams.get("digits");
+        const algorithmFromUrl = urlObj.searchParams.get("algorithm");

-      if (periodFromUrl) {
-        period = +periodFromUrl;
+        if (periodFromUrl) {
+          period = +periodFromUrl;
+        }
+
+        if (digitsFromUrl) {
+          digits = +digitsFromUrl;
+        }
+
+        if (algorithmFromUrl) {
+          algorithm = algorithmFromUrl.toLowerCase() as HashAlgorithms;
+        }
+      } else {
+        secret = providerInputs.secret;
+        period = providerInputs.period;
+        digits = providerInputs.digits;
+        algorithm = providerInputs.algorithm as unknown as HashAlgorithms;
       }

-      if (digitsFromUrl) {
-        digits = +digitsFromUrl;
+      if (digits) {
+        authenticatorInstance.options = { digits };
       }

-      if (algorithmFromUrl) {
-        algorithm = algorithmFromUrl.toLowerCase() as HashAlgorithms;
+      if (algorithm) {
+        authenticatorInstance.options = { algorithm };
       }
-    } else {
-      secret = providerInputs.secret;
-      period = providerInputs.period;
-      digits = providerInputs.digits;
-      algorithm = providerInputs.algorithm as unknown as HashAlgorithms;
-    }

-    if (digits) {
-      authenticatorInstance.options = { digits };
-    }
+      if (period) {
+        authenticatorInstance.options = { step: period };
+      }

-    if (algorithm) {
-      authenticatorInstance.options = { algorithm };
+      return {
+        entityId,
+        data: { TOTP: authenticatorInstance.generate(secret), TIME_REMAINING: authenticatorInstance.timeRemaining() }
+      };
+    } catch (err) {
+      const sanitizedErrorMessage = sanitizeString({
+        unsanitizedString: (err as Error)?.message,
+        tokens: []
+      });
+      throw new BadRequestError({
+        message: `Failed to create lease from provider: ${sanitizedErrorMessage}`
+      });
     }
-
-    if (period) {
-      authenticatorInstance.options = { step: period };
-    }
-
-    return {
-      entityId,
-      data: { TOTP: authenticatorInstance.generate(secret), TIME_REMAINING: authenticatorInstance.timeRemaining() }
-    };
   };

   const revoke = async (_inputs: unknown, entityId: string) => {
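For context, a minimal standalone sketch of what the rewritten create path does with otplib; the base32 secret here is a made-up example value, not a real credential:

import { authenticator } from "otplib";

// Clone so per-lease options don't mutate the shared singleton, mirroring the diff.
const instance = authenticator.clone();
instance.options = { digits: 6, step: 30 };

const secret = "JBSWY3DPEHPK3PXP"; // example base32 secret
const code = instance.generate(secret);
console.log(code, instance.timeRemaining()); // e.g. "492039" 17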
@@ -4,6 +4,7 @@ import { z } from "zod";

 import { crypto } from "@app/lib/crypto/cryptography";
 import { BadRequestError } from "@app/lib/errors";
+import { sanitizeString } from "@app/lib/fn";
 import { GatewayProxyProtocol, withGatewayProxy } from "@app/lib/gateway";
 import { logger } from "@app/lib/logger";
 import { alphaNumericNanoId } from "@app/lib/nanoid";
@@ -275,6 +276,14 @@ export const VerticaProvider = ({ gatewayService }: TVerticaProviderDTO): TDynam
           await client.raw(trimmedQuery);
         }
       }
+    } catch (err) {
+      const sanitizedErrorMessage = sanitizeString({
+        unsanitizedString: (err as Error)?.message,
+        tokens: [username, password, providerInputs.username, providerInputs.password]
+      });
+      throw new BadRequestError({
+        message: `Failed to create lease from provider: ${sanitizedErrorMessage}`
+      });
     } finally {
       if (client) await client.destroy();
     }
@@ -339,6 +348,14 @@ export const VerticaProvider = ({ gatewayService }: TVerticaProviderDTO): TDynam
           await client.raw(trimmedQuery);
         }
       }
+    } catch (err) {
+      const sanitizedErrorMessage = sanitizeString({
+        unsanitizedString: (err as Error)?.message,
+        tokens: [username, providerInputs.username, providerInputs.password]
+      });
+      throw new BadRequestError({
+        message: `Failed to revoke lease from provider: ${sanitizedErrorMessage}`
+      });
     } finally {
       if (client) await client.destroy();
     }
backend/src/ee/services/event/event-bus-service.ts (new file, 83 lines)
@@ -0,0 +1,83 @@
import Redis from "ioredis";
import { z } from "zod";

import { logger } from "@app/lib/logger";

import { EventSchema, TopicName } from "./types";

export const eventBusFactory = (redis: Redis) => {
  const publisher = redis.duplicate();
  // Duplicate the publisher to create a subscriber.
  // This is necessary because Redis does not allow a single connection to both publish and subscribe.
  const subscriber = publisher.duplicate();

  const init = async (topics: TopicName[] = Object.values(TopicName)) => {
    subscriber.on("error", (e) => {
      logger.error(e, "Event Bus subscriber error");
    });

    publisher.on("error", (e) => {
      logger.error(e, "Event Bus publisher error");
    });

    await subscriber.subscribe(...topics);
  };

  /**
   * Publishes an event to the specified topic.
   * @param topic - The topic to publish the event to.
   * @param event - The event data to publish.
   */
  const publish = async <T extends z.input<typeof EventSchema>>(topic: TopicName, event: T) => {
    const json = JSON.stringify(event);

    return publisher.publish(topic, json, (err) => {
      if (err) {
        return logger.error(err, `Error publishing to channel ${topic}`);
      }
    });
  };

  /**
   * @param fn - The function to call when a message is received.
   * It should accept the parsed event data as an argument.
   * @template T - The type of the event data, which should match the schema defined in EventSchema.
   * @returns A function that can be called to unsubscribe from the event bus.
   */
  const subscribe = <T extends z.infer<typeof EventSchema>>(fn: (data: T) => Promise<void> | void) => {
    // Not using async/await because the redis client's `on` method does not expect async listeners.
    const listener = (channel: string, message: string) => {
      try {
        const parsed = JSON.parse(message) as T;
        const thenable = fn(parsed);

        // If the function returns a Promise, catch any errors that occur during processing.
        if (thenable instanceof Promise) {
          thenable.catch((error) => {
            logger.error(error, `Error processing message from channel ${channel}`);
          });
        }
      } catch (error) {
        logger.error(error, `Error parsing message data from channel ${channel}`);
      }
    };
    subscriber.on("message", listener);

    return () => {
      subscriber.off("message", listener);
    };
  };

  const close = async () => {
    try {
      await publisher.quit();
      await subscriber.quit();
    } catch (error) {
      logger.error(error, "Error closing event bus connections");
    }
  };

  return { init, publish, subscribe, close };
};

export type TEventBusService = ReturnType<typeof eventBusFactory>;
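A rough usage sketch for the factory above. The Redis URL is a placeholder, and the ProjectType.SecretManager member name is an assumption about @app/db/schemas; the event shape follows EventSchema from ./types:

import Redis from "ioredis";

import { ProjectType } from "@app/db/schemas";
import { EventType } from "@app/ee/services/audit-log/audit-log-types";

import { eventBusFactory } from "./event-bus-service";
import { TopicName } from "./types";

const redis = new Redis("redis://localhost:6379"); // placeholder connection string

const demo = async () => {
  const bus = eventBusFactory(redis);
  await bus.init([TopicName.CoreServers]);

  // subscribe() hands back an unsubscribe function instead of exposing the listener.
  const unsubscribe = bus.subscribe((event) => {
    console.log("received", event.data.eventType);
  });

  // Payload shape follows EventSecretPayload from ./types; defaults fill in
  // datacontenttype, time and specversion.
  await bus.publish(TopicName.CoreServers, {
    type: ProjectType.SecretManager,
    source: "example",
    data: {
      eventType: EventType.CREATE_SECRET,
      payload: { secretId: "abc123", secretKey: "DB_PASSWORD", environment: "dev", secretPath: "/" }
    }
  });

  unsubscribe();
  await bus.close();
};

void demo();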
backend/src/ee/services/event/event-sse-service.ts (new file, 162 lines)
@@ -0,0 +1,162 @@
/* eslint-disable no-continue */
import { subject } from "@casl/ability";
import Redis from "ioredis";

import { KeyStorePrefixes } from "@app/keystore/keystore";
import { logger } from "@app/lib/logger";

import { TEventBusService } from "./event-bus-service";
import { createEventStreamClient, EventStreamClient, IEventStreamClientOpts } from "./event-sse-stream";
import { EventData, RegisteredEvent, toBusEventName } from "./types";

const AUTH_REFRESH_INTERVAL = 60 * 1000;
const HEART_BEAT_INTERVAL = 15 * 1000;

export const sseServiceFactory = (bus: TEventBusService, redis: Redis) => {
  const clients = new Set<EventStreamClient>();

  const heartbeatInterval = setInterval(() => {
    for (const client of clients) {
      if (client.stream.closed) continue;
      void client.ping();
    }
  }, HEART_BEAT_INTERVAL);

  const refreshInterval = setInterval(() => {
    for (const client of clients) {
      if (client.stream.closed) continue;
      void client.refresh();
    }
  }, AUTH_REFRESH_INTERVAL);

  const removeActiveConnection = async (projectId: string, identityId: string, connectionId: string) => {
    const set = KeyStorePrefixes.ActiveSSEConnectionsSet(projectId, identityId);
    const key = KeyStorePrefixes.ActiveSSEConnections(projectId, identityId, connectionId);

    await Promise.all([redis.lrem(set, 0, connectionId), redis.del(key)]);
  };

  const getActiveConnectionsCount = async (projectId: string, identityId: string) => {
    const set = KeyStorePrefixes.ActiveSSEConnectionsSet(projectId, identityId);
    const connections = await redis.lrange(set, 0, -1);

    if (connections.length === 0) {
      return 0; // No active connections
    }

    const keys = connections.map((c) => KeyStorePrefixes.ActiveSSEConnections(projectId, identityId, c));

    const values = await redis.mget(...keys);

    // eslint-disable-next-line no-plusplus
    for (let i = 0; i < values.length; i++) {
      if (values[i] === null) {
        // eslint-disable-next-line no-await-in-loop
        await removeActiveConnection(projectId, identityId, connections[i]);
      }
    }

    return redis.llen(set);
  };

  const onDisconnect = async (client: EventStreamClient) => {
    try {
      client.close();
      clients.delete(client);
      await removeActiveConnection(client.auth.projectId, client.auth.actorId, client.id);
    } catch (error) {
      logger.error(error, "Error during SSE stream disconnection");
    }
  };

  function filterEventsForClient(client: EventStreamClient, event: EventData, registered: RegisteredEvent[]) {
    const eventType = toBusEventName(event.data.eventType);
    const match = registered.find((r) => r.event === eventType);
    if (!match) return;

    const item = event.data.payload;

    if (Array.isArray(item)) {
      if (item.length === 0) return;

      const baseSubject = {
        eventType,
        environment: undefined as string | undefined,
        secretPath: undefined as string | undefined
      };

      const filtered = item.filter((ev) => {
        baseSubject.secretPath = ev.secretPath ?? "/";
        baseSubject.environment = ev.environment;

        return client.matcher.can("subscribe", subject(event.type, baseSubject));
      });

      if (filtered.length === 0) return;

      return client.send({
        ...event,
        data: {
          ...event.data,
          payload: filtered
        }
      });
    }

    // For single item
    const baseSubject = {
      eventType,
      secretPath: item.secretPath ?? "/",
      environment: item.environment
    };

    if (client.matcher.can("subscribe", subject(event.type, baseSubject))) {
      client.send(event);
    }
  }

  const subscribe = async (
    opts: IEventStreamClientOpts & {
      onClose?: () => void;
    }
  ) => {
    const client = createEventStreamClient(redis, opts);

    // Set up event listener on event bus
    const unsubscribe = bus.subscribe((event) => {
      if (event.type !== opts.type) return;
      filterEventsForClient(client, event, opts.registered);
    });

    client.stream.on("close", () => {
      unsubscribe();
      void onDisconnect(client); // This will never throw
    });

    await client.open();

    clients.add(client);

    return client;
  };

  const close = () => {
    if (heartbeatInterval) {
      clearInterval(heartbeatInterval);
    }

    if (refreshInterval) {
      clearInterval(refreshInterval);
    }

    for (const client of clients) {
      client.close();
    }

    clients.clear();
  };

  return { subscribe, close, getActiveConnectionsCount };
};

export type TServerSentEventsService = ReturnType<typeof sseServiceFactory>;
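A hedged sketch of how a route handler might consume this service, pairing client.stream with the SSE headers exported from event-sse-stream.ts. The Fastify wiring, the resolveAuth stub, and the handler name are assumptions, not code from this PR:

import type { FastifyReply, FastifyRequest } from "fastify";

import { ProjectType } from "@app/db/schemas";

import type { TServerSentEventsService } from "./event-sse-service";
import { getServerSentEventsHeaders, IEventStreamClientOpts } from "./event-sse-stream";
import { BusEventName } from "./types";

// Hypothetical handler: resolveAuth stands in for whatever permission lookup
// the real router performs; it must yield { actorId, projectId, permission }.
export const makeSecretEventsHandler =
  (sse: TServerSentEventsService, resolveAuth: IEventStreamClientOpts["getAuthInfo"]) =>
  async (_req: FastifyRequest, reply: FastifyReply) => {
    const client = await sse.subscribe({
      type: ProjectType.SecretManager, // assumed enum member
      registered: [{ event: BusEventName.CreateSecret, conditions: { secretPath: "/", environmentSlug: "dev" } }],
      getAuthInfo: resolveAuth,
      onAuthRefresh: () => {}
    });

    // SSE needs these headers and a long-lived body, so the client's Readable
    // is piped straight into the raw response.
    reply.raw.writeHead(200, getServerSentEventsHeaders());
    client.stream.pipe(reply.raw);
  };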
backend/src/ee/services/event/event-sse-stream.ts (new file, 187 lines)
@@ -0,0 +1,187 @@
/* eslint-disable no-underscore-dangle */
import { Readable } from "node:stream";

import { MongoAbility, PureAbility } from "@casl/ability";
import { MongoQuery } from "@ucast/mongo2js";
import Redis from "ioredis";
import { nanoid } from "nanoid";

import { ProjectType } from "@app/db/schemas";
import { ProjectPermissionSet } from "@app/ee/services/permission/project-permission";
import { KeyStorePrefixes } from "@app/keystore/keystore";
import { conditionsMatcher } from "@app/lib/casl";
import { logger } from "@app/lib/logger";

import { EventData, RegisteredEvent } from "./types";

export const getServerSentEventsHeaders = () =>
  ({
    "Cache-Control": "no-cache",
    "Content-Type": "text/event-stream",
    Connection: "keep-alive",
    "X-Accel-Buffering": "no"
  }) as const;

type TAuthInfo = {
  actorId: string;
  projectId: string;
  permission: MongoAbility<ProjectPermissionSet, MongoQuery>;
};

export interface IEventStreamClientOpts {
  type: ProjectType;
  registered: RegisteredEvent[];
  onAuthRefresh: (info: TAuthInfo) => Promise<void> | void;
  getAuthInfo: () => Promise<TAuthInfo> | TAuthInfo;
}

interface EventMessage {
  time?: string | number;
  type: string;
  data?: unknown;
}

function serializeSseEvent(chunk: EventMessage): string {
  let payload = "";

  if (chunk.time) payload += `id: ${chunk.time}\n`;
  if (chunk.type) payload += `event: ${chunk.type}\n`;
  if (chunk.data) payload += `data: ${JSON.stringify(chunk)}\n`;

  return `${payload}\n`;
}

export type EventStreamClient = {
  id: string;
  stream: Readable;
  open: () => Promise<void>;
  send: (data: EventMessage | EventData) => void;
  ping: () => Promise<void>;
  refresh: () => Promise<void>;
  close: () => void;
  get auth(): TAuthInfo;
  signal: AbortSignal;
  abort: () => void;
  matcher: PureAbility;
};

export function createEventStreamClient(redis: Redis, options: IEventStreamClientOpts): EventStreamClient {
  const rules = options.registered.map((r) => {
    const secretPath = r.conditions?.secretPath;
    const hasConditions = r.conditions?.environmentSlug || r.conditions?.secretPath;

    return {
      subject: options.type,
      action: "subscribe",
      conditions: {
        eventType: r.event,
        ...(hasConditions
          ? {
              environment: r.conditions?.environmentSlug ?? "",
              secretPath: { $glob: secretPath }
            }
          : {})
      }
    };
  });

  const id = `sse-${nanoid()}`;
  const control = new AbortController();
  const matcher = new PureAbility(rules, { conditionsMatcher });

  let auth: TAuthInfo | undefined;

  const stream = new Readable({
    objectMode: true
  });

  // We will manually push data to the stream
  stream._read = () => {};

  const send = (data: EventMessage | EventData) => {
    const chunk = serializeSseEvent(data);
    if (!stream.push(chunk)) {
      logger.debug("Backpressure detected: dropped manual event");
    }
  };

  stream.on("error", (error: Error) => stream.destroy(error));

  const open = async () => {
    auth = await options.getAuthInfo();
    await options.onAuthRefresh(auth);

    const { actorId, projectId } = auth;
    const set = KeyStorePrefixes.ActiveSSEConnectionsSet(projectId, actorId);
    const key = KeyStorePrefixes.ActiveSSEConnections(projectId, actorId, id);

    await Promise.all([redis.rpush(set, id), redis.set(key, "1", "EX", 60)]);
  };

  const ping = async () => {
    if (!auth) return; // Avoid race condition if ping is called before open

    const { actorId, projectId } = auth;
    const key = KeyStorePrefixes.ActiveSSEConnections(projectId, actorId, id);

    await redis.set(key, "1", "EX", 60);

    stream.push("1");
  };

  const close = () => {
    if (stream.closed) return;
    stream.push(null);
    stream.destroy();
  };

  /**
   * Refreshes the connection's auth permissions.
   * Must be called at least once when the connection is opened.
   */
  const refresh = async () => {
    try {
      auth = await options.getAuthInfo();
      await options.onAuthRefresh(auth);
    } catch (error) {
      if (error instanceof Error) {
        send({
          type: "error",
          data: {
            ...error
          }
        });
        return close();
      }
      stream.emit("error", error);
    }
  };

  const abort = () => {
    try {
      control.abort();
    } catch (error) {
      logger.debug(error, "Error aborting SSE stream");
    }
  };

  return {
    id,
    stream,
    open,
    send,
    ping,
    refresh,
    close,
    signal: control.signal,
    abort,
    matcher,
    get auth() {
      if (!auth) {
        throw new Error("Auth info not set");
      }

      return auth;
    }
  };
}
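For reference, serializeSseEvent above emits standard text/event-stream framing, with one quirk worth noting: the data: line JSON-encodes the entire chunk rather than chunk.data alone. A message such as { time: 1712000000, type: "secret:create", data: { ... } } would therefore serialize roughly as:

id: 1712000000
event: secret:create
data: {"time":1712000000,"type":"secret:create","data":{...}}

The trailing blank line produced by the final `\n` is what terminates the event on the wire, per the SSE spec.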
backend/src/ee/services/event/types.ts (new file, 125 lines)
@@ -0,0 +1,125 @@
import { z } from "zod";

import { ProjectType } from "@app/db/schemas";
import { Event, EventType } from "@app/ee/services/audit-log/audit-log-types";

export enum TopicName {
  CoreServers = "infisical::core-servers"
}

export enum BusEventName {
  CreateSecret = "secret:create",
  UpdateSecret = "secret:update",
  DeleteSecret = "secret:delete"
}

type PublisableEventTypes =
  | EventType.CREATE_SECRET
  | EventType.CREATE_SECRETS
  | EventType.DELETE_SECRET
  | EventType.DELETE_SECRETS
  | EventType.UPDATE_SECRETS
  | EventType.UPDATE_SECRET;

export function toBusEventName(input: EventType) {
  switch (input) {
    case EventType.CREATE_SECRET:
    case EventType.CREATE_SECRETS:
      return BusEventName.CreateSecret;
    case EventType.UPDATE_SECRET:
    case EventType.UPDATE_SECRETS:
      return BusEventName.UpdateSecret;
    case EventType.DELETE_SECRET:
    case EventType.DELETE_SECRETS:
      return BusEventName.DeleteSecret;
    default:
      return null;
  }
}

const isBulkEvent = (event: Event): event is Extract<Event, { metadata: { secrets: Array<unknown> } }> => {
  return event.type.endsWith("-secrets"); // Feels so wrong
};

export const toPublishableEvent = (event: Event) => {
  const name = toBusEventName(event.type);

  if (!name) return null;

  const e = event as Extract<Event, { type: PublisableEventTypes }>;

  if (isBulkEvent(e)) {
    return {
      name,
      isBulk: true,
      data: {
        eventType: e.type,
        payload: e.metadata.secrets.map((s) => ({
          environment: e.metadata.environment,
          secretPath: e.metadata.secretPath,
          ...s
        }))
      }
    } as const;
  }

  return {
    name,
    isBulk: false,
    data: {
      eventType: e.type,
      payload: {
        ...e.metadata,
        environment: e.metadata.environment
      }
    }
  } as const;
};

export const EventName = z.nativeEnum(BusEventName);

const EventSecretPayload = z.object({
  secretPath: z.string().optional(),
  secretId: z.string(),
  secretKey: z.string(),
  environment: z.string()
});

export type EventSecret = z.infer<typeof EventSecretPayload>;

export const EventSchema = z.object({
  datacontenttype: z.literal("application/json").optional().default("application/json"),
  type: z.nativeEnum(ProjectType),
  source: z.string(),
  time: z
    .string()
    .optional()
    .default(() => new Date().toISOString()),
  data: z.discriminatedUnion("eventType", [
    z.object({
      specversion: z.number().optional().default(1),
      eventType: z.enum([EventType.CREATE_SECRET, EventType.UPDATE_SECRET, EventType.DELETE_SECRET]),
      payload: EventSecretPayload
    }),
    z.object({
      specversion: z.number().optional().default(1),
      eventType: z.enum([EventType.CREATE_SECRETS, EventType.UPDATE_SECRETS, EventType.DELETE_SECRETS]),
      payload: EventSecretPayload.array()
    })
    // Add more event types as needed
  ])
});

export type EventData = z.infer<typeof EventSchema>;

export const EventRegisterSchema = z.object({
  event: EventName,
  conditions: z
    .object({
      secretPath: z.string().optional().default("/"),
      environmentSlug: z.string()
    })
    .optional()
});

export type RegisteredEvent = z.infer<typeof EventRegisterSchema>;
@@ -1,6 +1,6 @@
 import { Knex } from "knex";

-import { SecretKeyEncoding, TableName, TUsers } from "@app/db/schemas";
+import { ProjectVersion, SecretKeyEncoding, TableName, TUsers } from "@app/db/schemas";
 import { crypto } from "@app/lib/crypto/cryptography";
 import { BadRequestError, ForbiddenRequestError, NotFoundError, ScimRequestError } from "@app/lib/errors";

@@ -65,6 +65,18 @@ const addAcceptedUsersToGroup = async ({
   const userKeysSet = new Set(keys.map((k) => `${k.projectId}-${k.receiverId}`));

   for await (const projectId of projectIds) {
+    const project = await projectDAL.findById(projectId, tx);
+    if (!project) {
+      throw new NotFoundError({
+        message: `Failed to find project with ID '${projectId}'`
+      });
+    }
+
+    if (project.version !== ProjectVersion.V1 && project.version !== ProjectVersion.V2) {
+      // eslint-disable-next-line no-continue
+      continue;
+    }
+
     const usersToAddProjectKeyFor = users.filter((u) => !userKeysSet.has(`${projectId}-${u.userId}`));

     if (usersToAddProjectKeyFor.length) {
@@ -86,6 +98,12 @@ const addAcceptedUsersToGroup = async ({
       });
     }

+    if (!ghostUserLatestKey.sender.publicKey) {
+      throw new NotFoundError({
+        message: `Failed to find project owner's public key in project with ID '${projectId}'`
+      });
+    }
+
     const bot = await projectBotDAL.findOne({ projectId }, tx);

     if (!bot) {
@@ -112,6 +130,12 @@ const addAcceptedUsersToGroup = async ({
     });

     const projectKeysToAdd = usersToAddProjectKeyFor.map((user) => {
+      if (!user.publicKey) {
+        throw new NotFoundError({
+          message: `Failed to find user's public key in project with ID '${projectId}'`
+        });
+      }
+
       const { ciphertext: encryptedKey, nonce } = crypto
         .encryption()
         .asymmetric()
@@ -41,7 +41,7 @@ type TGroupServiceFactoryDep = {
     TUserGroupMembershipDALFactory,
     "findOne" | "delete" | "filterProjectsByUserMembership" | "transaction" | "insertMany" | "find"
   >;
-  projectDAL: Pick<TProjectDALFactory, "findProjectGhostUser">;
+  projectDAL: Pick<TProjectDALFactory, "findProjectGhostUser" | "findById">;
   projectBotDAL: Pick<TProjectBotDALFactory, "findOne">;
   projectKeyDAL: Pick<TProjectKeyDALFactory, "find" | "delete" | "findLatestProjectKey" | "insertMany">;
   permissionService: Pick<TPermissionServiceFactory, "getOrgPermission" | "getOrgPermissionByRole">;
@@ -65,7 +65,7 @@ export type TAddUsersToGroup = {
   userGroupMembershipDAL: Pick<TUserGroupMembershipDALFactory, "find" | "transaction" | "insertMany">;
   groupProjectDAL: Pick<TGroupProjectDALFactory, "find">;
   projectKeyDAL: Pick<TProjectKeyDALFactory, "find" | "findLatestProjectKey" | "insertMany">;
-  projectDAL: Pick<TProjectDALFactory, "findProjectGhostUser">;
+  projectDAL: Pick<TProjectDALFactory, "findProjectGhostUser" | "findById">;
   projectBotDAL: Pick<TProjectBotDALFactory, "findOne">;
   tx: Knex;
 };
@@ -78,7 +78,7 @@ export type TAddUsersToGroupByUserIds = {
   orgDAL: Pick<TOrgDALFactory, "findMembership">;
   groupProjectDAL: Pick<TGroupProjectDALFactory, "find">;
   projectKeyDAL: Pick<TProjectKeyDALFactory, "find" | "findLatestProjectKey" | "insertMany">;
-  projectDAL: Pick<TProjectDALFactory, "findProjectGhostUser">;
+  projectDAL: Pick<TProjectDALFactory, "findProjectGhostUser" | "findById">;
   projectBotDAL: Pick<TProjectBotDALFactory, "findOne">;
   tx?: Knex;
 };
@@ -102,7 +102,7 @@ export type TConvertPendingGroupAdditionsToGroupMemberships = {
   >;
   groupProjectDAL: Pick<TGroupProjectDALFactory, "find">;
   projectKeyDAL: Pick<TProjectKeyDALFactory, "find" | "findLatestProjectKey" | "insertMany">;
-  projectDAL: Pick<TProjectDALFactory, "findProjectGhostUser">;
+  projectDAL: Pick<TProjectDALFactory, "findProjectGhostUser" | "findById">;
   projectBotDAL: Pick<TProjectBotDALFactory, "findOne">;
   tx?: Knex;
 };
backend/src/ee/services/identity-auth-template/identity-auth-template-dal.ts (new file, 83 lines)
@@ -0,0 +1,83 @@
/* eslint-disable no-case-declarations */
import { Knex } from "knex";

import { TDbClient } from "@app/db";
import { TableName } from "@app/db/schemas";
import { buildFindFilter, ormify } from "@app/lib/knex";

import { IdentityAuthTemplateMethod } from "./identity-auth-template-enums";

export type TIdentityAuthTemplateDALFactory = ReturnType<typeof identityAuthTemplateDALFactory>;

export const identityAuthTemplateDALFactory = (db: TDbClient) => {
  const identityAuthTemplateOrm = ormify(db, TableName.IdentityAuthTemplate);

  const findByOrgId = async (
    orgId: string,
    { limit, offset, search, tx }: { limit?: number; offset?: number; search?: string; tx?: Knex } = {}
  ) => {
    let query = (tx || db.replicaNode())(TableName.IdentityAuthTemplate).where({ orgId });
    let countQuery = (tx || db.replicaNode())(TableName.IdentityAuthTemplate).where({ orgId });

    if (search) {
      const searchFilter = `%${search.toLowerCase()}%`;
      query = query.whereRaw("LOWER(name) LIKE ?", [searchFilter]);
      countQuery = countQuery.whereRaw("LOWER(name) LIKE ?", [searchFilter]);
    }

    query = query.orderBy("createdAt", "desc");

    if (limit !== undefined) {
      query = query.limit(limit);
    }
    if (offset !== undefined) {
      query = query.offset(offset);
    }

    const docs = await query;

    const [{ count }] = (await countQuery.count("* as count")) as [{ count: string | number }];

    return { docs, totalCount: Number(count) };
  };

  const findByAuthMethod = async (authMethod: string, orgId: string, tx?: Knex) => {
    const query = (tx || db.replicaNode())(TableName.IdentityAuthTemplate)
      .where({ authMethod, orgId })
      .orderBy("createdAt", "desc");
    const docs = await query;
    return docs;
  };

  const findTemplateUsages = async (templateId: string, authMethod: string, tx?: Knex) => {
    switch (authMethod) {
      case IdentityAuthTemplateMethod.LDAP:
        const query = (tx || db.replicaNode())(TableName.IdentityLdapAuth)
          .join(TableName.Identity, `${TableName.IdentityLdapAuth}.identityId`, `${TableName.Identity}.id`)
          // eslint-disable-next-line @typescript-eslint/no-misused-promises
          .where(buildFindFilter({ templateId }, TableName.IdentityLdapAuth))
          .select(
            db.ref("identityId").withSchema(TableName.IdentityLdapAuth),
            db.ref("name").withSchema(TableName.Identity).as("identityName")
          );
        const docs = await query;
        return docs;
      default:
        return [];
    }
  };

  const findByIdAndOrgId = async (id: string, orgId: string, tx?: Knex) => {
    const query = (tx || db.replicaNode())(TableName.IdentityAuthTemplate).where({ id, orgId });
    const doc = await query;
    return doc?.[0];
  };

  return {
    ...identityAuthTemplateOrm,
    findByOrgId,
    findByAuthMethod,
    findTemplateUsages,
    findByIdAndOrgId
  };
};
backend/src/ee/services/identity-auth-template/identity-auth-template-enums.ts (new file, 22 lines)
@@ -0,0 +1,22 @@
export enum IdentityAuthTemplateMethod {
  LDAP = "ldap"
}

export const TEMPLATE_VALIDATION_MESSAGES = {
  TEMPLATE_NAME_REQUIRED: "Template name is required",
  TEMPLATE_NAME_MAX_LENGTH: "Template name must be at most 64 characters long",
  AUTH_METHOD_REQUIRED: "Auth method is required",
  TEMPLATE_ID_REQUIRED: "Template ID is required",
  LDAP: {
    URL_REQUIRED: "LDAP URL is required",
    BIND_DN_REQUIRED: "Bind DN is required",
    BIND_PASSWORD_REQUIRED: "Bind password is required",
    SEARCH_BASE_REQUIRED: "Search base is required"
  }
} as const;

export const TEMPLATE_SUCCESS_MESSAGES = {
  CREATED: "Template created successfully",
  UPDATED: "Template updated successfully",
  DELETED: "Template deleted successfully"
} as const;
backend/src/ee/services/identity-auth-template/identity-auth-template-service.ts (new file, 454 lines)
@@ -0,0 +1,454 @@
import { ForbiddenError } from "@casl/ability";

import { EventType, TAuditLogServiceFactory } from "@app/ee/services/audit-log/audit-log-types";
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
import {
  OrgPermissionMachineIdentityAuthTemplateActions,
  OrgPermissionSubjects
} from "@app/ee/services/permission/org-permission";
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service-types";
import { BadRequestError, NotFoundError } from "@app/lib/errors";
import { TOrgPermission } from "@app/lib/types";
import { ActorType } from "@app/services/auth/auth-type";
import { TIdentityLdapAuthDALFactory } from "@app/services/identity-ldap-auth/identity-ldap-auth-dal";
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
import { KmsDataKey } from "@app/services/kms/kms-types";

import { TIdentityAuthTemplateDALFactory } from "./identity-auth-template-dal";
import { IdentityAuthTemplateMethod } from "./identity-auth-template-enums";
import {
  TDeleteIdentityAuthTemplateDTO,
  TFindTemplateUsagesDTO,
  TGetIdentityAuthTemplateDTO,
  TGetTemplatesByAuthMethodDTO,
  TLdapTemplateFields,
  TListIdentityAuthTemplatesDTO,
  TUnlinkTemplateUsageDTO
} from "./identity-auth-template-types";

type TIdentityAuthTemplateServiceFactoryDep = {
  identityAuthTemplateDAL: TIdentityAuthTemplateDALFactory;
  identityLdapAuthDAL: TIdentityLdapAuthDALFactory;
  permissionService: Pick<TPermissionServiceFactory, "getOrgPermission">;
  kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey" | "encryptWithInputKey" | "decryptWithInputKey">;
  licenseService: Pick<TLicenseServiceFactory, "getPlan">;
  auditLogService: Pick<TAuditLogServiceFactory, "createAuditLog">;
};

export type TIdentityAuthTemplateServiceFactory = ReturnType<typeof identityAuthTemplateServiceFactory>;

export const identityAuthTemplateServiceFactory = ({
  identityAuthTemplateDAL,
  identityLdapAuthDAL,
  permissionService,
  kmsService,
  licenseService,
  auditLogService
}: TIdentityAuthTemplateServiceFactoryDep) => {
  // Plan check
  const $checkPlan = async (orgId: string) => {
    const plan = await licenseService.getPlan(orgId);
    if (!plan.machineIdentityAuthTemplates)
      throw new BadRequestError({
        message:
          "Failed to use identity auth template due to plan restriction. Upgrade plan to access machine identity auth templates."
      });
  };

  const createTemplate = async ({
    name,
    authMethod,
    templateFields,
    actorId,
    actorAuthMethod,
    actor,
    actorOrgId
  }: {
    name: string;
    authMethod: string;
    templateFields: Record<string, unknown>;
  } & Omit<TOrgPermission, "orgId">) => {
    await $checkPlan(actorOrgId);
    const { permission } = await permissionService.getOrgPermission(
      actor,
      actorId,
      actorOrgId,
      actorAuthMethod,
      actorOrgId
    );
    ForbiddenError.from(permission).throwUnlessCan(
      OrgPermissionMachineIdentityAuthTemplateActions.CreateTemplates,
      OrgPermissionSubjects.MachineIdentityAuthTemplate
    );

    const { encryptor } = await kmsService.createCipherPairWithDataKey({
      type: KmsDataKey.Organization,
      orgId: actorOrgId
    });
    const template = await identityAuthTemplateDAL.create({
      name,
      authMethod,
      templateFields: encryptor({ plainText: Buffer.from(JSON.stringify(templateFields)) }).cipherTextBlob,
      orgId: actorOrgId
    });

    return { ...template, templateFields };
  };

  const updateTemplate = async ({
    templateId,
    name,
    templateFields,
    actorId,
    actorAuthMethod,
    actor,
    actorOrgId
  }: {
    templateId: string;
    name?: string;
    templateFields?: Record<string, unknown>;
  } & Omit<TOrgPermission, "orgId">) => {
    await $checkPlan(actorOrgId);
    const template = await identityAuthTemplateDAL.findByIdAndOrgId(templateId, actorOrgId);
    if (!template) {
      throw new NotFoundError({ message: "Template not found" });
    }

    const { permission } = await permissionService.getOrgPermission(
      actor,
      actorId,
      template.orgId,
      actorAuthMethod,
      actorOrgId
    );
    ForbiddenError.from(permission).throwUnlessCan(
      OrgPermissionMachineIdentityAuthTemplateActions.EditTemplates,
      OrgPermissionSubjects.MachineIdentityAuthTemplate
    );

    const { encryptor } = await kmsService.createCipherPairWithDataKey({
      type: KmsDataKey.Organization,
      orgId: template.orgId
    });

    let finalTemplateFields: Record<string, unknown> = {};

    const updatedTemplate = await identityAuthTemplateDAL.transaction(async (tx) => {
      const authTemplate = await identityAuthTemplateDAL.updateById(
        templateId,
        {
          name,
          ...(templateFields && {
            templateFields: encryptor({ plainText: Buffer.from(JSON.stringify(templateFields)) }).cipherTextBlob
          })
        },
        tx
      );

      if (templateFields && template.authMethod === IdentityAuthTemplateMethod.LDAP) {
        const { decryptor } = await kmsService.createCipherPairWithDataKey({
          type: KmsDataKey.Organization,
          orgId: template.orgId
        });

        const currentTemplateFields = JSON.parse(
          decryptor({ cipherTextBlob: template.templateFields }).toString()
        ) as TLdapTemplateFields;

        const mergedTemplateFields: TLdapTemplateFields = { ...currentTemplateFields, ...templateFields };
        finalTemplateFields = mergedTemplateFields;
        const ldapUpdateData: {
          url?: string;
          searchBase?: string;
          encryptedBindDN?: Buffer;
          encryptedBindPass?: Buffer;
          encryptedLdapCaCertificate?: Buffer;
        } = {};

        if ("url" in templateFields) {
          ldapUpdateData.url = mergedTemplateFields.url;
        }
        if ("searchBase" in templateFields) {
          ldapUpdateData.searchBase = mergedTemplateFields.searchBase;
        }
        if ("bindDN" in templateFields) {
          ldapUpdateData.encryptedBindDN = encryptor({
            plainText: Buffer.from(mergedTemplateFields.bindDN)
          }).cipherTextBlob;
        }
        if ("bindPass" in templateFields) {
          ldapUpdateData.encryptedBindPass = encryptor({
            plainText: Buffer.from(mergedTemplateFields.bindPass)
          }).cipherTextBlob;
        }
        if ("ldapCaCertificate" in templateFields) {
          ldapUpdateData.encryptedLdapCaCertificate = encryptor({
            plainText: Buffer.from(mergedTemplateFields.ldapCaCertificate || "")
          }).cipherTextBlob;
        }

        if (Object.keys(ldapUpdateData).length > 0) {
          const updatedLdapAuths = await identityLdapAuthDAL.update({ templateId }, ldapUpdateData, tx);
          await Promise.all(
            updatedLdapAuths.map(async (updatedLdapAuth) => {
              await auditLogService.createAuditLog({
                actor: {
                  type: ActorType.PLATFORM,
                  metadata: {}
                },
                orgId: actorOrgId,
                event: {
                  type: EventType.UPDATE_IDENTITY_LDAP_AUTH,
                  metadata: {
                    identityId: updatedLdapAuth.identityId,
                    templateId: template.id
                  }
                }
              });
            })
          );
        }
      }
      return authTemplate;
    });

    return { ...updatedTemplate, templateFields: finalTemplateFields };
  };

  const deleteTemplate = async ({
    templateId,
    actorId,
    actorAuthMethod,
    actor,
    actorOrgId
  }: TDeleteIdentityAuthTemplateDTO) => {
    await $checkPlan(actorOrgId);
    const template = await identityAuthTemplateDAL.findByIdAndOrgId(templateId, actorOrgId);
    if (!template) {
      throw new NotFoundError({ message: "Template not found" });
    }

    const { permission } = await permissionService.getOrgPermission(
      actor,
      actorId,
      template.orgId,
      actorAuthMethod,
      actorOrgId
    );
    ForbiddenError.from(permission).throwUnlessCan(
      OrgPermissionMachineIdentityAuthTemplateActions.DeleteTemplates,
      OrgPermissionSubjects.MachineIdentityAuthTemplate
    );

    const deletedTemplate = await identityAuthTemplateDAL.transaction(async (tx) => {
      // Remove template reference from identityLdapAuth records
      const updatedLdapAuths = await identityLdapAuthDAL.update({ templateId }, { templateId: null }, tx);
      await Promise.all(
        updatedLdapAuths.map(async (updatedLdapAuth) => {
          await auditLogService.createAuditLog({
            actor: {
              type: ActorType.PLATFORM,
              metadata: {}
            },
            orgId: actorOrgId,
            event: {
              type: EventType.UPDATE_IDENTITY_LDAP_AUTH,
              metadata: {
                identityId: updatedLdapAuth.identityId,
                templateId: template.id
              }
            }
          });
        })
      );

      // Delete the template
      const [deletedTpl] = await identityAuthTemplateDAL.delete({ id: templateId }, tx);
      return deletedTpl;
    });

    return deletedTemplate;
  };

  const getTemplate = async ({
    templateId,
    actorId,
    actorAuthMethod,
    actor,
    actorOrgId
  }: TGetIdentityAuthTemplateDTO) => {
    await $checkPlan(actorOrgId);
    const template = await identityAuthTemplateDAL.findByIdAndOrgId(templateId, actorOrgId);
    if (!template) {
      throw new NotFoundError({ message: "Template not found" });
    }

    const { permission } = await permissionService.getOrgPermission(
      actor,
      actorId,
      template.orgId,
      actorAuthMethod,
      actorOrgId
    );
    ForbiddenError.from(permission).throwUnlessCan(
      OrgPermissionMachineIdentityAuthTemplateActions.ListTemplates,
      OrgPermissionSubjects.MachineIdentityAuthTemplate
    );

    const { decryptor } = await kmsService.createCipherPairWithDataKey({
      type: KmsDataKey.Organization,
      orgId: template.orgId
    });
    const decryptedTemplateFields = decryptor({ cipherTextBlob: template.templateFields }).toString();
    return {
      ...template,
      // eslint-disable-next-line @typescript-eslint/no-unsafe-assignment
      templateFields: JSON.parse(decryptedTemplateFields)
    };
  };

  const listTemplates = async ({
    limit,
    offset,
    search,
    actorId,
    actorAuthMethod,
    actor,
    actorOrgId
  }: TListIdentityAuthTemplatesDTO) => {
    await $checkPlan(actorOrgId);
    const { permission } = await permissionService.getOrgPermission(
      actor,
      actorId,
      actorOrgId,
      actorAuthMethod,
      actorOrgId
    );
    ForbiddenError.from(permission).throwUnlessCan(
      OrgPermissionMachineIdentityAuthTemplateActions.ListTemplates,
      OrgPermissionSubjects.MachineIdentityAuthTemplate
    );

    const { docs, totalCount } = await identityAuthTemplateDAL.findByOrgId(actorOrgId, { limit, offset, search });

    const { decryptor } = await kmsService.createCipherPairWithDataKey({
      type: KmsDataKey.Organization,
      orgId: actorOrgId
    });
    return {
      totalCount,
      templates: docs.map((doc) => ({
        ...doc,
        // eslint-disable-next-line @typescript-eslint/no-unsafe-assignment
        templateFields: JSON.parse(decryptor({ cipherTextBlob: doc.templateFields }).toString())
      }))
    };
  };

  const getTemplatesByAuthMethod = async ({
    authMethod,
    actorId,
    actorAuthMethod,
    actor,
    actorOrgId
  }: TGetTemplatesByAuthMethodDTO) => {
    await $checkPlan(actorOrgId);
    const { permission } = await permissionService.getOrgPermission(
      actor,
      actorId,
      actorOrgId,
      actorAuthMethod,
      actorOrgId
    );
    ForbiddenError.from(permission).throwUnlessCan(
      OrgPermissionMachineIdentityAuthTemplateActions.AttachTemplates,
      OrgPermissionSubjects.MachineIdentityAuthTemplate
    );

    const docs = await identityAuthTemplateDAL.findByAuthMethod(authMethod, actorOrgId);

    const { decryptor } = await kmsService.createCipherPairWithDataKey({
      type: KmsDataKey.Organization,
      orgId: actorOrgId
    });
    return docs.map((doc) => ({
      ...doc,
      // eslint-disable-next-line @typescript-eslint/no-unsafe-assignment
      templateFields: JSON.parse(decryptor({ cipherTextBlob: doc.templateFields }).toString())
    }));
  };

  const findTemplateUsages = async ({
    templateId,
    actorId,
    actorAuthMethod,
    actor,
    actorOrgId
  }: TFindTemplateUsagesDTO) => {
    await $checkPlan(actorOrgId);
    const { permission } = await permissionService.getOrgPermission(
      actor,
      actorId,
      actorOrgId,
      actorAuthMethod,
      actorOrgId
    );
    ForbiddenError.from(permission).throwUnlessCan(
      OrgPermissionMachineIdentityAuthTemplateActions.ListTemplates,
      OrgPermissionSubjects.MachineIdentityAuthTemplate
    );

    const template = await identityAuthTemplateDAL.findByIdAndOrgId(templateId, actorOrgId);
    if (!template) {
      throw new NotFoundError({ message: "Template not found" });
    }

    const docs = await identityAuthTemplateDAL.findTemplateUsages(templateId, template.authMethod);
    return docs;
  };

  const unlinkTemplateUsage = async ({
    templateId,
    identityIds,
    actorId,
    actorAuthMethod,
    actor,
    actorOrgId
  }: TUnlinkTemplateUsageDTO) => {
    await $checkPlan(actorOrgId);
    const { permission } = await permissionService.getOrgPermission(
      actor,
      actorId,
      actorOrgId,
      actorAuthMethod,
      actorOrgId
    );
    ForbiddenError.from(permission).throwUnlessCan(
      OrgPermissionMachineIdentityAuthTemplateActions.UnlinkTemplates,
      OrgPermissionSubjects.MachineIdentityAuthTemplate
    );

    const template = await identityAuthTemplateDAL.findByIdAndOrgId(templateId, actorOrgId);
    if (!template) {
      throw new NotFoundError({ message: "Template not found" });
    }

    switch (template.authMethod) {
      case IdentityAuthTemplateMethod.LDAP:
        await identityLdapAuthDAL.update({ $in: { identityId: identityIds }, templateId }, { templateId: null });
        break;
      default:
        break;
    }
  };

  return {
    createTemplate,
    updateTemplate,
    deleteTemplate,
    getTemplate,
    listTemplates,
    getTemplatesByAuthMethod,
    findTemplateUsages,
    unlinkTemplateUsage
  };
};
backend/src/ee/services/identity-auth-template/identity-auth-template-types.ts (new file, 61 lines)
@@ -0,0 +1,61 @@
import { TProjectPermission } from "@app/lib/types";

import { IdentityAuthTemplateMethod } from "./identity-auth-template-enums";

// Method-specific template field types
export type TLdapTemplateFields = {
  url: string;
  bindDN: string;
  bindPass: string;
  searchBase: string;
  ldapCaCertificate?: string;
};

// Union type for all template field types
export type TTemplateFieldsByMethod = {
  [IdentityAuthTemplateMethod.LDAP]: TLdapTemplateFields;
};

// Generic base types that use conditional types for type safety
export type TCreateIdentityAuthTemplateDTO = {
  name: string;
  authMethod: IdentityAuthTemplateMethod;
  templateFields: TTemplateFieldsByMethod[IdentityAuthTemplateMethod];
} & Omit<TProjectPermission, "projectId">;

export type TUpdateIdentityAuthTemplateDTO = {
  templateId: string;
  name?: string;
  templateFields?: Partial<TTemplateFieldsByMethod[IdentityAuthTemplateMethod]>;
} & Omit<TProjectPermission, "projectId">;

export type TDeleteIdentityAuthTemplateDTO = {
  templateId: string;
} & Omit<TProjectPermission, "projectId">;

export type TGetIdentityAuthTemplateDTO = {
  templateId: string;
} & Omit<TProjectPermission, "projectId">;

export type TListIdentityAuthTemplatesDTO = {
  limit?: number;
  offset?: number;
  search?: string;
} & Omit<TProjectPermission, "projectId">;

export type TGetTemplatesByAuthMethodDTO = {
  authMethod: string;
} & Omit<TProjectPermission, "projectId">;

export type TFindTemplateUsagesDTO = {
  templateId: string;
} & Omit<TProjectPermission, "projectId">;

export type TUnlinkTemplateUsageDTO = {
  templateId: string;
  identityIds: string[];
} & Omit<TProjectPermission, "projectId">;

// Specific LDAP types for convenience
export type TCreateLdapTemplateDTO = TCreateIdentityAuthTemplateDTO;
export type TUpdateLdapTemplateDTO = TUpdateIdentityAuthTemplateDTO;
backend/src/ee/services/identity-auth-template/index.ts (new file, 6 lines)
@@ -0,0 +1,6 @@
export type { TIdentityAuthTemplateDALFactory } from "./identity-auth-template-dal";
export { identityAuthTemplateDALFactory } from "./identity-auth-template-dal";
export * from "./identity-auth-template-enums";
export type { TIdentityAuthTemplateServiceFactory } from "./identity-auth-template-service";
export { identityAuthTemplateServiceFactory } from "./identity-auth-template-service";
export type * from "./identity-auth-template-types";
@@ -1,4 +1,5 @@
|
||||
import { ForbiddenError } from "@casl/ability";
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { OrgMembershipStatus, TableName, TLdapConfigsUpdate, TUsers } from "@app/db/schemas";
|
||||
import { TGroupDALFactory } from "@app/ee/services/group/group-dal";
|
||||
@@ -45,7 +46,7 @@ import { searchGroups, testLDAPConfig } from "./ldap-fns";
|
||||
import { TLdapGroupMapDALFactory } from "./ldap-group-map-dal";
|
||||
|
||||
type TLdapConfigServiceFactoryDep = {
|
||||
ldapConfigDAL: Pick<TLdapConfigDALFactory, "create" | "update" | "findOne">;
|
||||
ldapConfigDAL: Pick<TLdapConfigDALFactory, "create" | "update" | "findOne" | "transaction">;
|
||||
ldapGroupMapDAL: Pick<TLdapGroupMapDALFactory, "find" | "create" | "delete" | "findLdapGroupMapsByLdapConfigId">;
|
||||
orgMembershipDAL: Pick<TOrgMembershipDALFactory, "create">;
|
||||
orgDAL: Pick<
|
||||
@@ -55,7 +56,7 @@ type TLdapConfigServiceFactoryDep = {
|
||||
groupDAL: Pick<TGroupDALFactory, "find" | "findOne">;
|
||||
groupProjectDAL: Pick<TGroupProjectDALFactory, "find">;
|
||||
projectKeyDAL: Pick<TProjectKeyDALFactory, "find" | "findLatestProjectKey" | "insertMany" | "delete">;
|
||||
projectDAL: Pick<TProjectDALFactory, "findProjectGhostUser">;
|
||||
projectDAL: Pick<TProjectDALFactory, "findProjectGhostUser" | "findById">;
|
||||
projectBotDAL: Pick<TProjectBotDALFactory, "findOne">;
|
||||
userGroupMembershipDAL: Pick<
|
||||
TUserGroupMembershipDALFactory,
|
||||
@@ -131,6 +132,19 @@ export const ldapConfigServiceFactory = ({
|
||||
orgId
|
});

const isConnected = await testLDAPConfig({
bindDN,
bindPass,
caCert,
url
});

if (!isConnected) {
throw new BadRequestError({
message: "Failed to establish connection to LDAP directory. Please verify that your credentials are correct."
});
}

const ldapConfig = await ldapConfigDAL.create({
orgId,
isActive,
@@ -148,6 +162,50 @@ export const ldapConfigServiceFactory = ({
return ldapConfig;
};

const getLdapCfg = async (filter: { orgId: string; isActive?: boolean; id?: string }, tx?: Knex) => {
const ldapConfig = await ldapConfigDAL.findOne(filter, tx);
if (!ldapConfig) {
throw new NotFoundError({
message: `Failed to find organization LDAP data in organization with ID '${filter.orgId}'`
});
}

const { decryptor } = await kmsService.createCipherPairWithDataKey({
type: KmsDataKey.Organization,
orgId: ldapConfig.orgId
});

let bindDN = "";
if (ldapConfig.encryptedLdapBindDN) {
bindDN = decryptor({ cipherTextBlob: ldapConfig.encryptedLdapBindDN }).toString();
}

let bindPass = "";
if (ldapConfig.encryptedLdapBindPass) {
bindPass = decryptor({ cipherTextBlob: ldapConfig.encryptedLdapBindPass }).toString();
}

let caCert = "";
if (ldapConfig.encryptedLdapCaCertificate) {
caCert = decryptor({ cipherTextBlob: ldapConfig.encryptedLdapCaCertificate }).toString();
}

return {
id: ldapConfig.id,
organization: ldapConfig.orgId,
isActive: ldapConfig.isActive,
url: ldapConfig.url,
bindDN,
bindPass,
uniqueUserAttribute: ldapConfig.uniqueUserAttribute,
searchBase: ldapConfig.searchBase,
searchFilter: ldapConfig.searchFilter,
groupSearchBase: ldapConfig.groupSearchBase,
groupSearchFilter: ldapConfig.groupSearchFilter,
caCert
};
};

const updateLdapCfg = async ({
actor,
actorId,
@@ -202,53 +260,25 @@ export const ldapConfigServiceFactory = ({
updateQuery.encryptedLdapCaCertificate = encryptor({ plainText: Buffer.from(caCert) }).cipherTextBlob;
}

const [ldapConfig] = await ldapConfigDAL.update({ orgId }, updateQuery);
const config = await ldapConfigDAL.transaction(async (tx) => {
const [updatedLdapCfg] = await ldapConfigDAL.update({ orgId }, updateQuery, tx);
const decryptedLdapCfg = await getLdapCfg({ orgId }, tx);

return ldapConfig;
};
const isSoftDeletion = !decryptedLdapCfg.url && !decryptedLdapCfg.bindDN && !decryptedLdapCfg.bindPass;
if (!isSoftDeletion) {
const isConnected = await testLDAPConfig(decryptedLdapCfg);
if (!isConnected) {
throw new BadRequestError({
message:
"Failed to establish connection to LDAP directory. Please verify that your credentials are correct."
});
}
}

const getLdapCfg = async (filter: { orgId: string; isActive?: boolean; id?: string }) => {
const ldapConfig = await ldapConfigDAL.findOne(filter);
if (!ldapConfig) {
throw new NotFoundError({
message: `Failed to find organization LDAP data in organization with ID '${filter.orgId}'`
});
}

const { decryptor } = await kmsService.createCipherPairWithDataKey({
type: KmsDataKey.Organization,
orgId: ldapConfig.orgId
return updatedLdapCfg;
});

let bindDN = "";
if (ldapConfig.encryptedLdapBindDN) {
bindDN = decryptor({ cipherTextBlob: ldapConfig.encryptedLdapBindDN }).toString();
}

let bindPass = "";
if (ldapConfig.encryptedLdapBindPass) {
bindPass = decryptor({ cipherTextBlob: ldapConfig.encryptedLdapBindPass }).toString();
}

let caCert = "";
if (ldapConfig.encryptedLdapCaCertificate) {
caCert = decryptor({ cipherTextBlob: ldapConfig.encryptedLdapCaCertificate }).toString();
}

return {
id: ldapConfig.id,
organization: ldapConfig.orgId,
isActive: ldapConfig.isActive,
url: ldapConfig.url,
bindDN,
bindPass,
uniqueUserAttribute: ldapConfig.uniqueUserAttribute,
searchBase: ldapConfig.searchBase,
searchFilter: ldapConfig.searchFilter,
groupSearchBase: ldapConfig.groupSearchBase,
groupSearchFilter: ldapConfig.groupSearchFilter,
caCert
};
return config;
};

const getLdapCfgWithPermissionCheck = async ({
@@ -527,14 +557,13 @@ export const ldapConfigServiceFactory = ({
});

const isUserCompleted = Boolean(user.isAccepted);
const userEnc = await userDAL.findUserEncKeyByUserId(user.id);

const providerAuthToken = crypto.jwt().sign(
{
authTokenType: AuthTokenType.PROVIDER_TOKEN,
userId: user.id,
username: user.username,
hasExchangedPrivateKey: Boolean(userEnc?.serverEncryptedPrivateKey),
hasExchangedPrivateKey: true,
...(user.email && { email: user.email, isEmailVerified: user.isEmailVerified }),
firstName,
lastName,
@@ -694,7 +723,17 @@ export const ldapConfigServiceFactory = ({
return deletedGroupMap;
};

const testLDAPConnection = async ({ actor, actorId, orgId, actorAuthMethod, actorOrgId }: TTestLdapConnectionDTO) => {
const testLDAPConnection = async ({
actor,
actorId,
orgId,
actorAuthMethod,
actorOrgId,
bindDN,
bindPass,
caCert,
url
}: TTestLdapConnectionDTO) => {
const { permission } = await permissionService.getOrgPermission(actor, actorId, orgId, actorAuthMethod, actorOrgId);
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Create, OrgPermissionSubjects.Ldap);

@@ -704,11 +743,12 @@ export const ldapConfigServiceFactory = ({
message: "Failed to test LDAP connection due to plan restriction. Upgrade plan to test the LDAP connection."
});

const ldapConfig = await getLdapCfg({
orgId
return testLDAPConfig({
bindDN,
bindPass,
caCert,
url
});

return testLDAPConfig(ldapConfig);
};

return {
@@ -83,6 +83,4 @@ export type TDeleteLdapGroupMapDTO = {
ldapGroupMapId: string;
} & TOrgPermission;

export type TTestLdapConnectionDTO = {
ldapConfigId: string;
} & TOrgPermission;
export type TTestLdapConnectionDTO = TOrgPermission & TTestLDAPConfigDTO;
@@ -31,7 +31,8 @@ export const getDefaultOnPremFeatures = () => {
caCrl: false,
sshHostGroups: false,
enterpriseSecretSyncs: false,
enterpriseAppConnections: false
enterpriseAppConnections: false,
machineIdentityAuthTemplates: false
};
};
@@ -59,7 +59,9 @@ export const getDefaultOnPremFeatures = (): TFeatureSet => ({
secretScanning: false,
enterpriseSecretSyncs: false,
enterpriseAppConnections: false,
fips: false
fips: false,
eventSubscriptions: false,
machineIdentityAuthTemplates: false
});

export const setupLicenseRequestWithStore = (
@@ -5,13 +5,14 @@
// TODO(akhilmhdh): With tony find out the api structure and fill it here

import { ForbiddenError } from "@casl/ability";
import { AxiosError } from "axios";
import { CronJob } from "cron";
import { Knex } from "knex";

import { TKeyStoreFactory } from "@app/keystore/keystore";
import { getConfig } from "@app/lib/config/env";
import { verifyOfflineLicense } from "@app/lib/crypto";
import { NotFoundError } from "@app/lib/errors";
import { BadRequestError, NotFoundError } from "@app/lib/errors";
import { logger } from "@app/lib/logger";
import { TIdentityOrgDALFactory } from "@app/services/identity/identity-org-dal";
import { TOrgDALFactory } from "@app/services/org/org-dal";
@@ -603,10 +604,22 @@ export const licenseServiceFactory = ({
});
}

const { data } = await licenseServerCloudApi.request.delete(
`/api/license-server/v1/customers/${organization.customerId}/billing-details/payment-methods/${pmtMethodId}`
);
return data;
try {
const { data } = await licenseServerCloudApi.request.delete(
`/api/license-server/v1/customers/${organization.customerId}/billing-details/payment-methods/${pmtMethodId}`
);
return data;
} catch (error) {
if (error instanceof AxiosError) {
throw new BadRequestError({
// eslint-disable-next-line @typescript-eslint/no-unsafe-member-access
message: `Failed to remove payment method: ${error.response?.data?.message}`
});
}
throw new BadRequestError({
message: "Unable to remove payment method"
});
}
};

const getOrgTaxIds = async ({ orgId, actor, actorId, actorAuthMethod, actorOrgId }: TGetOrgTaxIdDTO) => {
@@ -75,7 +75,9 @@ export type TFeatureSet = {
secretScanning: false;
enterpriseSecretSyncs: false;
enterpriseAppConnections: false;
machineIdentityAuthTemplates: false;
fips: false;
eventSubscriptions: false;
};

export type TOrgPlansTableDTO = {
@@ -79,7 +79,7 @@ type TOidcConfigServiceFactoryDep = {
>;
groupProjectDAL: Pick<TGroupProjectDALFactory, "find">;
projectKeyDAL: Pick<TProjectKeyDALFactory, "find" | "findLatestProjectKey" | "insertMany" | "delete">;
projectDAL: Pick<TProjectDALFactory, "findProjectGhostUser">;
projectDAL: Pick<TProjectDALFactory, "findProjectGhostUser" | "findById">;
projectBotDAL: Pick<TProjectBotDALFactory, "findOne">;
auditLogService: Pick<TAuditLogServiceFactory, "createAuditLog">;
kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">;
@@ -404,7 +404,6 @@ export const oidcConfigServiceFactory = ({

await licenseService.updateSubscriptionOrgMemberCount(organization.id);

const userEnc = await userDAL.findUserEncKeyByUserId(user.id);
const isUserCompleted = Boolean(user.isAccepted);
const providerAuthToken = crypto.jwt().sign(
{
@@ -417,7 +416,7 @@ export const oidcConfigServiceFactory = ({
organizationName: organization.name,
organizationId: organization.id,
organizationSlug: organization.slug,
hasExchangedPrivateKey: Boolean(userEnc?.serverEncryptedPrivateKey),
hasExchangedPrivateKey: true,
authMethod: AuthMethod.OIDC,
authType: UserAliasType.OIDC,
isUserCompleted,
@@ -161,7 +161,8 @@ const buildAdminPermissionRules = () => {
ProjectPermissionSecretActions.ReadValue,
ProjectPermissionSecretActions.Create,
ProjectPermissionSecretActions.Edit,
ProjectPermissionSecretActions.Delete
ProjectPermissionSecretActions.Delete,
ProjectPermissionSecretActions.Subscribe
],
ProjectPermissionSub.Secrets
);
@@ -265,7 +266,8 @@ const buildMemberPermissionRules = () => {
ProjectPermissionSecretActions.ReadValue,
ProjectPermissionSecretActions.Edit,
ProjectPermissionSecretActions.Create,
ProjectPermissionSecretActions.Delete
ProjectPermissionSecretActions.Delete,
ProjectPermissionSecretActions.Subscribe
],
ProjectPermissionSub.Secrets
);
@@ -28,6 +28,15 @@ export enum OrgPermissionKmipActions {
Setup = "setup"
}

export enum OrgPermissionMachineIdentityAuthTemplateActions {
ListTemplates = "list-templates",
EditTemplates = "edit-templates",
CreateTemplates = "create-templates",
DeleteTemplates = "delete-templates",
UnlinkTemplates = "unlink-templates",
AttachTemplates = "attach-templates"
}

export enum OrgPermissionAdminConsoleAction {
AccessAllProjects = "access-all-projects"
}
@@ -88,6 +97,7 @@ export enum OrgPermissionSubjects {
Identity = "identity",
Kms = "kms",
AdminConsole = "organization-admin-console",
MachineIdentityAuthTemplate = "machine-identity-auth-template",
AuditLogs = "audit-logs",
ProjectTemplates = "project-templates",
AppConnections = "app-connections",
@@ -126,6 +136,7 @@ export type OrgPermissionSet =
)
]
| [OrgPermissionAdminConsoleAction, OrgPermissionSubjects.AdminConsole]
| [OrgPermissionMachineIdentityAuthTemplateActions, OrgPermissionSubjects.MachineIdentityAuthTemplate]
| [OrgPermissionKmipActions, OrgPermissionSubjects.Kmip]
| [OrgPermissionSecretShareAction, OrgPermissionSubjects.SecretShare];

@@ -237,6 +248,14 @@ export const OrgPermissionSchema = z.discriminatedUnion("subject", [
"Describe what action an entity can take."
)
}),
z.object({
subject: z
.literal(OrgPermissionSubjects.MachineIdentityAuthTemplate)
.describe("The entity this permission pertains to."),
action: CASL_ACTION_SCHEMA_NATIVE_ENUM(OrgPermissionMachineIdentityAuthTemplateActions).describe(
"Describe what action an entity can take."
)
}),
z.object({
subject: z.literal(OrgPermissionSubjects.Gateway).describe("The entity this permission pertains to."),
action: CASL_ACTION_SCHEMA_NATIVE_ENUM(OrgPermissionGatewayActions).describe(
@@ -350,6 +369,25 @@ const buildAdminPermission = () => {
// the proxy assignment is temporary in order to prevent "more privilege" error during role assignment to MI
can(OrgPermissionKmipActions.Proxy, OrgPermissionSubjects.Kmip);

can(OrgPermissionMachineIdentityAuthTemplateActions.ListTemplates, OrgPermissionSubjects.MachineIdentityAuthTemplate);
can(OrgPermissionMachineIdentityAuthTemplateActions.EditTemplates, OrgPermissionSubjects.MachineIdentityAuthTemplate);
can(
OrgPermissionMachineIdentityAuthTemplateActions.CreateTemplates,
OrgPermissionSubjects.MachineIdentityAuthTemplate
);
can(
OrgPermissionMachineIdentityAuthTemplateActions.DeleteTemplates,
OrgPermissionSubjects.MachineIdentityAuthTemplate
);
can(
OrgPermissionMachineIdentityAuthTemplateActions.UnlinkTemplates,
OrgPermissionSubjects.MachineIdentityAuthTemplate
);
can(
OrgPermissionMachineIdentityAuthTemplateActions.AttachTemplates,
OrgPermissionSubjects.MachineIdentityAuthTemplate
);

can(OrgPermissionSecretShareAction.ManageSettings, OrgPermissionSubjects.SecretShare);

return rules;
@@ -385,6 +423,16 @@ const buildMemberPermission = () => {
can(OrgPermissionGatewayActions.CreateGateways, OrgPermissionSubjects.Gateway);
can(OrgPermissionGatewayActions.AttachGateways, OrgPermissionSubjects.Gateway);

can(OrgPermissionMachineIdentityAuthTemplateActions.ListTemplates, OrgPermissionSubjects.MachineIdentityAuthTemplate);
can(
OrgPermissionMachineIdentityAuthTemplateActions.UnlinkTemplates,
OrgPermissionSubjects.MachineIdentityAuthTemplate
);
can(
OrgPermissionMachineIdentityAuthTemplateActions.AttachTemplates,
OrgPermissionSubjects.MachineIdentityAuthTemplate
);

return rules;
};
@@ -36,7 +36,8 @@ export enum ProjectPermissionSecretActions {
ReadValue = "readValue",
Create = "create",
Edit = "edit",
Delete = "delete"
Delete = "delete",
Subscribe = "subscribe"
}

export enum ProjectPermissionCmekActions {
@@ -204,6 +205,7 @@ export type SecretSubjectFields = {
secretPath: string;
secretName?: string;
secretTags?: string[];
eventType?: string;
};

export type SecretFolderSubjectFields = {
@@ -483,7 +485,17 @@ const SecretConditionV2Schema = z
.object({
[PermissionConditionOperators.$IN]: PermissionConditionSchema[PermissionConditionOperators.$IN]
})
.partial()
.partial(),
eventType: z.union([
z.string(),
z
.object({
[PermissionConditionOperators.$EQ]: PermissionConditionSchema[PermissionConditionOperators.$EQ],
[PermissionConditionOperators.$NEQ]: PermissionConditionSchema[PermissionConditionOperators.$NEQ],
[PermissionConditionOperators.$IN]: PermissionConditionSchema[PermissionConditionOperators.$IN]
})
.partial()
])
})
.partial();
@@ -411,7 +411,6 @@ export const samlConfigServiceFactory = ({
await licenseService.updateSubscriptionOrgMemberCount(organization.id);

const isUserCompleted = Boolean(user.isAccepted && user.isEmailVerified);
const userEnc = await userDAL.findUserEncKeyByUserId(user.id);
const providerAuthToken = crypto.jwt().sign(
{
authTokenType: AuthTokenType.PROVIDER_TOKEN,
@@ -424,7 +423,7 @@ export const samlConfigServiceFactory = ({
organizationId: organization.id,
organizationSlug: organization.slug,
authMethod: authProvider,
hasExchangedPrivateKey: Boolean(userEnc?.serverEncryptedPrivateKey),
hasExchangedPrivateKey: true,
authType: UserAliasType.SAML,
isUserCompleted,
...(relayState
@@ -59,7 +59,7 @@ type TScimServiceFactoryDep = {
TOrgMembershipDALFactory,
"find" | "findOne" | "create" | "updateById" | "findById" | "update"
>;
projectDAL: Pick<TProjectDALFactory, "find" | "findProjectGhostUser">;
projectDAL: Pick<TProjectDALFactory, "find" | "findProjectGhostUser" | "findById">;
projectMembershipDAL: Pick<TProjectMembershipDALFactory, "find" | "delete" | "findProjectMembershipsByUserId">;
groupDAL: Pick<
TGroupDALFactory,
@@ -65,7 +65,10 @@ import { SmtpTemplates, TSmtpService } from "@app/services/smtp/smtp-service";
import { TUserDALFactory } from "@app/services/user/user-dal";

import { TLicenseServiceFactory } from "../license/license-service";
import { throwIfMissingSecretReadValueOrDescribePermission } from "../permission/permission-fns";
import {
hasSecretReadValueOrDescribePermission,
throwIfMissingSecretReadValueOrDescribePermission
} from "../permission/permission-fns";
import { TPermissionServiceFactory } from "../permission/permission-service-types";
import { ProjectPermissionSecretActions, ProjectPermissionSub } from "../permission/project-permission";
import { TSecretApprovalPolicyDALFactory } from "../secret-approval-policy/secret-approval-policy-dal";
@@ -277,13 +280,19 @@ export const secretApprovalRequestServiceFactory = ({
) {
throw new ForbiddenRequestError({ message: "User has insufficient privileges" });
}

const hasSecretReadAccess = permission.can(
ProjectPermissionSecretActions.DescribeAndReadValue,
ProjectPermissionSub.Secrets
);
const getHasSecretReadAccess = (environment: string, tags: { slug: string }[], secretPath?: string) => {
const canRead = hasSecretReadValueOrDescribePermission(permission, ProjectPermissionSecretActions.ReadValue, {
environment,
secretPath: secretPath || "/",
secretTags: tags.map((i) => i.slug)
});
return canRead;
};

let secrets;
const secretPath = await folderDAL.findSecretPathByFolderIds(secretApprovalRequest.projectId, [
secretApprovalRequest.folderId
]);
if (shouldUseSecretV2Bridge) {
const { decryptor: secretManagerDecryptor } = await kmsService.createCipherPairWithDataKey({
type: KmsDataKey.SecretManager,
@@ -299,8 +308,8 @@ export const secretApprovalRequestServiceFactory = ({
version: el.version,
secretMetadata: el.secretMetadata as ResourceMetadataDTO,
isRotatedSecret: el.secret?.isRotatedSecret ?? false,
secretValueHidden: !hasSecretReadAccess,
secretValue: !hasSecretReadAccess
secretValueHidden: !getHasSecretReadAccess(secretApprovalRequest.environment, el.tags, secretPath?.[0]?.path),
secretValue: !getHasSecretReadAccess(secretApprovalRequest.environment, el.tags, secretPath?.[0]?.path)
? INFISICAL_SECRET_VALUE_HIDDEN_MASK
: el.secret && el.secret.isRotatedSecret
? undefined
@@ -315,8 +324,12 @@ export const secretApprovalRequestServiceFactory = ({
secretKey: el.secret.key,
id: el.secret.id,
version: el.secret.version,
secretValueHidden: !hasSecretReadAccess,
secretValue: !hasSecretReadAccess
secretValueHidden: !getHasSecretReadAccess(
secretApprovalRequest.environment,
el.tags,
secretPath?.[0]?.path
),
secretValue: !getHasSecretReadAccess(secretApprovalRequest.environment, el.tags, secretPath?.[0]?.path)
? INFISICAL_SECRET_VALUE_HIDDEN_MASK
: el.secret.encryptedValue
? secretManagerDecryptor({ cipherTextBlob: el.secret.encryptedValue }).toString()
@@ -331,8 +344,12 @@ export const secretApprovalRequestServiceFactory = ({
secretKey: el.secretVersion.key,
id: el.secretVersion.id,
version: el.secretVersion.version,
secretValueHidden: !hasSecretReadAccess,
secretValue: !hasSecretReadAccess
secretValueHidden: !getHasSecretReadAccess(
secretApprovalRequest.environment,
el.tags,
secretPath?.[0]?.path
),
secretValue: !getHasSecretReadAccess(secretApprovalRequest.environment, el.tags, secretPath?.[0]?.path)
? INFISICAL_SECRET_VALUE_HIDDEN_MASK
: el.secretVersion.encryptedValue
? secretManagerDecryptor({ cipherTextBlob: el.secretVersion.encryptedValue }).toString()
@@ -350,7 +367,7 @@ export const secretApprovalRequestServiceFactory = ({
const encryptedSecrets = await secretApprovalRequestSecretDAL.findByRequestId(secretApprovalRequest.id);
secrets = encryptedSecrets.map((el) => ({
...el,
secretValueHidden: !hasSecretReadAccess,
secretValueHidden: !getHasSecretReadAccess(secretApprovalRequest.environment, el.tags, secretPath?.[0]?.path),
...decryptSecretWithBot(el, botKey),
secret: el.secret
? {
@@ -370,9 +387,6 @@ export const secretApprovalRequestServiceFactory = ({
: undefined
}));
}
const secretPath = await folderDAL.findSecretPathByFolderIds(secretApprovalRequest.projectId, [
secretApprovalRequest.folderId
]);

return { ...secretApprovalRequest, secretPath: secretPath?.[0]?.path || "/", commits: secrets };
};
@@ -21,6 +21,8 @@ const GRAPH_API_BASE = "https://graph.microsoft.com/v1.0";

type AzureErrorResponse = { error: { message: string } };

const EXPIRY_PADDING_IN_DAYS = 3;

const sleep = async () =>
new Promise((resolve) => {
setTimeout(resolve, 1000);
@@ -33,7 +35,8 @@ export const azureClientSecretRotationFactory: TRotationFactory<
const {
connection,
parameters: { objectId, clientId: clientIdParam },
secretsMapping
secretsMapping,
rotationInterval
} = secretRotation;

/**
@@ -50,7 +53,7 @@ export const azureClientSecretRotationFactory: TRotationFactory<
)}-${now.getFullYear()}`;

const endDateTime = new Date();
endDateTime.setFullYear(now.getFullYear() + 5);
endDateTime.setDate(now.getDate() + rotationInterval * 2 + EXPIRY_PADDING_IN_DAYS); // give 72 hour buffer

try {
const { data } = await request.post<AzureAddPasswordResponse>(
@@ -195,6 +198,12 @@ export const azureClientSecretRotationFactory: TRotationFactory<
callback
) => {
const credentials = await $rotateClientSecret();

// 2.5 years as expiry is set to x2 interval for the inactive period of credential
if (rotationInterval > Math.floor(365 * 2.5) - EXPIRY_PADDING_IN_DAYS) {
throw new BadRequestError({ message: "Azure does not support token duration over 5 years" });
}

return callback(credentials);
};
@@ -51,6 +51,7 @@ const baseSecretRotationV2Query = ({
db.ref("encryptedCredentials").withSchema(TableName.AppConnection).as("connectionEncryptedCredentials"),
db.ref("description").withSchema(TableName.AppConnection).as("connectionDescription"),
db.ref("version").withSchema(TableName.AppConnection).as("connectionVersion"),
db.ref("gatewayId").withSchema(TableName.AppConnection).as("connectionGatewayId"),
db.ref("createdAt").withSchema(TableName.AppConnection).as("connectionCreatedAt"),
db.ref("updatedAt").withSchema(TableName.AppConnection).as("connectionUpdatedAt"),
db
@@ -104,6 +105,7 @@ const expandSecretRotation = <T extends Awaited<ReturnType<typeof baseSecretRota
connectionCreatedAt,
connectionUpdatedAt,
connectionVersion,
connectionGatewayId,
connectionIsPlatformManagedCredentials,
...el
} = secretRotation;
@@ -123,6 +125,7 @@ const expandSecretRotation = <T extends Awaited<ReturnType<typeof baseSecretRota
createdAt: connectionCreatedAt,
updatedAt: connectionUpdatedAt,
version: connectionVersion,
gatewayId: connectionGatewayId,
isPlatformManagedCredentials: connectionIsPlatformManagedCredentials
},
folder: {
@@ -18,7 +18,8 @@ import {
TSecretScanningFactoryInitialize,
TSecretScanningFactoryListRawResources,
TSecretScanningFactoryPostInitialization,
TSecretScanningFactoryTeardown
TSecretScanningFactoryTeardown,
TSecretScanningFactoryValidateConfigUpdate
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-types";
import { getConfig } from "@app/lib/config/env";
import { request } from "@app/lib/config/request";
@@ -302,6 +303,13 @@ export const BitbucketSecretScanningFactory = () => {
);
};

const validateConfigUpdate: TSecretScanningFactoryValidateConfigUpdate<
TBitbucketDataSourceInput["config"],
TBitbucketDataSourceWithConnection
> = async () => {
// no validation required
};

return {
initialize,
postInitialization,
@@ -309,6 +317,7 @@ export const BitbucketSecretScanningFactory = () => {
getFullScanPath,
getDiffScanResourcePayload,
getDiffScanFindingsPayload,
teardown
teardown,
validateConfigUpdate
};
};
@@ -20,7 +20,8 @@ import {
TSecretScanningFactoryInitialize,
TSecretScanningFactoryListRawResources,
TSecretScanningFactoryPostInitialization,
TSecretScanningFactoryTeardown
TSecretScanningFactoryTeardown,
TSecretScanningFactoryValidateConfigUpdate
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-types";
import { getConfig } from "@app/lib/config/env";
import { BadRequestError } from "@app/lib/errors";
@@ -64,7 +65,14 @@ export const GitHubSecretScanningFactory = () => {
};

const teardown: TSecretScanningFactoryTeardown<TGitHubDataSourceWithConnection> = async () => {
// no termination required
// no teardown required
};

const validateConfigUpdate: TSecretScanningFactoryValidateConfigUpdate<
TGitHubDataSourceInput["config"],
TGitHubDataSourceWithConnection
> = async () => {
// no validation required
};

const listRawResources: TSecretScanningFactoryListRawResources<TGitHubDataSourceWithConnection> = async (
@@ -238,6 +246,7 @@ export const GitHubSecretScanningFactory = () => {
getFullScanPath,
getDiffScanResourcePayload,
getDiffScanFindingsPayload,
teardown
teardown,
validateConfigUpdate
};
};
@@ -0,0 +1,9 @@
import { SecretScanningDataSource } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
import { TSecretScanningDataSourceListItem } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-types";
import { AppConnection } from "@app/services/app-connection/app-connection-enums";

export const GITLAB_SECRET_SCANNING_DATA_SOURCE_LIST_OPTION: TSecretScanningDataSourceListItem = {
name: "GitLab",
type: SecretScanningDataSource.GitLab,
connection: AppConnection.GitLab
};
@@ -0,0 +1,8 @@
export enum GitLabDataSourceScope {
Project = "project",
Group = "group"
}

export enum GitLabWebHookEvent {
Push = "Push Hook"
}
@@ -0,0 +1,409 @@
import { Camelize, GitbeakerRequestError, GroupHookSchema, ProjectHookSchema } from "@gitbeaker/rest";
import { join } from "path";

import { scanContentAndGetFindings } from "@app/ee/services/secret-scanning/secret-scanning-queue/secret-scanning-fns";
import { SecretMatch } from "@app/ee/services/secret-scanning/secret-scanning-queue/secret-scanning-queue-types";
import {
SecretScanningFindingSeverity,
SecretScanningResource
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
import {
cloneRepository,
convertPatchLineToFileLineNumber,
replaceNonChangesWithNewlines
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-fns";
import {
TSecretScanningFactoryGetDiffScanFindingsPayload,
TSecretScanningFactoryGetDiffScanResourcePayload,
TSecretScanningFactoryGetFullScanPath,
TSecretScanningFactoryInitialize,
TSecretScanningFactoryListRawResources,
TSecretScanningFactoryParams,
TSecretScanningFactoryPostInitialization,
TSecretScanningFactoryTeardown,
TSecretScanningFactoryValidateConfigUpdate
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-types";
import { getConfig } from "@app/lib/config/env";
import { BadRequestError } from "@app/lib/errors";
import { titleCaseToCamelCase } from "@app/lib/fn";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { GitLabProjectRegex } from "@app/lib/regex";
import {
getGitLabConnectionClient,
getGitLabInstanceUrl,
TGitLabConnection
} from "@app/services/app-connection/gitlab";

import { GitLabDataSourceScope } from "./gitlab-secret-scanning-enums";
import {
TGitLabDataSourceCredentials,
TGitLabDataSourceInput,
TGitLabDataSourceWithConnection,
TQueueGitLabResourceDiffScan
} from "./gitlab-secret-scanning-types";

const getMainDomain = (instanceUrl: string) => {
const url = new URL(instanceUrl);
const { hostname } = url;
const parts = hostname.split(".");

if (parts.length >= 2) {
return parts.slice(-2).join(".");
}

return hostname;
};

export const GitLabSecretScanningFactory = ({ appConnectionDAL, kmsService }: TSecretScanningFactoryParams) => {
const initialize: TSecretScanningFactoryInitialize<
TGitLabDataSourceInput,
TGitLabConnection,
TGitLabDataSourceCredentials
> = async ({ payload: { config, name }, connection }, callback) => {
const token = alphaNumericNanoId(64);

const client = await getGitLabConnectionClient(connection, appConnectionDAL, kmsService);
const appCfg = getConfig();

if (config.scope === GitLabDataSourceScope.Project) {
const { projectId } = config;
const project = await client.Projects.show(projectId);

if (!project) {
throw new BadRequestError({ message: `Could not find project with ID ${projectId}.` });
}

let hook: Camelize<ProjectHookSchema>;
try {
hook = await client.ProjectHooks.add(projectId, `${appCfg.SITE_URL}/secret-scanning/webhooks/gitlab`, {
token,
pushEvents: true,
enableSslVerification: true,
// @ts-expect-error gitbeaker is outdated, and the types don't support this field yet
name: `Infisical Secret Scanning - ${name}`
});
} catch (error) {
if (error instanceof GitbeakerRequestError) {
throw new BadRequestError({ message: `${error.message}: ${error.cause?.description ?? "Unknown Error"}` });
}

throw error;
}

try {
return await callback({
credentials: {
token,
hookId: hook.id
}
});
} catch (error) {
try {
await client.ProjectHooks.remove(projectId, hook.id);
} catch {
// do nothing, just try to clean up webhook
}

throw error;
}
}

// group scope
const { groupId } = config;

const group = await client.Groups.show(groupId);

if (!group) {
throw new BadRequestError({ message: `Could not find group with ID ${groupId}.` });
}

let hook: Camelize<GroupHookSchema>;
try {
hook = await client.GroupHooks.add(groupId, `${appCfg.SITE_URL}/secret-scanning/webhooks/gitlab`, {
token,
pushEvents: true,
enableSslVerification: true,
// @ts-expect-error gitbeaker is outdated, and the types don't support this field yet
name: `Infisical Secret Scanning - ${name}`
});
} catch (error) {
if (error instanceof GitbeakerRequestError) {
throw new BadRequestError({ message: `${error.message}: ${error.cause?.description ?? "Unknown Error"}` });
}

throw error;
}

try {
return await callback({
credentials: {
token,
hookId: hook.id
}
});
} catch (error) {
try {
await client.GroupHooks.remove(groupId, hook.id);
} catch {
// do nothing, just try to clean up webhook
}

throw error;
}
};

const postInitialization: TSecretScanningFactoryPostInitialization<
TGitLabDataSourceInput,
TGitLabConnection,
TGitLabDataSourceCredentials
> = async ({ connection, dataSourceId, credentials, payload: { config } }) => {
const client = await getGitLabConnectionClient(connection, appConnectionDAL, kmsService);
const appCfg = getConfig();

const hookUrl = `${appCfg.SITE_URL}/secret-scanning/webhooks/gitlab`;
const { hookId } = credentials;

if (config.scope === GitLabDataSourceScope.Project) {
const { projectId } = config;

try {
await client.ProjectHooks.edit(projectId, hookId, hookUrl, {
// @ts-expect-error gitbeaker is outdated, and the types don't support this field yet
name: `Infisical Secret Scanning - ${dataSourceId}`,
custom_headers: [{ key: "x-data-source-id", value: dataSourceId }]
});
} catch (error) {
try {
await client.ProjectHooks.remove(projectId, hookId);
} catch {
// do nothing, just try to clean up webhook
}

throw error;
}

return;
}

// group-scope
const { groupId } = config;

try {
await client.GroupHooks.edit(groupId, hookId, hookUrl, {
// @ts-expect-error gitbeaker is outdated, and the types don't support this field yet
name: `Infisical Secret Scanning - ${dataSourceId}`,
custom_headers: [{ key: "x-data-source-id", value: dataSourceId }]
});
} catch (error) {
try {
await client.GroupHooks.remove(groupId, hookId);
} catch {
// do nothing, just try to clean up webhook
}

throw error;
}
};

const listRawResources: TSecretScanningFactoryListRawResources<TGitLabDataSourceWithConnection> = async (
dataSource
) => {
const { connection, config } = dataSource;

const client = await getGitLabConnectionClient(connection, appConnectionDAL, kmsService);

if (config.scope === GitLabDataSourceScope.Project) {
const { projectId } = config;

const project = await client.Projects.show(projectId);

if (!project) {
throw new BadRequestError({ message: `Could not find project with ID ${projectId}.` });
}

// scott: even though we have this data we want to get potentially updated name
return [
{
name: project.pathWithNamespace,
externalId: project.id.toString(),
type: SecretScanningResource.Project
}
];
}

// group-scope

const { groupId, includeProjects } = config;

const projects = await client.Groups.allProjects(groupId, {
archived: false
});

const filteredProjects: typeof projects = [];
if (!includeProjects || includeProjects.includes("*")) {
filteredProjects.push(...projects);
} else {
filteredProjects.push(...projects.filter((project) => includeProjects.includes(project.pathWithNamespace)));
}

return filteredProjects.map(({ id, pathWithNamespace }) => ({
name: pathWithNamespace,
externalId: id.toString(),
type: SecretScanningResource.Project
}));
};

const getFullScanPath: TSecretScanningFactoryGetFullScanPath<TGitLabDataSourceWithConnection> = async ({
dataSource,
resourceName,
tempFolder
}) => {
const { connection } = dataSource;

const instanceUrl = await getGitLabInstanceUrl(connection.credentials.instanceUrl);

const client = await getGitLabConnectionClient(connection, appConnectionDAL, kmsService);

const user = await client.Users.showCurrentUser();

const repoPath = join(tempFolder, "repo.git");

if (!GitLabProjectRegex.test(resourceName)) {
throw new Error("Invalid GitLab project name");
}

await cloneRepository({
cloneUrl: `https://${user.username}:${connection.credentials.accessToken}@${getMainDomain(instanceUrl)}/${resourceName}.git`,
repoPath
});

return repoPath;
};

const teardown: TSecretScanningFactoryTeardown<
TGitLabDataSourceWithConnection,
TGitLabDataSourceCredentials
> = async ({ dataSource: { connection, config }, credentials: { hookId } }) => {
const client = await getGitLabConnectionClient(connection, appConnectionDAL, kmsService);

if (config.scope === GitLabDataSourceScope.Project) {
const { projectId } = config;
try {
await client.ProjectHooks.remove(projectId, hookId);
} catch (error) {
// do nothing, just try to clean up webhook
}
return;
}

const { groupId } = config;
try {
await client.GroupHooks.remove(groupId, hookId);
} catch (error) {
// do nothing, just try to clean up webhook
}
};

const getDiffScanResourcePayload: TSecretScanningFactoryGetDiffScanResourcePayload<
TQueueGitLabResourceDiffScan["payload"]
> = ({ project }) => {
return {
name: project.path_with_namespace,
externalId: project.id.toString(),
type: SecretScanningResource.Project
};
};

const getDiffScanFindingsPayload: TSecretScanningFactoryGetDiffScanFindingsPayload<
TGitLabDataSourceWithConnection,
TQueueGitLabResourceDiffScan["payload"]
> = async ({ dataSource, payload, resourceName, configPath }) => {
const { connection } = dataSource;

const client = await getGitLabConnectionClient(connection, appConnectionDAL, kmsService);

const { commits, project } = payload;

const allFindings: SecretMatch[] = [];

for (const commit of commits) {
// eslint-disable-next-line no-await-in-loop
const commitDiffs = await client.Commits.showDiff(project.id, commit.id);

for (const commitDiff of commitDiffs) {
// eslint-disable-next-line no-continue
if (commitDiff.deletedFile) continue;

// eslint-disable-next-line no-await-in-loop
const findings = await scanContentAndGetFindings(
replaceNonChangesWithNewlines(`\n${commitDiff.diff}`),
configPath
);

const adjustedFindings = findings.map((finding) => {
const startLine = convertPatchLineToFileLineNumber(commitDiff.diff, finding.StartLine);
const endLine =
finding.StartLine === finding.EndLine
? startLine
: convertPatchLineToFileLineNumber(commitDiff.diff, finding.EndLine);
const startColumn = finding.StartColumn - 1; // subtract 1 for +
const endColumn = finding.EndColumn - 1; // subtract 1 for +
const authorName = commit.author.name;
const authorEmail = commit.author.email;

return {
...finding,
StartLine: startLine,
EndLine: endLine,
StartColumn: startColumn,
EndColumn: endColumn,
File: commitDiff.newPath,
Commit: commit.id,
Author: authorName,
Email: authorEmail,
Message: commit.message,
Fingerprint: `${commit.id}:${commitDiff.newPath}:${finding.RuleID}:${startLine}:${startColumn}`,
Date: commit.timestamp,
Link: `https://gitlab.com/${resourceName}/blob/${commit.id}/${commitDiff.newPath}#L${startLine}`
};
});

allFindings.push(...adjustedFindings);
}
}

return allFindings.map(
({
// discard match and secret as we don't want to store
Match,
Secret,
...finding
}) => ({
details: titleCaseToCamelCase(finding),
fingerprint: finding.Fingerprint,
severity: SecretScanningFindingSeverity.High,
rule: finding.RuleID
})
);
};

const validateConfigUpdate: TSecretScanningFactoryValidateConfigUpdate<
TGitLabDataSourceInput["config"],
TGitLabDataSourceWithConnection
> = async ({ config, dataSource }) => {
if (dataSource.config.scope !== config.scope) {
throw new BadRequestError({ message: "Cannot change Data Source scope after creation." });
}
};

return {
listRawResources,
getFullScanPath,
initialize,
postInitialization,
teardown,
getDiffScanResourcePayload,
getDiffScanFindingsPayload,
validateConfigUpdate
};
};
@@ -0,0 +1,101 @@
import { z } from "zod";

import { GitLabDataSourceScope } from "@app/ee/services/secret-scanning-v2/gitlab/gitlab-secret-scanning-enums";
import {
SecretScanningDataSource,
SecretScanningResource
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
import {
BaseCreateSecretScanningDataSourceSchema,
BaseSecretScanningDataSourceSchema,
BaseSecretScanningFindingSchema,
BaseUpdateSecretScanningDataSourceSchema,
GitRepositoryScanFindingDetailsSchema
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-schemas";
import { SecretScanningDataSources } from "@app/lib/api-docs";
import { GitLabProjectRegex } from "@app/lib/regex";
import { AppConnection } from "@app/services/app-connection/app-connection-enums";

export const GitLabDataSourceConfigSchema = z.discriminatedUnion("scope", [
z.object({
scope: z.literal(GitLabDataSourceScope.Group).describe(SecretScanningDataSources.CONFIG.GITLAB.scope),
groupId: z.number().describe(SecretScanningDataSources.CONFIG.GITLAB.groupId),
groupName: z.string().trim().max(256).optional().describe(SecretScanningDataSources.CONFIG.GITLAB.groupName),
includeProjects: z
.array(
z
.string()
.min(1)
.max(256)
.refine((value) => value === "*" || GitLabProjectRegex.test(value), "Invalid project name format")
)
.nonempty("One or more projects required")
.max(100, "Cannot configure more than 100 projects")
.default(["*"])
.describe(SecretScanningDataSources.CONFIG.GITLAB.includeProjects)
}),
z.object({
scope: z.literal(GitLabDataSourceScope.Project).describe(SecretScanningDataSources.CONFIG.GITLAB.scope),
projectName: z.string().trim().max(256).optional().describe(SecretScanningDataSources.CONFIG.GITLAB.projectName),
projectId: z.number().describe(SecretScanningDataSources.CONFIG.GITLAB.projectId)
})
]);

export const GitLabDataSourceSchema = BaseSecretScanningDataSourceSchema({
type: SecretScanningDataSource.GitLab,
isConnectionRequired: true
})
.extend({
config: GitLabDataSourceConfigSchema
})
.describe(
JSON.stringify({
title: "GitLab"
})
);

export const CreateGitLabDataSourceSchema = BaseCreateSecretScanningDataSourceSchema({
type: SecretScanningDataSource.GitLab,
isConnectionRequired: true
})
.extend({
config: GitLabDataSourceConfigSchema
})
.describe(
JSON.stringify({
title: "GitLab"
})
);

export const UpdateGitLabDataSourceSchema = BaseUpdateSecretScanningDataSourceSchema(SecretScanningDataSource.GitLab)
.extend({
config: GitLabDataSourceConfigSchema.optional()
})
.describe(
JSON.stringify({
title: "GitLab"
})
);

export const GitLabDataSourceListItemSchema = z
.object({
name: z.literal("GitLab"),
connection: z.literal(AppConnection.GitLab),
type: z.literal(SecretScanningDataSource.GitLab)
})
.describe(
JSON.stringify({
title: "GitLab"
})
);

export const GitLabFindingSchema = BaseSecretScanningFindingSchema.extend({
resourceType: z.literal(SecretScanningResource.Project),
dataSourceType: z.literal(SecretScanningDataSource.GitLab),
details: GitRepositoryScanFindingDetailsSchema
});

export const GitLabDataSourceCredentialsSchema = z.object({
token: z.string(),
hookId: z.number()
});
@@ -0,0 +1,94 @@
import { GitLabDataSourceScope } from "@app/ee/services/secret-scanning-v2/gitlab/gitlab-secret-scanning-enums";
import { TSecretScanningV2DALFactory } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-dal";
import { SecretScanningDataSource } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
import { TSecretScanningV2QueueServiceFactory } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-queue";
import { logger } from "@app/lib/logger";
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
import { KmsDataKey } from "@app/services/kms/kms-types";

import {
TGitLabDataSource,
TGitLabDataSourceCredentials,
THandleGitLabPushEvent
} from "./gitlab-secret-scanning-types";

export const gitlabSecretScanningService = (
secretScanningV2DAL: TSecretScanningV2DALFactory,
secretScanningV2Queue: Pick<TSecretScanningV2QueueServiceFactory, "queueResourceDiffScan">,
kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">
) => {
const handlePushEvent = async ({ payload, token, dataSourceId }: THandleGitLabPushEvent) => {
if (!payload.total_commits_count || !payload.project) {
logger.warn(
`secretScanningV2PushEvent: GitLab - Insufficient data [changes=${
payload.total_commits_count ?? 0
}] [projectName=${payload.project?.path_with_namespace ?? "unknown"}] [projectId=${payload.project?.id ?? "unknown"}]`
);
return;
}

const dataSource = (await secretScanningV2DAL.dataSources.findOne({
id: dataSourceId,
type: SecretScanningDataSource.GitLab
})) as TGitLabDataSource | undefined;

if (!dataSource) {
logger.error(
`secretScanningV2PushEvent: GitLab - Could not find data source [dataSourceId=${dataSourceId}] [projectId=${payload.project.id}]`
);
return;
}

const { isAutoScanEnabled, config, encryptedCredentials, projectId } = dataSource;

if (!encryptedCredentials) {
logger.info(
`secretScanningV2PushEvent: GitLab - Could not find encrypted credentials [dataSourceId=${dataSource.id}] [projectId=${payload.project.id}]`
);
return;
}

const { decryptor } = await kmsService.createCipherPairWithDataKey({
type: KmsDataKey.SecretManager,
projectId
});

const decryptedCredentials = decryptor({ cipherTextBlob: encryptedCredentials });

const credentials = JSON.parse(decryptedCredentials.toString()) as TGitLabDataSourceCredentials;

if (token !== credentials.token) {
logger.error(
`secretScanningV2PushEvent: GitLab - Invalid webhook token [dataSourceId=${dataSource.id}] [projectId=${payload.project.id}]`
);
return;
}

if (!isAutoScanEnabled) {
logger.info(
`secretScanningV2PushEvent: GitLab - ignoring due to auto scan disabled [dataSourceId=${dataSource.id}] [projectId=${payload.project.id}]`
);
return;
}

if (
config.scope === GitLabDataSourceScope.Project
? config.projectId.toString() === payload.project_id.toString()
: config.includeProjects.includes("*") || config.includeProjects.includes(payload.project.path_with_namespace)
) {
await secretScanningV2Queue.queueResourceDiffScan({
dataSourceType: SecretScanningDataSource.GitLab,
payload,
dataSourceId: dataSource.id
});
} else {
logger.info(
`secretScanningV2PushEvent: GitLab - ignoring due to repository not being present in config [dataSourceId=${dataSource.id}] [projectId=${payload.project.id}]`
);
}
};

return {
handlePushEvent
};
};
@@ -0,0 +1,97 @@
import { z } from "zod";

import { SecretScanningDataSource } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
import { TGitLabConnection } from "@app/services/app-connection/gitlab";

import {
CreateGitLabDataSourceSchema,
GitLabDataSourceCredentialsSchema,
GitLabDataSourceListItemSchema,
GitLabDataSourceSchema,
GitLabFindingSchema
} from "./gitlab-secret-scanning-schemas";

export type TGitLabDataSource = z.infer<typeof GitLabDataSourceSchema>;

export type TGitLabDataSourceInput = z.infer<typeof CreateGitLabDataSourceSchema>;

export type TGitLabDataSourceListItem = z.infer<typeof GitLabDataSourceListItemSchema>;

export type TGitLabFinding = z.infer<typeof GitLabFindingSchema>;

export type TGitLabDataSourceWithConnection = TGitLabDataSource & {
connection: TGitLabConnection;
};

export type TGitLabDataSourceCredentials = z.infer<typeof GitLabDataSourceCredentialsSchema>;

export type TGitLabDataSourcePushEventPayload = {
object_kind: "push";
event_name: "push";
before: string;
after: string;
ref: string;
ref_protected: boolean;
checkout_sha: string;
user_id: number;
user_name: string;
user_username: string;
user_email: string;
user_avatar: string;
project_id: number;
project: {
id: number;
name: string;
description: string;
web_url: string;
avatar_url: string | null;
git_ssh_url: string;
git_http_url: string;
namespace: string;
visibility_level: number;
path_with_namespace: string;
default_branch: string;
homepage: string;
url: string;
ssh_url: string;
http_url: string;
};
repository: {
name: string;
url: string;
description: string;
homepage: string;
git_http_url: string;
git_ssh_url: string;
visibility_level: number;
};
commits: {
id: string;
message: string;
title: string;
timestamp: string;
url: string;
author: {
name: string;
email: string;
};
added: string[];
modified: string[];
removed: string[];
}[];
total_commits_count: number;
};

export type THandleGitLabPushEvent = {
payload: TGitLabDataSourcePushEventPayload;
dataSourceId: string;
token: string;
};

export type TQueueGitLabResourceDiffScan = {
dataSourceType: SecretScanningDataSource.GitLab;
payload: TGitLabDataSourcePushEventPayload;
dataSourceId: string;
resourceId: string;
scanId: string;
};
@@ -0,0 +1,3 @@
export * from "./gitlab-secret-scanning-constants";
export * from "./gitlab-secret-scanning-schemas";
export * from "./gitlab-secret-scanning-types";
@@ -49,6 +49,7 @@ const baseSecretScanningDataSourceQuery = ({
db.ref("encryptedCredentials").withSchema(TableName.AppConnection).as("connectionEncryptedCredentials"),
db.ref("description").withSchema(TableName.AppConnection).as("connectionDescription"),
db.ref("version").withSchema(TableName.AppConnection).as("connectionVersion"),
db.ref("gatewayId").withSchema(TableName.AppConnection).as("connectionGatewayId"),
db.ref("createdAt").withSchema(TableName.AppConnection).as("connectionCreatedAt"),
db.ref("updatedAt").withSchema(TableName.AppConnection).as("connectionUpdatedAt"),
db
@@ -82,6 +83,7 @@ const expandSecretScanningDataSource = <
connectionUpdatedAt,
connectionVersion,
connectionIsPlatformManagedCredentials,
connectionGatewayId,
...el
} = dataSource;

@@ -100,7 +102,8 @@ const expandSecretScanningDataSource = <
createdAt: connectionCreatedAt,
updatedAt: connectionUpdatedAt,
version: connectionVersion,
isPlatformManagedCredentials: connectionIsPlatformManagedCredentials
isPlatformManagedCredentials: connectionIsPlatformManagedCredentials,
gatewayId: connectionGatewayId
}
: undefined
};
@@ -1,6 +1,7 @@
export enum SecretScanningDataSource {
GitHub = "github",
Bitbucket = "bitbucket"
Bitbucket = "bitbucket",
GitLab = "gitlab"
}

export enum SecretScanningScanStatus {
@@ -1,5 +1,6 @@
import { BitbucketSecretScanningFactory } from "@app/ee/services/secret-scanning-v2/bitbucket/bitbucket-secret-scanning-factory";
import { GitHubSecretScanningFactory } from "@app/ee/services/secret-scanning-v2/github/github-secret-scanning-factory";
import { GitLabSecretScanningFactory } from "@app/ee/services/secret-scanning-v2/gitlab/gitlab-secret-scanning-factory";

import { SecretScanningDataSource } from "./secret-scanning-v2-enums";
import {
@@ -19,5 +20,6 @@ type TSecretScanningFactoryImplementation = TSecretScanningFactory<

export const SECRET_SCANNING_FACTORY_MAP: Record<SecretScanningDataSource, TSecretScanningFactoryImplementation> = {
[SecretScanningDataSource.GitHub]: GitHubSecretScanningFactory as TSecretScanningFactoryImplementation,
[SecretScanningDataSource.Bitbucket]: BitbucketSecretScanningFactory as TSecretScanningFactoryImplementation
[SecretScanningDataSource.Bitbucket]: BitbucketSecretScanningFactory as TSecretScanningFactoryImplementation,
[SecretScanningDataSource.GitLab]: GitLabSecretScanningFactory as TSecretScanningFactoryImplementation
};
@@ -13,6 +13,7 @@ import {
import { SecretMatch } from "@app/ee/services/secret-scanning/secret-scanning-queue/secret-scanning-queue-types";
import { BITBUCKET_SECRET_SCANNING_DATA_SOURCE_LIST_OPTION } from "@app/ee/services/secret-scanning-v2/bitbucket";
import { GITHUB_SECRET_SCANNING_DATA_SOURCE_LIST_OPTION } from "@app/ee/services/secret-scanning-v2/github";
import { GITLAB_SECRET_SCANNING_DATA_SOURCE_LIST_OPTION } from "@app/ee/services/secret-scanning-v2/gitlab";
import { getConfig } from "@app/lib/config/env";
import { crypto } from "@app/lib/crypto";
import { BadRequestError } from "@app/lib/errors";
@@ -23,7 +24,8 @@ import { TCloneRepository, TGetFindingsPayload, TSecretScanningDataSourceListIte

const SECRET_SCANNING_SOURCE_LIST_OPTIONS: Record<SecretScanningDataSource, TSecretScanningDataSourceListItem> = {
[SecretScanningDataSource.GitHub]: GITHUB_SECRET_SCANNING_DATA_SOURCE_LIST_OPTION,
[SecretScanningDataSource.Bitbucket]: BITBUCKET_SECRET_SCANNING_DATA_SOURCE_LIST_OPTION
[SecretScanningDataSource.Bitbucket]: BITBUCKET_SECRET_SCANNING_DATA_SOURCE_LIST_OPTION,
[SecretScanningDataSource.GitLab]: GITLAB_SECRET_SCANNING_DATA_SOURCE_LIST_OPTION
};

export const listSecretScanningDataSourceOptions = () => {
@@ -3,15 +3,18 @@ import { AppConnection } from "@app/services/app-connection/app-connection-enums

export const SECRET_SCANNING_DATA_SOURCE_NAME_MAP: Record<SecretScanningDataSource, string> = {
[SecretScanningDataSource.GitHub]: "GitHub",
[SecretScanningDataSource.Bitbucket]: "Bitbucket"
[SecretScanningDataSource.Bitbucket]: "Bitbucket",
[SecretScanningDataSource.GitLab]: "GitLab"
};

export const SECRET_SCANNING_DATA_SOURCE_CONNECTION_MAP: Record<SecretScanningDataSource, AppConnection> = {
[SecretScanningDataSource.GitHub]: AppConnection.GitHubRadar,
[SecretScanningDataSource.Bitbucket]: AppConnection.Bitbucket
[SecretScanningDataSource.Bitbucket]: AppConnection.Bitbucket,
[SecretScanningDataSource.GitLab]: AppConnection.GitLab
};

export const AUTO_SYNC_DESCRIPTION_HELPER: Record<SecretScanningDataSource, { verb: string; noun: string }> = {
[SecretScanningDataSource.GitHub]: { verb: "push", noun: "repositories" },
[SecretScanningDataSource.Bitbucket]: { verb: "push", noun: "repositories" }
[SecretScanningDataSource.Bitbucket]: { verb: "push", noun: "repositories" },
[SecretScanningDataSource.GitLab]: { verb: "push", noun: "projects" }
};
@@ -16,6 +16,7 @@ import { getConfig } from "@app/lib/config/env";
 import { BadRequestError, InternalServerError } from "@app/lib/errors";
 import { logger } from "@app/lib/logger";
 import { QueueJobs, QueueName, TQueueServiceFactory } from "@app/queue";
+import { TAppConnectionDALFactory } from "@app/services/app-connection/app-connection-dal";
 import { decryptAppConnection } from "@app/services/app-connection/app-connection-fns";
 import { TAppConnection } from "@app/services/app-connection/app-connection-types";
 import { ActorType } from "@app/services/auth/auth-type";
@@ -48,6 +49,7 @@ type TSecretRotationV2QueueServiceFactoryDep = {
   projectMembershipDAL: Pick<TProjectMembershipDALFactory, "findAllProjectMembers">;
   projectDAL: Pick<TProjectDALFactory, "findById">;
   kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">;
+  appConnectionDAL: Pick<TAppConnectionDALFactory, "updateById">;
   auditLogService: Pick<TAuditLogServiceFactory, "createAuditLog">;
   keyStore: Pick<TKeyStoreFactory, "acquireLock" | "getItem">;
 };
@@ -62,7 +64,8 @@ export const secretScanningV2QueueServiceFactory = async ({
   smtpService,
   kmsService,
   auditLogService,
-  keyStore
+  keyStore,
+  appConnectionDAL
 }: TSecretRotationV2QueueServiceFactoryDep) => {
   const queueDataSourceFullScan = async (
     dataSource: TSecretScanningDataSourceWithConnection,
@@ -71,7 +74,10 @@ export const secretScanningV2QueueServiceFactory = async ({
     try {
       const { type } = dataSource;

-      const factory = SECRET_SCANNING_FACTORY_MAP[type]();
+      const factory = SECRET_SCANNING_FACTORY_MAP[type]({
+        kmsService,
+        appConnectionDAL
+      });

       const rawResources = await factory.listRawResources(dataSource);

@@ -171,7 +177,10 @@ export const secretScanningV2QueueServiceFactory = async ({
       let connection: TAppConnection | null = null;
       if (dataSource.connection) connection = await decryptAppConnection(dataSource.connection, kmsService);

-      const factory = SECRET_SCANNING_FACTORY_MAP[dataSource.type as SecretScanningDataSource]();
+      const factory = SECRET_SCANNING_FACTORY_MAP[dataSource.type as SecretScanningDataSource]({
+        kmsService,
+        appConnectionDAL
+      });

       const findingsPath = join(tempFolder, "findings.json");

@@ -329,7 +338,10 @@ export const secretScanningV2QueueServiceFactory = async ({
     dataSourceId,
     dataSourceType
   }: Pick<TQueueSecretScanningResourceDiffScan, "payload" | "dataSourceId" | "dataSourceType">) => {
-    const factory = SECRET_SCANNING_FACTORY_MAP[dataSourceType as SecretScanningDataSource]();
+    const factory = SECRET_SCANNING_FACTORY_MAP[dataSourceType as SecretScanningDataSource]({
+      kmsService,
+      appConnectionDAL
+    });

     const resourcePayload = factory.getDiffScanResourcePayload(payload);

@@ -391,7 +403,10 @@ export const secretScanningV2QueueServiceFactory = async ({

     if (!resource) throw new Error(`Resource with ID "${resourceId}" not found`);

-    const factory = SECRET_SCANNING_FACTORY_MAP[dataSource.type as SecretScanningDataSource]();
+    const factory = SECRET_SCANNING_FACTORY_MAP[dataSource.type as SecretScanningDataSource]({
+      kmsService,
+      appConnectionDAL
+    });

     const tempFolder = await createTempFolder();
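Every call site in the queue service changes the same way: the zero-argument factory call becomes a call that passes the shared services, presumably so provider implementations (GitLab in particular) can persist refreshed connection credentials through appConnectionDAL and encrypt material via kmsService rather than reaching for globals. The closure pattern, reduced to essentials (all identifiers other than the two service names are invented):

// Reduced sketch of the dependency-threading change: the queue service
// captures the deps once and hands them to whichever factory the job's
// data-source type selects.
type TDeps = { kmsService: unknown; appConnectionDAL: unknown };
type TFactory = (deps: TDeps) => { listRawResources: () => Promise<unknown[]> };

const queueServiceFactory = (deps: TDeps, factoryMap: Record<string, TFactory>) => {
  const queueFullScan = async (dataSourceType: string) => {
    // before: factoryMap[dataSourceType]() — after: pass the captured deps
    const factory = factoryMap[dataSourceType](deps);
    return factory.listRawResources();
  };
  return { queueFullScan };
};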
@@ -46,6 +46,7 @@ import {
 import { DatabaseErrorCode } from "@app/lib/error-codes";
 import { BadRequestError, DatabaseError, NotFoundError } from "@app/lib/errors";
 import { OrgServiceActor } from "@app/lib/types";
+import { TAppConnectionDALFactory } from "@app/services/app-connection/app-connection-dal";
 import { decryptAppConnection } from "@app/services/app-connection/app-connection-fns";
 import { TAppConnectionServiceFactory } from "@app/services/app-connection/app-connection-service";
 import { TAppConnection } from "@app/services/app-connection/app-connection-types";
@@ -53,12 +54,14 @@ import { TKmsServiceFactory } from "@app/services/kms/kms-service";
 import { KmsDataKey } from "@app/services/kms/kms-types";

 import { bitbucketSecretScanningService } from "./bitbucket/bitbucket-secret-scanning-service";
+import { gitlabSecretScanningService } from "./gitlab/gitlab-secret-scanning-service";
 import { TSecretScanningV2DALFactory } from "./secret-scanning-v2-dal";
 import { TSecretScanningV2QueueServiceFactory } from "./secret-scanning-v2-queue";

 export type TSecretScanningV2ServiceFactoryDep = {
   secretScanningV2DAL: TSecretScanningV2DALFactory;
   appConnectionService: Pick<TAppConnectionServiceFactory, "connectAppConnectionById">;
+  appConnectionDAL: Pick<TAppConnectionDALFactory, "updateById">;
   permissionService: Pick<TPermissionServiceFactory, "getProjectPermission" | "getOrgPermission">;
   licenseService: Pick<TLicenseServiceFactory, "getPlan">;
   secretScanningV2Queue: Pick<
@@ -76,6 +79,7 @@ export const secretScanningV2ServiceFactory = ({
   appConnectionService,
   licenseService,
   secretScanningV2Queue,
+  appConnectionDAL,
   kmsService
 }: TSecretScanningV2ServiceFactoryDep) => {
   const $checkListSecretScanningDataSourcesByProjectIdPermissions = async (
@@ -255,7 +259,10 @@ export const secretScanningV2ServiceFactory = ({
       );
     }

-    const factory = SECRET_SCANNING_FACTORY_MAP[payload.type]();
+    const factory = SECRET_SCANNING_FACTORY_MAP[payload.type]({
+      appConnectionDAL,
+      kmsService
+    });

     try {
       const createdDataSource = await factory.initialize(
@@ -363,6 +370,31 @@ export const secretScanningV2ServiceFactory = ({
        message: `Secret Scanning Data Source with ID "${dataSourceId}" is not configured for ${SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type]}`
       });

+    let connection: TAppConnection | null = null;
+    if (dataSource.connectionId) {
+      // validates permission to connect and app is valid for data source
+      connection = await appConnectionService.connectAppConnectionById(
+        SECRET_SCANNING_DATA_SOURCE_CONNECTION_MAP[dataSource.type],
+        dataSource.connectionId,
+        actor
+      );
+    }
+
+    const factory = SECRET_SCANNING_FACTORY_MAP[dataSource.type]({
+      appConnectionDAL,
+      kmsService
+    });
+
+    if (payload.config) {
+      await factory.validateConfigUpdate({
+        dataSource: {
+          ...dataSource,
+          connection
+        } as TSecretScanningDataSourceWithConnection,
+        config: payload.config as TSecretScanningDataSourceWithConnection["config"]
+      });
+    }
+
     try {
       const updatedDataSource = await secretScanningV2DAL.dataSources.updateById(dataSourceId, payload);

@@ -416,7 +448,10 @@ export const secretScanningV2ServiceFactory = ({
        message: `Secret Scanning Data Source with ID "${dataSourceId}" is not configured for ${SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type]}`
       });

-    const factory = SECRET_SCANNING_FACTORY_MAP[type]();
+    const factory = SECRET_SCANNING_FACTORY_MAP[type]({
+      appConnectionDAL,
+      kmsService
+    });

     let connection: TAppConnection | null = null;
     if (dataSource.connection) {
@@ -903,6 +938,7 @@ export const secretScanningV2ServiceFactory = ({
     findSecretScanningConfigByProjectId,
     upsertSecretScanningConfig,
     github: githubSecretScanningService(secretScanningV2DAL, secretScanningV2Queue),
-    bitbucket: bitbucketSecretScanningService(secretScanningV2DAL, secretScanningV2Queue, kmsService)
+    bitbucket: bitbucketSecretScanningService(secretScanningV2DAL, secretScanningV2Queue, kmsService),
+    gitlab: gitlabSecretScanningService(secretScanningV2DAL, secretScanningV2Queue, kmsService)
   };
 };
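The new update path is the substantive change here: before persisting a config edit, the service resolves the connection, builds the same dataSource-plus-connection shape the scanners use, and lets the provider factory veto the change via validateConfigUpdate. A stripped-down version of that control flow (types and helper names beyond those in the diff are invented stand-ins):

// Stripped-down control flow of the update handler above.
type TConnection = { id: string } | null;
type TDataSource = { type: string; connectionId?: string; config: object };
type TFactoryLike = {
  validateConfigUpdate: (p: { config: object; dataSource: object }) => Promise<void>;
};

const updateDataSource = async (
  dataSource: TDataSource,
  payload: { config?: object },
  deps: {
    getConnection: (id: string) => Promise<TConnection>;
    buildFactory: (type: string) => TFactoryLike;
    persist: (patch: { config?: object }) => Promise<void>;
  }
) => {
  // resolve the connection first so the validator sees the full shape
  const connection = dataSource.connectionId ? await deps.getConnection(dataSource.connectionId) : null;
  const factory = deps.buildFactory(dataSource.type);
  if (payload.config) {
    // a throwing validator aborts the update before anything is written
    await factory.validateConfigUpdate({ config: payload.config, dataSource: { ...dataSource, connection } });
  }
  await deps.persist(payload);
};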
@@ -21,14 +21,25 @@ import {
   TGitHubFinding,
   TQueueGitHubResourceDiffScan
 } from "@app/ee/services/secret-scanning-v2/github";
+import {
+  TGitLabDataSource,
+  TGitLabDataSourceCredentials,
+  TGitLabDataSourceInput,
+  TGitLabDataSourceListItem,
+  TGitLabDataSourceWithConnection,
+  TGitLabFinding,
+  TQueueGitLabResourceDiffScan
+} from "@app/ee/services/secret-scanning-v2/gitlab";
 import { TSecretScanningV2DALFactory } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-dal";
 import {
   SecretScanningDataSource,
   SecretScanningFindingStatus,
   SecretScanningScanStatus
 } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
+import { TAppConnectionDALFactory } from "@app/services/app-connection/app-connection-dal";
 import { TKmsServiceFactory } from "@app/services/kms/kms-service";

-export type TSecretScanningDataSource = TGitHubDataSource | TBitbucketDataSource;
+export type TSecretScanningDataSource = TGitHubDataSource | TBitbucketDataSource | TGitLabDataSource;

 export type TSecretScanningDataSourceWithDetails = TSecretScanningDataSource & {
   lastScannedAt?: Date | null;
@@ -52,15 +63,25 @@ export type TSecretScanningScanWithDetails = TSecretScanningScans & {

 export type TSecretScanningDataSourceWithConnection =
   | TGitHubDataSourceWithConnection
-  | TBitbucketDataSourceWithConnection;
+  | TBitbucketDataSourceWithConnection
+  | TGitLabDataSourceWithConnection;

-export type TSecretScanningDataSourceInput = TGitHubDataSourceInput | TBitbucketDataSourceInput;
+export type TSecretScanningDataSourceInput =
+  | TGitHubDataSourceInput
+  | TBitbucketDataSourceInput
+  | TGitLabDataSourceInput;

-export type TSecretScanningDataSourceListItem = TGitHubDataSourceListItem | TBitbucketDataSourceListItem;
+export type TSecretScanningDataSourceListItem =
+  | TGitHubDataSourceListItem
+  | TBitbucketDataSourceListItem
+  | TGitLabDataSourceListItem;

-export type TSecretScanningDataSourceCredentials = TBitbucketDataSourceCredentials | undefined;
+export type TSecretScanningDataSourceCredentials =
+  | TBitbucketDataSourceCredentials
+  | TGitLabDataSourceCredentials
+  | undefined;

-export type TSecretScanningFinding = TGitHubFinding | TBitbucketFinding;
+export type TSecretScanningFinding = TGitHubFinding | TBitbucketFinding | TGitLabFinding;

 export type TListSecretScanningDataSourcesByProjectId = {
   projectId: string;
@@ -112,7 +133,10 @@ export type TQueueSecretScanningDataSourceFullScan = {
   scanId: string;
 };

-export type TQueueSecretScanningResourceDiffScan = TQueueGitHubResourceDiffScan | TQueueBitbucketResourceDiffScan;
+export type TQueueSecretScanningResourceDiffScan =
+  | TQueueGitHubResourceDiffScan
+  | TQueueBitbucketResourceDiffScan
+  | TQueueGitLabResourceDiffScan;

 export type TQueueSecretScanningSendNotification = {
   dataSource: TSecretScanningDataSources;
@@ -170,6 +194,11 @@ export type TSecretScanningFactoryInitialize<
   callback: (parameters: { credentials?: C; externalId?: string }) => Promise<TSecretScanningDataSourceRaw>
 ) => Promise<TSecretScanningDataSourceRaw>;

+export type TSecretScanningFactoryValidateConfigUpdate<
+  C extends TSecretScanningDataSourceInput["config"],
+  T extends TSecretScanningDataSourceWithConnection
+> = (params: { config: C; dataSource: T }) => Promise<void>;
+
 export type TSecretScanningFactoryPostInitialization<
   P extends TSecretScanningDataSourceInput,
   T extends TSecretScanningDataSourceWithConnection["connection"] | undefined = undefined,
@@ -181,17 +210,23 @@ export type TSecretScanningFactoryTeardown<
   C extends TSecretScanningDataSourceCredentials = undefined
 > = (params: { dataSource: T; credentials: C }) => Promise<void>;

+export type TSecretScanningFactoryParams = {
+  appConnectionDAL: Pick<TAppConnectionDALFactory, "updateById">;
+  kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">;
+};
+
 export type TSecretScanningFactory<
   T extends TSecretScanningDataSourceWithConnection,
   P extends TQueueSecretScanningResourceDiffScan["payload"],
   I extends TSecretScanningDataSourceInput,
   C extends TSecretScanningDataSourceCredentials | undefined = undefined
-> = () => {
+> = (params: TSecretScanningFactoryParams) => {
   listRawResources: TSecretScanningFactoryListRawResources<T>;
   getFullScanPath: TSecretScanningFactoryGetFullScanPath<T>;
   initialize: TSecretScanningFactoryInitialize<I, T["connection"] | undefined, C>;
   postInitialization: TSecretScanningFactoryPostInitialization<I, T["connection"] | undefined, C>;
   teardown: TSecretScanningFactoryTeardown<T, C>;
+  validateConfigUpdate: TSecretScanningFactoryValidateConfigUpdate<I["config"], T>;
   getDiffScanResourcePayload: TSecretScanningFactoryGetDiffScanResourcePayload<P>;
   getDiffScanFindingsPayload: TSecretScanningFactoryGetDiffScanFindingsPayload<T, P>;
 };
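TSecretScanningFactory is the contract every provider module exports, so the new params argument and the validateConfigUpdate member must be satisfied by GitHub and Bitbucket too, not just GitLab. A minimal conforming factory under the revised signature (local stand-in types, no repo imports):

// Minimal factory conforming to the revised shape; a real provider would
// import its concrete data-source/finding types instead of these stand-ins.
type TParams = { appConnectionDAL: unknown; kmsService: unknown };

type TExampleFactory = (params: TParams) => {
  listRawResources: () => Promise<string[]>;
  validateConfigUpdate: (p: { config: object; dataSource: object }) => Promise<void>;
};

export const exampleSecretScanningFactory: TExampleFactory = (params) => ({
  // params.kmsService and params.appConnectionDAL are in scope for every method
  listRawResources: async () => [],
  // providers with no update-time constraints can ship a no-op validator
  validateConfigUpdate: async () => {}
});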
@@ -2,10 +2,12 @@ import { z } from "zod";

 import { BitbucketDataSourceSchema, BitbucketFindingSchema } from "@app/ee/services/secret-scanning-v2/bitbucket";
 import { GitHubDataSourceSchema, GitHubFindingSchema } from "@app/ee/services/secret-scanning-v2/github";
+import { GitLabDataSourceSchema, GitLabFindingSchema } from "@app/ee/services/secret-scanning-v2/gitlab";

 export const SecretScanningDataSourceSchema = z.discriminatedUnion("type", [
   GitHubDataSourceSchema,
-  BitbucketDataSourceSchema
+  BitbucketDataSourceSchema,
+  GitLabDataSourceSchema
 ]);

 export const SecretScanningFindingSchema = z.discriminatedUnion("dataSourceType", [
@@ -18,5 +20,10 @@ export const SecretScanningFindingSchema = z.discriminatedUnion("dataSourceType"
     JSON.stringify({
       title: "Bitbucket"
     })
-  )
+  ),
+  GitLabFindingSchema.describe(
+    JSON.stringify({
+      title: "GitLab"
+    })
+  )
 ]);
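z.discriminatedUnion dispatches parsing on the literal discriminator field ("type" here, "dataSourceType" for findings), so registering GitLab is purely additive. A runnable miniature of the pattern (the schemas and literal values are invented for illustration):

import { z } from "zod";

// Miniature of the discriminated-union pattern above; field names and
// literals are illustrative, not the repo's actual schemas.
const GitHubMini = z.object({ type: z.literal("github"), repo: z.string() });
const GitLabMini = z.object({ type: z.literal("gitlab"), project: z.string() });

const DataSourceMini = z.discriminatedUnion("type", [GitHubMini, GitLabMini]);

// Parsing routes on `type`, so this yields { type: "gitlab", project: "infra" };
// a payload with an unregistered `type` fails with a clear union error.
const parsed = DataSourceMini.parse({ type: "gitlab", project: "infra" });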
@@ -46,7 +46,11 @@ export const KeyStorePrefixes = {
   IdentityAccessTokenStatusUpdate: (identityAccessTokenId: string) =>
     `identity-access-token-status:${identityAccessTokenId}`,
   ServiceTokenStatusUpdate: (serviceTokenId: string) => `service-token-status:${serviceTokenId}`,
-  GatewayIdentityCredential: (identityId: string) => `gateway-credentials:${identityId}`
+  GatewayIdentityCredential: (identityId: string) => `gateway-credentials:${identityId}`,
+  ActiveSSEConnectionsSet: (projectId: string, identityId: string) =>
+    `sse-connections:${projectId}:${identityId}` as const,
+  ActiveSSEConnections: (projectId: string, identityId: string, connectionId: string) =>
+    `sse-connections:${projectId}:${identityId}:${connectionId}` as const
 };

 export const KeyStoreTtls = {
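The two new builders give a per-identity key plus a per-connection key under the same prefix, which looks like the usual keystore layout for tracking live SSE connections. Reproduced standalone so the resulting key shapes are visible (the example IDs are invented):

// The new key builders from the diff, standalone. `as const` narrows each
// return type to a template-literal string type rather than plain `string`.
const ActiveSSEConnectionsSet = (projectId: string, identityId: string) =>
  `sse-connections:${projectId}:${identityId}` as const;

const ActiveSSEConnections = (projectId: string, identityId: string, connectionId: string) =>
  `sse-connections:${projectId}:${identityId}:${connectionId}` as const;

// ActiveSSEConnectionsSet("proj_1", "iden_9")
//   => "sse-connections:proj_1:iden_9"
// ActiveSSEConnections("proj_1", "iden_9", "conn_42")
//   => "sse-connections:proj_1:iden_9:conn_42"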
@@ -18,6 +18,7 @@ import { SECRET_SYNC_CONNECTION_MAP, SECRET_SYNC_NAME_MAP } from "@app/services/

 export enum ApiDocsTags {
   Identities = "Identities",
+  IdentityTemplates = "Identity Templates",
   TokenAuth = "Token Auth",
   UniversalAuth = "Universal Auth",
   GcpAuth = "GCP Auth",
@@ -69,7 +70,8 @@ export enum ApiDocsTags {
   SecretScanning = "Secret Scanning",
   OidcSso = "OIDC SSO",
   SamlSso = "SAML SSO",
-  LdapSso = "LDAP SSO"
+  LdapSso = "LDAP SSO",
+  Events = "Event Subscriptions"
 }

 export const GROUPS = {
@@ -214,6 +216,7 @@ export const LDAP_AUTH = {
     password: "The password of the LDAP user to login."
   },
   ATTACH: {
+    templateId: "The ID of the identity auth template to attach the configuration onto.",
     identityId: "The ID of the identity to attach the configuration onto.",
     url: "The URL of the LDAP server.",
     allowedFields:
@@ -240,7 +243,8 @@ export const LDAP_AUTH = {
     accessTokenTTL: "The new lifetime for an access token in seconds.",
     accessTokenMaxTTL: "The new maximum lifetime for an access token in seconds.",
     accessTokenNumUsesLimit: "The new maximum number of times that an access token can be used.",
-    accessTokenTrustedIps: "The new IPs or CIDR ranges that access tokens can be used from."
+    accessTokenTrustedIps: "The new IPs or CIDR ranges that access tokens can be used from.",
+    templateId: "The ID of the identity auth template to update the configuration to."
   },
   RETRIEVE: {
     identityId: "The ID of the identity to retrieve the configuration for."
@@ -664,6 +668,10 @@ export const ORGANIZATIONS = {
     organizationId: "The ID of the organization to delete the membership from.",
     membershipId: "The ID of the membership to delete."
   },
+  BULK_DELETE_USER_MEMBERSHIPS: {
+    organizationId: "The ID of the organization to delete the memberships from.",
+    membershipIds: "The IDs of the memberships to delete."
+  },
   LIST_IDENTITY_MEMBERSHIPS: {
     orgId: "The ID of the organization to get identity memberships from.",
     offset: "The offset to start from. If you enter 10, it will start from the 10th identity membership.",
@@ -2253,7 +2261,9 @@ export const AppConnections = {
   AZURE_DEVOPS: {
     code: "The OAuth code to use to connect with Azure DevOps.",
     tenantId: "The Tenant ID to use to connect with Azure DevOps.",
-    orgName: "The Organization name to use to connect with Azure DevOps."
+    orgName: "The Organization name to use to connect with Azure DevOps.",
+    clientId: "The Client ID to use to connect with Azure Client Secrets.",
+    clientSecret: "The Client Secret to use to connect with Azure Client Secrets."
   },
   OCI: {
     userOcid: "The OCID (Oracle Cloud Identifier) of the user making the request.",
@@ -2296,6 +2306,9 @@ export const AppConnections = {
   DIGITAL_OCEAN_APP_PLATFORM: {
     apiToken: "The API token used to authenticate with Digital Ocean App Platform."
   },
+  NETLIFY: {
+    accessToken: "The Access token used to authenticate with Netlify."
+  },
   OKTA: {
     instanceUrl: "The URL used to access your Okta organization.",
     apiToken: "The API token used to authenticate with Okta."
@@ -2400,12 +2413,18 @@ export const SecretSyncs = {
     env: "The name of the GitHub environment."
   },
   AZURE_KEY_VAULT: {
-    vaultBaseUrl: "The base URL of the Azure Key Vault to sync secrets to. Example: https://example.vault.azure.net/"
+    vaultBaseUrl: "The base URL of the Azure Key Vault to sync secrets to. Example: https://example.vault.azure.net/",
+    tenantId: "The Tenant ID to use to connect with Azure Client Secrets.",
+    clientId: "The Client ID to use to connect with Azure Client Secrets.",
+    clientSecret: "The Client Secret to use to connect with Azure Client Secrets."
   },
   AZURE_APP_CONFIGURATION: {
     configurationUrl:
       "The URL of the Azure App Configuration to sync secrets to. Example: https://example.azconfig.io/",
-    label: "An optional label to assign to secrets created in Azure App Configuration."
+    label: "An optional label to assign to secrets created in Azure App Configuration.",
+    tenantId: "The Tenant ID to use to connect with Azure Client Secrets.",
+    clientId: "The Client ID to use to connect with Azure Client Secrets.",
+    clientSecret: "The Client Secret to use to connect with Azure Client Secrets."
   },
   AZURE_DEVOPS: {
     devopsProjectId: "The ID of the Azure DevOps project to sync secrets to.",
@@ -2521,6 +2540,13 @@ export const SecretSyncs = {
     workspaceSlug: "The Bitbucket Workspace slug to sync secrets to.",
     repositorySlug: "The Bitbucket Repository slug to sync secrets to.",
     environmentId: "The Bitbucket Deployment Environment uuid to sync secrets to."
-  }
+  },
+  NETLIFY: {
+    accountId: "The ID of the Netlify account to sync secrets to.",
+    accountName: "The name of the Netlify account to sync secrets to.",
+    siteName: "The name of the Netlify site to sync secrets to.",
+    siteId: "The ID of the Netlify site to sync secrets to.",
+    context: "The Netlify context to sync secrets to."
+  }
 }
 };
@@ -2702,6 +2728,14 @@ export const SecretScanningDataSources = {
   GITHUB: {
     includeRepos: 'The repositories to include when scanning. Defaults to all repositories (["*"]).'
   },
+  GITLAB: {
+    includeProjects: 'The projects to include when scanning. Defaults to all projects (["*"]).',
+    scope: "The GitLab scope scanning should occur at (project or group level).",
+    projectId: "The ID of the project to scan.",
+    projectName: "The name of the project to scan.",
+    groupId: "The ID of the group to scan projects from.",
+    groupName: "The name of the group to scan projects from."
+  },
   BITBUCKET: {
     workspaceSlug: "The workspace to scan.",
     includeRepos: 'The repositories to include when scanning. Defaults to all repositories (["*"]).'
@@ -2839,3 +2873,10 @@ export const LdapSso = {
     caCert: "The CA certificate to use when verifying the LDAP server certificate."
   }
 };
+
+export const EventSubscriptions = {
+  SUBSCRIBE_PROJECT_EVENTS: {
+    projectId: "The ID of the project to subscribe to events for.",
+    register: "List of events you want to subscribe to"
+  }
+};
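Read together, the documented GITLAB fields imply two alternative shapes, one per scope. Illustrative payloads built from those field descriptions (values invented, and the exact request shape should be checked against the generated API docs):

// Illustrative GitLab data-source configs implied by the docs fields above.
const projectScopedConfig = {
  scope: "project",
  projectId: "12345678",
  projectName: "acme/payments",
  includeProjects: ["*"]
};

const groupScopedConfig = {
  scope: "group",
  groupId: "87654321",
  groupName: "acme",
  includeProjects: ["acme/payments", "acme/billing"]
};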
Some files were not shown because too many files have changed in this diff.