Mirror of https://github.com/Infisical/infisical.git
Synced 2025-08-24 20:43:19 +00:00

Compare commits (mssql-ssl-... ... secrets-mi...)
184 Commits
Author | SHA1 | Date
---|---|---
 | 43752e1888 |
 | bd72129d8c |
 | bf10b2f58a |
 | d24f5a57a8 |
 | 166104e523 |
 | a7847f177c |
 | 48e5f550e9 |
 | 4a4a7fd325 |
 | 91b8ed8015 |
 | 6cf978b593 |
 | 68fbb399fc |
 | 97366f6e95 |
 | c83d4af7a3 |
 | c35c937c63 |
 | b10752acb5 |
 | eb9b75d930 |
 | 273a7b9657 |
 | a3b6fa9a53 |
 | f60dd528e8 |
 | 8ffef1da8e |
 | f352f98374 |
 | 91a76f50ca |
 | ea4bb0a062 |
 | 3d6be7b1b2 |
 | 12558e8614 |
 | 987f87e562 |
 | 4d06d5cbb0 |
 | bad934de48 |
 | 90b93fbd15 |
 | c2db2a0bc7 |
 | b0d24de008 |
 | 0473fb0ddb |
 | 4ccb5dc9b0 |
 | 930425d5dc |
 | f77a53bd8e |
 | 4bd61e5607 |
 | aa4dbfa073 |
 | b479406ba0 |
 | 7cf9d933da |
 | ca2825ba95 |
 | b8fa4d5255 |
 | 0d3cb2d41a |
 | e0d19d7b65 |
 | f5a0d8be78 |
 | c7ae7be493 |
 | 18881749fd |
 | fa54c406dc |
 | 1a2eef3ba6 |
 | 0c562150f5 |
 | 6fde132804 |
 | 799721782a |
 | 86d430f911 |
 | 7c28ee844e |
 | d5390fcafc |
 | 1b40f5d475 |
 | 3cec1b4021 |
 | 97b2c534a7 |
 | d71362ccc3 |
 | e4d90eb055 |
 | 55607a4886 |
 | 385c75c543 |
 | f16dca45d9 |
 | 118c28df54 |
 | 249b2933da |
 | 272336092d |
 | 6f05a6d82c |
 | 84ebdb8503 |
 | b464941fbc |
 | 77e8d8a86d |
 | c61dd1ee6e |
 | 9db8573e72 |
 | ce8653e908 |
 | fd4cdc2769 |
 | 90a1cc9330 |
 | 78bfd0922a |
 | 458dcd31c1 |
 | 372537f0b6 |
 | e173ff3828 |
 | 2baadf60d1 |
 | e13fc93bac |
 | 6b14fbcce2 |
 | 86fbe5cc24 |
 | 3f7862a345 |
 | 9661458469 |
 | c7c1eb0f5f |
 | a1e48a1795 |
 | d14e80b771 |
 | 0264d37d9b |
 | 11a1604e14 |
 | f788dee398 |
 | 88120ed45e |
 | d6a377416d |
 | dbbd58ffb7 |
 | 5d2beb3604 |
 | ec65e0e29c |
 | b819848058 |
 | 1b0ef540fe |
 | 4496241002 |
 | 52e32484ce |
 | 8b497699d4 |
 | be73f62226 |
 | 102620ff09 |
 | 994ee88852 |
 | 770e25b895 |
 | fcf3bdb440 |
 | 89c11b5541 |
 | 5f764904e2 |
 | 1a75384dba |
 | 50f434cd80 |
 | d879cfd90c |
 | ca1f5eaca3 |
 | 04086376ea |
 | 364027a88a |
 | ca110d11b0 |
 | 4e8f404f16 |
 | 22abb78f48 |
 | 24f11406e1 |
 | d5d67c82b2 |
 | 35cfcf1f0f |
 | 368e00ea71 |
 | 2c8cfeb826 |
 | 23237dd055 |
 | 70d22f90ec |
 | e10aec3170 |
 | 0b11dcd627 |
 | d88a473b47 |
 | 4f52400887 |
 | 34eb9f475a |
 | 902a0b0c56 |
 | d1e8ae3c98 |
 | 5c9243d691 |
 | 35d1eabf49 |
 | b6902160ce |
 | fbfc51ee93 |
 | 9e6294786f |
 | 9d92ffce95 |
 | 9193418f8b |
 | 847c50d2d4 |
 | efa043c3d2 |
 | 352ef050c3 |
 | b6b9fb6ef5 |
 | 7e94791635 |
 | eedc5f533e |
 | fc5d42baf0 |
 | b95c35620a |
 | fa867e5068 |
 | 8851faec65 |
 | 47fb666dc7 |
 | 569edd2852 |
 | 676ebaf3c2 |
 | adb3185042 |
 | 8da0a4d846 |
 | eebf080e3c |
 | 97be31f11e |
 | 667cceebc0 |
 | 1ad02e2da6 |
 | 93445d96b3 |
 | 72b80e1fd7 |
 | 6429adfaf6 |
 | fd89b3c702 |
 | 50e40e8bcf |
 | 6100086338 |
 | 000dd6c223 |
 | 389e2e1fb7 |
 | 88fcbcadd4 |
 | 60dc1d1e00 |
 | 2d68f9aa16 |
 | e694293ebe |
 | ef6f5ecc4b |
 | 56f5249925 |
 | df5b3fa8dc |
 | 035ac0fe8d |
 | c12408eb81 |
 | 13194296c6 |
 | be20a507ac |
 | 63cf36c722 |
 | 4dcd3ed06c |
 | 1b32de5c5b |
 | 522795871e |
 | 5c63955fde |
 | d7f3892b73 |
 | 33af2fb2b8 |
 | c568f40954 |
 | 28f87b8b27 |
.github/workflows/build-docker-image-to-prod.yml (vendored, 123 lines)
@@ -1,123 +0,0 @@
name: Release production images (frontend, backend)
on:
  push:
    tags:
      - "infisical/v*.*.*"
      - "!infisical/v*.*.*-postgres"

jobs:
  backend-image:
    name: Build backend image
    runs-on: ubuntu-latest
    steps:
      - name: Extract version from tag
        id: extract_version
        run: echo "::set-output name=version::${GITHUB_REF_NAME#infisical/}"
      - name: ☁️ Checkout source
        uses: actions/checkout@v3
      - name: 📦 Install dependencies to test all dependencies
        run: npm ci --only-production
        working-directory: backend
      # - name: 🧪 Run tests
      #   run: npm run test:ci
      #   working-directory: backend
      - name: Save commit hashes for tag
        id: commit
        uses: pr-mpt/actions-commit-hash@v2
      - name: 🔧 Set up Docker Buildx
        uses: docker/setup-buildx-action@v2
      - name: 🐋 Login to Docker Hub
        uses: docker/login-action@v2
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}
      - name: Set up Depot CLI
        uses: depot/setup-action@v1
      - name: 📦 Build backend and export to Docker
        uses: depot/build-push-action@v1
        with:
          project: 64mmf0n610
          token: ${{ secrets.DEPOT_PROJECT_TOKEN }}
          load: true
          context: backend
          tags: infisical/infisical:test
          platforms: linux/amd64,linux/arm64
      - name: ⏻ Spawn backend container and dependencies
        run: |
          docker compose -f .github/resources/docker-compose.be-test.yml up --wait --quiet-pull
      - name: 🧪 Test backend image
        run: |
          ./.github/resources/healthcheck.sh infisical-backend-test
      - name: ⏻ Shut down backend container and dependencies
        run: |
          docker compose -f .github/resources/docker-compose.be-test.yml down
      - name: 🏗️ Build backend and push
        uses: depot/build-push-action@v1
        with:
          project: 64mmf0n610
          token: ${{ secrets.DEPOT_PROJECT_TOKEN }}
          push: true
          context: backend
          tags: |
            infisical/backend:${{ steps.commit.outputs.short }}
            infisical/backend:latest
            infisical/backend:${{ steps.extract_version.outputs.version }}
          platforms: linux/amd64,linux/arm64

  frontend-image:
    name: Build frontend image
    runs-on: ubuntu-latest
    steps:
      - name: Extract version from tag
        id: extract_version
        run: echo "::set-output name=version::${GITHUB_REF_NAME#infisical/}"
      - name: ☁️ Checkout source
        uses: actions/checkout@v3
      - name: Save commit hashes for tag
        id: commit
        uses: pr-mpt/actions-commit-hash@v2
      - name: 🔧 Set up Docker Buildx
        uses: docker/setup-buildx-action@v2
      - name: 🐋 Login to Docker Hub
        uses: docker/login-action@v2
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}
      - name: Set up Depot CLI
        uses: depot/setup-action@v1
      - name: 📦 Build frontend and export to Docker
        uses: depot/build-push-action@v1
        with:
          load: true
          token: ${{ secrets.DEPOT_PROJECT_TOKEN }}
          project: 64mmf0n610
          context: frontend
          tags: infisical/frontend:test
          platforms: linux/amd64,linux/arm64
          build-args: |
            POSTHOG_API_KEY=${{ secrets.PUBLIC_POSTHOG_API_KEY }}
            NEXT_INFISICAL_PLATFORM_VERSION=${{ steps.extract_version.outputs.version }}
      - name: ⏻ Spawn frontend container
        run: |
          docker run -d --rm --name infisical-frontend-test infisical/frontend:test
      - name: 🧪 Test frontend image
        run: |
          ./.github/resources/healthcheck.sh infisical-frontend-test
      - name: ⏻ Shut down frontend container
        run: |
          docker stop infisical-frontend-test
      - name: 🏗️ Build frontend and push
        uses: depot/build-push-action@v1
        with:
          project: 64mmf0n610
          push: true
          token: ${{ secrets.DEPOT_PROJECT_TOKEN }}
          context: frontend
          tags: |
            infisical/frontend:${{ steps.commit.outputs.short }}
            infisical/frontend:latest
            infisical/frontend:${{ steps.extract_version.outputs.version }}
          platforms: linux/amd64,linux/arm64
          build-args: |
            POSTHOG_API_KEY=${{ secrets.PUBLIC_POSTHOG_API_KEY }}
            NEXT_INFISICAL_PLATFORM_VERSION=${{ steps.extract_version.outputs.version }}

.github/workflows/nightly-tag-generation.yml (vendored, new file, 82 lines)
@@ -0,0 +1,82 @@
name: Generate Nightly Tag

on:
  schedule:
    - cron: '0 0 * * *' # Run daily at midnight UTC
  workflow_dispatch: # Allow manual triggering for testing

permissions:
  contents: write

jobs:
  create-nightly-tag:
    runs-on: ubuntu-latest

    steps:
      - name: Checkout code
        uses: actions/checkout@v4
        with:
          fetch-depth: 0 # Fetch all history for tags
          token: ${{ secrets.GO_RELEASER_GITHUB_TOKEN }}

      - name: Configure Git
        run: |
          git config user.name "github-actions[bot]"
          git config user.email "github-actions[bot]@users.noreply.github.com"

      - name: Generate nightly tag
        run: |
          # Get the latest infisical production tag
          LATEST_STABLE_TAG=$(git tag --list | grep "^v[0-9].*$" | grep -v "nightly" | sort -V | tail -n1)

          if [ -z "$LATEST_STABLE_TAG" ]; then
            echo "No infisical production tags found, using v0.1.0"
            LATEST_STABLE_TAG="v0.1.0"
          fi

          echo "Latest production tag: $LATEST_STABLE_TAG"

          # Get current date in YYYYMMDD format
          DATE=$(date +%Y%m%d)

          # Base nightly tag name
          BASE_TAG="${LATEST_STABLE_TAG}-nightly-${DATE}"

          # Check if this exact tag already exists
          if git tag --list | grep -q "^${BASE_TAG}$"; then
            echo "Base tag ${BASE_TAG} already exists, finding next increment"

            # Find existing tags for this date and get the highest increment
            EXISTING_TAGS=$(git tag --list | grep "^${BASE_TAG}" | grep -E '\.[0-9]+$' || true)

            if [ -z "$EXISTING_TAGS" ]; then
              # No incremental tags exist, create .1
              NIGHTLY_TAG="${BASE_TAG}.1"
            else
              # Find the highest increment
              HIGHEST_INCREMENT=$(echo "$EXISTING_TAGS" | sed "s|^${BASE_TAG}\.||" | sort -n | tail -n1)
              NEXT_INCREMENT=$((HIGHEST_INCREMENT + 1))
              NIGHTLY_TAG="${BASE_TAG}.${NEXT_INCREMENT}"
            fi
          else
            # Base tag doesn't exist, use it
            NIGHTLY_TAG="$BASE_TAG"
          fi

          echo "Generated nightly tag: $NIGHTLY_TAG"
          echo "NIGHTLY_TAG=$NIGHTLY_TAG" >> $GITHUB_ENV
          echo "LATEST_PRODUCTION_TAG=$LATEST_STABLE_TAG" >> $GITHUB_ENV

          git tag "$NIGHTLY_TAG"
          git push origin "$NIGHTLY_TAG"
          echo "✅ Created and pushed nightly tag: $NIGHTLY_TAG"

      - name: Create GitHub Release
        uses: softprops/action-gh-release@v2
        with:
          tag_name: ${{ env.NIGHTLY_TAG }}
          name: ${{ env.NIGHTLY_TAG }}
          draft: false
          prerelease: true
          generate_release_notes: true
          make_latest: false

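As an aside, the tag naming this workflow produces can be illustrated with a short TypeScript sketch; the helper and the example tag and date values are made up for illustration and are not part of the repository.

// Hypothetical helper that mirrors the "Generate nightly tag" shell step above.
const nextNightlyTag = (existingTags: string[], latestStableTag: string, date: string): string => {
  const baseTag = `${latestStableTag}-nightly-${date}`;
  if (!existingTags.includes(baseTag)) return baseTag;

  // Base tag already pushed today: find the highest ".N" increment and add one.
  const increments = existingTags
    .filter((tag) => tag.startsWith(`${baseTag}.`))
    .map((tag) => Number(tag.slice(baseTag.length + 1)))
    .filter((n) => Number.isInteger(n) && n > 0);

  const highest = increments.length ? Math.max(...increments) : 0;
  return `${baseTag}.${highest + 1}`;
};

// nextNightlyTag([], "v0.97.0", "20250824")                           -> "v0.97.0-nightly-20250824"
// nextNightlyTag(["v0.97.0-nightly-20250824"], "v0.97.0", "20250824") -> "v0.97.0-nightly-20250824.1"
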
@@ -2,7 +2,9 @@ name: Release standalone docker image
 on:
   push:
     tags:
-      - "infisical/v*.*.*-postgres"
+      - "v*.*.*"
+      - "v*.*.*-nightly-*"
+      - "v*.*.*-nightly-*.*"
 
 jobs:
   infisical-tests:
@@ -17,7 +19,7 @@ jobs:
     steps:
       - name: Extract version from tag
         id: extract_version
-        run: echo "::set-output name=version::${GITHUB_REF_NAME#infisical/}"
+        run: echo "::set-output name=version::${GITHUB_REF_NAME}"
       - name: ☁️ Checkout source
         uses: actions/checkout@v3
         with:
@@ -53,7 +55,7 @@ jobs:
           push: true
           context: .
           tags: |
-            infisical/infisical:latest-postgres
+            infisical/infisical:latest
             infisical/infisical:${{ steps.commit.outputs.short }}
             infisical/infisical:${{ steps.extract_version.outputs.version }}
           platforms: linux/amd64,linux/arm64
@@ -69,7 +71,7 @@ jobs:
     steps:
       - name: Extract version from tag
         id: extract_version
-        run: echo "::set-output name=version::${GITHUB_REF_NAME#infisical/}"
+        run: echo "::set-output name=version::${GITHUB_REF_NAME}"
      - name: ☁️ Checkout source
         uses: actions/checkout@v3
         with:
@@ -105,7 +107,7 @@ jobs:
           push: true
           context: .
           tags: |
-            infisical/infisical-fips:latest-postgres
+            infisical/infisical-fips:latest
             infisical/infisical-fips:${{ steps.commit.outputs.short }}
             infisical/infisical-fips:${{ steps.extract_version.outputs.version }}
           platforms: linux/amd64,linux/arm64

@@ -44,10 +44,7 @@ jobs:
 
       - name: Generate Helm Chart
         working-directory: k8-operator
-        run: make helm
+        run: make helm VERSION=${{ steps.extract_version.outputs.version }}
 
-      - name: Update Helm Chart Version
-        run: ./k8-operator/scripts/update-version.sh ${{ steps.extract_version.outputs.version }}
-
       - name: Debug - Check file changes
         run: |

.github/workflows/run-backend-tests.yml (vendored, 15 lines changed)
@@ -16,6 +16,16 @@ jobs:
     runs-on: ubuntu-latest
     timeout-minutes: 15
     steps:
+      - name: Free up disk space
+        run: |
+          sudo rm -rf /usr/share/dotnet
+          sudo rm -rf /opt/ghc
+          sudo rm -rf "/usr/local/share/boost"
+          sudo rm -rf "$AGENT_TOOLSDIRECTORY"
+          docker system prune -af
+
       - name: ☁️ Checkout source
         uses: actions/checkout@v3
       - uses: KengoTODA/actions-setup-docker-compose@v1
@@ -34,6 +44,8 @@ jobs:
         working-directory: backend
       - name: Start postgres and redis
         run: touch .env && docker compose -f docker-compose.dev.yml up -d db redis
+      - name: Start Secret Rotation testing databases
+        run: docker compose -f docker-compose.e2e-dbs.yml up -d --wait --wait-timeout 300
       - name: Run unit test
         run: npm run test:unit
         working-directory: backend
@@ -41,6 +53,9 @@ jobs:
         run: npm run test:e2e
         working-directory: backend
         env:
+          E2E_TEST_ORACLE_DB_19_HOST: ${{ secrets.E2E_TEST_ORACLE_DB_19_HOST }}
+          E2E_TEST_ORACLE_DB_19_USERNAME: ${{ secrets.E2E_TEST_ORACLE_DB_19_USERNAME }}
+          E2E_TEST_ORACLE_DB_19_PASSWORD: ${{ secrets.E2E_TEST_ORACLE_DB_19_PASSWORD }}
           REDIS_URL: redis://172.17.0.1:6379
           DB_CONNECTION_URI: postgres://infisical:infisical@172.17.0.1:5432/infisical?sslmode=disable
           AUTH_SECRET: something-random

@@ -50,3 +50,4 @@ docs/integrations/app-connections/zabbix.mdx:generic-api-key:91
 docs/integrations/app-connections/bitbucket.mdx:generic-api-key:123
 docs/integrations/app-connections/railway.mdx:generic-api-key:156
 .github/workflows/validate-db-schemas.yml:generic-api-key:21
+k8-operator/config/samples/universalAuthIdentitySecret.yaml:generic-api-key:8

@@ -1,34 +0,0 @@
import { TQueueServiceFactory } from "@app/queue";

export const mockQueue = (): TQueueServiceFactory => {
  const queues: Record<string, unknown> = {};
  const workers: Record<string, unknown> = {};
  const job: Record<string, unknown> = {};
  const events: Record<string, unknown> = {};

  return {
    queue: async (name, jobData) => {
      job[name] = jobData;
    },
    queuePg: async () => {},
    schedulePg: async () => {},
    initialize: async () => {},
    shutdown: async () => undefined,
    stopRepeatableJob: async () => true,
    start: (name, jobFn) => {
      queues[name] = jobFn;
      workers[name] = jobFn;
    },
    startPg: async () => {},
    listen: (name, event) => {
      events[name] = event;
    },
    getRepeatableJobs: async () => [],
    getDelayedJobs: async () => [],
    clearQueue: async () => {},
    stopJobById: async () => {},
    stopJobByIdPg: async () => {},
    stopRepeatableJobByJobId: async () => true,
    stopRepeatableJobByKey: async () => true
  };
};

backend/e2e-test/routes/v3/secret-rotations.spec.ts (new file, 726 lines)
@@ -0,0 +1,726 @@
/* eslint-disable no-promise-executor-return */
/* eslint-disable no-await-in-loop */
import knex from "knex";
import { v4 as uuidv4 } from "uuid";

import { seedData1 } from "@app/db/seed-data";

enum SecretRotationType {
  OracleDb = "oracledb",
  MySQL = "mysql",
  Postgres = "postgres"
}

type TGenericSqlCredentials = {
  host: string;
  port: number;
  username: string;
  password: string;
  database: string;
};

type TSecretMapping = {
  username: string;
  password: string;
};

type TDatabaseUserCredentials = {
  username: string;
};

const formatSqlUsername = (username: string) => `${username}_${uuidv4().slice(0, 8).replace(/-/g, "").toUpperCase()}`;

const getSecretValue = async (secretKey: string) => {
  const passwordSecret = await testServer.inject({
    url: `/api/v3/secrets/raw/${secretKey}`,
    method: "GET",
    query: {
      workspaceId: seedData1.projectV3.id,
      environment: seedData1.environment.slug
    },
    headers: {
      authorization: `Bearer ${jwtAuthToken}`
    }
  });

  expect(passwordSecret.statusCode).toBe(200);
  expect(passwordSecret.json().secret).toBeDefined();

  const passwordSecretJson = JSON.parse(passwordSecret.payload);

  return passwordSecretJson.secret.secretValue as string;
};

const deleteSecretRotation = async (id: string, type: SecretRotationType) => {
  const res = await testServer.inject({
    method: "DELETE",
    query: {
      deleteSecrets: "true",
      revokeGeneratedCredentials: "true"
    },
    url: `/api/v2/secret-rotations/${type}-credentials/${id}`,
    headers: {
      authorization: `Bearer ${jwtAuthToken}`
    }
  });

  expect(res.statusCode).toBe(200);
};

const deleteAppConnection = async (id: string, type: SecretRotationType) => {
  const res = await testServer.inject({
    method: "DELETE",
    url: `/api/v1/app-connections/${type}/${id}`,
    headers: {
      authorization: `Bearer ${jwtAuthToken}`
    }
  });

  expect(res.statusCode).toBe(200);
};

const createOracleDBAppConnection = async (credentials: TGenericSqlCredentials) => {
  const createOracleDBAppConnectionReqBody = {
    credentials: {
      database: credentials.database,
      host: credentials.host,
      username: credentials.username,
      password: credentials.password,
      port: credentials.port,
      sslEnabled: true,
      sslRejectUnauthorized: true
    },
    name: `oracle-db-${uuidv4()}`,
    description: "Test OracleDB App Connection",
    gatewayId: null,
    isPlatformManagedCredentials: false,
    method: "username-and-password"
  };

  const res = await testServer.inject({
    method: "POST",
    url: `/api/v1/app-connections/oracledb`,
    headers: {
      authorization: `Bearer ${jwtAuthToken}`
    },
    body: createOracleDBAppConnectionReqBody
  });

  const json = JSON.parse(res.payload);

  expect(res.statusCode).toBe(200);
  expect(json.appConnection).toBeDefined();

  return json.appConnection.id as string;
};

const createMySQLAppConnection = async (credentials: TGenericSqlCredentials) => {
  const createMySQLAppConnectionReqBody = {
    name: `mysql-test-${uuidv4()}`,
    description: "test-mysql",
    gatewayId: null,
    method: "username-and-password",
    credentials: {
      host: credentials.host,
      port: credentials.port,
      database: credentials.database,
      username: credentials.username,
      password: credentials.password,
      sslEnabled: false,
      sslRejectUnauthorized: true
    }
  };

  const res = await testServer.inject({
    method: "POST",
    url: `/api/v1/app-connections/mysql`,
    headers: {
      authorization: `Bearer ${jwtAuthToken}`
    },
    body: createMySQLAppConnectionReqBody
  });

  const json = JSON.parse(res.payload);

  expect(res.statusCode).toBe(200);
  expect(json.appConnection).toBeDefined();

  return json.appConnection.id as string;
};

const createPostgresAppConnection = async (credentials: TGenericSqlCredentials) => {
  const createPostgresAppConnectionReqBody = {
    credentials: {
      host: credentials.host,
      port: credentials.port,
      database: credentials.database,
      username: credentials.username,
      password: credentials.password,
      sslEnabled: false,
      sslRejectUnauthorized: true
    },
    name: `postgres-test-${uuidv4()}`,
    description: "test-postgres",
    gatewayId: null,
    method: "username-and-password"
  };

  const res = await testServer.inject({
    method: "POST",
    url: `/api/v1/app-connections/postgres`,
    headers: {
      authorization: `Bearer ${jwtAuthToken}`
    },
    body: createPostgresAppConnectionReqBody
  });

  const json = JSON.parse(res.payload);

  expect(res.statusCode).toBe(200);
  expect(json.appConnection).toBeDefined();

  return json.appConnection.id as string;
};

const createOracleInfisicalUsers = async (
  credentials: TGenericSqlCredentials,
  userCredentials: TDatabaseUserCredentials[]
) => {
  const client = knex({
    client: "oracledb",
    connection: {
      database: credentials.database,
      port: credentials.port,
      host: credentials.host,
      user: credentials.username,
      password: credentials.password,
      connectionTimeoutMillis: 10000,
      ssl: {
        // @ts-expect-error - this is a valid property for the ssl object
        sslServerDNMatch: true
      }
    }
  });

  for await (const { username } of userCredentials) {
    // check if user exists, and if it does, don't create it
    const existingUser = await client.raw(`SELECT * FROM all_users WHERE username = '${username}'`);

    if (!existingUser.length) {
      await client.raw(`CREATE USER ${username} IDENTIFIED BY "temporary_password"`);
    }
    await client.raw(`GRANT ALL PRIVILEGES TO ${username} WITH ADMIN OPTION`);
  }

  await client.destroy();
};

const createMySQLInfisicalUsers = async (
  credentials: TGenericSqlCredentials,
  userCredentials: TDatabaseUserCredentials[]
) => {
  const client = knex({
    client: "mysql2",
    connection: {
      database: credentials.database,
      port: credentials.port,
      host: credentials.host,
      user: credentials.username,
      password: credentials.password,
      connectionTimeoutMillis: 10000
    }
  });

  // Fix: Ensure root has GRANT OPTION privileges
  try {
    await client.raw("GRANT ALL PRIVILEGES ON *.* TO 'root'@'%' WITH GRANT OPTION;");
    await client.raw("FLUSH PRIVILEGES;");
  } catch (error) {
    // Ignore if already has privileges
  }

  for await (const { username } of userCredentials) {
    // check if user exists, and if it does, dont create it

    const existingUser = await client.raw(`SELECT * FROM mysql.user WHERE user = '${username}'`);

    if (!existingUser[0].length) {
      await client.raw(`CREATE USER '${username}'@'%' IDENTIFIED BY 'temporary_password';`);
    }

    await client.raw(`GRANT ALL PRIVILEGES ON \`${credentials.database}\`.* TO '${username}'@'%';`);
    await client.raw("FLUSH PRIVILEGES;");
  }

  await client.destroy();
};

const createPostgresInfisicalUsers = async (
  credentials: TGenericSqlCredentials,
  userCredentials: TDatabaseUserCredentials[]
) => {
  const client = knex({
    client: "pg",
    connection: {
      database: credentials.database,
      port: credentials.port,
      host: credentials.host,
      user: credentials.username,
      password: credentials.password,
      connectionTimeoutMillis: 10000
    }
  });

  for await (const { username } of userCredentials) {
    // check if user exists, and if it does, don't create it
    const existingUser = await client.raw("SELECT * FROM pg_catalog.pg_user WHERE usename = ?", [username]);

    if (!existingUser.rows.length) {
      await client.raw(`CREATE USER "${username}" WITH PASSWORD 'temporary_password'`);
    }

    await client.raw("GRANT ALL PRIVILEGES ON DATABASE ?? TO ??", [credentials.database, username]);
  }

  await client.destroy();
};

const createOracleDBSecretRotation = async (
  appConnectionId: string,
  credentials: TGenericSqlCredentials,
  userCredentials: TDatabaseUserCredentials[],
  secretMapping: TSecretMapping
) => {
  const now = new Date();
  const rotationTime = new Date(now.getTime() - 2 * 60 * 1000); // 2 minutes ago

  await createOracleInfisicalUsers(credentials, userCredentials);

  const createOracleDBSecretRotationReqBody = {
    parameters: userCredentials.reduce(
      (acc, user, index) => {
        acc[`username${index + 1}`] = user.username;
        return acc;
      },
      {} as Record<string, string>
    ),
    secretsMapping: {
      username: secretMapping.username,
      password: secretMapping.password
    },
    name: `test-oracle-${uuidv4()}`,
    description: "Test OracleDB Secret Rotation",
    secretPath: "/",
    isAutoRotationEnabled: true,
    rotationInterval: 5, // 5 seconds for testing
    rotateAtUtc: {
      hours: rotationTime.getUTCHours(),
      minutes: rotationTime.getUTCMinutes()
    },
    connectionId: appConnectionId,
    environment: seedData1.environment.slug,
    projectId: seedData1.projectV3.id
  };

  const res = await testServer.inject({
    method: "POST",
    url: `/api/v2/secret-rotations/oracledb-credentials`,
    headers: {
      authorization: `Bearer ${jwtAuthToken}`
    },
    body: createOracleDBSecretRotationReqBody
  });

  expect(res.statusCode).toBe(200);
  expect(res.json().secretRotation).toBeDefined();

  return res;
};

const createMySQLSecretRotation = async (
  appConnectionId: string,
  credentials: TGenericSqlCredentials,
  userCredentials: TDatabaseUserCredentials[],
  secretMapping: TSecretMapping
) => {
  const now = new Date();
  const rotationTime = new Date(now.getTime() - 2 * 60 * 1000); // 2 minutes ago

  await createMySQLInfisicalUsers(credentials, userCredentials);

  const createMySQLSecretRotationReqBody = {
    parameters: userCredentials.reduce(
      (acc, user, index) => {
        acc[`username${index + 1}`] = user.username;
        return acc;
      },
      {} as Record<string, string>
    ),
    secretsMapping: {
      username: secretMapping.username,
      password: secretMapping.password
    },
    name: `test-mysql-rotation-${uuidv4()}`,
    description: "Test MySQL Secret Rotation",
    secretPath: "/",
    isAutoRotationEnabled: true,
    rotationInterval: 5,
    rotateAtUtc: {
      hours: rotationTime.getUTCHours(),
      minutes: rotationTime.getUTCMinutes()
    },
    connectionId: appConnectionId,
    environment: seedData1.environment.slug,
    projectId: seedData1.projectV3.id
  };

  const res = await testServer.inject({
    method: "POST",
    url: `/api/v2/secret-rotations/mysql-credentials`,
    headers: {
      authorization: `Bearer ${jwtAuthToken}`
    },
    body: createMySQLSecretRotationReqBody
  });

  expect(res.statusCode).toBe(200);
  expect(res.json().secretRotation).toBeDefined();

  return res;
};

const createPostgresSecretRotation = async (
  appConnectionId: string,
  credentials: TGenericSqlCredentials,
  userCredentials: TDatabaseUserCredentials[],
  secretMapping: TSecretMapping
) => {
  const now = new Date();
  const rotationTime = new Date(now.getTime() - 2 * 60 * 1000); // 2 minutes ago

  await createPostgresInfisicalUsers(credentials, userCredentials);

  const createPostgresSecretRotationReqBody = {
    parameters: userCredentials.reduce(
      (acc, user, index) => {
        acc[`username${index + 1}`] = user.username;
        return acc;
      },
      {} as Record<string, string>
    ),
    secretsMapping: {
      username: secretMapping.username,
      password: secretMapping.password
    },
    name: `test-postgres-rotation-${uuidv4()}`,
    description: "Test Postgres Secret Rotation",
    secretPath: "/",
    isAutoRotationEnabled: true,
    rotationInterval: 5,
    rotateAtUtc: {
      hours: rotationTime.getUTCHours(),
      minutes: rotationTime.getUTCMinutes()
    },
    connectionId: appConnectionId,
    environment: seedData1.environment.slug,
    projectId: seedData1.projectV3.id
  };

  const res = await testServer.inject({
    method: "POST",
    url: `/api/v2/secret-rotations/postgres-credentials`,
    headers: {
      authorization: `Bearer ${jwtAuthToken}`
    },
    body: createPostgresSecretRotationReqBody
  });

  expect(res.statusCode).toBe(200);
  expect(res.json().secretRotation).toBeDefined();

  return res;
};

describe("Secret Rotations", async () => {
  const testCases = [
    {
      type: SecretRotationType.MySQL,
      name: "MySQL (8.4.6) Secret Rotation",
      dbCredentials: {
        database: "mysql-test",
        host: "127.0.0.1",
        username: "root",
        password: "mysql-test",
        port: 3306
      },
      secretMapping: {
        username: formatSqlUsername("MYSQL_USERNAME"),
        password: formatSqlUsername("MYSQL_PASSWORD")
      },
      userCredentials: [
        {
          username: formatSqlUsername("MYSQL_USER_1")
        },
        {
          username: formatSqlUsername("MYSQL_USER_2")
        }
      ]
    },
    {
      type: SecretRotationType.MySQL,
      name: "MySQL (8.0.29) Secret Rotation",
      dbCredentials: {
        database: "mysql-test",
        host: "127.0.0.1",
        username: "root",
        password: "mysql-test",
        port: 3307
      },
      secretMapping: {
        username: formatSqlUsername("MYSQL_USERNAME"),
        password: formatSqlUsername("MYSQL_PASSWORD")
      },
      userCredentials: [
        {
          username: formatSqlUsername("MYSQL_USER_1")
        },
        {
          username: formatSqlUsername("MYSQL_USER_2")
        }
      ]
    },
    {
      type: SecretRotationType.MySQL,
      name: "MySQL (5.7.31) Secret Rotation",
      dbCredentials: {
        database: "mysql-test",
        host: "127.0.0.1",
        username: "root",
        password: "mysql-test",
        port: 3308
      },
      secretMapping: {
        username: formatSqlUsername("MYSQL_USERNAME"),
        password: formatSqlUsername("MYSQL_PASSWORD")
      },
      userCredentials: [
        {
          username: formatSqlUsername("MYSQL_USER_1")
        },
        {
          username: formatSqlUsername("MYSQL_USER_2")
        }
      ]
    },
    {
      type: SecretRotationType.OracleDb,
      name: "OracleDB (23.8) Secret Rotation",
      dbCredentials: {
        database: "FREEPDB1",
        host: "127.0.0.1",
        username: "system",
        password: "pdb-password",
        port: 1521
      },
      secretMapping: {
        username: formatSqlUsername("ORACLEDB_USERNAME"),
        password: formatSqlUsername("ORACLEDB_PASSWORD")
      },
      userCredentials: [
        {
          username: formatSqlUsername("INFISICAL_USER_1")
        },
        {
          username: formatSqlUsername("INFISICAL_USER_2")
        }
      ]
    },
    {
      type: SecretRotationType.OracleDb,
      name: "OracleDB (19.3) Secret Rotation",
      skippable: true,
      dbCredentials: {
        password: process.env.E2E_TEST_ORACLE_DB_19_PASSWORD!,
        host: process.env.E2E_TEST_ORACLE_DB_19_HOST!,
        username: process.env.E2E_TEST_ORACLE_DB_19_USERNAME!,
        port: 1521,
        database: "ORCLPDB1"
      },
      secretMapping: {
        username: formatSqlUsername("ORACLEDB_USERNAME"),
        password: formatSqlUsername("ORACLEDB_PASSWORD")
      },
      userCredentials: [
        {
          username: formatSqlUsername("INFISICAL_USER_1")
        },
        {
          username: formatSqlUsername("INFISICAL_USER_2")
        }
      ]
    },
    {
      type: SecretRotationType.Postgres,
      name: "Postgres (17) Secret Rotation",
      dbCredentials: {
        database: "postgres-test",
        host: "127.0.0.1",
        username: "postgres-test",
        password: "postgres-test",
        port: 5433
      },
      secretMapping: {
        username: formatSqlUsername("POSTGRES_USERNAME"),
        password: formatSqlUsername("POSTGRES_PASSWORD")
      },
      userCredentials: [
        {
          username: formatSqlUsername("INFISICAL_USER_1")
        },
        {
          username: formatSqlUsername("INFISICAL_USER_2")
        }
      ]
    },
    {
      type: SecretRotationType.Postgres,
      name: "Postgres (16) Secret Rotation",
      dbCredentials: {
        database: "postgres-test",
        host: "127.0.0.1",
        username: "postgres-test",
        password: "postgres-test",
        port: 5434
      },
      secretMapping: {
        username: formatSqlUsername("POSTGRES_USERNAME"),
        password: formatSqlUsername("POSTGRES_PASSWORD")
      },
      userCredentials: [
        {
          username: formatSqlUsername("INFISICAL_USER_1")
        },
        {
          username: formatSqlUsername("INFISICAL_USER_2")
        }
      ]
    },
    {
      type: SecretRotationType.Postgres,
      name: "Postgres (10.12) Secret Rotation",
      dbCredentials: {
        database: "postgres-test",
        host: "127.0.0.1",
        username: "postgres-test",
        password: "postgres-test",
        port: 5435
      },
      secretMapping: {
        username: formatSqlUsername("POSTGRES_USERNAME"),
        password: formatSqlUsername("POSTGRES_PASSWORD")
      },
      userCredentials: [
        {
          username: formatSqlUsername("INFISICAL_USER_1")
        },
        {
          username: formatSqlUsername("INFISICAL_USER_2")
        }
      ]
    }
  ] as {
    skippable?: boolean;
    type: SecretRotationType;
    name: string;
    dbCredentials: TGenericSqlCredentials;
    secretMapping: TSecretMapping;
    userCredentials: TDatabaseUserCredentials[];
  }[];

  const createAppConnectionMap = {
    [SecretRotationType.OracleDb]: createOracleDBAppConnection,
    [SecretRotationType.MySQL]: createMySQLAppConnection,
    [SecretRotationType.Postgres]: createPostgresAppConnection
  };

  const createRotationMap = {
    [SecretRotationType.OracleDb]: createOracleDBSecretRotation,
    [SecretRotationType.MySQL]: createMySQLSecretRotation,
    [SecretRotationType.Postgres]: createPostgresSecretRotation
  };

  const appConnectionIds: { id: string; type: SecretRotationType }[] = [];
  const secretRotationIds: { id: string; type: SecretRotationType }[] = [];

  afterAll(async () => {
    for (const { id, type } of secretRotationIds) {
      await deleteSecretRotation(id, type);
    }

    for (const { id, type } of appConnectionIds) {
      await deleteAppConnection(id, type);
    }
  });

  testCases.forEach(({ skippable, dbCredentials, secretMapping, userCredentials, type, name }) => {
    const shouldSkip = () => {
      if (skippable) {
        if (type === SecretRotationType.OracleDb) {
          if (!process.env.E2E_TEST_ORACLE_DB_19_HOST) {
            return true;
          }
        }
      }

      return false;
    };

    if (shouldSkip()) {
      test.skip(`Skipping Secret Rotation for ${type} (${name}) because E2E_TEST_ORACLE_DB_19_HOST is not set`);
    } else {
      test.concurrent(
        `Create secret rotation for ${name}`,
        async () => {
          const appConnectionId = await createAppConnectionMap[type](dbCredentials);

          if (appConnectionId) {
            appConnectionIds.push({ id: appConnectionId, type });
          }

          const res = await createRotationMap[type](appConnectionId, dbCredentials, userCredentials, secretMapping);

          const resJson = JSON.parse(res.payload);

          if (resJson.secretRotation) {
            secretRotationIds.push({ id: resJson.secretRotation.id, type });
          }

          const startSecretValue = await getSecretValue(secretMapping.password);
          expect(startSecretValue).toBeDefined();

          let attempts = 0;
          while (attempts < 60) {
            const currentSecretValue = await getSecretValue(secretMapping.password);

            if (currentSecretValue !== startSecretValue) {
              break;
            }

            attempts += 1;
            await new Promise((resolve) => setTimeout(resolve, 2_500));
          }

          if (attempts >= 60) {
            throw new Error("Secret rotation failed to rotate after 60 attempts");
          }

          const finalSecretValue = await getSecretValue(secretMapping.password);
          expect(finalSecretValue).not.toBe(startSecretValue);
        },
        {
          timeout: 300_000
        }
      );
    }
  });
});

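As an aside, the wait-for-rotation loop in the spec above could be factored into a small reusable helper. A minimal TypeScript sketch follows; it is illustrative only, with fetchValue standing in for the getSecretValue call used in the test and the defaults matching its 60 attempts at 2.5 second intervals.

const waitForValueChange = async (
  fetchValue: () => Promise<string>,
  initialValue: string,
  maxAttempts = 60,
  delayMs = 2_500
): Promise<string> => {
  for (let attempt = 0; attempt < maxAttempts; attempt += 1) {
    const current = await fetchValue();
    // Stop as soon as the rotated value differs from the starting value.
    if (current !== initialValue) return current;
    await new Promise((resolve) => setTimeout(resolve, delayMs));
  }
  throw new Error(`Value did not change after ${maxAttempts} attempts`);
};
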
@@ -18,6 +18,7 @@ import { keyStoreFactory } from "@app/keystore/keystore";
 import { initializeHsmModule } from "@app/ee/services/hsm/hsm-fns";
 import { buildRedisFromConfig } from "@app/lib/config/redis";
 import { superAdminDALFactory } from "@app/services/super-admin/super-admin-dal";
+import { bootstrapCheck } from "@app/server/boot-strap-check";
 
 dotenv.config({ path: path.join(__dirname, "../../.env.test"), debug: true });
 export default {
@@ -63,6 +64,8 @@ export default {
     const queue = queueServiceFactory(envCfg, { dbConnectionUrl: envCfg.DB_CONNECTION_URI });
     const keyStore = keyStoreFactory(envCfg);
 
+    await queue.initialize();
+
     const hsmModule = initializeHsmModule(envCfg);
     hsmModule.initialize();
 
@@ -78,9 +81,13 @@ export default {
       envConfig: envCfg
     });
 
+    await bootstrapCheck({ db });
+
     // @ts-expect-error type
     globalThis.testServer = server;
     // @ts-expect-error type
+    globalThis.testQueue = queue;
+    // @ts-expect-error type
     globalThis.testSuperAdminDAL = superAdminDAL;
     // @ts-expect-error type
     globalThis.jwtAuthToken = crypto.jwt().sign(
@@ -105,6 +112,8 @@ export default {
     // custom setup
     return {
       async teardown() {
+        // @ts-expect-error type
+        await globalThis.testQueue.shutdown();
         // @ts-expect-error type
         await globalThis.testServer.close();
         // @ts-expect-error type
@@ -112,7 +121,9 @@ export default {
         // @ts-expect-error type
         delete globalThis.testSuperAdminDAL;
         // @ts-expect-error type
-        delete globalThis.jwtToken;
+        delete globalThis.jwtAuthToken;
+        // @ts-expect-error type
+        delete globalThis.testQueue;
         // called after all tests with this env have been run
         await db.migrate.rollback(
           {

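As an aside, the teardown ordering these hunks establish (shut the queue down before closing the server) can be summarized in a small sketch; the type and function names below are illustrative, not the project's real ones.

type TestQueue = { shutdown: () => Promise<void> };
type TestServer = { close: () => Promise<void> };

const makeTeardown = (queue: TestQueue, server: TestServer) => async () => {
  await queue.shutdown(); // stop background workers first
  await server.close(); // then close the HTTP server they depend on
};
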
@@ -2,7 +2,7 @@
 import { Knex } from "knex";
 
 import { chunkArray } from "@app/lib/fn";
-import { logger } from "@app/lib/logger";
+import { initLogger, logger } from "@app/lib/logger";
 
 import { TableName } from "../schemas";
 import { TReminders, TRemindersInsert } from "../schemas/reminders";
@@ -107,5 +107,6 @@ export async function up(knex: Knex): Promise<void> {
 }
 
 export async function down(): Promise<void> {
+  initLogger();
   logger.info("Rollback not implemented for secret reminders fix migration");
 }

@@ -0,0 +1,65 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  const lastUserLoggedInAuthMethod = await knex.schema.hasColumn(TableName.OrgMembership, "lastLoginAuthMethod");
  const lastIdentityLoggedInAuthMethod = await knex.schema.hasColumn(
    TableName.IdentityOrgMembership,
    "lastLoginAuthMethod"
  );
  const lastUserLoggedInTime = await knex.schema.hasColumn(TableName.OrgMembership, "lastLoginTime");
  const lastIdentityLoggedInTime = await knex.schema.hasColumn(TableName.IdentityOrgMembership, "lastLoginTime");
  if (!lastUserLoggedInAuthMethod || !lastUserLoggedInTime) {
    await knex.schema.alterTable(TableName.OrgMembership, (t) => {
      if (!lastUserLoggedInAuthMethod) {
        t.string("lastLoginAuthMethod").nullable();
      }
      if (!lastUserLoggedInTime) {
        t.datetime("lastLoginTime").nullable();
      }
    });
  }

  if (!lastIdentityLoggedInAuthMethod || !lastIdentityLoggedInTime) {
    await knex.schema.alterTable(TableName.IdentityOrgMembership, (t) => {
      if (!lastIdentityLoggedInAuthMethod) {
        t.string("lastLoginAuthMethod").nullable();
      }
      if (!lastIdentityLoggedInTime) {
        t.datetime("lastLoginTime").nullable();
      }
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  const lastUserLoggedInAuthMethod = await knex.schema.hasColumn(TableName.OrgMembership, "lastLoginAuthMethod");
  const lastIdentityLoggedInAuthMethod = await knex.schema.hasColumn(
    TableName.IdentityOrgMembership,
    "lastLoginAuthMethod"
  );
  const lastUserLoggedInTime = await knex.schema.hasColumn(TableName.OrgMembership, "lastLoginTime");
  const lastIdentityLoggedInTime = await knex.schema.hasColumn(TableName.IdentityOrgMembership, "lastLoginTime");
  if (lastUserLoggedInAuthMethod || lastUserLoggedInTime) {
    await knex.schema.alterTable(TableName.OrgMembership, (t) => {
      if (lastUserLoggedInAuthMethod) {
        t.dropColumn("lastLoginAuthMethod");
      }
      if (lastUserLoggedInTime) {
        t.dropColumn("lastLoginTime");
      }
    });
  }

  if (lastIdentityLoggedInAuthMethod || lastIdentityLoggedInTime) {
    await knex.schema.alterTable(TableName.IdentityOrgMembership, (t) => {
      if (lastIdentityLoggedInAuthMethod) {
        t.dropColumn("lastLoginAuthMethod");
      }
      if (lastIdentityLoggedInTime) {
        t.dropColumn("lastLoginTime");
      }
    });
  }
}

@@ -0,0 +1,19 @@
import { Knex } from "knex";

import { TableName } from "../schemas/models";

export async function up(knex: Knex): Promise<void> {
  if (!(await knex.schema.hasColumn(TableName.AccessApprovalPolicy, "maxTimePeriod"))) {
    await knex.schema.alterTable(TableName.AccessApprovalPolicy, (t) => {
      t.string("maxTimePeriod").nullable(); // Ex: 1h - Null is permanent
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  if (await knex.schema.hasColumn(TableName.AccessApprovalPolicy, "maxTimePeriod")) {
    await knex.schema.alterTable(TableName.AccessApprovalPolicy, (t) => {
      t.dropColumn("maxTimePeriod");
    });
  }
}

@@ -0,0 +1,38 @@
import { Knex } from "knex";

import { TableName } from "@app/db/schemas";

export async function up(knex: Knex): Promise<void> {
  const hasEditNoteCol = await knex.schema.hasColumn(TableName.AccessApprovalRequest, "editNote");
  const hasEditedByUserId = await knex.schema.hasColumn(TableName.AccessApprovalRequest, "editedByUserId");

  if (!hasEditNoteCol || !hasEditedByUserId) {
    await knex.schema.alterTable(TableName.AccessApprovalRequest, (t) => {
      if (!hasEditedByUserId) {
        t.uuid("editedByUserId").nullable();
        t.foreign("editedByUserId").references("id").inTable(TableName.Users).onDelete("SET NULL");
      }

      if (!hasEditNoteCol) {
        t.string("editNote").nullable();
      }
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  const hasEditNoteCol = await knex.schema.hasColumn(TableName.AccessApprovalRequest, "editNote");
  const hasEditedByUserId = await knex.schema.hasColumn(TableName.AccessApprovalRequest, "editedByUserId");

  if (hasEditNoteCol || hasEditedByUserId) {
    await knex.schema.alterTable(TableName.AccessApprovalRequest, (t) => {
      if (hasEditedByUserId) {
        t.dropColumn("editedByUserId");
      }

      if (hasEditNoteCol) {
        t.dropColumn("editNote");
      }
    });
  }
}

@@ -17,7 +17,8 @@ export const AccessApprovalPoliciesSchema = z.object({
   updatedAt: z.date(),
   enforcementLevel: z.string().default("hard"),
   deletedAt: z.date().nullable().optional(),
-  allowedSelfApprovals: z.boolean().default(true)
+  allowedSelfApprovals: z.boolean().default(true),
+  maxTimePeriod: z.string().nullable().optional()
 });
 
 export type TAccessApprovalPolicies = z.infer<typeof AccessApprovalPoliciesSchema>;

@@ -20,7 +20,9 @@ export const AccessApprovalRequestsSchema = z.object({
   requestedByUserId: z.string().uuid(),
   note: z.string().nullable().optional(),
   privilegeDeletedAt: z.date().nullable().optional(),
-  status: z.string().default("pending")
+  status: z.string().default("pending"),
+  editedByUserId: z.string().uuid().nullable().optional(),
+  editNote: z.string().nullable().optional()
 });
 
 export type TAccessApprovalRequests = z.infer<typeof AccessApprovalRequestsSchema>;

@@ -14,7 +14,9 @@ export const IdentityOrgMembershipsSchema = z.object({
   orgId: z.string().uuid(),
   createdAt: z.date(),
   updatedAt: z.date(),
-  identityId: z.string().uuid()
+  identityId: z.string().uuid(),
+  lastLoginAuthMethod: z.string().nullable().optional(),
+  lastLoginTime: z.date().nullable().optional()
 });
 
 export type TIdentityOrgMemberships = z.infer<typeof IdentityOrgMembershipsSchema>;

@@ -19,7 +19,9 @@ export const OrgMembershipsSchema = z.object({
   roleId: z.string().uuid().nullable().optional(),
   projectFavorites: z.string().array().nullable().optional(),
   isActive: z.boolean().default(true),
-  lastInvitedAt: z.date().nullable().optional()
+  lastInvitedAt: z.date().nullable().optional(),
+  lastLoginAuthMethod: z.string().nullable().optional(),
+  lastLoginTime: z.date().nullable().optional()
 });
 
 export type TOrgMemberships = z.infer<typeof OrgMembershipsSchema>;

@@ -3,12 +3,32 @@ import { z } from "zod";

 import { ApproverType, BypasserType } from "@app/ee/services/access-approval-policy/access-approval-policy-types";
 import { removeTrailingSlash } from "@app/lib/fn";
+import { ms } from "@app/lib/ms";
 import { EnforcementLevel } from "@app/lib/types";
 import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
 import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
 import { sapPubSchema } from "@app/server/routes/sanitizedSchemas";
 import { AuthMode } from "@app/services/auth/auth-type";

+const maxTimePeriodSchema = z
+  .string()
+  .trim()
+  .nullish()
+  .transform((val, ctx) => {
+    if (val === undefined) return undefined;
+    if (!val || val === "permanent") return null;
+    const parsedMs = ms(val);
+
+    if (typeof parsedMs !== "number" || parsedMs <= 0) {
+      ctx.addIssue({
+        code: z.ZodIssueCode.custom,
+        message: "Invalid time period format or value. Must be a positive duration (e.g., '1h', '30m', '2d')."
+      });
+      return z.NEVER;
+    }
+    return val;
+  });
+
 export const registerAccessApprovalPolicyRouter = async (server: FastifyZodProvider) => {
   server.route({
     url: "/",
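// Illustrative sketch (not part of this diff) of how the maxTimePeriodSchema above is
// expected to behave, assuming "@app/lib/ms" wraps the npm "ms" package and zod v3 is in
// use: an omitted value stays undefined, "permanent" (or an empty string) normalizes to
// null meaning "no limit", and anything ms() cannot parse fails validation.
import { z } from "zod";
import ms from "ms";

const maxTimePeriodSketch = z
  .string()
  .trim()
  .nullish()
  .transform((val, ctx) => {
    if (val === undefined) return undefined; // field omitted -> leave the stored value untouched
    if (!val || val === "permanent") return null; // explicit "no limit"
    const parsedMs = ms(val);
    if (typeof parsedMs !== "number" || parsedMs <= 0) {
      ctx.addIssue({ code: z.ZodIssueCode.custom, message: "Invalid time period" });
      return z.NEVER;
    }
    return val; // keep the human-readable duration string for storage
  });

console.log(maxTimePeriodSketch.parse("2d")); // "2d"
console.log(maxTimePeriodSketch.parse("permanent")); // null
console.log(maxTimePeriodSketch.safeParse("soon").success); // false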
@@ -71,7 +91,8 @@ export const registerAccessApprovalPolicyRouter = async (server: FastifyZodProvi
             .optional(),
           approvals: z.number().min(1).default(1),
           enforcementLevel: z.nativeEnum(EnforcementLevel).default(EnforcementLevel.Hard),
-          allowedSelfApprovals: z.boolean().default(true)
+          allowedSelfApprovals: z.boolean().default(true),
+          maxTimePeriod: maxTimePeriodSchema
         })
         .refine(
           (val) => Boolean(val.environment) || Boolean(val.environments),
@@ -124,7 +145,8 @@ export const registerAccessApprovalPolicyRouter = async (server: FastifyZodProvi
               .array()
               .nullable()
               .optional(),
-            bypassers: z.object({ type: z.nativeEnum(BypasserType), id: z.string().nullable().optional() }).array()
+            bypassers: z.object({ type: z.nativeEnum(BypasserType), id: z.string().nullable().optional() }).array(),
+            maxTimePeriod: z.string().nullable().optional()
           })
           .array()
           .nullable()
@@ -233,7 +255,8 @@ export const registerAccessApprovalPolicyRouter = async (server: FastifyZodProvi
             stepNumber: z.number().int()
           })
           .array()
-          .optional()
+          .optional(),
+        maxTimePeriod: maxTimePeriodSchema
       }),
       response: {
         200: z.object({
@@ -314,7 +337,8 @@ export const registerAccessApprovalPolicyRouter = async (server: FastifyZodProvi
             })
             .array()
            .nullable()
-            .optional()
+            .optional(),
+          maxTimePeriod: z.string().nullable().optional()
          })
        })
      }
@@ -2,6 +2,7 @@ import { z } from "zod";

 import { AccessApprovalRequestsReviewersSchema, AccessApprovalRequestsSchema, UsersSchema } from "@app/db/schemas";
 import { ApprovalStatus } from "@app/ee/services/access-approval-request/access-approval-request-types";
+import { ms } from "@app/lib/ms";
 import { writeLimit } from "@app/server/config/rateLimiter";
 import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
 import { AuthMode } from "@app/services/auth/auth-type";
@@ -26,7 +27,23 @@ export const registerAccessApprovalRequestRouter = async (server: FastifyZodProv
       body: z.object({
         permissions: z.any().array(),
         isTemporary: z.boolean(),
-        temporaryRange: z.string().optional(),
+        temporaryRange: z
+          .string()
+          .optional()
+          .transform((val, ctx) => {
+            if (!val || val === "permanent") return undefined;
+
+            const parsedMs = ms(val);
+
+            if (typeof parsedMs !== "number" || parsedMs <= 0) {
+              ctx.addIssue({
+                code: z.ZodIssueCode.custom,
+                message: "Invalid time period format or value. Must be a positive duration (e.g., '1h', '30m', '2d')."
+              });
+              return z.NEVER;
+            }
+            return val;
+          }),
         note: z.string().max(255).optional()
       }),
       querystring: z.object({
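// Hypothetical example (not taken from the diff) of a create-request body that the schema
// above would accept; the permissions payload shape is z.any().array(), so the entry shown
// here is illustrative only. "permanent" is normalized to undefined by the transform, while
// a value like "90m" is kept once ms() confirms it parses to a positive duration.
const exampleCreateAccessRequestBody = {
  permissions: [{ subject: "secrets", action: "read" }],
  isTemporary: true,
  temporaryRange: "90m",
  note: "Temporary read access for incident follow-up"
};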
@@ -128,7 +145,8 @@ export const registerAccessApprovalRequestRouter = async (server: FastifyZodProv
               envId: z.string(),
               enforcementLevel: z.string(),
               deletedAt: z.date().nullish(),
-              allowedSelfApprovals: z.boolean()
+              allowedSelfApprovals: z.boolean(),
+              maxTimePeriod: z.string().nullable().optional()
             }),
             reviewers: z
               .object({
@@ -189,4 +207,47 @@ export const registerAccessApprovalRequestRouter = async (server: FastifyZodProv
       return { review };
     }
   });
+
+  server.route({
+    url: "/:requestId",
+    method: "PATCH",
+    schema: {
+      params: z.object({
+        requestId: z.string().trim()
+      }),
+      body: z.object({
+        temporaryRange: z.string().transform((val, ctx) => {
+          const parsedMs = ms(val);
+
+          if (typeof parsedMs !== "number" || parsedMs <= 0) {
+            ctx.addIssue({
+              code: z.ZodIssueCode.custom,
+              message: "Invalid time period format or value. Must be a positive duration (e.g., '1h', '30m', '2d')."
+            });
+            return z.NEVER;
+          }
+          return val;
+        }),
+        editNote: z.string().max(255)
+      }),
+      response: {
+        200: z.object({
+          approval: AccessApprovalRequestsSchema
+        })
+      }
+    },
+    onRequest: verifyAuth([AuthMode.JWT]),
+    handler: async (req) => {
+      const { request } = await server.services.accessApprovalRequest.updateAccessApprovalRequest({
+        actor: req.permission.type,
+        actorId: req.permission.id,
+        actorAuthMethod: req.permission.authMethod,
+        actorOrgId: req.permission.orgId,
+        temporaryRange: req.body.temporaryRange,
+        editNote: req.body.editNote,
+        requestId: req.params.requestId
+      });
+      return { approval: request };
+    }
+  });
 };
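// Hypothetical client call (not part of the diff) against the PATCH route added above. The
// "/api/v1/access-approvals/requests" prefix is an assumption about where this router is
// mounted; baseUrl, requestId and token are placeholders.
async function editAccessRequest(baseUrl: string, requestId: string, token: string) {
  const res = await fetch(`${baseUrl}/api/v1/access-approvals/requests/${requestId}`, {
    method: "PATCH",
    headers: { Authorization: `Bearer ${token}`, "Content-Type": "application/json" },
    body: JSON.stringify({ temporaryRange: "4h", editNote: "Shortened per security review" })
  });
  if (!res.ok) throw new Error(`Edit failed with status ${res.status}`);
  // Shape mirrors the 200 response schema above (approval: AccessApprovalRequestsSchema).
  return (await res.json()) as { approval: { id: string; temporaryRange?: string | null } };
}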
@@ -379,14 +379,17 @@ export const registerLdapRouter = async (server: FastifyZodProvider) => {

   server.route({
     method: "POST",
-    url: "/config/:configId/test-connection",
+    url: "/config/test-connection",
     config: {
       rateLimit: readLimit
     },
     onRequest: verifyAuth([AuthMode.JWT]),
     schema: {
-      params: z.object({
-        configId: z.string().trim()
+      body: z.object({
+        url: z.string().trim(),
+        bindDN: z.string().trim(),
+        bindPass: z.string().trim(),
+        caCert: z.string().trim()
       }),
       response: {
         200: z.boolean()
@@ -399,8 +402,9 @@ export const registerLdapRouter = async (server: FastifyZodProvider) => {
       orgId: req.permission.orgId,
       actorAuthMethod: req.permission.authMethod,
       actorOrgId: req.permission.orgId,
-      ldapConfigId: req.params.configId
+      ...req.body
     });
+
     return result;
   }
 });
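// Hypothetical request body (not from the diff) for the reworked test-connection endpoint
// above: the caller now submits the LDAP settings directly instead of referencing a saved
// config by ID. All field values below are placeholders.
const ldapTestConnectionBody = {
  url: "ldaps://ldap.example.com:636",
  bindDN: "cn=service,dc=example,dc=com",
  bindPass: "<bind-password>",
  caCert: "<PEM-encoded CA certificate>"
};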
@@ -56,6 +56,7 @@ export interface TAccessApprovalPolicyDALFactory
       allowedSelfApprovals: boolean;
       secretPath: string;
       deletedAt?: Date | null | undefined;
+      maxTimePeriod?: string | null;
       projectId: string;
       bypassers: (
         | {
@@ -96,6 +97,7 @@ export interface TAccessApprovalPolicyDALFactory
       allowedSelfApprovals: boolean;
       secretPath: string;
       deletedAt?: Date | null | undefined;
+      maxTimePeriod?: string | null;
       environments: {
         id: string;
         name: string;
@@ -141,6 +143,7 @@ export interface TAccessApprovalPolicyDALFactory
       allowedSelfApprovals: boolean;
       secretPath: string;
       deletedAt?: Date | null | undefined;
+      maxTimePeriod?: string | null;
     }
   | undefined
 >;
@@ -100,7 +100,8 @@ export const accessApprovalPolicyServiceFactory = ({
     environments,
     enforcementLevel,
     allowedSelfApprovals,
-    approvalsRequired
+    approvalsRequired,
+    maxTimePeriod
   }) => {
     const project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId);
     if (!project) throw new NotFoundError({ message: `Project with slug '${projectSlug}' not found` });
@@ -219,7 +220,8 @@ export const accessApprovalPolicyServiceFactory = ({
         secretPath,
         name,
         enforcementLevel,
-        allowedSelfApprovals
+        allowedSelfApprovals,
+        maxTimePeriod
       },
       tx
     );
@@ -318,7 +320,8 @@ export const accessApprovalPolicyServiceFactory = ({
     enforcementLevel,
     allowedSelfApprovals,
     approvalsRequired,
-    environments
+    environments,
+    maxTimePeriod
   }: TUpdateAccessApprovalPolicy) => {
     const groupApprovers = approvers.filter((approver) => approver.type === ApproverType.Group);

@@ -461,7 +464,8 @@ export const accessApprovalPolicyServiceFactory = ({
         secretPath,
         name,
         enforcementLevel,
-        allowedSelfApprovals
+        allowedSelfApprovals,
+        maxTimePeriod
       },
       tx
     );
@@ -41,6 +41,7 @@ export type TCreateAccessApprovalPolicy = {
   enforcementLevel: EnforcementLevel;
   allowedSelfApprovals: boolean;
   approvalsRequired?: { numberOfApprovals: number; stepNumber: number }[];
+  maxTimePeriod?: string | null;
 } & Omit<TProjectPermission, "projectId">;

 export type TUpdateAccessApprovalPolicy = {
@@ -60,6 +61,7 @@ export type TUpdateAccessApprovalPolicy = {
   allowedSelfApprovals: boolean;
   approvalsRequired?: { numberOfApprovals: number; stepNumber: number }[];
   environments?: string[];
+  maxTimePeriod?: string | null;
 } & Omit<TProjectPermission, "projectId">;

 export type TDeleteAccessApprovalPolicy = {
@@ -104,7 +106,8 @@ export interface TAccessApprovalPolicyServiceFactory {
     environment,
     enforcementLevel,
     allowedSelfApprovals,
-    approvalsRequired
+    approvalsRequired,
+    maxTimePeriod
   }: TCreateAccessApprovalPolicy) => Promise<{
     environment: {
       name: string;
@@ -135,6 +138,7 @@ export interface TAccessApprovalPolicyServiceFactory {
     allowedSelfApprovals: boolean;
     secretPath: string;
     deletedAt?: Date | null | undefined;
+    maxTimePeriod?: string | null;
   }>;
   deleteAccessApprovalPolicy: ({
     policyId,
@@ -159,6 +163,7 @@ export interface TAccessApprovalPolicyServiceFactory {
     allowedSelfApprovals: boolean;
     secretPath: string;
     deletedAt?: Date | null | undefined;
+    maxTimePeriod?: string | null;
     environment: {
       id: string;
       name: string;
@@ -185,7 +190,8 @@ export interface TAccessApprovalPolicyServiceFactory {
     enforcementLevel,
     allowedSelfApprovals,
     approvalsRequired,
-    environments
+    environments,
+    maxTimePeriod
   }: TUpdateAccessApprovalPolicy) => Promise<{
     environment: {
       id: string;
@@ -208,6 +214,7 @@ export interface TAccessApprovalPolicyServiceFactory {
     allowedSelfApprovals: boolean;
     secretPath?: string | null | undefined;
     deletedAt?: Date | null | undefined;
+    maxTimePeriod?: string | null;
   }>;
   getAccessApprovalPolicyByProjectSlug: ({
     actorId,
@@ -242,6 +249,7 @@ export interface TAccessApprovalPolicyServiceFactory {
     allowedSelfApprovals: boolean;
     secretPath: string;
     deletedAt?: Date | null | undefined;
+    maxTimePeriod?: string | null;
     environment: {
       id: string;
       name: string;
@@ -298,6 +306,7 @@ export interface TAccessApprovalPolicyServiceFactory {
     allowedSelfApprovals: boolean;
     secretPath: string;
     deletedAt?: Date | null | undefined;
+    maxTimePeriod?: string | null;
     environment: {
       id: string;
       name: string;
@@ -63,6 +63,7 @@ export interface TAccessApprovalRequestDALFactory extends Omit<TOrmify<TableName
       enforcementLevel: string;
       allowedSelfApprovals: boolean;
       deletedAt: Date | null | undefined;
+      maxTimePeriod?: string | null;
     };
     projectId: string;
     environments: string[];
@@ -161,6 +162,7 @@ export interface TAccessApprovalRequestDALFactory extends Omit<TOrmify<TableName
       allowedSelfApprovals: boolean;
       envId: string;
       deletedAt: Date | null | undefined;
+      maxTimePeriod?: string | null;
     };
     projectId: string;
     environment: string;
@@ -297,7 +299,8 @@ export const accessApprovalRequestDALFactory = (db: TDbClient): TAccessApprovalR
         db.ref("enforcementLevel").withSchema(TableName.AccessApprovalPolicy).as("policyEnforcementLevel"),
         db.ref("allowedSelfApprovals").withSchema(TableName.AccessApprovalPolicy).as("policyAllowedSelfApprovals"),
         db.ref("envId").withSchema(TableName.AccessApprovalPolicy).as("policyEnvId"),
-        db.ref("deletedAt").withSchema(TableName.AccessApprovalPolicy).as("policyDeletedAt")
+        db.ref("deletedAt").withSchema(TableName.AccessApprovalPolicy).as("policyDeletedAt"),
+        db.ref("maxTimePeriod").withSchema(TableName.AccessApprovalPolicy).as("policyMaxTimePeriod")
       )
       .select(db.ref("approverUserId").withSchema(TableName.AccessApprovalPolicyApprover))
       .select(db.ref("sequence").withSchema(TableName.AccessApprovalPolicyApprover).as("approverSequence"))
@@ -364,7 +367,8 @@ export const accessApprovalRequestDALFactory = (db: TDbClient): TAccessApprovalR
           enforcementLevel: doc.policyEnforcementLevel,
           allowedSelfApprovals: doc.policyAllowedSelfApprovals,
           envId: doc.policyEnvId,
-          deletedAt: doc.policyDeletedAt
+          deletedAt: doc.policyDeletedAt,
+          maxTimePeriod: doc.policyMaxTimePeriod
         },
         requestedByUser: {
           userId: doc.requestedByUserId,
@@ -574,7 +578,8 @@ export const accessApprovalRequestDALFactory = (db: TDbClient): TAccessApprovalR
         tx.ref("enforcementLevel").withSchema(TableName.AccessApprovalPolicy).as("policyEnforcementLevel"),
         tx.ref("allowedSelfApprovals").withSchema(TableName.AccessApprovalPolicy).as("policyAllowedSelfApprovals"),
         tx.ref("approvals").withSchema(TableName.AccessApprovalPolicy).as("policyApprovals"),
-        tx.ref("deletedAt").withSchema(TableName.AccessApprovalPolicy).as("policyDeletedAt")
+        tx.ref("deletedAt").withSchema(TableName.AccessApprovalPolicy).as("policyDeletedAt"),
+        tx.ref("maxTimePeriod").withSchema(TableName.AccessApprovalPolicy).as("policyMaxTimePeriod")
       );

   const findById: TAccessApprovalRequestDALFactory["findById"] = async (id, tx) => {
@@ -595,7 +600,8 @@ export const accessApprovalRequestDALFactory = (db: TDbClient): TAccessApprovalR
           secretPath: el.policySecretPath,
           enforcementLevel: el.policyEnforcementLevel,
           allowedSelfApprovals: el.policyAllowedSelfApprovals,
-          deletedAt: el.policyDeletedAt
+          deletedAt: el.policyDeletedAt,
+          maxTimePeriod: el.policyMaxTimePeriod
         },
         requestedByUser: {
           userId: el.requestedByUserId,
@@ -54,7 +54,7 @@ type TSecretApprovalRequestServiceFactoryDep = {
   accessApprovalPolicyDAL: Pick<TAccessApprovalPolicyDALFactory, "findOne" | "find" | "findLastValidPolicy">;
   accessApprovalRequestReviewerDAL: Pick<
     TAccessApprovalRequestReviewerDALFactory,
-    "create" | "find" | "findOne" | "transaction"
+    "create" | "find" | "findOne" | "transaction" | "delete"
   >;
   groupDAL: Pick<TGroupDALFactory, "findAllGroupPossibleMembers">;
   projectMembershipDAL: Pick<TProjectMembershipDALFactory, "findById">;
@@ -156,6 +156,15 @@ export const accessApprovalRequestServiceFactory = ({
       throw new BadRequestError({ message: "The policy linked to this request has been deleted" });
     }

+    // Check if the requested time falls under policy.maxTimePeriod
+    if (policy.maxTimePeriod) {
+      if (!temporaryRange || ms(temporaryRange) > ms(policy.maxTimePeriod)) {
+        throw new BadRequestError({
+          message: `Requested access time range is limited to ${policy.maxTimePeriod} by policy`
+        });
+      }
+    }
+
     const approverIds: string[] = [];
     const approverGroupIds: string[] = [];
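// Minimal sketch (not the service code itself) of the policy check added above: a request
// must carry a temporaryRange, and it may not exceed policy.maxTimePeriod. Assumes ms()
// parses duration strings the way the npm "ms" package does; the zod transforms earlier in
// this diff already guarantee temporaryRange is a valid positive duration when present.
import ms from "ms";

function assertWithinPolicyLimit(temporaryRange: string | undefined, maxTimePeriod?: string | null) {
  if (!maxTimePeriod) return; // no limit configured on the policy
  if (!temporaryRange || ms(temporaryRange) > ms(maxTimePeriod)) {
    throw new Error(`Requested access time range is limited to ${maxTimePeriod} by policy`);
  }
}

assertWithinPolicyLimit("1h", "2d"); // passes
// assertWithinPolicyLimit("7d", "2d"); // would throw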
@@ -292,6 +301,155 @@ export const accessApprovalRequestServiceFactory = ({
     return { request: approval };
   };

+  const updateAccessApprovalRequest: TAccessApprovalRequestServiceFactory["updateAccessApprovalRequest"] = async ({
+    temporaryRange,
+    actorId,
+    actor,
+    actorOrgId,
+    actorAuthMethod,
+    editNote,
+    requestId
+  }) => {
+    const cfg = getConfig();
+
+    const accessApprovalRequest = await accessApprovalRequestDAL.findById(requestId);
+    if (!accessApprovalRequest) {
+      throw new NotFoundError({ message: `Access request with ID '${requestId}' not found` });
+    }
+
+    const { policy, requestedByUser } = accessApprovalRequest;
+    if (policy.deletedAt) {
+      throw new BadRequestError({
+        message: "The policy associated with this access request has been deleted."
+      });
+    }
+
+    const { membership, hasRole } = await permissionService.getProjectPermission({
+      actor,
+      actorId,
+      projectId: accessApprovalRequest.projectId,
+      actorAuthMethod,
+      actorOrgId,
+      actionProjectType: ActionProjectType.SecretManager
+    });
+
+    if (!membership) {
+      throw new ForbiddenRequestError({ message: "You are not a member of this project" });
+    }
+
+    const isApprover = policy.approvers.find((approver) => approver.userId === actorId);
+
+    if (!hasRole(ProjectMembershipRole.Admin) && !isApprover) {
+      throw new ForbiddenRequestError({ message: "You are not authorized to modify this request" });
+    }
+
+    const project = await projectDAL.findById(accessApprovalRequest.projectId);
+
+    if (!project) {
+      throw new NotFoundError({
+        message: `The project associated with this access request was not found. [projectId=${accessApprovalRequest.projectId}]`
+      });
+    }
+
+    if (accessApprovalRequest.status !== ApprovalStatus.PENDING) {
+      throw new BadRequestError({ message: "The request has been closed" });
+    }
+
+    const editedByUser = await userDAL.findById(actorId);
+
+    if (!editedByUser) throw new NotFoundError({ message: "Editing user not found" });
+
+    if (accessApprovalRequest.isTemporary && accessApprovalRequest.temporaryRange) {
+      if (ms(temporaryRange) > ms(accessApprovalRequest.temporaryRange)) {
+        throw new BadRequestError({ message: "Updated access duration must be less than current access duration" });
+      }
+    }
+
+    const { envSlug, secretPath, accessTypes } = verifyRequestedPermissions({
+      permissions: accessApprovalRequest.permissions
+    });
+
+    const approval = await accessApprovalRequestDAL.transaction(async (tx) => {
+      const approvalRequest = await accessApprovalRequestDAL.updateById(
+        requestId,
+        {
+          temporaryRange,
+          isTemporary: true,
+          editNote,
+          editedByUserId: actorId
+        },
+        tx
+      );
+
+      // reset review progress
+      await accessApprovalRequestReviewerDAL.delete(
+        {
+          requestId
+        },
+        tx
+      );
+
+      const requesterFullName = `${requestedByUser.firstName} ${requestedByUser.lastName}`;
+      const editorFullName = `${editedByUser.firstName} ${editedByUser.lastName}`;
+      const approvalUrl = `${cfg.SITE_URL}/projects/secret-management/${project.id}/approval`;
+
+      await triggerWorkflowIntegrationNotification({
+        input: {
+          notification: {
+            type: TriggerFeature.ACCESS_REQUEST_UPDATED,
+            payload: {
+              projectName: project.name,
+              requesterFullName,
+              isTemporary: true,
+              requesterEmail: requestedByUser.email as string,
+              secretPath,
+              environment: envSlug,
+              permissions: accessTypes,
+              approvalUrl,
+              editNote,
+              editorEmail: editedByUser.email as string,
+              editorFullName
+            }
+          },
+          projectId: project.id
+        },
+        dependencies: {
+          projectDAL,
+          projectSlackConfigDAL,
+          kmsService,
+          microsoftTeamsService,
+          projectMicrosoftTeamsConfigDAL
+        }
+      });
+
+      await smtpService.sendMail({
+        recipients: policy.approvers
+          .filter((approver) => Boolean(approver.email) && approver.userId !== editedByUser.id)
+          .map((approver) => approver.email!),
+        subjectLine: "Access Approval Request Updated",
+        substitutions: {
+          projectName: project.name,
+          requesterFullName,
+          requesterEmail: requestedByUser.email,
+          isTemporary: true,
+          expiresIn: msFn(ms(temporaryRange || ""), { long: true }),
+          secretPath,
+          environment: envSlug,
+          permissions: accessTypes,
+          approvalUrl,
+          editNote,
+          editorFullName,
+          editorEmail: editedByUser.email
+        },
+        template: SmtpTemplates.AccessApprovalRequestUpdated
+      });
+
+      return approvalRequest;
+    });
+
+    return { request: approval };
+  };
+
   const listApprovalRequests: TAccessApprovalRequestServiceFactory["listApprovalRequests"] = async ({
     projectSlug,
     authorUserId,
@@ -641,6 +799,7 @@ export const accessApprovalRequestServiceFactory = ({

   return {
     createAccessApprovalRequest,
+    updateAccessApprovalRequest,
     listApprovalRequests,
     reviewAccessRequest,
     getCount
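// Sketch (assumption-level, not the service code) of the two duration rules enforced by
// updateAccessApprovalRequest above: only pending requests can be edited, and an edited
// temporaryRange may only shorten the currently requested duration. Assumes ms() parses
// duration strings like the npm "ms" package.
import ms from "ms";

type PendingRequestView = { status: string; isTemporary: boolean; temporaryRange?: string | null };

function canApplyEdit(current: PendingRequestView, newRange: string): boolean {
  if (current.status !== "pending") return false; // closed requests cannot be edited
  if (current.isTemporary && current.temporaryRange && ms(newRange) > ms(current.temporaryRange)) {
    return false; // edits may not extend the originally requested duration
  }
  return true;
}

canApplyEdit({ status: "pending", isTemporary: true, temporaryRange: "8h" }, "2h"); // true
canApplyEdit({ status: "pending", isTemporary: true, temporaryRange: "2h" }, "8h"); // false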
@@ -30,6 +30,12 @@ export type TCreateAccessApprovalRequestDTO = {
   note?: string;
 } & Omit<TProjectPermission, "projectId">;

+export type TUpdateAccessApprovalRequestDTO = {
+  requestId: string;
+  temporaryRange: string;
+  editNote: string;
+} & Omit<TProjectPermission, "projectId">;
+
 export type TListApprovalRequestsDTO = {
   projectSlug: string;
   authorUserId?: string;
@@ -54,6 +60,23 @@ export interface TAccessApprovalRequestServiceFactory {
       privilegeDeletedAt?: Date | null | undefined;
     };
   }>;
+  updateAccessApprovalRequest: (arg: TUpdateAccessApprovalRequestDTO) => Promise<{
+    request: {
+      status: string;
+      id: string;
+      createdAt: Date;
+      updatedAt: Date;
+      policyId: string;
+      isTemporary: boolean;
+      requestedByUserId: string;
+      privilegeId?: string | null | undefined;
+      requestedBy?: string | null | undefined;
+      temporaryRange?: string | null | undefined;
+      permissions?: unknown;
+      note?: string | null | undefined;
+      privilegeDeletedAt?: Date | null | undefined;
+    };
+  }>;
   listApprovalRequests: (arg: TListApprovalRequestsDTO) => Promise<{
     requests: {
       policy: {
@@ -82,6 +105,7 @@ export interface TAccessApprovalRequestServiceFactory {
       allowedSelfApprovals: boolean;
       envId: string;
       deletedAt: Date | null | undefined;
+      maxTimePeriod?: string | null;
     };
     projectId: string;
     environment: string;
@@ -1,8 +1,6 @@
 import { AxiosError, RawAxiosRequestHeaders } from "axios";

-import { ProjectType, SecretKeyEncoding } from "@app/db/schemas";
-import { TEventBusService } from "@app/ee/services/event/event-bus-service";
-import { TopicName, toPublishableEvent } from "@app/ee/services/event/types";
+import { SecretKeyEncoding } from "@app/db/schemas";
 import { request } from "@app/lib/config/request";
 import { crypto } from "@app/lib/crypto/cryptography";
 import { logger } from "@app/lib/logger";
@@ -22,7 +20,6 @@ type TAuditLogQueueServiceFactoryDep = {
   queueService: TQueueServiceFactory;
   projectDAL: Pick<TProjectDALFactory, "findById">;
   licenseService: Pick<TLicenseServiceFactory, "getPlan">;
-  eventBusService: TEventBusService;
 };

 export type TAuditLogQueueServiceFactory = {
@@ -38,8 +35,7 @@ export const auditLogQueueServiceFactory = async ({
   queueService,
   projectDAL,
   licenseService,
-  auditLogStreamDAL,
-  eventBusService
+  auditLogStreamDAL
 }: TAuditLogQueueServiceFactoryDep): Promise<TAuditLogQueueServiceFactory> => {
   const pushToLog = async (data: TCreateAuditLogDTO) => {
     await queueService.queue<QueueName.AuditLog>(QueueName.AuditLog, QueueJobs.AuditLog, data, {
@@ -145,16 +141,6 @@ export const auditLogQueueServiceFactory = async ({
         )
       );
     }
-
-    const publishable = toPublishableEvent(event);
-
-    if (publishable) {
-      await eventBusService.publish(TopicName.CoreServers, {
-        type: ProjectType.SecretManager,
-        source: "infiscal",
-        data: publishable.data
-      });
-    }
   });

   return {
@@ -9,7 +9,7 @@ import { getDbConnectionHost } from "@app/lib/knex";
 export const verifyHostInputValidity = async (host: string, isGateway = false) => {
   const appCfg = getConfig();

-  if (appCfg.isDevelopmentMode) return [host];
+  if (appCfg.isDevelopmentMode || appCfg.isTestMode) return [host];

   if (isGateway) return [host];

@@ -15,6 +15,7 @@ import { z } from "zod";
 import { CustomAWSHasher } from "@app/lib/aws/hashing";
 import { crypto } from "@app/lib/crypto";
 import { BadRequestError } from "@app/lib/errors";
+import { sanitizeString } from "@app/lib/fn";
 import { validateHandlebarTemplate } from "@app/lib/template/validate-handlebars";

 import { DynamicSecretAwsElastiCacheSchema, TDynamicProviderFns } from "./models";
@@ -170,14 +171,29 @@ export const AwsElastiCacheDatabaseProvider = (): TDynamicProviderFns => {
   };
   const validateConnection = async (inputs: unknown) => {
     const providerInputs = await validateProviderInputs(inputs);
-    await ElastiCacheUserManager(
-      {
-        accessKeyId: providerInputs.accessKeyId,
-        secretAccessKey: providerInputs.secretAccessKey
-      },
-      providerInputs.region
-    ).verifyCredentials(providerInputs.clusterName);
-    return true;
+    try {
+      await ElastiCacheUserManager(
+        {
+          accessKeyId: providerInputs.accessKeyId,
+          secretAccessKey: providerInputs.secretAccessKey
+        },
+        providerInputs.region
+      ).verifyCredentials(providerInputs.clusterName);
+      return true;
+    } catch (err) {
+      const sanitizedErrorMessage = sanitizeString({
+        unsanitizedString: (err as Error)?.message,
+        tokens: [
+          providerInputs.accessKeyId,
+          providerInputs.secretAccessKey,
+          providerInputs.clusterName,
+          providerInputs.region
+        ]
+      });
+      throw new BadRequestError({
+        message: `Failed to connect with provider: ${sanitizedErrorMessage}`
+      });
+    }
   };

   const create = async (data: {
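// sanitizeString is imported from "@app/lib/fn" in the hunk above but its implementation is
// not shown in this diff. A plausible minimal sketch, assuming it simply redacts every
// provided token from the error message before it is surfaced to the caller:
function sanitizeStringSketch({ unsanitizedString, tokens }: { unsanitizedString?: string; tokens: string[] }) {
  return tokens
    .filter(Boolean)
    .reduce((msg, token) => msg.split(token).join("[REDACTED]"), unsanitizedString ?? "");
}

sanitizeStringSketch({
  unsanitizedString: "InvalidSignatureException: key AKIAEXAMPLE was rejected",
  tokens: ["AKIAEXAMPLE"]
}); // "InvalidSignatureException: key [REDACTED] was rejected"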
@@ -206,21 +222,37 @@ export const AwsElastiCacheDatabaseProvider = (): TDynamicProviderFns => {

     const parsedStatement = CreateElastiCacheUserSchema.parse(JSON.parse(creationStatement));

-    await ElastiCacheUserManager(
-      {
-        accessKeyId: providerInputs.accessKeyId,
-        secretAccessKey: providerInputs.secretAccessKey
-      },
-      providerInputs.region
-    ).createUser(parsedStatement, providerInputs.clusterName);
-
-    return {
-      entityId: leaseUsername,
-      data: {
-        DB_USERNAME: leaseUsername,
-        DB_PASSWORD: leasePassword
-      }
-    };
+    try {
+      await ElastiCacheUserManager(
+        {
+          accessKeyId: providerInputs.accessKeyId,
+          secretAccessKey: providerInputs.secretAccessKey
+        },
+        providerInputs.region
+      ).createUser(parsedStatement, providerInputs.clusterName);
+
+      return {
+        entityId: leaseUsername,
+        data: {
+          DB_USERNAME: leaseUsername,
+          DB_PASSWORD: leasePassword
+        }
+      };
+    } catch (err) {
+      const sanitizedErrorMessage = sanitizeString({
+        unsanitizedString: (err as Error)?.message,
+        tokens: [
+          leaseUsername,
+          leasePassword,
+          providerInputs.accessKeyId,
+          providerInputs.secretAccessKey,
+          providerInputs.clusterName
+        ]
+      });
+      throw new BadRequestError({
+        message: `Failed to create lease from provider: ${sanitizedErrorMessage}`
+      });
+    }
   };

   const revoke = async (inputs: unknown, entityId: string) => {
@@ -229,15 +261,25 @@ export const AwsElastiCacheDatabaseProvider = (): TDynamicProviderFns => {
     const revokeStatement = handlebars.compile(providerInputs.revocationStatement)({ username: entityId });
     const parsedStatement = DeleteElasticCacheUserSchema.parse(JSON.parse(revokeStatement));

-    await ElastiCacheUserManager(
-      {
-        accessKeyId: providerInputs.accessKeyId,
-        secretAccessKey: providerInputs.secretAccessKey
-      },
-      providerInputs.region
-    ).deleteUser(parsedStatement);
-
-    return { entityId };
+    try {
+      await ElastiCacheUserManager(
+        {
+          accessKeyId: providerInputs.accessKeyId,
+          secretAccessKey: providerInputs.secretAccessKey
+        },
+        providerInputs.region
+      ).deleteUser(parsedStatement);
+
+      return { entityId };
+    } catch (err) {
+      const sanitizedErrorMessage = sanitizeString({
+        unsanitizedString: (err as Error)?.message,
+        tokens: [entityId, providerInputs.accessKeyId, providerInputs.secretAccessKey, providerInputs.clusterName]
+      });
+      throw new BadRequestError({
+        message: `Failed to revoke lease from provider: ${sanitizedErrorMessage}`
+      });
+    }
   };

   const renew = async (_inputs: unknown, entityId: string) => {
@@ -23,6 +23,7 @@ import { CustomAWSHasher } from "@app/lib/aws/hashing";
 import { getConfig } from "@app/lib/config/env";
 import { crypto } from "@app/lib/crypto/cryptography";
 import { BadRequestError, UnauthorizedError } from "@app/lib/errors";
+import { sanitizeString } from "@app/lib/fn";
 import { alphaNumericNanoId } from "@app/lib/nanoid";

 import { AwsIamAuthType, DynamicSecretAwsIamSchema, TDynamicProviderFns } from "./models";
@@ -118,22 +119,39 @@ export const AwsIamProvider = (): TDynamicProviderFns => {

   const validateConnection = async (inputs: unknown, { projectId }: { projectId: string }) => {
     const providerInputs = await validateProviderInputs(inputs);
-    const client = await $getClient(providerInputs, projectId);
-    const isConnected = await client
-      .send(new GetUserCommand({}))
-      .then(() => true)
-      .catch((err) => {
-        const message = (err as Error)?.message;
-        if (
-          (providerInputs.method === AwsIamAuthType.AssumeRole || providerInputs.method === AwsIamAuthType.IRSA) &&
-          // assume role will throw an error asking to provider username, but if so this has access in aws correctly
-          message.includes("Must specify userName when calling with non-User credentials")
-        ) {
-          return true;
-        }
-        throw err;
-      });
-    return isConnected;
+    try {
+      const client = await $getClient(providerInputs, projectId);
+      const isConnected = await client
+        .send(new GetUserCommand({}))
+        .then(() => true)
+        .catch((err) => {
+          const message = (err as Error)?.message;
+          if (
+            (providerInputs.method === AwsIamAuthType.AssumeRole || providerInputs.method === AwsIamAuthType.IRSA) &&
+            // assume role will throw an error asking to provider username, but if so this has access in aws correctly
+            message.includes("Must specify userName when calling with non-User credentials")
+          ) {
+            return true;
+          }
+          throw err;
+        });
+      return isConnected;
+    } catch (err) {
+      const sensitiveTokens = [];
+      if (providerInputs.method === AwsIamAuthType.AccessKey) {
+        sensitiveTokens.push(providerInputs.accessKey, providerInputs.secretAccessKey);
+      }
+      if (providerInputs.method === AwsIamAuthType.AssumeRole) {
+        sensitiveTokens.push(providerInputs.roleArn);
+      }
+      const sanitizedErrorMessage = sanitizeString({
+        unsanitizedString: (err as Error)?.message,
+        tokens: sensitiveTokens
+      });
+      throw new BadRequestError({
+        message: `Failed to connect with provider: ${sanitizedErrorMessage}`
+      });
+    }
   };

   const create = async (data: {
@@ -162,62 +180,81 @@ export const AwsIamProvider = (): TDynamicProviderFns => {
       awsTags.push(...additionalTags);
     }

-    const createUserRes = await client.send(
-      new CreateUserCommand({
-        Path: awsPath,
-        PermissionsBoundary: permissionBoundaryPolicyArn || undefined,
-        Tags: awsTags,
-        UserName: username
-      })
-    );
-
-    if (!createUserRes.User) throw new BadRequestError({ message: "Failed to create AWS IAM User" });
-    if (userGroups) {
-      await Promise.all(
-        userGroups
-          .split(",")
-          .filter(Boolean)
-          .map((group) =>
-            client.send(new AddUserToGroupCommand({ UserName: createUserRes?.User?.UserName, GroupName: group }))
-          )
-      );
-    }
-    if (policyArns) {
-      await Promise.all(
-        policyArns
-          .split(",")
-          .filter(Boolean)
-          .map((policyArn) =>
-            client.send(new AttachUserPolicyCommand({ UserName: createUserRes?.User?.UserName, PolicyArn: policyArn }))
-          )
-      );
-    }
-    if (policyDocument) {
-      await client.send(
-        new PutUserPolicyCommand({
-          UserName: createUserRes.User.UserName,
-          PolicyName: `infisical-dynamic-policy-${alphaNumericNanoId(4)}`,
-          PolicyDocument: policyDocument
-        })
-      );
-    }
-
-    const createAccessKeyRes = await client.send(
-      new CreateAccessKeyCommand({
-        UserName: createUserRes.User.UserName
-      })
-    );
-    if (!createAccessKeyRes.AccessKey)
-      throw new BadRequestError({ message: "Failed to create AWS IAM User access key" });
-
-    return {
-      entityId: username,
-      data: {
-        ACCESS_KEY: createAccessKeyRes.AccessKey.AccessKeyId,
-        SECRET_ACCESS_KEY: createAccessKeyRes.AccessKey.SecretAccessKey,
-        USERNAME: username
-      }
-    };
+    try {
+      const createUserRes = await client.send(
+        new CreateUserCommand({
+          Path: awsPath,
+          PermissionsBoundary: permissionBoundaryPolicyArn || undefined,
+          Tags: awsTags,
+          UserName: username
+        })
+      );
+
+      if (!createUserRes.User) throw new BadRequestError({ message: "Failed to create AWS IAM User" });
+      if (userGroups) {
+        await Promise.all(
+          userGroups
+            .split(",")
+            .filter(Boolean)
+            .map((group) =>
+              client.send(new AddUserToGroupCommand({ UserName: createUserRes?.User?.UserName, GroupName: group }))
+            )
+        );
+      }
+      if (policyArns) {
+        await Promise.all(
+          policyArns
+            .split(",")
+            .filter(Boolean)
+            .map((policyArn) =>
+              client.send(
+                new AttachUserPolicyCommand({ UserName: createUserRes?.User?.UserName, PolicyArn: policyArn })
+              )
+            )
+        );
+      }
+      if (policyDocument) {
+        await client.send(
+          new PutUserPolicyCommand({
+            UserName: createUserRes.User.UserName,
+            PolicyName: `infisical-dynamic-policy-${alphaNumericNanoId(4)}`,
+            PolicyDocument: policyDocument
+          })
+        );
+      }
+
+      const createAccessKeyRes = await client.send(
+        new CreateAccessKeyCommand({
+          UserName: createUserRes.User.UserName
+        })
+      );
+      if (!createAccessKeyRes.AccessKey)
+        throw new BadRequestError({ message: "Failed to create AWS IAM User access key" });
+
+      return {
+        entityId: username,
+        data: {
+          ACCESS_KEY: createAccessKeyRes.AccessKey.AccessKeyId,
+          SECRET_ACCESS_KEY: createAccessKeyRes.AccessKey.SecretAccessKey,
+          USERNAME: username
+        }
+      };
+    } catch (err) {
+      const sensitiveTokens = [username];
+      if (providerInputs.method === AwsIamAuthType.AccessKey) {
+        sensitiveTokens.push(providerInputs.accessKey, providerInputs.secretAccessKey);
+      }
+      if (providerInputs.method === AwsIamAuthType.AssumeRole) {
+        sensitiveTokens.push(providerInputs.roleArn);
+      }
+      const sanitizedErrorMessage = sanitizeString({
+        unsanitizedString: (err as Error)?.message,
+        tokens: sensitiveTokens
+      });
+      throw new BadRequestError({
+        message: `Failed to create lease from provider: ${sanitizedErrorMessage}`
+      });
+    }
   };

   const revoke = async (inputs: unknown, entityId: string, metadata: { projectId: string }) => {
@@ -278,8 +315,25 @@ export const AwsIamProvider = (): TDynamicProviderFns => {
       )
     );

-    await client.send(new DeleteUserCommand({ UserName: username }));
-    return { entityId: username };
+    try {
+      await client.send(new DeleteUserCommand({ UserName: username }));
+      return { entityId: username };
+    } catch (err) {
+      const sensitiveTokens = [username];
+      if (providerInputs.method === AwsIamAuthType.AccessKey) {
+        sensitiveTokens.push(providerInputs.accessKey, providerInputs.secretAccessKey);
+      }
+      if (providerInputs.method === AwsIamAuthType.AssumeRole) {
+        sensitiveTokens.push(providerInputs.roleArn);
+      }
+      const sanitizedErrorMessage = sanitizeString({
+        unsanitizedString: (err as Error)?.message,
+        tokens: sensitiveTokens
+      });
+      throw new BadRequestError({
+        message: `Failed to revoke lease from provider: ${sanitizedErrorMessage}`
+      });
+    }
   };

   const renew = async (_inputs: unknown, entityId: string) => {
@@ -2,6 +2,7 @@ import axios from "axios";
 import { customAlphabet } from "nanoid";

 import { BadRequestError } from "@app/lib/errors";
+import { sanitizeString } from "@app/lib/fn";

 import { AzureEntraIDSchema, TDynamicProviderFns } from "./models";

@@ -51,45 +52,82 @@ export const AzureEntraIDProvider = (): TDynamicProviderFns & {

   const validateConnection = async (inputs: unknown) => {
     const providerInputs = await validateProviderInputs(inputs);
-    const data = await $getToken(providerInputs.tenantId, providerInputs.applicationId, providerInputs.clientSecret);
-    return data.success;
+    try {
+      const data = await $getToken(providerInputs.tenantId, providerInputs.applicationId, providerInputs.clientSecret);
+      return data.success;
+    } catch (err) {
+      const sanitizedErrorMessage = sanitizeString({
+        unsanitizedString: (err as Error)?.message,
+        tokens: [providerInputs.clientSecret, providerInputs.applicationId, providerInputs.tenantId]
+      });
+      throw new BadRequestError({
+        message: `Failed to connect with provider: ${sanitizedErrorMessage}`
+      });
+    }
   };

   const create = async ({ inputs }: { inputs: unknown }) => {
     const providerInputs = await validateProviderInputs(inputs);
-    const data = await $getToken(providerInputs.tenantId, providerInputs.applicationId, providerInputs.clientSecret);
-    if (!data.success) {
-      throw new BadRequestError({ message: "Failed to authorize to Microsoft Entra ID" });
-    }

     const password = generatePassword();
-
-    const response = await axios.patch(
-      `${MSFT_GRAPH_API_URL}/users/${providerInputs.userId}`,
-      {
-        passwordProfile: {
-          forceChangePasswordNextSignIn: false,
-          password
-        }
-      },
-      {
-        headers: {
-          "Content-Type": "application/json",
-          Authorization: `Bearer ${data.token}`
-        }
-      }
-    );
-    if (response.status !== 204) {
-      throw new BadRequestError({ message: "Failed to update password" });
-    }
-
-    return { entityId: providerInputs.userId, data: { email: providerInputs.email, password } };
+    try {
+      const data = await $getToken(providerInputs.tenantId, providerInputs.applicationId, providerInputs.clientSecret);
+      if (!data.success) {
+        throw new BadRequestError({ message: "Failed to authorize to Microsoft Entra ID" });
+      }
+
+      const response = await axios.patch(
+        `${MSFT_GRAPH_API_URL}/users/${providerInputs.userId}`,
+        {
+          passwordProfile: {
+            forceChangePasswordNextSignIn: false,
+            password
+          }
+        },
+        {
+          headers: {
+            "Content-Type": "application/json",
+            Authorization: `Bearer ${data.token}`
+          }
+        }
+      );
+      if (response.status !== 204) {
+        throw new BadRequestError({ message: "Failed to update password" });
+      }
+
+      return { entityId: providerInputs.userId, data: { email: providerInputs.email, password } };
+    } catch (err) {
+      const sanitizedErrorMessage = sanitizeString({
+        unsanitizedString: (err as Error)?.message,
+        tokens: [
+          providerInputs.clientSecret,
+          providerInputs.applicationId,
+          providerInputs.userId,
+          providerInputs.email,
+          password
+        ]
+      });
+      throw new BadRequestError({
+        message: `Failed to create lease from provider: ${sanitizedErrorMessage}`
+      });
+    }
   };

   const revoke = async (inputs: unknown, entityId: string) => {
-    // Creates a new password
-    await create({ inputs });
-    return { entityId };
+    const providerInputs = await validateProviderInputs(inputs);
+    try {
+      // Creates a new password
+      await create({ inputs });
+      return { entityId };
+    } catch (err) {
+      const sanitizedErrorMessage = sanitizeString({
+        unsanitizedString: (err as Error)?.message,
+        tokens: [providerInputs.clientSecret, providerInputs.applicationId, entityId]
+      });
+      throw new BadRequestError({
+        message: `Failed to revoke lease from provider: ${sanitizedErrorMessage}`
+      });
+    }
   };

   const fetchAzureEntraIdUsers = async (tenantId: string, applicationId: string, clientSecret: string) => {
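// The wrap-in-try/catch-and-sanitize pattern above repeats for every dynamic secret provider
// touched by this change (ElastiCache, AWS IAM, Azure Entra ID, and Cassandra below). A
// sketch of a shared helper that could express it once -- this helper does not exist in the
// repository; the imports mirror the ones already used in the diff:
import { BadRequestError } from "@app/lib/errors";
import { sanitizeString } from "@app/lib/fn";

const withSanitizedError = async <T>(tokens: string[], action: string, fn: () => Promise<T>): Promise<T> => {
  try {
    return await fn();
  } catch (err) {
    const sanitizedErrorMessage = sanitizeString({ unsanitizedString: (err as Error)?.message, tokens });
    throw new BadRequestError({ message: `Failed to ${action} from provider: ${sanitizedErrorMessage}` });
  }
};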
@@ -3,6 +3,8 @@ import handlebars from "handlebars";
 import { customAlphabet } from "nanoid";
 import { z } from "zod";

+import { BadRequestError } from "@app/lib/errors";
+import { sanitizeString } from "@app/lib/fn";
 import { alphaNumericNanoId } from "@app/lib/nanoid";
 import { validateHandlebarTemplate } from "@app/lib/template/validate-handlebars";

@@ -71,9 +73,24 @@ export const CassandraProvider = (): TDynamicProviderFns => {
     const providerInputs = await validateProviderInputs(inputs);
     const client = await $getClient(providerInputs);

-    const isConnected = await client.execute("SELECT * FROM system_schema.keyspaces").then(() => true);
-    await client.shutdown();
-    return isConnected;
+    try {
+      const isConnected = await client.execute("SELECT * FROM system_schema.keyspaces").then(() => true);
+      await client.shutdown();
+      return isConnected;
+    } catch (err) {
+      const tokens = [providerInputs.password, providerInputs.username];
+      if (providerInputs.keyspace) {
+        tokens.push(providerInputs.keyspace);
+      }
+      const sanitizedErrorMessage = sanitizeString({
+        unsanitizedString: (err as Error)?.message,
+        tokens
+      });
+      await client.shutdown();
+      throw new BadRequestError({
+        message: `Failed to connect with provider: ${sanitizedErrorMessage}`
+      });
+    }
   };

   const create = async (data: {
@@ -89,23 +106,39 @@ export const CassandraProvider = (): TDynamicProviderFns => {
|
|||||||
const username = generateUsername(usernameTemplate, identity);
|
const username = generateUsername(usernameTemplate, identity);
|
||||||
const password = generatePassword();
|
const password = generatePassword();
|
||||||
const { keyspace } = providerInputs;
|
const { keyspace } = providerInputs;
|
||||||
const expiration = new Date(expireAt).toISOString();
|
|
||||||
|
|
||||||
const creationStatement = handlebars.compile(providerInputs.creationStatement, { noEscape: true })({
|
try {
|
||||||
username,
|
const expiration = new Date(expireAt).toISOString();
|
||||||
password,
|
|
||||||
expiration,
|
|
||||||
keyspace
|
|
||||||
});
|
|
||||||
|
|
||||||
const queries = creationStatement.toString().split(";").filter(Boolean);
|
const creationStatement = handlebars.compile(providerInputs.creationStatement, { noEscape: true })({
|
||||||
for (const query of queries) {
|
username,
|
||||||
// eslint-disable-next-line
|
password,
|
||||||
await client.execute(query);
|
expiration,
|
||||||
|
keyspace
|
||||||
|
});
|
||||||
|
|
||||||
|
const queries = creationStatement.toString().split(";").filter(Boolean);
|
||||||
|
for (const query of queries) {
|
||||||
|
// eslint-disable-next-line
|
||||||
|
await client.execute(query);
|
||||||
|
}
|
||||||
|
await client.shutdown();
|
||||||
|
|
||||||
|
return { entityId: username, data: { DB_USERNAME: username, DB_PASSWORD: password } };
|
||||||
|
} catch (err) {
|
||||||
|
const tokens = [username, password];
|
||||||
|
if (keyspace) {
|
||||||
|
tokens.push(keyspace);
|
||||||
|
}
|
||||||
|
const sanitizedErrorMessage = sanitizeString({
|
||||||
|
unsanitizedString: (err as Error)?.message,
|
||||||
|
tokens
|
||||||
|
});
|
||||||
|
await client.shutdown();
|
||||||
|
throw new BadRequestError({
|
||||||
|
message: `Failed to create lease from provider: ${sanitizedErrorMessage}`
|
||||||
|
});
|
||||||
}
|
}
|
||||||
await client.shutdown();
|
|
||||||
|
|
||||||
return { entityId: username, data: { DB_USERNAME: username, DB_PASSWORD: password } };
|
|
||||||
};
|
};
|
||||||
|
|
||||||
const revoke = async (inputs: unknown, entityId: string) => {
|
const revoke = async (inputs: unknown, entityId: string) => {
|
||||||
@@ -115,14 +148,29 @@ export const CassandraProvider = (): TDynamicProviderFns => {
|
|||||||
const username = entityId;
|
const username = entityId;
|
||||||
const { keyspace } = providerInputs;
|
const { keyspace } = providerInputs;
|
||||||
|
|
||||||
const revokeStatement = handlebars.compile(providerInputs.revocationStatement)({ username, keyspace });
|
try {
|
||||||
const queries = revokeStatement.toString().split(";").filter(Boolean);
|
const revokeStatement = handlebars.compile(providerInputs.revocationStatement)({ username, keyspace });
|
||||||
for (const query of queries) {
|
const queries = revokeStatement.toString().split(";").filter(Boolean);
|
||||||
// eslint-disable-next-line
|
for (const query of queries) {
|
||||||
await client.execute(query);
|
// eslint-disable-next-line
|
||||||
|
await client.execute(query);
|
||||||
|
}
|
||||||
|
await client.shutdown();
|
||||||
|
return { entityId: username };
|
||||||
|
} catch (err) {
|
||||||
|
const tokens = [username];
|
||||||
|
if (keyspace) {
|
||||||
|
tokens.push(keyspace);
|
||||||
|
}
|
||||||
|
const sanitizedErrorMessage = sanitizeString({
|
||||||
|
unsanitizedString: (err as Error)?.message,
|
||||||
|
tokens
|
||||||
|
});
|
||||||
|
await client.shutdown();
|
||||||
|
throw new BadRequestError({
|
||||||
|
message: `Failed to revoke lease from provider: ${sanitizedErrorMessage}`
|
||||||
|
});
|
||||||
}
|
}
|
||||||
await client.shutdown();
|
|
||||||
return { entityId: username };
|
|
||||||
};
|
};
|
||||||
|
|
||||||
const renew = async (inputs: unknown, entityId: string, expireAt: number) => {
|
const renew = async (inputs: unknown, entityId: string, expireAt: number) => {
|
||||||
@@ -130,21 +178,36 @@ export const CassandraProvider = (): TDynamicProviderFns => {
|
|||||||
if (!providerInputs.renewStatement) return { entityId };
|
if (!providerInputs.renewStatement) return { entityId };
|
||||||
|
|
||||||
const client = await $getClient(providerInputs);
|
const client = await $getClient(providerInputs);
|
||||||
|
|
||||||
const expiration = new Date(expireAt).toISOString();
|
|
||||||
const { keyspace } = providerInputs;
|
const { keyspace } = providerInputs;
|
||||||
|
|
||||||
const renewStatement = handlebars.compile(providerInputs.renewStatement)({
|
try {
|
||||||
username: entityId,
|
const expiration = new Date(expireAt).toISOString();
|
||||||
keyspace,
|
|
||||||
expiration
|
const renewStatement = handlebars.compile(providerInputs.renewStatement)({
|
||||||
});
|
username: entityId,
|
||||||
const queries = renewStatement.toString().split(";").filter(Boolean);
|
keyspace,
|
||||||
for await (const query of queries) {
|
expiration
|
||||||
await client.execute(query);
|
});
|
||||||
|
const queries = renewStatement.toString().split(";").filter(Boolean);
|
||||||
|
for await (const query of queries) {
|
||||||
|
await client.execute(query);
|
||||||
|
}
|
||||||
|
await client.shutdown();
|
||||||
|
return { entityId };
|
||||||
|
} catch (err) {
|
||||||
|
const tokens = [entityId];
|
||||||
|
if (keyspace) {
|
||||||
|
tokens.push(keyspace);
|
||||||
|
}
|
||||||
|
const sanitizedErrorMessage = sanitizeString({
|
||||||
|
unsanitizedString: (err as Error)?.message,
|
||||||
|
tokens
|
||||||
|
});
|
||||||
|
await client.shutdown();
|
||||||
|
throw new BadRequestError({
|
||||||
|
message: `Failed to renew lease from provider: ${sanitizedErrorMessage}`
|
||||||
|
});
|
||||||
}
|
}
|
||||||
await client.shutdown();
|
|
||||||
return { entityId };
|
|
||||||
};
|
};
|
||||||
|
|
||||||
return {
|
return {
|
||||||
|
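As a side note on the Cassandra statements above: the provider compiles a Handlebars template and then splits the result on ";" into individual CQL statements, one client.execute() per statement. A small standalone sketch of that flow follows; the template text and values are illustrative assumptions, not the project's defaults.

import handlebars from "handlebars";

// Illustrative template only; real templates come from providerInputs.creationStatement.
const creationTemplate =
  "CREATE ROLE {{username}} WITH PASSWORD = '{{password}}' AND LOGIN = true;" +
  "GRANT SELECT ON KEYSPACE {{keyspace}} TO {{username}};";

const creationStatement = handlebars.compile(creationTemplate, { noEscape: true })({
  username: "infisical_abc123",
  password: "s3cretValue",
  expiration: new Date().toISOString(),
  keyspace: "app_data"
});

// Same splitting logic as the provider: one execute() call per non-empty statement.
const queries = creationStatement.toString().split(";").filter(Boolean);
// queries[0] -> "CREATE ROLE infisical_abc123 WITH PASSWORD = 's3cretValue' AND LOGIN = true"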
backend/src/ee/services/dynamic-secret/providers/couchbase.ts (new file, 289 lines)
@@ -0,0 +1,289 @@
import crypto from "node:crypto";

import axios from "axios";
import RE2 from "re2";

import { BadRequestError } from "@app/lib/errors";
import { sanitizeString } from "@app/lib/fn";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { blockLocalAndPrivateIpAddresses } from "@app/lib/validator/validate-url";

import { DynamicSecretCouchbaseSchema, PasswordRequirements, TDynamicProviderFns } from "./models";
import { compileUsernameTemplate } from "./templateUtils";

type TCreateCouchbaseUser = {
  name: string;
  password: string;
  access: {
    privileges: string[];
    resources: {
      buckets: {
        name: string;
        scopes?: {
          name: string;
          collections?: string[];
        }[];
      }[];
    };
  }[];
};

type CouchbaseUserResponse = {
  id: string;
  uuid?: string;
};

const sanitizeCouchbaseUsername = (username: string): string => {
  // Couchbase username restrictions:
  // - Cannot contain: ) ( > < , ; : " \ / ] [ ? = } {
  // - Cannot begin with @ character

  const forbiddenCharsPattern = new RE2('[\\)\\(><,;:"\\\\\\[\\]\\?=\\}\\{]', "g");
  let sanitized = forbiddenCharsPattern.replace(username, "-");

  const leadingAtPattern = new RE2("^@+");
  sanitized = leadingAtPattern.replace(sanitized, "");

  if (!sanitized || sanitized.length === 0) {
    return alphaNumericNanoId(12);
  }

  return sanitized;
};

/**
 * Normalizes bucket configuration to handle wildcard (*) access consistently.
 *
 * Key behaviors:
 * - If "*" appears anywhere (string or array), grants access to ALL buckets, scopes, and collections
 *
 * @param buckets - Either a string or array of bucket configurations
 * @returns Normalized bucket resources for Couchbase API
 */
const normalizeBucketConfiguration = (
  buckets:
    | string
    | Array<{
        name: string;
        scopes?: Array<{
          name: string;
          collections?: string[];
        }>;
      }>
) => {
  if (typeof buckets === "string") {
    // Simple string format - either "*" or comma-separated bucket names
    const bucketNames = buckets
      .split(",")
      .map((bucket) => bucket.trim())
      .filter((bucket) => bucket.length > 0);

    // If "*" is present anywhere, grant access to all buckets, scopes, and collections
    if (bucketNames.includes("*") || buckets === "*") {
      return [{ name: "*" }];
    }
    return bucketNames.map((bucketName) => ({ name: bucketName }));
  }

  // Array of bucket objects with scopes and collections
  // Check if any bucket is "*" - if so, grant access to all buckets, scopes, and collections
  const hasWildcardBucket = buckets.some((bucket) => bucket.name === "*");

  if (hasWildcardBucket) {
    return [{ name: "*" }];
  }

  return buckets.map((bucket) => ({
    name: bucket.name,
    scopes: bucket.scopes?.map((scope) => ({
      name: scope.name,
      collections: scope.collections || []
    }))
  }));
};

const generateUsername = (usernameTemplate?: string | null, identity?: { name: string }) => {
  const randomUsername = alphaNumericNanoId(12);
  if (!usernameTemplate) return sanitizeCouchbaseUsername(randomUsername);

  const compiledUsername = compileUsernameTemplate({
    usernameTemplate,
    randomUsername,
    identity
  });

  return sanitizeCouchbaseUsername(compiledUsername);
};

const generatePassword = (requirements?: PasswordRequirements): string => {
  const {
    length = 12,
    required = { lowercase: 1, uppercase: 1, digits: 1, symbols: 1 },
    allowedSymbols = "!@#$%^()_+-=[]{}:,?/~`"
  } = requirements || {};

  const lowercase = "abcdefghijklmnopqrstuvwxyz";
  const uppercase = "ABCDEFGHIJKLMNOPQRSTUVWXYZ";
  const digits = "0123456789";
  const symbols = allowedSymbols;

  let password = "";
  let remaining = length;

  // Add required characters
  for (let i = 0; i < required.lowercase; i += 1) {
    password += lowercase[crypto.randomInt(lowercase.length)];
    remaining -= 1;
  }
  for (let i = 0; i < required.uppercase; i += 1) {
    password += uppercase[crypto.randomInt(uppercase.length)];
    remaining -= 1;
  }
  for (let i = 0; i < required.digits; i += 1) {
    password += digits[crypto.randomInt(digits.length)];
    remaining -= 1;
  }
  for (let i = 0; i < required.symbols; i += 1) {
    password += symbols[crypto.randomInt(symbols.length)];
    remaining -= 1;
  }

  // Fill remaining with random characters from all sets
  const allChars = lowercase + uppercase + digits + symbols;
  for (let i = 0; i < remaining; i += 1) {
    password += allChars[crypto.randomInt(allChars.length)];
  }

  // Shuffle the password
  return password
    .split("")
    .sort(() => crypto.randomInt(3) - 1)
    .join("");
};

const couchbaseApiRequest = async (
  method: string,
  url: string,
  apiKey: string,
  data?: unknown
): Promise<CouchbaseUserResponse> => {
  await blockLocalAndPrivateIpAddresses(url);

  try {
    const response = await axios({
      method: method.toLowerCase() as "get" | "post" | "put" | "delete",
      url,
      headers: {
        Authorization: `Bearer ${apiKey}`,
        "Content-Type": "application/json"
      },
      data: data || undefined,
      timeout: 30000
    });

    return response.data as CouchbaseUserResponse;
  } catch (err) {
    const sanitizedErrorMessage = sanitizeString({
      unsanitizedString: (err as Error)?.message,
      tokens: [apiKey]
    });
    throw new BadRequestError({
      message: `Failed to connect with provider: ${sanitizedErrorMessage}`
    });
  }
};

export const CouchbaseProvider = (): TDynamicProviderFns => {
  const validateProviderInputs = async (inputs: object) => {
    const providerInputs = DynamicSecretCouchbaseSchema.parse(inputs);

    await blockLocalAndPrivateIpAddresses(providerInputs.url);

    return providerInputs;
  };

  const validateConnection = async (inputs: unknown): Promise<boolean> => {
    try {
      const providerInputs = await validateProviderInputs(inputs as object);

      // Test connection by trying to get organization info
      const url = `${providerInputs.url}/v4/organizations/${providerInputs.orgId}`;
      await couchbaseApiRequest("GET", url, providerInputs.auth.apiKey);

      return true;
    } catch (error) {
      throw new BadRequestError({
        message: `Failed to connect to Couchbase: ${error instanceof Error ? error.message : "Unknown error"}`
      });
    }
  };

  const create = async ({
    inputs,
    usernameTemplate,
    identity
  }: {
    inputs: unknown;
    usernameTemplate?: string | null;
    identity?: { name: string };
  }) => {
    const providerInputs = await validateProviderInputs(inputs as object);

    const username = generateUsername(usernameTemplate, identity);

    const password = generatePassword(providerInputs.passwordRequirements);

    const createUserUrl = `${providerInputs.url}/v4/organizations/${providerInputs.orgId}/projects/${providerInputs.projectId}/clusters/${providerInputs.clusterId}/users`;

    const bucketResources = normalizeBucketConfiguration(providerInputs.buckets);

    const userData: TCreateCouchbaseUser = {
      name: username,
      password,
      access: [
        {
          privileges: providerInputs.roles,
          resources: {
            buckets: bucketResources
          }
        }
      ]
    };

    const response = await couchbaseApiRequest("POST", createUserUrl, providerInputs.auth.apiKey, userData);

    const userUuid = response?.id || response?.uuid || username;

    return {
      entityId: userUuid,
      data: {
        username,
        password
      }
    };
  };

  const revoke = async (inputs: unknown, entityId: string) => {
    const providerInputs = await validateProviderInputs(inputs as object);

    const deleteUserUrl = `${providerInputs.url}/v4/organizations/${providerInputs.orgId}/projects/${providerInputs.projectId}/clusters/${providerInputs.clusterId}/users/${encodeURIComponent(entityId)}`;

    await couchbaseApiRequest("DELETE", deleteUserUrl, providerInputs.auth.apiKey);

    return { entityId };
  };

  const renew = async (_inputs: unknown, entityId: string) => {
    // Couchbase Cloud API doesn't support renewing user credentials
    // The user remains valid until explicitly deleted
    return { entityId };
  };

  return {
    validateProviderInputs,
    validateConnection,
    create,
    revoke,
    renew
  };
};
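For clarity on the wildcard handling described in the normalizeBucketConfiguration doc comment above, here is a short usage sketch of the three accepted input shapes; the bucket, scope, and collection names are illustrative only.

// Wildcard string: any "*" collapses to full access.
normalizeBucketConfiguration("*"); // -> [{ name: "*" }]

// Comma-separated bucket names.
normalizeBucketConfiguration("orders, sessions"); // -> [{ name: "orders" }, { name: "sessions" }]

// Structured form with scopes and collections.
normalizeBucketConfiguration([
  { name: "orders", scopes: [{ name: "2024", collections: ["invoices"] }] }
]);
// -> [{ name: "orders", scopes: [{ name: "2024", collections: ["invoices"] }] }]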
@@ -2,6 +2,8 @@ import { Client as ElasticSearchClient } from "@elastic/elasticsearch";
import { customAlphabet } from "nanoid";
import { z } from "zod";

import { BadRequestError } from "@app/lib/errors";
import { sanitizeString } from "@app/lib/fn";
import { alphaNumericNanoId } from "@app/lib/nanoid";

import { verifyHostInputValidity } from "../dynamic-secret-fns";

@@ -63,12 +65,24 @@ export const ElasticSearchProvider = (): TDynamicProviderFns => {
    const providerInputs = await validateProviderInputs(inputs);
    const connection = await $getClient(providerInputs);

    try {
      const infoResponse = await connection.info().then(() => true);
      return infoResponse;
    } catch (err) {
      const tokens = [];
      if (providerInputs.auth.type === ElasticSearchAuthTypes.ApiKey) {
        tokens.push(providerInputs.auth.apiKey, providerInputs.auth.apiKeyId);
      } else {
        tokens.push(providerInputs.auth.username, providerInputs.auth.password);
      }
      const sanitizedErrorMessage = sanitizeString({
        unsanitizedString: (err as Error)?.message,
        tokens
      });
      throw new BadRequestError({
        message: `Failed to connect with provider: ${sanitizedErrorMessage}`
      });
    }
  };

  const create = async (data: { inputs: unknown; usernameTemplate?: string | null; identity?: { name: string } }) => {

@@ -79,27 +93,49 @@ export const ElasticSearchProvider = (): TDynamicProviderFns => {
    const username = generateUsername(usernameTemplate, identity);
    const password = generatePassword();

    try {
      await connection.security.putUser({
        username,
        password,
        full_name: "Managed by Infisical.com",
        roles: providerInputs.roles
      });

      await connection.close();
      return { entityId: username, data: { DB_USERNAME: username, DB_PASSWORD: password } };
    } catch (err) {
      const sanitizedErrorMessage = sanitizeString({
        unsanitizedString: (err as Error)?.message,
        tokens: [username, password]
      });
      await connection.close();
      throw new BadRequestError({
        message: `Failed to create lease from provider: ${sanitizedErrorMessage}`
      });
    }
  };

  const revoke = async (inputs: unknown, entityId: string) => {
    const providerInputs = await validateProviderInputs(inputs);
    const connection = await $getClient(providerInputs);

    try {
      await connection.security.deleteUser({
        username: entityId
      });

      await connection.close();
      return { entityId };
    } catch (err) {
      const sanitizedErrorMessage = sanitizeString({
        unsanitizedString: (err as Error)?.message,
        tokens: [entityId]
      });
      await connection.close();
      throw new BadRequestError({
        message: `Failed to revoke lease from provider: ${sanitizedErrorMessage}`
      });
    }
  };

  const renew = async (_inputs: unknown, entityId: string) => {
@@ -3,6 +3,7 @@ import { GetAccessTokenResponse } from "google-auth-library/build/src/auth/oauth2client";

import { getConfig } from "@app/lib/config/env";
import { BadRequestError, InternalServerError } from "@app/lib/errors";
import { sanitizeString } from "@app/lib/fn";
import { alphaNumericNanoId } from "@app/lib/nanoid";

import { DynamicSecretGcpIamSchema, TDynamicProviderFns } from "./models";

@@ -65,8 +66,18 @@ export const GcpIamProvider = (): TDynamicProviderFns => {

  const validateConnection = async (inputs: unknown) => {
    const providerInputs = await validateProviderInputs(inputs);
    try {
      await $getToken(providerInputs.serviceAccountEmail, 10);
      return true;
    } catch (err) {
      const sanitizedErrorMessage = sanitizeString({
        unsanitizedString: (err as Error)?.message,
        tokens: [providerInputs.serviceAccountEmail]
      });
      throw new BadRequestError({
        message: `Failed to connect with provider: ${sanitizedErrorMessage}`
      });
    }
  };

  const create = async (data: { inputs: unknown; expireAt: number }) => {

@@ -74,13 +85,23 @@ export const GcpIamProvider = (): TDynamicProviderFns => {

    const providerInputs = await validateProviderInputs(inputs);

    try {
      const now = Math.floor(Date.now() / 1000);
      const ttl = Math.max(Math.floor(expireAt / 1000) - now, 0);

      const token = await $getToken(providerInputs.serviceAccountEmail, ttl);
      const entityId = alphaNumericNanoId(32);

      return { entityId, data: { SERVICE_ACCOUNT_EMAIL: providerInputs.serviceAccountEmail, TOKEN: token } };
    } catch (err) {
      const sanitizedErrorMessage = sanitizeString({
        unsanitizedString: (err as Error)?.message,
        tokens: [providerInputs.serviceAccountEmail]
      });
      throw new BadRequestError({
        message: `Failed to create lease from provider: ${sanitizedErrorMessage}`
      });
    }
  };

  const revoke = async (_inputs: unknown, entityId: string) => {

@@ -89,10 +110,21 @@ export const GcpIamProvider = (): TDynamicProviderFns => {
  };

  const renew = async (inputs: unknown, entityId: string, expireAt: number) => {
    try {
      // To renew a token it must be re-created
      const data = await create({ inputs, expireAt });

      return { ...data, entityId };
    } catch (err) {
      const providerInputs = await validateProviderInputs(inputs);
      const sanitizedErrorMessage = sanitizeString({
        unsanitizedString: (err as Error)?.message,
        tokens: [providerInputs.serviceAccountEmail]
      });
      throw new BadRequestError({
        message: `Failed to renew lease from provider: ${sanitizedErrorMessage}`
      });
    }
  };

  return {
@@ -3,6 +3,7 @@ import jwt from "jsonwebtoken";

import { crypto } from "@app/lib/crypto";
import { BadRequestError, InternalServerError } from "@app/lib/errors";
import { sanitizeString } from "@app/lib/fn";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { IntegrationUrls } from "@app/services/integration-auth/integration-list";

@@ -89,26 +90,46 @@ export const GithubProvider = (): TDynamicProviderFns => {

  const validateConnection = async (inputs: unknown) => {
    const providerInputs = await validateProviderInputs(inputs);
    try {
      await $generateGitHubInstallationAccessToken(providerInputs);
      return true;
    } catch (err) {
      const sanitizedErrorMessage = sanitizeString({
        unsanitizedString: (err as Error)?.message,
        tokens: [providerInputs.privateKey, String(providerInputs.appId), String(providerInputs.installationId)]
      });
      throw new BadRequestError({
        message: `Failed to connect with provider: ${sanitizedErrorMessage}`
      });
    }
  };

  const create = async (data: { inputs: unknown }) => {
    const { inputs } = data;
    const providerInputs = await validateProviderInputs(inputs);

    try {
      const ghTokenData = await $generateGitHubInstallationAccessToken(providerInputs);
      const entityId = alphaNumericNanoId(32);

      return {
        entityId,
        data: {
          TOKEN: ghTokenData.token,
          EXPIRES_AT: ghTokenData.expires_at,
          PERMISSIONS: ghTokenData.permissions,
          REPOSITORY_SELECTION: ghTokenData.repository_selection
        }
      };
    } catch (err) {
      const sanitizedErrorMessage = sanitizeString({
        unsanitizedString: (err as Error)?.message,
        tokens: [providerInputs.privateKey, String(providerInputs.appId), String(providerInputs.installationId)]
      });
      throw new BadRequestError({
        message: `Failed to create lease from provider: ${sanitizedErrorMessage}`
      });
    }
  };

  const revoke = async () => {
@@ -5,6 +5,7 @@ import { AwsElastiCacheDatabaseProvider } from "./aws-elasticache";
import { AwsIamProvider } from "./aws-iam";
import { AzureEntraIDProvider } from "./azure-entra-id";
import { CassandraProvider } from "./cassandra";
import { CouchbaseProvider } from "./couchbase";
import { ElasticSearchProvider } from "./elastic-search";
import { GcpIamProvider } from "./gcp-iam";
import { GithubProvider } from "./github";

@@ -46,5 +47,6 @@ export const buildDynamicSecretProviders = ({
  [DynamicSecretProviders.Kubernetes]: KubernetesProvider({ gatewayService }),
  [DynamicSecretProviders.Vertica]: VerticaProvider({ gatewayService }),
  [DynamicSecretProviders.GcpIam]: GcpIamProvider(),
  [DynamicSecretProviders.Github]: GithubProvider(),
  [DynamicSecretProviders.Couchbase]: CouchbaseProvider()
});
@@ -2,7 +2,8 @@ import axios, { AxiosError } from "axios";
import handlebars from "handlebars";
import https from "https";

import { BadRequestError } from "@app/lib/errors";
import { sanitizeString } from "@app/lib/fn";
import { GatewayHttpProxyActions, GatewayProxyProtocol, withGatewayProxy } from "@app/lib/gateway";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { blockLocalAndPrivateIpAddresses } from "@app/lib/validator";

@@ -356,8 +357,12 @@ export const KubernetesProvider = ({ gatewayService }: TKubernetesProviderDTO):
        errorMessage = (error.response?.data as { message: string }).message;
      }

      const sanitizedErrorMessage = sanitizeString({
        unsanitizedString: errorMessage,
        tokens: [providerInputs.clusterToken || ""]
      });
      throw new BadRequestError({
        message: `Failed to connect with provider: ${sanitizedErrorMessage}`
      });
    }
  };

@@ -602,8 +607,12 @@ export const KubernetesProvider = ({ gatewayService }: TKubernetesProviderDTO):
        errorMessage = (error.response?.data as { message: string }).message;
      }

      const sanitizedErrorMessage = sanitizeString({
        unsanitizedString: errorMessage,
        tokens: [providerInputs.clusterToken || ""]
      });
      throw new BadRequestError({
        message: `Failed to create lease from provider: ${sanitizedErrorMessage}`
      });
    }
  };

@@ -683,50 +692,65 @@ export const KubernetesProvider = ({ gatewayService }: TKubernetesProviderDTO):
    };

    if (providerInputs.credentialType === KubernetesCredentialType.Dynamic) {
      try {
        const rawUrl =
          providerInputs.authMethod === KubernetesAuthMethod.Gateway
            ? GATEWAY_AUTH_DEFAULT_URL
            : providerInputs.url || "";

        const url = new URL(rawUrl);
        const k8sGatewayHost = url.hostname;
        const k8sPort = url.port ? Number(url.port) : 443;
        const k8sHost = `${url.protocol}//${url.hostname}`;

        const httpsAgent =
          providerInputs.ca && providerInputs.sslEnabled
            ? new https.Agent({
                ca: providerInputs.ca,
                rejectUnauthorized: true
              })
            : undefined;

        if (providerInputs.gatewayId) {
          if (providerInputs.authMethod === KubernetesAuthMethod.Gateway) {
            await $gatewayProxyWrapper(
              {
                gatewayId: providerInputs.gatewayId,
                targetHost: k8sHost,
                targetPort: k8sPort,
                httpsAgent,
                reviewTokenThroughGateway: true
              },
              serviceAccountDynamicCallback
            );
          } else {
            await $gatewayProxyWrapper(
              {
                gatewayId: providerInputs.gatewayId,
                targetHost: k8sGatewayHost,
                targetPort: k8sPort,
                httpsAgent,
                reviewTokenThroughGateway: false
              },
              serviceAccountDynamicCallback
            );
          }
        } else {
          await serviceAccountDynamicCallback(k8sHost, k8sPort, httpsAgent);
        }
      } catch (error) {
        let errorMessage = error instanceof Error ? error.message : "Unknown error";
        if (axios.isAxiosError(error) && (error.response?.data as { message: string })?.message) {
          errorMessage = (error.response?.data as { message: string }).message;
        }

        const sanitizedErrorMessage = sanitizeString({
          unsanitizedString: errorMessage,
          tokens: [entityId, providerInputs.clusterToken || ""]
        });
        throw new BadRequestError({
          message: `Failed to revoke lease from provider: ${sanitizedErrorMessage}`
        });
      }
    }
@@ -6,6 +6,7 @@ import RE2 from "re2";
import { z } from "zod";

import { BadRequestError } from "@app/lib/errors";
import { sanitizeString } from "@app/lib/fn";
import { alphaNumericNanoId } from "@app/lib/nanoid";

import { LdapCredentialType, LdapSchema, TDynamicProviderFns } from "./models";

@@ -91,8 +92,18 @@ export const LdapProvider = (): TDynamicProviderFns => {

  const validateConnection = async (inputs: unknown) => {
    const providerInputs = await validateProviderInputs(inputs);
    try {
      const client = await $getClient(providerInputs);
      return client.connected;
    } catch (err) {
      const sanitizedErrorMessage = sanitizeString({
        unsanitizedString: (err as Error)?.message,
        tokens: [providerInputs.bindpass, providerInputs.binddn]
      });
      throw new BadRequestError({
        message: `Failed to connect with provider: ${sanitizedErrorMessage}`
      });
    }
  };

  const executeLdif = async (client: ldapjs.Client, ldif_file: string) => {

@@ -205,11 +216,11 @@ export const LdapProvider = (): TDynamicProviderFns => {
    if (providerInputs.credentialType === LdapCredentialType.Static) {
      const dnRegex = new RE2("^dn:\\s*(.+)", "m");
      const dnMatch = dnRegex.exec(providerInputs.rotationLdif);
      const username = dnMatch?.[1];
      if (!username) throw new BadRequestError({ message: "Username not found from Ldif" });
      const password = generatePassword();

      if (dnMatch) {
        const generatedLdif = generateLDIF({ username, password, ldifTemplate: providerInputs.rotationLdif });

        try {

@@ -217,7 +228,11 @@ export const LdapProvider = (): TDynamicProviderFns => {

          return { entityId: username, data: { DN_ARRAY: dnArray, USERNAME: username, PASSWORD: password } };
        } catch (err) {
          const sanitizedErrorMessage = sanitizeString({
            unsanitizedString: (err as Error)?.message,
            tokens: [username, password, providerInputs.binddn, providerInputs.bindpass]
          });
          throw new BadRequestError({ message: sanitizedErrorMessage });
        }
      } else {
        throw new BadRequestError({

@@ -238,7 +253,11 @@ export const LdapProvider = (): TDynamicProviderFns => {
          const rollbackLdif = generateLDIF({ username, password, ldifTemplate: providerInputs.rollbackLdif });
          await executeLdif(client, rollbackLdif);
        }
        const sanitizedErrorMessage = sanitizeString({
          unsanitizedString: (err as Error)?.message,
          tokens: [username, password, providerInputs.binddn, providerInputs.bindpass]
        });
        throw new BadRequestError({ message: sanitizedErrorMessage });
      }
    }
  };

@@ -262,7 +281,11 @@ export const LdapProvider = (): TDynamicProviderFns => {

        return { entityId: username, data: { DN_ARRAY: dnArray, USERNAME: username, PASSWORD: password } };
      } catch (err) {
        const sanitizedErrorMessage = sanitizeString({
          unsanitizedString: (err as Error)?.message,
          tokens: [username, password, providerInputs.binddn, providerInputs.bindpass]
        });
        throw new BadRequestError({ message: sanitizedErrorMessage });
      }
    } else {
      throw new BadRequestError({

@@ -278,7 +301,7 @@ export const LdapProvider = (): TDynamicProviderFns => {
    return { entityId };
  };

  const renew = async (_inputs: unknown, entityId: string) => {
    // No renewal necessary
    return { entityId };
  };
@@ -505,6 +505,91 @@ export const DynamicSecretGithubSchema = z.object({
    .describe("The private key generated for your GitHub App.")
});

export const DynamicSecretCouchbaseSchema = z.object({
  url: z.string().url().trim().min(1).describe("Couchbase Cloud API URL"),
  orgId: z.string().trim().min(1).describe("Organization ID"),
  projectId: z.string().trim().min(1).describe("Project ID"),
  clusterId: z.string().trim().min(1).describe("Cluster ID"),
  roles: z.array(z.string().trim().min(1)).min(1).describe("Roles to assign to the user"),
  buckets: z
    .union([
      z
        .string()
        .trim()
        .min(1)
        .default("*")
        .refine((val) => {
          if (val.includes(",")) {
            const buckets = val
              .split(",")
              .map((b) => b.trim())
              .filter((b) => b.length > 0);
            if (buckets.includes("*") && buckets.length > 1) {
              return false;
            }
          }
          return true;
        }, "Cannot combine '*' with other bucket names"),
      z
        .array(
          z.object({
            name: z.string().trim().min(1).describe("Bucket name"),
            scopes: z
              .array(
                z.object({
                  name: z.string().trim().min(1).describe("Scope name"),
                  collections: z.array(z.string().trim().min(1)).optional().describe("Collection names")
                })
              )
              .optional()
              .describe("Scopes within the bucket")
          })
        )
        .refine((buckets) => {
          const hasWildcard = buckets.some((bucket) => bucket.name === "*");
          if (hasWildcard && buckets.length > 1) {
            return false;
          }
          return true;
        }, "Cannot combine '*' bucket with other buckets")
    ])
    .default("*")
    .describe(
      "Bucket configuration: '*' for all buckets, scopes, and collections or array of bucket objects with specific scopes and collections"
    ),
  passwordRequirements: z
    .object({
      length: z.number().min(8, "Password must be at least 8 characters").max(128),
      required: z
        .object({
          lowercase: z.number().min(1, "At least 1 lowercase character required"),
          uppercase: z.number().min(1, "At least 1 uppercase character required"),
          digits: z.number().min(1, "At least 1 digit required"),
          symbols: z.number().min(1, "At least 1 special character required")
        })
        .refine((data) => {
          const total = Object.values(data).reduce((sum, count) => sum + count, 0);
          return total <= 128;
        }, "Sum of required characters cannot exceed 128"),
      allowedSymbols: z
        .string()
        .refine((symbols) => {
          const forbiddenChars = ["<", ">", ";", ".", "*", "&", "|", "£"];
          return !forbiddenChars.some((char) => symbols?.includes(char));
        }, "Cannot contain: < > ; . * & | £")
        .optional()
    })
    .refine((data) => {
      const total = Object.values(data.required).reduce((sum, count) => sum + count, 0);
      return total <= data.length;
    }, "Sum of required characters cannot exceed the total length")
    .optional()
    .describe("Password generation requirements for Couchbase"),
  auth: z.object({
    apiKey: z.string().trim().min(1).describe("Couchbase Cloud API Key")
  })
});

export enum DynamicSecretProviders {
  SqlDatabase = "sql-database",
  Cassandra = "cassandra",

@@ -524,7 +609,8 @@ export enum DynamicSecretProviders {
  Kubernetes = "kubernetes",
  Vertica = "vertica",
  GcpIam = "gcp-iam",
  Github = "github",
  Couchbase = "couchbase"
}

export const DynamicSecretProviderSchema = z.discriminatedUnion("type", [

@@ -546,7 +632,8 @@ export const DynamicSecretProviderSchema = z.discriminatedUnion("type", [
  z.object({ type: z.literal(DynamicSecretProviders.Kubernetes), inputs: DynamicSecretKubernetesSchema }),
  z.object({ type: z.literal(DynamicSecretProviders.Vertica), inputs: DynamicSecretVerticaSchema }),
  z.object({ type: z.literal(DynamicSecretProviders.GcpIam), inputs: DynamicSecretGcpIamSchema }),
  z.object({ type: z.literal(DynamicSecretProviders.Github), inputs: DynamicSecretGithubSchema }),
  z.object({ type: z.literal(DynamicSecretProviders.Couchbase), inputs: DynamicSecretCouchbaseSchema })
]);

export type TDynamicProviderFns = {
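For reference, a minimal set of inputs that parses against the new DynamicSecretCouchbaseSchema could look like the sketch below; the URL, IDs, role names, and API key are placeholders, not verified values.

const couchbaseInputs = DynamicSecretCouchbaseSchema.parse({
  url: "https://cloudapi.example.couchbase.com", // placeholder API base URL
  orgId: "org-id-placeholder",
  projectId: "project-id-placeholder",
  clusterId: "cluster-id-placeholder",
  roles: ["read", "write"], // placeholder role names
  // buckets defaults to "*"; a scoped example:
  buckets: [{ name: "orders", scopes: [{ name: "_default", collections: ["invoices"] }] }],
  auth: { apiKey: "api-key-placeholder" }
  // passwordRequirements is optional and omitted here
});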
@@ -3,6 +3,8 @@ import { customAlphabet } from "nanoid";
import { z } from "zod";

import { createDigestAuthRequestInterceptor } from "@app/lib/axios/digest-auth";
import { BadRequestError } from "@app/lib/errors";
import { sanitizeString } from "@app/lib/fn";
import { alphaNumericNanoId } from "@app/lib/nanoid";

import { DynamicSecretMongoAtlasSchema, TDynamicProviderFns } from "./models";

@@ -49,19 +51,25 @@ export const MongoAtlasProvider = (): TDynamicProviderFns => {
    const providerInputs = await validateProviderInputs(inputs);
    const client = await $getClient(providerInputs);

    try {
      const isConnected = await client({
        method: "GET",
        url: `v2/groups/${providerInputs.groupId}/databaseUsers`,
        params: { itemsPerPage: 1 }
      }).then(() => true);
      return isConnected;
    } catch (error) {
      const errorMessage = (error as AxiosError).response
        ? JSON.stringify((error as AxiosError).response?.data)
        : (error as Error)?.message;
      const sanitizedErrorMessage = sanitizeString({
        unsanitizedString: errorMessage,
        tokens: [providerInputs.adminPublicKey, providerInputs.adminPrivateKey, providerInputs.groupId]
      });
      throw new BadRequestError({
        message: `Failed to connect with provider: ${sanitizedErrorMessage}`
      });
    }
  };

  const create = async (data: {

@@ -77,25 +85,39 @@ export const MongoAtlasProvider = (): TDynamicProviderFns => {
    const username = generateUsername(usernameTemplate, identity);
    const password = generatePassword();
    const expiration = new Date(expireAt).toISOString();
    try {
      await client({
        method: "POST",
        url: `/v2/groups/${providerInputs.groupId}/databaseUsers`,
        data: {
          roles: providerInputs.roles,
          scopes: providerInputs.scopes,
          deleteAfterDate: expiration,
          username,
          password,
          databaseName: "admin",
          groupId: providerInputs.groupId
        }
      });
      return { entityId: username, data: { DB_USERNAME: username, DB_PASSWORD: password } };
    } catch (error) {
      const errorMessage = (error as AxiosError).response
        ? JSON.stringify((error as AxiosError).response?.data)
        : (error as Error)?.message;
      const sanitizedErrorMessage = sanitizeString({
        unsanitizedString: errorMessage,
        tokens: [
          username,
          password,
          providerInputs.adminPublicKey,
          providerInputs.adminPrivateKey,
          providerInputs.groupId
        ]
      });
      throw new BadRequestError({
        message: `Failed to create lease from provider: ${sanitizedErrorMessage}`
      });
    }
  };

  const revoke = async (inputs: unknown, entityId: string) => {

@@ -111,15 +133,23 @@ export const MongoAtlasProvider = (): TDynamicProviderFns => {
      throw err;
    });
    if (isExisting) {
      try {
        await client({
          method: "DELETE",
          url: `/v2/groups/${providerInputs.groupId}/databaseUsers/admin/${username}`
        });
      } catch (error) {
        const errorMessage = (error as AxiosError).response
          ? JSON.stringify((error as AxiosError).response?.data)
          : (error as Error)?.message;
        const sanitizedErrorMessage = sanitizeString({
          unsanitizedString: errorMessage,
          tokens: [username, providerInputs.adminPublicKey, providerInputs.adminPrivateKey, providerInputs.groupId]
        });
        throw new BadRequestError({
          message: `Failed to revoke lease from provider: ${sanitizedErrorMessage}`
        });
      }
    }

    return { entityId: username };

@@ -132,21 +162,29 @@ export const MongoAtlasProvider = (): TDynamicProviderFns => {
    const username = entityId;
    const expiration = new Date(expireAt).toISOString();

    try {
      await client({
        method: "PATCH",
        url: `/v2/groups/${providerInputs.groupId}/databaseUsers/admin/${username}`,
        data: {
          deleteAfterDate: expiration,
          databaseName: "admin",
          groupId: providerInputs.groupId
        }
      });
      return { entityId: username };
    } catch (error) {
      const errorMessage = (error as AxiosError).response
        ? JSON.stringify((error as AxiosError).response?.data)
        : (error as Error)?.message;
      const sanitizedErrorMessage = sanitizeString({
        unsanitizedString: errorMessage,
        tokens: [username, providerInputs.adminPublicKey, providerInputs.adminPrivateKey, providerInputs.groupId]
      });
      throw new BadRequestError({
        message: `Failed to renew lease from provider: ${sanitizedErrorMessage}`
      });
    }
  };

  return {
@@ -2,6 +2,8 @@ import { MongoClient } from "mongodb";
 import { customAlphabet } from "nanoid";
 import { z } from "zod";

+import { BadRequestError } from "@app/lib/errors";
+import { sanitizeString } from "@app/lib/fn";
 import { alphaNumericNanoId } from "@app/lib/nanoid";

 import { verifyHostInputValidity } from "../dynamic-secret-fns";
@@ -51,13 +53,24 @@ export const MongoDBProvider = (): TDynamicProviderFns => {
     const providerInputs = await validateProviderInputs(inputs);
     const client = await $getClient(providerInputs);

-    const isConnected = await client
-      .db(providerInputs.database)
-      .command({ ping: 1 })
-      .then(() => true);
+    try {
+      const isConnected = await client
+        .db(providerInputs.database)
+        .command({ ping: 1 })
+        .then(() => true);

       await client.close();
       return isConnected;
+    } catch (err) {
+      await client.close();
+      const sanitizedErrorMessage = sanitizeString({
+        unsanitizedString: (err as Error)?.message,
+        tokens: [providerInputs.password, providerInputs.username, providerInputs.database, providerInputs.host]
+      });
+      throw new BadRequestError({
+        message: `Failed to connect with provider: ${sanitizedErrorMessage}`
+      });
+    }
   };

   const create = async (data: { inputs: unknown; usernameTemplate?: string | null; identity?: { name: string } }) => {
@@ -68,16 +81,27 @@ export const MongoDBProvider = (): TDynamicProviderFns => {
     const username = generateUsername(usernameTemplate, identity);
     const password = generatePassword();

-    const db = client.db(providerInputs.database);
+    try {
+      const db = client.db(providerInputs.database);

       await db.command({
         createUser: username,
         pwd: password,
         roles: providerInputs.roles
       });
       await client.close();

       return { entityId: username, data: { DB_USERNAME: username, DB_PASSWORD: password } };
+    } catch (err) {
+      await client.close();
+      const sanitizedErrorMessage = sanitizeString({
+        unsanitizedString: (err as Error)?.message,
+        tokens: [username, password, providerInputs.password, providerInputs.username, providerInputs.database]
+      });
+      throw new BadRequestError({
+        message: `Failed to create lease from provider: ${sanitizedErrorMessage}`
+      });
+    }
   };

   const revoke = async (inputs: unknown, entityId: string) => {
@@ -86,13 +110,24 @@ export const MongoDBProvider = (): TDynamicProviderFns => {

     const username = entityId;

-    const db = client.db(providerInputs.database);
-    await db.command({
-      dropUser: username
-    });
-    await client.close();
+    try {
+      const db = client.db(providerInputs.database);
+      await db.command({
+        dropUser: username
+      });
+      await client.close();

       return { entityId: username };
+    } catch (err) {
+      await client.close();
+      const sanitizedErrorMessage = sanitizeString({
+        unsanitizedString: (err as Error)?.message,
+        tokens: [username, providerInputs.password, providerInputs.username, providerInputs.database]
+      });
+      throw new BadRequestError({
+        message: `Failed to revoke lease from provider: ${sanitizedErrorMessage}`
+      });
+    }
   };

   const renew = async (_inputs: unknown, entityId: string) => {
@@ -3,6 +3,8 @@ import https from "https";
 import { customAlphabet } from "nanoid";
 import { z } from "zod";

+import { BadRequestError } from "@app/lib/errors";
+import { sanitizeString } from "@app/lib/fn";
 import { logger } from "@app/lib/logger";
 import { alphaNumericNanoId } from "@app/lib/nanoid";

@@ -110,11 +112,19 @@ export const RabbitMqProvider = (): TDynamicProviderFns => {

   const validateConnection = async (inputs: unknown) => {
     const providerInputs = await validateProviderInputs(inputs);
-    const connection = await $getClient(providerInputs);
-    const infoResponse = await connection.get("/whoami").then(() => true);
-    return infoResponse;
+    try {
+      const connection = await $getClient(providerInputs);
+      const infoResponse = await connection.get("/whoami").then(() => true);
+      return infoResponse;
+    } catch (err) {
+      const sanitizedErrorMessage = sanitizeString({
+        unsanitizedString: (err as Error)?.message,
+        tokens: [providerInputs.password, providerInputs.username, providerInputs.host]
+      });
+      throw new BadRequestError({
+        message: `Failed to connect with provider: ${sanitizedErrorMessage}`
+      });
+    }
   };

   const create = async (data: { inputs: unknown; usernameTemplate?: string | null; identity?: { name: string } }) => {
@@ -125,26 +135,44 @@ export const RabbitMqProvider = (): TDynamicProviderFns => {
     const username = generateUsername(usernameTemplate, identity);
     const password = generatePassword();

-    await createRabbitMqUser({
-      axiosInstance: connection,
-      virtualHost: providerInputs.virtualHost,
-      createUser: {
-        password,
-        username,
-        tags: [...(providerInputs.tags ?? []), "infisical-user"]
-      }
-    });
+    try {
+      await createRabbitMqUser({
+        axiosInstance: connection,
+        virtualHost: providerInputs.virtualHost,
+        createUser: {
+          password,
+          username,
+          tags: [...(providerInputs.tags ?? []), "infisical-user"]
+        }
+      });

       return { entityId: username, data: { DB_USERNAME: username, DB_PASSWORD: password } };
+    } catch (err) {
+      const sanitizedErrorMessage = sanitizeString({
+        unsanitizedString: (err as Error)?.message,
+        tokens: [username, password, providerInputs.password, providerInputs.username]
+      });
+      throw new BadRequestError({
+        message: `Failed to create lease from provider: ${sanitizedErrorMessage}`
+      });
+    }
   };

   const revoke = async (inputs: unknown, entityId: string) => {
     const providerInputs = await validateProviderInputs(inputs);
     const connection = await $getClient(providerInputs);

-    await deleteRabbitMqUser({ axiosInstance: connection, usernameToDelete: entityId });
+    try {
+      await deleteRabbitMqUser({ axiosInstance: connection, usernameToDelete: entityId });

       return { entityId };
+    } catch (err) {
+      const sanitizedErrorMessage = sanitizeString({
+        unsanitizedString: (err as Error)?.message,
+        tokens: [entityId, providerInputs.password, providerInputs.username]
+      });
+      throw new BadRequestError({
+        message: `Failed to revoke lease from provider: ${sanitizedErrorMessage}`
+      });
+    }
   };

   const renew = async (_inputs: unknown, entityId: string) => {
@@ -4,6 +4,7 @@ import { customAlphabet } from "nanoid";
 import { z } from "zod";

 import { BadRequestError } from "@app/lib/errors";
+import { sanitizeString } from "@app/lib/fn";
 import { alphaNumericNanoId } from "@app/lib/nanoid";
 import { validateHandlebarTemplate } from "@app/lib/template/validate-handlebars";

@@ -112,14 +113,27 @@ export const RedisDatabaseProvider = (): TDynamicProviderFns => {

   const validateConnection = async (inputs: unknown) => {
     const providerInputs = await validateProviderInputs(inputs);
-    const connection = await $getClient(providerInputs);
-    const pingResponse = await connection
-      .ping()
-      .then(() => true)
-      .catch(() => false);
-    return pingResponse;
+    let connection;
+    try {
+      connection = await $getClient(providerInputs);
+      const pingResponse = await connection.ping().then(() => true);
+      await connection.quit();
+      return pingResponse;
+    } catch (err) {
+      if (connection) await connection.quit();
+      const sanitizedErrorMessage = sanitizeString({
+        unsanitizedString: (err as Error)?.message,
+        tokens: [
+          providerInputs.password || "",
+          providerInputs.username,
+          providerInputs.host,
+          String(providerInputs.port)
+        ]
+      });
+      throw new BadRequestError({
+        message: `Failed to connect with provider: ${sanitizedErrorMessage}`
+      });
+    }
   };

   const create = async (data: {
@@ -144,10 +158,20 @@ export const RedisDatabaseProvider = (): TDynamicProviderFns => {

     const queries = creationStatement.toString().split(";").filter(Boolean);

-    await executeTransactions(connection, queries);
+    try {
+      await executeTransactions(connection, queries);
       await connection.quit();
       return { entityId: username, data: { DB_USERNAME: username, DB_PASSWORD: password } };
+    } catch (err) {
+      await connection.quit();
+      const sanitizedErrorMessage = sanitizeString({
+        unsanitizedString: (err as Error)?.message,
+        tokens: [username, password, providerInputs.password || "", providerInputs.username]
+      });
+      throw new BadRequestError({
+        message: `Failed to create lease from provider: ${sanitizedErrorMessage}`
+      });
+    }
   };

   const revoke = async (inputs: unknown, entityId: string) => {
@@ -159,10 +183,20 @@ export const RedisDatabaseProvider = (): TDynamicProviderFns => {
     const revokeStatement = handlebars.compile(providerInputs.revocationStatement)({ username });
     const queries = revokeStatement.toString().split(";").filter(Boolean);

-    await executeTransactions(connection, queries);
+    try {
+      await executeTransactions(connection, queries);
       await connection.quit();
       return { entityId: username };
+    } catch (err) {
+      await connection.quit();
+      const sanitizedErrorMessage = sanitizeString({
+        unsanitizedString: (err as Error)?.message,
+        tokens: [username, providerInputs.password || "", providerInputs.username]
+      });
+      throw new BadRequestError({
+        message: `Failed to revoke lease from provider: ${sanitizedErrorMessage}`
+      });
+    }
   };

   const renew = async (inputs: unknown, entityId: string, expireAt: number) => {
@@ -176,13 +210,23 @@ export const RedisDatabaseProvider = (): TDynamicProviderFns => {

     const renewStatement = handlebars.compile(providerInputs.renewStatement)({ username, expiration });

-    if (renewStatement) {
-      const queries = renewStatement.toString().split(";").filter(Boolean);
-      await executeTransactions(connection, queries);
+    try {
+      if (renewStatement) {
+        const queries = renewStatement.toString().split(";").filter(Boolean);
+        await executeTransactions(connection, queries);
+      }
+      await connection.quit();
+      return { entityId: username };
+    } catch (err) {
+      await connection.quit();
+      const sanitizedErrorMessage = sanitizeString({
+        unsanitizedString: (err as Error)?.message,
+        tokens: [username, providerInputs.password || "", providerInputs.username]
+      });
+      throw new BadRequestError({
+        message: `Failed to renew lease from provider: ${sanitizedErrorMessage}`
+      });
     }
-
-    await connection.quit();
-    return { entityId: username };
   };

   return {
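
The Redis hunks above also change where the client handle lives: it is declared before the try so the catch can close it, and it is only closed there if it was actually assigned. A rough sketch of that control flow, with RedisLikeConnection as an assumed stand-in for whatever $getClient returns:

interface RedisLikeConnection {
  ping(): Promise<unknown>;
  quit(): Promise<unknown>;
}

async function validateRedisConnection(getClient: () => Promise<RedisLikeConnection>): Promise<boolean> {
  let connection: RedisLikeConnection | undefined;
  try {
    connection = await getClient();
    const pingResponse = await connection.ping().then(() => true);
    await connection.quit();
    return pingResponse;
  } catch (err) {
    // getClient itself may have thrown, so the handle can still be undefined here.
    if (connection) await connection.quit();
    throw err; // the real providers sanitize the message and rethrow a BadRequestError instead
  }
}
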
@@ -4,6 +4,7 @@ import odbc from "odbc";
 import { z } from "zod";

 import { BadRequestError } from "@app/lib/errors";
+import { sanitizeString } from "@app/lib/fn";
 import { alphaNumericNanoId } from "@app/lib/nanoid";
 import { validateHandlebarTemplate } from "@app/lib/template/validate-handlebars";

@@ -67,25 +68,41 @@ export const SapAseProvider = (): TDynamicProviderFns => {

   const validateConnection = async (inputs: unknown) => {
     const providerInputs = await validateProviderInputs(inputs);
-    const masterClient = await $getClient(providerInputs, true);
-    const client = await $getClient(providerInputs);
+    let masterClient;
+    let client;
+    try {
+      masterClient = await $getClient(providerInputs, true);
+      client = await $getClient(providerInputs);

       const [resultFromMasterDatabase] = await masterClient.query<{ version: string }>("SELECT @@VERSION AS version");
       const [resultFromSelectedDatabase] = await client.query<{ version: string }>("SELECT @@VERSION AS version");

       if (!resultFromSelectedDatabase.version) {
+        throw new BadRequestError({
+          message: "Failed to validate SAP ASE connection, version query failed"
+        });
+      }
+
+      if (resultFromMasterDatabase.version !== resultFromSelectedDatabase.version) {
+        throw new BadRequestError({
+          message: "Failed to validate SAP ASE connection (master), version mismatch"
+        });
+      }
+
+      await masterClient.close();
+      await client.close();
+      return true;
+    } catch (err) {
+      if (masterClient) await masterClient.close();
+      if (client) await client.close();
+      const sanitizedErrorMessage = sanitizeString({
+        unsanitizedString: (err as Error)?.message,
+        tokens: [providerInputs.password, providerInputs.username, providerInputs.host, providerInputs.database]
+      });
       throw new BadRequestError({
-        message: "Failed to validate SAP ASE connection, version query failed"
+        message: `Failed to connect with provider: ${sanitizedErrorMessage}`
       });
     }
-
-    if (resultFromMasterDatabase.version !== resultFromSelectedDatabase.version) {
-      throw new BadRequestError({
-        message: "Failed to validate SAP ASE connection (master), version mismatch"
-      });
-    }
-
-    return true;
   };

   const create = async (data: { inputs: unknown; usernameTemplate?: string | null; identity?: { name: string } }) => {
@@ -105,16 +122,26 @@ export const SapAseProvider = (): TDynamicProviderFns => {

     const queries = creationStatement.trim().replaceAll("\n", "").split(";").filter(Boolean);

-    for await (const query of queries) {
-      // If it's an adduser query, we need to first call sp_addlogin on the MASTER database.
-      // If not done, then the newly created user won't be able to authenticate.
-      await (query.startsWith(SapCommands.CreateLogin) ? masterClient : client).query(query);
+    try {
+      for await (const query of queries) {
+        // If it's an adduser query, we need to first call sp_addlogin on the MASTER database.
+        // If not done, then the newly created user won't be able to authenticate.
+        await (query.startsWith(SapCommands.CreateLogin) ? masterClient : client).query(query);
+      }
+      await masterClient.close();
+      await client.close();
+      return { entityId: username, data: { DB_USERNAME: username, DB_PASSWORD: password } };
+    } catch (err) {
+      await masterClient.close();
+      await client.close();
+      const sanitizedErrorMessage = sanitizeString({
+        unsanitizedString: (err as Error)?.message,
+        tokens: [username, password, providerInputs.password, providerInputs.username, providerInputs.database]
+      });
+      throw new BadRequestError({
+        message: `Failed to create lease from provider: ${sanitizedErrorMessage}`
+      });
     }
-
-    await masterClient.close();
-    await client.close();
-
-    return { entityId: username, data: { DB_USERNAME: username, DB_PASSWORD: password } };
   };

   const revoke = async (inputs: unknown, username: string) => {
@@ -140,14 +167,24 @@ export const SapAseProvider = (): TDynamicProviderFns => {
       }
     }

-    for await (const query of queries) {
-      await (query.startsWith(SapCommands.DropLogin) ? masterClient : client).query(query);
+    try {
+      for await (const query of queries) {
+        await (query.startsWith(SapCommands.DropLogin) ? masterClient : client).query(query);
+      }
+      await masterClient.close();
+      await client.close();
+      return { entityId: username };
+    } catch (err) {
+      await masterClient.close();
+      await client.close();
+      const sanitizedErrorMessage = sanitizeString({
+        unsanitizedString: (err as Error)?.message,
+        tokens: [username, providerInputs.password, providerInputs.username, providerInputs.database]
+      });
+      throw new BadRequestError({
+        message: `Failed to revoke lease from provider: ${sanitizedErrorMessage}`
+      });
     }
-
-    await masterClient.close();
-    await client.close();
-
-    return { entityId: username };
   };

   const renew = async (_: unknown, username: string) => {
@@ -10,6 +10,7 @@ import { customAlphabet } from "nanoid";
 import { z } from "zod";

 import { BadRequestError } from "@app/lib/errors";
+import { sanitizeString } from "@app/lib/fn";
 import { alphaNumericNanoId } from "@app/lib/nanoid";
 import { validateHandlebarTemplate } from "@app/lib/template/validate-handlebars";

@@ -83,19 +84,26 @@ export const SapHanaProvider = (): TDynamicProviderFns => {

   const validateConnection = async (inputs: unknown) => {
     const providerInputs = await validateProviderInputs(inputs);
-    const client = await $getClient(providerInputs);
-    const testResult = await new Promise<boolean>((resolve, reject) => {
-      client.exec("SELECT 1 FROM DUMMY;", (err: any) => {
-        if (err) {
-          reject();
-        }
-        resolve(true);
+    try {
+      const client = await $getClient(providerInputs);
+      const testResult = await new Promise<boolean>((resolve, reject) => {
+        client.exec("SELECT 1 FROM DUMMY;", (err: any) => {
+          if (err) {
+            return reject(err);
+          }
+          resolve(true);
+        });
       });
-    });
-
-    return testResult;
+      return testResult;
+    } catch (err) {
+      const sanitizedErrorMessage = sanitizeString({
+        unsanitizedString: (err as Error)?.message,
+        tokens: [providerInputs.password, providerInputs.username, providerInputs.host]
+      });
+      throw new BadRequestError({
+        message: `Failed to connect with provider: ${sanitizedErrorMessage}`
+      });
+    }
   };

   const create = async (data: {
@@ -119,18 +127,22 @@ export const SapHanaProvider = (): TDynamicProviderFns => {
     });

     const queries = creationStatement.toString().split(";").filter(Boolean);
-    for await (const query of queries) {
-      await new Promise((resolve, reject) => {
-        client.exec(query, (err: any) => {
-          if (err) {
-            reject(
-              new BadRequestError({
-                message: err.message
-              })
-            );
-          }
-          resolve(true);
+    try {
+      for await (const query of queries) {
+        await new Promise((resolve, reject) => {
+          client.exec(query, (err: any) => {
+            if (err) return reject(err);
+            resolve(true);
+          });
         });
+      }
+    } catch (err) {
+      const sanitizedErrorMessage = sanitizeString({
+        unsanitizedString: (err as Error)?.message,
+        tokens: [username, password, providerInputs.password, providerInputs.username]
+      });
+      throw new BadRequestError({
+        message: `Failed to create lease from provider: ${sanitizedErrorMessage}`
       });
     }

@@ -142,18 +154,24 @@ export const SapHanaProvider = (): TDynamicProviderFns => {
     const client = await $getClient(providerInputs);
     const revokeStatement = handlebars.compile(providerInputs.revocationStatement)({ username });
     const queries = revokeStatement.toString().split(";").filter(Boolean);
-    for await (const query of queries) {
-      await new Promise((resolve, reject) => {
-        client.exec(query, (err: any) => {
-          if (err) {
-            reject(
-              new BadRequestError({
-                message: err.message
-              })
-            );
-          }
-          resolve(true);
+    try {
+      for await (const query of queries) {
+        await new Promise((resolve, reject) => {
+          client.exec(query, (err: any) => {
+            if (err) {
+              reject(err);
+            }
+            resolve(true);
+          });
         });
+      }
+    } catch (err) {
+      const sanitizedErrorMessage = sanitizeString({
+        unsanitizedString: (err as Error)?.message,
+        tokens: [username, providerInputs.password, providerInputs.username]
+      });
+      throw new BadRequestError({
+        message: `Failed to revoke lease from provider: ${sanitizedErrorMessage}`
       });
     }

@@ -174,16 +192,20 @@ export const SapHanaProvider = (): TDynamicProviderFns => {
       await new Promise((resolve, reject) => {
         client.exec(query, (err: any) => {
           if (err) {
-            reject(
-              new BadRequestError({
-                message: err.message
-              })
-            );
+            reject(err);
           }
           resolve(true);
         });
       });
     }
+    } catch (err) {
+      const sanitizedErrorMessage = sanitizeString({
+        unsanitizedString: (err as Error)?.message,
+        tokens: [entityId, providerInputs.password, providerInputs.username]
+      });
+      throw new BadRequestError({
+        message: `Failed to renew lease from provider: ${sanitizedErrorMessage}`
+      });
     } finally {
       client.disconnect();
     }
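
For SAP HANA, the callback-style client.exec is now wrapped in a Promise that rejects with the original driver error instead of a pre-wrapped BadRequestError, so the surrounding catch can sanitize one message for the whole batch. A small sketch under that assumption (HanaLikeClient is a placeholder type, not the actual driver interface):

interface HanaLikeClient {
  exec(sql: string, cb: (err?: Error) => void): void;
}

const execQuery = (client: HanaLikeClient, sql: string): Promise<boolean> =>
  new Promise((resolve, reject) => {
    client.exec(sql, (err) => {
      // Propagate the raw error; the caller's catch block owns sanitization and wrapping.
      if (err) return reject(err);
      resolve(true);
    });
  });
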
@@ -4,6 +4,7 @@ import snowflake from "snowflake-sdk";
 import { z } from "zod";

 import { BadRequestError } from "@app/lib/errors";
+import { sanitizeString } from "@app/lib/fn";
 import { alphaNumericNanoId } from "@app/lib/nanoid";
 import { validateHandlebarTemplate } from "@app/lib/template/validate-handlebars";

@@ -69,12 +70,10 @@ export const SnowflakeProvider = (): TDynamicProviderFns => {

   const validateConnection = async (inputs: unknown) => {
     const providerInputs = await validateProviderInputs(inputs);
-    const client = await $getClient(providerInputs);
-
-    let isValidConnection: boolean;
+    let client;

     try {
-      isValidConnection = await Promise.race([
+      client = await $getClient(providerInputs);
+      const isValidConnection = await Promise.race([
         client.isValidAsync(),
         new Promise((resolve) => {
           setTimeout(resolve, 10000);
@@ -82,11 +81,18 @@ export const SnowflakeProvider = (): TDynamicProviderFns => {
           throw new BadRequestError({ message: "Unable to establish connection - verify credentials" });
         })
       ]);
+      return isValidConnection;
+    } catch (err) {
+      const sanitizedErrorMessage = sanitizeString({
+        unsanitizedString: (err as Error)?.message,
+        tokens: [providerInputs.password, providerInputs.username, providerInputs.accountId, providerInputs.orgId]
+      });
+      throw new BadRequestError({
+        message: `Failed to connect with provider: ${sanitizedErrorMessage}`
+      });
     } finally {
-      client.destroy(noop);
+      if (client) client.destroy(noop);
     }
-
-    return isValidConnection;
   };

   const create = async (data: {
@@ -116,13 +122,19 @@ export const SnowflakeProvider = (): TDynamicProviderFns => {
           sqlText: creationStatement,
           complete(err) {
             if (err) {
-              return reject(new BadRequestError({ name: "CreateLease", message: err.message }));
+              return reject(err);
             }

             return resolve(true);
           }
         });
       });
+    } catch (err) {
+      const sanitizedErrorMessage = sanitizeString({
+        unsanitizedString: (err as Error).message,
+        tokens: [username, password, providerInputs.password, providerInputs.username]
+      });
+      throw new BadRequestError({ message: `Failed to create lease from provider: ${sanitizedErrorMessage}` });
     } finally {
       client.destroy(noop);
     }
@@ -143,13 +155,19 @@ export const SnowflakeProvider = (): TDynamicProviderFns => {
           sqlText: revokeStatement,
           complete(err) {
             if (err) {
-              return reject(new BadRequestError({ name: "RevokeLease", message: err.message }));
+              return reject(err);
             }

             return resolve(true);
           }
         });
       });
+    } catch (err) {
+      const sanitizedErrorMessage = sanitizeString({
+        unsanitizedString: (err as Error).message,
+        tokens: [username, providerInputs.password, providerInputs.username]
+      });
+      throw new BadRequestError({ message: `Failed to revoke lease from provider: ${sanitizedErrorMessage}` });
     } finally {
       client.destroy(noop);
     }
@@ -175,13 +193,19 @@ export const SnowflakeProvider = (): TDynamicProviderFns => {
           sqlText: renewStatement,
           complete(err) {
             if (err) {
-              return reject(new BadRequestError({ name: "RenewLease", message: err.message }));
+              return reject(err);
             }

             return resolve(true);
           }
         });
       });
+    } catch (err) {
+      const sanitizedErrorMessage = sanitizeString({
+        unsanitizedString: (err as Error).message,
+        tokens: [entityId, providerInputs.password, providerInputs.username]
+      });
+      throw new BadRequestError({ message: `Failed to renew lease from provider: ${sanitizedErrorMessage}` });
     } finally {
       client.destroy(noop);
     }
@@ -3,6 +3,8 @@ import knex from "knex";
 import { z } from "zod";

 import { crypto } from "@app/lib/crypto/cryptography";
+import { BadRequestError } from "@app/lib/errors";
+import { sanitizeString } from "@app/lib/fn";
 import { GatewayProxyProtocol, withGatewayProxy } from "@app/lib/gateway";
 import { alphaNumericNanoId } from "@app/lib/nanoid";
 import { validateHandlebarTemplate } from "@app/lib/template/validate-handlebars";
@@ -212,8 +214,19 @@ export const SqlDatabaseProvider = ({ gatewayService }: TSqlDatabaseProviderDTO)
       // oracle needs from keyword
       const testStatement = providerInputs.client === SqlProviders.Oracle ? "SELECT 1 FROM DUAL" : "SELECT 1";

-      isConnected = await db.raw(testStatement).then(() => true);
-      await db.destroy();
+      try {
+        isConnected = await db.raw(testStatement).then(() => true);
+      } catch (err) {
+        const sanitizedErrorMessage = sanitizeString({
+          unsanitizedString: (err as Error)?.message,
+          tokens: [providerInputs.username]
+        });
+        throw new BadRequestError({
+          message: `Failed to connect with provider: ${sanitizedErrorMessage}`
+        });
+      } finally {
+        await db.destroy();
+      }
     };

     if (providerInputs.gatewayId) {
@@ -233,13 +246,13 @@ export const SqlDatabaseProvider = ({ gatewayService }: TSqlDatabaseProviderDTO)
     const { inputs, expireAt, usernameTemplate, identity } = data;

     const providerInputs = await validateProviderInputs(inputs);
+    const { database } = providerInputs;
     const username = generateUsername(providerInputs.client, usernameTemplate, identity);

     const password = generatePassword(providerInputs.client, providerInputs.passwordRequirements);
     const gatewayCallback = async (host = providerInputs.host, port = providerInputs.port) => {
       const db = await $getClient({ ...providerInputs, port, host });
       try {
-        const { database } = providerInputs;
         const expiration = new Date(expireAt).toISOString();

         const creationStatement = handlebars.compile(providerInputs.creationStatement, { noEscape: true })({
@@ -256,6 +269,14 @@ export const SqlDatabaseProvider = ({ gatewayService }: TSqlDatabaseProviderDTO)
           await tx.raw(query);
         }
       });
+      } catch (err) {
+        const sanitizedErrorMessage = sanitizeString({
+          unsanitizedString: (err as Error)?.message,
+          tokens: [username, password, database]
+        });
+        throw new BadRequestError({
+          message: `Failed to create lease from provider: ${sanitizedErrorMessage}`
+        });
       } finally {
         await db.destroy();
       }
@@ -283,6 +304,14 @@ export const SqlDatabaseProvider = ({ gatewayService }: TSqlDatabaseProviderDTO)
           await tx.raw(query);
         }
       });
+      } catch (err) {
+        const sanitizedErrorMessage = sanitizeString({
+          unsanitizedString: (err as Error)?.message,
+          tokens: [username, database]
+        });
+        throw new BadRequestError({
+          message: `Failed to revoke lease from provider: ${sanitizedErrorMessage}`
+        });
       } finally {
         await db.destroy();
       }
@@ -319,6 +348,14 @@ export const SqlDatabaseProvider = ({ gatewayService }: TSqlDatabaseProviderDTO)
         }
       });
       }
+      } catch (err) {
+        const sanitizedErrorMessage = sanitizeString({
+          unsanitizedString: (err as Error)?.message,
+          tokens: [database]
+        });
+        throw new BadRequestError({
+          message: `Failed to renew lease from provider: ${sanitizedErrorMessage}`
+        });
       } finally {
         await db.destroy();
       }
@@ -1,6 +1,8 @@
 import { authenticator } from "otplib";
 import { HashAlgorithms } from "otplib/core";

+import { BadRequestError } from "@app/lib/errors";
+import { sanitizeString } from "@app/lib/fn";
 import { alphaNumericNanoId } from "@app/lib/nanoid";

 import { DynamicSecretTotpSchema, TDynamicProviderFns, TotpConfigType } from "./models";
@@ -12,62 +14,84 @@ export const TotpProvider = (): TDynamicProviderFns => {
     return providerInputs;
   };

-  const validateConnection = async () => {
-    return true;
+  const validateConnection = async (inputs: unknown) => {
+    try {
+      await validateProviderInputs(inputs);
+      return true;
+    } catch (err) {
+      const sanitizedErrorMessage = sanitizeString({
+        unsanitizedString: (err as Error)?.message,
+        tokens: []
+      });
+      throw new BadRequestError({
+        message: `Failed to connect with provider: ${sanitizedErrorMessage}`
+      });
+    }
   };

-  const create = async (inputs: unknown) => {
-    const providerInputs = await validateProviderInputs(inputs);
+  const create = async (data: { inputs: unknown }) => {
+    const { inputs } = data;
+    try {
+      const providerInputs = await validateProviderInputs(inputs);

       const entityId = alphaNumericNanoId(32);
       const authenticatorInstance = authenticator.clone();

       let secret: string;
       let period: number | null | undefined;
       let digits: number | null | undefined;
       let algorithm: HashAlgorithms | null | undefined;

       if (providerInputs.configType === TotpConfigType.URL) {
         const urlObj = new URL(providerInputs.url);
         secret = urlObj.searchParams.get("secret") as string;
         const periodFromUrl = urlObj.searchParams.get("period");
         const digitsFromUrl = urlObj.searchParams.get("digits");
         const algorithmFromUrl = urlObj.searchParams.get("algorithm");

         if (periodFromUrl) {
           period = +periodFromUrl;
         }

         if (digitsFromUrl) {
           digits = +digitsFromUrl;
         }

         if (algorithmFromUrl) {
           algorithm = algorithmFromUrl.toLowerCase() as HashAlgorithms;
         }
       } else {
         secret = providerInputs.secret;
         period = providerInputs.period;
         digits = providerInputs.digits;
         algorithm = providerInputs.algorithm as unknown as HashAlgorithms;
       }

       if (digits) {
         authenticatorInstance.options = { digits };
       }

       if (algorithm) {
         authenticatorInstance.options = { algorithm };
       }

       if (period) {
         authenticatorInstance.options = { step: period };
       }

-    return {
-      entityId,
-      data: { TOTP: authenticatorInstance.generate(secret), TIME_REMAINING: authenticatorInstance.timeRemaining() }
-    };
+      return {
+        entityId,
+        data: { TOTP: authenticatorInstance.generate(secret), TIME_REMAINING: authenticatorInstance.timeRemaining() }
+      };
+    } catch (err) {
+      const sanitizedErrorMessage = sanitizeString({
+        unsanitizedString: (err as Error)?.message,
+        tokens: []
+      });
+      throw new BadRequestError({
+        message: `Failed to create lease from provider: ${sanitizedErrorMessage}`
+      });
+    }
   };

   const revoke = async (_inputs: unknown, entityId: string) => {
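
The rewritten TOTP create() keeps the same generation flow as before, just inside a try/catch. The sketch below condenses that flow using only the otplib calls visible in the hunks above (clone, the options setter, generate, timeRemaining); the secret value in the usage comment is illustrative only.

import { authenticator } from "otplib";
import { HashAlgorithms } from "otplib/core";

const generateTotp = (secret: string, period?: number, digits?: number, algorithm?: HashAlgorithms) => {
  const instance = authenticator.clone();
  // Sequential assignments mirror the provider code above; otplib merges newly assigned options into the existing ones.
  if (digits) instance.options = { digits };
  if (algorithm) instance.options = { algorithm };
  if (period) instance.options = { step: period };
  return { TOTP: instance.generate(secret), TIME_REMAINING: instance.timeRemaining() };
};

// Example (secret shown is a placeholder): generateTotp("JBSWY3DPEHPK3PXP", 30, 6);
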
@@ -4,6 +4,7 @@ import { z } from "zod";

 import { crypto } from "@app/lib/crypto/cryptography";
 import { BadRequestError } from "@app/lib/errors";
+import { sanitizeString } from "@app/lib/fn";
 import { GatewayProxyProtocol, withGatewayProxy } from "@app/lib/gateway";
 import { logger } from "@app/lib/logger";
 import { alphaNumericNanoId } from "@app/lib/nanoid";
@@ -275,6 +276,14 @@ export const VerticaProvider = ({ gatewayService }: TVerticaProviderDTO): TDynam
             await client.raw(trimmedQuery);
           }
         }
+      } catch (err) {
+        const sanitizedErrorMessage = sanitizeString({
+          unsanitizedString: (err as Error)?.message,
+          tokens: [username, password, providerInputs.username, providerInputs.password]
+        });
+        throw new BadRequestError({
+          message: `Failed to create lease from provider: ${sanitizedErrorMessage}`
+        });
       } finally {
         if (client) await client.destroy();
       }
@@ -339,6 +348,14 @@ export const VerticaProvider = ({ gatewayService }: TVerticaProviderDTO): TDynam
             await client.raw(trimmedQuery);
           }
         }
+      } catch (err) {
+        const sanitizedErrorMessage = sanitizeString({
+          unsanitizedString: (err as Error)?.message,
+          tokens: [username, providerInputs.username, providerInputs.password]
+        });
+        throw new BadRequestError({
+          message: `Failed to revoke lease from provider: ${sanitizedErrorMessage}`
+        });
       } finally {
         if (client) await client.destroy();
       }
@@ -3,7 +3,7 @@ import { z } from "zod";

 import { logger } from "@app/lib/logger";

-import { EventSchema, TopicName } from "./types";
+import { BusEventSchema, TopicName } from "./types";

 export const eventBusFactory = (redis: Redis) => {
   const publisher = redis.duplicate();
@@ -28,7 +28,7 @@ export const eventBusFactory = (redis: Redis) => {
    * @param topic - The topic to publish the event to.
    * @param event - The event data to publish.
    */
-  const publish = async <T extends z.input<typeof EventSchema>>(topic: TopicName, event: T) => {
+  const publish = async <T extends z.input<typeof BusEventSchema>>(topic: TopicName, event: T) => {
     const json = JSON.stringify(event);

     return publisher.publish(topic, json, (err) => {
@@ -44,7 +44,7 @@ export const eventBusFactory = (redis: Redis) => {
    * @template T - The type of the event data, which should match the schema defined in EventSchema.
    * @returns A function that can be called to unsubscribe from the event bus.
    */
-  const subscribe = <T extends z.infer<typeof EventSchema>>(fn: (data: T) => Promise<void> | void) => {
+  const subscribe = <T extends z.infer<typeof BusEventSchema>>(fn: (data: T) => Promise<void> | void) => {
     // Not using async await cause redis client's `on` method does not expect async listeners.
     const listener = (channel: string, message: string) => {
       try {
@@ -7,7 +7,7 @@ import { logger } from "@app/lib/logger";

 import { TEventBusService } from "./event-bus-service";
 import { createEventStreamClient, EventStreamClient, IEventStreamClientOpts } from "./event-sse-stream";
-import { EventData, RegisteredEvent, toBusEventName } from "./types";
+import { BusEvent, RegisteredEvent } from "./types";

 const AUTH_REFRESH_INTERVAL = 60 * 1000;
 const HEART_BEAT_INTERVAL = 15 * 1000;
@@ -69,8 +69,8 @@ export const sseServiceFactory = (bus: TEventBusService, redis: Redis) => {
     }
   };

-  function filterEventsForClient(client: EventStreamClient, event: EventData, registered: RegisteredEvent[]) {
-    const eventType = toBusEventName(event.data.eventType);
+  function filterEventsForClient(client: EventStreamClient, event: BusEvent, registered: RegisteredEvent[]) {
+    const eventType = event.data.event;
     const match = registered.find((r) => r.event === eventType);
     if (!match) return;

@@ -12,7 +12,7 @@ import { KeyStorePrefixes } from "@app/keystore/keystore";
 import { conditionsMatcher } from "@app/lib/casl";
 import { logger } from "@app/lib/logger";

-import { EventData, RegisteredEvent } from "./types";
+import { BusEvent, RegisteredEvent } from "./types";

 export const getServerSentEventsHeaders = () =>
   ({
@@ -55,7 +55,7 @@ export type EventStreamClient = {
   id: string;
   stream: Readable;
   open: () => Promise<void>;
-  send: (data: EventMessage | EventData) => void;
+  send: (data: EventMessage | BusEvent) => void;
   ping: () => Promise<void>;
   refresh: () => Promise<void>;
   close: () => void;
@@ -73,15 +73,12 @@ export function createEventStreamClient(redis: Redis, options: IEventStreamClien
     return {
       subject: options.type,
       action: "subscribe",
-      conditions: {
-        eventType: r.event,
-        ...(hasConditions
-          ? {
-              environment: r.conditions?.environmentSlug ?? "",
-              secretPath: { $glob: secretPath }
-            }
-          : {})
-      }
+      conditions: hasConditions
+        ? {
+            environment: r.conditions?.environmentSlug ?? "",
+            secretPath: { $glob: secretPath }
+          }
+        : undefined
     };
   });

@@ -98,7 +95,7 @@ export function createEventStreamClient(redis: Redis, options: IEventStreamClien
   // We will manually push data to the stream
   stream._read = () => {};

-  const send = (data: EventMessage | EventData) => {
+  const send = (data: EventMessage | BusEvent) => {
     const chunk = serializeSseEvent(data);
     if (!stream.push(chunk)) {
       logger.debug("Backpressure detected: dropped manual event");
@@ -1,7 +1,8 @@
|
|||||||
import { z } from "zod";
|
import { z } from "zod";
|
||||||
|
|
||||||
import { ProjectType } from "@app/db/schemas";
|
import { ProjectType } from "@app/db/schemas";
|
||||||
import { Event, EventType } from "@app/ee/services/audit-log/audit-log-types";
|
|
||||||
|
import { ProjectPermissionSecretEventActions } from "../permission/project-permission";
|
||||||
|
|
||||||
export enum TopicName {
|
export enum TopicName {
|
||||||
CoreServers = "infisical::core-servers"
|
CoreServers = "infisical::core-servers"
|
||||||
@@ -10,84 +11,44 @@ export enum TopicName {
|
|||||||
export enum BusEventName {
|
export enum BusEventName {
|
||||||
CreateSecret = "secret:create",
|
CreateSecret = "secret:create",
|
||||||
UpdateSecret = "secret:update",
|
UpdateSecret = "secret:update",
|
||||||
DeleteSecret = "secret:delete"
|
DeleteSecret = "secret:delete",
|
||||||
|
ImportMutation = "secret:import-mutation"
|
||||||
}
|
}
|
||||||
|
|
||||||
type PublisableEventTypes =
|
export const Mappings = {
|
||||||
| EventType.CREATE_SECRET
|
BusEventToAction(input: BusEventName) {
|
||||||
| EventType.CREATE_SECRETS
|
switch (input) {
|
||||||
| EventType.DELETE_SECRET
|
case BusEventName.CreateSecret:
|
||||||
| EventType.DELETE_SECRETS
|
return ProjectPermissionSecretEventActions.SubscribeCreated;
|
||||||
-  | EventType.UPDATE_SECRETS
-  | EventType.UPDATE_SECRET;
-
-export function toBusEventName(input: EventType) {
-  switch (input) {
-    case EventType.CREATE_SECRET:
-    case EventType.CREATE_SECRETS:
-      return BusEventName.CreateSecret;
-    case EventType.UPDATE_SECRET:
-    case EventType.UPDATE_SECRETS:
-      return BusEventName.UpdateSecret;
-    case EventType.DELETE_SECRET:
-    case EventType.DELETE_SECRETS:
-      return BusEventName.DeleteSecret;
-    default:
-      return null;
-  }
-}
-
-const isBulkEvent = (event: Event): event is Extract<Event, { metadata: { secrets: Array<unknown> } }> => {
-  return event.type.endsWith("-secrets"); // Feels so wrong
-};
-
-export const toPublishableEvent = (event: Event) => {
-  const name = toBusEventName(event.type);
-
-  if (!name) return null;
-
-  const e = event as Extract<Event, { type: PublisableEventTypes }>;
-
-  if (isBulkEvent(e)) {
-    return {
-      name,
-      isBulk: true,
-      data: {
-        eventType: e.type,
-        payload: e.metadata.secrets.map((s) => ({
-          environment: e.metadata.environment,
-          secretPath: e.metadata.secretPath,
-          ...s
-        }))
-      }
-    } as const;
-  }
-
-  return {
-    name,
-    isBulk: false,
-    data: {
-      eventType: e.type,
-      payload: {
-        ...e.metadata,
-        environment: e.metadata.environment
-      }
-    }
-  } as const;
-};
+    case BusEventName.DeleteSecret:
+      return ProjectPermissionSecretEventActions.SubscribeDeleted;
+    case BusEventName.ImportMutation:
+      return ProjectPermissionSecretEventActions.SubscribeImportMutations;
+    case BusEventName.UpdateSecret:
+      return ProjectPermissionSecretEventActions.SubscribeUpdated;
+    default:
+      throw new Error("Unknown bus event name");
+  }
+}
+};
 
 export const EventName = z.nativeEnum(BusEventName);
 
 const EventSecretPayload = z.object({
-  secretPath: z.string().optional(),
   secretId: z.string(),
+  secretPath: z.string().optional(),
   secretKey: z.string(),
   environment: z.string()
 });
 
+const EventImportMutationPayload = z.object({
+  secretPath: z.string(),
+  environment: z.string()
+});
+
 export type EventSecret = z.infer<typeof EventSecretPayload>;
 
-export const EventSchema = z.object({
+export const BusEventSchema = z.object({
   datacontenttype: z.literal("application/json").optional().default("application/json"),
   type: z.nativeEnum(ProjectType),
   source: z.string(),
@@ -95,25 +56,38 @@ export const EventSchema = z.object({
     .string()
     .optional()
     .default(() => new Date().toISOString()),
-  data: z.discriminatedUnion("eventType", [
+  data: z.discriminatedUnion("event", [
     z.object({
       specversion: z.number().optional().default(1),
-      eventType: z.enum([EventType.CREATE_SECRET, EventType.UPDATE_SECRET, EventType.DELETE_SECRET]),
-      payload: EventSecretPayload
+      event: z.enum([BusEventName.CreateSecret, BusEventName.DeleteSecret, BusEventName.UpdateSecret]),
+      payload: z.union([EventSecretPayload, EventSecretPayload.array()])
     }),
     z.object({
       specversion: z.number().optional().default(1),
-      eventType: z.enum([EventType.CREATE_SECRETS, EventType.UPDATE_SECRETS, EventType.DELETE_SECRETS]),
-      payload: EventSecretPayload.array()
+      event: z.enum([BusEventName.ImportMutation]),
+      payload: z.union([EventImportMutationPayload, EventImportMutationPayload.array()])
     })
     // Add more event types as needed
   ])
 });
 
-export type EventData = z.infer<typeof EventSchema>;
+export type BusEvent = z.infer<typeof BusEventSchema>;
+
+type PublishableEventPayload = z.input<typeof BusEventSchema>["data"];
+type PublishableSecretEvent = Extract<
+  PublishableEventPayload,
+  { event: Exclude<BusEventName, BusEventName.ImportMutation> }
+>["payload"];
+
+export type PublishableEvent = {
+  created?: PublishableSecretEvent;
+  updated?: PublishableSecretEvent;
+  deleted?: PublishableSecretEvent;
+  importMutation?: Extract<PublishableEventPayload, { event: BusEventName.ImportMutation }>["payload"];
+};
 
 export const EventRegisterSchema = z.object({
-  event: EventName,
+  event: z.nativeEnum(BusEventName),
   conditions: z
     .object({
       secretPath: z.string().optional().default("/"),
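For orientation, here is a minimal sketch of a payload the reworked BusEventSchema above would accept. The enum members and field names come from this diff; the ProjectType value, the source string, and the relative import path are illustrative assumptions only.

// Sketch only, not part of the change set.
import { ProjectType } from "@app/db/schemas";
import { BusEventName, BusEventSchema } from "./types"; // assumed path to the event types module

const event = BusEventSchema.parse({
  type: ProjectType.SecretManager, // assumed enum member; any ProjectType value satisfies the schema
  source: "infisical-api", // assumed source label
  data: {
    event: BusEventName.UpdateSecret,
    // payload now accepts a single item or an array, per the z.union above
    payload: [{ secretId: "secret-id-123", secretKey: "DB_PASSWORD", environment: "dev", secretPath: "/" }]
  }
});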
@@ -1,4 +1,5 @@
 import { ForbiddenError } from "@casl/ability";
+import { Knex } from "knex";
 
 import { OrgMembershipStatus, TableName, TLdapConfigsUpdate, TUsers } from "@app/db/schemas";
 import { TGroupDALFactory } from "@app/ee/services/group/group-dal";
@@ -45,7 +46,7 @@ import { searchGroups, testLDAPConfig } from "./ldap-fns";
 import { TLdapGroupMapDALFactory } from "./ldap-group-map-dal";
 
 type TLdapConfigServiceFactoryDep = {
-  ldapConfigDAL: Pick<TLdapConfigDALFactory, "create" | "update" | "findOne">;
+  ldapConfigDAL: Pick<TLdapConfigDALFactory, "create" | "update" | "findOne" | "transaction">;
   ldapGroupMapDAL: Pick<TLdapGroupMapDALFactory, "find" | "create" | "delete" | "findLdapGroupMapsByLdapConfigId">;
   orgMembershipDAL: Pick<TOrgMembershipDALFactory, "create">;
   orgDAL: Pick<
@@ -131,6 +132,19 @@ export const ldapConfigServiceFactory = ({
       orgId
     });
 
+    const isConnected = await testLDAPConfig({
+      bindDN,
+      bindPass,
+      caCert,
+      url
+    });
+
+    if (!isConnected) {
+      throw new BadRequestError({
+        message: "Failed to establish connection to LDAP directory. Please verify that your credentials are correct."
+      });
+    }
+
     const ldapConfig = await ldapConfigDAL.create({
       orgId,
       isActive,
@@ -148,6 +162,50 @@ export const ldapConfigServiceFactory = ({
     return ldapConfig;
   };
 
+  const getLdapCfg = async (filter: { orgId: string; isActive?: boolean; id?: string }, tx?: Knex) => {
+    const ldapConfig = await ldapConfigDAL.findOne(filter, tx);
+    if (!ldapConfig) {
+      throw new NotFoundError({
+        message: `Failed to find organization LDAP data in organization with ID '${filter.orgId}'`
+      });
+    }
+
+    const { decryptor } = await kmsService.createCipherPairWithDataKey({
+      type: KmsDataKey.Organization,
+      orgId: ldapConfig.orgId
+    });
+
+    let bindDN = "";
+    if (ldapConfig.encryptedLdapBindDN) {
+      bindDN = decryptor({ cipherTextBlob: ldapConfig.encryptedLdapBindDN }).toString();
+    }
+
+    let bindPass = "";
+    if (ldapConfig.encryptedLdapBindPass) {
+      bindPass = decryptor({ cipherTextBlob: ldapConfig.encryptedLdapBindPass }).toString();
+    }
+
+    let caCert = "";
+    if (ldapConfig.encryptedLdapCaCertificate) {
+      caCert = decryptor({ cipherTextBlob: ldapConfig.encryptedLdapCaCertificate }).toString();
+    }
+
+    return {
+      id: ldapConfig.id,
+      organization: ldapConfig.orgId,
+      isActive: ldapConfig.isActive,
+      url: ldapConfig.url,
+      bindDN,
+      bindPass,
+      uniqueUserAttribute: ldapConfig.uniqueUserAttribute,
+      searchBase: ldapConfig.searchBase,
+      searchFilter: ldapConfig.searchFilter,
+      groupSearchBase: ldapConfig.groupSearchBase,
+      groupSearchFilter: ldapConfig.groupSearchFilter,
+      caCert
+    };
+  };
+
   const updateLdapCfg = async ({
     actor,
     actorId,
@@ -202,53 +260,25 @@ export const ldapConfigServiceFactory = ({
       updateQuery.encryptedLdapCaCertificate = encryptor({ plainText: Buffer.from(caCert) }).cipherTextBlob;
     }
 
-    const [ldapConfig] = await ldapConfigDAL.update({ orgId }, updateQuery);
-
-    return ldapConfig;
-  };
-
-  const getLdapCfg = async (filter: { orgId: string; isActive?: boolean; id?: string }) => {
-    const ldapConfig = await ldapConfigDAL.findOne(filter);
-    if (!ldapConfig) {
-      throw new NotFoundError({
-        message: `Failed to find organization LDAP data in organization with ID '${filter.orgId}'`
-      });
-    }
-
-    const { decryptor } = await kmsService.createCipherPairWithDataKey({
-      type: KmsDataKey.Organization,
-      orgId: ldapConfig.orgId
-    });
-
-    let bindDN = "";
-    if (ldapConfig.encryptedLdapBindDN) {
-      bindDN = decryptor({ cipherTextBlob: ldapConfig.encryptedLdapBindDN }).toString();
-    }
-
-    let bindPass = "";
-    if (ldapConfig.encryptedLdapBindPass) {
-      bindPass = decryptor({ cipherTextBlob: ldapConfig.encryptedLdapBindPass }).toString();
-    }
-
-    let caCert = "";
-    if (ldapConfig.encryptedLdapCaCertificate) {
-      caCert = decryptor({ cipherTextBlob: ldapConfig.encryptedLdapCaCertificate }).toString();
-    }
-
-    return {
-      id: ldapConfig.id,
-      organization: ldapConfig.orgId,
-      isActive: ldapConfig.isActive,
-      url: ldapConfig.url,
-      bindDN,
-      bindPass,
-      uniqueUserAttribute: ldapConfig.uniqueUserAttribute,
-      searchBase: ldapConfig.searchBase,
-      searchFilter: ldapConfig.searchFilter,
-      groupSearchBase: ldapConfig.groupSearchBase,
-      groupSearchFilter: ldapConfig.groupSearchFilter,
-      caCert
-    };
-  };
+    const config = await ldapConfigDAL.transaction(async (tx) => {
+      const [updatedLdapCfg] = await ldapConfigDAL.update({ orgId }, updateQuery, tx);
+      const decryptedLdapCfg = await getLdapCfg({ orgId }, tx);
+
+      const isSoftDeletion = !decryptedLdapCfg.url && !decryptedLdapCfg.bindDN && !decryptedLdapCfg.bindPass;
+      if (!isSoftDeletion) {
+        const isConnected = await testLDAPConfig(decryptedLdapCfg);
+        if (!isConnected) {
+          throw new BadRequestError({
+            message:
+              "Failed to establish connection to LDAP directory. Please verify that your credentials are correct."
+          });
+        }
+      }
+
+      return updatedLdapCfg;
+    });
+
+    return config;
+  };
 
   const getLdapCfgWithPermissionCheck = async ({
@@ -527,14 +557,13 @@ export const ldapConfigServiceFactory = ({
     });
 
     const isUserCompleted = Boolean(user.isAccepted);
-    const userEnc = await userDAL.findUserEncKeyByUserId(user.id);
 
     const providerAuthToken = crypto.jwt().sign(
       {
         authTokenType: AuthTokenType.PROVIDER_TOKEN,
         userId: user.id,
         username: user.username,
-        hasExchangedPrivateKey: Boolean(userEnc?.serverEncryptedPrivateKey),
+        hasExchangedPrivateKey: true,
         ...(user.email && { email: user.email, isEmailVerified: user.isEmailVerified }),
         firstName,
         lastName,
@@ -694,7 +723,17 @@ export const ldapConfigServiceFactory = ({
     return deletedGroupMap;
   };
 
-  const testLDAPConnection = async ({ actor, actorId, orgId, actorAuthMethod, actorOrgId }: TTestLdapConnectionDTO) => {
+  const testLDAPConnection = async ({
+    actor,
+    actorId,
+    orgId,
+    actorAuthMethod,
+    actorOrgId,
+    bindDN,
+    bindPass,
+    caCert,
+    url
+  }: TTestLdapConnectionDTO) => {
     const { permission } = await permissionService.getOrgPermission(actor, actorId, orgId, actorAuthMethod, actorOrgId);
     ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Create, OrgPermissionSubjects.Ldap);
 
@@ -704,11 +743,12 @@ export const ldapConfigServiceFactory = ({
         message: "Failed to test LDAP connection due to plan restriction. Upgrade plan to test the LDAP connection."
       });
 
-    const ldapConfig = await getLdapCfg({
-      orgId
-    });
-
-    return testLDAPConfig(ldapConfig);
+    return testLDAPConfig({
+      bindDN,
+      bindPass,
+      caCert,
+      url
+    });
   };
 
   return {
@@ -83,6 +83,4 @@ export type TDeleteLdapGroupMapDTO = {
   ldapGroupMapId: string;
 } & TOrgPermission;
 
-export type TTestLdapConnectionDTO = {
-  ldapConfigId: string;
-} & TOrgPermission;
+export type TTestLdapConnectionDTO = TOrgPermission & TTestLDAPConfigDTO;
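A rough illustration of how the reworked testLDAPConnection above is now invoked: the connection details travel inline instead of being looked up by a stored ldapConfigId. The service instance name and the surrounding variables are placeholders, not taken from this diff.

// Sketch only; assumes an ldapConfigService instance created from ldapConfigServiceFactory.
const reachable = await ldapConfigService.testLDAPConnection({
  // actor fields would normally come from the authenticated request context
  actor,
  actorId,
  actorAuthMethod,
  actorOrgId,
  orgId,
  url: "ldaps://ldap.example.com:636",
  bindDN: "cn=admin,dc=example,dc=com",
  bindPass: "admin-password",
  caCert: ""
});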
@@ -31,7 +31,7 @@ export const getDefaultOnPremFeatures = () => {
     caCrl: false,
     sshHostGroups: false,
     enterpriseSecretSyncs: false,
-    enterpriseAppConnections: false,
+    enterpriseAppConnections: true,
     machineIdentityAuthTemplates: false
   };
 };
@@ -404,7 +404,6 @@ export const oidcConfigServiceFactory = ({
 
     await licenseService.updateSubscriptionOrgMemberCount(organization.id);
 
-    const userEnc = await userDAL.findUserEncKeyByUserId(user.id);
     const isUserCompleted = Boolean(user.isAccepted);
     const providerAuthToken = crypto.jwt().sign(
       {
@@ -417,7 +416,7 @@ export const oidcConfigServiceFactory = ({
         organizationName: organization.name,
         organizationId: organization.id,
         organizationSlug: organization.slug,
-        hasExchangedPrivateKey: Boolean(userEnc?.serverEncryptedPrivateKey),
+        hasExchangedPrivateKey: true,
         authMethod: AuthMethod.OIDC,
         authType: UserAliasType.OIDC,
         isUserCompleted,
@@ -161,8 +161,7 @@ const buildAdminPermissionRules = () => {
       ProjectPermissionSecretActions.ReadValue,
       ProjectPermissionSecretActions.Create,
       ProjectPermissionSecretActions.Edit,
-      ProjectPermissionSecretActions.Delete,
-      ProjectPermissionSecretActions.Subscribe
+      ProjectPermissionSecretActions.Delete
     ],
     ProjectPermissionSub.Secrets
   );
@@ -266,8 +265,7 @@ const buildMemberPermissionRules = () => {
       ProjectPermissionSecretActions.ReadValue,
       ProjectPermissionSecretActions.Edit,
       ProjectPermissionSecretActions.Create,
-      ProjectPermissionSecretActions.Delete,
-      ProjectPermissionSecretActions.Subscribe
+      ProjectPermissionSecretActions.Delete
     ],
     ProjectPermissionSub.Secrets
   );
@@ -36,8 +36,7 @@ export enum ProjectPermissionSecretActions {
   ReadValue = "readValue",
   Create = "create",
   Edit = "edit",
-  Delete = "delete",
-  Subscribe = "subscribe"
+  Delete = "delete"
 }
 
 export enum ProjectPermissionCmekActions {
@@ -158,6 +157,13 @@ export enum ProjectPermissionSecretScanningConfigActions {
   Update = "update-configs"
 }
 
+export enum ProjectPermissionSecretEventActions {
+  SubscribeCreated = "subscribe-on-created",
+  SubscribeUpdated = "subscribe-on-updated",
+  SubscribeDeleted = "subscribe-on-deleted",
+  SubscribeImportMutations = "subscribe-on-import-mutations"
+}
+
 export enum ProjectPermissionSub {
   Role = "role",
   Member = "member",
@@ -197,7 +203,8 @@ export enum ProjectPermissionSub {
   Kmip = "kmip",
   SecretScanningDataSources = "secret-scanning-data-sources",
   SecretScanningFindings = "secret-scanning-findings",
-  SecretScanningConfigs = "secret-scanning-configs"
+  SecretScanningConfigs = "secret-scanning-configs",
+  SecretEvents = "secret-events"
 }
 
 export type SecretSubjectFields = {
@@ -205,7 +212,13 @@ export type SecretSubjectFields = {
   secretPath: string;
   secretName?: string;
   secretTags?: string[];
-  eventType?: string;
+};
+
+export type SecretEventSubjectFields = {
+  environment: string;
+  secretPath: string;
+  secretName?: string;
+  secretTags?: string[];
 };
 
 export type SecretFolderSubjectFields = {
@@ -344,7 +357,11 @@ export type ProjectPermissionSet =
   | [ProjectPermissionCommitsActions, ProjectPermissionSub.Commits]
   | [ProjectPermissionSecretScanningDataSourceActions, ProjectPermissionSub.SecretScanningDataSources]
   | [ProjectPermissionSecretScanningFindingActions, ProjectPermissionSub.SecretScanningFindings]
-  | [ProjectPermissionSecretScanningConfigActions, ProjectPermissionSub.SecretScanningConfigs];
+  | [ProjectPermissionSecretScanningConfigActions, ProjectPermissionSub.SecretScanningConfigs]
+  | [
+      ProjectPermissionSecretEventActions,
+      ProjectPermissionSub.SecretEvents | (ForcedSubject<ProjectPermissionSub.SecretEvents> & SecretEventSubjectFields)
+    ];
 
 const SECRET_PATH_MISSING_SLASH_ERR_MSG = "Invalid Secret Path; it must start with a '/'";
 const SECRET_PATH_PERMISSION_OPERATOR_SCHEMA = z.union([
@@ -877,7 +894,16 @@ export const ProjectPermissionV2Schema = z.discriminatedUnion("subject", [
       "When specified, only matching conditions will be allowed to access given resource."
     ).optional()
   }),
+  z.object({
+    subject: z.literal(ProjectPermissionSub.SecretEvents).describe("The entity this permission pertains to."),
+    inverted: z.boolean().optional().describe("Whether rule allows or forbids."),
+    action: CASL_ACTION_SCHEMA_NATIVE_ENUM(ProjectPermissionSecretEventActions).describe(
+      "Describe what action an entity can take."
+    ),
+    conditions: SecretSyncConditionV2Schema.describe(
+      "When specified, only matching conditions will be allowed to access given resource."
+    ).optional()
+  }),
   ...GeneralPermissionSchema
 ]);
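Since the permission schema above now recognizes the secret-events subject, a project role could carry a rule along the following lines. The condition operator shape is an assumption about SecretSyncConditionV2Schema and is shown for illustration only.

// Hypothetical role permission entry for the new subject.
const secretEventRule = {
  subject: ProjectPermissionSub.SecretEvents, // "secret-events"
  action: ProjectPermissionSecretEventActions.SubscribeCreated, // "subscribe-on-created"
  conditions: {
    environment: { $eq: "dev" }, // assumed operator shape
    secretPath: { $glob: "/api/**" } // assumed operator shape
  }
};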
@@ -411,7 +411,6 @@ export const samlConfigServiceFactory = ({
     await licenseService.updateSubscriptionOrgMemberCount(organization.id);
 
     const isUserCompleted = Boolean(user.isAccepted && user.isEmailVerified);
-    const userEnc = await userDAL.findUserEncKeyByUserId(user.id);
     const providerAuthToken = crypto.jwt().sign(
       {
         authTokenType: AuthTokenType.PROVIDER_TOKEN,
@@ -424,7 +423,7 @@ export const samlConfigServiceFactory = ({
         organizationId: organization.id,
         organizationSlug: organization.slug,
         authMethod: authProvider,
-        hasExchangedPrivateKey: Boolean(userEnc?.serverEncryptedPrivateKey),
+        hasExchangedPrivateKey: true,
         authType: UserAliasType.SAML,
         isUserCompleted,
         ...(relayState
@@ -952,13 +952,39 @@ export const secretApprovalRequestServiceFactory = ({
     if (!folder) {
       throw new NotFoundError({ message: `Folder with ID '${folderId}' not found in project with ID '${projectId}'` });
     }
 
+    const { secrets } = mergeStatus;
+
     await secretQueueService.syncSecrets({
       projectId,
       orgId: actorOrgId,
       secretPath: folder.path,
       environmentSlug: folder.environmentSlug,
       actorId,
-      actor
+      actor,
+      event: {
+        created: secrets.created.map((el) => ({
+          environment: folder.environmentSlug,
+          secretPath: folder.path,
+          secretId: el.id,
+          // @ts-expect-error - not present on V1 secrets
+          secretKey: el.key as string
+        })),
+        updated: secrets.updated.map((el) => ({
+          environment: folder.environmentSlug,
+          secretPath: folder.path,
+          secretId: el.id,
+          // @ts-expect-error - not present on V1 secrets
+          secretKey: el.key as string
+        })),
+        deleted: secrets.deleted.map((el) => ({
+          environment: folder.environmentSlug,
+          secretPath: folder.path,
+          secretId: el.id,
+          // @ts-expect-error - not present on V1 secrets
+          secretKey: el.key as string
+        }))
+      }
     });
 
     if (isSoftEnforcement) {
@@ -2,6 +2,7 @@ import { AxiosError } from "axios";
 
 import { getConfig } from "@app/lib/config/env";
 import { BadRequestError } from "@app/lib/errors";
+import { logger } from "@app/lib/logger";
 import { KmsDataKey } from "@app/services/kms/kms-types";
 
 import { AUTH0_CLIENT_SECRET_ROTATION_LIST_OPTION } from "./auth0-client-secret";
@@ -13,9 +14,11 @@ import { MYSQL_CREDENTIALS_ROTATION_LIST_OPTION } from "./mysql-credentials";
 import { OKTA_CLIENT_SECRET_ROTATION_LIST_OPTION } from "./okta-client-secret";
 import { ORACLEDB_CREDENTIALS_ROTATION_LIST_OPTION } from "./oracledb-credentials";
 import { POSTGRES_CREDENTIALS_ROTATION_LIST_OPTION } from "./postgres-credentials";
+import { TSecretRotationV2DALFactory } from "./secret-rotation-v2-dal";
 import { SecretRotation, SecretRotationStatus } from "./secret-rotation-v2-enums";
-import { TSecretRotationV2ServiceFactoryDep } from "./secret-rotation-v2-service";
+import { TSecretRotationV2ServiceFactory, TSecretRotationV2ServiceFactoryDep } from "./secret-rotation-v2-service";
 import {
+  TSecretRotationRotateSecretsJobPayload,
   TSecretRotationV2,
   TSecretRotationV2GeneratedCredentials,
   TSecretRotationV2ListItem,
@@ -74,6 +77,10 @@ export const getNextUtcRotationInterval = (rotateAtUtc?: TSecretRotationV2["rota
   const appCfg = getConfig();
 
   if (appCfg.isRotationDevelopmentMode) {
+    if (appCfg.isTestMode) {
+      // if its test mode, it should always rotate
+      return new Date(Date.now() + 365 * 24 * 60 * 60 * 1000); // Current time + 1 year
+    }
     return getNextUTCMinuteInterval(rotateAtUtc);
   }
 
@@ -263,3 +270,51 @@ export const throwOnImmutableParameterUpdate = (
     // do nothing
   }
 };
 
+export const rotateSecretsFns = async ({
+  job,
+  secretRotationV2DAL,
+  secretRotationV2Service
+}: {
+  job: {
+    data: TSecretRotationRotateSecretsJobPayload;
+    id: string;
+    retryCount: number;
+    retryLimit: number;
+  };
+  secretRotationV2DAL: Pick<TSecretRotationV2DALFactory, "findById">;
+  secretRotationV2Service: Pick<TSecretRotationV2ServiceFactory, "rotateGeneratedCredentials">;
+}) => {
+  const { rotationId, queuedAt, isManualRotation } = job.data;
+  const { retryCount, retryLimit } = job;
+
+  const logDetails = `[rotationId=${rotationId}] [jobId=${job.id}] retryCount=[${retryCount}/${retryLimit}]`;
+
+  try {
+    const secretRotation = await secretRotationV2DAL.findById(rotationId);
+
+    if (!secretRotation) throw new Error(`Secret rotation ${rotationId} not found`);
+
+    if (!secretRotation.isAutoRotationEnabled) {
+      logger.info(`secretRotationV2Queue: Skipping Rotation - Auto-Rotation Disabled Since Queue ${logDetails}`);
+    }
+
+    if (new Date(secretRotation.lastRotatedAt).getTime() >= new Date(queuedAt).getTime()) {
+      // rotated since being queued, skip rotation
+      logger.info(`secretRotationV2Queue: Skipping Rotation - Rotated Since Queue ${logDetails}`);
+      return;
+    }
+
+    await secretRotationV2Service.rotateGeneratedCredentials(secretRotation, {
+      jobId: job.id,
+      shouldSendNotification: true,
+      isFinalAttempt: retryCount === retryLimit,
+      isManualRotation
+    });
+
+    logger.info(`secretRotationV2Queue: Secrets Rotated ${logDetails}`);
+  } catch (error) {
+    logger.error(error, `secretRotationV2Queue: Failed to Rotate Secrets ${logDetails}`);
+    throw error;
+  }
+};
|
|||||||
|
import { v4 as uuidv4 } from "uuid";
|
||||||
|
|
||||||
import { ProjectMembershipRole } from "@app/db/schemas";
|
import { ProjectMembershipRole } from "@app/db/schemas";
|
||||||
import { TSecretRotationV2DALFactory } from "@app/ee/services/secret-rotation-v2/secret-rotation-v2-dal";
|
import { TSecretRotationV2DALFactory } from "@app/ee/services/secret-rotation-v2/secret-rotation-v2-dal";
|
||||||
import { SecretRotation } from "@app/ee/services/secret-rotation-v2/secret-rotation-v2-enums";
|
import { SecretRotation } from "@app/ee/services/secret-rotation-v2/secret-rotation-v2-enums";
|
||||||
import {
|
import {
|
||||||
getNextUtcRotationInterval,
|
getNextUtcRotationInterval,
|
||||||
getSecretRotationRotateSecretJobOptions
|
getSecretRotationRotateSecretJobOptions,
|
||||||
|
rotateSecretsFns
|
||||||
} from "@app/ee/services/secret-rotation-v2/secret-rotation-v2-fns";
|
} from "@app/ee/services/secret-rotation-v2/secret-rotation-v2-fns";
|
||||||
import { SECRET_ROTATION_NAME_MAP } from "@app/ee/services/secret-rotation-v2/secret-rotation-v2-maps";
|
import { SECRET_ROTATION_NAME_MAP } from "@app/ee/services/secret-rotation-v2/secret-rotation-v2-maps";
|
||||||
import { TSecretRotationV2ServiceFactory } from "@app/ee/services/secret-rotation-v2/secret-rotation-v2-service";
|
import { TSecretRotationV2ServiceFactory } from "@app/ee/services/secret-rotation-v2/secret-rotation-v2-service";
|
||||||
@@ -63,14 +66,34 @@ export const secretRotationV2QueueServiceFactory = async ({
|
|||||||
rotation.lastRotatedAt
|
rotation.lastRotatedAt
|
||||||
).toISOString()}] [rotateAt=${new Date(rotation.nextRotationAt!).toISOString()}]`
|
).toISOString()}] [rotateAt=${new Date(rotation.nextRotationAt!).toISOString()}]`
|
||||||
);
|
);
|
||||||
await queueService.queuePg(
|
|
||||||
QueueJobs.SecretRotationV2RotateSecrets,
|
const data = {
|
||||||
{
|
rotationId: rotation.id,
|
||||||
rotationId: rotation.id,
|
queuedAt: currentTime
|
||||||
queuedAt: currentTime
|
} as TSecretRotationRotateSecretsJobPayload;
|
||||||
},
|
|
||||||
getSecretRotationRotateSecretJobOptions(rotation)
|
if (appCfg.isTestMode) {
|
||||||
);
|
logger.warn("secretRotationV2Queue: Manually rotating secrets for test mode");
|
||||||
|
await rotateSecretsFns({
|
||||||
|
job: {
|
||||||
|
id: uuidv4(),
|
||||||
|
data,
|
||||||
|
retryCount: 0,
|
||||||
|
retryLimit: 0
|
||||||
|
},
|
||||||
|
secretRotationV2DAL,
|
||||||
|
secretRotationV2Service
|
||||||
|
});
|
||||||
|
} else {
|
||||||
|
await queueService.queuePg(
|
||||||
|
QueueJobs.SecretRotationV2RotateSecrets,
|
||||||
|
{
|
||||||
|
rotationId: rotation.id,
|
||||||
|
queuedAt: currentTime
|
||||||
|
},
|
||||||
|
getSecretRotationRotateSecretJobOptions(rotation)
|
||||||
|
);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
logger.error(error, "secretRotationV2Queue: Queue Rotations Error:");
|
logger.error(error, "secretRotationV2Queue: Queue Rotations Error:");
|
||||||
@@ -87,38 +110,14 @@ export const secretRotationV2QueueServiceFactory = async ({
|
|||||||
await queueService.startPg<QueueName.SecretRotationV2>(
|
await queueService.startPg<QueueName.SecretRotationV2>(
|
||||||
QueueJobs.SecretRotationV2RotateSecrets,
|
QueueJobs.SecretRotationV2RotateSecrets,
|
||||||
async ([job]) => {
|
async ([job]) => {
|
||||||
const { rotationId, queuedAt, isManualRotation } = job.data as TSecretRotationRotateSecretsJobPayload;
|
await rotateSecretsFns({
|
||||||
const { retryCount, retryLimit } = job;
|
job: {
|
||||||
|
...job,
|
||||||
const logDetails = `[rotationId=${rotationId}] [jobId=${job.id}] retryCount=[${retryCount}/${retryLimit}]`;
|
data: job.data as TSecretRotationRotateSecretsJobPayload
|
||||||
|
},
|
||||||
try {
|
secretRotationV2DAL,
|
||||||
const secretRotation = await secretRotationV2DAL.findById(rotationId);
|
secretRotationV2Service
|
||||||
|
});
|
||||||
if (!secretRotation) throw new Error(`Secret rotation ${rotationId} not found`);
|
|
||||||
|
|
||||||
if (!secretRotation.isAutoRotationEnabled) {
|
|
||||||
logger.info(`secretRotationV2Queue: Skipping Rotation - Auto-Rotation Disabled Since Queue ${logDetails}`);
|
|
||||||
}
|
|
||||||
|
|
||||||
if (new Date(secretRotation.lastRotatedAt).getTime() >= new Date(queuedAt).getTime()) {
|
|
||||||
// rotated since being queued, skip rotation
|
|
||||||
logger.info(`secretRotationV2Queue: Skipping Rotation - Rotated Since Queue ${logDetails}`);
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
await secretRotationV2Service.rotateGeneratedCredentials(secretRotation, {
|
|
||||||
jobId: job.id,
|
|
||||||
shouldSendNotification: true,
|
|
||||||
isFinalAttempt: retryCount === retryLimit,
|
|
||||||
isManualRotation
|
|
||||||
});
|
|
||||||
|
|
||||||
logger.info(`secretRotationV2Queue: Secrets Rotated ${logDetails}`);
|
|
||||||
} catch (error) {
|
|
||||||
logger.error(error, `secretRotationV2Queue: Failed to Rotate Secrets ${logDetails}`);
|
|
||||||
throw error;
|
|
||||||
}
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
batchSize: 1,
|
batchSize: 1,
|
||||||
|
@@ -58,9 +58,9 @@ export function scanDirectory(inputPath: string, outputPath: string, configPath?
   });
 }
 
-export function scanFile(inputPath: string): Promise<void> {
+export function scanFile(inputPath: string, configPath?: string): Promise<void> {
   return new Promise((resolve, reject) => {
-    const command = `infisical scan --exit-code=77 --source "${inputPath}" --no-git`;
+    const command = `infisical scan --exit-code=77 --source "${inputPath}" --no-git ${configPath ? `-c ${configPath}` : ""}`;
     exec(command, (error) => {
       if (error && error.code === 77) {
         reject(error);
@@ -166,6 +166,20 @@ export const parseScanErrorMessage = (err: unknown): string => {
     : `${errorMessage.substring(0, MAX_MESSAGE_LENGTH - 3)}...`;
 };
 
+const generateSecretValuePolicyConfiguration = (entropy: number): string => `
+# Extend default configuration to preserve existing rules
+[extend]
+useDefault = true
+
+# Add custom high-entropy rule
+[[rules]]
+id = "high-entropy"
+description = "Will scan for high entropy secrets"
+regex = '''.*'''
+entropy = ${entropy}
+keywords = []
+`;
+
 export const scanSecretPolicyViolations = async (
   projectId: string,
   secretPath: string,
@@ -188,14 +202,25 @@ export const scanSecretPolicyViolations = async (
 
   const tempFolder = await createTempFolder();
   try {
+    const configPath = join(tempFolder, "infisical-scan.toml");
+
+    const secretPolicyConfiguration = generateSecretValuePolicyConfiguration(
+      appCfg.PARAMS_FOLDER_SECRET_DETECTION_ENTROPY
+    );
+
+    await writeTextToFile(configPath, secretPolicyConfiguration);
+
     const scanPromises = secrets
       .filter((secret) => !ignoreValues.includes(secret.secretValue))
       .map(async (secret) => {
-        const secretFilePath = join(tempFolder, `${crypto.nativeCrypto.randomUUID()}.txt`);
-        await writeTextToFile(secretFilePath, `${secret.secretKey}=${secret.secretValue}`);
+        const secretKeyValueFilePath = join(tempFolder, `${crypto.nativeCrypto.randomUUID()}.txt`);
+        const secretValueOnlyFilePath = join(tempFolder, `${crypto.nativeCrypto.randomUUID()}.txt`);
+        await writeTextToFile(secretKeyValueFilePath, `${secret.secretKey}=${secret.secretValue}`);
+        await writeTextToFile(secretValueOnlyFilePath, secret.secretValue);
 
         try {
-          await scanFile(secretFilePath);
+          await scanFile(secretKeyValueFilePath);
+          await scanFile(secretValueOnlyFilePath, configPath);
         } catch (error) {
           throw new BadRequestError({
             message: `Secret value detected in ${secret.secretKey}. Please add this instead to the designated secrets path in the project.`,
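For reference, with the default PARAMS_FOLDER_SECRET_DETECTION_ENTROPY of 3.7 introduced further down in this compare, the helper above renders the scan configuration below; this is simply the template with the entropy value substituted, written to infisical-scan.toml and passed to the CLI via -c.

# Extend default configuration to preserve existing rules
[extend]
useDefault = true

# Add custom high-entropy rule
[[rules]]
id = "high-entropy"
description = "Will scan for high entropy secrets"
regex = '''.*'''
entropy = 3.7
keywords = []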
@@ -79,6 +79,7 @@ const envSchema = z
     QUEUE_WORKER_PROFILE: z.nativeEnum(QueueWorkerProfile).default(QueueWorkerProfile.All),
     HTTPS_ENABLED: zodStrBool,
     ROTATION_DEVELOPMENT_MODE: zodStrBool.default("false").optional(),
+    DAILY_RESOURCE_CLEAN_UP_DEVELOPMENT_MODE: zodStrBool.default("false").optional(),
     // smtp options
     SMTP_HOST: zpStr(z.string().optional()),
     SMTP_IGNORE_TLS: zodStrBool.default("false"),
@@ -215,6 +216,7 @@ const envSchema = z
         return JSON.parse(val) as { secretPath: string; projectId: string }[];
       })
     ),
+    PARAMS_FOLDER_SECRET_DETECTION_ENTROPY: z.coerce.number().optional().default(3.7),
 
     // HSM
     HSM_LIB_PATH: zpStr(z.string().optional()),
@@ -346,7 +348,11 @@ const envSchema = z
     isSmtpConfigured: Boolean(data.SMTP_HOST),
     isRedisConfigured: Boolean(data.REDIS_URL || data.REDIS_SENTINEL_HOSTS),
     isDevelopmentMode: data.NODE_ENV === "development",
-    isRotationDevelopmentMode: data.NODE_ENV === "development" && data.ROTATION_DEVELOPMENT_MODE,
+    isTestMode: data.NODE_ENV === "test",
+    isRotationDevelopmentMode:
+      (data.NODE_ENV === "development" && data.ROTATION_DEVELOPMENT_MODE) || data.NODE_ENV === "test",
+    isDailyResourceCleanUpDevelopmentMode:
+      data.NODE_ENV === "development" && data.DAILY_RESOURCE_CLEAN_UP_DEVELOPMENT_MODE,
     isProductionMode: data.NODE_ENV === "production" || IS_PACKAGED,
     isRedisSentinelMode: Boolean(data.REDIS_SENTINEL_HOSTS),
     REDIS_SENTINEL_HOSTS: data.REDIS_SENTINEL_HOSTS?.trim()
@@ -19,3 +19,17 @@ export const prefixWithSlash = (str: string) => {
 const vowelRegex = new RE2(/^[aeiou]/i);
 
 export const startsWithVowel = (str: string) => vowelRegex.test(str);
+
+const pickWordsRegex = new RE2(/(\W+)/);
+export const sanitizeString = (dto: { unsanitizedString: string; tokens: string[] }) => {
+  const words = dto.unsanitizedString.split(pickWordsRegex);
+
+  const redactionSet = new Set(dto.tokens.filter(Boolean));
+  const sanitizedWords = words.map((el) => {
+    if (redactionSet.has(el)) {
+      return "[REDACTED]";
+    }
+    return el;
+  });
+  return sanitizedWords.join("");
+};
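A quick usage sketch for sanitizeString (inputs invented): the string is split on runs of non-word characters with the delimiters kept, so only tokens that survive as single words are replaced.

const sanitized = sanitizeString({
  unsanitizedString: "bind failed for cn=admin,dc=example,dc=com using hunter2",
  tokens: ["hunter2", "cn=admin,dc=example,dc=com", ""]
});
// Empty tokens are dropped by filter(Boolean). "hunter2" is a single word after the split and is
// replaced, while the multi-word DN token never matches a split element and passes through:
// => "bind failed for cn=admin,dc=example,dc=com using [REDACTED]"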
@@ -20,7 +20,10 @@ export const triggerWorkflowIntegrationNotification = async (dto: TTriggerWorkfl
   const slackConfig = await projectSlackConfigDAL.getIntegrationDetailsByProject(projectId);
 
   if (slackConfig) {
-    if (notification.type === TriggerFeature.ACCESS_REQUEST) {
+    if (
+      notification.type === TriggerFeature.ACCESS_REQUEST ||
+      notification.type === TriggerFeature.ACCESS_REQUEST_UPDATED
+    ) {
       const targetChannelIds = slackConfig.accessRequestChannels?.split(", ") || [];
       if (targetChannelIds.length && slackConfig.isAccessRequestNotificationEnabled) {
         await sendSlackNotification({
@@ -50,7 +53,10 @@ export const triggerWorkflowIntegrationNotification = async (dto: TTriggerWorkfl
   }
 
   if (microsoftTeamsConfig) {
-    if (notification.type === TriggerFeature.ACCESS_REQUEST) {
+    if (
+      notification.type === TriggerFeature.ACCESS_REQUEST ||
+      notification.type === TriggerFeature.ACCESS_REQUEST_UPDATED
+    ) {
       if (microsoftTeamsConfig.isAccessRequestNotificationEnabled && microsoftTeamsConfig.accessRequestChannels) {
         const { success, data } = validateMicrosoftTeamsChannelsSchema.safeParse(
           microsoftTeamsConfig.accessRequestChannels
@@ -6,7 +6,8 @@ import { TProjectSlackConfigDALFactory } from "@app/services/slack/project-slack
 
 export enum TriggerFeature {
   SECRET_APPROVAL = "secret-approval",
-  ACCESS_REQUEST = "access-request"
+  ACCESS_REQUEST = "access-request",
+  ACCESS_REQUEST_UPDATED = "access-request-updated"
 }
 
 export type TNotification =
@@ -34,6 +35,22 @@ export type TNotification =
         approvalUrl: string;
         note?: string;
       };
+    }
+  | {
+      type: TriggerFeature.ACCESS_REQUEST_UPDATED;
+      payload: {
+        requesterFullName: string;
+        requesterEmail: string;
+        isTemporary: boolean;
+        secretPath: string;
+        environment: string;
+        projectName: string;
+        permissions: string[];
+        approvalUrl: string;
+        editNote?: string;
+        editorFullName?: string;
+        editorEmail?: string;
+      };
     };
 
 export type TTriggerWorkflowNotificationDTO = {
export type TTriggerWorkflowNotificationDTO = {
|
export type TTriggerWorkflowNotificationDTO = {
|
||||||
|
@@ -45,6 +45,8 @@ import { groupServiceFactory } from "@app/ee/services/group/group-service";
 import { userGroupMembershipDALFactory } from "@app/ee/services/group/user-group-membership-dal";
 import { hsmServiceFactory } from "@app/ee/services/hsm/hsm-service";
 import { HsmModule } from "@app/ee/services/hsm/hsm-types";
+import { identityAuthTemplateDALFactory } from "@app/ee/services/identity-auth-template/identity-auth-template-dal";
+import { identityAuthTemplateServiceFactory } from "@app/ee/services/identity-auth-template/identity-auth-template-service";
 import { identityProjectAdditionalPrivilegeDALFactory } from "@app/ee/services/identity-project-additional-privilege/identity-project-additional-privilege-dal";
 import { identityProjectAdditionalPrivilegeServiceFactory } from "@app/ee/services/identity-project-additional-privilege/identity-project-additional-privilege-service";
 import { identityProjectAdditionalPrivilegeV2ServiceFactory } from "@app/ee/services/identity-project-additional-privilege-v2/identity-project-additional-privilege-v2-service";
@@ -179,8 +181,6 @@ import { identityAccessTokenDALFactory } from "@app/services/identity-access-tok
 import { identityAccessTokenServiceFactory } from "@app/services/identity-access-token/identity-access-token-service";
 import { identityAliCloudAuthDALFactory } from "@app/services/identity-alicloud-auth/identity-alicloud-auth-dal";
 import { identityAliCloudAuthServiceFactory } from "@app/services/identity-alicloud-auth/identity-alicloud-auth-service";
-import { identityAuthTemplateDALFactory } from "@app/ee/services/identity-auth-template/identity-auth-template-dal";
-import { identityAuthTemplateServiceFactory } from "@app/ee/services/identity-auth-template/identity-auth-template-service";
 import { identityAwsAuthDALFactory } from "@app/services/identity-aws-auth/identity-aws-auth-dal";
 import { identityAwsAuthServiceFactory } from "@app/services/identity-aws-auth/identity-aws-auth-service";
 import { identityAzureAuthDALFactory } from "@app/services/identity-azure-auth/identity-azure-auth-dal";
@@ -560,8 +560,7 @@ export const registerRoutes = async (
     queueService,
     projectDAL,
     licenseService,
-    auditLogStreamDAL,
-    eventBusService
+    auditLogStreamDAL
   });
 
   const auditLogService = auditLogServiceFactory({ auditLogDAL, permissionService, auditLogQueue });
@@ -849,8 +848,6 @@ export const registerRoutes = async (
     projectDAL,
     permissionService,
     projectUserMembershipRoleDAL,
-    projectBotDAL,
-    projectKeyDAL,
     projectMembershipDAL
   });
 
@@ -1123,7 +1120,9 @@ export const registerRoutes = async (
     resourceMetadataDAL,
     folderCommitService,
     secretSyncQueue,
-    reminderService
+    reminderService,
+    eventBusService,
+    licenseService
   });
 
   const projectService = projectServiceFactory({
@@ -1974,7 +1973,7 @@ export const registerRoutes = async (
 
   await telemetryQueue.startTelemetryCheck();
   await telemetryQueue.startAggregatedEventsJob();
-  await dailyResourceCleanUp.startCleanUp();
+  await dailyResourceCleanUp.init();
   await dailyReminderQueueService.startDailyRemindersJob();
   await dailyReminderQueueService.startSecretReminderMigrationJob();
   await dailyExpiringPkiItemAlert.startSendingAlerts();
@@ -583,16 +583,7 @@ export const registerAdminRouter = async (server: FastifyZodProvider) => {
         email: z.string().email().trim(),
         password: z.string().trim(),
         firstName: z.string().trim(),
-        lastName: z.string().trim().optional(),
-        protectedKey: z.string().trim(),
-        protectedKeyIV: z.string().trim(),
-        protectedKeyTag: z.string().trim(),
-        publicKey: z.string().trim(),
-        encryptedPrivateKey: z.string().trim(),
-        encryptedPrivateKeyIV: z.string().trim(),
-        encryptedPrivateKeyTag: z.string().trim(),
-        salt: z.string().trim(),
-        verifier: z.string().trim()
+        lastName: z.string().trim().optional()
       }),
       response: {
         200: z.object({
@@ -2,6 +2,7 @@ import { ForbiddenError } from "@casl/ability";
 import { z } from "zod";
 
 import { SecretFoldersSchema, SecretImportsSchema, UsersSchema } from "@app/db/schemas";
+import { RemindersSchema } from "@app/db/schemas/reminders";
 import { EventType, UserAgentType } from "@app/ee/services/audit-log/audit-log-types";
 import { ProjectPermissionSecretActions } from "@app/ee/services/permission/project-permission";
 import { SecretRotationV2Schema } from "@app/ee/services/secret-rotation-v2/secret-rotation-v2-union-schema";
@@ -628,7 +629,10 @@ export const registerDashboardRouter = async (server: FastifyZodProvider) => {
               secretValueHidden: z.boolean(),
               secretPath: z.string().optional(),
               secretMetadata: ResourceMetadataSchema.optional(),
-              tags: SanitizedTagSchema.array().optional()
+              tags: SanitizedTagSchema.array().optional(),
+              reminder: RemindersSchema.extend({
+                recipients: z.string().array().optional()
+              }).nullish()
             })
               .array()
               .optional(),
@@ -706,7 +710,11 @@ export const registerDashboardRouter = async (server: FastifyZodProvider) => {
 
       let imports: Awaited<ReturnType<typeof server.services.secretImport.getImports>> | undefined;
       let folders: Awaited<ReturnType<typeof server.services.folder.getFolders>> | undefined;
-      let secrets: Awaited<ReturnType<typeof server.services.secret.getSecretsRaw>>["secrets"] | undefined;
+      let secrets:
+        | (Awaited<ReturnType<typeof server.services.secret.getSecretsRaw>>["secrets"][number] & {
+            reminder: Awaited<ReturnType<typeof server.services.reminder.getRemindersForDashboard>>[string] | null;
+          })[]
+        | undefined;
       let dynamicSecrets: Awaited<ReturnType<typeof server.services.dynamicSecret.listDynamicSecretsByEnv>> | undefined;
       let secretRotations:
         | Awaited<ReturnType<typeof server.services.secretRotationV2.getDashboardSecretRotations>>
@@ -904,7 +912,7 @@ export const registerDashboardRouter = async (server: FastifyZodProvider) => {
         });
 
         if (remainingLimit > 0 && totalSecretCount > adjustedOffset) {
-          secrets = (
+          const rawSecrets = (
             await server.services.secret.getSecretsRaw({
               actorId: req.permission.id,
               actor: req.permission.type,
@@ -925,6 +933,15 @@ export const registerDashboardRouter = async (server: FastifyZodProvider) => {
               includeMetadataInSearch: true
            })
          ).secrets;
+
+          const reminders = await server.services.reminder.getRemindersForDashboard(
+            rawSecrets.map((secret) => secret.id)
+          );
+
+          secrets = rawSecrets.map((secret) => ({
+            ...secret,
+            reminder: reminders[secret.id] ?? null
+          }));
         }
       }
     } catch (error) {
@@ -5,8 +5,8 @@ import { z } from "zod";
 
 import { ActionProjectType, ProjectType } from "@app/db/schemas";
 import { getServerSentEventsHeaders } from "@app/ee/services/event/event-sse-stream";
-import { EventRegisterSchema } from "@app/ee/services/event/types";
-import { ProjectPermissionSecretActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission";
+import { EventRegisterSchema, Mappings } from "@app/ee/services/event/types";
+import { ProjectPermissionSub } from "@app/ee/services/permission/project-permission";
 import { ApiDocsTags, EventSubscriptions } from "@app/lib/api-docs";
 import { BadRequestError, ForbiddenRequestError, RateLimitError } from "@app/lib/errors";
 import { readLimit } from "@app/server/config/rateLimiter";
@@ -82,21 +82,19 @@ export const registerEventRouter = async (server: FastifyZodProvider) => {
       req.body.register.forEach((r) => {
         const fields = {
           environment: r.conditions?.environmentSlug ?? "",
-          secretPath: r.conditions?.secretPath ?? "/",
-          eventType: r.event
+          secretPath: r.conditions?.secretPath ?? "/"
         };
 
-        const allowed = info.permission.can(
-          ProjectPermissionSecretActions.Subscribe,
-          subject(ProjectPermissionSub.Secrets, fields)
-        );
+        const action = Mappings.BusEventToAction(r.event);
+
+        const allowed = info.permission.can(action, subject(ProjectPermissionSub.SecretEvents, fields));
 
         if (!allowed) {
           throw new ForbiddenRequestError({
             name: "PermissionDenied",
-            message: `You are not allowed to subscribe on secrets`,
+            message: `You are not allowed to subscribe on ${ProjectPermissionSub.SecretEvents}`,
             details: {
-              event: fields.eventType,
+              action,
               environmentSlug: fields.environment,
               secretPath: fields.secretPath
             }
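Read together with the event-types change near the top of this compare, the check above first maps the requested bus event to one of the new subscribe actions and then asks CASL about the secret-events subject. A rough sketch of that resolution, with the request values invented:

const r = { event: BusEventName.UpdateSecret, conditions: { environmentSlug: "dev", secretPath: "/api" } };

const action = Mappings.BusEventToAction(r.event); // "subscribe-on-updated" per the mapping shown earlier
const allowed = info.permission.can(
  action,
  subject(ProjectPermissionSub.SecretEvents, {
    environment: r.conditions?.environmentSlug ?? "",
    secretPath: r.conditions?.secretPath ?? "/"
  })
);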
@@ -478,4 +478,30 @@ export const registerIdentityRouter = async (server: FastifyZodProvider) => {
       return { identityMemberships };
     }
   });
+
+  server.route({
+    method: "GET",
+    url: "/details",
+    config: {
+      rateLimit: readLimit
+    },
+    schema: {
+      response: {
+        200: z.object({
+          identityDetails: z.object({
+            organization: z.object({
+              id: z.string(),
+              name: z.string(),
+              slug: z.string()
+            })
+          })
+        })
+      }
+    },
+    onRequest: verifyAuth([AuthMode.IDENTITY_ACCESS_TOKEN], { requireOrg: false }),
+    handler: async (req) => {
+      const organization = await server.services.org.findIdentityOrganization(req.permission.id);
+      return { identityDetails: { organization } };
+    }
+  });
 };
|
||||||
|
@@ -45,7 +45,7 @@ export const registerSecretFolderRouter = async (server: FastifyZodProvider) =>
 .transform(removeTrailingSlash)
 .describe(FOLDERS.CREATE.path)
 .optional(),
-// backward compatiability with cli
+// backward compatibility with cli
 directory: z
 .string()
 .trim()

@@ -58,7 +58,9 @@ export const registerSecretFolderRouter = async (server: FastifyZodProvider) =>
 }),
 response: {
 200: z.object({
-folder: SecretFoldersSchema
+folder: SecretFoldersSchema.extend({
+path: z.string()
+})
 })
 }
 },

@@ -130,7 +132,7 @@ export const registerSecretFolderRouter = async (server: FastifyZodProvider) =>
 .transform(removeTrailingSlash)
 .describe(FOLDERS.UPDATE.path)
 .optional(),
-// backward compatiability with cli
+// backward compatibility with cli
 directory: z
 .string()
 .trim()

@@ -143,7 +145,9 @@ export const registerSecretFolderRouter = async (server: FastifyZodProvider) =>
 }),
 response: {
 200: z.object({
-folder: SecretFoldersSchema
+folder: SecretFoldersSchema.extend({
+path: z.string()
+})
 })
 }
 },

@@ -359,7 +363,7 @@ export const registerSecretFolderRouter = async (server: FastifyZodProvider) =>
 .transform(removeTrailingSlash)
 .describe(FOLDERS.LIST.path)
 .optional(),
-// backward compatiability with cli
+// backward compatibility with cli
 directory: z
 .string()
 .trim()
@@ -283,6 +283,14 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
 rateLimit: readLimit
 },
 schema: {
+hide: false,
+tags: [ApiDocsTags.Projects],
+description: "Get project details by slug",
+security: [
+{
+bearerAuth: []
+}
+],
 params: z.object({
 slug: slugSchema({ max: 36 }).describe("The slug of the project to get.")
 }),
@@ -19,7 +19,7 @@ export const registerExternalMigrationRouter = async (server: FastifyZodProvider
 config: {
 rateLimit: writeLimit
 },
-onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
+onRequest: verifyAuth([AuthMode.JWT]),
 handler: async (req) => {
 const data = await req.file({
 limits: {

@@ -69,7 +69,7 @@ export const registerExternalMigrationRouter = async (server: FastifyZodProvider
 mappingType: z.nativeEnum(VaultMappingType)
 })
 },
-onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
+onRequest: verifyAuth([AuthMode.JWT]),
 handler: async (req) => {
 await server.services.migration.importVaultData({
 actorId: req.permission.id,
@@ -142,16 +142,27 @@ export const getGitHubAppAuthToken = async (appConnection: TGitHubConnection) =>
 return token;
 };

+const parseGitHubLinkHeader = (linkHeader: string | undefined): Record<string, string> => {
+if (!linkHeader) return {};
+
+const links: Record<string, string> = {};
+const segments = linkHeader.split(",");
+const re = new RE2(/<([^>]+)>;\s*rel="([^"]+)"/);
+
+for (const segment of segments) {
+const match = re.exec(segment.trim());
+if (match) {
+const url = match[1];
+const rel = match[2];
+links[rel] = url;
+}
+}
+return links;
+};
+
 function extractNextPageUrl(linkHeader: string | undefined): string | null {
-if (!linkHeader) return null;
-
-const links = linkHeader.split(",");
-const nextLink = links.find((link) => link.includes('rel="next"'));
-
-if (!nextLink) return null;
-
-const match = new RE2(/<([^>]+)>/).exec(nextLink);
-return match ? match[1] : null;
+const links = parseGitHubLinkHeader(linkHeader);
+return links.next || null;
 }

 export const makePaginatedGitHubRequest = async <T, R = T[]>(

@@ -164,27 +175,83 @@ export const makePaginatedGitHubRequest = async <T, R = T[]>(

 const token =
 method === GitHubConnectionMethod.OAuth ? credentials.accessToken : await getGitHubAppAuthToken(appConnection);
-let url: string | null = `https://${await getGitHubInstanceApiUrl(appConnection)}${path}`;
+const baseUrl = `https://${await getGitHubInstanceApiUrl(appConnection)}${path}`;
+const initialUrlObj = new URL(baseUrl);
+initialUrlObj.searchParams.set("per_page", "100");

 let results: T[] = [];
-let i = 0;
+const maxIterations = 1000;

-while (url && i < 1000) {
-// eslint-disable-next-line no-await-in-loop
-const response: AxiosResponse<R> = await requestWithGitHubGateway<R>(appConnection, gatewayService, {
-url,
-method: "GET",
-headers: {
-Accept: "application/vnd.github+json",
-Authorization: `Bearer ${token}`,
-"X-GitHub-Api-Version": "2022-11-28"
-}
-});
-
-const items = dataMapper ? dataMapper(response.data) : (response.data as unknown as T[]);
-results = results.concat(items);
-
-url = extractNextPageUrl(response.headers.link as string | undefined);
-i += 1;
+// Make initial request to get link header
+const firstResponse: AxiosResponse<R> = await requestWithGitHubGateway<R>(appConnection, gatewayService, {
+url: initialUrlObj.toString(),
+method: "GET",
+headers: {
+Accept: "application/vnd.github+json",
+Authorization: `Bearer ${token}`,
+"X-GitHub-Api-Version": "2022-11-28"
+}
+});
+
+const firstPageItems = dataMapper ? dataMapper(firstResponse.data) : (firstResponse.data as unknown as T[]);
+results = results.concat(firstPageItems);
+
+const linkHeader = parseGitHubLinkHeader(firstResponse.headers.link as string | undefined);
+const lastPageUrl = linkHeader.last;
+
+// If there's a last page URL, get its page number and concurrently fetch every page starting from 2 to last
+if (lastPageUrl) {
+const lastPageParam = new URL(lastPageUrl).searchParams.get("page");
+const totalPages = lastPageParam ? parseInt(lastPageParam, 10) : 1;
+
+const pageRequests: Promise<AxiosResponse<R>>[] = [];
+
+for (let pageNum = 2; pageNum <= totalPages && pageNum - 1 < maxIterations; pageNum += 1) {
+const pageUrlObj = new URL(initialUrlObj.toString());
+pageUrlObj.searchParams.set("page", pageNum.toString());
+
+pageRequests.push(
+requestWithGitHubGateway<R>(appConnection, gatewayService, {
+url: pageUrlObj.toString(),
+method: "GET",
+headers: {
+Accept: "application/vnd.github+json",
+Authorization: `Bearer ${token}`,
+"X-GitHub-Api-Version": "2022-11-28"
+}
+})
+);
+}
+const responses = await Promise.all(pageRequests);
+
+for (const response of responses) {
+const items = dataMapper ? dataMapper(response.data) : (response.data as unknown as T[]);
+results = results.concat(items);
+}
+} else {
+// Fallback in case last link isn't present
+let url: string | null = extractNextPageUrl(firstResponse.headers.link as string | undefined);
+let i = 1;
+
+while (url && i < maxIterations) {
+// eslint-disable-next-line no-await-in-loop
+const response: AxiosResponse<R> = await requestWithGitHubGateway<R>(appConnection, gatewayService, {
+url,
+method: "GET",
+headers: {
+Accept: "application/vnd.github+json",
+Authorization: `Bearer ${token}`,
+"X-GitHub-Api-Version": "2022-11-28"
+}
+});
+
+const items = dataMapper ? dataMapper(response.data) : (response.data as unknown as T[]);
+results = results.concat(items);
+
+url = extractNextPageUrl(response.headers.link as string | undefined);
+i += 1;
+}
 }

 return results;
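The pagination rewrite above derives the total page count from the rel="last" entry of GitHub's Link response header and fetches the remaining pages concurrently. A self-contained sketch of the same idea, assuming plain fetch and the built-in RegExp instead of the project's gateway helper and RE2:

// Parse an RFC 8288 Link header into { rel: url } pairs.
function parseLinkHeader(header: string | null): Record<string, string> {
  if (!header) return {};
  const links: Record<string, string> = {};
  for (const part of header.split(",")) {
    const match = /<([^>]+)>;\s*rel="([^"]+)"/.exec(part.trim());
    if (match) links[match[2]] = match[1];
  }
  return links;
}

// Fetch every page of a paginated GitHub listing, first page sequentially, the rest in parallel.
async function fetchAllPages<T>(url: string, token: string): Promise<T[]> {
  const headers = { Authorization: `Bearer ${token}`, Accept: "application/vnd.github+json" };
  const first = await fetch(`${url}?per_page=100`, { headers });
  const results: T[] = [...((await first.json()) as T[])];

  const last = parseLinkHeader(first.headers.get("link")).last;
  if (!last) return results; // single page, nothing more to fetch

  const totalPages = Number(new URL(last).searchParams.get("page") ?? 1);
  const pages = await Promise.all(
    Array.from({ length: totalPages - 1 }, (_, i) =>
      fetch(`${url}?per_page=100&page=${i + 2}`, { headers }).then((r) => r.json() as Promise<T[]>)
    )
  );
  return results.concat(...pages);
}

// Example usage (token is a placeholder): fetchAllPages("https://api.github.com/user/repos", process.env.GITHUB_TOKEN!)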
@@ -148,9 +148,15 @@ export const authLoginServiceFactory = ({

 if (organizationId) {
 const org = await orgDAL.findById(organizationId);
-if (org && org.userTokenExpiration) {
-tokenSessionExpiresIn = getMinExpiresIn(cfg.JWT_AUTH_LIFETIME, org.userTokenExpiration);
-refreshTokenExpiresIn = org.userTokenExpiration;
+if (org) {
+await orgMembershipDAL.update(
+{ userId: user.id, orgId: org.id },
+{ lastLoginAuthMethod: authMethod, lastLoginTime: new Date() }
+);
+if (org.userTokenExpiration) {
+tokenSessionExpiresIn = getMinExpiresIn(cfg.JWT_AUTH_LIFETIME, org.userTokenExpiration);
+refreshTokenExpiresIn = org.userTokenExpiration;
+}
 }
 }

@@ -818,7 +824,6 @@ export const authLoginServiceFactory = ({
 }
 }

-const userEnc = await userDAL.findUserEncKeyByUserId(user.id);
 const isUserCompleted = user.isAccepted;
 const providerAuthToken = crypto.jwt().sign(
 {

@@ -829,7 +834,7 @@ export const authLoginServiceFactory = ({
 isEmailVerified: user.isEmailVerified,
 firstName: user.firstName,
 lastName: user.lastName,
-hasExchangedPrivateKey: Boolean(userEnc?.serverEncryptedPrivateKey),
+hasExchangedPrivateKey: true,
 authMethod,
 isUserCompleted,
 ...(callbackPort

@@ -874,8 +879,7 @@ export const authLoginServiceFactory = ({
 const userEnc =
 usersByUsername?.length > 1 ? usersByUsername.find((el) => el.username === email) : usersByUsername?.[0];

-if (!userEnc?.serverEncryptedPrivateKey)
-throw new BadRequestError({ message: "Key handoff incomplete. Please try logging in again." });
+if (!userEnc) throw new BadRequestError({ message: "User encryption not found" });

 const token = await generateUserTokens({
 user: { ...userEnc, id: userEnc.userId },
@@ -32,8 +32,8 @@ import {
 keyAlgorithmToAlgCfg
 } from "../certificate-authority-fns";
 import { TCertificateAuthoritySecretDALFactory } from "../certificate-authority-secret-dal";
-import { TIssueCertWithTemplateDTO } from "./internal-certificate-authority-types";
 import { validateAndMapAltNameType } from "../certificate-authority-validators";
+import { TIssueCertWithTemplateDTO } from "./internal-certificate-authority-types";

 type TInternalCertificateAuthorityFnsDeps = {
 certificateAuthorityDAL: Pick<TCertificateAuthorityDALFactory, "findByIdWithAssociatedCa" | "findById">;

@@ -52,6 +52,7 @@ import {
 } from "../certificate-authority-fns";
 import { TCertificateAuthorityQueueFactory } from "../certificate-authority-queue";
 import { TCertificateAuthoritySecretDALFactory } from "../certificate-authority-secret-dal";
+import { validateAndMapAltNameType } from "../certificate-authority-validators";
 import { TInternalCertificateAuthorityDALFactory } from "./internal-certificate-authority-dal";
 import {
 TCreateCaDTO,

@@ -68,7 +69,6 @@ import {
 TSignIntermediateDTO,
 TUpdateCaDTO
 } from "./internal-certificate-authority-types";
-import { validateAndMapAltNameType } from "../certificate-authority-validators";

 type TInternalCertificateAuthorityServiceFactoryDep = {
 certificateAuthorityDAL: Pick<
@@ -254,29 +254,26 @@ export const transformToInfisicalFormatNamespaceToProjects = (
 let currentFolderId: string | undefined;
 let currentPath = "";

-if (path.includes("/")) {
-const pathParts = path.split("/").filter(Boolean);
-
-const folderParts = pathParts;
-
-// create nested folder structure for the entire path
-for (const folderName of folderParts) {
-currentPath = currentPath ? `${currentPath}/${folderName}` : folderName;
-const folderKey = `${namespace}:${mount}:${currentPath}`;
-
-if (!folderMap.has(folderKey)) {
-const folderId = uuidv4();
-folderMap.set(folderKey, folderId);
-folders.push({
-id: folderId,
-name: folderName,
-environmentId,
-parentFolderId: currentFolderId || environmentId
-});
-currentFolderId = folderId;
-} else {
-currentFolderId = folderMap.get(folderKey)!;
-}
-}
+const pathParts = path.split("/").filter(Boolean);
+const folderParts = pathParts;
+
+// create nested folder structure for the entire path
+for (const folderName of folderParts) {
+currentPath = currentPath ? `${currentPath}/${folderName}` : folderName;
+const folderKey = `${namespace}:${mount}:${currentPath}`;
+
+if (!folderMap.has(folderKey)) {
+const folderId = uuidv4();
+folderMap.set(folderKey, folderId);
+folders.push({
+id: folderId,
+name: folderName,
+environmentId,
+parentFolderId: currentFolderId || environmentId
+});
+currentFolderId = folderId;
+} else {
+currentFolderId = folderMap.get(folderKey)!;
+}
 }
 }

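The migration walks each Vault secret path segment by segment, creating one folder per segment and reusing any folder already recorded for that prefix. A small sketch of that construction under simplified, assumed types (crypto.randomUUID stands in for the project's uuid helper):

import { randomUUID } from "crypto";

type Folder = { id: string; name: string; environmentId: string; parentFolderId: string };

// Build one folder record per path segment, reusing folders already seen for the same prefix.
function buildFolders(path: string, environmentId: string, folderMap: Map<string, string>, folders: Folder[]) {
  let parentId = environmentId;
  let currentPath = "";
  for (const name of path.split("/").filter(Boolean)) {
    currentPath = currentPath ? `${currentPath}/${name}` : name;
    const key = `${environmentId}:${currentPath}`;
    let id = folderMap.get(key);
    if (!id) {
      id = randomUUID();
      folderMap.set(key, id);
      folders.push({ id, name, environmentId, parentFolderId: parentId });
    }
    parentId = id;
  }
  return parentId; // id of the deepest folder on this path
}

// Usage: two paths sharing a prefix only create "app" once.
const folders: Folder[] = [];
const map = new Map<string, string>();
buildFolders("app/db/creds", "env-1", map, folders);
buildFolders("app/api/keys", "env-1", map, folders);
console.log(folders.map((f) => f.name)); // [ 'app', 'db', 'creds', 'api', 'keys' ]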
@@ -38,7 +38,7 @@ type TIdentityAliCloudAuthServiceFactoryDep = {
 TIdentityAliCloudAuthDALFactory,
 "findOne" | "transaction" | "create" | "updateById" | "delete"
 >;
-identityOrgMembershipDAL: Pick<TIdentityOrgDALFactory, "findOne">;
+identityOrgMembershipDAL: Pick<TIdentityOrgDALFactory, "findOne" | "updateById">;
 licenseService: Pick<TLicenseServiceFactory, "getPlan">;
 permissionService: Pick<TPermissionServiceFactory, "getOrgPermission">;
 };

@@ -64,6 +64,8 @@ export const identityAliCloudAuthServiceFactory = ({
 identityId: identityAliCloudAuth.identityId
 });

+if (!identityMembershipOrg) throw new UnauthorizedError({ message: "Identity not attached to a organization" });
+
 const requestUrl = new URL("https://sts.aliyuncs.com");

 for (const key of Object.keys(params)) {

@@ -87,6 +89,14 @@ export const identityAliCloudAuthServiceFactory = ({

 // Generate the token
 const identityAccessToken = await identityAliCloudAuthDAL.transaction(async (tx) => {
+await identityOrgMembershipDAL.updateById(
+identityMembershipOrg.id,
+{
+lastLoginAuthMethod: IdentityAuthMethod.ALICLOUD_AUTH,
+lastLoginTime: new Date()
+},
+tx
+);
 const newToken = await identityAccessTokenDAL.create(
 {
 identityId: identityAliCloudAuth.identityId,

@@ -36,7 +36,7 @@ import {
 type TIdentityAwsAuthServiceFactoryDep = {
 identityAccessTokenDAL: Pick<TIdentityAccessTokenDALFactory, "create" | "delete">;
 identityAwsAuthDAL: Pick<TIdentityAwsAuthDALFactory, "findOne" | "transaction" | "create" | "updateById" | "delete">;
-identityOrgMembershipDAL: Pick<TIdentityOrgDALFactory, "findOne">;
+identityOrgMembershipDAL: Pick<TIdentityOrgDALFactory, "findOne" | "updateById">;
 licenseService: Pick<TLicenseServiceFactory, "getPlan">;
 permissionService: Pick<TPermissionServiceFactory, "getOrgPermission">;
 };

@@ -91,6 +91,7 @@ export const identityAwsAuthServiceFactory = ({
 }

 const identityMembershipOrg = await identityOrgMembershipDAL.findOne({ identityId: identityAwsAuth.identityId });
+if (!identityMembershipOrg) throw new UnauthorizedError({ message: "Identity not attached to a organization" });

 const headers: TAwsGetCallerIdentityHeaders = JSON.parse(Buffer.from(iamRequestHeaders, "base64").toString());
 const body: string = Buffer.from(iamRequestBody, "base64").toString();

@@ -152,6 +153,14 @@ export const identityAwsAuthServiceFactory = ({
 }

 const identityAccessToken = await identityAwsAuthDAL.transaction(async (tx) => {
+await identityOrgMembershipDAL.updateById(
+identityMembershipOrg.id,
+{
+lastLoginAuthMethod: IdentityAuthMethod.AWS_AUTH,
+lastLoginTime: new Date()
+},
+tx
+);
 const newToken = await identityAccessTokenDAL.create(
 {
 identityId: identityAwsAuth.identityId,

@@ -33,7 +33,7 @@ type TIdentityAzureAuthServiceFactoryDep = {
 TIdentityAzureAuthDALFactory,
 "findOne" | "transaction" | "create" | "updateById" | "delete"
 >;
-identityOrgMembershipDAL: Pick<TIdentityOrgDALFactory, "findOne">;
+identityOrgMembershipDAL: Pick<TIdentityOrgDALFactory, "findOne" | "updateById">;
 identityAccessTokenDAL: Pick<TIdentityAccessTokenDALFactory, "create" | "delete">;
 permissionService: Pick<TPermissionServiceFactory, "getOrgPermission">;
 licenseService: Pick<TLicenseServiceFactory, "getPlan">;

@@ -80,6 +80,14 @@ export const identityAzureAuthServiceFactory = ({
 }

 const identityAccessToken = await identityAzureAuthDAL.transaction(async (tx) => {
+await identityOrgMembershipDAL.updateById(
+identityMembershipOrg.id,
+{
+lastLoginAuthMethod: IdentityAuthMethod.AZURE_AUTH,
+lastLoginTime: new Date()
+},
+tx
+);
 const newToken = await identityAccessTokenDAL.create(
 {
 identityId: identityAzureAuth.identityId,
@@ -31,7 +31,7 @@ import {

 type TIdentityGcpAuthServiceFactoryDep = {
 identityGcpAuthDAL: Pick<TIdentityGcpAuthDALFactory, "findOne" | "transaction" | "create" | "updateById" | "delete">;
-identityOrgMembershipDAL: Pick<TIdentityOrgDALFactory, "findOne">;
+identityOrgMembershipDAL: Pick<TIdentityOrgDALFactory, "findOne" | "updateById">;
 identityAccessTokenDAL: Pick<TIdentityAccessTokenDALFactory, "create" | "delete">;
 permissionService: Pick<TPermissionServiceFactory, "getOrgPermission">;
 licenseService: Pick<TLicenseServiceFactory, "getPlan">;

@@ -119,6 +119,14 @@ export const identityGcpAuthServiceFactory = ({
 }

 const identityAccessToken = await identityGcpAuthDAL.transaction(async (tx) => {
+await identityOrgMembershipDAL.updateById(
+identityMembershipOrg.id,
+{
+lastLoginAuthMethod: IdentityAuthMethod.GCP_AUTH,
+lastLoginTime: new Date()
+},
+tx
+);
 const newToken = await identityAccessTokenDAL.create(
 {
 identityId: identityGcpAuth.identityId,

@@ -43,7 +43,7 @@ import {

 type TIdentityJwtAuthServiceFactoryDep = {
 identityJwtAuthDAL: TIdentityJwtAuthDALFactory;
-identityOrgMembershipDAL: Pick<TIdentityOrgDALFactory, "findOne">;
+identityOrgMembershipDAL: Pick<TIdentityOrgDALFactory, "findOne" | "updateById">;
 identityAccessTokenDAL: Pick<TIdentityAccessTokenDALFactory, "create" | "delete">;
 permissionService: Pick<TPermissionServiceFactory, "getOrgPermission">;
 licenseService: Pick<TLicenseServiceFactory, "getPlan">;

@@ -209,6 +209,14 @@ export const identityJwtAuthServiceFactory = ({
 }

 const identityAccessToken = await identityJwtAuthDAL.transaction(async (tx) => {
+await identityOrgMembershipDAL.updateById(
+identityMembershipOrg.id,
+{
+lastLoginAuthMethod: IdentityAuthMethod.JWT_AUTH,
+lastLoginTime: new Date()
+},
+tx
+);
 const newToken = await identityAccessTokenDAL.create(
 {
 identityId: identityJwtAuth.identityId,

@@ -49,7 +49,7 @@ type TIdentityKubernetesAuthServiceFactoryDep = {
 "create" | "findOne" | "transaction" | "updateById" | "delete"
 >;
 identityAccessTokenDAL: Pick<TIdentityAccessTokenDALFactory, "create" | "delete">;
-identityOrgMembershipDAL: Pick<TIdentityOrgDALFactory, "findOne" | "findById">;
+identityOrgMembershipDAL: Pick<TIdentityOrgDALFactory, "findOne" | "findById" | "updateById">;
 permissionService: Pick<TPermissionServiceFactory, "getOrgPermission">;
 licenseService: Pick<TLicenseServiceFactory, "getPlan">;
 kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">;

@@ -380,6 +380,14 @@ export const identityKubernetesAuthServiceFactory = ({
 }

 const identityAccessToken = await identityKubernetesAuthDAL.transaction(async (tx) => {
+await identityOrgMembershipDAL.updateById(
+identityMembershipOrg.id,
+{
+lastLoginAuthMethod: IdentityAuthMethod.KUBERNETES_AUTH,
+lastLoginTime: new Date()
+},
+tx
+);
 const newToken = await identityAccessTokenDAL.create(
 {
 identityId: identityKubernetesAuth.identityId,

@@ -44,7 +44,7 @@ type TIdentityLdapAuthServiceFactoryDep = {
 TIdentityLdapAuthDALFactory,
 "findOne" | "transaction" | "create" | "updateById" | "delete"
 >;
-identityOrgMembershipDAL: Pick<TIdentityOrgDALFactory, "findOne">;
+identityOrgMembershipDAL: Pick<TIdentityOrgDALFactory, "findOne" | "updateById">;
 licenseService: Pick<TLicenseServiceFactory, "getPlan">;
 permissionService: Pick<TPermissionServiceFactory, "getOrgPermission">;
 kmsService: TKmsServiceFactory;

@@ -144,6 +144,14 @@ export const identityLdapAuthServiceFactory = ({
 }

 const identityAccessToken = await identityLdapAuthDAL.transaction(async (tx) => {
+await identityOrgMembershipDAL.updateById(
+identityMembershipOrg.id,
+{
+lastLoginAuthMethod: IdentityAuthMethod.LDAP_AUTH,
+lastLoginTime: new Date()
+},
+tx
+);
 const newToken = await identityAccessTokenDAL.create(
 {
 identityId: identityLdapAuth.identityId,
@@ -36,7 +36,7 @@ import {
 type TIdentityOciAuthServiceFactoryDep = {
 identityAccessTokenDAL: Pick<TIdentityAccessTokenDALFactory, "create" | "delete">;
 identityOciAuthDAL: Pick<TIdentityOciAuthDALFactory, "findOne" | "transaction" | "create" | "updateById" | "delete">;
-identityOrgMembershipDAL: Pick<TIdentityOrgDALFactory, "findOne">;
+identityOrgMembershipDAL: Pick<TIdentityOrgDALFactory, "findOne" | "updateById">;
 licenseService: Pick<TLicenseServiceFactory, "getPlan">;
 permissionService: Pick<TPermissionServiceFactory, "getOrgPermission">;
 };

@@ -57,6 +57,7 @@ export const identityOciAuthServiceFactory = ({
 }

 const identityMembershipOrg = await identityOrgMembershipDAL.findOne({ identityId: identityOciAuth.identityId });
+if (!identityMembershipOrg) throw new UnauthorizedError({ message: "Identity not attached to a organization" });

 // Validate OCI host format. Ensures that the host is in "identity.<region>.oraclecloud.com" format.
 if (!headers.host || !new RE2("^identity\\.([a-z]{2}-[a-z]+-[1-9])\\.oraclecloud\\.com$").test(headers.host)) {

@@ -91,6 +92,14 @@ export const identityOciAuthServiceFactory = ({

 // Generate the token
 const identityAccessToken = await identityOciAuthDAL.transaction(async (tx) => {
+await identityOrgMembershipDAL.updateById(
+identityMembershipOrg.id,
+{
+lastLoginAuthMethod: IdentityAuthMethod.OCI_AUTH,
+lastLoginTime: new Date()
+},
+tx
+);
 const newToken = await identityAccessTokenDAL.create(
 {
 identityId: identityOciAuth.identityId,

@@ -43,7 +43,7 @@ import {

 type TIdentityOidcAuthServiceFactoryDep = {
 identityOidcAuthDAL: TIdentityOidcAuthDALFactory;
-identityOrgMembershipDAL: Pick<TIdentityOrgDALFactory, "findOne">;
+identityOrgMembershipDAL: Pick<TIdentityOrgDALFactory, "findOne" | "updateById">;
 identityAccessTokenDAL: Pick<TIdentityAccessTokenDALFactory, "create" | "delete">;
 permissionService: Pick<TPermissionServiceFactory, "getOrgPermission">;
 licenseService: Pick<TLicenseServiceFactory, "getPlan">;

@@ -178,6 +178,14 @@ export const identityOidcAuthServiceFactory = ({
 }

 const identityAccessToken = await identityOidcAuthDAL.transaction(async (tx) => {
+await identityOrgMembershipDAL.updateById(
+identityMembershipOrg.id,
+{
+lastLoginAuthMethod: IdentityAuthMethod.OIDC_AUTH,
+lastLoginTime: new Date()
+},
+tx
+);
 const newToken = await identityAccessTokenDAL.create(
 {
 identityId: identityOidcAuth.identityId,

@@ -30,7 +30,7 @@ type TIdentityTlsCertAuthServiceFactoryDep = {
 TIdentityTlsCertAuthDALFactory,
 "findOne" | "transaction" | "create" | "updateById" | "delete"
 >;
-identityOrgMembershipDAL: Pick<TIdentityOrgDALFactory, "findOne">;
+identityOrgMembershipDAL: Pick<TIdentityOrgDALFactory, "findOne" | "updateById">;
 licenseService: Pick<TLicenseServiceFactory, "getPlan">;
 permissionService: Pick<TPermissionServiceFactory, "getOrgPermission">;
 kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">;

@@ -118,6 +118,14 @@ export const identityTlsCertAuthServiceFactory = ({

 // Generate the token
 const identityAccessToken = await identityTlsCertAuthDAL.transaction(async (tx) => {
+await identityOrgMembershipDAL.updateById(
+identityMembershipOrg.id,
+{
+lastLoginAuthMethod: IdentityAuthMethod.TLS_CERT_AUTH,
+lastLoginTime: new Date()
+},
+tx
+);
 const newToken = await identityAccessTokenDAL.create(
 {
 identityId: identityTlsCertAuth.identityId,

@@ -35,7 +35,7 @@ type TIdentityTokenAuthServiceFactoryDep = {
 TIdentityTokenAuthDALFactory,
 "transaction" | "create" | "findOne" | "updateById" | "delete"
 >;
-identityOrgMembershipDAL: Pick<TIdentityOrgDALFactory, "findOne">;
+identityOrgMembershipDAL: Pick<TIdentityOrgDALFactory, "findOne" | "updateById">;
 identityAccessTokenDAL: Pick<
 TIdentityAccessTokenDALFactory,
 "create" | "find" | "update" | "findById" | "findOne" | "updateById" | "delete"

@@ -345,6 +345,14 @@ export const identityTokenAuthServiceFactory = ({
 const identityTokenAuth = await identityTokenAuthDAL.findOne({ identityId });

 const identityAccessToken = await identityTokenAuthDAL.transaction(async (tx) => {
+await identityOrgMembershipDAL.updateById(
+identityMembershipOrg.id,
+{
+lastLoginAuthMethod: IdentityAuthMethod.TOKEN_AUTH,
+lastLoginTime: new Date()
+},
+tx
+);
 const newToken = await identityAccessTokenDAL.create(
 {
 identityId: identityTokenAuth.identityId,

@@ -59,6 +59,11 @@ export const identityUaServiceFactory = ({
 }

 const identityMembershipOrg = await identityOrgMembershipDAL.findOne({ identityId: identityUa.identityId });
+if (!identityMembershipOrg) {
+throw new NotFoundError({
+message: "No identity with the org membership was found"
+});
+}

 checkIPAgainstBlocklist({
 ipAddress: ip,

@@ -127,7 +132,14 @@ export const identityUaServiceFactory = ({

 const identityAccessToken = await identityUaDAL.transaction(async (tx) => {
 const uaClientSecretDoc = await identityUaClientSecretDAL.incrementUsage(validClientSecretInfo!.id, tx);
+await identityOrgMembershipDAL.updateById(
+identityMembershipOrg.id,
+{
+lastLoginAuthMethod: IdentityAuthMethod.UNIVERSAL_AUTH,
+lastLoginTime: new Date()
+},
+tx
+);
 const newToken = await identityAccessTokenDAL.create(
 {
 identityId: identityUa.identityId,
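Every identity auth method above applies the same pattern: the lastLoginAuthMethod/lastLoginTime stamp and the access-token insert happen inside one DAL transaction, so a failed token write never records a phantom login. A generic sketch of that pattern with hypothetical, simplified DAL interfaces (the names are placeholders, not the project's types):

// Hypothetical, simplified DAL shapes for illustration only.
type Tx = unknown;
interface OrgMembershipDAL {
  updateById(id: string, data: { lastLoginAuthMethod: string; lastLoginTime: Date }, tx?: Tx): Promise<void>;
}
interface AccessTokenDAL {
  create(data: { identityId: string }, tx?: Tx): Promise<{ id: string; identityId: string }>;
}
interface AuthDAL {
  transaction<T>(fn: (tx: Tx) => Promise<T>): Promise<T>;
}

// Issue a token and record the login atomically: if either write fails, both roll back.
async function loginIdentity(
  deps: { authDAL: AuthDAL; orgMembershipDAL: OrgMembershipDAL; accessTokenDAL: AccessTokenDAL },
  membershipId: string,
  identityId: string,
  authMethod: string
) {
  return deps.authDAL.transaction(async (tx) => {
    await deps.orgMembershipDAL.updateById(
      membershipId,
      { lastLoginAuthMethod: authMethod, lastLoginTime: new Date() },
      tx
    );
    return deps.accessTokenDAL.create({ identityId }, tx);
  });
}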
@@ -254,6 +254,8 @@ export const identityOrgDALFactory = (db: TDbClient) => {
 db.ref("role").withSchema("paginatedIdentity"),
 db.ref("roleId").withSchema("paginatedIdentity"),
 db.ref("orgId").withSchema("paginatedIdentity"),
+db.ref("lastLoginAuthMethod").withSchema("paginatedIdentity"),
+db.ref("lastLoginTime").withSchema("paginatedIdentity"),
 db.ref("createdAt").withSchema("paginatedIdentity"),
 db.ref("updatedAt").withSchema("paginatedIdentity"),
 db.ref("identityId").withSchema("paginatedIdentity").as("identityId"),

@@ -319,7 +321,9 @@ export const identityOrgDALFactory = (db: TDbClient) => {
 ldapId,
 tlsCertId,
 createdAt,
-updatedAt
+updatedAt,
+lastLoginAuthMethod,
+lastLoginTime
 }) => ({
 role,
 roleId,

@@ -328,6 +332,8 @@ export const identityOrgDALFactory = (db: TDbClient) => {
 orgId,
 createdAt,
 updatedAt,
+lastLoginAuthMethod,
+lastLoginTime,
 customRole: roleId
 ? {
 id: crId,

@@ -497,6 +503,8 @@ export const identityOrgDALFactory = (db: TDbClient) => {
 db.ref("orgId").withSchema(TableName.IdentityOrgMembership),
 db.ref("createdAt").withSchema(TableName.IdentityOrgMembership),
 db.ref("updatedAt").withSchema(TableName.IdentityOrgMembership),
+db.ref("lastLoginAuthMethod").withSchema(TableName.IdentityOrgMembership),
+db.ref("lastLoginTime").withSchema(TableName.IdentityOrgMembership),
 db.ref("identityId").withSchema(TableName.IdentityOrgMembership).as("identityId"),
 db.ref("name").withSchema(TableName.Identity).as("identityName"),
 db.ref("hasDeleteProtection").withSchema(TableName.Identity),

@@ -531,10 +539,10 @@ export const identityOrgDALFactory = (db: TDbClient) => {
 } else if (orderBy === OrgIdentityOrderBy.Role) {
 void query.orderByRaw(
 `
 CASE
 WHEN ??.role = ?
 THEN ??.slug
 ELSE ??.role
 END ?
 `,
 [

@@ -576,7 +584,9 @@ export const identityOrgDALFactory = (db: TDbClient) => {
 tokenId,
 ldapId,
 createdAt,
-updatedAt
+updatedAt,
+lastLoginTime,
+lastLoginAuthMethod
 }) => ({
 role,
 roleId,

@@ -586,6 +596,8 @@ export const identityOrgDALFactory = (db: TDbClient) => {
 orgId,
 createdAt,
 updatedAt,
+lastLoginTime,
+lastLoginAuthMethod,
 customRole: roleId
 ? {
 id: crId,
@@ -462,6 +462,54 @@ export const buildTeamsPayload = (notification: TNotification) => {
 };
 }

+case TriggerFeature.ACCESS_REQUEST_UPDATED: {
+const { payload } = notification;
+
+const adaptiveCard = {
+type: "AdaptiveCard",
+$schema: "http://adaptivecards.io/schemas/adaptive-card.json",
+version: "1.5",
+body: [
+{
+type: "TextBlock",
+text: "Updated access approval request pending for review",
+weight: "Bolder",
+size: "Large"
+},
+{
+type: "TextBlock",
+text: `${payload.editorFullName} (${payload.editorEmail}) has updated the ${
+payload.isTemporary ? "temporary" : "permanent"
+} access request from ${payload.requesterFullName} (${payload.requesterEmail}) to ${payload.secretPath} in the ${payload.environment} environment of ${payload.projectName}.`,
+wrap: true
+},
+{
+type: "TextBlock",
+text: `The following permissions are requested: ${payload.permissions.join(", ")}`,
+wrap: true
+},
+payload.editNote
+? {
+type: "TextBlock",
+text: `**Editor Note**: ${payload.editNote}`,
+wrap: true
+}
+: null
+].filter(Boolean),
+actions: [
+{
+type: "Action.OpenUrl",
+title: "View request in Infisical",
+url: payload.approvalUrl
+}
+]
+};
+
+return {
+adaptiveCard
+};
+}
+
 default: {
 throw new BadRequestError({
 message: "Teams notification type not supported."
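The new ACCESS_REQUEST_UPDATED branch only builds the Adaptive Card; delivery to Teams typically wraps the card in a standard message-attachment envelope. A hedged sketch of posting such a card to an incoming-webhook URL (the URL is a placeholder and the envelope is the standard Teams format, not code from this diff):

async function postAdaptiveCard(webhookUrl: string, adaptiveCard: Record<string, unknown>) {
  // Teams incoming webhooks expect the card wrapped in a "message" attachment envelope.
  const envelope = {
    type: "message",
    attachments: [
      {
        contentType: "application/vnd.microsoft.card.adaptive",
        content: adaptiveCard
      }
    ]
  };

  const res = await fetch(webhookUrl, {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify(envelope)
  });
  if (!res.ok) throw new Error(`Teams webhook responded with ${res.status}`);
}

// Usage (placeholder URL):
// await postAdaptiveCard("https://example.webhook.office.com/webhookb2/...", adaptiveCard);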
@@ -6,8 +6,6 @@ import { TPermissionServiceFactory } from "@app/ee/services/permission/permissio
 import { BadRequestError, NotFoundError } from "@app/lib/errors";

 import { TProjectDALFactory } from "../project/project-dal";
-import { TProjectBotDALFactory } from "../project-bot/project-bot-dal";
-import { TProjectKeyDALFactory } from "../project-key/project-key-dal";
 import { TProjectMembershipDALFactory } from "../project-membership/project-membership-dal";
 import { TProjectUserMembershipRoleDALFactory } from "../project-membership/project-user-membership-role-dal";
 import { SmtpTemplates, TSmtpService } from "../smtp/smtp-service";

@@ -20,8 +18,6 @@ type TOrgAdminServiceFactoryDep = {
 TProjectMembershipDALFactory,
 "findOne" | "create" | "transaction" | "delete" | "findAllProjectMembers"
 >;
-projectKeyDAL: Pick<TProjectKeyDALFactory, "findLatestProjectKey" | "create">;
-projectBotDAL: Pick<TProjectBotDALFactory, "findOne">;
 projectUserMembershipRoleDAL: Pick<TProjectUserMembershipRoleDALFactory, "create" | "delete">;
 smtpService: Pick<TSmtpService, "sendMail">;
 };

@@ -32,8 +28,6 @@ export const orgAdminServiceFactory = ({
 permissionService,
 projectDAL,
 projectMembershipDAL,
-projectKeyDAL,
-projectBotDAL,
 projectUserMembershipRoleDAL,
 smtpService
 }: TOrgAdminServiceFactoryDep) => {

@@ -119,28 +113,6 @@ export const orgAdminServiceFactory = ({
 return { isExistingMember: true, membership: projectMembership };
 }

-// missing membership thus add admin back as admin to project
-const ghostUser = await projectDAL.findProjectGhostUser(projectId);
-if (!ghostUser) {
-throw new NotFoundError({
-message: `Project owner of project with ID '${projectId}' not found`
-});
-}
-
-const ghostUserLatestKey = await projectKeyDAL.findLatestProjectKey(ghostUser.id, projectId);
-if (!ghostUserLatestKey) {
-throw new NotFoundError({
-message: `Project owner's latest key of project with ID '${projectId}' not found`
-});
-}
-
-const bot = await projectBotDAL.findOne({ projectId });
-if (!bot) {
-throw new NotFoundError({
-message: `Project bot for project with ID '${projectId}' not found`
-});
-}
-
 const updatedMembership = await projectMembershipDAL.transaction(async (tx) => {
 const newProjectMembership = await projectMembershipDAL.create(
 {
@@ -32,6 +32,8 @@ export const orgMembershipDALFactory = (db: TDbClient) => {
 db.ref("roleId").withSchema(TableName.OrgMembership),
 db.ref("status").withSchema(TableName.OrgMembership),
 db.ref("isActive").withSchema(TableName.OrgMembership),
+db.ref("lastLoginAuthMethod").withSchema(TableName.OrgMembership),
+db.ref("lastLoginTime").withSchema(TableName.OrgMembership),
 db.ref("email").withSchema(TableName.Users),
 db.ref("username").withSchema(TableName.Users),
 db.ref("firstName").withSchema(TableName.Users),

@@ -64,7 +66,9 @@ export const orgMembershipDALFactory = (db: TDbClient) => {
 role,
 status,
 isActive,
-inviteEmail
+inviteEmail,
+lastLoginAuthMethod,
+lastLoginTime
 }) => ({
 roleId,
 orgId,

@@ -73,6 +77,8 @@ export const orgMembershipDALFactory = (db: TDbClient) => {
 status,
 isActive,
 inviteEmail,
+lastLoginAuthMethod,
+lastLoginTime,
 user: {
 id: userId,
 email,
@@ -285,6 +285,8 @@ export const orgDALFactory = (db: TDbClient) => {
 db.ref("roleId").withSchema(TableName.OrgMembership),
 db.ref("status").withSchema(TableName.OrgMembership),
 db.ref("isActive").withSchema(TableName.OrgMembership),
+db.ref("lastLoginAuthMethod").withSchema(TableName.OrgMembership),
+db.ref("lastLoginTime").withSchema(TableName.OrgMembership),
 db.ref("email").withSchema(TableName.Users),
 db.ref("isEmailVerified").withSchema(TableName.Users),
 db.ref("username").withSchema(TableName.Users),

@@ -628,6 +630,25 @@ export const orgDALFactory = (db: TDbClient) => {
 }
 };

+const findIdentityOrganization = async (
+identityId: string
+): Promise<{ id: string; name: string; slug: string; role: string }> => {
+try {
+const org = await db
+.replicaNode()(TableName.IdentityOrgMembership)
+.where({ identityId })
+.join(TableName.Organization, `${TableName.IdentityOrgMembership}.orgId`, `${TableName.Organization}.id`)
+.select(db.ref("id").withSchema(TableName.Organization).as("id"))
+.select(db.ref("name").withSchema(TableName.Organization).as("name"))
+.select(db.ref("slug").withSchema(TableName.Organization).as("slug"))
+.select(db.ref("role").withSchema(TableName.IdentityOrgMembership).as("role"));
+
+return org?.[0];
+} catch (error) {
+throw new DatabaseError({ error, name: "Find identity organization" });
+}
+};
+
 return withTransaction(db, {
 ...orgOrm,
 findOrgByProjectId,

@@ -650,6 +671,7 @@ export const orgDALFactory = (db: TDbClient) => {
 updateMembershipById,
 deleteMembershipById,
 deleteMembershipsById,
-updateMembership
+updateMembership,
+findIdentityOrganization
 });
 };
@@ -198,6 +198,15 @@ export const orgServiceFactory = ({
 // Filter out orgs where the membership object is an invitation
 return orgs.filter((org) => org.userStatus !== "invited");
 };
+
+/*
+* Get all organization an identity is part of
+* */
+const findIdentityOrganization = async (identityId: string) => {
+const org = await orgDAL.findIdentityOrganization(identityId);
+
+return org;
+};
 /*
 * Get all workspace members
 * */

@@ -1403,6 +1412,7 @@ export const orgServiceFactory = ({
 findOrganizationById,
 findAllOrgMembers,
 findAllOrganizationOfUser,
+findIdentityOrganization,
 inviteUserToOrganization,
 verifyUserToOrg,
 updateOrg,
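A possible way for a machine identity to consume the new GET /details route sketched earlier: the /api/v1/identities prefix is an assumption about where the router is mounted, not a documented path, and the response shape simply mirrors the zod schema shown in the diff.

async function getIdentityOrgDetails(baseUrl: string, accessToken: string) {
  // Assumed mount point; adjust to the actual identity router prefix.
  const res = await fetch(`${baseUrl}/api/v1/identities/details`, {
    headers: { Authorization: `Bearer ${accessToken}` }
  });
  if (!res.ok) throw new Error(`Request failed with ${res.status}`);

  // Shape mirrors the schema in the diff: { identityDetails: { organization: { id, name, slug } } }
  const { identityDetails } = (await res.json()) as {
    identityDetails: { organization: { id: string; name: string; slug: string } };
  };
  return identityDetails.organization;
}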
Some files were not shown because too many files have changed in this diff.