mirror of https://github.com/Infisical/infisical.git (synced 2025-08-16 20:48:26 +00:00)
Compare commits
154 Commits
audit-log-… ... aws-parame…
SHA1:
d24f5a57a8, a7847f177c, 48e5f550e9, f352f98374, ea4bb0a062, 3d6be7b1b2, 12558e8614, 987f87e562, 4d06d5cbb0, bad934de48,
90b93fbd15, b0d24de008, 4ccb5dc9b0, 930425d5dc, ca2825ba95, b8fa4d5255, 0d3cb2d41a, e0d19d7b65, f5a0d8be78, c7ae7be493,
fa54c406dc, 1a2eef3ba6, 0c562150f5, 6fde132804, 799721782a, 86d430f911, 7c28ee844e, d5390fcafc, 1b40f5d475, 3cec1b4021,
97b2c534a7, d71362ccc3, e4d90eb055, f16dca45d9, 118c28df54, 249b2933da, 272336092d, 6f05a6d82c, 84ebdb8503, b464941fbc,
77e8d8a86d, c61dd1ee6e, 9db8573e72, ce8653e908, fd4cdc2769, 90a1cc9330, 78bfd0922a, 458dcd31c1, 372537f0b6, e173ff3828,
2baadf60d1, e13fc93bac, 6b14fbcce2, 86fbe5cc24, 3f7862a345, 9661458469, c7c1eb0f5f, a1e48a1795, d14e80b771, 0264d37d9b,
11a1604e14, f788dee398, 88120ed45e, d6a377416d, dbbd58ffb7, 5d2beb3604, ec65e0e29c, b819848058, 1b0ef540fe, 4496241002,
52e32484ce, 8b497699d4, be73f62226, 102620ff09, 994ee88852, 770e25b895, fcf3bdb440, 89c11b5541, 5f764904e2, 1a75384dba,
50f434cd80, d879cfd90c, ca1f5eaca3, 04086376ea, 364027a88a, ca110d11b0, 4e8f404f16, 22abb78f48, 24f11406e1, d5d67c82b2,
35cfcf1f0f, 368e00ea71, 2c8cfeb826, 23237dd055, 70d22f90ec, e10aec3170, 0b11dcd627, d88a473b47, 4f52400887, 34eb9f475a,
902a0b0c56, d1e8ae3c98, 5c9243d691, 35d1eabf49, b6902160ce, fbfc51ee93, 9e6294786f, 9d92ffce95, 9193418f8b, 847c50d2d4,
efa043c3d2, 7e94791635, eedc5f533e, fc5d42baf0, b95c35620a, fa867e5068, 8851faec65, 47fb666dc7, 569edd2852, 676ebaf3c2,
adb3185042, 8da0a4d846, eebf080e3c, 97be31f11e, 667cceebc0, 1ad02e2da6, 93445d96b3, e105a5f7da, 72b80e1fd7, 6429adfaf6,
50e40e8bcf, 6100086338, 000dd6c223, 389e2e1fb7, 88fcbcadd4, 60dc1d1e00, 2d68f9aa16, e694293ebe, ef6f5ecc4b, 56f5249925,
df5b3fa8dc, 035ac0fe8d, c12408eb81, 13194296c6, be20a507ac, 63cf36c722, 4dcd3ed06c, 1b32de5c5b, 522795871e, 5c63955fde,
d7f3892b73, 33af2fb2b8, c568f40954, 28f87b8b27
123 .github/workflows/build-docker-image-to-prod.yml (vendored)
@@ -1,123 +0,0 @@
name: Release production images (frontend, backend)
on:
  push:
    tags:
      - "infisical/v*.*.*"
      - "!infisical/v*.*.*-postgres"

jobs:
  backend-image:
    name: Build backend image
    runs-on: ubuntu-latest
    steps:
      - name: Extract version from tag
        id: extract_version
        run: echo "::set-output name=version::${GITHUB_REF_NAME#infisical/}"
      - name: ☁️ Checkout source
        uses: actions/checkout@v3
      - name: 📦 Install dependencies to test all dependencies
        run: npm ci --only-production
        working-directory: backend
      # - name: 🧪 Run tests
      #   run: npm run test:ci
      #   working-directory: backend
      - name: Save commit hashes for tag
        id: commit
        uses: pr-mpt/actions-commit-hash@v2
      - name: 🔧 Set up Docker Buildx
        uses: docker/setup-buildx-action@v2
      - name: 🐋 Login to Docker Hub
        uses: docker/login-action@v2
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}
      - name: Set up Depot CLI
        uses: depot/setup-action@v1
      - name: 📦 Build backend and export to Docker
        uses: depot/build-push-action@v1
        with:
          project: 64mmf0n610
          token: ${{ secrets.DEPOT_PROJECT_TOKEN }}
          load: true
          context: backend
          tags: infisical/infisical:test
          platforms: linux/amd64,linux/arm64
      - name: ⏻ Spawn backend container and dependencies
        run: |
          docker compose -f .github/resources/docker-compose.be-test.yml up --wait --quiet-pull
      - name: 🧪 Test backend image
        run: |
          ./.github/resources/healthcheck.sh infisical-backend-test
      - name: ⏻ Shut down backend container and dependencies
        run: |
          docker compose -f .github/resources/docker-compose.be-test.yml down
      - name: 🏗️ Build backend and push
        uses: depot/build-push-action@v1
        with:
          project: 64mmf0n610
          token: ${{ secrets.DEPOT_PROJECT_TOKEN }}
          push: true
          context: backend
          tags: |
            infisical/backend:${{ steps.commit.outputs.short }}
            infisical/backend:latest
            infisical/backend:${{ steps.extract_version.outputs.version }}
          platforms: linux/amd64,linux/arm64

  frontend-image:
    name: Build frontend image
    runs-on: ubuntu-latest
    steps:
      - name: Extract version from tag
        id: extract_version
        run: echo "::set-output name=version::${GITHUB_REF_NAME#infisical/}"
      - name: ☁️ Checkout source
        uses: actions/checkout@v3
      - name: Save commit hashes for tag
        id: commit
        uses: pr-mpt/actions-commit-hash@v2
      - name: 🔧 Set up Docker Buildx
        uses: docker/setup-buildx-action@v2
      - name: 🐋 Login to Docker Hub
        uses: docker/login-action@v2
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}
      - name: Set up Depot CLI
        uses: depot/setup-action@v1
      - name: 📦 Build frontend and export to Docker
        uses: depot/build-push-action@v1
        with:
          load: true
          token: ${{ secrets.DEPOT_PROJECT_TOKEN }}
          project: 64mmf0n610
          context: frontend
          tags: infisical/frontend:test
          platforms: linux/amd64,linux/arm64
          build-args: |
            POSTHOG_API_KEY=${{ secrets.PUBLIC_POSTHOG_API_KEY }}
            NEXT_INFISICAL_PLATFORM_VERSION=${{ steps.extract_version.outputs.version }}
      - name: ⏻ Spawn frontend container
        run: |
          docker run -d --rm --name infisical-frontend-test infisical/frontend:test
      - name: 🧪 Test frontend image
        run: |
          ./.github/resources/healthcheck.sh infisical-frontend-test
      - name: ⏻ Shut down frontend container
        run: |
          docker stop infisical-frontend-test
      - name: 🏗️ Build frontend and push
        uses: depot/build-push-action@v1
        with:
          project: 64mmf0n610
          push: true
          token: ${{ secrets.DEPOT_PROJECT_TOKEN }}
          context: frontend
          tags: |
            infisical/frontend:${{ steps.commit.outputs.short }}
            infisical/frontend:latest
            infisical/frontend:${{ steps.extract_version.outputs.version }}
          platforms: linux/amd64,linux/arm64
          build-args: |
            POSTHOG_API_KEY=${{ secrets.PUBLIC_POSTHOG_API_KEY }}
            NEXT_INFISICAL_PLATFORM_VERSION=${{ steps.extract_version.outputs.version }}
82 .github/workflows/nightly-tag-generation.yml (vendored, new file)
@@ -0,0 +1,82 @@
name: Generate Nightly Tag

on:
  schedule:
    - cron: '0 0 * * *' # Run daily at midnight UTC
  workflow_dispatch: # Allow manual triggering for testing

permissions:
  contents: write

jobs:
  create-nightly-tag:
    runs-on: ubuntu-latest

    steps:
      - name: Checkout code
        uses: actions/checkout@v4
        with:
          fetch-depth: 0 # Fetch all history for tags
          token: ${{ secrets.GO_RELEASER_GITHUB_TOKEN }}

      - name: Configure Git
        run: |
          git config user.name "github-actions[bot]"
          git config user.email "github-actions[bot]@users.noreply.github.com"

      - name: Generate nightly tag
        run: |
          # Get the latest infisical production tag
          LATEST_STABLE_TAG=$(git tag --list | grep "^v[0-9].*$" | grep -v "nightly" | sort -V | tail -n1)

          if [ -z "$LATEST_STABLE_TAG" ]; then
            echo "No infisical production tags found, using v0.1.0"
            LATEST_STABLE_TAG="v0.1.0"
          fi

          echo "Latest production tag: $LATEST_STABLE_TAG"

          # Get current date in YYYYMMDD format
          DATE=$(date +%Y%m%d)

          # Base nightly tag name
          BASE_TAG="${LATEST_STABLE_TAG}-nightly-${DATE}"

          # Check if this exact tag already exists
          if git tag --list | grep -q "^${BASE_TAG}$"; then
            echo "Base tag ${BASE_TAG} already exists, finding next increment"

            # Find existing tags for this date and get the highest increment
            EXISTING_TAGS=$(git tag --list | grep "^${BASE_TAG}" | grep -E '\.[0-9]+$' || true)

            if [ -z "$EXISTING_TAGS" ]; then
              # No incremental tags exist, create .1
              NIGHTLY_TAG="${BASE_TAG}.1"
            else
              # Find the highest increment
              HIGHEST_INCREMENT=$(echo "$EXISTING_TAGS" | sed "s|^${BASE_TAG}\.||" | sort -n | tail -n1)
              NEXT_INCREMENT=$((HIGHEST_INCREMENT + 1))
              NIGHTLY_TAG="${BASE_TAG}.${NEXT_INCREMENT}"
            fi
          else
            # Base tag doesn't exist, use it
            NIGHTLY_TAG="$BASE_TAG"
          fi

          echo "Generated nightly tag: $NIGHTLY_TAG"
          echo "NIGHTLY_TAG=$NIGHTLY_TAG" >> $GITHUB_ENV
          echo "LATEST_PRODUCTION_TAG=$LATEST_STABLE_TAG" >> $GITHUB_ENV

          git tag "$NIGHTLY_TAG"
          git push origin "$NIGHTLY_TAG"
          echo "✅ Created and pushed nightly tag: $NIGHTLY_TAG"

      - name: Create GitHub Release
        uses: softprops/action-gh-release@v2
        with:
          tag_name: ${{ env.NIGHTLY_TAG }}
          name: ${{ env.NIGHTLY_TAG }}
          draft: false
          prerelease: true
          generate_release_notes: true
          make_latest: false
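For reference, a minimal TypeScript sketch of the tag-naming scheme the workflow above implements; the stable tag, date, and existing-tag list below are hypothetical examples, not values taken from this repository.

```typescript
// Sketch of the nightly tag naming used by the workflow above:
// <latest stable tag>-nightly-<YYYYMMDD>, with a numeric ".N" suffix
// appended when a tag for that date already exists.
function computeNightlyTag(latestStableTag: string, date: string, existingTags: string[]): string {
  const baseTag = `${latestStableTag}-nightly-${date}`;

  if (!existingTags.includes(baseTag)) {
    return baseTag; // first nightly of the day
  }

  // Collect existing ".N" increments for this base tag and take the highest.
  const increments = existingTags
    .filter((tag) => tag.startsWith(`${baseTag}.`))
    .map((tag) => Number(tag.slice(baseTag.length + 1)))
    .filter((n) => Number.isInteger(n) && n > 0);

  const next = increments.length ? Math.max(...increments) + 1 : 1;
  return `${baseTag}.${next}`;
}

// Hypothetical example: two nightly tags already exist for 2025-08-01.
console.log(
  computeNightlyTag("v0.99.0", "20250801", ["v0.99.0-nightly-20250801", "v0.99.0-nightly-20250801.1"])
); // -> "v0.99.0-nightly-20250801.2"
```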
@@ -2,7 +2,9 @@ name: Release standalone docker image
on:
  push:
    tags:
      - "infisical/v*.*.*-postgres"
      - "v*.*.*"
      - "v*.*.*-nightly-*"
      - "v*.*.*-nightly-*.*"

jobs:
  infisical-tests:
@@ -17,7 +19,7 @@ jobs:
    steps:
      - name: Extract version from tag
        id: extract_version
        run: echo "::set-output name=version::${GITHUB_REF_NAME#infisical/}"
        run: echo "::set-output name=version::${GITHUB_REF_NAME}"
      - name: ☁️ Checkout source
        uses: actions/checkout@v3
        with:
@@ -53,7 +55,7 @@ jobs:
          push: true
          context: .
          tags: |
            infisical/infisical:latest-postgres
            infisical/infisical:latest
            infisical/infisical:${{ steps.commit.outputs.short }}
            infisical/infisical:${{ steps.extract_version.outputs.version }}
          platforms: linux/amd64,linux/arm64
@@ -69,7 +71,7 @@ jobs:
    steps:
      - name: Extract version from tag
        id: extract_version
        run: echo "::set-output name=version::${GITHUB_REF_NAME#infisical/}"
        run: echo "::set-output name=version::${GITHUB_REF_NAME}"
      - name: ☁️ Checkout source
        uses: actions/checkout@v3
        with:
@@ -105,7 +107,7 @@ jobs:
          push: true
          context: .
          tags: |
            infisical/infisical-fips:latest-postgres
            infisical/infisical-fips:latest
            infisical/infisical-fips:${{ steps.commit.outputs.short }}
            infisical/infisical-fips:${{ steps.extract_version.outputs.version }}
          platforms: linux/amd64,linux/arm64
@@ -44,10 +44,7 @@ jobs:

      - name: Generate Helm Chart
        working-directory: k8-operator
        run: make helm

      - name: Update Helm Chart Version
        run: ./k8-operator/scripts/update-version.sh ${{ steps.extract_version.outputs.version }}
        run: make helm VERSION=${{ steps.extract_version.outputs.version }}

      - name: Debug - Check file changes
        run: |
15 .github/workflows/run-backend-tests.yml (vendored)
@@ -16,6 +16,16 @@ jobs:
    runs-on: ubuntu-latest
    timeout-minutes: 15
    steps:

      - name: Free up disk space
        run: |
          sudo rm -rf /usr/share/dotnet
          sudo rm -rf /opt/ghc
          sudo rm -rf "/usr/local/share/boost"
          sudo rm -rf "$AGENT_TOOLSDIRECTORY"
          docker system prune -af

      - name: ☁️ Checkout source
        uses: actions/checkout@v3
      - uses: KengoTODA/actions-setup-docker-compose@v1
@@ -34,6 +44,8 @@ jobs:
        working-directory: backend
      - name: Start postgres and redis
        run: touch .env && docker compose -f docker-compose.dev.yml up -d db redis
      - name: Start Secret Rotation testing databases
        run: docker compose -f docker-compose.e2e-dbs.yml up -d --wait --wait-timeout 300
      - name: Run unit test
        run: npm run test:unit
        working-directory: backend
@@ -41,6 +53,9 @@ jobs:
        run: npm run test:e2e
        working-directory: backend
        env:
          E2E_TEST_ORACLE_DB_19_HOST: ${{ secrets.E2E_TEST_ORACLE_DB_19_HOST }}
          E2E_TEST_ORACLE_DB_19_USERNAME: ${{ secrets.E2E_TEST_ORACLE_DB_19_USERNAME }}
          E2E_TEST_ORACLE_DB_19_PASSWORD: ${{ secrets.E2E_TEST_ORACLE_DB_19_PASSWORD }}
          REDIS_URL: redis://172.17.0.1:6379
          DB_CONNECTION_URI: postgres://infisical:infisical@172.17.0.1:5432/infisical?sslmode=disable
          AUTH_SECRET: something-random
@@ -50,3 +50,4 @@ docs/integrations/app-connections/zabbix.mdx:generic-api-key:91
docs/integrations/app-connections/bitbucket.mdx:generic-api-key:123
docs/integrations/app-connections/railway.mdx:generic-api-key:156
.github/workflows/validate-db-schemas.yml:generic-api-key:21
k8-operator/config/samples/universalAuthIdentitySecret.yaml:generic-api-key:8
@@ -1,34 +0,0 @@
import { TQueueServiceFactory } from "@app/queue";

export const mockQueue = (): TQueueServiceFactory => {
  const queues: Record<string, unknown> = {};
  const workers: Record<string, unknown> = {};
  const job: Record<string, unknown> = {};
  const events: Record<string, unknown> = {};

  return {
    queue: async (name, jobData) => {
      job[name] = jobData;
    },
    queuePg: async () => {},
    schedulePg: async () => {},
    initialize: async () => {},
    shutdown: async () => undefined,
    stopRepeatableJob: async () => true,
    start: (name, jobFn) => {
      queues[name] = jobFn;
      workers[name] = jobFn;
    },
    startPg: async () => {},
    listen: (name, event) => {
      events[name] = event;
    },
    getRepeatableJobs: async () => [],
    getDelayedJobs: async () => [],
    clearQueue: async () => {},
    stopJobById: async () => {},
    stopJobByIdPg: async () => {},
    stopRepeatableJobByJobId: async () => true,
    stopRepeatableJobByKey: async () => true
  };
};
726 backend/e2e-test/routes/v3/secret-rotations.spec.ts (new file)
@@ -0,0 +1,726 @@
/* eslint-disable no-promise-executor-return */
/* eslint-disable no-await-in-loop */
import knex from "knex";
import { v4 as uuidv4 } from "uuid";

import { seedData1 } from "@app/db/seed-data";

enum SecretRotationType {
  OracleDb = "oracledb",
  MySQL = "mysql",
  Postgres = "postgres"
}

type TGenericSqlCredentials = {
  host: string;
  port: number;
  username: string;
  password: string;
  database: string;
};

type TSecretMapping = {
  username: string;
  password: string;
};

type TDatabaseUserCredentials = {
  username: string;
};

const formatSqlUsername = (username: string) => `${username}_${uuidv4().slice(0, 8).replace(/-/g, "").toUpperCase()}`;

const getSecretValue = async (secretKey: string) => {
  const passwordSecret = await testServer.inject({
    url: `/api/v3/secrets/raw/${secretKey}`,
    method: "GET",
    query: {
      workspaceId: seedData1.projectV3.id,
      environment: seedData1.environment.slug
    },
    headers: {
      authorization: `Bearer ${jwtAuthToken}`
    }
  });

  expect(passwordSecret.statusCode).toBe(200);
  expect(passwordSecret.json().secret).toBeDefined();

  const passwordSecretJson = JSON.parse(passwordSecret.payload);

  return passwordSecretJson.secret.secretValue as string;
};

const deleteSecretRotation = async (id: string, type: SecretRotationType) => {
  const res = await testServer.inject({
    method: "DELETE",
    query: {
      deleteSecrets: "true",
      revokeGeneratedCredentials: "true"
    },
    url: `/api/v2/secret-rotations/${type}-credentials/${id}`,
    headers: {
      authorization: `Bearer ${jwtAuthToken}`
    }
  });

  expect(res.statusCode).toBe(200);
};

const deleteAppConnection = async (id: string, type: SecretRotationType) => {
  const res = await testServer.inject({
    method: "DELETE",
    url: `/api/v1/app-connections/${type}/${id}`,
    headers: {
      authorization: `Bearer ${jwtAuthToken}`
    }
  });

  expect(res.statusCode).toBe(200);
};

const createOracleDBAppConnection = async (credentials: TGenericSqlCredentials) => {
  const createOracleDBAppConnectionReqBody = {
    credentials: {
      database: credentials.database,
      host: credentials.host,
      username: credentials.username,
      password: credentials.password,
      port: credentials.port,
      sslEnabled: true,
      sslRejectUnauthorized: true
    },
    name: `oracle-db-${uuidv4()}`,
    description: "Test OracleDB App Connection",
    gatewayId: null,
    isPlatformManagedCredentials: false,
    method: "username-and-password"
  };

  const res = await testServer.inject({
    method: "POST",
    url: `/api/v1/app-connections/oracledb`,
    headers: {
      authorization: `Bearer ${jwtAuthToken}`
    },
    body: createOracleDBAppConnectionReqBody
  });

  const json = JSON.parse(res.payload);

  expect(res.statusCode).toBe(200);
  expect(json.appConnection).toBeDefined();

  return json.appConnection.id as string;
};
const createMySQLAppConnection = async (credentials: TGenericSqlCredentials) => {
  const createMySQLAppConnectionReqBody = {
    name: `mysql-test-${uuidv4()}`,
    description: "test-mysql",
    gatewayId: null,
    method: "username-and-password",
    credentials: {
      host: credentials.host,
      port: credentials.port,
      database: credentials.database,
      username: credentials.username,
      password: credentials.password,
      sslEnabled: false,
      sslRejectUnauthorized: true
    }
  };

  const res = await testServer.inject({
    method: "POST",
    url: `/api/v1/app-connections/mysql`,
    headers: {
      authorization: `Bearer ${jwtAuthToken}`
    },
    body: createMySQLAppConnectionReqBody
  });

  const json = JSON.parse(res.payload);

  expect(res.statusCode).toBe(200);
  expect(json.appConnection).toBeDefined();

  return json.appConnection.id as string;
};

const createPostgresAppConnection = async (credentials: TGenericSqlCredentials) => {
  const createPostgresAppConnectionReqBody = {
    credentials: {
      host: credentials.host,
      port: credentials.port,
      database: credentials.database,
      username: credentials.username,
      password: credentials.password,
      sslEnabled: false,
      sslRejectUnauthorized: true
    },
    name: `postgres-test-${uuidv4()}`,
    description: "test-postgres",
    gatewayId: null,
    method: "username-and-password"
  };

  const res = await testServer.inject({
    method: "POST",
    url: `/api/v1/app-connections/postgres`,
    headers: {
      authorization: `Bearer ${jwtAuthToken}`
    },
    body: createPostgresAppConnectionReqBody
  });

  const json = JSON.parse(res.payload);

  expect(res.statusCode).toBe(200);
  expect(json.appConnection).toBeDefined();

  return json.appConnection.id as string;
};

const createOracleInfisicalUsers = async (
  credentials: TGenericSqlCredentials,
  userCredentials: TDatabaseUserCredentials[]
) => {
  const client = knex({
    client: "oracledb",
    connection: {
      database: credentials.database,
      port: credentials.port,
      host: credentials.host,
      user: credentials.username,
      password: credentials.password,
      connectionTimeoutMillis: 10000,
      ssl: {
        // @ts-expect-error - this is a valid property for the ssl object
        sslServerDNMatch: true
      }
    }
  });

  for await (const { username } of userCredentials) {
    // check if user exists, and if it does, don't create it
    const existingUser = await client.raw(`SELECT * FROM all_users WHERE username = '${username}'`);

    if (!existingUser.length) {
      await client.raw(`CREATE USER ${username} IDENTIFIED BY "temporary_password"`);
    }
    await client.raw(`GRANT ALL PRIVILEGES TO ${username} WITH ADMIN OPTION`);
  }

  await client.destroy();
};
const createMySQLInfisicalUsers = async (
  credentials: TGenericSqlCredentials,
  userCredentials: TDatabaseUserCredentials[]
) => {
  const client = knex({
    client: "mysql2",
    connection: {
      database: credentials.database,
      port: credentials.port,
      host: credentials.host,
      user: credentials.username,
      password: credentials.password,
      connectionTimeoutMillis: 10000
    }
  });

  // Fix: Ensure root has GRANT OPTION privileges
  try {
    await client.raw("GRANT ALL PRIVILEGES ON *.* TO 'root'@'%' WITH GRANT OPTION;");
    await client.raw("FLUSH PRIVILEGES;");
  } catch (error) {
    // Ignore if already has privileges
  }

  for await (const { username } of userCredentials) {
    // check if user exists, and if it does, don't create it
    const existingUser = await client.raw(`SELECT * FROM mysql.user WHERE user = '${username}'`);

    if (!existingUser[0].length) {
      await client.raw(`CREATE USER '${username}'@'%' IDENTIFIED BY 'temporary_password';`);
    }

    await client.raw(`GRANT ALL PRIVILEGES ON \`${credentials.database}\`.* TO '${username}'@'%';`);
    await client.raw("FLUSH PRIVILEGES;");
  }

  await client.destroy();
};

const createPostgresInfisicalUsers = async (
  credentials: TGenericSqlCredentials,
  userCredentials: TDatabaseUserCredentials[]
) => {
  const client = knex({
    client: "pg",
    connection: {
      database: credentials.database,
      port: credentials.port,
      host: credentials.host,
      user: credentials.username,
      password: credentials.password,
      connectionTimeoutMillis: 10000
    }
  });

  for await (const { username } of userCredentials) {
    // check if user exists, and if it does, don't create it
    const existingUser = await client.raw("SELECT * FROM pg_catalog.pg_user WHERE usename = ?", [username]);

    if (!existingUser.rows.length) {
      await client.raw(`CREATE USER "${username}" WITH PASSWORD 'temporary_password'`);
    }

    await client.raw("GRANT ALL PRIVILEGES ON DATABASE ?? TO ??", [credentials.database, username]);
  }

  await client.destroy();
};
const createOracleDBSecretRotation = async (
  appConnectionId: string,
  credentials: TGenericSqlCredentials,
  userCredentials: TDatabaseUserCredentials[],
  secretMapping: TSecretMapping
) => {
  const now = new Date();
  const rotationTime = new Date(now.getTime() - 2 * 60 * 1000); // 2 minutes ago

  await createOracleInfisicalUsers(credentials, userCredentials);

  const createOracleDBSecretRotationReqBody = {
    parameters: userCredentials.reduce(
      (acc, user, index) => {
        acc[`username${index + 1}`] = user.username;
        return acc;
      },
      {} as Record<string, string>
    ),
    secretsMapping: {
      username: secretMapping.username,
      password: secretMapping.password
    },
    name: `test-oracle-${uuidv4()}`,
    description: "Test OracleDB Secret Rotation",
    secretPath: "/",
    isAutoRotationEnabled: true,
    rotationInterval: 5, // 5 seconds for testing
    rotateAtUtc: {
      hours: rotationTime.getUTCHours(),
      minutes: rotationTime.getUTCMinutes()
    },
    connectionId: appConnectionId,
    environment: seedData1.environment.slug,
    projectId: seedData1.projectV3.id
  };

  const res = await testServer.inject({
    method: "POST",
    url: `/api/v2/secret-rotations/oracledb-credentials`,
    headers: {
      authorization: `Bearer ${jwtAuthToken}`
    },
    body: createOracleDBSecretRotationReqBody
  });

  expect(res.statusCode).toBe(200);
  expect(res.json().secretRotation).toBeDefined();

  return res;
};

const createMySQLSecretRotation = async (
  appConnectionId: string,
  credentials: TGenericSqlCredentials,
  userCredentials: TDatabaseUserCredentials[],
  secretMapping: TSecretMapping
) => {
  const now = new Date();
  const rotationTime = new Date(now.getTime() - 2 * 60 * 1000); // 2 minutes ago

  await createMySQLInfisicalUsers(credentials, userCredentials);

  const createMySQLSecretRotationReqBody = {
    parameters: userCredentials.reduce(
      (acc, user, index) => {
        acc[`username${index + 1}`] = user.username;
        return acc;
      },
      {} as Record<string, string>
    ),
    secretsMapping: {
      username: secretMapping.username,
      password: secretMapping.password
    },
    name: `test-mysql-rotation-${uuidv4()}`,
    description: "Test MySQL Secret Rotation",
    secretPath: "/",
    isAutoRotationEnabled: true,
    rotationInterval: 5,
    rotateAtUtc: {
      hours: rotationTime.getUTCHours(),
      minutes: rotationTime.getUTCMinutes()
    },
    connectionId: appConnectionId,
    environment: seedData1.environment.slug,
    projectId: seedData1.projectV3.id
  };

  const res = await testServer.inject({
    method: "POST",
    url: `/api/v2/secret-rotations/mysql-credentials`,
    headers: {
      authorization: `Bearer ${jwtAuthToken}`
    },
    body: createMySQLSecretRotationReqBody
  });

  expect(res.statusCode).toBe(200);
  expect(res.json().secretRotation).toBeDefined();

  return res;
};

const createPostgresSecretRotation = async (
  appConnectionId: string,
  credentials: TGenericSqlCredentials,
  userCredentials: TDatabaseUserCredentials[],
  secretMapping: TSecretMapping
) => {
  const now = new Date();
  const rotationTime = new Date(now.getTime() - 2 * 60 * 1000); // 2 minutes ago

  await createPostgresInfisicalUsers(credentials, userCredentials);

  const createPostgresSecretRotationReqBody = {
    parameters: userCredentials.reduce(
      (acc, user, index) => {
        acc[`username${index + 1}`] = user.username;
        return acc;
      },
      {} as Record<string, string>
    ),
    secretsMapping: {
      username: secretMapping.username,
      password: secretMapping.password
    },
    name: `test-postgres-rotation-${uuidv4()}`,
    description: "Test Postgres Secret Rotation",
    secretPath: "/",
    isAutoRotationEnabled: true,
    rotationInterval: 5,
    rotateAtUtc: {
      hours: rotationTime.getUTCHours(),
      minutes: rotationTime.getUTCMinutes()
    },
    connectionId: appConnectionId,
    environment: seedData1.environment.slug,
    projectId: seedData1.projectV3.id
  };

  const res = await testServer.inject({
    method: "POST",
    url: `/api/v2/secret-rotations/postgres-credentials`,
    headers: {
      authorization: `Bearer ${jwtAuthToken}`
    },
    body: createPostgresSecretRotationReqBody
  });

  expect(res.statusCode).toBe(200);
  expect(res.json().secretRotation).toBeDefined();

  return res;
};
describe("Secret Rotations", async () => {
|
||||
const testCases = [
|
||||
{
|
||||
type: SecretRotationType.MySQL,
|
||||
name: "MySQL (8.4.6) Secret Rotation",
|
||||
dbCredentials: {
|
||||
database: "mysql-test",
|
||||
host: "127.0.0.1",
|
||||
username: "root",
|
||||
password: "mysql-test",
|
||||
port: 3306
|
||||
},
|
||||
secretMapping: {
|
||||
username: formatSqlUsername("MYSQL_USERNAME"),
|
||||
password: formatSqlUsername("MYSQL_PASSWORD")
|
||||
},
|
||||
userCredentials: [
|
||||
{
|
||||
username: formatSqlUsername("MYSQL_USER_1")
|
||||
},
|
||||
{
|
||||
username: formatSqlUsername("MYSQL_USER_2")
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
type: SecretRotationType.MySQL,
|
||||
name: "MySQL (8.0.29) Secret Rotation",
|
||||
dbCredentials: {
|
||||
database: "mysql-test",
|
||||
host: "127.0.0.1",
|
||||
username: "root",
|
||||
password: "mysql-test",
|
||||
port: 3307
|
||||
},
|
||||
secretMapping: {
|
||||
username: formatSqlUsername("MYSQL_USERNAME"),
|
||||
password: formatSqlUsername("MYSQL_PASSWORD")
|
||||
},
|
||||
userCredentials: [
|
||||
{
|
||||
username: formatSqlUsername("MYSQL_USER_1")
|
||||
},
|
||||
{
|
||||
username: formatSqlUsername("MYSQL_USER_2")
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
type: SecretRotationType.MySQL,
|
||||
name: "MySQL (5.7.31) Secret Rotation",
|
||||
dbCredentials: {
|
||||
database: "mysql-test",
|
||||
host: "127.0.0.1",
|
||||
username: "root",
|
||||
password: "mysql-test",
|
||||
port: 3308
|
||||
},
|
||||
secretMapping: {
|
||||
username: formatSqlUsername("MYSQL_USERNAME"),
|
||||
password: formatSqlUsername("MYSQL_PASSWORD")
|
||||
},
|
||||
userCredentials: [
|
||||
{
|
||||
username: formatSqlUsername("MYSQL_USER_1")
|
||||
},
|
||||
{
|
||||
username: formatSqlUsername("MYSQL_USER_2")
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
type: SecretRotationType.OracleDb,
|
||||
name: "OracleDB (23.8) Secret Rotation",
|
||||
dbCredentials: {
|
||||
database: "FREEPDB1",
|
||||
host: "127.0.0.1",
|
||||
username: "system",
|
||||
password: "pdb-password",
|
||||
port: 1521
|
||||
},
|
||||
secretMapping: {
|
||||
username: formatSqlUsername("ORACLEDB_USERNAME"),
|
||||
password: formatSqlUsername("ORACLEDB_PASSWORD")
|
||||
},
|
||||
userCredentials: [
|
||||
{
|
||||
username: formatSqlUsername("INFISICAL_USER_1")
|
||||
},
|
||||
{
|
||||
username: formatSqlUsername("INFISICAL_USER_2")
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
type: SecretRotationType.OracleDb,
|
||||
name: "OracleDB (19.3) Secret Rotation",
|
||||
skippable: true,
|
||||
dbCredentials: {
|
||||
password: process.env.E2E_TEST_ORACLE_DB_19_PASSWORD!,
|
||||
host: process.env.E2E_TEST_ORACLE_DB_19_HOST!,
|
||||
username: process.env.E2E_TEST_ORACLE_DB_19_USERNAME!,
|
||||
port: 1521,
|
||||
database: "ORCLPDB1"
|
||||
},
|
||||
secretMapping: {
|
||||
username: formatSqlUsername("ORACLEDB_USERNAME"),
|
||||
password: formatSqlUsername("ORACLEDB_PASSWORD")
|
||||
},
|
||||
userCredentials: [
|
||||
{
|
||||
username: formatSqlUsername("INFISICAL_USER_1")
|
||||
},
|
||||
{
|
||||
username: formatSqlUsername("INFISICAL_USER_2")
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
type: SecretRotationType.Postgres,
|
||||
name: "Postgres (17) Secret Rotation",
|
||||
dbCredentials: {
|
||||
database: "postgres-test",
|
||||
host: "127.0.0.1",
|
||||
username: "postgres-test",
|
||||
password: "postgres-test",
|
||||
port: 5433
|
||||
},
|
||||
secretMapping: {
|
||||
username: formatSqlUsername("POSTGRES_USERNAME"),
|
||||
password: formatSqlUsername("POSTGRES_PASSWORD")
|
||||
},
|
||||
userCredentials: [
|
||||
{
|
||||
username: formatSqlUsername("INFISICAL_USER_1")
|
||||
},
|
||||
{
|
||||
username: formatSqlUsername("INFISICAL_USER_2")
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
type: SecretRotationType.Postgres,
|
||||
name: "Postgres (16) Secret Rotation",
|
||||
dbCredentials: {
|
||||
database: "postgres-test",
|
||||
host: "127.0.0.1",
|
||||
username: "postgres-test",
|
||||
password: "postgres-test",
|
||||
port: 5434
|
||||
},
|
||||
secretMapping: {
|
||||
username: formatSqlUsername("POSTGRES_USERNAME"),
|
||||
password: formatSqlUsername("POSTGRES_PASSWORD")
|
||||
},
|
||||
userCredentials: [
|
||||
{
|
||||
username: formatSqlUsername("INFISICAL_USER_1")
|
||||
},
|
||||
{
|
||||
username: formatSqlUsername("INFISICAL_USER_2")
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
type: SecretRotationType.Postgres,
|
||||
name: "Postgres (10.12) Secret Rotation",
|
||||
dbCredentials: {
|
||||
database: "postgres-test",
|
||||
host: "127.0.0.1",
|
||||
username: "postgres-test",
|
||||
password: "postgres-test",
|
||||
port: 5435
|
||||
},
|
||||
secretMapping: {
|
||||
username: formatSqlUsername("POSTGRES_USERNAME"),
|
||||
password: formatSqlUsername("POSTGRES_PASSWORD")
|
||||
},
|
||||
userCredentials: [
|
||||
{
|
||||
username: formatSqlUsername("INFISICAL_USER_1")
|
||||
},
|
||||
{
|
||||
username: formatSqlUsername("INFISICAL_USER_2")
|
||||
}
|
||||
]
|
||||
}
|
||||
] as {
|
||||
skippable?: boolean;
|
||||
type: SecretRotationType;
|
||||
name: string;
|
||||
dbCredentials: TGenericSqlCredentials;
|
||||
secretMapping: TSecretMapping;
|
||||
userCredentials: TDatabaseUserCredentials[];
|
||||
}[];
|
||||
|
||||
  const createAppConnectionMap = {
    [SecretRotationType.OracleDb]: createOracleDBAppConnection,
    [SecretRotationType.MySQL]: createMySQLAppConnection,
    [SecretRotationType.Postgres]: createPostgresAppConnection
  };

  const createRotationMap = {
    [SecretRotationType.OracleDb]: createOracleDBSecretRotation,
    [SecretRotationType.MySQL]: createMySQLSecretRotation,
    [SecretRotationType.Postgres]: createPostgresSecretRotation
  };

  const appConnectionIds: { id: string; type: SecretRotationType }[] = [];
  const secretRotationIds: { id: string; type: SecretRotationType }[] = [];

  afterAll(async () => {
    for (const { id, type } of secretRotationIds) {
      await deleteSecretRotation(id, type);
    }

    for (const { id, type } of appConnectionIds) {
      await deleteAppConnection(id, type);
    }
  });

  testCases.forEach(({ skippable, dbCredentials, secretMapping, userCredentials, type, name }) => {
    const shouldSkip = () => {
      if (skippable) {
        if (type === SecretRotationType.OracleDb) {
          if (!process.env.E2E_TEST_ORACLE_DB_19_HOST) {
            return true;
          }
        }
      }

      return false;
    };

    if (shouldSkip()) {
      test.skip(`Skipping Secret Rotation for ${type} (${name}) because E2E_TEST_ORACLE_DB_19_HOST is not set`);
    } else {
      test.concurrent(
        `Create secret rotation for ${name}`,
        async () => {
          const appConnectionId = await createAppConnectionMap[type](dbCredentials);

          if (appConnectionId) {
            appConnectionIds.push({ id: appConnectionId, type });
          }

          const res = await createRotationMap[type](appConnectionId, dbCredentials, userCredentials, secretMapping);

          const resJson = JSON.parse(res.payload);

          if (resJson.secretRotation) {
            secretRotationIds.push({ id: resJson.secretRotation.id, type });
          }

          const startSecretValue = await getSecretValue(secretMapping.password);
          expect(startSecretValue).toBeDefined();

          let attempts = 0;
          while (attempts < 60) {
            const currentSecretValue = await getSecretValue(secretMapping.password);

            if (currentSecretValue !== startSecretValue) {
              break;
            }

            attempts += 1;
            await new Promise((resolve) => setTimeout(resolve, 2_500));
          }

          if (attempts >= 60) {
            throw new Error("Secret rotation failed to rotate after 60 attempts");
          }

          const finalSecretValue = await getSecretValue(secretMapping.password);
          expect(finalSecretValue).not.toBe(startSecretValue);
        },
        {
          timeout: 300_000
        }
      );
    }
  });
});
@@ -18,6 +18,7 @@ import { keyStoreFactory } from "@app/keystore/keystore";
import { initializeHsmModule } from "@app/ee/services/hsm/hsm-fns";
import { buildRedisFromConfig } from "@app/lib/config/redis";
import { superAdminDALFactory } from "@app/services/super-admin/super-admin-dal";
import { bootstrapCheck } from "@app/server/boot-strap-check";

dotenv.config({ path: path.join(__dirname, "../../.env.test"), debug: true });
export default {
@@ -63,6 +64,8 @@ export default {
    const queue = queueServiceFactory(envCfg, { dbConnectionUrl: envCfg.DB_CONNECTION_URI });
    const keyStore = keyStoreFactory(envCfg);

    await queue.initialize();

    const hsmModule = initializeHsmModule(envCfg);
    hsmModule.initialize();
@@ -78,9 +81,13 @@ export default {
      envConfig: envCfg
    });

    await bootstrapCheck({ db });

    // @ts-expect-error type
    globalThis.testServer = server;
    // @ts-expect-error type
    globalThis.testQueue = queue;
    // @ts-expect-error type
    globalThis.testSuperAdminDAL = superAdminDAL;
    // @ts-expect-error type
    globalThis.jwtAuthToken = crypto.jwt().sign(
@@ -105,6 +112,8 @@ export default {
    // custom setup
    return {
      async teardown() {
        // @ts-expect-error type
        await globalThis.testQueue.shutdown();
        // @ts-expect-error type
        await globalThis.testServer.close();
        // @ts-expect-error type
@@ -112,7 +121,9 @@ export default {
        // @ts-expect-error type
        delete globalThis.testSuperAdminDAL;
        // @ts-expect-error type
        delete globalThis.jwtToken;
        delete globalThis.jwtAuthToken;
        // @ts-expect-error type
        delete globalThis.testQueue;
        // called after all tests with this env have been run
        await db.migrate.rollback(
          {
@@ -2,7 +2,7 @@
import { Knex } from "knex";

import { chunkArray } from "@app/lib/fn";
import { logger } from "@app/lib/logger";
import { initLogger, logger } from "@app/lib/logger";

import { TableName } from "../schemas";
import { TReminders, TRemindersInsert } from "../schemas/reminders";
@@ -107,5 +107,6 @@ export async function up(knex: Knex): Promise<void> {
}

export async function down(): Promise<void> {
  initLogger();
  logger.info("Rollback not implemented for secret reminders fix migration");
}
@@ -0,0 +1,65 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  const lastUserLoggedInAuthMethod = await knex.schema.hasColumn(TableName.OrgMembership, "lastLoginAuthMethod");
  const lastIdentityLoggedInAuthMethod = await knex.schema.hasColumn(
    TableName.IdentityOrgMembership,
    "lastLoginAuthMethod"
  );
  const lastUserLoggedInTime = await knex.schema.hasColumn(TableName.OrgMembership, "lastLoginTime");
  const lastIdentityLoggedInTime = await knex.schema.hasColumn(TableName.IdentityOrgMembership, "lastLoginTime");
  if (!lastUserLoggedInAuthMethod || !lastUserLoggedInTime) {
    await knex.schema.alterTable(TableName.OrgMembership, (t) => {
      if (!lastUserLoggedInAuthMethod) {
        t.string("lastLoginAuthMethod").nullable();
      }
      if (!lastUserLoggedInTime) {
        t.datetime("lastLoginTime").nullable();
      }
    });
  }

  if (!lastIdentityLoggedInAuthMethod || !lastIdentityLoggedInTime) {
    await knex.schema.alterTable(TableName.IdentityOrgMembership, (t) => {
      if (!lastIdentityLoggedInAuthMethod) {
        t.string("lastLoginAuthMethod").nullable();
      }
      if (!lastIdentityLoggedInTime) {
        t.datetime("lastLoginTime").nullable();
      }
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  const lastUserLoggedInAuthMethod = await knex.schema.hasColumn(TableName.OrgMembership, "lastLoginAuthMethod");
  const lastIdentityLoggedInAuthMethod = await knex.schema.hasColumn(
    TableName.IdentityOrgMembership,
    "lastLoginAuthMethod"
  );
  const lastUserLoggedInTime = await knex.schema.hasColumn(TableName.OrgMembership, "lastLoginTime");
  const lastIdentityLoggedInTime = await knex.schema.hasColumn(TableName.IdentityOrgMembership, "lastLoginTime");
  if (lastUserLoggedInAuthMethod || lastUserLoggedInTime) {
    await knex.schema.alterTable(TableName.OrgMembership, (t) => {
      if (lastUserLoggedInAuthMethod) {
        t.dropColumn("lastLoginAuthMethod");
      }
      if (lastUserLoggedInTime) {
        t.dropColumn("lastLoginTime");
      }
    });
  }

  if (lastIdentityLoggedInAuthMethod || lastIdentityLoggedInTime) {
    await knex.schema.alterTable(TableName.IdentityOrgMembership, (t) => {
      if (lastIdentityLoggedInAuthMethod) {
        t.dropColumn("lastLoginAuthMethod");
      }
      if (lastIdentityLoggedInTime) {
        t.dropColumn("lastLoginTime");
      }
    });
  }
}
@@ -0,0 +1,19 @@
import { Knex } from "knex";

import { TableName } from "../schemas/models";

export async function up(knex: Knex): Promise<void> {
  if (!(await knex.schema.hasColumn(TableName.AccessApprovalPolicy, "maxTimePeriod"))) {
    await knex.schema.alterTable(TableName.AccessApprovalPolicy, (t) => {
      t.string("maxTimePeriod").nullable(); // Ex: 1h - Null is permanent
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  if (await knex.schema.hasColumn(TableName.AccessApprovalPolicy, "maxTimePeriod")) {
    await knex.schema.alterTable(TableName.AccessApprovalPolicy, (t) => {
      t.dropColumn("maxTimePeriod");
    });
  }
}
@@ -0,0 +1,38 @@
import { Knex } from "knex";

import { TableName } from "@app/db/schemas";

export async function up(knex: Knex): Promise<void> {
  const hasEditNoteCol = await knex.schema.hasColumn(TableName.AccessApprovalRequest, "editNote");
  const hasEditedByUserId = await knex.schema.hasColumn(TableName.AccessApprovalRequest, "editedByUserId");

  if (!hasEditNoteCol || !hasEditedByUserId) {
    await knex.schema.alterTable(TableName.AccessApprovalRequest, (t) => {
      if (!hasEditedByUserId) {
        t.uuid("editedByUserId").nullable();
        t.foreign("editedByUserId").references("id").inTable(TableName.Users).onDelete("SET NULL");
      }

      if (!hasEditNoteCol) {
        t.string("editNote").nullable();
      }
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  const hasEditNoteCol = await knex.schema.hasColumn(TableName.AccessApprovalRequest, "editNote");
  const hasEditedByUserId = await knex.schema.hasColumn(TableName.AccessApprovalRequest, "editedByUserId");

  if (hasEditNoteCol || hasEditedByUserId) {
    await knex.schema.alterTable(TableName.AccessApprovalRequest, (t) => {
      if (hasEditedByUserId) {
        t.dropColumn("editedByUserId");
      }

      if (hasEditNoteCol) {
        t.dropColumn("editNote");
      }
    });
  }
}
@@ -17,7 +17,8 @@ export const AccessApprovalPoliciesSchema = z.object({
  updatedAt: z.date(),
  enforcementLevel: z.string().default("hard"),
  deletedAt: z.date().nullable().optional(),
  allowedSelfApprovals: z.boolean().default(true)
  allowedSelfApprovals: z.boolean().default(true),
  maxTimePeriod: z.string().nullable().optional()
});

export type TAccessApprovalPolicies = z.infer<typeof AccessApprovalPoliciesSchema>;
@@ -20,7 +20,9 @@ export const AccessApprovalRequestsSchema = z.object({
  requestedByUserId: z.string().uuid(),
  note: z.string().nullable().optional(),
  privilegeDeletedAt: z.date().nullable().optional(),
  status: z.string().default("pending")
  status: z.string().default("pending"),
  editedByUserId: z.string().uuid().nullable().optional(),
  editNote: z.string().nullable().optional()
});

export type TAccessApprovalRequests = z.infer<typeof AccessApprovalRequestsSchema>;
@@ -14,7 +14,9 @@ export const IdentityOrgMembershipsSchema = z.object({
  orgId: z.string().uuid(),
  createdAt: z.date(),
  updatedAt: z.date(),
  identityId: z.string().uuid()
  identityId: z.string().uuid(),
  lastLoginAuthMethod: z.string().nullable().optional(),
  lastLoginTime: z.date().nullable().optional()
});

export type TIdentityOrgMemberships = z.infer<typeof IdentityOrgMembershipsSchema>;
@@ -19,7 +19,9 @@ export const OrgMembershipsSchema = z.object({
  roleId: z.string().uuid().nullable().optional(),
  projectFavorites: z.string().array().nullable().optional(),
  isActive: z.boolean().default(true),
  lastInvitedAt: z.date().nullable().optional()
  lastInvitedAt: z.date().nullable().optional(),
  lastLoginAuthMethod: z.string().nullable().optional(),
  lastLoginTime: z.date().nullable().optional()
});

export type TOrgMemberships = z.infer<typeof OrgMembershipsSchema>;
@@ -3,12 +3,32 @@ import { z } from "zod";

import { ApproverType, BypasserType } from "@app/ee/services/access-approval-policy/access-approval-policy-types";
import { removeTrailingSlash } from "@app/lib/fn";
import { ms } from "@app/lib/ms";
import { EnforcementLevel } from "@app/lib/types";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { sapPubSchema } from "@app/server/routes/sanitizedSchemas";
import { AuthMode } from "@app/services/auth/auth-type";

const maxTimePeriodSchema = z
  .string()
  .trim()
  .nullish()
  .transform((val, ctx) => {
    if (val === undefined) return undefined;
    if (!val || val === "permanent") return null;

    const parsedMs = ms(val);

    if (typeof parsedMs !== "number" || parsedMs <= 0) {
      ctx.addIssue({
        code: z.ZodIssueCode.custom,
        message: "Invalid time period format or value. Must be a positive duration (e.g., '1h', '30m', '2d')."
      });
      return z.NEVER;
    }
    return val;
  });

export const registerAccessApprovalPolicyRouter = async (server: FastifyZodProvider) => {
  server.route({
    url: "/",
@@ -71,7 +91,8 @@ export const registerAccessApprovalPolicyRouter = async (server: FastifyZodProvi
          .optional(),
        approvals: z.number().min(1).default(1),
        enforcementLevel: z.nativeEnum(EnforcementLevel).default(EnforcementLevel.Hard),
        allowedSelfApprovals: z.boolean().default(true)
        allowedSelfApprovals: z.boolean().default(true),
        maxTimePeriod: maxTimePeriodSchema
      })
      .refine(
        (val) => Boolean(val.environment) || Boolean(val.environments),
@@ -124,7 +145,8 @@ export const registerAccessApprovalPolicyRouter = async (server: FastifyZodProvi
              .array()
              .nullable()
              .optional(),
            bypassers: z.object({ type: z.nativeEnum(BypasserType), id: z.string().nullable().optional() }).array()
            bypassers: z.object({ type: z.nativeEnum(BypasserType), id: z.string().nullable().optional() }).array(),
            maxTimePeriod: z.string().nullable().optional()
          })
          .array()
          .nullable()
@@ -233,7 +255,8 @@ export const registerAccessApprovalPolicyRouter = async (server: FastifyZodProvi
            stepNumber: z.number().int()
          })
          .array()
          .optional()
          .optional(),
        maxTimePeriod: maxTimePeriodSchema
      }),
      response: {
        200: z.object({
@@ -314,7 +337,8 @@ export const registerAccessApprovalPolicyRouter = async (server: FastifyZodProvi
            })
            .array()
            .nullable()
            .optional()
            .optional(),
          maxTimePeriod: z.string().nullable().optional()
        })
      })
    }
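As context for the `maxTimePeriod` and `temporaryRange` transforms above, here is a minimal standalone TypeScript sketch of the same duration check; `parseDurationMs` is a hypothetical stand-in for the internal `@app/lib/ms` helper, and the example values are illustrative only.

```typescript
// A tiny stand-in for the duration parsing the routes above delegate to ms():
// supports the "<number><unit>" shapes named in the error message ('30m', '1h', '2d').
const UNIT_MS: Record<string, number> = { s: 1_000, m: 60_000, h: 3_600_000, d: 86_400_000 };

function parseDurationMs(value: string): number | undefined {
  const match = /^(\d+(?:\.\d+)?)\s*([smhd])$/i.exec(value.trim());
  if (!match) return undefined;
  return Number(match[1]) * UNIT_MS[match[2].toLowerCase()];
}

function normalizeMaxTimePeriod(value: string | null | undefined): string | null | undefined {
  if (value === undefined) return undefined; // field omitted: leave untouched
  if (!value || value === "permanent") return null; // "permanent" maps to null (no limit)

  const parsed = parseDurationMs(value);
  if (typeof parsed !== "number" || parsed <= 0) {
    throw new Error("Invalid time period format or value. Must be a positive duration (e.g., '1h', '30m', '2d').");
  }
  return value;
}

console.log(normalizeMaxTimePeriod("1h")); // "1h" (parses to 3_600_000 ms)
console.log(normalizeMaxTimePeriod("permanent")); // null
// normalizeMaxTimePeriod("soon") would throw: not a parsable duration
```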
@@ -2,6 +2,7 @@ import { z } from "zod";

import { AccessApprovalRequestsReviewersSchema, AccessApprovalRequestsSchema, UsersSchema } from "@app/db/schemas";
import { ApprovalStatus } from "@app/ee/services/access-approval-request/access-approval-request-types";
import { ms } from "@app/lib/ms";
import { writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
@@ -26,7 +27,23 @@ export const registerAccessApprovalRequestRouter = async (server: FastifyZodProv
      body: z.object({
        permissions: z.any().array(),
        isTemporary: z.boolean(),
        temporaryRange: z.string().optional(),
        temporaryRange: z
          .string()
          .optional()
          .transform((val, ctx) => {
            if (!val || val === "permanent") return undefined;

            const parsedMs = ms(val);

            if (typeof parsedMs !== "number" || parsedMs <= 0) {
              ctx.addIssue({
                code: z.ZodIssueCode.custom,
                message: "Invalid time period format or value. Must be a positive duration (e.g., '1h', '30m', '2d')."
              });
              return z.NEVER;
            }
            return val;
          }),
        note: z.string().max(255).optional()
      }),
      querystring: z.object({
@@ -128,7 +145,8 @@ export const registerAccessApprovalRequestRouter = async (server: FastifyZodProv
            envId: z.string(),
            enforcementLevel: z.string(),
            deletedAt: z.date().nullish(),
            allowedSelfApprovals: z.boolean()
            allowedSelfApprovals: z.boolean(),
            maxTimePeriod: z.string().nullable().optional()
          }),
          reviewers: z
            .object({
@@ -189,4 +207,47 @@ export const registerAccessApprovalRequestRouter = async (server: FastifyZodProv
      return { review };
    }
  });

  server.route({
    url: "/:requestId",
    method: "PATCH",
    schema: {
      params: z.object({
        requestId: z.string().trim()
      }),
      body: z.object({
        temporaryRange: z.string().transform((val, ctx) => {
          const parsedMs = ms(val);

          if (typeof parsedMs !== "number" || parsedMs <= 0) {
            ctx.addIssue({
              code: z.ZodIssueCode.custom,
              message: "Invalid time period format or value. Must be a positive duration (e.g., '1h', '30m', '2d')."
            });
            return z.NEVER;
          }
          return val;
        }),
        editNote: z.string().max(255)
      }),
      response: {
        200: z.object({
          approval: AccessApprovalRequestsSchema
        })
      }
    },
    onRequest: verifyAuth([AuthMode.JWT]),
    handler: async (req) => {
      const { request } = await server.services.accessApprovalRequest.updateAccessApprovalRequest({
        actor: req.permission.type,
        actorId: req.permission.id,
        actorAuthMethod: req.permission.authMethod,
        actorOrgId: req.permission.orgId,
        temporaryRange: req.body.temporaryRange,
        editNote: req.body.editNote,
        requestId: req.params.requestId
      });
      return { approval: request };
    }
  });
};
@@ -379,14 +379,17 @@ export const registerLdapRouter = async (server: FastifyZodProvider) => {

  server.route({
    method: "POST",
    url: "/config/:configId/test-connection",
    url: "/config/test-connection",
    config: {
      rateLimit: readLimit
    },
    onRequest: verifyAuth([AuthMode.JWT]),
    schema: {
      params: z.object({
        configId: z.string().trim()
      body: z.object({
        url: z.string().trim(),
        bindDN: z.string().trim(),
        bindPass: z.string().trim(),
        caCert: z.string().trim()
      }),
      response: {
        200: z.boolean()
@@ -399,8 +402,9 @@ export const registerLdapRouter = async (server: FastifyZodProvider) => {
        orgId: req.permission.orgId,
        actorAuthMethod: req.permission.authMethod,
        actorOrgId: req.permission.orgId,
        ldapConfigId: req.params.configId
        ...req.body
      });

      return result;
    }
  });
@@ -56,6 +56,7 @@ export interface TAccessApprovalPolicyDALFactory
    allowedSelfApprovals: boolean;
    secretPath: string;
    deletedAt?: Date | null | undefined;
    maxTimePeriod?: string | null;
    projectId: string;
    bypassers: (
      | {
@@ -96,6 +97,7 @@ export interface TAccessApprovalPolicyDALFactory
    allowedSelfApprovals: boolean;
    secretPath: string;
    deletedAt?: Date | null | undefined;
    maxTimePeriod?: string | null;
    environments: {
      id: string;
      name: string;
@@ -141,6 +143,7 @@ export interface TAccessApprovalPolicyDALFactory
        allowedSelfApprovals: boolean;
        secretPath: string;
        deletedAt?: Date | null | undefined;
        maxTimePeriod?: string | null;
      }
    | undefined
  >;
@@ -100,7 +100,8 @@ export const accessApprovalPolicyServiceFactory = ({
    environments,
    enforcementLevel,
    allowedSelfApprovals,
    approvalsRequired
    approvalsRequired,
    maxTimePeriod
  }) => {
    const project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId);
    if (!project) throw new NotFoundError({ message: `Project with slug '${projectSlug}' not found` });
@@ -219,7 +220,8 @@ export const accessApprovalPolicyServiceFactory = ({
          secretPath,
          name,
          enforcementLevel,
          allowedSelfApprovals
          allowedSelfApprovals,
          maxTimePeriod
        },
        tx
      );
@@ -318,7 +320,8 @@ export const accessApprovalPolicyServiceFactory = ({
    enforcementLevel,
    allowedSelfApprovals,
    approvalsRequired,
    environments
    environments,
    maxTimePeriod
  }: TUpdateAccessApprovalPolicy) => {
    const groupApprovers = approvers.filter((approver) => approver.type === ApproverType.Group);

@@ -461,7 +464,8 @@ export const accessApprovalPolicyServiceFactory = ({
          secretPath,
          name,
          enforcementLevel,
          allowedSelfApprovals
          allowedSelfApprovals,
          maxTimePeriod
        },
        tx
      );
@@ -41,6 +41,7 @@ export type TCreateAccessApprovalPolicy = {
  enforcementLevel: EnforcementLevel;
  allowedSelfApprovals: boolean;
  approvalsRequired?: { numberOfApprovals: number; stepNumber: number }[];
  maxTimePeriod?: string | null;
} & Omit<TProjectPermission, "projectId">;

export type TUpdateAccessApprovalPolicy = {
@@ -60,6 +61,7 @@ export type TUpdateAccessApprovalPolicy = {
  allowedSelfApprovals: boolean;
  approvalsRequired?: { numberOfApprovals: number; stepNumber: number }[];
  environments?: string[];
  maxTimePeriod?: string | null;
} & Omit<TProjectPermission, "projectId">;

export type TDeleteAccessApprovalPolicy = {
@@ -104,7 +106,8 @@ export interface TAccessApprovalPolicyServiceFactory {
    environment,
    enforcementLevel,
    allowedSelfApprovals,
    approvalsRequired
    approvalsRequired,
    maxTimePeriod
  }: TCreateAccessApprovalPolicy) => Promise<{
    environment: {
      name: string;
@@ -135,6 +138,7 @@ export interface TAccessApprovalPolicyServiceFactory {
    allowedSelfApprovals: boolean;
    secretPath: string;
    deletedAt?: Date | null | undefined;
    maxTimePeriod?: string | null;
  }>;
  deleteAccessApprovalPolicy: ({
    policyId,
@@ -159,6 +163,7 @@ export interface TAccessApprovalPolicyServiceFactory {
    allowedSelfApprovals: boolean;
    secretPath: string;
    deletedAt?: Date | null | undefined;
    maxTimePeriod?: string | null;
    environment: {
      id: string;
      name: string;
@@ -185,7 +190,8 @@ export interface TAccessApprovalPolicyServiceFactory {
    enforcementLevel,
    allowedSelfApprovals,
    approvalsRequired,
    environments
    environments,
    maxTimePeriod
  }: TUpdateAccessApprovalPolicy) => Promise<{
    environment: {
      id: string;
@@ -208,6 +214,7 @@ export interface TAccessApprovalPolicyServiceFactory {
    allowedSelfApprovals: boolean;
    secretPath?: string | null | undefined;
    deletedAt?: Date | null | undefined;
    maxTimePeriod?: string | null;
  }>;
  getAccessApprovalPolicyByProjectSlug: ({
    actorId,
@@ -242,6 +249,7 @@ export interface TAccessApprovalPolicyServiceFactory {
    allowedSelfApprovals: boolean;
    secretPath: string;
    deletedAt?: Date | null | undefined;
    maxTimePeriod?: string | null;
    environment: {
      id: string;
      name: string;
@@ -298,6 +306,7 @@ export interface TAccessApprovalPolicyServiceFactory {
    allowedSelfApprovals: boolean;
    secretPath: string;
    deletedAt?: Date | null | undefined;
    maxTimePeriod?: string | null;
    environment: {
      id: string;
      name: string;
@@ -63,6 +63,7 @@ export interface TAccessApprovalRequestDALFactory extends Omit<TOrmify<TableName
|
||||
enforcementLevel: string;
|
||||
allowedSelfApprovals: boolean;
|
||||
deletedAt: Date | null | undefined;
|
||||
maxTimePeriod?: string | null;
|
||||
};
|
||||
projectId: string;
|
||||
environments: string[];
|
||||
@@ -161,6 +162,7 @@ export interface TAccessApprovalRequestDALFactory extends Omit<TOrmify<TableName
|
||||
allowedSelfApprovals: boolean;
|
||||
envId: string;
|
||||
deletedAt: Date | null | undefined;
|
||||
maxTimePeriod?: string | null;
|
||||
};
|
||||
projectId: string;
|
||||
environment: string;
|
||||
@@ -297,7 +299,8 @@ export const accessApprovalRequestDALFactory = (db: TDbClient): TAccessApprovalR
|
||||
db.ref("enforcementLevel").withSchema(TableName.AccessApprovalPolicy).as("policyEnforcementLevel"),
|
||||
db.ref("allowedSelfApprovals").withSchema(TableName.AccessApprovalPolicy).as("policyAllowedSelfApprovals"),
|
||||
db.ref("envId").withSchema(TableName.AccessApprovalPolicy).as("policyEnvId"),
|
||||
db.ref("deletedAt").withSchema(TableName.AccessApprovalPolicy).as("policyDeletedAt")
|
||||
db.ref("deletedAt").withSchema(TableName.AccessApprovalPolicy).as("policyDeletedAt"),
|
||||
db.ref("maxTimePeriod").withSchema(TableName.AccessApprovalPolicy).as("policyMaxTimePeriod")
|
||||
)
|
||||
.select(db.ref("approverUserId").withSchema(TableName.AccessApprovalPolicyApprover))
|
||||
.select(db.ref("sequence").withSchema(TableName.AccessApprovalPolicyApprover).as("approverSequence"))
|
||||
@@ -364,7 +367,8 @@ export const accessApprovalRequestDALFactory = (db: TDbClient): TAccessApprovalR
|
||||
enforcementLevel: doc.policyEnforcementLevel,
|
||||
allowedSelfApprovals: doc.policyAllowedSelfApprovals,
|
||||
envId: doc.policyEnvId,
|
||||
deletedAt: doc.policyDeletedAt
|
||||
deletedAt: doc.policyDeletedAt,
|
||||
maxTimePeriod: doc.policyMaxTimePeriod
|
||||
},
|
||||
requestedByUser: {
|
||||
userId: doc.requestedByUserId,
|
||||
@@ -574,7 +578,8 @@ export const accessApprovalRequestDALFactory = (db: TDbClient): TAccessApprovalR
|
||||
tx.ref("enforcementLevel").withSchema(TableName.AccessApprovalPolicy).as("policyEnforcementLevel"),
|
||||
tx.ref("allowedSelfApprovals").withSchema(TableName.AccessApprovalPolicy).as("policyAllowedSelfApprovals"),
|
||||
tx.ref("approvals").withSchema(TableName.AccessApprovalPolicy).as("policyApprovals"),
|
||||
tx.ref("deletedAt").withSchema(TableName.AccessApprovalPolicy).as("policyDeletedAt")
|
||||
tx.ref("deletedAt").withSchema(TableName.AccessApprovalPolicy).as("policyDeletedAt"),
|
||||
tx.ref("maxTimePeriod").withSchema(TableName.AccessApprovalPolicy).as("policyMaxTimePeriod")
|
||||
);
|
||||
|
||||
const findById: TAccessApprovalRequestDALFactory["findById"] = async (id, tx) => {
|
||||
@@ -595,7 +600,8 @@ export const accessApprovalRequestDALFactory = (db: TDbClient): TAccessApprovalR
|
||||
secretPath: el.policySecretPath,
|
||||
enforcementLevel: el.policyEnforcementLevel,
|
||||
allowedSelfApprovals: el.policyAllowedSelfApprovals,
|
||||
deletedAt: el.policyDeletedAt
|
||||
deletedAt: el.policyDeletedAt,
|
||||
maxTimePeriod: el.policyMaxTimePeriod
|
||||
},
|
||||
requestedByUser: {
|
||||
userId: el.requestedByUserId,
|
||||
|
@@ -54,7 +54,7 @@ type TSecretApprovalRequestServiceFactoryDep = {
|
||||
accessApprovalPolicyDAL: Pick<TAccessApprovalPolicyDALFactory, "findOne" | "find" | "findLastValidPolicy">;
|
||||
accessApprovalRequestReviewerDAL: Pick<
|
||||
TAccessApprovalRequestReviewerDALFactory,
|
||||
"create" | "find" | "findOne" | "transaction"
|
||||
"create" | "find" | "findOne" | "transaction" | "delete"
|
||||
>;
|
||||
groupDAL: Pick<TGroupDALFactory, "findAllGroupPossibleMembers">;
|
||||
projectMembershipDAL: Pick<TProjectMembershipDALFactory, "findById">;
|
||||
@@ -156,6 +156,15 @@ export const accessApprovalRequestServiceFactory = ({
|
||||
throw new BadRequestError({ message: "The policy linked to this request has been deleted" });
|
||||
}
|
||||
|
||||
// Check if the requested time falls under policy.maxTimePeriod
|
||||
if (policy.maxTimePeriod) {
|
||||
if (!temporaryRange || ms(temporaryRange) > ms(policy.maxTimePeriod)) {
|
||||
throw new BadRequestError({
|
||||
message: `Requested access time range is limited to ${policy.maxTimePeriod} by policy`
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
const approverIds: string[] = [];
|
||||
const approverGroupIds: string[] = [];
|
||||
|
||||
@@ -292,6 +301,155 @@ export const accessApprovalRequestServiceFactory = ({
|
||||
return { request: approval };
|
||||
};
|
||||
|
||||
const updateAccessApprovalRequest: TAccessApprovalRequestServiceFactory["updateAccessApprovalRequest"] = async ({
|
||||
temporaryRange,
|
||||
actorId,
|
||||
actor,
|
||||
actorOrgId,
|
||||
actorAuthMethod,
|
||||
editNote,
|
||||
requestId
|
||||
}) => {
|
||||
const cfg = getConfig();
|
||||
|
||||
const accessApprovalRequest = await accessApprovalRequestDAL.findById(requestId);
|
||||
if (!accessApprovalRequest) {
|
||||
throw new NotFoundError({ message: `Access request with ID '${requestId}' not found` });
|
||||
}
|
||||
|
||||
const { policy, requestedByUser } = accessApprovalRequest;
|
||||
if (policy.deletedAt) {
|
||||
throw new BadRequestError({
|
||||
message: "The policy associated with this access request has been deleted."
|
||||
});
|
||||
}
|
||||
|
||||
const { membership, hasRole } = await permissionService.getProjectPermission({
|
||||
actor,
|
||||
actorId,
|
||||
projectId: accessApprovalRequest.projectId,
|
||||
actorAuthMethod,
|
||||
actorOrgId,
|
||||
actionProjectType: ActionProjectType.SecretManager
|
||||
});
|
||||
|
||||
if (!membership) {
|
||||
throw new ForbiddenRequestError({ message: "You are not a member of this project" });
|
||||
}
|
||||
|
||||
const isApprover = policy.approvers.find((approver) => approver.userId === actorId);
|
||||
|
||||
if (!hasRole(ProjectMembershipRole.Admin) && !isApprover) {
|
||||
throw new ForbiddenRequestError({ message: "You are not authorized to modify this request" });
|
||||
}
|
||||
|
||||
const project = await projectDAL.findById(accessApprovalRequest.projectId);
|
||||
|
||||
if (!project) {
|
||||
throw new NotFoundError({
|
||||
message: `The project associated with this access request was not found. [projectId=${accessApprovalRequest.projectId}]`
|
||||
});
|
||||
}
|
||||
|
||||
if (accessApprovalRequest.status !== ApprovalStatus.PENDING) {
|
||||
throw new BadRequestError({ message: "The request has been closed" });
|
||||
}
|
||||
|
||||
const editedByUser = await userDAL.findById(actorId);
|
||||
|
||||
if (!editedByUser) throw new NotFoundError({ message: "Editing user not found" });
|
||||
|
||||
if (accessApprovalRequest.isTemporary && accessApprovalRequest.temporaryRange) {
|
||||
if (ms(temporaryRange) > ms(accessApprovalRequest.temporaryRange)) {
|
||||
throw new BadRequestError({ message: "Updated access duration must be less than current access duration" });
|
||||
}
|
||||
}
|
||||
|
||||
const { envSlug, secretPath, accessTypes } = verifyRequestedPermissions({
|
||||
permissions: accessApprovalRequest.permissions
|
||||
});
|
||||
|
||||
const approval = await accessApprovalRequestDAL.transaction(async (tx) => {
|
||||
const approvalRequest = await accessApprovalRequestDAL.updateById(
|
||||
requestId,
|
||||
{
|
||||
temporaryRange,
|
||||
isTemporary: true,
|
||||
editNote,
|
||||
editedByUserId: actorId
|
||||
},
|
||||
tx
|
||||
);
|
||||
|
||||
// reset review progress
|
||||
await accessApprovalRequestReviewerDAL.delete(
|
||||
{
|
||||
requestId
|
||||
},
|
||||
tx
|
||||
);
|
||||
|
||||
const requesterFullName = `${requestedByUser.firstName} ${requestedByUser.lastName}`;
|
||||
const editorFullName = `${editedByUser.firstName} ${editedByUser.lastName}`;
|
||||
const approvalUrl = `${cfg.SITE_URL}/projects/secret-management/${project.id}/approval`;
|
||||
|
||||
await triggerWorkflowIntegrationNotification({
|
||||
input: {
|
||||
notification: {
|
||||
type: TriggerFeature.ACCESS_REQUEST_UPDATED,
|
||||
payload: {
|
||||
projectName: project.name,
|
||||
requesterFullName,
|
||||
isTemporary: true,
|
||||
requesterEmail: requestedByUser.email as string,
|
||||
secretPath,
|
||||
environment: envSlug,
|
||||
permissions: accessTypes,
|
||||
approvalUrl,
|
||||
editNote,
|
||||
editorEmail: editedByUser.email as string,
|
||||
editorFullName
|
||||
}
|
||||
},
|
||||
projectId: project.id
|
||||
},
|
||||
dependencies: {
|
||||
projectDAL,
|
||||
projectSlackConfigDAL,
|
||||
kmsService,
|
||||
microsoftTeamsService,
|
||||
projectMicrosoftTeamsConfigDAL
|
||||
}
|
||||
});
|
||||
|
||||
await smtpService.sendMail({
|
||||
recipients: policy.approvers
|
||||
.filter((approver) => Boolean(approver.email) && approver.userId !== editedByUser.id)
|
||||
.map((approver) => approver.email!),
|
||||
subjectLine: "Access Approval Request Updated",
|
||||
substitutions: {
|
||||
projectName: project.name,
|
||||
requesterFullName,
|
||||
requesterEmail: requestedByUser.email,
|
||||
isTemporary: true,
|
||||
expiresIn: msFn(ms(temporaryRange || ""), { long: true }),
|
||||
secretPath,
|
||||
environment: envSlug,
|
||||
permissions: accessTypes,
|
||||
approvalUrl,
|
||||
editNote,
|
||||
editorFullName,
|
||||
editorEmail: editedByUser.email
|
||||
},
|
||||
template: SmtpTemplates.AccessApprovalRequestUpdated
|
||||
});
|
||||
|
||||
return approvalRequest;
|
||||
});
|
||||
|
||||
return { request: approval };
|
||||
};
|
||||
|
||||
const listApprovalRequests: TAccessApprovalRequestServiceFactory["listApprovalRequests"] = async ({
|
||||
projectSlug,
|
||||
authorUserId,
|
||||
@@ -641,6 +799,7 @@ export const accessApprovalRequestServiceFactory = ({
|
||||
|
||||
return {
|
||||
createAccessApprovalRequest,
|
||||
updateAccessApprovalRequest,
|
||||
listApprovalRequests,
|
||||
reviewAccessRequest,
|
||||
getCount
|
||||
|
@@ -30,6 +30,12 @@ export type TCreateAccessApprovalRequestDTO = {
|
||||
note?: string;
|
||||
} & Omit<TProjectPermission, "projectId">;
|
||||
|
||||
export type TUpdateAccessApprovalRequestDTO = {
|
||||
requestId: string;
|
||||
temporaryRange: string;
|
||||
editNote: string;
|
||||
} & Omit<TProjectPermission, "projectId">;
|
||||
|
||||
export type TListApprovalRequestsDTO = {
|
||||
projectSlug: string;
|
||||
authorUserId?: string;
|
||||
@@ -54,6 +60,23 @@ export interface TAccessApprovalRequestServiceFactory {
|
||||
privilegeDeletedAt?: Date | null | undefined;
|
||||
};
|
||||
}>;
|
||||
updateAccessApprovalRequest: (arg: TUpdateAccessApprovalRequestDTO) => Promise<{
|
||||
request: {
|
||||
status: string;
|
||||
id: string;
|
||||
createdAt: Date;
|
||||
updatedAt: Date;
|
||||
policyId: string;
|
||||
isTemporary: boolean;
|
||||
requestedByUserId: string;
|
||||
privilegeId?: string | null | undefined;
|
||||
requestedBy?: string | null | undefined;
|
||||
temporaryRange?: string | null | undefined;
|
||||
permissions?: unknown;
|
||||
note?: string | null | undefined;
|
||||
privilegeDeletedAt?: Date | null | undefined;
|
||||
};
|
||||
}>;
|
||||
listApprovalRequests: (arg: TListApprovalRequestsDTO) => Promise<{
|
||||
requests: {
|
||||
policy: {
|
||||
@@ -82,6 +105,7 @@ export interface TAccessApprovalRequestServiceFactory {
|
||||
allowedSelfApprovals: boolean;
|
||||
envId: string;
|
||||
deletedAt: Date | null | undefined;
|
||||
maxTimePeriod?: string | null;
|
||||
};
|
||||
projectId: string;
|
||||
environment: string;
|
||||
|
@@ -9,7 +9,7 @@ import { getDbConnectionHost } from "@app/lib/knex";
|
||||
export const verifyHostInputValidity = async (host: string, isGateway = false) => {
|
||||
const appCfg = getConfig();
|
||||
|
||||
if (appCfg.isDevelopmentMode) return [host];
|
||||
if (appCfg.isDevelopmentMode || appCfg.isTestMode) return [host];
|
||||
|
||||
if (isGateway) return [host];
|
||||
|
||||
|
@@ -15,6 +15,7 @@ import { z } from "zod";
|
||||
import { CustomAWSHasher } from "@app/lib/aws/hashing";
|
||||
import { crypto } from "@app/lib/crypto";
|
||||
import { BadRequestError } from "@app/lib/errors";
|
||||
import { sanitizeString } from "@app/lib/fn";
|
||||
import { validateHandlebarTemplate } from "@app/lib/template/validate-handlebars";
|
||||
|
||||
import { DynamicSecretAwsElastiCacheSchema, TDynamicProviderFns } from "./models";
|
||||
@@ -170,14 +171,29 @@ export const AwsElastiCacheDatabaseProvider = (): TDynamicProviderFns => {
|
||||
};
|
||||
const validateConnection = async (inputs: unknown) => {
|
||||
const providerInputs = await validateProviderInputs(inputs);
|
||||
await ElastiCacheUserManager(
|
||||
{
|
||||
accessKeyId: providerInputs.accessKeyId,
|
||||
secretAccessKey: providerInputs.secretAccessKey
|
||||
},
|
||||
providerInputs.region
|
||||
).verifyCredentials(providerInputs.clusterName);
|
||||
return true;
|
||||
try {
|
||||
await ElastiCacheUserManager(
|
||||
{
|
||||
accessKeyId: providerInputs.accessKeyId,
|
||||
secretAccessKey: providerInputs.secretAccessKey
|
||||
},
|
||||
providerInputs.region
|
||||
).verifyCredentials(providerInputs.clusterName);
|
||||
return true;
|
||||
} catch (err) {
|
||||
const sanitizedErrorMessage = sanitizeString({
|
||||
unsanitizedString: (err as Error)?.message,
|
||||
tokens: [
|
||||
providerInputs.accessKeyId,
|
||||
providerInputs.secretAccessKey,
|
||||
providerInputs.clusterName,
|
||||
providerInputs.region
|
||||
]
|
||||
});
|
||||
throw new BadRequestError({
|
||||
message: `Failed to connect with provider: ${sanitizedErrorMessage}`
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
const create = async (data: {
|
||||
@@ -206,21 +222,37 @@ export const AwsElastiCacheDatabaseProvider = (): TDynamicProviderFns => {
|
||||
|
||||
const parsedStatement = CreateElastiCacheUserSchema.parse(JSON.parse(creationStatement));
|
||||
|
||||
await ElastiCacheUserManager(
|
||||
{
|
||||
accessKeyId: providerInputs.accessKeyId,
|
||||
secretAccessKey: providerInputs.secretAccessKey
|
||||
},
|
||||
providerInputs.region
|
||||
).createUser(parsedStatement, providerInputs.clusterName);
|
||||
try {
|
||||
await ElastiCacheUserManager(
|
||||
{
|
||||
accessKeyId: providerInputs.accessKeyId,
|
||||
secretAccessKey: providerInputs.secretAccessKey
|
||||
},
|
||||
providerInputs.region
|
||||
).createUser(parsedStatement, providerInputs.clusterName);
|
||||
|
||||
return {
|
||||
entityId: leaseUsername,
|
||||
data: {
|
||||
DB_USERNAME: leaseUsername,
|
||||
DB_PASSWORD: leasePassword
|
||||
}
|
||||
};
|
||||
return {
|
||||
entityId: leaseUsername,
|
||||
data: {
|
||||
DB_USERNAME: leaseUsername,
|
||||
DB_PASSWORD: leasePassword
|
||||
}
|
||||
};
|
||||
} catch (err) {
|
||||
const sanitizedErrorMessage = sanitizeString({
|
||||
unsanitizedString: (err as Error)?.message,
|
||||
tokens: [
|
||||
leaseUsername,
|
||||
leasePassword,
|
||||
providerInputs.accessKeyId,
|
||||
providerInputs.secretAccessKey,
|
||||
providerInputs.clusterName
|
||||
]
|
||||
});
|
||||
throw new BadRequestError({
|
||||
message: `Failed to create lease from provider: ${sanitizedErrorMessage}`
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
const revoke = async (inputs: unknown, entityId: string) => {
|
||||
@@ -229,15 +261,25 @@ export const AwsElastiCacheDatabaseProvider = (): TDynamicProviderFns => {
|
||||
const revokeStatement = handlebars.compile(providerInputs.revocationStatement)({ username: entityId });
|
||||
const parsedStatement = DeleteElasticCacheUserSchema.parse(JSON.parse(revokeStatement));
|
||||
|
||||
await ElastiCacheUserManager(
|
||||
{
|
||||
accessKeyId: providerInputs.accessKeyId,
|
||||
secretAccessKey: providerInputs.secretAccessKey
|
||||
},
|
||||
providerInputs.region
|
||||
).deleteUser(parsedStatement);
|
||||
try {
|
||||
await ElastiCacheUserManager(
|
||||
{
|
||||
accessKeyId: providerInputs.accessKeyId,
|
||||
secretAccessKey: providerInputs.secretAccessKey
|
||||
},
|
||||
providerInputs.region
|
||||
).deleteUser(parsedStatement);
|
||||
|
||||
return { entityId };
|
||||
return { entityId };
|
||||
} catch (err) {
|
||||
const sanitizedErrorMessage = sanitizeString({
|
||||
unsanitizedString: (err as Error)?.message,
|
||||
tokens: [entityId, providerInputs.accessKeyId, providerInputs.secretAccessKey, providerInputs.clusterName]
|
||||
});
|
||||
throw new BadRequestError({
|
||||
message: `Failed to revoke lease from provider: ${sanitizedErrorMessage}`
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
const renew = async (_inputs: unknown, entityId: string) => {
|
||||
|
@@ -23,6 +23,7 @@ import { CustomAWSHasher } from "@app/lib/aws/hashing";
|
||||
import { getConfig } from "@app/lib/config/env";
|
||||
import { crypto } from "@app/lib/crypto/cryptography";
|
||||
import { BadRequestError, UnauthorizedError } from "@app/lib/errors";
|
||||
import { sanitizeString } from "@app/lib/fn";
|
||||
import { alphaNumericNanoId } from "@app/lib/nanoid";
|
||||
|
||||
import { AwsIamAuthType, DynamicSecretAwsIamSchema, TDynamicProviderFns } from "./models";
|
||||
@@ -118,22 +119,39 @@ export const AwsIamProvider = (): TDynamicProviderFns => {
|
||||
|
||||
const validateConnection = async (inputs: unknown, { projectId }: { projectId: string }) => {
|
||||
const providerInputs = await validateProviderInputs(inputs);
|
||||
const client = await $getClient(providerInputs, projectId);
|
||||
const isConnected = await client
|
||||
.send(new GetUserCommand({}))
|
||||
.then(() => true)
|
||||
.catch((err) => {
|
||||
const message = (err as Error)?.message;
|
||||
if (
|
||||
(providerInputs.method === AwsIamAuthType.AssumeRole || providerInputs.method === AwsIamAuthType.IRSA) &&
|
||||
// assume role will throw an error asking to provider username, but if so this has access in aws correctly
|
||||
message.includes("Must specify userName when calling with non-User credentials")
|
||||
) {
|
||||
return true;
|
||||
}
|
||||
throw err;
|
||||
try {
|
||||
const client = await $getClient(providerInputs, projectId);
|
||||
const isConnected = await client
|
||||
.send(new GetUserCommand({}))
|
||||
.then(() => true)
|
||||
.catch((err) => {
|
||||
const message = (err as Error)?.message;
|
||||
if (
|
||||
(providerInputs.method === AwsIamAuthType.AssumeRole || providerInputs.method === AwsIamAuthType.IRSA) &&
|
||||
// assume role will throw an error asking to provider username, but if so this has access in aws correctly
|
||||
message.includes("Must specify userName when calling with non-User credentials")
|
||||
) {
|
||||
return true;
|
||||
}
|
||||
throw err;
|
||||
});
|
||||
return isConnected;
|
||||
} catch (err) {
|
||||
const sensitiveTokens = [];
|
||||
if (providerInputs.method === AwsIamAuthType.AccessKey) {
|
||||
sensitiveTokens.push(providerInputs.accessKey, providerInputs.secretAccessKey);
|
||||
}
|
||||
if (providerInputs.method === AwsIamAuthType.AssumeRole) {
|
||||
sensitiveTokens.push(providerInputs.roleArn);
|
||||
}
|
||||
const sanitizedErrorMessage = sanitizeString({
|
||||
unsanitizedString: (err as Error)?.message,
|
||||
tokens: sensitiveTokens
|
||||
});
|
||||
return isConnected;
|
||||
throw new BadRequestError({
|
||||
message: `Failed to connect with provider: ${sanitizedErrorMessage}`
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
const create = async (data: {
|
||||
@@ -162,62 +180,81 @@ export const AwsIamProvider = (): TDynamicProviderFns => {
|
||||
awsTags.push(...additionalTags);
|
||||
}
|
||||
|
||||
const createUserRes = await client.send(
|
||||
new CreateUserCommand({
|
||||
Path: awsPath,
|
||||
PermissionsBoundary: permissionBoundaryPolicyArn || undefined,
|
||||
Tags: awsTags,
|
||||
UserName: username
|
||||
})
|
||||
);
|
||||
|
||||
if (!createUserRes.User) throw new BadRequestError({ message: "Failed to create AWS IAM User" });
|
||||
if (userGroups) {
|
||||
await Promise.all(
|
||||
userGroups
|
||||
.split(",")
|
||||
.filter(Boolean)
|
||||
.map((group) =>
|
||||
client.send(new AddUserToGroupCommand({ UserName: createUserRes?.User?.UserName, GroupName: group }))
|
||||
)
|
||||
);
|
||||
}
|
||||
if (policyArns) {
|
||||
await Promise.all(
|
||||
policyArns
|
||||
.split(",")
|
||||
.filter(Boolean)
|
||||
.map((policyArn) =>
|
||||
client.send(new AttachUserPolicyCommand({ UserName: createUserRes?.User?.UserName, PolicyArn: policyArn }))
|
||||
)
|
||||
);
|
||||
}
|
||||
if (policyDocument) {
|
||||
await client.send(
|
||||
new PutUserPolicyCommand({
|
||||
UserName: createUserRes.User.UserName,
|
||||
PolicyName: `infisical-dynamic-policy-${alphaNumericNanoId(4)}`,
|
||||
PolicyDocument: policyDocument
|
||||
try {
|
||||
const createUserRes = await client.send(
|
||||
new CreateUserCommand({
|
||||
Path: awsPath,
|
||||
PermissionsBoundary: permissionBoundaryPolicyArn || undefined,
|
||||
Tags: awsTags,
|
||||
UserName: username
|
||||
})
|
||||
);
|
||||
}
|
||||
|
||||
const createAccessKeyRes = await client.send(
|
||||
new CreateAccessKeyCommand({
|
||||
UserName: createUserRes.User.UserName
|
||||
})
|
||||
);
|
||||
if (!createAccessKeyRes.AccessKey)
|
||||
throw new BadRequestError({ message: "Failed to create AWS IAM User access key" });
|
||||
|
||||
return {
|
||||
entityId: username,
|
||||
data: {
|
||||
ACCESS_KEY: createAccessKeyRes.AccessKey.AccessKeyId,
|
||||
SECRET_ACCESS_KEY: createAccessKeyRes.AccessKey.SecretAccessKey,
|
||||
USERNAME: username
|
||||
if (!createUserRes.User) throw new BadRequestError({ message: "Failed to create AWS IAM User" });
|
||||
if (userGroups) {
|
||||
await Promise.all(
|
||||
userGroups
|
||||
.split(",")
|
||||
.filter(Boolean)
|
||||
.map((group) =>
|
||||
client.send(new AddUserToGroupCommand({ UserName: createUserRes?.User?.UserName, GroupName: group }))
|
||||
)
|
||||
);
|
||||
}
|
||||
};
|
||||
if (policyArns) {
|
||||
await Promise.all(
|
||||
policyArns
|
||||
.split(",")
|
||||
.filter(Boolean)
|
||||
.map((policyArn) =>
|
||||
client.send(
|
||||
new AttachUserPolicyCommand({ UserName: createUserRes?.User?.UserName, PolicyArn: policyArn })
|
||||
)
|
||||
)
|
||||
);
|
||||
}
|
||||
if (policyDocument) {
|
||||
await client.send(
|
||||
new PutUserPolicyCommand({
|
||||
UserName: createUserRes.User.UserName,
|
||||
PolicyName: `infisical-dynamic-policy-${alphaNumericNanoId(4)}`,
|
||||
PolicyDocument: policyDocument
|
||||
})
|
||||
);
|
||||
}
|
||||
|
||||
const createAccessKeyRes = await client.send(
|
||||
new CreateAccessKeyCommand({
|
||||
UserName: createUserRes.User.UserName
|
||||
})
|
||||
);
|
||||
if (!createAccessKeyRes.AccessKey)
|
||||
throw new BadRequestError({ message: "Failed to create AWS IAM User access key" });
|
||||
|
||||
return {
|
||||
entityId: username,
|
||||
data: {
|
||||
ACCESS_KEY: createAccessKeyRes.AccessKey.AccessKeyId,
|
||||
SECRET_ACCESS_KEY: createAccessKeyRes.AccessKey.SecretAccessKey,
|
||||
USERNAME: username
|
||||
}
|
||||
};
|
||||
} catch (err) {
|
||||
const sensitiveTokens = [username];
|
||||
if (providerInputs.method === AwsIamAuthType.AccessKey) {
|
||||
sensitiveTokens.push(providerInputs.accessKey, providerInputs.secretAccessKey);
|
||||
}
|
||||
if (providerInputs.method === AwsIamAuthType.AssumeRole) {
|
||||
sensitiveTokens.push(providerInputs.roleArn);
|
||||
}
|
||||
const sanitizedErrorMessage = sanitizeString({
|
||||
unsanitizedString: (err as Error)?.message,
|
||||
tokens: sensitiveTokens
|
||||
});
|
||||
throw new BadRequestError({
|
||||
message: `Failed to create lease from provider: ${sanitizedErrorMessage}`
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
const revoke = async (inputs: unknown, entityId: string, metadata: { projectId: string }) => {
|
||||
@@ -278,8 +315,25 @@ export const AwsIamProvider = (): TDynamicProviderFns => {
|
||||
)
|
||||
);
|
||||
|
||||
await client.send(new DeleteUserCommand({ UserName: username }));
|
||||
return { entityId: username };
|
||||
try {
|
||||
await client.send(new DeleteUserCommand({ UserName: username }));
|
||||
return { entityId: username };
|
||||
} catch (err) {
|
||||
const sensitiveTokens = [username];
|
||||
if (providerInputs.method === AwsIamAuthType.AccessKey) {
|
||||
sensitiveTokens.push(providerInputs.accessKey, providerInputs.secretAccessKey);
|
||||
}
|
||||
if (providerInputs.method === AwsIamAuthType.AssumeRole) {
|
||||
sensitiveTokens.push(providerInputs.roleArn);
|
||||
}
|
||||
const sanitizedErrorMessage = sanitizeString({
|
||||
unsanitizedString: (err as Error)?.message,
|
||||
tokens: sensitiveTokens
|
||||
});
|
||||
throw new BadRequestError({
|
||||
message: `Failed to revoke lease from provider: ${sanitizedErrorMessage}`
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
const renew = async (_inputs: unknown, entityId: string) => {
|
||||
|
@@ -2,6 +2,7 @@ import axios from "axios";
|
||||
import { customAlphabet } from "nanoid";
|
||||
|
||||
import { BadRequestError } from "@app/lib/errors";
|
||||
import { sanitizeString } from "@app/lib/fn";
|
||||
|
||||
import { AzureEntraIDSchema, TDynamicProviderFns } from "./models";
|
||||
|
||||
@@ -51,45 +52,82 @@ export const AzureEntraIDProvider = (): TDynamicProviderFns & {
|
||||
|
||||
const validateConnection = async (inputs: unknown) => {
|
||||
const providerInputs = await validateProviderInputs(inputs);
|
||||
const data = await $getToken(providerInputs.tenantId, providerInputs.applicationId, providerInputs.clientSecret);
|
||||
return data.success;
|
||||
try {
|
||||
const data = await $getToken(providerInputs.tenantId, providerInputs.applicationId, providerInputs.clientSecret);
|
||||
return data.success;
|
||||
} catch (err) {
|
||||
const sanitizedErrorMessage = sanitizeString({
|
||||
unsanitizedString: (err as Error)?.message,
|
||||
tokens: [providerInputs.clientSecret, providerInputs.applicationId, providerInputs.tenantId]
|
||||
});
|
||||
throw new BadRequestError({
|
||||
message: `Failed to connect with provider: ${sanitizedErrorMessage}`
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
const create = async ({ inputs }: { inputs: unknown }) => {
|
||||
const providerInputs = await validateProviderInputs(inputs);
|
||||
const data = await $getToken(providerInputs.tenantId, providerInputs.applicationId, providerInputs.clientSecret);
|
||||
if (!data.success) {
|
||||
throw new BadRequestError({ message: "Failed to authorize to Microsoft Entra ID" });
|
||||
}
|
||||
|
||||
const password = generatePassword();
|
||||
|
||||
const response = await axios.patch(
|
||||
`${MSFT_GRAPH_API_URL}/users/${providerInputs.userId}`,
|
||||
{
|
||||
passwordProfile: {
|
||||
forceChangePasswordNextSignIn: false,
|
||||
password
|
||||
}
|
||||
},
|
||||
{
|
||||
headers: {
|
||||
"Content-Type": "application/json",
|
||||
Authorization: `Bearer ${data.token}`
|
||||
}
|
||||
try {
|
||||
const data = await $getToken(providerInputs.tenantId, providerInputs.applicationId, providerInputs.clientSecret);
|
||||
if (!data.success) {
|
||||
throw new BadRequestError({ message: "Failed to authorize to Microsoft Entra ID" });
|
||||
}
|
||||
);
|
||||
if (response.status !== 204) {
|
||||
throw new BadRequestError({ message: "Failed to update password" });
|
||||
}
|
||||
|
||||
return { entityId: providerInputs.userId, data: { email: providerInputs.email, password } };
|
||||
const response = await axios.patch(
|
||||
`${MSFT_GRAPH_API_URL}/users/${providerInputs.userId}`,
|
||||
{
|
||||
passwordProfile: {
|
||||
forceChangePasswordNextSignIn: false,
|
||||
password
|
||||
}
|
||||
},
|
||||
{
|
||||
headers: {
|
||||
"Content-Type": "application/json",
|
||||
Authorization: `Bearer ${data.token}`
|
||||
}
|
||||
}
|
||||
);
|
||||
if (response.status !== 204) {
|
||||
throw new BadRequestError({ message: "Failed to update password" });
|
||||
}
|
||||
|
||||
return { entityId: providerInputs.userId, data: { email: providerInputs.email, password } };
|
||||
} catch (err) {
|
||||
const sanitizedErrorMessage = sanitizeString({
|
||||
unsanitizedString: (err as Error)?.message,
|
||||
tokens: [
|
||||
providerInputs.clientSecret,
|
||||
providerInputs.applicationId,
|
||||
providerInputs.userId,
|
||||
providerInputs.email,
|
||||
password
|
||||
]
|
||||
});
|
||||
throw new BadRequestError({
|
||||
message: `Failed to create lease from provider: ${sanitizedErrorMessage}`
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
const revoke = async (inputs: unknown, entityId: string) => {
|
||||
// Creates a new password
|
||||
await create({ inputs });
|
||||
return { entityId };
|
||||
const providerInputs = await validateProviderInputs(inputs);
|
||||
try {
|
||||
// Creates a new password
|
||||
await create({ inputs });
|
||||
return { entityId };
|
||||
} catch (err) {
|
||||
const sanitizedErrorMessage = sanitizeString({
|
||||
unsanitizedString: (err as Error)?.message,
|
||||
tokens: [providerInputs.clientSecret, providerInputs.applicationId, entityId]
|
||||
});
|
||||
throw new BadRequestError({
|
||||
message: `Failed to revoke lease from provider: ${sanitizedErrorMessage}`
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
const fetchAzureEntraIdUsers = async (tenantId: string, applicationId: string, clientSecret: string) => {
|
||||
|
@@ -3,6 +3,8 @@ import handlebars from "handlebars";
|
||||
import { customAlphabet } from "nanoid";
|
||||
import { z } from "zod";
|
||||
|
||||
import { BadRequestError } from "@app/lib/errors";
|
||||
import { sanitizeString } from "@app/lib/fn";
|
||||
import { alphaNumericNanoId } from "@app/lib/nanoid";
|
||||
import { validateHandlebarTemplate } from "@app/lib/template/validate-handlebars";
|
||||
|
||||
@@ -71,9 +73,24 @@ export const CassandraProvider = (): TDynamicProviderFns => {
|
||||
const providerInputs = await validateProviderInputs(inputs);
|
||||
const client = await $getClient(providerInputs);
|
||||
|
||||
const isConnected = await client.execute("SELECT * FROM system_schema.keyspaces").then(() => true);
|
||||
await client.shutdown();
|
||||
return isConnected;
|
||||
try {
|
||||
const isConnected = await client.execute("SELECT * FROM system_schema.keyspaces").then(() => true);
|
||||
await client.shutdown();
|
||||
return isConnected;
|
||||
} catch (err) {
|
||||
const tokens = [providerInputs.password, providerInputs.username];
|
||||
if (providerInputs.keyspace) {
|
||||
tokens.push(providerInputs.keyspace);
|
||||
}
|
||||
const sanitizedErrorMessage = sanitizeString({
|
||||
unsanitizedString: (err as Error)?.message,
|
||||
tokens
|
||||
});
|
||||
await client.shutdown();
|
||||
throw new BadRequestError({
|
||||
message: `Failed to connect with provider: ${sanitizedErrorMessage}`
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
const create = async (data: {
|
||||
@@ -89,23 +106,39 @@ export const CassandraProvider = (): TDynamicProviderFns => {
|
||||
const username = generateUsername(usernameTemplate, identity);
|
||||
const password = generatePassword();
|
||||
const { keyspace } = providerInputs;
|
||||
const expiration = new Date(expireAt).toISOString();
|
||||
|
||||
const creationStatement = handlebars.compile(providerInputs.creationStatement, { noEscape: true })({
|
||||
username,
|
||||
password,
|
||||
expiration,
|
||||
keyspace
|
||||
});
|
||||
try {
|
||||
const expiration = new Date(expireAt).toISOString();
|
||||
|
||||
const queries = creationStatement.toString().split(";").filter(Boolean);
|
||||
for (const query of queries) {
|
||||
// eslint-disable-next-line
|
||||
await client.execute(query);
|
||||
const creationStatement = handlebars.compile(providerInputs.creationStatement, { noEscape: true })({
|
||||
username,
|
||||
password,
|
||||
expiration,
|
||||
keyspace
|
||||
});
|
||||
|
||||
const queries = creationStatement.toString().split(";").filter(Boolean);
|
||||
for (const query of queries) {
|
||||
// eslint-disable-next-line
|
||||
await client.execute(query);
|
||||
}
|
||||
await client.shutdown();
|
||||
|
||||
return { entityId: username, data: { DB_USERNAME: username, DB_PASSWORD: password } };
|
||||
} catch (err) {
|
||||
const tokens = [username, password];
|
||||
if (keyspace) {
|
||||
tokens.push(keyspace);
|
||||
}
|
||||
const sanitizedErrorMessage = sanitizeString({
|
||||
unsanitizedString: (err as Error)?.message,
|
||||
tokens
|
||||
});
|
||||
await client.shutdown();
|
||||
throw new BadRequestError({
|
||||
message: `Failed to create lease from provider: ${sanitizedErrorMessage}`
|
||||
});
|
||||
}
|
||||
await client.shutdown();
|
||||
|
||||
return { entityId: username, data: { DB_USERNAME: username, DB_PASSWORD: password } };
|
||||
};
|
||||
|
||||
const revoke = async (inputs: unknown, entityId: string) => {
|
||||
@@ -115,14 +148,29 @@ export const CassandraProvider = (): TDynamicProviderFns => {
|
||||
const username = entityId;
|
||||
const { keyspace } = providerInputs;
|
||||
|
||||
const revokeStatement = handlebars.compile(providerInputs.revocationStatement)({ username, keyspace });
|
||||
const queries = revokeStatement.toString().split(";").filter(Boolean);
|
||||
for (const query of queries) {
|
||||
// eslint-disable-next-line
|
||||
await client.execute(query);
|
||||
try {
|
||||
const revokeStatement = handlebars.compile(providerInputs.revocationStatement)({ username, keyspace });
|
||||
const queries = revokeStatement.toString().split(";").filter(Boolean);
|
||||
for (const query of queries) {
|
||||
// eslint-disable-next-line
|
||||
await client.execute(query);
|
||||
}
|
||||
await client.shutdown();
|
||||
return { entityId: username };
|
||||
} catch (err) {
|
||||
const tokens = [username];
|
||||
if (keyspace) {
|
||||
tokens.push(keyspace);
|
||||
}
|
||||
const sanitizedErrorMessage = sanitizeString({
|
||||
unsanitizedString: (err as Error)?.message,
|
||||
tokens
|
||||
});
|
||||
await client.shutdown();
|
||||
throw new BadRequestError({
|
||||
message: `Failed to revoke lease from provider: ${sanitizedErrorMessage}`
|
||||
});
|
||||
}
|
||||
await client.shutdown();
|
||||
return { entityId: username };
|
||||
};
|
||||
|
||||
const renew = async (inputs: unknown, entityId: string, expireAt: number) => {
|
||||
@@ -130,21 +178,36 @@ export const CassandraProvider = (): TDynamicProviderFns => {
|
||||
if (!providerInputs.renewStatement) return { entityId };
|
||||
|
||||
const client = await $getClient(providerInputs);
|
||||
|
||||
const expiration = new Date(expireAt).toISOString();
|
||||
const { keyspace } = providerInputs;
|
||||
|
||||
const renewStatement = handlebars.compile(providerInputs.renewStatement)({
|
||||
username: entityId,
|
||||
keyspace,
|
||||
expiration
|
||||
});
|
||||
const queries = renewStatement.toString().split(";").filter(Boolean);
|
||||
for await (const query of queries) {
|
||||
await client.execute(query);
|
||||
try {
|
||||
const expiration = new Date(expireAt).toISOString();
|
||||
|
||||
const renewStatement = handlebars.compile(providerInputs.renewStatement)({
|
||||
username: entityId,
|
||||
keyspace,
|
||||
expiration
|
||||
});
|
||||
const queries = renewStatement.toString().split(";").filter(Boolean);
|
||||
for await (const query of queries) {
|
||||
await client.execute(query);
|
||||
}
|
||||
await client.shutdown();
|
||||
return { entityId };
|
||||
} catch (err) {
|
||||
const tokens = [entityId];
|
||||
if (keyspace) {
|
||||
tokens.push(keyspace);
|
||||
}
|
||||
const sanitizedErrorMessage = sanitizeString({
|
||||
unsanitizedString: (err as Error)?.message,
|
||||
tokens
|
||||
});
|
||||
await client.shutdown();
|
||||
throw new BadRequestError({
|
||||
message: `Failed to renew lease from provider: ${sanitizedErrorMessage}`
|
||||
});
|
||||
}
|
||||
await client.shutdown();
|
||||
return { entityId };
|
||||
};
|
||||
|
||||
return {
|
||||
|
@@ -2,6 +2,8 @@ import { Client as ElasticSearchClient } from "@elastic/elasticsearch";
|
||||
import { customAlphabet } from "nanoid";
|
||||
import { z } from "zod";
|
||||
|
||||
import { BadRequestError } from "@app/lib/errors";
|
||||
import { sanitizeString } from "@app/lib/fn";
|
||||
import { alphaNumericNanoId } from "@app/lib/nanoid";
|
||||
|
||||
import { verifyHostInputValidity } from "../dynamic-secret-fns";
|
||||
@@ -63,12 +65,24 @@ export const ElasticSearchProvider = (): TDynamicProviderFns => {
|
||||
const providerInputs = await validateProviderInputs(inputs);
|
||||
const connection = await $getClient(providerInputs);
|
||||
|
||||
const infoResponse = await connection
|
||||
.info()
|
||||
.then(() => true)
|
||||
.catch(() => false);
|
||||
|
||||
return infoResponse;
|
||||
try {
|
||||
const infoResponse = await connection.info().then(() => true);
|
||||
return infoResponse;
|
||||
} catch (err) {
|
||||
const tokens = [];
|
||||
if (providerInputs.auth.type === ElasticSearchAuthTypes.ApiKey) {
|
||||
tokens.push(providerInputs.auth.apiKey, providerInputs.auth.apiKeyId);
|
||||
} else {
|
||||
tokens.push(providerInputs.auth.username, providerInputs.auth.password);
|
||||
}
|
||||
const sanitizedErrorMessage = sanitizeString({
|
||||
unsanitizedString: (err as Error)?.message,
|
||||
tokens
|
||||
});
|
||||
throw new BadRequestError({
|
||||
message: `Failed to connect with provider: ${sanitizedErrorMessage}`
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
const create = async (data: { inputs: unknown; usernameTemplate?: string | null; identity?: { name: string } }) => {
|
||||
@@ -79,27 +93,49 @@ export const ElasticSearchProvider = (): TDynamicProviderFns => {
|
||||
const username = generateUsername(usernameTemplate, identity);
|
||||
const password = generatePassword();
|
||||
|
||||
await connection.security.putUser({
|
||||
username,
|
||||
password,
|
||||
full_name: "Managed by Infisical.com",
|
||||
roles: providerInputs.roles
|
||||
});
|
||||
try {
|
||||
await connection.security.putUser({
|
||||
username,
|
||||
password,
|
||||
full_name: "Managed by Infisical.com",
|
||||
roles: providerInputs.roles
|
||||
});
|
||||
|
||||
await connection.close();
|
||||
return { entityId: username, data: { DB_USERNAME: username, DB_PASSWORD: password } };
|
||||
await connection.close();
|
||||
return { entityId: username, data: { DB_USERNAME: username, DB_PASSWORD: password } };
|
||||
} catch (err) {
|
||||
const sanitizedErrorMessage = sanitizeString({
|
||||
unsanitizedString: (err as Error)?.message,
|
||||
tokens: [username, password]
|
||||
});
|
||||
await connection.close();
|
||||
throw new BadRequestError({
|
||||
message: `Failed to create lease from provider: ${sanitizedErrorMessage}`
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
const revoke = async (inputs: unknown, entityId: string) => {
|
||||
const providerInputs = await validateProviderInputs(inputs);
|
||||
const connection = await $getClient(providerInputs);
|
||||
|
||||
await connection.security.deleteUser({
|
||||
username: entityId
|
||||
});
|
||||
try {
|
||||
await connection.security.deleteUser({
|
||||
username: entityId
|
||||
});
|
||||
|
||||
await connection.close();
|
||||
return { entityId };
|
||||
await connection.close();
|
||||
return { entityId };
|
||||
} catch (err) {
|
||||
const sanitizedErrorMessage = sanitizeString({
|
||||
unsanitizedString: (err as Error)?.message,
|
||||
tokens: [entityId]
|
||||
});
|
||||
await connection.close();
|
||||
throw new BadRequestError({
|
||||
message: `Failed to revoke lease from provider: ${sanitizedErrorMessage}`
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
const renew = async (_inputs: unknown, entityId: string) => {
|
||||
|
@@ -3,6 +3,7 @@ import { GetAccessTokenResponse } from "google-auth-library/build/src/auth/oauth
|
||||
|
||||
import { getConfig } from "@app/lib/config/env";
|
||||
import { BadRequestError, InternalServerError } from "@app/lib/errors";
|
||||
import { sanitizeString } from "@app/lib/fn";
|
||||
import { alphaNumericNanoId } from "@app/lib/nanoid";
|
||||
|
||||
import { DynamicSecretGcpIamSchema, TDynamicProviderFns } from "./models";
|
||||
@@ -65,8 +66,18 @@ export const GcpIamProvider = (): TDynamicProviderFns => {
|
||||
|
||||
const validateConnection = async (inputs: unknown) => {
|
||||
const providerInputs = await validateProviderInputs(inputs);
|
||||
await $getToken(providerInputs.serviceAccountEmail, 10);
|
||||
return true;
|
||||
try {
|
||||
await $getToken(providerInputs.serviceAccountEmail, 10);
|
||||
return true;
|
||||
} catch (err) {
|
||||
const sanitizedErrorMessage = sanitizeString({
|
||||
unsanitizedString: (err as Error)?.message,
|
||||
tokens: [providerInputs.serviceAccountEmail]
|
||||
});
|
||||
throw new BadRequestError({
|
||||
message: `Failed to connect with provider: ${sanitizedErrorMessage}`
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
const create = async (data: { inputs: unknown; expireAt: number }) => {
|
||||
@@ -74,13 +85,23 @@ export const GcpIamProvider = (): TDynamicProviderFns => {
|
||||
|
||||
const providerInputs = await validateProviderInputs(inputs);
|
||||
|
||||
const now = Math.floor(Date.now() / 1000);
|
||||
const ttl = Math.max(Math.floor(expireAt / 1000) - now, 0);
|
||||
try {
|
||||
const now = Math.floor(Date.now() / 1000);
|
||||
const ttl = Math.max(Math.floor(expireAt / 1000) - now, 0);
|
||||
|
||||
const token = await $getToken(providerInputs.serviceAccountEmail, ttl);
|
||||
const entityId = alphaNumericNanoId(32);
|
||||
const token = await $getToken(providerInputs.serviceAccountEmail, ttl);
|
||||
const entityId = alphaNumericNanoId(32);
|
||||
|
||||
return { entityId, data: { SERVICE_ACCOUNT_EMAIL: providerInputs.serviceAccountEmail, TOKEN: token } };
|
||||
return { entityId, data: { SERVICE_ACCOUNT_EMAIL: providerInputs.serviceAccountEmail, TOKEN: token } };
|
||||
} catch (err) {
|
||||
const sanitizedErrorMessage = sanitizeString({
|
||||
unsanitizedString: (err as Error)?.message,
|
||||
tokens: [providerInputs.serviceAccountEmail]
|
||||
});
|
||||
throw new BadRequestError({
|
||||
message: `Failed to create lease from provider: ${sanitizedErrorMessage}`
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
const revoke = async (_inputs: unknown, entityId: string) => {
|
||||
@@ -89,10 +110,21 @@ export const GcpIamProvider = (): TDynamicProviderFns => {
|
||||
};
|
||||
|
||||
const renew = async (inputs: unknown, entityId: string, expireAt: number) => {
|
||||
// To renew a token it must be re-created
|
||||
const data = await create({ inputs, expireAt });
|
||||
try {
|
||||
// To renew a token it must be re-created
|
||||
const data = await create({ inputs, expireAt });
|
||||
|
||||
return { ...data, entityId };
|
||||
return { ...data, entityId };
|
||||
} catch (err) {
|
||||
const providerInputs = await validateProviderInputs(inputs);
|
||||
const sanitizedErrorMessage = sanitizeString({
|
||||
unsanitizedString: (err as Error)?.message,
|
||||
tokens: [providerInputs.serviceAccountEmail]
|
||||
});
|
||||
throw new BadRequestError({
|
||||
message: `Failed to renew lease from provider: ${sanitizedErrorMessage}`
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
return {
|
||||
|
@@ -3,6 +3,7 @@ import jwt from "jsonwebtoken";
|
||||
|
||||
import { crypto } from "@app/lib/crypto";
|
||||
import { BadRequestError, InternalServerError } from "@app/lib/errors";
|
||||
import { sanitizeString } from "@app/lib/fn";
|
||||
import { alphaNumericNanoId } from "@app/lib/nanoid";
|
||||
import { IntegrationUrls } from "@app/services/integration-auth/integration-list";
|
||||
|
||||
@@ -89,26 +90,46 @@ export const GithubProvider = (): TDynamicProviderFns => {
|
||||
|
||||
const validateConnection = async (inputs: unknown) => {
|
||||
const providerInputs = await validateProviderInputs(inputs);
|
||||
await $generateGitHubInstallationAccessToken(providerInputs);
|
||||
return true;
|
||||
try {
|
||||
await $generateGitHubInstallationAccessToken(providerInputs);
|
||||
return true;
|
||||
} catch (err) {
|
||||
const sanitizedErrorMessage = sanitizeString({
|
||||
unsanitizedString: (err as Error)?.message,
|
||||
tokens: [providerInputs.privateKey, String(providerInputs.appId), String(providerInputs.installationId)]
|
||||
});
|
||||
throw new BadRequestError({
|
||||
message: `Failed to connect with provider: ${sanitizedErrorMessage}`
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
const create = async (data: { inputs: unknown }) => {
|
||||
const { inputs } = data;
|
||||
const providerInputs = await validateProviderInputs(inputs);
|
||||
|
||||
const ghTokenData = await $generateGitHubInstallationAccessToken(providerInputs);
|
||||
const entityId = alphaNumericNanoId(32);
|
||||
try {
|
||||
const ghTokenData = await $generateGitHubInstallationAccessToken(providerInputs);
|
||||
const entityId = alphaNumericNanoId(32);
|
||||
|
||||
return {
|
||||
entityId,
|
||||
data: {
|
||||
TOKEN: ghTokenData.token,
|
||||
EXPIRES_AT: ghTokenData.expires_at,
|
||||
PERMISSIONS: ghTokenData.permissions,
|
||||
REPOSITORY_SELECTION: ghTokenData.repository_selection
|
||||
}
|
||||
};
|
||||
return {
|
||||
entityId,
|
||||
data: {
|
||||
TOKEN: ghTokenData.token,
|
||||
EXPIRES_AT: ghTokenData.expires_at,
|
||||
PERMISSIONS: ghTokenData.permissions,
|
||||
REPOSITORY_SELECTION: ghTokenData.repository_selection
|
||||
}
|
||||
};
|
||||
} catch (err) {
|
||||
const sanitizedErrorMessage = sanitizeString({
|
||||
unsanitizedString: (err as Error)?.message,
|
||||
tokens: [providerInputs.privateKey, String(providerInputs.appId), String(providerInputs.installationId)]
|
||||
});
|
||||
throw new BadRequestError({
|
||||
message: `Failed to create lease from provider: ${sanitizedErrorMessage}`
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
const revoke = async () => {
|
||||
|
@@ -2,7 +2,8 @@ import axios, { AxiosError } from "axios";
|
||||
import handlebars from "handlebars";
|
||||
import https from "https";
|
||||
|
||||
import { BadRequestError, InternalServerError } from "@app/lib/errors";
|
||||
import { BadRequestError } from "@app/lib/errors";
|
||||
import { sanitizeString } from "@app/lib/fn";
|
||||
import { GatewayHttpProxyActions, GatewayProxyProtocol, withGatewayProxy } from "@app/lib/gateway";
|
||||
import { alphaNumericNanoId } from "@app/lib/nanoid";
|
||||
import { blockLocalAndPrivateIpAddresses } from "@app/lib/validator";
|
||||
@@ -356,8 +357,12 @@ export const KubernetesProvider = ({ gatewayService }: TKubernetesProviderDTO):
|
||||
errorMessage = (error.response?.data as { message: string }).message;
|
||||
}
|
||||
|
||||
throw new InternalServerError({
|
||||
message: `Failed to validate connection: ${errorMessage}`
|
||||
const sanitizedErrorMessage = sanitizeString({
|
||||
unsanitizedString: errorMessage,
|
||||
tokens: [providerInputs.clusterToken || ""]
|
||||
});
|
||||
throw new BadRequestError({
|
||||
message: `Failed to connect with provider: ${sanitizedErrorMessage}`
|
||||
});
|
||||
}
|
||||
};
|
||||
@@ -602,8 +607,12 @@ export const KubernetesProvider = ({ gatewayService }: TKubernetesProviderDTO):
|
||||
errorMessage = (error.response?.data as { message: string }).message;
|
||||
}
|
||||
|
||||
throw new InternalServerError({
|
||||
message: `Failed to create dynamic secret: ${errorMessage}`
|
||||
const sanitizedErrorMessage = sanitizeString({
|
||||
unsanitizedString: errorMessage,
|
||||
tokens: [providerInputs.clusterToken || ""]
|
||||
});
|
||||
throw new BadRequestError({
|
||||
message: `Failed to create lease from provider: ${sanitizedErrorMessage}`
|
||||
});
|
||||
}
|
||||
};
|
||||
@@ -683,50 +692,65 @@ export const KubernetesProvider = ({ gatewayService }: TKubernetesProviderDTO):
|
||||
};
|
||||
|
||||
if (providerInputs.credentialType === KubernetesCredentialType.Dynamic) {
|
||||
const rawUrl =
|
||||
providerInputs.authMethod === KubernetesAuthMethod.Gateway
|
||||
? GATEWAY_AUTH_DEFAULT_URL
|
||||
: providerInputs.url || "";
|
||||
try {
|
||||
const rawUrl =
|
||||
providerInputs.authMethod === KubernetesAuthMethod.Gateway
|
||||
? GATEWAY_AUTH_DEFAULT_URL
|
||||
: providerInputs.url || "";
|
||||
|
||||
const url = new URL(rawUrl);
|
||||
const k8sGatewayHost = url.hostname;
|
||||
const k8sPort = url.port ? Number(url.port) : 443;
|
||||
const k8sHost = `${url.protocol}//${url.hostname}`;
|
||||
const url = new URL(rawUrl);
|
||||
const k8sGatewayHost = url.hostname;
|
||||
const k8sPort = url.port ? Number(url.port) : 443;
|
||||
const k8sHost = `${url.protocol}//${url.hostname}`;
|
||||
|
||||
const httpsAgent =
|
||||
providerInputs.ca && providerInputs.sslEnabled
|
||||
? new https.Agent({
|
||||
ca: providerInputs.ca,
|
||||
rejectUnauthorized: true
|
||||
})
|
||||
: undefined;
|
||||
const httpsAgent =
|
||||
providerInputs.ca && providerInputs.sslEnabled
|
||||
? new https.Agent({
|
||||
ca: providerInputs.ca,
|
||||
rejectUnauthorized: true
|
||||
})
|
||||
: undefined;
|
||||
|
||||
if (providerInputs.gatewayId) {
|
||||
if (providerInputs.authMethod === KubernetesAuthMethod.Gateway) {
|
||||
await $gatewayProxyWrapper(
|
||||
{
|
||||
gatewayId: providerInputs.gatewayId,
|
||||
targetHost: k8sHost,
|
||||
targetPort: k8sPort,
|
||||
httpsAgent,
|
||||
reviewTokenThroughGateway: true
|
||||
},
|
||||
serviceAccountDynamicCallback
|
||||
);
|
||||
if (providerInputs.gatewayId) {
|
||||
if (providerInputs.authMethod === KubernetesAuthMethod.Gateway) {
|
||||
await $gatewayProxyWrapper(
|
||||
{
|
||||
gatewayId: providerInputs.gatewayId,
|
||||
targetHost: k8sHost,
|
||||
targetPort: k8sPort,
|
||||
httpsAgent,
|
||||
reviewTokenThroughGateway: true
|
||||
},
|
||||
serviceAccountDynamicCallback
|
||||
);
|
||||
} else {
|
||||
await $gatewayProxyWrapper(
|
||||
{
|
||||
gatewayId: providerInputs.gatewayId,
|
||||
targetHost: k8sGatewayHost,
|
||||
targetPort: k8sPort,
|
||||
httpsAgent,
|
||||
reviewTokenThroughGateway: false
|
||||
},
|
||||
serviceAccountDynamicCallback
|
||||
);
|
||||
}
|
||||
} else {
|
||||
await $gatewayProxyWrapper(
|
||||
{
|
||||
gatewayId: providerInputs.gatewayId,
|
||||
targetHost: k8sGatewayHost,
|
||||
targetPort: k8sPort,
|
||||
httpsAgent,
|
||||
reviewTokenThroughGateway: false
|
||||
},
|
||||
serviceAccountDynamicCallback
|
||||
);
|
||||
await serviceAccountDynamicCallback(k8sHost, k8sPort, httpsAgent);
|
||||
}
|
||||
} else {
|
||||
await serviceAccountDynamicCallback(k8sHost, k8sPort, httpsAgent);
|
||||
} catch (error) {
|
||||
let errorMessage = error instanceof Error ? error.message : "Unknown error";
|
||||
if (axios.isAxiosError(error) && (error.response?.data as { message: string })?.message) {
|
||||
errorMessage = (error.response?.data as { message: string }).message;
|
||||
}
|
||||
|
||||
const sanitizedErrorMessage = sanitizeString({
|
||||
unsanitizedString: errorMessage,
|
||||
tokens: [entityId, providerInputs.clusterToken || ""]
|
||||
});
|
||||
throw new BadRequestError({
|
||||
message: `Failed to revoke lease from provider: ${sanitizedErrorMessage}`
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
|
@@ -6,6 +6,7 @@ import RE2 from "re2";
import { z } from "zod";

import { BadRequestError } from "@app/lib/errors";
import { sanitizeString } from "@app/lib/fn";
import { alphaNumericNanoId } from "@app/lib/nanoid";

import { LdapCredentialType, LdapSchema, TDynamicProviderFns } from "./models";

@@ -91,8 +92,18 @@ export const LdapProvider = (): TDynamicProviderFns => {

const validateConnection = async (inputs: unknown) => {
const providerInputs = await validateProviderInputs(inputs);
const client = await $getClient(providerInputs);
return client.connected;
try {
const client = await $getClient(providerInputs);
return client.connected;
} catch (err) {
const sanitizedErrorMessage = sanitizeString({
unsanitizedString: (err as Error)?.message,
tokens: [providerInputs.bindpass, providerInputs.binddn]
});
throw new BadRequestError({
message: `Failed to connect with provider: ${sanitizedErrorMessage}`
});
}
};

const executeLdif = async (client: ldapjs.Client, ldif_file: string) => {

@@ -205,11 +216,11 @@ export const LdapProvider = (): TDynamicProviderFns => {
if (providerInputs.credentialType === LdapCredentialType.Static) {
const dnRegex = new RE2("^dn:\\s*(.+)", "m");
const dnMatch = dnRegex.exec(providerInputs.rotationLdif);
const username = dnMatch?.[1];
if (!username) throw new BadRequestError({ message: "Username not found from Ldif" });
const password = generatePassword();

if (dnMatch) {
const username = dnMatch[1];
const password = generatePassword();

const generatedLdif = generateLDIF({ username, password, ldifTemplate: providerInputs.rotationLdif });

try {

@@ -217,7 +228,11 @@ export const LdapProvider = (): TDynamicProviderFns => {

return { entityId: username, data: { DN_ARRAY: dnArray, USERNAME: username, PASSWORD: password } };
} catch (err) {
throw new BadRequestError({ message: (err as Error).message });
const sanitizedErrorMessage = sanitizeString({
unsanitizedString: (err as Error)?.message,
tokens: [username, password, providerInputs.binddn, providerInputs.bindpass]
});
throw new BadRequestError({ message: sanitizedErrorMessage });
}
} else {
throw new BadRequestError({

@@ -238,7 +253,11 @@ export const LdapProvider = (): TDynamicProviderFns => {
const rollbackLdif = generateLDIF({ username, password, ldifTemplate: providerInputs.rollbackLdif });
await executeLdif(client, rollbackLdif);
}
throw new BadRequestError({ message: (err as Error).message });
const sanitizedErrorMessage = sanitizeString({
unsanitizedString: (err as Error)?.message,
tokens: [username, password, providerInputs.binddn, providerInputs.bindpass]
});
throw new BadRequestError({ message: sanitizedErrorMessage });
}
}
};

@@ -262,7 +281,11 @@ export const LdapProvider = (): TDynamicProviderFns => {

return { entityId: username, data: { DN_ARRAY: dnArray, USERNAME: username, PASSWORD: password } };
} catch (err) {
throw new BadRequestError({ message: (err as Error).message });
const sanitizedErrorMessage = sanitizeString({
unsanitizedString: (err as Error)?.message,
tokens: [username, password, providerInputs.binddn, providerInputs.bindpass]
});
throw new BadRequestError({ message: sanitizedErrorMessage });
}
} else {
throw new BadRequestError({

@@ -278,7 +301,7 @@ export const LdapProvider = (): TDynamicProviderFns => {
return { entityId };
};

const renew = async (inputs: unknown, entityId: string) => {
const renew = async (_inputs: unknown, entityId: string) => {
// No renewal necessary
return { entityId };
};
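Every hunk in this change set applies the same wrapping: the provider call runs inside a try/catch, and the caught error message is passed through sanitizeString with the connection credentials as redaction tokens before a BadRequestError is rethrown. The snippet below is a minimal, self-contained sketch of that pattern in TypeScript; the local sanitizeStringSketch stand-in and the connectToDirectory call are illustrative assumptions, not the repository's actual implementations in @app/lib/fn.

// Stand-in approximating the sanitizeString helper used above: replace every
// occurrence of each secret token in the message with a masked marker.
const sanitizeStringSketch = ({ unsanitizedString, tokens }: { unsanitizedString?: string; tokens: string[] }) =>
  tokens.filter(Boolean).reduce((msg, token) => msg.split(token).join("[REDACTED]"), unsanitizedString ?? "");

// Hypothetical provider call, used only so the example is runnable.
const connectToDirectory = async (binddn: string, bindpass: string): Promise<boolean> => {
  throw new Error(`bind failed for ${binddn} using password ${bindpass}`);
};

export const validateConnectionSketch = async (binddn: string, bindpass: string) => {
  try {
    return await connectToDirectory(binddn, bindpass);
  } catch (err) {
    const sanitizedErrorMessage = sanitizeStringSketch({
      unsanitizedString: (err as Error)?.message,
      tokens: [bindpass, binddn]
    });
    // Credentials never reach logs or API responses; only the redacted message is surfaced.
    throw new Error(`Failed to connect with provider: ${sanitizedErrorMessage}`);
  }
};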
@@ -3,6 +3,8 @@ import { customAlphabet } from "nanoid";
|
||||
import { z } from "zod";
|
||||
|
||||
import { createDigestAuthRequestInterceptor } from "@app/lib/axios/digest-auth";
|
||||
import { BadRequestError } from "@app/lib/errors";
|
||||
import { sanitizeString } from "@app/lib/fn";
|
||||
import { alphaNumericNanoId } from "@app/lib/nanoid";
|
||||
|
||||
import { DynamicSecretMongoAtlasSchema, TDynamicProviderFns } from "./models";
|
||||
@@ -49,19 +51,25 @@ export const MongoAtlasProvider = (): TDynamicProviderFns => {
|
||||
const providerInputs = await validateProviderInputs(inputs);
|
||||
const client = await $getClient(providerInputs);
|
||||
|
||||
const isConnected = await client({
|
||||
method: "GET",
|
||||
url: `v2/groups/${providerInputs.groupId}/databaseUsers`,
|
||||
params: { itemsPerPage: 1 }
|
||||
})
|
||||
.then(() => true)
|
||||
.catch((error) => {
|
||||
if ((error as AxiosError).response) {
|
||||
throw new Error(JSON.stringify((error as AxiosError).response?.data));
|
||||
}
|
||||
throw error;
|
||||
try {
|
||||
const isConnected = await client({
|
||||
method: "GET",
|
||||
url: `v2/groups/${providerInputs.groupId}/databaseUsers`,
|
||||
params: { itemsPerPage: 1 }
|
||||
}).then(() => true);
|
||||
return isConnected;
|
||||
} catch (error) {
|
||||
const errorMessage = (error as AxiosError).response
|
||||
? JSON.stringify((error as AxiosError).response?.data)
|
||||
: (error as Error)?.message;
|
||||
const sanitizedErrorMessage = sanitizeString({
|
||||
unsanitizedString: errorMessage,
|
||||
tokens: [providerInputs.adminPublicKey, providerInputs.adminPrivateKey, providerInputs.groupId]
|
||||
});
|
||||
return isConnected;
|
||||
throw new BadRequestError({
|
||||
message: `Failed to connect with provider: ${sanitizedErrorMessage}`
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
const create = async (data: {
|
||||
@@ -77,25 +85,39 @@ export const MongoAtlasProvider = (): TDynamicProviderFns => {
|
||||
const username = generateUsername(usernameTemplate, identity);
|
||||
const password = generatePassword();
|
||||
const expiration = new Date(expireAt).toISOString();
|
||||
await client({
|
||||
method: "POST",
|
||||
url: `/v2/groups/${providerInputs.groupId}/databaseUsers`,
|
||||
data: {
|
||||
roles: providerInputs.roles,
|
||||
scopes: providerInputs.scopes,
|
||||
deleteAfterDate: expiration,
|
||||
username,
|
||||
password,
|
||||
databaseName: "admin",
|
||||
groupId: providerInputs.groupId
|
||||
}
|
||||
}).catch((error) => {
|
||||
if ((error as AxiosError).response) {
|
||||
throw new Error(JSON.stringify((error as AxiosError).response?.data));
|
||||
}
|
||||
throw error;
|
||||
});
|
||||
return { entityId: username, data: { DB_USERNAME: username, DB_PASSWORD: password } };
|
||||
try {
|
||||
await client({
|
||||
method: "POST",
|
||||
url: `/v2/groups/${providerInputs.groupId}/databaseUsers`,
|
||||
data: {
|
||||
roles: providerInputs.roles,
|
||||
scopes: providerInputs.scopes,
|
||||
deleteAfterDate: expiration,
|
||||
username,
|
||||
password,
|
||||
databaseName: "admin",
|
||||
groupId: providerInputs.groupId
|
||||
}
|
||||
});
|
||||
return { entityId: username, data: { DB_USERNAME: username, DB_PASSWORD: password } };
|
||||
} catch (error) {
|
||||
const errorMessage = (error as AxiosError).response
|
||||
? JSON.stringify((error as AxiosError).response?.data)
|
||||
: (error as Error)?.message;
|
||||
const sanitizedErrorMessage = sanitizeString({
|
||||
unsanitizedString: errorMessage,
|
||||
tokens: [
|
||||
username,
|
||||
password,
|
||||
providerInputs.adminPublicKey,
|
||||
providerInputs.adminPrivateKey,
|
||||
providerInputs.groupId
|
||||
]
|
||||
});
|
||||
throw new BadRequestError({
|
||||
message: `Failed to create lease from provider: ${sanitizedErrorMessage}`
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
const revoke = async (inputs: unknown, entityId: string) => {
|
||||
@@ -111,15 +133,23 @@ export const MongoAtlasProvider = (): TDynamicProviderFns => {
|
||||
throw err;
|
||||
});
|
||||
if (isExisting) {
|
||||
await client({
|
||||
method: "DELETE",
|
||||
url: `/v2/groups/${providerInputs.groupId}/databaseUsers/admin/${username}`
|
||||
}).catch((error) => {
|
||||
if ((error as AxiosError).response) {
|
||||
throw new Error(JSON.stringify((error as AxiosError).response?.data));
|
||||
}
|
||||
throw error;
|
||||
});
|
||||
try {
|
||||
await client({
|
||||
method: "DELETE",
|
||||
url: `/v2/groups/${providerInputs.groupId}/databaseUsers/admin/${username}`
|
||||
});
|
||||
} catch (error) {
|
||||
const errorMessage = (error as AxiosError).response
|
||||
? JSON.stringify((error as AxiosError).response?.data)
|
||||
: (error as Error)?.message;
|
||||
const sanitizedErrorMessage = sanitizeString({
|
||||
unsanitizedString: errorMessage,
|
||||
tokens: [username, providerInputs.adminPublicKey, providerInputs.adminPrivateKey, providerInputs.groupId]
|
||||
});
|
||||
throw new BadRequestError({
|
||||
message: `Failed to revoke lease from provider: ${sanitizedErrorMessage}`
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
return { entityId: username };
|
||||
@@ -132,21 +162,29 @@ export const MongoAtlasProvider = (): TDynamicProviderFns => {
|
||||
const username = entityId;
|
||||
const expiration = new Date(expireAt).toISOString();
|
||||
|
||||
await client({
|
||||
method: "PATCH",
|
||||
url: `/v2/groups/${providerInputs.groupId}/databaseUsers/admin/${username}`,
|
||||
data: {
|
||||
deleteAfterDate: expiration,
|
||||
databaseName: "admin",
|
||||
groupId: providerInputs.groupId
|
||||
}
|
||||
}).catch((error) => {
|
||||
if ((error as AxiosError).response) {
|
||||
throw new Error(JSON.stringify((error as AxiosError).response?.data));
|
||||
}
|
||||
throw error;
|
||||
});
|
||||
return { entityId: username };
|
||||
try {
|
||||
await client({
|
||||
method: "PATCH",
|
||||
url: `/v2/groups/${providerInputs.groupId}/databaseUsers/admin/${username}`,
|
||||
data: {
|
||||
deleteAfterDate: expiration,
|
||||
databaseName: "admin",
|
||||
groupId: providerInputs.groupId
|
||||
}
|
||||
});
|
||||
return { entityId: username };
|
||||
} catch (error) {
|
||||
const errorMessage = (error as AxiosError).response
|
||||
? JSON.stringify((error as AxiosError).response?.data)
|
||||
: (error as Error)?.message;
|
||||
const sanitizedErrorMessage = sanitizeString({
|
||||
unsanitizedString: errorMessage,
|
||||
tokens: [username, providerInputs.adminPublicKey, providerInputs.adminPrivateKey, providerInputs.groupId]
|
||||
});
|
||||
throw new BadRequestError({
|
||||
message: `Failed to renew lease from provider: ${sanitizedErrorMessage}`
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
return {
|
||||
|
@@ -2,6 +2,8 @@ import { MongoClient } from "mongodb";
|
||||
import { customAlphabet } from "nanoid";
|
||||
import { z } from "zod";
|
||||
|
||||
import { BadRequestError } from "@app/lib/errors";
|
||||
import { sanitizeString } from "@app/lib/fn";
|
||||
import { alphaNumericNanoId } from "@app/lib/nanoid";
|
||||
|
||||
import { verifyHostInputValidity } from "../dynamic-secret-fns";
|
||||
@@ -51,13 +53,24 @@ export const MongoDBProvider = (): TDynamicProviderFns => {
|
||||
const providerInputs = await validateProviderInputs(inputs);
|
||||
const client = await $getClient(providerInputs);
|
||||
|
||||
const isConnected = await client
|
||||
.db(providerInputs.database)
|
||||
.command({ ping: 1 })
|
||||
.then(() => true);
|
||||
try {
|
||||
const isConnected = await client
|
||||
.db(providerInputs.database)
|
||||
.command({ ping: 1 })
|
||||
.then(() => true);
|
||||
|
||||
await client.close();
|
||||
return isConnected;
|
||||
await client.close();
|
||||
return isConnected;
|
||||
} catch (err) {
|
||||
await client.close();
|
||||
const sanitizedErrorMessage = sanitizeString({
|
||||
unsanitizedString: (err as Error)?.message,
|
||||
tokens: [providerInputs.password, providerInputs.username, providerInputs.database, providerInputs.host]
|
||||
});
|
||||
throw new BadRequestError({
|
||||
message: `Failed to connect with provider: ${sanitizedErrorMessage}`
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
const create = async (data: { inputs: unknown; usernameTemplate?: string | null; identity?: { name: string } }) => {
|
||||
@@ -68,16 +81,27 @@ export const MongoDBProvider = (): TDynamicProviderFns => {
|
||||
const username = generateUsername(usernameTemplate, identity);
|
||||
const password = generatePassword();
|
||||
|
||||
const db = client.db(providerInputs.database);
|
||||
try {
|
||||
const db = client.db(providerInputs.database);
|
||||
|
||||
await db.command({
|
||||
createUser: username,
|
||||
pwd: password,
|
||||
roles: providerInputs.roles
|
||||
});
|
||||
await client.close();
|
||||
await db.command({
|
||||
createUser: username,
|
||||
pwd: password,
|
||||
roles: providerInputs.roles
|
||||
});
|
||||
await client.close();
|
||||
|
||||
return { entityId: username, data: { DB_USERNAME: username, DB_PASSWORD: password } };
|
||||
return { entityId: username, data: { DB_USERNAME: username, DB_PASSWORD: password } };
|
||||
} catch (err) {
|
||||
await client.close();
|
||||
const sanitizedErrorMessage = sanitizeString({
|
||||
unsanitizedString: (err as Error)?.message,
|
||||
tokens: [username, password, providerInputs.password, providerInputs.username, providerInputs.database]
|
||||
});
|
||||
throw new BadRequestError({
|
||||
message: `Failed to create lease from provider: ${sanitizedErrorMessage}`
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
const revoke = async (inputs: unknown, entityId: string) => {
|
||||
@@ -86,13 +110,24 @@ export const MongoDBProvider = (): TDynamicProviderFns => {
|
||||
|
||||
const username = entityId;
|
||||
|
||||
const db = client.db(providerInputs.database);
|
||||
await db.command({
|
||||
dropUser: username
|
||||
});
|
||||
await client.close();
|
||||
try {
|
||||
const db = client.db(providerInputs.database);
|
||||
await db.command({
|
||||
dropUser: username
|
||||
});
|
||||
await client.close();
|
||||
|
||||
return { entityId: username };
|
||||
return { entityId: username };
|
||||
} catch (err) {
|
||||
await client.close();
|
||||
const sanitizedErrorMessage = sanitizeString({
|
||||
unsanitizedString: (err as Error)?.message,
|
||||
tokens: [username, providerInputs.password, providerInputs.username, providerInputs.database]
|
||||
});
|
||||
throw new BadRequestError({
|
||||
message: `Failed to revoke lease from provider: ${sanitizedErrorMessage}`
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
const renew = async (_inputs: unknown, entityId: string) => {
|
||||
|
@@ -3,6 +3,8 @@ import https from "https";
|
||||
import { customAlphabet } from "nanoid";
|
||||
import { z } from "zod";
|
||||
|
||||
import { BadRequestError } from "@app/lib/errors";
|
||||
import { sanitizeString } from "@app/lib/fn";
|
||||
import { logger } from "@app/lib/logger";
|
||||
import { alphaNumericNanoId } from "@app/lib/nanoid";
|
||||
|
||||
@@ -110,11 +112,19 @@ export const RabbitMqProvider = (): TDynamicProviderFns => {
|
||||
|
||||
const validateConnection = async (inputs: unknown) => {
|
||||
const providerInputs = await validateProviderInputs(inputs);
|
||||
const connection = await $getClient(providerInputs);
|
||||
|
||||
const infoResponse = await connection.get("/whoami").then(() => true);
|
||||
|
||||
return infoResponse;
|
||||
try {
|
||||
const connection = await $getClient(providerInputs);
|
||||
const infoResponse = await connection.get("/whoami").then(() => true);
|
||||
return infoResponse;
|
||||
} catch (err) {
|
||||
const sanitizedErrorMessage = sanitizeString({
|
||||
unsanitizedString: (err as Error)?.message,
|
||||
tokens: [providerInputs.password, providerInputs.username, providerInputs.host]
|
||||
});
|
||||
throw new BadRequestError({
|
||||
message: `Failed to connect with provider: ${sanitizedErrorMessage}`
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
const create = async (data: { inputs: unknown; usernameTemplate?: string | null; identity?: { name: string } }) => {
|
||||
@@ -125,26 +135,44 @@ export const RabbitMqProvider = (): TDynamicProviderFns => {
|
||||
const username = generateUsername(usernameTemplate, identity);
|
||||
const password = generatePassword();
|
||||
|
||||
await createRabbitMqUser({
|
||||
axiosInstance: connection,
|
||||
virtualHost: providerInputs.virtualHost,
|
||||
createUser: {
|
||||
password,
|
||||
username,
|
||||
tags: [...(providerInputs.tags ?? []), "infisical-user"]
|
||||
}
|
||||
});
|
||||
|
||||
return { entityId: username, data: { DB_USERNAME: username, DB_PASSWORD: password } };
|
||||
try {
|
||||
await createRabbitMqUser({
|
||||
axiosInstance: connection,
|
||||
virtualHost: providerInputs.virtualHost,
|
||||
createUser: {
|
||||
password,
|
||||
username,
|
||||
tags: [...(providerInputs.tags ?? []), "infisical-user"]
|
||||
}
|
||||
});
|
||||
return { entityId: username, data: { DB_USERNAME: username, DB_PASSWORD: password } };
|
||||
} catch (err) {
|
||||
const sanitizedErrorMessage = sanitizeString({
|
||||
unsanitizedString: (err as Error)?.message,
|
||||
tokens: [username, password, providerInputs.password, providerInputs.username]
|
||||
});
|
||||
throw new BadRequestError({
|
||||
message: `Failed to create lease from provider: ${sanitizedErrorMessage}`
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
const revoke = async (inputs: unknown, entityId: string) => {
|
||||
const providerInputs = await validateProviderInputs(inputs);
|
||||
const connection = await $getClient(providerInputs);
|
||||
|
||||
await deleteRabbitMqUser({ axiosInstance: connection, usernameToDelete: entityId });
|
||||
|
||||
return { entityId };
|
||||
try {
|
||||
await deleteRabbitMqUser({ axiosInstance: connection, usernameToDelete: entityId });
|
||||
return { entityId };
|
||||
} catch (err) {
|
||||
const sanitizedErrorMessage = sanitizeString({
|
||||
unsanitizedString: (err as Error)?.message,
|
||||
tokens: [entityId, providerInputs.password, providerInputs.username]
|
||||
});
|
||||
throw new BadRequestError({
|
||||
message: `Failed to revoke lease from provider: ${sanitizedErrorMessage}`
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
const renew = async (_inputs: unknown, entityId: string) => {
|
||||
|
@@ -4,6 +4,7 @@ import { customAlphabet } from "nanoid";
|
||||
import { z } from "zod";
|
||||
|
||||
import { BadRequestError } from "@app/lib/errors";
|
||||
import { sanitizeString } from "@app/lib/fn";
|
||||
import { alphaNumericNanoId } from "@app/lib/nanoid";
|
||||
import { validateHandlebarTemplate } from "@app/lib/template/validate-handlebars";
|
||||
|
||||
@@ -112,14 +113,27 @@ export const RedisDatabaseProvider = (): TDynamicProviderFns => {
|
||||
|
||||
const validateConnection = async (inputs: unknown) => {
|
||||
const providerInputs = await validateProviderInputs(inputs);
|
||||
const connection = await $getClient(providerInputs);
|
||||
|
||||
const pingResponse = await connection
|
||||
.ping()
|
||||
.then(() => true)
|
||||
.catch(() => false);
|
||||
|
||||
return pingResponse;
|
||||
let connection;
|
||||
try {
|
||||
connection = await $getClient(providerInputs);
|
||||
const pingResponse = await connection.ping().then(() => true);
|
||||
await connection.quit();
|
||||
return pingResponse;
|
||||
} catch (err) {
|
||||
if (connection) await connection.quit();
|
||||
const sanitizedErrorMessage = sanitizeString({
|
||||
unsanitizedString: (err as Error)?.message,
|
||||
tokens: [
|
||||
providerInputs.password || "",
|
||||
providerInputs.username,
|
||||
providerInputs.host,
|
||||
String(providerInputs.port)
|
||||
]
|
||||
});
|
||||
throw new BadRequestError({
|
||||
message: `Failed to connect with provider: ${sanitizedErrorMessage}`
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
const create = async (data: {
|
||||
@@ -144,10 +158,20 @@ export const RedisDatabaseProvider = (): TDynamicProviderFns => {
|
||||
|
||||
const queries = creationStatement.toString().split(";").filter(Boolean);
|
||||
|
||||
await executeTransactions(connection, queries);
|
||||
|
||||
await connection.quit();
|
||||
return { entityId: username, data: { DB_USERNAME: username, DB_PASSWORD: password } };
|
||||
try {
|
||||
await executeTransactions(connection, queries);
|
||||
await connection.quit();
|
||||
return { entityId: username, data: { DB_USERNAME: username, DB_PASSWORD: password } };
|
||||
} catch (err) {
|
||||
await connection.quit();
|
||||
const sanitizedErrorMessage = sanitizeString({
|
||||
unsanitizedString: (err as Error)?.message,
|
||||
tokens: [username, password, providerInputs.password || "", providerInputs.username]
|
||||
});
|
||||
throw new BadRequestError({
|
||||
message: `Failed to create lease from provider: ${sanitizedErrorMessage}`
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
const revoke = async (inputs: unknown, entityId: string) => {
|
||||
@@ -159,10 +183,20 @@ export const RedisDatabaseProvider = (): TDynamicProviderFns => {
|
||||
const revokeStatement = handlebars.compile(providerInputs.revocationStatement)({ username });
|
||||
const queries = revokeStatement.toString().split(";").filter(Boolean);
|
||||
|
||||
await executeTransactions(connection, queries);
|
||||
|
||||
await connection.quit();
|
||||
return { entityId: username };
|
||||
try {
|
||||
await executeTransactions(connection, queries);
|
||||
await connection.quit();
|
||||
return { entityId: username };
|
||||
} catch (err) {
|
||||
await connection.quit();
|
||||
const sanitizedErrorMessage = sanitizeString({
|
||||
unsanitizedString: (err as Error)?.message,
|
||||
tokens: [username, providerInputs.password || "", providerInputs.username]
|
||||
});
|
||||
throw new BadRequestError({
|
||||
message: `Failed to revoke lease from provider: ${sanitizedErrorMessage}`
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
const renew = async (inputs: unknown, entityId: string, expireAt: number) => {
|
||||
@@ -176,13 +210,23 @@ export const RedisDatabaseProvider = (): TDynamicProviderFns => {
|
||||
|
||||
const renewStatement = handlebars.compile(providerInputs.renewStatement)({ username, expiration });
|
||||
|
||||
if (renewStatement) {
|
||||
const queries = renewStatement.toString().split(";").filter(Boolean);
|
||||
await executeTransactions(connection, queries);
|
||||
try {
|
||||
if (renewStatement) {
|
||||
const queries = renewStatement.toString().split(";").filter(Boolean);
|
||||
await executeTransactions(connection, queries);
|
||||
}
|
||||
await connection.quit();
|
||||
return { entityId: username };
|
||||
} catch (err) {
|
||||
await connection.quit();
|
||||
const sanitizedErrorMessage = sanitizeString({
|
||||
unsanitizedString: (err as Error)?.message,
|
||||
tokens: [username, providerInputs.password || "", providerInputs.username]
|
||||
});
|
||||
throw new BadRequestError({
|
||||
message: `Failed to renew lease from provider: ${sanitizedErrorMessage}`
|
||||
});
|
||||
}
|
||||
|
||||
await connection.quit();
|
||||
return { entityId: username };
|
||||
};
|
||||
|
||||
return {
|
||||
|
@@ -4,6 +4,7 @@ import odbc from "odbc";
|
||||
import { z } from "zod";
|
||||
|
||||
import { BadRequestError } from "@app/lib/errors";
|
||||
import { sanitizeString } from "@app/lib/fn";
|
||||
import { alphaNumericNanoId } from "@app/lib/nanoid";
|
||||
import { validateHandlebarTemplate } from "@app/lib/template/validate-handlebars";
|
||||
|
||||
@@ -67,25 +68,41 @@ export const SapAseProvider = (): TDynamicProviderFns => {
|
||||
|
||||
const validateConnection = async (inputs: unknown) => {
|
||||
const providerInputs = await validateProviderInputs(inputs);
|
||||
const masterClient = await $getClient(providerInputs, true);
|
||||
const client = await $getClient(providerInputs);
|
||||
let masterClient;
|
||||
let client;
|
||||
try {
|
||||
masterClient = await $getClient(providerInputs, true);
|
||||
client = await $getClient(providerInputs);
|
||||
|
||||
const [resultFromMasterDatabase] = await masterClient.query<{ version: string }>("SELECT @@VERSION AS version");
|
||||
const [resultFromSelectedDatabase] = await client.query<{ version: string }>("SELECT @@VERSION AS version");
|
||||
const [resultFromMasterDatabase] = await masterClient.query<{ version: string }>("SELECT @@VERSION AS version");
|
||||
const [resultFromSelectedDatabase] = await client.query<{ version: string }>("SELECT @@VERSION AS version");
|
||||
|
||||
if (!resultFromSelectedDatabase.version) {
|
||||
if (!resultFromSelectedDatabase.version) {
|
||||
throw new BadRequestError({
|
||||
message: "Failed to validate SAP ASE connection, version query failed"
|
||||
});
|
||||
}
|
||||
|
||||
if (resultFromMasterDatabase.version !== resultFromSelectedDatabase.version) {
|
||||
throw new BadRequestError({
|
||||
message: "Failed to validate SAP ASE connection (master), version mismatch"
|
||||
});
|
||||
}
|
||||
|
||||
await masterClient.close();
|
||||
await client.close();
|
||||
return true;
|
||||
} catch (err) {
|
||||
if (masterClient) await masterClient.close();
|
||||
if (client) await client.close();
|
||||
const sanitizedErrorMessage = sanitizeString({
|
||||
unsanitizedString: (err as Error)?.message,
|
||||
tokens: [providerInputs.password, providerInputs.username, providerInputs.host, providerInputs.database]
|
||||
});
|
||||
throw new BadRequestError({
|
||||
message: "Failed to validate SAP ASE connection, version query failed"
|
||||
message: `Failed to connect with provider: ${sanitizedErrorMessage}`
|
||||
});
|
||||
}
|
||||
|
||||
if (resultFromMasterDatabase.version !== resultFromSelectedDatabase.version) {
|
||||
throw new BadRequestError({
|
||||
message: "Failed to validate SAP ASE connection (master), version mismatch"
|
||||
});
|
||||
}
|
||||
|
||||
return true;
|
||||
};
|
||||
|
||||
const create = async (data: { inputs: unknown; usernameTemplate?: string | null; identity?: { name: string } }) => {
|
||||
@@ -105,16 +122,26 @@ export const SapAseProvider = (): TDynamicProviderFns => {
|
||||
|
||||
const queries = creationStatement.trim().replaceAll("\n", "").split(";").filter(Boolean);
|
||||
|
||||
for await (const query of queries) {
|
||||
// If it's an adduser query, we need to first call sp_addlogin on the MASTER database.
|
||||
// If not done, then the newly created user won't be able to authenticate.
|
||||
await (query.startsWith(SapCommands.CreateLogin) ? masterClient : client).query(query);
|
||||
try {
|
||||
for await (const query of queries) {
|
||||
// If it's an adduser query, we need to first call sp_addlogin on the MASTER database.
|
||||
// If not done, then the newly created user won't be able to authenticate.
|
||||
await (query.startsWith(SapCommands.CreateLogin) ? masterClient : client).query(query);
|
||||
}
|
||||
await masterClient.close();
|
||||
await client.close();
|
||||
return { entityId: username, data: { DB_USERNAME: username, DB_PASSWORD: password } };
|
||||
} catch (err) {
|
||||
await masterClient.close();
|
||||
await client.close();
|
||||
const sanitizedErrorMessage = sanitizeString({
|
||||
unsanitizedString: (err as Error)?.message,
|
||||
tokens: [username, password, providerInputs.password, providerInputs.username, providerInputs.database]
|
||||
});
|
||||
throw new BadRequestError({
|
||||
message: `Failed to create lease from provider: ${sanitizedErrorMessage}`
|
||||
});
|
||||
}
|
||||
|
||||
await masterClient.close();
|
||||
await client.close();
|
||||
|
||||
return { entityId: username, data: { DB_USERNAME: username, DB_PASSWORD: password } };
|
||||
};
|
||||
|
||||
const revoke = async (inputs: unknown, username: string) => {
|
||||
@@ -140,14 +167,24 @@ export const SapAseProvider = (): TDynamicProviderFns => {
|
||||
}
|
||||
}
|
||||
|
||||
for await (const query of queries) {
|
||||
await (query.startsWith(SapCommands.DropLogin) ? masterClient : client).query(query);
|
||||
try {
|
||||
for await (const query of queries) {
|
||||
await (query.startsWith(SapCommands.DropLogin) ? masterClient : client).query(query);
|
||||
}
|
||||
await masterClient.close();
|
||||
await client.close();
|
||||
return { entityId: username };
|
||||
} catch (err) {
|
||||
await masterClient.close();
|
||||
await client.close();
|
||||
const sanitizedErrorMessage = sanitizeString({
|
||||
unsanitizedString: (err as Error)?.message,
|
||||
tokens: [username, providerInputs.password, providerInputs.username, providerInputs.database]
|
||||
});
|
||||
throw new BadRequestError({
|
||||
message: `Failed to revoke lease from provider: ${sanitizedErrorMessage}`
|
||||
});
|
||||
}
|
||||
|
||||
await masterClient.close();
|
||||
await client.close();
|
||||
|
||||
return { entityId: username };
|
||||
};
|
||||
|
||||
const renew = async (_: unknown, username: string) => {
|
||||
|
@@ -10,6 +10,7 @@ import { customAlphabet } from "nanoid";
|
||||
import { z } from "zod";
|
||||
|
||||
import { BadRequestError } from "@app/lib/errors";
|
||||
import { sanitizeString } from "@app/lib/fn";
|
||||
import { alphaNumericNanoId } from "@app/lib/nanoid";
|
||||
import { validateHandlebarTemplate } from "@app/lib/template/validate-handlebars";
|
||||
|
||||
@@ -83,19 +84,26 @@ export const SapHanaProvider = (): TDynamicProviderFns => {
|
||||
|
||||
const validateConnection = async (inputs: unknown) => {
|
||||
const providerInputs = await validateProviderInputs(inputs);
|
||||
const client = await $getClient(providerInputs);
|
||||
|
||||
const testResult = await new Promise<boolean>((resolve, reject) => {
|
||||
client.exec("SELECT 1 FROM DUMMY;", (err: any) => {
|
||||
if (err) {
|
||||
reject();
|
||||
}
|
||||
|
||||
resolve(true);
|
||||
try {
|
||||
const client = await $getClient(providerInputs);
|
||||
const testResult = await new Promise<boolean>((resolve, reject) => {
|
||||
client.exec("SELECT 1 FROM DUMMY;", (err: any) => {
|
||||
if (err) {
|
||||
return reject(err);
|
||||
}
|
||||
resolve(true);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
return testResult;
|
||||
return testResult;
|
||||
} catch (err) {
|
||||
const sanitizedErrorMessage = sanitizeString({
|
||||
unsanitizedString: (err as Error)?.message,
|
||||
tokens: [providerInputs.password, providerInputs.username, providerInputs.host]
|
||||
});
|
||||
throw new BadRequestError({
|
||||
message: `Failed to connect with provider: ${sanitizedErrorMessage}`
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
const create = async (data: {
|
||||
@@ -119,18 +127,22 @@ export const SapHanaProvider = (): TDynamicProviderFns => {
|
||||
});
|
||||
|
||||
const queries = creationStatement.toString().split(";").filter(Boolean);
|
||||
for await (const query of queries) {
|
||||
await new Promise((resolve, reject) => {
|
||||
client.exec(query, (err: any) => {
|
||||
if (err) {
|
||||
reject(
|
||||
new BadRequestError({
|
||||
message: err.message
|
||||
})
|
||||
);
|
||||
}
|
||||
resolve(true);
|
||||
try {
|
||||
for await (const query of queries) {
|
||||
await new Promise((resolve, reject) => {
|
||||
client.exec(query, (err: any) => {
|
||||
if (err) return reject(err);
|
||||
resolve(true);
|
||||
});
|
||||
});
|
||||
}
|
||||
} catch (err) {
|
||||
const sanitizedErrorMessage = sanitizeString({
|
||||
unsanitizedString: (err as Error)?.message,
|
||||
tokens: [username, password, providerInputs.password, providerInputs.username]
|
||||
});
|
||||
throw new BadRequestError({
|
||||
message: `Failed to create lease from provider: ${sanitizedErrorMessage}`
|
||||
});
|
||||
}
|
||||
|
||||
@@ -142,18 +154,24 @@ export const SapHanaProvider = (): TDynamicProviderFns => {
|
||||
const client = await $getClient(providerInputs);
|
||||
const revokeStatement = handlebars.compile(providerInputs.revocationStatement)({ username });
|
||||
const queries = revokeStatement.toString().split(";").filter(Boolean);
|
||||
for await (const query of queries) {
|
||||
await new Promise((resolve, reject) => {
|
||||
client.exec(query, (err: any) => {
|
||||
if (err) {
|
||||
reject(
|
||||
new BadRequestError({
|
||||
message: err.message
|
||||
})
|
||||
);
|
||||
}
|
||||
resolve(true);
|
||||
try {
|
||||
for await (const query of queries) {
|
||||
await new Promise((resolve, reject) => {
|
||||
client.exec(query, (err: any) => {
|
||||
if (err) {
|
||||
reject(err);
|
||||
}
|
||||
resolve(true);
|
||||
});
|
||||
});
|
||||
}
|
||||
} catch (err) {
|
||||
const sanitizedErrorMessage = sanitizeString({
|
||||
unsanitizedString: (err as Error)?.message,
|
||||
tokens: [username, providerInputs.password, providerInputs.username]
|
||||
});
|
||||
throw new BadRequestError({
|
||||
message: `Failed to revoke lease from provider: ${sanitizedErrorMessage}`
|
||||
});
|
||||
}
|
||||
|
||||
@@ -174,16 +192,20 @@ export const SapHanaProvider = (): TDynamicProviderFns => {
|
||||
await new Promise((resolve, reject) => {
|
||||
client.exec(query, (err: any) => {
|
||||
if (err) {
|
||||
reject(
|
||||
new BadRequestError({
|
||||
message: err.message
|
||||
})
|
||||
);
|
||||
reject(err);
|
||||
}
|
||||
resolve(true);
|
||||
});
|
||||
});
|
||||
}
|
||||
} catch (err) {
|
||||
const sanitizedErrorMessage = sanitizeString({
|
||||
unsanitizedString: (err as Error)?.message,
|
||||
tokens: [entityId, providerInputs.password, providerInputs.username]
|
||||
});
|
||||
throw new BadRequestError({
|
||||
message: `Failed to renew lease from provider: ${sanitizedErrorMessage}`
|
||||
});
|
||||
} finally {
|
||||
client.disconnect();
|
||||
}
|
||||
|
@@ -4,6 +4,7 @@ import snowflake from "snowflake-sdk";
|
||||
import { z } from "zod";
|
||||
|
||||
import { BadRequestError } from "@app/lib/errors";
|
||||
import { sanitizeString } from "@app/lib/fn";
|
||||
import { alphaNumericNanoId } from "@app/lib/nanoid";
|
||||
import { validateHandlebarTemplate } from "@app/lib/template/validate-handlebars";
|
||||
|
||||
@@ -69,12 +70,10 @@ export const SnowflakeProvider = (): TDynamicProviderFns => {
|
||||
|
||||
const validateConnection = async (inputs: unknown) => {
|
||||
const providerInputs = await validateProviderInputs(inputs);
|
||||
const client = await $getClient(providerInputs);
|
||||
|
||||
let isValidConnection: boolean;
|
||||
|
||||
let client;
|
||||
try {
|
||||
isValidConnection = await Promise.race([
|
||||
client = await $getClient(providerInputs);
|
||||
const isValidConnection = await Promise.race([
|
||||
client.isValidAsync(),
|
||||
new Promise((resolve) => {
|
||||
setTimeout(resolve, 10000);
|
||||
@@ -82,11 +81,18 @@ export const SnowflakeProvider = (): TDynamicProviderFns => {
|
||||
throw new BadRequestError({ message: "Unable to establish connection - verify credentials" });
|
||||
})
|
||||
]);
|
||||
return isValidConnection;
|
||||
} catch (err) {
|
||||
const sanitizedErrorMessage = sanitizeString({
|
||||
unsanitizedString: (err as Error)?.message,
|
||||
tokens: [providerInputs.password, providerInputs.username, providerInputs.accountId, providerInputs.orgId]
|
||||
});
|
||||
throw new BadRequestError({
|
||||
message: `Failed to connect with provider: ${sanitizedErrorMessage}`
|
||||
});
|
||||
} finally {
|
||||
client.destroy(noop);
|
||||
if (client) client.destroy(noop);
|
||||
}
|
||||
|
||||
return isValidConnection;
|
||||
};
|
||||
|
||||
const create = async (data: {
|
||||
@@ -116,13 +122,19 @@ export const SnowflakeProvider = (): TDynamicProviderFns => {
|
||||
sqlText: creationStatement,
|
||||
complete(err) {
|
||||
if (err) {
|
||||
return reject(new BadRequestError({ name: "CreateLease", message: err.message }));
|
||||
return reject(err);
|
||||
}
|
||||
|
||||
return resolve(true);
|
||||
}
|
||||
});
|
||||
});
|
||||
} catch (err) {
|
||||
const sanitizedErrorMessage = sanitizeString({
|
||||
unsanitizedString: (err as Error).message,
|
||||
tokens: [username, password, providerInputs.password, providerInputs.username]
|
||||
});
|
||||
throw new BadRequestError({ message: `Failed to create lease from provider: ${sanitizedErrorMessage}` });
|
||||
} finally {
|
||||
client.destroy(noop);
|
||||
}
|
||||
@@ -143,13 +155,19 @@ export const SnowflakeProvider = (): TDynamicProviderFns => {
|
||||
sqlText: revokeStatement,
|
||||
complete(err) {
|
||||
if (err) {
|
||||
return reject(new BadRequestError({ name: "RevokeLease", message: err.message }));
|
||||
return reject(err);
|
||||
}
|
||||
|
||||
return resolve(true);
|
||||
}
|
||||
});
|
||||
});
|
||||
} catch (err) {
|
||||
const sanitizedErrorMessage = sanitizeString({
|
||||
unsanitizedString: (err as Error).message,
|
||||
tokens: [username, providerInputs.password, providerInputs.username]
|
||||
});
|
||||
throw new BadRequestError({ message: `Failed to revoke lease from provider: ${sanitizedErrorMessage}` });
|
||||
} finally {
|
||||
client.destroy(noop);
|
||||
}
|
||||
@@ -175,13 +193,19 @@ export const SnowflakeProvider = (): TDynamicProviderFns => {
|
||||
sqlText: renewStatement,
|
||||
complete(err) {
|
||||
if (err) {
|
||||
return reject(new BadRequestError({ name: "RenewLease", message: err.message }));
|
||||
return reject(err);
|
||||
}
|
||||
|
||||
return resolve(true);
|
||||
}
|
||||
});
|
||||
});
|
||||
} catch (err) {
|
||||
const sanitizedErrorMessage = sanitizeString({
|
||||
unsanitizedString: (err as Error).message,
|
||||
tokens: [entityId, providerInputs.password, providerInputs.username]
|
||||
});
|
||||
throw new BadRequestError({ message: `Failed to renew lease from provider: ${sanitizedErrorMessage}` });
|
||||
} finally {
|
||||
client.destroy(noop);
|
||||
}
|
||||
|
@@ -3,6 +3,8 @@ import knex from "knex";
|
||||
import { z } from "zod";
|
||||
|
||||
import { crypto } from "@app/lib/crypto/cryptography";
|
||||
import { BadRequestError } from "@app/lib/errors";
|
||||
import { sanitizeString } from "@app/lib/fn";
|
||||
import { GatewayProxyProtocol, withGatewayProxy } from "@app/lib/gateway";
|
||||
import { alphaNumericNanoId } from "@app/lib/nanoid";
|
||||
import { validateHandlebarTemplate } from "@app/lib/template/validate-handlebars";
|
||||
@@ -212,8 +214,19 @@ export const SqlDatabaseProvider = ({ gatewayService }: TSqlDatabaseProviderDTO)
|
||||
// oracle needs from keyword
|
||||
const testStatement = providerInputs.client === SqlProviders.Oracle ? "SELECT 1 FROM DUAL" : "SELECT 1";
|
||||
|
||||
isConnected = await db.raw(testStatement).then(() => true);
|
||||
await db.destroy();
|
||||
try {
|
||||
isConnected = await db.raw(testStatement).then(() => true);
|
||||
} catch (err) {
|
||||
const sanitizedErrorMessage = sanitizeString({
|
||||
unsanitizedString: (err as Error)?.message,
|
||||
tokens: [providerInputs.username]
|
||||
});
|
||||
throw new BadRequestError({
|
||||
message: `Failed to connect with provider: ${sanitizedErrorMessage}`
|
||||
});
|
||||
} finally {
|
||||
await db.destroy();
|
||||
}
|
||||
};
|
||||
|
||||
if (providerInputs.gatewayId) {
|
||||
@@ -233,13 +246,13 @@ export const SqlDatabaseProvider = ({ gatewayService }: TSqlDatabaseProviderDTO)
|
||||
const { inputs, expireAt, usernameTemplate, identity } = data;
|
||||
|
||||
const providerInputs = await validateProviderInputs(inputs);
|
||||
const { database } = providerInputs;
|
||||
const username = generateUsername(providerInputs.client, usernameTemplate, identity);
|
||||
|
||||
const password = generatePassword(providerInputs.client, providerInputs.passwordRequirements);
|
||||
const gatewayCallback = async (host = providerInputs.host, port = providerInputs.port) => {
|
||||
const db = await $getClient({ ...providerInputs, port, host });
|
||||
try {
|
||||
const { database } = providerInputs;
|
||||
const expiration = new Date(expireAt).toISOString();
|
||||
|
||||
const creationStatement = handlebars.compile(providerInputs.creationStatement, { noEscape: true })({
|
||||
@@ -256,6 +269,14 @@ export const SqlDatabaseProvider = ({ gatewayService }: TSqlDatabaseProviderDTO)
|
||||
await tx.raw(query);
|
||||
}
|
||||
});
|
||||
} catch (err) {
|
||||
const sanitizedErrorMessage = sanitizeString({
|
||||
unsanitizedString: (err as Error)?.message,
|
||||
tokens: [username, password, database]
|
||||
});
|
||||
throw new BadRequestError({
|
||||
message: `Failed to create lease from provider: ${sanitizedErrorMessage}`
|
||||
});
|
||||
} finally {
|
||||
await db.destroy();
|
||||
}
|
||||
@@ -283,6 +304,14 @@ export const SqlDatabaseProvider = ({ gatewayService }: TSqlDatabaseProviderDTO)
|
||||
await tx.raw(query);
|
||||
}
|
||||
});
|
||||
} catch (err) {
|
||||
const sanitizedErrorMessage = sanitizeString({
|
||||
unsanitizedString: (err as Error)?.message,
|
||||
tokens: [username, database]
|
||||
});
|
||||
throw new BadRequestError({
|
||||
message: `Failed to revoke lease from provider: ${sanitizedErrorMessage}`
|
||||
});
|
||||
} finally {
|
||||
await db.destroy();
|
||||
}
|
||||
@@ -319,6 +348,14 @@ export const SqlDatabaseProvider = ({ gatewayService }: TSqlDatabaseProviderDTO)
|
||||
}
|
||||
});
|
||||
}
|
||||
} catch (err) {
|
||||
const sanitizedErrorMessage = sanitizeString({
|
||||
unsanitizedString: (err as Error)?.message,
|
||||
tokens: [database]
|
||||
});
|
||||
throw new BadRequestError({
|
||||
message: `Failed to renew lease from provider: ${sanitizedErrorMessage}`
|
||||
});
|
||||
} finally {
|
||||
await db.destroy();
|
||||
}
|
||||
|
@@ -1,6 +1,8 @@
|
||||
import { authenticator } from "otplib";
|
||||
import { HashAlgorithms } from "otplib/core";
|
||||
|
||||
import { BadRequestError } from "@app/lib/errors";
|
||||
import { sanitizeString } from "@app/lib/fn";
|
||||
import { alphaNumericNanoId } from "@app/lib/nanoid";
|
||||
|
||||
import { DynamicSecretTotpSchema, TDynamicProviderFns, TotpConfigType } from "./models";
|
||||
@@ -12,62 +14,84 @@ export const TotpProvider = (): TDynamicProviderFns => {
|
||||
return providerInputs;
|
||||
};
|
||||
|
||||
const validateConnection = async () => {
|
||||
return true;
|
||||
const validateConnection = async (inputs: unknown) => {
|
||||
try {
|
||||
await validateProviderInputs(inputs);
|
||||
return true;
|
||||
} catch (err) {
|
||||
const sanitizedErrorMessage = sanitizeString({
|
||||
unsanitizedString: (err as Error)?.message,
|
||||
tokens: []
|
||||
});
|
||||
throw new BadRequestError({
|
||||
message: `Failed to connect with provider: ${sanitizedErrorMessage}`
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
const create = async (inputs: unknown) => {
|
||||
const providerInputs = await validateProviderInputs(inputs);
|
||||
const create = async (data: { inputs: unknown }) => {
|
||||
const { inputs } = data;
|
||||
try {
|
||||
const providerInputs = await validateProviderInputs(inputs);
|
||||
|
||||
const entityId = alphaNumericNanoId(32);
|
||||
const authenticatorInstance = authenticator.clone();
|
||||
const entityId = alphaNumericNanoId(32);
|
||||
const authenticatorInstance = authenticator.clone();
|
||||
|
||||
let secret: string;
|
||||
let period: number | null | undefined;
|
||||
let digits: number | null | undefined;
|
||||
let algorithm: HashAlgorithms | null | undefined;
|
||||
let secret: string;
|
||||
let period: number | null | undefined;
|
||||
let digits: number | null | undefined;
|
||||
let algorithm: HashAlgorithms | null | undefined;
|
||||
|
||||
if (providerInputs.configType === TotpConfigType.URL) {
|
||||
const urlObj = new URL(providerInputs.url);
|
||||
secret = urlObj.searchParams.get("secret") as string;
|
||||
const periodFromUrl = urlObj.searchParams.get("period");
|
||||
const digitsFromUrl = urlObj.searchParams.get("digits");
|
||||
const algorithmFromUrl = urlObj.searchParams.get("algorithm");
|
||||
if (providerInputs.configType === TotpConfigType.URL) {
|
||||
const urlObj = new URL(providerInputs.url);
|
||||
secret = urlObj.searchParams.get("secret") as string;
|
||||
const periodFromUrl = urlObj.searchParams.get("period");
|
||||
const digitsFromUrl = urlObj.searchParams.get("digits");
|
||||
const algorithmFromUrl = urlObj.searchParams.get("algorithm");
|
||||
|
||||
if (periodFromUrl) {
|
||||
period = +periodFromUrl;
|
||||
if (periodFromUrl) {
|
||||
period = +periodFromUrl;
|
||||
}
|
||||
|
||||
if (digitsFromUrl) {
|
||||
digits = +digitsFromUrl;
|
||||
}
|
||||
|
||||
if (algorithmFromUrl) {
|
||||
algorithm = algorithmFromUrl.toLowerCase() as HashAlgorithms;
|
||||
}
|
||||
} else {
|
||||
secret = providerInputs.secret;
|
||||
period = providerInputs.period;
|
||||
digits = providerInputs.digits;
|
||||
algorithm = providerInputs.algorithm as unknown as HashAlgorithms;
|
||||
}
|
||||
|
||||
if (digitsFromUrl) {
|
||||
digits = +digitsFromUrl;
|
||||
if (digits) {
|
||||
authenticatorInstance.options = { digits };
|
||||
}
|
||||
|
||||
if (algorithmFromUrl) {
|
||||
algorithm = algorithmFromUrl.toLowerCase() as HashAlgorithms;
|
||||
if (algorithm) {
|
||||
authenticatorInstance.options = { algorithm };
|
||||
}
|
||||
} else {
|
||||
secret = providerInputs.secret;
|
||||
period = providerInputs.period;
|
||||
digits = providerInputs.digits;
|
||||
algorithm = providerInputs.algorithm as unknown as HashAlgorithms;
|
||||
}
|
||||
|
||||
if (digits) {
|
||||
authenticatorInstance.options = { digits };
|
||||
}
|
||||
if (period) {
|
||||
authenticatorInstance.options = { step: period };
|
||||
}
|
||||
|
||||
if (algorithm) {
|
||||
authenticatorInstance.options = { algorithm };
|
||||
return {
|
||||
entityId,
|
||||
data: { TOTP: authenticatorInstance.generate(secret), TIME_REMAINING: authenticatorInstance.timeRemaining() }
|
||||
};
|
||||
} catch (err) {
|
||||
const sanitizedErrorMessage = sanitizeString({
|
||||
unsanitizedString: (err as Error)?.message,
|
||||
tokens: []
|
||||
});
|
||||
throw new BadRequestError({
|
||||
message: `Failed to create lease from provider: ${sanitizedErrorMessage}`
|
||||
});
|
||||
}
|
||||
|
||||
if (period) {
|
||||
authenticatorInstance.options = { step: period };
|
||||
}
|
||||
|
||||
return {
|
||||
entityId,
|
||||
data: { TOTP: authenticatorInstance.generate(secret), TIME_REMAINING: authenticatorInstance.timeRemaining() }
|
||||
};
|
||||
};
|
||||
|
||||
const revoke = async (_inputs: unknown, entityId: string) => {
|
||||
|
@@ -4,6 +4,7 @@ import { z } from "zod";

import { crypto } from "@app/lib/crypto/cryptography";
import { BadRequestError } from "@app/lib/errors";
import { sanitizeString } from "@app/lib/fn";
import { GatewayProxyProtocol, withGatewayProxy } from "@app/lib/gateway";
import { logger } from "@app/lib/logger";
import { alphaNumericNanoId } from "@app/lib/nanoid";

@@ -275,6 +276,14 @@ export const VerticaProvider = ({ gatewayService }: TVerticaProviderDTO): TDynam
await client.raw(trimmedQuery);
}
}
} catch (err) {
const sanitizedErrorMessage = sanitizeString({
unsanitizedString: (err as Error)?.message,
tokens: [username, password, providerInputs.username, providerInputs.password]
});
throw new BadRequestError({
message: `Failed to create lease from provider: ${sanitizedErrorMessage}`
});
} finally {
if (client) await client.destroy();
}

@@ -339,6 +348,14 @@ export const VerticaProvider = ({ gatewayService }: TVerticaProviderDTO): TDynam
await client.raw(trimmedQuery);
}
}
} catch (err) {
const sanitizedErrorMessage = sanitizeString({
unsanitizedString: (err as Error)?.message,
tokens: [username, providerInputs.username, providerInputs.password]
});
throw new BadRequestError({
message: `Failed to revoke lease from provider: ${sanitizedErrorMessage}`
});
} finally {
if (client) await client.destroy();
}
@@ -1,4 +1,5 @@
import { ForbiddenError } from "@casl/ability";
import { Knex } from "knex";

import { OrgMembershipStatus, TableName, TLdapConfigsUpdate, TUsers } from "@app/db/schemas";
import { TGroupDALFactory } from "@app/ee/services/group/group-dal";

@@ -45,7 +46,7 @@ import { searchGroups, testLDAPConfig } from "./ldap-fns";
import { TLdapGroupMapDALFactory } from "./ldap-group-map-dal";

type TLdapConfigServiceFactoryDep = {
ldapConfigDAL: Pick<TLdapConfigDALFactory, "create" | "update" | "findOne">;
ldapConfigDAL: Pick<TLdapConfigDALFactory, "create" | "update" | "findOne" | "transaction">;
ldapGroupMapDAL: Pick<TLdapGroupMapDALFactory, "find" | "create" | "delete" | "findLdapGroupMapsByLdapConfigId">;
orgMembershipDAL: Pick<TOrgMembershipDALFactory, "create">;
orgDAL: Pick<

@@ -131,6 +132,19 @@ export const ldapConfigServiceFactory = ({
orgId
});

const isConnected = await testLDAPConfig({
bindDN,
bindPass,
caCert,
url
});

if (!isConnected) {
throw new BadRequestError({
message: "Failed to establish connection to LDAP directory. Please verify that your credentials are correct."
});
}

const ldapConfig = await ldapConfigDAL.create({
orgId,
isActive,

@@ -148,6 +162,50 @@ export const ldapConfigServiceFactory = ({
return ldapConfig;
};
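For context on the hunk above, this is roughly what the new create-time check amounts to from a caller's perspective: the bind is probed first, and nothing is persisted when it fails. The probe below is a hypothetical stand-in for testLDAPConfig from ./ldap-fns; its input shape is assumed from the call site, not taken from the helper itself.

type TLdapProbeInput = { url: string; bindDN: string; bindPass: string; caCert: string };

// Hypothetical stand-in for testLDAPConfig: pretend the directory rejected the bind.
const testLDAPConfigSketch = async (_cfg: TLdapProbeInput): Promise<boolean> => false;

export const createLdapCfgSketch = async (cfg: TLdapProbeInput) => {
  const isConnected = await testLDAPConfigSketch(cfg);
  if (!isConnected) {
    // Fail fast: no config row is created when the directory is unreachable.
    throw new Error(
      "Failed to establish connection to LDAP directory. Please verify that your credentials are correct."
    );
  }
  // ...only now would the config be persisted with its encrypted fields.
  return { ...cfg, isActive: true };
};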
const getLdapCfg = async (filter: { orgId: string; isActive?: boolean; id?: string }, tx?: Knex) => {
const ldapConfig = await ldapConfigDAL.findOne(filter, tx);
if (!ldapConfig) {
throw new NotFoundError({
message: `Failed to find organization LDAP data in organization with ID '${filter.orgId}'`
});
}

const { decryptor } = await kmsService.createCipherPairWithDataKey({
type: KmsDataKey.Organization,
orgId: ldapConfig.orgId
});

let bindDN = "";
if (ldapConfig.encryptedLdapBindDN) {
bindDN = decryptor({ cipherTextBlob: ldapConfig.encryptedLdapBindDN }).toString();
}

let bindPass = "";
if (ldapConfig.encryptedLdapBindPass) {
bindPass = decryptor({ cipherTextBlob: ldapConfig.encryptedLdapBindPass }).toString();
}

let caCert = "";
if (ldapConfig.encryptedLdapCaCertificate) {
caCert = decryptor({ cipherTextBlob: ldapConfig.encryptedLdapCaCertificate }).toString();
}

return {
id: ldapConfig.id,
organization: ldapConfig.orgId,
isActive: ldapConfig.isActive,
url: ldapConfig.url,
bindDN,
bindPass,
uniqueUserAttribute: ldapConfig.uniqueUserAttribute,
searchBase: ldapConfig.searchBase,
searchFilter: ldapConfig.searchFilter,
groupSearchBase: ldapConfig.groupSearchBase,
groupSearchFilter: ldapConfig.groupSearchFilter,
caCert
};
};

const updateLdapCfg = async ({
actor,
actorId,

@@ -202,53 +260,25 @@ export const ldapConfigServiceFactory = ({
updateQuery.encryptedLdapCaCertificate = encryptor({ plainText: Buffer.from(caCert) }).cipherTextBlob;
}

const [ldapConfig] = await ldapConfigDAL.update({ orgId }, updateQuery);
const config = await ldapConfigDAL.transaction(async (tx) => {
const [updatedLdapCfg] = await ldapConfigDAL.update({ orgId }, updateQuery, tx);
const decryptedLdapCfg = await getLdapCfg({ orgId }, tx);

return ldapConfig;
};
const isSoftDeletion = !decryptedLdapCfg.url && !decryptedLdapCfg.bindDN && !decryptedLdapCfg.bindPass;
if (!isSoftDeletion) {
const isConnected = await testLDAPConfig(decryptedLdapCfg);
if (!isConnected) {
throw new BadRequestError({
message:
"Failed to establish connection to LDAP directory. Please verify that your credentials are correct."
});
}
}

const getLdapCfg = async (filter: { orgId: string; isActive?: boolean; id?: string }) => {
const ldapConfig = await ldapConfigDAL.findOne(filter);
if (!ldapConfig) {
throw new NotFoundError({
message: `Failed to find organization LDAP data in organization with ID '${filter.orgId}'`
});
}

const { decryptor } = await kmsService.createCipherPairWithDataKey({
type: KmsDataKey.Organization,
orgId: ldapConfig.orgId
return updatedLdapCfg;
});

let bindDN = "";
if (ldapConfig.encryptedLdapBindDN) {
bindDN = decryptor({ cipherTextBlob: ldapConfig.encryptedLdapBindDN }).toString();
}

let bindPass = "";
if (ldapConfig.encryptedLdapBindPass) {
bindPass = decryptor({ cipherTextBlob: ldapConfig.encryptedLdapBindPass }).toString();
}

let caCert = "";
if (ldapConfig.encryptedLdapCaCertificate) {
caCert = decryptor({ cipherTextBlob: ldapConfig.encryptedLdapCaCertificate }).toString();
}

return {
id: ldapConfig.id,
organization: ldapConfig.orgId,
isActive: ldapConfig.isActive,
url: ldapConfig.url,
bindDN,
bindPass,
uniqueUserAttribute: ldapConfig.uniqueUserAttribute,
searchBase: ldapConfig.searchBase,
searchFilter: ldapConfig.searchFilter,
groupSearchBase: ldapConfig.groupSearchBase,
groupSearchFilter: ldapConfig.groupSearchFilter,
caCert
};
return config;
};

const getLdapCfgWithPermissionCheck = async ({

@@ -527,14 +557,13 @@ export const ldapConfigServiceFactory = ({
});

const isUserCompleted = Boolean(user.isAccepted);
const userEnc = await userDAL.findUserEncKeyByUserId(user.id);

const providerAuthToken = crypto.jwt().sign(
{
authTokenType: AuthTokenType.PROVIDER_TOKEN,
userId: user.id,
username: user.username,
hasExchangedPrivateKey: Boolean(userEnc?.serverEncryptedPrivateKey),
hasExchangedPrivateKey: true,
...(user.email && { email: user.email, isEmailVerified: user.isEmailVerified }),
firstName,
lastName,

@@ -694,7 +723,17 @@ export const ldapConfigServiceFactory = ({
return deletedGroupMap;
};

const testLDAPConnection = async ({ actor, actorId, orgId, actorAuthMethod, actorOrgId }: TTestLdapConnectionDTO) => {
const testLDAPConnection = async ({
actor,
actorId,
orgId,
actorAuthMethod,
actorOrgId,
bindDN,
bindPass,
caCert,
url
}: TTestLdapConnectionDTO) => {
const { permission } = await permissionService.getOrgPermission(actor, actorId, orgId, actorAuthMethod, actorOrgId);
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Create, OrgPermissionSubjects.Ldap);

@@ -704,11 +743,12 @@ export const ldapConfigServiceFactory = ({
message: "Failed to test LDAP connection due to plan restriction. Upgrade plan to test the LDAP connection."
});

const ldapConfig = await getLdapCfg({
orgId
return testLDAPConfig({
bindDN,
bindPass,
caCert,
url
});

return testLDAPConfig(ldapConfig);
};

return {
@@ -83,6 +83,4 @@ export type TDeleteLdapGroupMapDTO = {
ldapGroupMapId: string;
} & TOrgPermission;

export type TTestLdapConnectionDTO = {
ldapConfigId: string;
} & TOrgPermission;
export type TTestLdapConnectionDTO = TOrgPermission & TTestLDAPConfigDTO;
@@ -31,7 +31,7 @@ export const getDefaultOnPremFeatures = () => {
caCrl: false,
sshHostGroups: false,
enterpriseSecretSyncs: false,
enterpriseAppConnections: false,
enterpriseAppConnections: true,
machineIdentityAuthTemplates: false
};
};
@@ -404,7 +404,6 @@ export const oidcConfigServiceFactory = ({

await licenseService.updateSubscriptionOrgMemberCount(organization.id);

const userEnc = await userDAL.findUserEncKeyByUserId(user.id);
const isUserCompleted = Boolean(user.isAccepted);
const providerAuthToken = crypto.jwt().sign(
{

@@ -417,7 +416,7 @@ export const oidcConfigServiceFactory = ({
organizationName: organization.name,
organizationId: organization.id,
organizationSlug: organization.slug,
hasExchangedPrivateKey: Boolean(userEnc?.serverEncryptedPrivateKey),
hasExchangedPrivateKey: true,
authMethod: AuthMethod.OIDC,
authType: UserAliasType.OIDC,
isUserCompleted,
@@ -411,7 +411,6 @@ export const samlConfigServiceFactory = ({
await licenseService.updateSubscriptionOrgMemberCount(organization.id);

const isUserCompleted = Boolean(user.isAccepted && user.isEmailVerified);
const userEnc = await userDAL.findUserEncKeyByUserId(user.id);
const providerAuthToken = crypto.jwt().sign(
{
authTokenType: AuthTokenType.PROVIDER_TOKEN,

@@ -424,7 +423,7 @@ export const samlConfigServiceFactory = ({
organizationId: organization.id,
organizationSlug: organization.slug,
authMethod: authProvider,
hasExchangedPrivateKey: Boolean(userEnc?.serverEncryptedPrivateKey),
hasExchangedPrivateKey: true,
authType: UserAliasType.SAML,
isUserCompleted,
...(relayState
@@ -2,6 +2,7 @@ import { AxiosError } from "axios";

import { getConfig } from "@app/lib/config/env";
import { BadRequestError } from "@app/lib/errors";
import { logger } from "@app/lib/logger";
import { KmsDataKey } from "@app/services/kms/kms-types";

import { AUTH0_CLIENT_SECRET_ROTATION_LIST_OPTION } from "./auth0-client-secret";
@@ -13,9 +14,11 @@ import { MYSQL_CREDENTIALS_ROTATION_LIST_OPTION } from "./mysql-credentials";
import { OKTA_CLIENT_SECRET_ROTATION_LIST_OPTION } from "./okta-client-secret";
import { ORACLEDB_CREDENTIALS_ROTATION_LIST_OPTION } from "./oracledb-credentials";
import { POSTGRES_CREDENTIALS_ROTATION_LIST_OPTION } from "./postgres-credentials";
import { TSecretRotationV2DALFactory } from "./secret-rotation-v2-dal";
import { SecretRotation, SecretRotationStatus } from "./secret-rotation-v2-enums";
import { TSecretRotationV2ServiceFactoryDep } from "./secret-rotation-v2-service";
import { TSecretRotationV2ServiceFactory, TSecretRotationV2ServiceFactoryDep } from "./secret-rotation-v2-service";
import {
TSecretRotationRotateSecretsJobPayload,
TSecretRotationV2,
TSecretRotationV2GeneratedCredentials,
TSecretRotationV2ListItem,
@@ -74,6 +77,10 @@ export const getNextUtcRotationInterval = (rotateAtUtc?: TSecretRotationV2["rota
const appCfg = getConfig();

if (appCfg.isRotationDevelopmentMode) {
if (appCfg.isTestMode) {
// if its test mode, it should always rotate
return new Date(Date.now() + 365 * 24 * 60 * 60 * 1000); // Current time + 1 year
}
return getNextUTCMinuteInterval(rotateAtUtc);
}

@@ -263,3 +270,51 @@ export const throwOnImmutableParameterUpdate = (
// do nothing
}
};

export const rotateSecretsFns = async ({
job,
secretRotationV2DAL,
secretRotationV2Service
}: {
job: {
data: TSecretRotationRotateSecretsJobPayload;
id: string;
retryCount: number;
retryLimit: number;
};
secretRotationV2DAL: Pick<TSecretRotationV2DALFactory, "findById">;
secretRotationV2Service: Pick<TSecretRotationV2ServiceFactory, "rotateGeneratedCredentials">;
}) => {
const { rotationId, queuedAt, isManualRotation } = job.data;
const { retryCount, retryLimit } = job;

const logDetails = `[rotationId=${rotationId}] [jobId=${job.id}] retryCount=[${retryCount}/${retryLimit}]`;

try {
const secretRotation = await secretRotationV2DAL.findById(rotationId);

if (!secretRotation) throw new Error(`Secret rotation ${rotationId} not found`);

if (!secretRotation.isAutoRotationEnabled) {
logger.info(`secretRotationV2Queue: Skipping Rotation - Auto-Rotation Disabled Since Queue ${logDetails}`);
}

if (new Date(secretRotation.lastRotatedAt).getTime() >= new Date(queuedAt).getTime()) {
// rotated since being queued, skip rotation
logger.info(`secretRotationV2Queue: Skipping Rotation - Rotated Since Queue ${logDetails}`);
return;
}

await secretRotationV2Service.rotateGeneratedCredentials(secretRotation, {
jobId: job.id,
shouldSendNotification: true,
isFinalAttempt: retryCount === retryLimit,
isManualRotation
});

logger.info(`secretRotationV2Queue: Secrets Rotated ${logDetails}`);
} catch (error) {
logger.error(error, `secretRotationV2Queue: Failed to Rotate Secrets ${logDetails}`);
throw error;
}
};
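A minimal standalone sketch (not part of the diff) of the "already rotated since queueing" guard that rotateSecretsFns applies above, with illustrative dates:

// Sketch only: the skip rule expressed as a small predicate.
const shouldSkipRotation = (lastRotatedAt: Date, queuedAt: Date) =>
  lastRotatedAt.getTime() >= queuedAt.getTime();

shouldSkipRotation(new Date("2025-01-02"), new Date("2025-01-01")); // true: rotation happened after the job was queued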
@@ -1,9 +1,12 @@
import { v4 as uuidv4 } from "uuid";

import { ProjectMembershipRole } from "@app/db/schemas";
import { TSecretRotationV2DALFactory } from "@app/ee/services/secret-rotation-v2/secret-rotation-v2-dal";
import { SecretRotation } from "@app/ee/services/secret-rotation-v2/secret-rotation-v2-enums";
import {
getNextUtcRotationInterval,
getSecretRotationRotateSecretJobOptions
getSecretRotationRotateSecretJobOptions,
rotateSecretsFns
} from "@app/ee/services/secret-rotation-v2/secret-rotation-v2-fns";
import { SECRET_ROTATION_NAME_MAP } from "@app/ee/services/secret-rotation-v2/secret-rotation-v2-maps";
import { TSecretRotationV2ServiceFactory } from "@app/ee/services/secret-rotation-v2/secret-rotation-v2-service";
@@ -63,14 +66,34 @@ export const secretRotationV2QueueServiceFactory = async ({
rotation.lastRotatedAt
).toISOString()}] [rotateAt=${new Date(rotation.nextRotationAt!).toISOString()}]`
);
await queueService.queuePg(
QueueJobs.SecretRotationV2RotateSecrets,
{
rotationId: rotation.id,
queuedAt: currentTime
},
getSecretRotationRotateSecretJobOptions(rotation)
);

const data = {
rotationId: rotation.id,
queuedAt: currentTime
} as TSecretRotationRotateSecretsJobPayload;

if (appCfg.isTestMode) {
logger.warn("secretRotationV2Queue: Manually rotating secrets for test mode");
await rotateSecretsFns({
job: {
id: uuidv4(),
data,
retryCount: 0,
retryLimit: 0
},
secretRotationV2DAL,
secretRotationV2Service
});
} else {
await queueService.queuePg(
QueueJobs.SecretRotationV2RotateSecrets,
{
rotationId: rotation.id,
queuedAt: currentTime
},
getSecretRotationRotateSecretJobOptions(rotation)
);
}
}
} catch (error) {
logger.error(error, "secretRotationV2Queue: Queue Rotations Error:");
@@ -87,38 +110,14 @@ export const secretRotationV2QueueServiceFactory = async ({
await queueService.startPg<QueueName.SecretRotationV2>(
QueueJobs.SecretRotationV2RotateSecrets,
async ([job]) => {
const { rotationId, queuedAt, isManualRotation } = job.data as TSecretRotationRotateSecretsJobPayload;
const { retryCount, retryLimit } = job;

const logDetails = `[rotationId=${rotationId}] [jobId=${job.id}] retryCount=[${retryCount}/${retryLimit}]`;

try {
const secretRotation = await secretRotationV2DAL.findById(rotationId);

if (!secretRotation) throw new Error(`Secret rotation ${rotationId} not found`);

if (!secretRotation.isAutoRotationEnabled) {
logger.info(`secretRotationV2Queue: Skipping Rotation - Auto-Rotation Disabled Since Queue ${logDetails}`);
}

if (new Date(secretRotation.lastRotatedAt).getTime() >= new Date(queuedAt).getTime()) {
// rotated since being queued, skip rotation
logger.info(`secretRotationV2Queue: Skipping Rotation - Rotated Since Queue ${logDetails}`);
return;
}

await secretRotationV2Service.rotateGeneratedCredentials(secretRotation, {
jobId: job.id,
shouldSendNotification: true,
isFinalAttempt: retryCount === retryLimit,
isManualRotation
});

logger.info(`secretRotationV2Queue: Secrets Rotated ${logDetails}`);
} catch (error) {
logger.error(error, `secretRotationV2Queue: Failed to Rotate Secrets ${logDetails}`);
throw error;
}
await rotateSecretsFns({
job: {
...job,
data: job.data as TSecretRotationRotateSecretsJobPayload
},
secretRotationV2DAL,
secretRotationV2Service
});
},
{
batchSize: 1,
@@ -58,9 +58,9 @@ export function scanDirectory(inputPath: string, outputPath: string, configPath?
});
}

export function scanFile(inputPath: string): Promise<void> {
export function scanFile(inputPath: string, configPath?: string): Promise<void> {
return new Promise((resolve, reject) => {
const command = `infisical scan --exit-code=77 --source "${inputPath}" --no-git`;
const command = `infisical scan --exit-code=77 --source "${inputPath}" --no-git ${configPath ? `-c ${configPath}` : ""}`;
exec(command, (error) => {
if (error && error.code === 77) {
reject(error);
@@ -166,6 +166,20 @@ export const parseScanErrorMessage = (err: unknown): string => {
: `${errorMessage.substring(0, MAX_MESSAGE_LENGTH - 3)}...`;
};

const generateSecretValuePolicyConfiguration = (entropy: number): string => `
# Extend default configuration to preserve existing rules
[extend]
useDefault = true

# Add custom high-entropy rule
[[rules]]
id = "high-entropy"
description = "Will scan for high entropy secrets"
regex = '''.*'''
entropy = ${entropy}
keywords = []
`;

export const scanSecretPolicyViolations = async (
projectId: string,
secretPath: string,
@@ -188,14 +202,25 @@ export const scanSecretPolicyViolations = async (

const tempFolder = await createTempFolder();
try {
const configPath = join(tempFolder, "infisical-scan.toml");

const secretPolicyConfiguration = generateSecretValuePolicyConfiguration(
appCfg.PARAMS_FOLDER_SECRET_DETECTION_ENTROPY
);

await writeTextToFile(configPath, secretPolicyConfiguration);

const scanPromises = secrets
.filter((secret) => !ignoreValues.includes(secret.secretValue))
.map(async (secret) => {
const secretFilePath = join(tempFolder, `${crypto.nativeCrypto.randomUUID()}.txt`);
await writeTextToFile(secretFilePath, `${secret.secretKey}=${secret.secretValue}`);
const secretKeyValueFilePath = join(tempFolder, `${crypto.nativeCrypto.randomUUID()}.txt`);
const secretValueOnlyFilePath = join(tempFolder, `${crypto.nativeCrypto.randomUUID()}.txt`);
await writeTextToFile(secretKeyValueFilePath, `${secret.secretKey}=${secret.secretValue}`);
await writeTextToFile(secretValueOnlyFilePath, secret.secretValue);

try {
await scanFile(secretFilePath);
await scanFile(secretKeyValueFilePath);
await scanFile(secretValueOnlyFilePath, configPath);
} catch (error) {
throw new BadRequestError({
message: `Secret value detected in ${secret.secretKey}. Please add this instead to the designated secrets path in the project.`,
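For reference, a sketch of what the generated scan configuration above resolves to when the default entropy threshold of 3.7 is used; this simply evaluates the same template string shown in the diff:

// Sketch: the rendered infisical-scan.toml contents for entropy = 3.7.
const entropy = 3.7;
const renderedConfig = `
[extend]
useDefault = true

[[rules]]
id = "high-entropy"
description = "Will scan for high entropy secrets"
regex = '''.*'''
entropy = ${entropy}
keywords = []
`;
console.log(renderedConfig); // written to the temp folder and passed to scanFile via -c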
@@ -79,6 +79,7 @@ const envSchema = z
QUEUE_WORKER_PROFILE: z.nativeEnum(QueueWorkerProfile).default(QueueWorkerProfile.All),
HTTPS_ENABLED: zodStrBool,
ROTATION_DEVELOPMENT_MODE: zodStrBool.default("false").optional(),
DAILY_RESOURCE_CLEAN_UP_DEVELOPMENT_MODE: zodStrBool.default("false").optional(),
// smtp options
SMTP_HOST: zpStr(z.string().optional()),
SMTP_IGNORE_TLS: zodStrBool.default("false"),
@@ -215,6 +216,7 @@ const envSchema = z
return JSON.parse(val) as { secretPath: string; projectId: string }[];
})
),
PARAMS_FOLDER_SECRET_DETECTION_ENTROPY: z.coerce.number().optional().default(3.7),

// HSM
HSM_LIB_PATH: zpStr(z.string().optional()),
@@ -346,7 +348,11 @@ const envSchema = z
isSmtpConfigured: Boolean(data.SMTP_HOST),
isRedisConfigured: Boolean(data.REDIS_URL || data.REDIS_SENTINEL_HOSTS),
isDevelopmentMode: data.NODE_ENV === "development",
isRotationDevelopmentMode: data.NODE_ENV === "development" && data.ROTATION_DEVELOPMENT_MODE,
isTestMode: data.NODE_ENV === "test",
isRotationDevelopmentMode:
(data.NODE_ENV === "development" && data.ROTATION_DEVELOPMENT_MODE) || data.NODE_ENV === "test",
isDailyResourceCleanUpDevelopmentMode:
data.NODE_ENV === "development" && data.DAILY_RESOURCE_CLEAN_UP_DEVELOPMENT_MODE,
isProductionMode: data.NODE_ENV === "production" || IS_PACKAGED,
isRedisSentinelMode: Boolean(data.REDIS_SENTINEL_HOSTS),
REDIS_SENTINEL_HOSTS: data.REDIS_SENTINEL_HOSTS?.trim()
@@ -19,3 +19,17 @@ export const prefixWithSlash = (str: string) => {
const vowelRegex = new RE2(/^[aeiou]/i);

export const startsWithVowel = (str: string) => vowelRegex.test(str);

const pickWordsRegex = new RE2(/(\W+)/);
export const sanitizeString = (dto: { unsanitizedString: string; tokens: string[] }) => {
const words = dto.unsanitizedString.split(pickWordsRegex);

const redactionSet = new Set(dto.tokens.filter(Boolean));
const sanitizedWords = words.map((el) => {
if (redactionSet.has(el)) {
return "[REDACTED]";
}
return el;
});
return sanitizedWords.join("");
};
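A short usage sketch of the sanitizeString helper added above; the input strings are made up for illustration. Because the split keeps non-word separators, only exact whole-token matches are replaced and the surrounding punctuation and spacing are preserved:

// Sketch: redacting known sensitive tokens from a log-style message.
const sanitized = sanitizeString({
  unsanitizedString: "connection failed for user admin with password hunter2",
  tokens: ["admin", "hunter2"]
});
// -> "connection failed for user [REDACTED] with password [REDACTED]"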
@@ -20,7 +20,10 @@ export const triggerWorkflowIntegrationNotification = async (dto: TTriggerWorkfl
const slackConfig = await projectSlackConfigDAL.getIntegrationDetailsByProject(projectId);

if (slackConfig) {
if (notification.type === TriggerFeature.ACCESS_REQUEST) {
if (
notification.type === TriggerFeature.ACCESS_REQUEST ||
notification.type === TriggerFeature.ACCESS_REQUEST_UPDATED
) {
const targetChannelIds = slackConfig.accessRequestChannels?.split(", ") || [];
if (targetChannelIds.length && slackConfig.isAccessRequestNotificationEnabled) {
await sendSlackNotification({
@@ -50,7 +53,10 @@ export const triggerWorkflowIntegrationNotification = async (dto: TTriggerWorkfl
}

if (microsoftTeamsConfig) {
if (notification.type === TriggerFeature.ACCESS_REQUEST) {
if (
notification.type === TriggerFeature.ACCESS_REQUEST ||
notification.type === TriggerFeature.ACCESS_REQUEST_UPDATED
) {
if (microsoftTeamsConfig.isAccessRequestNotificationEnabled && microsoftTeamsConfig.accessRequestChannels) {
const { success, data } = validateMicrosoftTeamsChannelsSchema.safeParse(
microsoftTeamsConfig.accessRequestChannels
@@ -6,7 +6,8 @@ import { TProjectSlackConfigDALFactory } from "@app/services/slack/project-slack

export enum TriggerFeature {
SECRET_APPROVAL = "secret-approval",
ACCESS_REQUEST = "access-request"
ACCESS_REQUEST = "access-request",
ACCESS_REQUEST_UPDATED = "access-request-updated"
}

export type TNotification =
@@ -34,6 +35,22 @@ export type TNotification =
approvalUrl: string;
note?: string;
};
}
| {
type: TriggerFeature.ACCESS_REQUEST_UPDATED;
payload: {
requesterFullName: string;
requesterEmail: string;
isTemporary: boolean;
secretPath: string;
environment: string;
projectName: string;
permissions: string[];
approvalUrl: string;
editNote?: string;
editorFullName?: string;
editorEmail?: string;
};
};

export type TTriggerWorkflowNotificationDTO = {
@@ -45,6 +45,8 @@ import { groupServiceFactory } from "@app/ee/services/group/group-service";
import { userGroupMembershipDALFactory } from "@app/ee/services/group/user-group-membership-dal";
import { hsmServiceFactory } from "@app/ee/services/hsm/hsm-service";
import { HsmModule } from "@app/ee/services/hsm/hsm-types";
import { identityAuthTemplateDALFactory } from "@app/ee/services/identity-auth-template/identity-auth-template-dal";
import { identityAuthTemplateServiceFactory } from "@app/ee/services/identity-auth-template/identity-auth-template-service";
import { identityProjectAdditionalPrivilegeDALFactory } from "@app/ee/services/identity-project-additional-privilege/identity-project-additional-privilege-dal";
import { identityProjectAdditionalPrivilegeServiceFactory } from "@app/ee/services/identity-project-additional-privilege/identity-project-additional-privilege-service";
import { identityProjectAdditionalPrivilegeV2ServiceFactory } from "@app/ee/services/identity-project-additional-privilege-v2/identity-project-additional-privilege-v2-service";
@@ -179,8 +181,6 @@ import { identityAccessTokenDALFactory } from "@app/services/identity-access-tok
import { identityAccessTokenServiceFactory } from "@app/services/identity-access-token/identity-access-token-service";
import { identityAliCloudAuthDALFactory } from "@app/services/identity-alicloud-auth/identity-alicloud-auth-dal";
import { identityAliCloudAuthServiceFactory } from "@app/services/identity-alicloud-auth/identity-alicloud-auth-service";
import { identityAuthTemplateDALFactory } from "@app/ee/services/identity-auth-template/identity-auth-template-dal";
import { identityAuthTemplateServiceFactory } from "@app/ee/services/identity-auth-template/identity-auth-template-service";
import { identityAwsAuthDALFactory } from "@app/services/identity-aws-auth/identity-aws-auth-dal";
import { identityAwsAuthServiceFactory } from "@app/services/identity-aws-auth/identity-aws-auth-service";
import { identityAzureAuthDALFactory } from "@app/services/identity-azure-auth/identity-azure-auth-dal";
@@ -849,8 +849,6 @@ export const registerRoutes = async (
projectDAL,
permissionService,
projectUserMembershipRoleDAL,
projectBotDAL,
projectKeyDAL,
projectMembershipDAL
});

@@ -1974,7 +1972,7 @@ export const registerRoutes = async (

await telemetryQueue.startTelemetryCheck();
await telemetryQueue.startAggregatedEventsJob();
await dailyResourceCleanUp.startCleanUp();
await dailyResourceCleanUp.init();
await dailyReminderQueueService.startDailyRemindersJob();
await dailyReminderQueueService.startSecretReminderMigrationJob();
await dailyExpiringPkiItemAlert.startSendingAlerts();
@@ -583,16 +583,7 @@ export const registerAdminRouter = async (server: FastifyZodProvider) => {
email: z.string().email().trim(),
password: z.string().trim(),
firstName: z.string().trim(),
lastName: z.string().trim().optional(),
protectedKey: z.string().trim(),
protectedKeyIV: z.string().trim(),
protectedKeyTag: z.string().trim(),
publicKey: z.string().trim(),
encryptedPrivateKey: z.string().trim(),
encryptedPrivateKeyIV: z.string().trim(),
encryptedPrivateKeyTag: z.string().trim(),
salt: z.string().trim(),
verifier: z.string().trim()
lastName: z.string().trim().optional()
}),
response: {
200: z.object({
@@ -2,6 +2,7 @@ import { ForbiddenError } from "@casl/ability";
import { z } from "zod";

import { SecretFoldersSchema, SecretImportsSchema, UsersSchema } from "@app/db/schemas";
import { RemindersSchema } from "@app/db/schemas/reminders";
import { EventType, UserAgentType } from "@app/ee/services/audit-log/audit-log-types";
import { ProjectPermissionSecretActions } from "@app/ee/services/permission/project-permission";
import { SecretRotationV2Schema } from "@app/ee/services/secret-rotation-v2/secret-rotation-v2-union-schema";
@@ -628,7 +629,10 @@ export const registerDashboardRouter = async (server: FastifyZodProvider) => {
secretValueHidden: z.boolean(),
secretPath: z.string().optional(),
secretMetadata: ResourceMetadataSchema.optional(),
tags: SanitizedTagSchema.array().optional()
tags: SanitizedTagSchema.array().optional(),
reminder: RemindersSchema.extend({
recipients: z.string().array().optional()
}).nullish()
})
.array()
.optional(),
@@ -706,7 +710,11 @@ export const registerDashboardRouter = async (server: FastifyZodProvider) => {

let imports: Awaited<ReturnType<typeof server.services.secretImport.getImports>> | undefined;
let folders: Awaited<ReturnType<typeof server.services.folder.getFolders>> | undefined;
let secrets: Awaited<ReturnType<typeof server.services.secret.getSecretsRaw>>["secrets"] | undefined;
let secrets:
| (Awaited<ReturnType<typeof server.services.secret.getSecretsRaw>>["secrets"][number] & {
reminder: Awaited<ReturnType<typeof server.services.reminder.getRemindersForDashboard>>[string] | null;
})[]
| undefined;
let dynamicSecrets: Awaited<ReturnType<typeof server.services.dynamicSecret.listDynamicSecretsByEnv>> | undefined;
let secretRotations:
| Awaited<ReturnType<typeof server.services.secretRotationV2.getDashboardSecretRotations>>
@@ -904,7 +912,7 @@ export const registerDashboardRouter = async (server: FastifyZodProvider) => {
});

if (remainingLimit > 0 && totalSecretCount > adjustedOffset) {
secrets = (
const rawSecrets = (
await server.services.secret.getSecretsRaw({
actorId: req.permission.id,
actor: req.permission.type,
@@ -925,6 +933,15 @@ export const registerDashboardRouter = async (server: FastifyZodProvider) => {
includeMetadataInSearch: true
})
).secrets;

const reminders = await server.services.reminder.getRemindersForDashboard(
rawSecrets.map((secret) => secret.id)
);

secrets = rawSecrets.map((secret) => ({
...secret,
reminder: reminders[secret.id] ?? null
}));
}
}
} catch (error) {
@@ -45,7 +45,7 @@ export const registerSecretFolderRouter = async (server: FastifyZodProvider) =>
.transform(removeTrailingSlash)
.describe(FOLDERS.CREATE.path)
.optional(),
// backward compatiability with cli
// backward compatibility with cli
directory: z
.string()
.trim()
@@ -58,7 +58,9 @@ export const registerSecretFolderRouter = async (server: FastifyZodProvider) =>
}),
response: {
200: z.object({
folder: SecretFoldersSchema
folder: SecretFoldersSchema.extend({
path: z.string()
})
})
}
},
@@ -130,7 +132,7 @@ export const registerSecretFolderRouter = async (server: FastifyZodProvider) =>
.transform(removeTrailingSlash)
.describe(FOLDERS.UPDATE.path)
.optional(),
// backward compatiability with cli
// backward compatibility with cli
directory: z
.string()
.trim()
@@ -143,7 +145,9 @@ export const registerSecretFolderRouter = async (server: FastifyZodProvider) =>
}),
response: {
200: z.object({
folder: SecretFoldersSchema
folder: SecretFoldersSchema.extend({
path: z.string()
})
})
}
},
@@ -359,7 +363,7 @@ export const registerSecretFolderRouter = async (server: FastifyZodProvider) =>
.transform(removeTrailingSlash)
.describe(FOLDERS.LIST.path)
.optional(),
// backward compatiability with cli
// backward compatibility with cli
directory: z
.string()
.trim()
@@ -283,6 +283,14 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
rateLimit: readLimit
},
schema: {
hide: false,
tags: [ApiDocsTags.Projects],
description: "Get project details by slug",
security: [
{
bearerAuth: []
}
],
params: z.object({
slug: slugSchema({ max: 36 }).describe("The slug of the project to get.")
}),
@@ -19,7 +19,7 @@ export const registerExternalMigrationRouter = async (server: FastifyZodProvider
config: {
rateLimit: writeLimit
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const data = await req.file({
limits: {
@@ -69,7 +69,7 @@ export const registerExternalMigrationRouter = async (server: FastifyZodProvider
mappingType: z.nativeEnum(VaultMappingType)
})
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
await server.services.migration.importVaultData({
actorId: req.permission.id,
@@ -44,7 +44,8 @@ const getConnectionConfig = ({
? {
trustServerCertificate: !sslRejectUnauthorized,
encrypt: true,
cryptoCredentialsDetails: sslCertificate ? { ca: sslCertificate } : {}
cryptoCredentialsDetails: sslCertificate ? { ca: sslCertificate } : {},
servername: host
}
: { encrypt: false }
};
@@ -148,9 +148,15 @@ export const authLoginServiceFactory = ({

if (organizationId) {
const org = await orgDAL.findById(organizationId);
if (org && org.userTokenExpiration) {
tokenSessionExpiresIn = getMinExpiresIn(cfg.JWT_AUTH_LIFETIME, org.userTokenExpiration);
refreshTokenExpiresIn = org.userTokenExpiration;
if (org) {
await orgMembershipDAL.update(
{ userId: user.id, orgId: org.id },
{ lastLoginAuthMethod: authMethod, lastLoginTime: new Date() }
);
if (org.userTokenExpiration) {
tokenSessionExpiresIn = getMinExpiresIn(cfg.JWT_AUTH_LIFETIME, org.userTokenExpiration);
refreshTokenExpiresIn = org.userTokenExpiration;
}
}
}

@@ -818,7 +824,6 @@ export const authLoginServiceFactory = ({
}
}

const userEnc = await userDAL.findUserEncKeyByUserId(user.id);
const isUserCompleted = user.isAccepted;
const providerAuthToken = crypto.jwt().sign(
{
@@ -829,7 +834,7 @@ export const authLoginServiceFactory = ({
isEmailVerified: user.isEmailVerified,
firstName: user.firstName,
lastName: user.lastName,
hasExchangedPrivateKey: Boolean(userEnc?.serverEncryptedPrivateKey),
hasExchangedPrivateKey: true,
authMethod,
isUserCompleted,
...(callbackPort
@@ -874,8 +879,7 @@ export const authLoginServiceFactory = ({
const userEnc =
usersByUsername?.length > 1 ? usersByUsername.find((el) => el.username === email) : usersByUsername?.[0];

if (!userEnc?.serverEncryptedPrivateKey)
throw new BadRequestError({ message: "Key handoff incomplete. Please try logging in again." });
if (!userEnc) throw new BadRequestError({ message: "User encryption not found" });

const token = await generateUserTokens({
user: { ...userEnc, id: userEnc.userId },
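For intuition, a hypothetical sketch of the session-expiry rule applied above: the session lifetime becomes the shorter of the global JWT lifetime and the organization's userTokenExpiration. The repo's actual getMinExpiresIn handles duration strings; the helper below is a simplified illustration in plain seconds, not the real implementation.

// Sketch only: pick the shorter of two lifetimes expressed in seconds.
const minExpiresInSeconds = (globalLifetimeSeconds: number, orgLifetimeSeconds: number) =>
  Math.min(globalLifetimeSeconds, orgLifetimeSeconds);

minExpiresInSeconds(86400, 3600); // -> 3600: the org-level cap wins over the 24h default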
@@ -32,8 +32,8 @@ import {
keyAlgorithmToAlgCfg
} from "../certificate-authority-fns";
import { TCertificateAuthoritySecretDALFactory } from "../certificate-authority-secret-dal";
import { TIssueCertWithTemplateDTO } from "./internal-certificate-authority-types";
import { validateAndMapAltNameType } from "../certificate-authority-validators";
import { TIssueCertWithTemplateDTO } from "./internal-certificate-authority-types";

type TInternalCertificateAuthorityFnsDeps = {
certificateAuthorityDAL: Pick<TCertificateAuthorityDALFactory, "findByIdWithAssociatedCa" | "findById">;
@@ -52,6 +52,7 @@ import {
} from "../certificate-authority-fns";
import { TCertificateAuthorityQueueFactory } from "../certificate-authority-queue";
import { TCertificateAuthoritySecretDALFactory } from "../certificate-authority-secret-dal";
import { validateAndMapAltNameType } from "../certificate-authority-validators";
import { TInternalCertificateAuthorityDALFactory } from "./internal-certificate-authority-dal";
import {
TCreateCaDTO,
@@ -68,7 +69,6 @@ import {
TSignIntermediateDTO,
TUpdateCaDTO
} from "./internal-certificate-authority-types";
import { validateAndMapAltNameType } from "../certificate-authority-validators";

type TInternalCertificateAuthorityServiceFactoryDep = {
certificateAuthorityDAL: Pick<
@@ -254,29 +254,26 @@ export const transformToInfisicalFormatNamespaceToProjects = (
let currentFolderId: string | undefined;
let currentPath = "";

if (path.includes("/")) {
const pathParts = path.split("/").filter(Boolean);
const pathParts = path.split("/").filter(Boolean);
const folderParts = pathParts;

const folderParts = pathParts;
// create nested folder structure for the entire path
for (const folderName of folderParts) {
currentPath = currentPath ? `${currentPath}/${folderName}` : folderName;
const folderKey = `${namespace}:${mount}:${currentPath}`;

// create nested folder structure for the entire path
for (const folderName of folderParts) {
currentPath = currentPath ? `${currentPath}/${folderName}` : folderName;
const folderKey = `${namespace}:${mount}:${currentPath}`;

if (!folderMap.has(folderKey)) {
const folderId = uuidv4();
folderMap.set(folderKey, folderId);
folders.push({
id: folderId,
name: folderName,
environmentId,
parentFolderId: currentFolderId || environmentId
});
currentFolderId = folderId;
} else {
currentFolderId = folderMap.get(folderKey)!;
}
if (!folderMap.has(folderKey)) {
const folderId = uuidv4();
folderMap.set(folderKey, folderId);
folders.push({
id: folderId,
name: folderName,
environmentId,
parentFolderId: currentFolderId || environmentId
});
currentFolderId = folderId;
} else {
currentFolderId = folderMap.get(folderKey)!;
}
}
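A small standalone sketch of the folder keying the loop above performs, using made-up values; for a Vault path "app/prod/db" under mount "kv" in namespace "ns", one folder per segment is created (or reused) and keyed by its cumulative path:

// Sketch: reproducing just the key computation from the loop above.
const namespace = "ns";
const mount = "kv";
let currentPath = "";
const folderKeys = "app/prod/db".split("/").filter(Boolean).map((folderName) => {
  currentPath = currentPath ? `${currentPath}/${folderName}` : folderName;
  return `${namespace}:${mount}:${currentPath}`;
});
// folderKeys -> ["ns:kv:app", "ns:kv:app/prod", "ns:kv:app/prod/db"]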
@@ -38,7 +38,7 @@ type TIdentityAliCloudAuthServiceFactoryDep = {
TIdentityAliCloudAuthDALFactory,
"findOne" | "transaction" | "create" | "updateById" | "delete"
>;
identityOrgMembershipDAL: Pick<TIdentityOrgDALFactory, "findOne">;
identityOrgMembershipDAL: Pick<TIdentityOrgDALFactory, "findOne" | "updateById">;
licenseService: Pick<TLicenseServiceFactory, "getPlan">;
permissionService: Pick<TPermissionServiceFactory, "getOrgPermission">;
};
@@ -64,6 +64,8 @@ export const identityAliCloudAuthServiceFactory = ({
identityId: identityAliCloudAuth.identityId
});

if (!identityMembershipOrg) throw new UnauthorizedError({ message: "Identity not attached to a organization" });

const requestUrl = new URL("https://sts.aliyuncs.com");

for (const key of Object.keys(params)) {
@@ -87,6 +89,14 @@ export const identityAliCloudAuthServiceFactory = ({

// Generate the token
const identityAccessToken = await identityAliCloudAuthDAL.transaction(async (tx) => {
await identityOrgMembershipDAL.updateById(
identityMembershipOrg.id,
{
lastLoginAuthMethod: IdentityAuthMethod.ALICLOUD_AUTH,
lastLoginTime: new Date()
},
tx
);
const newToken = await identityAccessTokenDAL.create(
{
identityId: identityAliCloudAuth.identityId,
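The same two-field write recurs in each identity auth method below (AWS, Azure, GCP, JWT, Kubernetes, LDAP, OCI, OIDC, TLS cert, token, universal auth). A hypothetical sketch of that repeated pattern expressed as a shared helper, purely for illustration; the diff applies it inline inside each method's transaction rather than through a helper like this:

// Sketch only: record the auth method and time of the last login for an identity org membership.
const recordIdentityLogin = async (
  deps: { identityOrgMembershipDAL: { updateById: (id: string, data: object, tx?: unknown) => Promise<unknown> } },
  membershipId: string,
  authMethod: string,
  tx?: unknown
) =>
  deps.identityOrgMembershipDAL.updateById(
    membershipId,
    { lastLoginAuthMethod: authMethod, lastLoginTime: new Date() },
    tx
  );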
@@ -36,7 +36,7 @@ import {
type TIdentityAwsAuthServiceFactoryDep = {
identityAccessTokenDAL: Pick<TIdentityAccessTokenDALFactory, "create" | "delete">;
identityAwsAuthDAL: Pick<TIdentityAwsAuthDALFactory, "findOne" | "transaction" | "create" | "updateById" | "delete">;
identityOrgMembershipDAL: Pick<TIdentityOrgDALFactory, "findOne">;
identityOrgMembershipDAL: Pick<TIdentityOrgDALFactory, "findOne" | "updateById">;
licenseService: Pick<TLicenseServiceFactory, "getPlan">;
permissionService: Pick<TPermissionServiceFactory, "getOrgPermission">;
};
@@ -91,6 +91,7 @@ export const identityAwsAuthServiceFactory = ({
}

const identityMembershipOrg = await identityOrgMembershipDAL.findOne({ identityId: identityAwsAuth.identityId });
if (!identityMembershipOrg) throw new UnauthorizedError({ message: "Identity not attached to a organization" });

const headers: TAwsGetCallerIdentityHeaders = JSON.parse(Buffer.from(iamRequestHeaders, "base64").toString());
const body: string = Buffer.from(iamRequestBody, "base64").toString();
@@ -152,6 +153,14 @@ export const identityAwsAuthServiceFactory = ({
}

const identityAccessToken = await identityAwsAuthDAL.transaction(async (tx) => {
await identityOrgMembershipDAL.updateById(
identityMembershipOrg.id,
{
lastLoginAuthMethod: IdentityAuthMethod.AWS_AUTH,
lastLoginTime: new Date()
},
tx
);
const newToken = await identityAccessTokenDAL.create(
{
identityId: identityAwsAuth.identityId,
@@ -33,7 +33,7 @@ type TIdentityAzureAuthServiceFactoryDep = {
TIdentityAzureAuthDALFactory,
"findOne" | "transaction" | "create" | "updateById" | "delete"
>;
identityOrgMembershipDAL: Pick<TIdentityOrgDALFactory, "findOne">;
identityOrgMembershipDAL: Pick<TIdentityOrgDALFactory, "findOne" | "updateById">;
identityAccessTokenDAL: Pick<TIdentityAccessTokenDALFactory, "create" | "delete">;
permissionService: Pick<TPermissionServiceFactory, "getOrgPermission">;
licenseService: Pick<TLicenseServiceFactory, "getPlan">;
@@ -80,6 +80,14 @@ export const identityAzureAuthServiceFactory = ({
}

const identityAccessToken = await identityAzureAuthDAL.transaction(async (tx) => {
await identityOrgMembershipDAL.updateById(
identityMembershipOrg.id,
{
lastLoginAuthMethod: IdentityAuthMethod.AZURE_AUTH,
lastLoginTime: new Date()
},
tx
);
const newToken = await identityAccessTokenDAL.create(
{
identityId: identityAzureAuth.identityId,
@@ -31,7 +31,7 @@ import {

type TIdentityGcpAuthServiceFactoryDep = {
identityGcpAuthDAL: Pick<TIdentityGcpAuthDALFactory, "findOne" | "transaction" | "create" | "updateById" | "delete">;
identityOrgMembershipDAL: Pick<TIdentityOrgDALFactory, "findOne">;
identityOrgMembershipDAL: Pick<TIdentityOrgDALFactory, "findOne" | "updateById">;
identityAccessTokenDAL: Pick<TIdentityAccessTokenDALFactory, "create" | "delete">;
permissionService: Pick<TPermissionServiceFactory, "getOrgPermission">;
licenseService: Pick<TLicenseServiceFactory, "getPlan">;
@@ -119,6 +119,14 @@ export const identityGcpAuthServiceFactory = ({
}

const identityAccessToken = await identityGcpAuthDAL.transaction(async (tx) => {
await identityOrgMembershipDAL.updateById(
identityMembershipOrg.id,
{
lastLoginAuthMethod: IdentityAuthMethod.GCP_AUTH,
lastLoginTime: new Date()
},
tx
);
const newToken = await identityAccessTokenDAL.create(
{
identityId: identityGcpAuth.identityId,
@@ -43,7 +43,7 @@ import {

type TIdentityJwtAuthServiceFactoryDep = {
identityJwtAuthDAL: TIdentityJwtAuthDALFactory;
identityOrgMembershipDAL: Pick<TIdentityOrgDALFactory, "findOne">;
identityOrgMembershipDAL: Pick<TIdentityOrgDALFactory, "findOne" | "updateById">;
identityAccessTokenDAL: Pick<TIdentityAccessTokenDALFactory, "create" | "delete">;
permissionService: Pick<TPermissionServiceFactory, "getOrgPermission">;
licenseService: Pick<TLicenseServiceFactory, "getPlan">;
@@ -209,6 +209,14 @@ export const identityJwtAuthServiceFactory = ({
}

const identityAccessToken = await identityJwtAuthDAL.transaction(async (tx) => {
await identityOrgMembershipDAL.updateById(
identityMembershipOrg.id,
{
lastLoginAuthMethod: IdentityAuthMethod.JWT_AUTH,
lastLoginTime: new Date()
},
tx
);
const newToken = await identityAccessTokenDAL.create(
{
identityId: identityJwtAuth.identityId,
@@ -49,7 +49,7 @@ type TIdentityKubernetesAuthServiceFactoryDep = {
"create" | "findOne" | "transaction" | "updateById" | "delete"
>;
identityAccessTokenDAL: Pick<TIdentityAccessTokenDALFactory, "create" | "delete">;
identityOrgMembershipDAL: Pick<TIdentityOrgDALFactory, "findOne" | "findById">;
identityOrgMembershipDAL: Pick<TIdentityOrgDALFactory, "findOne" | "findById" | "updateById">;
permissionService: Pick<TPermissionServiceFactory, "getOrgPermission">;
licenseService: Pick<TLicenseServiceFactory, "getPlan">;
kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">;
@@ -380,6 +380,14 @@ export const identityKubernetesAuthServiceFactory = ({
}

const identityAccessToken = await identityKubernetesAuthDAL.transaction(async (tx) => {
await identityOrgMembershipDAL.updateById(
identityMembershipOrg.id,
{
lastLoginAuthMethod: IdentityAuthMethod.KUBERNETES_AUTH,
lastLoginTime: new Date()
},
tx
);
const newToken = await identityAccessTokenDAL.create(
{
identityId: identityKubernetesAuth.identityId,
@@ -44,7 +44,7 @@ type TIdentityLdapAuthServiceFactoryDep = {
TIdentityLdapAuthDALFactory,
"findOne" | "transaction" | "create" | "updateById" | "delete"
>;
identityOrgMembershipDAL: Pick<TIdentityOrgDALFactory, "findOne">;
identityOrgMembershipDAL: Pick<TIdentityOrgDALFactory, "findOne" | "updateById">;
licenseService: Pick<TLicenseServiceFactory, "getPlan">;
permissionService: Pick<TPermissionServiceFactory, "getOrgPermission">;
kmsService: TKmsServiceFactory;
@@ -144,6 +144,14 @@ export const identityLdapAuthServiceFactory = ({
}

const identityAccessToken = await identityLdapAuthDAL.transaction(async (tx) => {
await identityOrgMembershipDAL.updateById(
identityMembershipOrg.id,
{
lastLoginAuthMethod: IdentityAuthMethod.LDAP_AUTH,
lastLoginTime: new Date()
},
tx
);
const newToken = await identityAccessTokenDAL.create(
{
identityId: identityLdapAuth.identityId,
@@ -36,7 +36,7 @@ import {
type TIdentityOciAuthServiceFactoryDep = {
identityAccessTokenDAL: Pick<TIdentityAccessTokenDALFactory, "create" | "delete">;
identityOciAuthDAL: Pick<TIdentityOciAuthDALFactory, "findOne" | "transaction" | "create" | "updateById" | "delete">;
identityOrgMembershipDAL: Pick<TIdentityOrgDALFactory, "findOne">;
identityOrgMembershipDAL: Pick<TIdentityOrgDALFactory, "findOne" | "updateById">;
licenseService: Pick<TLicenseServiceFactory, "getPlan">;
permissionService: Pick<TPermissionServiceFactory, "getOrgPermission">;
};
@@ -57,6 +57,7 @@ export const identityOciAuthServiceFactory = ({
}

const identityMembershipOrg = await identityOrgMembershipDAL.findOne({ identityId: identityOciAuth.identityId });
if (!identityMembershipOrg) throw new UnauthorizedError({ message: "Identity not attached to a organization" });

// Validate OCI host format. Ensures that the host is in "identity.<region>.oraclecloud.com" format.
if (!headers.host || !new RE2("^identity\\.([a-z]{2}-[a-z]+-[1-9])\\.oraclecloud\\.com$").test(headers.host)) {
@@ -91,6 +92,14 @@ export const identityOciAuthServiceFactory = ({

// Generate the token
const identityAccessToken = await identityOciAuthDAL.transaction(async (tx) => {
await identityOrgMembershipDAL.updateById(
identityMembershipOrg.id,
{
lastLoginAuthMethod: IdentityAuthMethod.OCI_AUTH,
lastLoginTime: new Date()
},
tx
);
const newToken = await identityAccessTokenDAL.create(
{
identityId: identityOciAuth.identityId,
@@ -43,7 +43,7 @@ import {

type TIdentityOidcAuthServiceFactoryDep = {
identityOidcAuthDAL: TIdentityOidcAuthDALFactory;
identityOrgMembershipDAL: Pick<TIdentityOrgDALFactory, "findOne">;
identityOrgMembershipDAL: Pick<TIdentityOrgDALFactory, "findOne" | "updateById">;
identityAccessTokenDAL: Pick<TIdentityAccessTokenDALFactory, "create" | "delete">;
permissionService: Pick<TPermissionServiceFactory, "getOrgPermission">;
licenseService: Pick<TLicenseServiceFactory, "getPlan">;
@@ -178,6 +178,14 @@ export const identityOidcAuthServiceFactory = ({
}

const identityAccessToken = await identityOidcAuthDAL.transaction(async (tx) => {
await identityOrgMembershipDAL.updateById(
identityMembershipOrg.id,
{
lastLoginAuthMethod: IdentityAuthMethod.OIDC_AUTH,
lastLoginTime: new Date()
},
tx
);
const newToken = await identityAccessTokenDAL.create(
{
identityId: identityOidcAuth.identityId,
@@ -30,7 +30,7 @@ type TIdentityTlsCertAuthServiceFactoryDep = {
TIdentityTlsCertAuthDALFactory,
"findOne" | "transaction" | "create" | "updateById" | "delete"
>;
identityOrgMembershipDAL: Pick<TIdentityOrgDALFactory, "findOne">;
identityOrgMembershipDAL: Pick<TIdentityOrgDALFactory, "findOne" | "updateById">;
licenseService: Pick<TLicenseServiceFactory, "getPlan">;
permissionService: Pick<TPermissionServiceFactory, "getOrgPermission">;
kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">;
@@ -118,6 +118,14 @@ export const identityTlsCertAuthServiceFactory = ({

// Generate the token
const identityAccessToken = await identityTlsCertAuthDAL.transaction(async (tx) => {
await identityOrgMembershipDAL.updateById(
identityMembershipOrg.id,
{
lastLoginAuthMethod: IdentityAuthMethod.TLS_CERT_AUTH,
lastLoginTime: new Date()
},
tx
);
const newToken = await identityAccessTokenDAL.create(
{
identityId: identityTlsCertAuth.identityId,
@@ -35,7 +35,7 @@ type TIdentityTokenAuthServiceFactoryDep = {
TIdentityTokenAuthDALFactory,
"transaction" | "create" | "findOne" | "updateById" | "delete"
>;
identityOrgMembershipDAL: Pick<TIdentityOrgDALFactory, "findOne">;
identityOrgMembershipDAL: Pick<TIdentityOrgDALFactory, "findOne" | "updateById">;
identityAccessTokenDAL: Pick<
TIdentityAccessTokenDALFactory,
"create" | "find" | "update" | "findById" | "findOne" | "updateById" | "delete"
@@ -345,6 +345,14 @@ export const identityTokenAuthServiceFactory = ({
const identityTokenAuth = await identityTokenAuthDAL.findOne({ identityId });

const identityAccessToken = await identityTokenAuthDAL.transaction(async (tx) => {
await identityOrgMembershipDAL.updateById(
identityMembershipOrg.id,
{
lastLoginAuthMethod: IdentityAuthMethod.TOKEN_AUTH,
lastLoginTime: new Date()
},
tx
);
const newToken = await identityAccessTokenDAL.create(
{
identityId: identityTokenAuth.identityId,
@@ -59,6 +59,11 @@ export const identityUaServiceFactory = ({
}

const identityMembershipOrg = await identityOrgMembershipDAL.findOne({ identityId: identityUa.identityId });
if (!identityMembershipOrg) {
throw new NotFoundError({
message: "No identity with the org membership was found"
});
}

checkIPAgainstBlocklist({
ipAddress: ip,
@@ -127,7 +132,14 @@ export const identityUaServiceFactory = ({

const identityAccessToken = await identityUaDAL.transaction(async (tx) => {
const uaClientSecretDoc = await identityUaClientSecretDAL.incrementUsage(validClientSecretInfo!.id, tx);

await identityOrgMembershipDAL.updateById(
identityMembershipOrg.id,
{
lastLoginAuthMethod: IdentityAuthMethod.UNIVERSAL_AUTH,
lastLoginTime: new Date()
},
tx
);
const newToken = await identityAccessTokenDAL.create(
{
identityId: identityUa.identityId,
@@ -254,6 +254,8 @@ export const identityOrgDALFactory = (db: TDbClient) => {
db.ref("role").withSchema("paginatedIdentity"),
db.ref("roleId").withSchema("paginatedIdentity"),
db.ref("orgId").withSchema("paginatedIdentity"),
db.ref("lastLoginAuthMethod").withSchema("paginatedIdentity"),
db.ref("lastLoginTime").withSchema("paginatedIdentity"),
db.ref("createdAt").withSchema("paginatedIdentity"),
db.ref("updatedAt").withSchema("paginatedIdentity"),
db.ref("identityId").withSchema("paginatedIdentity").as("identityId"),
@@ -319,7 +321,9 @@ export const identityOrgDALFactory = (db: TDbClient) => {
ldapId,
tlsCertId,
createdAt,
updatedAt
updatedAt,
lastLoginAuthMethod,
lastLoginTime
}) => ({
role,
roleId,
@@ -328,6 +332,8 @@ export const identityOrgDALFactory = (db: TDbClient) => {
orgId,
createdAt,
updatedAt,
lastLoginAuthMethod,
lastLoginTime,
customRole: roleId
? {
id: crId,
@@ -497,6 +503,8 @@ export const identityOrgDALFactory = (db: TDbClient) => {
db.ref("orgId").withSchema(TableName.IdentityOrgMembership),
db.ref("createdAt").withSchema(TableName.IdentityOrgMembership),
db.ref("updatedAt").withSchema(TableName.IdentityOrgMembership),
db.ref("lastLoginAuthMethod").withSchema(TableName.IdentityOrgMembership),
db.ref("lastLoginTime").withSchema(TableName.IdentityOrgMembership),
db.ref("identityId").withSchema(TableName.IdentityOrgMembership).as("identityId"),
db.ref("name").withSchema(TableName.Identity).as("identityName"),
db.ref("hasDeleteProtection").withSchema(TableName.Identity),
@@ -531,10 +539,10 @@ export const identityOrgDALFactory = (db: TDbClient) => {
} else if (orderBy === OrgIdentityOrderBy.Role) {
void query.orderByRaw(
`
CASE
WHEN ??.role = ?
THEN ??.slug
ELSE ??.role
CASE
WHEN ??.role = ?
THEN ??.slug
ELSE ??.role
END ?
`,
[
@@ -576,7 +584,9 @@ export const identityOrgDALFactory = (db: TDbClient) => {
tokenId,
ldapId,
createdAt,
updatedAt
updatedAt,
lastLoginTime,
lastLoginAuthMethod
}) => ({
role,
roleId,
@@ -586,6 +596,8 @@ export const identityOrgDALFactory = (db: TDbClient) => {
orgId,
createdAt,
updatedAt,
lastLoginTime,
lastLoginAuthMethod,
customRole: roleId
? {
id: crId,
@@ -462,6 +462,54 @@ export const buildTeamsPayload = (notification: TNotification) => {
};
}

case TriggerFeature.ACCESS_REQUEST_UPDATED: {
const { payload } = notification;

const adaptiveCard = {
type: "AdaptiveCard",
$schema: "http://adaptivecards.io/schemas/adaptive-card.json",
version: "1.5",
body: [
{
type: "TextBlock",
text: "Updated access approval request pending for review",
weight: "Bolder",
size: "Large"
},
{
type: "TextBlock",
text: `${payload.editorFullName} (${payload.editorEmail}) has updated the ${
payload.isTemporary ? "temporary" : "permanent"
} access request from ${payload.requesterFullName} (${payload.requesterEmail}) to ${payload.secretPath} in the ${payload.environment} environment of ${payload.projectName}.`,
wrap: true
},
{
type: "TextBlock",
text: `The following permissions are requested: ${payload.permissions.join(", ")}`,
wrap: true
},
payload.editNote
? {
type: "TextBlock",
text: `**Editor Note**: ${payload.editNote}`,
wrap: true
}
: null
].filter(Boolean),
actions: [
{
type: "Action.OpenUrl",
title: "View request in Infisical",
url: payload.approvalUrl
}
]
};

return {
adaptiveCard
};
}

default: {
throw new BadRequestError({
message: "Teams notification type not supported."
@@ -6,8 +6,6 @@ import { TPermissionServiceFactory } from "@app/ee/services/permission/permissio
import { BadRequestError, NotFoundError } from "@app/lib/errors";

import { TProjectDALFactory } from "../project/project-dal";
import { TProjectBotDALFactory } from "../project-bot/project-bot-dal";
import { TProjectKeyDALFactory } from "../project-key/project-key-dal";
import { TProjectMembershipDALFactory } from "../project-membership/project-membership-dal";
import { TProjectUserMembershipRoleDALFactory } from "../project-membership/project-user-membership-role-dal";
import { SmtpTemplates, TSmtpService } from "../smtp/smtp-service";
@@ -20,8 +18,6 @@ type TOrgAdminServiceFactoryDep = {
TProjectMembershipDALFactory,
"findOne" | "create" | "transaction" | "delete" | "findAllProjectMembers"
>;
projectKeyDAL: Pick<TProjectKeyDALFactory, "findLatestProjectKey" | "create">;
projectBotDAL: Pick<TProjectBotDALFactory, "findOne">;
projectUserMembershipRoleDAL: Pick<TProjectUserMembershipRoleDALFactory, "create" | "delete">;
smtpService: Pick<TSmtpService, "sendMail">;
};
@@ -32,8 +28,6 @@ export const orgAdminServiceFactory = ({
permissionService,
projectDAL,
projectMembershipDAL,
projectKeyDAL,
projectBotDAL,
projectUserMembershipRoleDAL,
smtpService
}: TOrgAdminServiceFactoryDep) => {
@@ -119,28 +113,6 @@ export const orgAdminServiceFactory = ({
return { isExistingMember: true, membership: projectMembership };
}

// missing membership thus add admin back as admin to project
const ghostUser = await projectDAL.findProjectGhostUser(projectId);
if (!ghostUser) {
throw new NotFoundError({
message: `Project owner of project with ID '${projectId}' not found`
});
}

const ghostUserLatestKey = await projectKeyDAL.findLatestProjectKey(ghostUser.id, projectId);
if (!ghostUserLatestKey) {
throw new NotFoundError({
message: `Project owner's latest key of project with ID '${projectId}' not found`
});
}

const bot = await projectBotDAL.findOne({ projectId });
if (!bot) {
throw new NotFoundError({
message: `Project bot for project with ID '${projectId}' not found`
});
}

const updatedMembership = await projectMembershipDAL.transaction(async (tx) => {
const newProjectMembership = await projectMembershipDAL.create(
{
@@ -32,6 +32,8 @@ export const orgMembershipDALFactory = (db: TDbClient) => {
db.ref("roleId").withSchema(TableName.OrgMembership),
db.ref("status").withSchema(TableName.OrgMembership),
db.ref("isActive").withSchema(TableName.OrgMembership),
db.ref("lastLoginAuthMethod").withSchema(TableName.OrgMembership),
db.ref("lastLoginTime").withSchema(TableName.OrgMembership),
db.ref("email").withSchema(TableName.Users),
db.ref("username").withSchema(TableName.Users),
db.ref("firstName").withSchema(TableName.Users),
@@ -64,7 +66,9 @@ export const orgMembershipDALFactory = (db: TDbClient) => {
role,
status,
isActive,
inviteEmail
inviteEmail,
lastLoginAuthMethod,
lastLoginTime
}) => ({
roleId,
orgId,
@@ -73,6 +77,8 @@ export const orgMembershipDALFactory = (db: TDbClient) => {
status,
isActive,
inviteEmail,
lastLoginAuthMethod,
lastLoginTime,
user: {
id: userId,
email,
@@ -285,6 +285,8 @@ export const orgDALFactory = (db: TDbClient) => {
db.ref("roleId").withSchema(TableName.OrgMembership),
db.ref("status").withSchema(TableName.OrgMembership),
db.ref("isActive").withSchema(TableName.OrgMembership),
db.ref("lastLoginAuthMethod").withSchema(TableName.OrgMembership),
db.ref("lastLoginTime").withSchema(TableName.OrgMembership),
db.ref("email").withSchema(TableName.Users),
db.ref("isEmailVerified").withSchema(TableName.Users),
db.ref("username").withSchema(TableName.Users),
@@ -124,10 +124,35 @@ export const reminderDALFactory = (db: TDbClient) => {
return reminders[0] || null;
};

const findSecretReminders = async (secretIds: string[], tx?: Knex) => {
const rawReminders = await (tx || db)(TableName.Reminder)
.whereIn(`${TableName.Reminder}.secretId`, secretIds)
.leftJoin(TableName.ReminderRecipient, `${TableName.Reminder}.id`, `${TableName.ReminderRecipient}.reminderId`)
.select(selectAllTableCols(TableName.Reminder))
.select(db.ref("userId").withSchema(TableName.ReminderRecipient));
const reminders = sqlNestRelationships({
data: rawReminders,
key: "id",
parentMapper: (el) => ({
_id: el.id,
...RemindersSchema.parse(el)
}),
childrenMapper: [
{
key: "userId",
label: "recipients" as const,
mapper: ({ userId }) => userId
}
]
});
return reminders;
};

return {
...reminderOrm,
findSecretDailyReminders,
findUpcomingReminders,
findSecretReminder
findSecretReminder,
findSecretReminders
};
};
@@ -372,6 +372,21 @@ export const reminderServiceFactory = ({
};
};

const getRemindersForDashboard: TReminderServiceFactory["getRemindersForDashboard"] = async (secretIds) => {
// scott we don't need to check permissions/secret existence because these are the
// secrets from the dashboard that have already gone through these checks

const reminders = await reminderDAL.findSecretReminders(secretIds);

const reminderMap: Record<string, (typeof reminders)[number]> = {};

reminders.forEach((reminder) => {
if (reminder.secretId) reminderMap[reminder.secretId] = reminder;
});

return reminderMap;
};

return {
createReminder,
getReminder,
@@ -379,6 +394,7 @@ export const reminderServiceFactory = ({
deleteReminder,
deleteReminderBySecretId,
batchCreateReminders,
createReminderInternal
createReminderInternal,
getRemindersForDashboard
};
};
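A short consumer-side sketch of getRemindersForDashboard, mirroring how the dashboard router change earlier in this diff joins reminders onto secrets; secrets and reminderService here stand in for the router's local variables and service handle:

// Sketch: attach each secret's reminder (or null) using the keyed map returned above.
const reminders = await reminderService.getRemindersForDashboard(secrets.map((s) => s.id));
const secretsWithReminders = secrets.map((secret) => ({
  ...secret,
  reminder: reminders[secret.id] ?? null
}));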
@@ -103,4 +103,6 @@ export interface TReminderServiceFactory {
id: string;
created: boolean;
}>;

getRemindersForDashboard: (secretIds: string[]) => Promise<Record<string, TReminder & { recipients: string[] }>>;
}
@@ -1,5 +1,6 @@
|
||||
import { TAuditLogDALFactory } from "@app/ee/services/audit-log/audit-log-dal";
|
||||
import { TSnapshotDALFactory } from "@app/ee/services/secret-snapshot/snapshot-dal";
|
||||
import { getConfig } from "@app/lib/config/env";
|
||||
import { logger } from "@app/lib/logger";
|
||||
import { QueueJobs, QueueName, TQueueServiceFactory } from "@app/queue";

@@ -41,32 +42,19 @@ export const dailyResourceCleanUpQueueServiceFactory = ({
  serviceTokenService,
  orgService
}: TDailyResourceCleanUpQueueServiceFactoryDep) => {
  queueService.start(QueueName.DailyResourceCleanUp, async () => {
    logger.info(`${QueueName.DailyResourceCleanUp}: queue task started`);
    await identityAccessTokenDAL.removeExpiredTokens();
    await identityUniversalAuthClientSecretDAL.removeExpiredClientSecrets();
    await secretSharingDAL.pruneExpiredSharedSecrets();
    await secretSharingDAL.pruneExpiredSecretRequests();
    await snapshotDAL.pruneExcessSnapshots();
    await secretVersionDAL.pruneExcessVersions();
    await secretVersionV2DAL.pruneExcessVersions();
    await secretFolderVersionDAL.pruneExcessVersions();
    await serviceTokenService.notifyExpiringTokens();
    await orgService.notifyInvitedUsers();
    await auditLogDAL.pruneAuditLog();
    logger.info(`${QueueName.DailyResourceCleanUp}: queue task completed`);
  });
  const appCfg = getConfig();

  // we do a repeat cron job in utc timezone at 12 Midnight each day
  const startCleanUp = async () => {
    // TODO(akhilmhdh): remove later
    if (appCfg.isDailyResourceCleanUpDevelopmentMode) {
      logger.warn("Daily Resource Clean Up is in development mode.");
    }

  const init = async () => {
    await queueService.stopRepeatableJob(
      QueueName.AuditLogPrune,
      QueueJobs.AuditLogPrune,
      { pattern: "0 0 * * *", utc: true },
      QueueName.AuditLogPrune // just a job id
    );
    // clear previous job
    await queueService.stopRepeatableJob(
      QueueName.DailyResourceCleanUp,
      QueueJobs.DailyResourceCleanUp,
@@ -74,18 +62,43 @@ export const dailyResourceCleanUpQueueServiceFactory = ({
      QueueName.DailyResourceCleanUp // just a job id
    );

    await queueService.queue(QueueName.DailyResourceCleanUp, QueueJobs.DailyResourceCleanUp, undefined, {
      delay: 5000,
      jobId: QueueName.DailyResourceCleanUp,
      repeat: { pattern: "0 0 * * *", utc: true }
    });
    await queueService.startPg<QueueName.DailyResourceCleanUp>(
      QueueJobs.DailyResourceCleanUp,
      async () => {
        try {
          logger.info(`${QueueName.DailyResourceCleanUp}: queue task started`);
          await identityAccessTokenDAL.removeExpiredTokens();
          await identityUniversalAuthClientSecretDAL.removeExpiredClientSecrets();
          await secretSharingDAL.pruneExpiredSharedSecrets();
          await secretSharingDAL.pruneExpiredSecretRequests();
          await snapshotDAL.pruneExcessSnapshots();
          await secretVersionDAL.pruneExcessVersions();
          await secretVersionV2DAL.pruneExcessVersions();
          await secretFolderVersionDAL.pruneExcessVersions();
          await serviceTokenService.notifyExpiringTokens();
          await orgService.notifyInvitedUsers();
          await auditLogDAL.pruneAuditLog();
          logger.info(`${QueueName.DailyResourceCleanUp}: queue task completed`);
        } catch (error) {
          logger.error(error, `${QueueName.DailyResourceCleanUp}: resource cleanup failed`);
          throw error;
        }
      },
      {
        batchSize: 1,
        workerCount: 1,
        pollingIntervalSeconds: 1
      }
    );
    await queueService.schedulePg(
      QueueJobs.DailyResourceCleanUp,
      appCfg.isDailyResourceCleanUpDevelopmentMode ? "*/5 * * * *" : "0 0 * * *",
      undefined,
      { tz: "UTC" }
    );
  };

  queueService.listen(QueueName.DailyResourceCleanUp, "failed", (_, err) => {
    logger.error(err, `${QueueName.DailyResourceCleanUp}: resource cleanup failed`);
  });

  return {
    startCleanUp
    init
  };
};

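The change above swaps a BullMQ repeatable job for a Postgres-backed worker plus cron schedule (queueService.startPg / schedulePg). As a rough standalone illustration of that shape only, assuming a pg-boss-style queue; the connection string, job name, and option names (pg-boss v10) below are assumptions, not taken from this diff:

import PgBoss from "pg-boss";

// Standalone sketch: register a worker, then schedule it on a UTC cron,
// mirroring the startPg + schedulePg pair above. All values are illustrative.
const boss = new PgBoss("postgres://user:pass@localhost:5432/app");

async function main() {
  await boss.start();

  // worker registration (analogous to queueService.startPg(QueueJobs.DailyResourceCleanUp, handler, opts))
  await boss.work("daily-resource-cleanup", { batchSize: 1, pollingIntervalSeconds: 1 }, async () => {
    // prune expired tokens, shared secrets, versions and snapshots here
  });

  // cron scheduling (analogous to queueService.schedulePg(job, "0 0 * * *", undefined, { tz: "UTC" }))
  await boss.schedule("daily-resource-cleanup", "0 0 * * *", undefined, { tz: "UTC" });
}

main().catch((err) => {
  console.error(err);
  process.exit(1);
});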
@@ -238,8 +238,16 @@ export const secretFolderServiceFactory = ({
      return doc;
    });

    const [folderWithFullPath] = await folderDAL.findSecretPathByFolderIds(projectId, [folder.id]);

    if (!folderWithFullPath) {
      throw new NotFoundError({
        message: `Failed to retrieve path for folder with ID '${folder.id}'`
      });
    }

    await snapshotService.performSnapshot(folder.parentId as string);
    return folder;
    return { ...folder, path: folderWithFullPath.path };
  };

  const updateManyFolders = async ({
@@ -496,8 +504,27 @@ export const secretFolderServiceFactory = ({
      return doc;
    });

    const foldersWithFullPaths = await folderDAL.findSecretPathByFolderIds(projectId, [newFolder.id, folder.id]);

    const newFolderWithFullPath = foldersWithFullPaths.find((f) => f?.id === newFolder.id);
    if (!newFolderWithFullPath) {
      throw new NotFoundError({
        message: `Failed to retrieve path for folder with ID '${newFolder.id}'`
      });
    }

    const folderWithFullPath = foldersWithFullPaths.find((f) => f?.id === folder.id);
    if (!folderWithFullPath) {
      throw new NotFoundError({
        message: `Failed to retrieve path for folder with ID '${folder.id}'`
      });
    }

    await snapshotService.performSnapshot(newFolder.parentId as string);
    return { folder: newFolder, old: folder };
    return {
      folder: { ...newFolder, path: newFolderWithFullPath.path },
      old: { ...folder, path: folderWithFullPath.path }
    };
  };

  const $checkFolderPolicy = async ({

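For readers of the folder service change: createFolder and updateFolder now return the folder enriched with its full secret path. A minimal sketch of the resulting shape, where every value below is illustrative rather than taken from the code:

// Illustrative only: the `path` field is what folderDAL.findSecretPathByFolderIds supplies.
type TFolderWithPath = { id: string; name: string; parentId: string | null; path: string };

const createdFolder: TFolderWithPath = {
  id: "folder-id-123",
  name: "backend",
  parentId: "folder-id-root",
  path: "/app/backend"
};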
@@ -1,4 +1,5 @@
import AWS, { AWSError } from "aws-sdk";
import handlebars from "handlebars";

import { getAwsConnectionConfig } from "@app/services/app-connection/aws/aws-connection-fns";
import { SecretSyncError } from "@app/services/secret-sync/secret-sync-errors";
@@ -34,18 +35,51 @@ const sleep = async () =>
    setTimeout(resolve, 1000);
  });

const getParametersByPath = async (ssm: AWS.SSM, path: string): Promise<TAWSParameterStoreRecord> => {
const getFullPath = ({ path, keySchema, environment }: { path: string; keySchema?: string; environment: string }) => {
  if (!keySchema || !keySchema.includes("/")) return path;

  if (keySchema.startsWith("/")) {
    throw new SecretSyncError({ message: `Key schema cannot contain leading '/'`, shouldRetry: false });
  }

  const keySchemaSegments = handlebars
    .compile(keySchema)({
      environment,
      secretKey: "{{secretKey}}"
    })
    .split("/");

  const pathSegments = keySchemaSegments.slice(0, keySchemaSegments.length - 1);

  if (pathSegments.some((segment) => segment.includes("{{secretKey}}"))) {
    throw new SecretSyncError({
      message: "Key schema cannot contain '/' after {{secretKey}}",
      shouldRetry: false
    });
  }

  return `${path}${pathSegments.join("/")}/`;
};

const getParametersByPath = async (
  ssm: AWS.SSM,
  path: string,
  keySchema: string | undefined,
  environment: string
): Promise<TAWSParameterStoreRecord> => {
  const awsParameterStoreSecretsRecord: TAWSParameterStoreRecord = {};
  let hasNext = true;
  let nextToken: string | undefined;
  let attempt = 0;

  const fullPath = getFullPath({ path, keySchema, environment });

  while (hasNext) {
    try {
      // eslint-disable-next-line no-await-in-loop
      const parameters = await ssm
        .getParametersByPath({
          Path: path,
          Path: fullPath,
          Recursive: false,
          WithDecryption: true,
          MaxResults: BATCH_SIZE,
@@ -59,7 +93,7 @@ const getParametersByPath = async (ssm: AWS.SSM, path: string): Promise<TAWSPara
      parameters.Parameters.forEach((parameter) => {
        if (parameter.Name) {
          // no leading slash if path is '/'
          const secKey = path.length > 1 ? parameter.Name.substring(path.length) : parameter.Name;
          const secKey = fullPath.length > 1 ? parameter.Name.substring(path.length) : parameter.Name;
          awsParameterStoreSecretsRecord[secKey] = parameter;
        }
      });
@@ -83,12 +117,19 @@ const getParametersByPath = async (ssm: AWS.SSM, path: string): Promise<TAWSPara
  return awsParameterStoreSecretsRecord;
};

const getParameterMetadataByPath = async (ssm: AWS.SSM, path: string): Promise<TAWSParameterStoreMetadataRecord> => {
const getParameterMetadataByPath = async (
  ssm: AWS.SSM,
  path: string,
  keySchema: string | undefined,
  environment: string
): Promise<TAWSParameterStoreMetadataRecord> => {
  const awsParameterStoreMetadataRecord: TAWSParameterStoreMetadataRecord = {};
  let hasNext = true;
  let nextToken: string | undefined;
  let attempt = 0;

  const fullPath = getFullPath({ path, keySchema, environment });

  while (hasNext) {
    try {
      // eslint-disable-next-line no-await-in-loop
@@ -100,7 +141,7 @@ const getParameterMetadataByPath = async (ssm: AWS.SSM, path: string): Promise<T
            {
              Key: "Path",
              Option: "OneLevel",
              Values: [path]
              Values: [fullPath]
            }
          ]
        })
@@ -112,7 +153,7 @@ const getParameterMetadataByPath = async (ssm: AWS.SSM, path: string): Promise<T
      parameters.Parameters.forEach((parameter) => {
        if (parameter.Name) {
          // no leading slash if path is '/'
          const secKey = path.length > 1 ? parameter.Name.substring(path.length) : parameter.Name;
          const secKey = fullPath.length > 1 ? parameter.Name.substring(path.length) : parameter.Name;
          awsParameterStoreMetadataRecord[secKey] = parameter;
        }
      });

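A worked example of the getFullPath helper above, re-derived standalone for illustration. The destination path "/infisical/" and environment slug "prod" are assumed values, and the real helper additionally rejects a leading '/' and any '/' after {{secretKey}}:

import handlebars from "handlebars";

// Re-derivation of the fullPath computation for one concrete (made-up) input.
const path = "/infisical/";
const keySchema = "app/{{environment}}/{{secretKey}}";

const rendered = handlebars.compile(keySchema)({ environment: "prod", secretKey: "{{secretKey}}" });
// rendered === "app/prod/{{secretKey}}"

const segments = rendered.split("/");
const fullPath = `${path}${segments.slice(0, -1).join("/")}/`;

console.log(fullPath); // "/infisical/app/prod/"
// A key schema without '/' (e.g. "{{secretKey}}_SUFFIX") leaves the destination path unchanged.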
@@ -298,9 +339,19 @@ export const AwsParameterStoreSyncFns = {

    const ssm = await getSSM(secretSync);

    const awsParameterStoreSecretsRecord = await getParametersByPath(ssm, destinationConfig.path);
    const awsParameterStoreSecretsRecord = await getParametersByPath(
      ssm,
      destinationConfig.path,
      syncOptions.keySchema,
      environment!.slug
    );

    const awsParameterStoreMetadataRecord = await getParameterMetadataByPath(ssm, destinationConfig.path);
    const awsParameterStoreMetadataRecord = await getParameterMetadataByPath(
      ssm,
      destinationConfig.path,
      syncOptions.keySchema,
      environment!.slug
    );

    const { shouldManageTags, awsParameterStoreTagsRecord } = await getParameterStoreTagsRecord(
      ssm,
@@ -400,22 +451,32 @@ export const AwsParameterStoreSyncFns = {
    await deleteParametersBatch(ssm, parametersToDelete);
  },
  getSecrets: async (secretSync: TAwsParameterStoreSyncWithCredentials): Promise<TSecretMap> => {
    const { destinationConfig } = secretSync;
    const { destinationConfig, syncOptions, environment } = secretSync;

    const ssm = await getSSM(secretSync);

    const awsParameterStoreSecretsRecord = await getParametersByPath(ssm, destinationConfig.path);
    const awsParameterStoreSecretsRecord = await getParametersByPath(
      ssm,
      destinationConfig.path,
      syncOptions.keySchema,
      environment!.slug
    );

    return Object.fromEntries(
      Object.entries(awsParameterStoreSecretsRecord).map(([key, value]) => [key, { value: value.Value ?? "" }])
    );
  },
  removeSecrets: async (secretSync: TAwsParameterStoreSyncWithCredentials, secretMap: TSecretMap) => {
    const { destinationConfig } = secretSync;
    const { destinationConfig, syncOptions, environment } = secretSync;

    const ssm = await getSSM(secretSync);

    const awsParameterStoreSecretsRecord = await getParametersByPath(ssm, destinationConfig.path);
    const awsParameterStoreSecretsRecord = await getParametersByPath(
      ssm,
      destinationConfig.path,
      syncOptions.keySchema,
      environment!.slug
    );

    const parametersToDelete: AWS.SSM.Parameter[] = [];

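Tracing getSecrets with the same assumed inputs: because secKey strips only destinationConfig.path (not the schema-derived prefix), returned keys still carry the key-schema directory segments. The parameter name and value below are made up:

// SSM returns Name "/infisical/app/prod/DB_PASSWORD" with Value "s3cr3t" (illustrative).
// secKey = Name.substring("/infisical/".length) === "app/prod/DB_PASSWORD"
const secretMap: Record<string, { value: string }> = {
  "app/prod/DB_PASSWORD": { value: "s3cr3t" }
};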
@@ -115,6 +115,44 @@ User Note: ${payload.note}`
          payloadBlocks
        };
      }
      case TriggerFeature.ACCESS_REQUEST_UPDATED: {
        const { payload } = notification;
        const messageBody = `${payload.editorFullName} (${payload.editorEmail}) has updated the ${
          payload.isTemporary ? "temporary" : "permanent"
        } access request from ${payload.requesterFullName} (${payload.requesterEmail}) to ${payload.secretPath} in the ${payload.environment} environment of ${payload.projectName}.

The following permissions are requested: ${payload.permissions.join(", ")}

View the request and approve or deny it <${payload.approvalUrl}|here>.${
          payload.editNote
            ? `
Editor Note: ${payload.editNote}`
            : ""
        }`;

        const payloadBlocks = [
          {
            type: "header",
            text: {
              type: "plain_text",
              text: "Updated access approval request pending for review",
              emoji: true
            }
          },
          {
            type: "section",
            text: {
              type: "mrkdwn",
              text: messageBody
            }
          }
        ];

        return {
          payloadMessage: messageBody,
          payloadBlocks
        };
      }
      default: {
        throw new BadRequestError({
          message: "Slack notification type not supported."

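For a sense of what the new ACCESS_REQUEST_UPDATED branch produces, here is messageBody rendered with made-up payload values (wrapped here for readability; the template emits the first sentence on a single line):

// Illustrative output only, every value is invented:
//
// John Smith (john@infisical.com) has updated the temporary access request from
// Abigail Williams (abigail@infisical.com) to /api/secrets in the Production environment of Example Project.
//
// The following permissions are requested: Read Secret, Delete Project
//
// View the request and approve or deny it <https://example.com/approvals/123|here>.
// Editor Note: Too permissive, they only need 3 days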
@@ -0,0 +1,95 @@
import { Heading, Section, Text } from "@react-email/components";
import React from "react";

import { BaseButton } from "./BaseButton";
import { BaseEmailWrapper, BaseEmailWrapperProps } from "./BaseEmailWrapper";
import { BaseLink } from "./BaseLink";

interface AccessApprovalRequestUpdatedTemplateProps
  extends Omit<BaseEmailWrapperProps, "title" | "preview" | "children"> {
  projectName: string;
  requesterFullName: string;
  requesterEmail: string;
  isTemporary: boolean;
  secretPath: string;
  environment: string;
  expiresIn: string;
  permissions: string[];
  editNote: string;
  editorFullName: string;
  editorEmail: string;
  approvalUrl: string;
}

export const AccessApprovalRequestUpdatedTemplate = ({
  projectName,
  siteUrl,
  requesterFullName,
  requesterEmail,
  isTemporary,
  secretPath,
  environment,
  expiresIn,
  permissions,
  editNote,
  editorEmail,
  editorFullName,
  approvalUrl
}: AccessApprovalRequestUpdatedTemplateProps) => {
  return (
    <BaseEmailWrapper
      title="Access Approval Request Update"
      preview="An access approval request was updated and requires your review."
      siteUrl={siteUrl}
    >
      <Heading className="text-black text-[18px] leading-[28px] text-center font-normal p-0 mx-0">
        An access approval request was updated and is pending your review for the project <strong>{projectName}</strong>
      </Heading>
      <Section className="px-[24px] mb-[28px] mt-[36px] pt-[12px] pb-[8px] border border-solid border-gray-200 rounded-md bg-gray-50">
        <Text className="text-black text-[14px] leading-[24px]">
          <strong>{editorFullName}</strong> (<BaseLink href={`mailto:${editorEmail}`}>{editorEmail}</BaseLink>) has
          updated the access request submitted by <strong>{requesterFullName}</strong> (
          <BaseLink href={`mailto:${requesterEmail}`}>{requesterEmail}</BaseLink>) for <strong>{secretPath}</strong> in
          the <strong>{environment}</strong> environment.
        </Text>

        {isTemporary && (
          <Text className="text-[14px] text-red-600 leading-[24px]">
            <strong>This access will expire {expiresIn} after approval.</strong>
          </Text>
        )}
        <Text className="text-[14px] leading-[24px] mb-[4px]">
          <strong>The following permissions are requested:</strong>
        </Text>
        {permissions.map((permission) => (
          <Text key={permission} className="text-[14px] my-[2px] leading-[24px]">
            - {permission}
          </Text>
        ))}
        <Text className="text-[14px] text-slate-700 leading-[24px]">
          <strong className="text-black">Editor Note:</strong> "{editNote}"
        </Text>
      </Section>
      <Section className="text-center">
        <BaseButton href={approvalUrl}>Review Request</BaseButton>
      </Section>
    </BaseEmailWrapper>
  );
};

export default AccessApprovalRequestUpdatedTemplate;

AccessApprovalRequestUpdatedTemplate.PreviewProps = {
  requesterFullName: "Abigail Williams",
  requesterEmail: "abigail@infisical.com",
  isTemporary: true,
  secretPath: "/api/secrets",
  environment: "Production",
  siteUrl: "https://infisical.com",
  projectName: "Example Project",
  expiresIn: "1 day",
  permissions: ["Read Secret", "Delete Project", "Create Dynamic Secret"],
  editNote: "Too permissive, they only need 3 days",
  editorEmail: "john@infisical.com",
  editorFullName: "John Smith"
} as AccessApprovalRequestUpdatedTemplateProps;

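A hedged sketch of previewing the new template outside the mailer. The @react-email/render entry point and the reuse of PreviewProps are assumptions, not something this diff sets up:

import { render } from "@react-email/render";
import React from "react";

import { AccessApprovalRequestUpdatedTemplate } from "./AccessApprovalRequestUpdatedTemplate";

// Render the component to an HTML string using its own PreviewProps as sample data.
const html = await render(
  <AccessApprovalRequestUpdatedTemplate {...AccessApprovalRequestUpdatedTemplate.PreviewProps} />
);
console.log(html.length > 0); // true once rendered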
@@ -1,4 +1,5 @@
export * from "./AccessApprovalRequestTemplate";
export * from "./AccessApprovalRequestUpdatedTemplate";
export * from "./EmailMfaTemplate";
export * from "./EmailVerificationTemplate";
export * from "./ExternalImportFailedTemplate";

@@ -8,6 +8,7 @@ import { logger } from "@app/lib/logger";

import {
  AccessApprovalRequestTemplate,
  AccessApprovalRequestUpdatedTemplate,
  EmailMfaTemplate,
  EmailVerificationTemplate,
  ExternalImportFailedTemplate,
@@ -54,6 +55,7 @@ export enum SmtpTemplates {
  EmailMfa = "emailMfa",
  UnlockAccount = "unlockAccount",
  AccessApprovalRequest = "accessApprovalRequest",
  AccessApprovalRequestUpdated = "accessApprovalRequestUpdated",
  AccessSecretRequestBypassed = "accessSecretRequestBypassed",
  SecretApprovalRequestNeedsReview = "secretApprovalRequestNeedsReview",
  // HistoricalSecretList = "historicalSecretLeakIncident", not used anymore?
@@ -96,6 +98,7 @@ const EmailTemplateMap: Record<SmtpTemplates, React.FC<any>> = {
  [SmtpTemplates.SignupEmailVerification]: SignupEmailVerificationTemplate,
  [SmtpTemplates.EmailMfa]: EmailMfaTemplate,
  [SmtpTemplates.AccessApprovalRequest]: AccessApprovalRequestTemplate,
  [SmtpTemplates.AccessApprovalRequestUpdated]: AccessApprovalRequestUpdatedTemplate,
  [SmtpTemplates.EmailVerification]: EmailVerificationTemplate,
  [SmtpTemplates.ExternalImportFailed]: ExternalImportFailedTemplate,
  [SmtpTemplates.ExternalImportStarted]: ExternalImportStartedTemplate,

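A hedged sketch of how the newly registered template might be dispatched. The recipient, subject line, and the exact sendMail shape are assumptions patterned after how the other templates in this map are sent, not confirmed by this diff:

await smtpService.sendMail({
  template: SmtpTemplates.AccessApprovalRequestUpdated,
  recipients: ["reviewer@example.com"], // hypothetical approver
  subjectLine: "Access approval request updated",
  substitutions: {
    projectName: "Example Project",
    requesterFullName: "Abigail Williams",
    requesterEmail: "abigail@infisical.com",
    isTemporary: true,
    secretPath: "/api/secrets",
    environment: "Production",
    expiresIn: "1 day",
    permissions: ["Read Secret"],
    editNote: "Reduced duration",
    editorFullName: "John Smith",
    editorEmail: "john@infisical.com",
    approvalUrl: "https://example.com/approvals/123"
  }
});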
@@ -11,7 +11,6 @@ import {
  validateOverrides
} from "@app/lib/config/env";
import { crypto } from "@app/lib/crypto/cryptography";
import { generateUserSrpKeys, getUserPrivateKey } from "@app/lib/crypto/srp";
import { BadRequestError, NotFoundError } from "@app/lib/errors";
import { logger } from "@app/lib/logger";
import { TIdentityDALFactory } from "@app/services/identity/identity-dal";
@@ -465,43 +464,15 @@ export const superAdminServiceFactory = ({
    return updatedServerCfg;
  };

  const adminSignUp = async ({
    lastName,
    firstName,
    email,
    salt,
    password,
    verifier,
    publicKey,
    protectedKey,
    protectedKeyIV,
    protectedKeyTag,
    encryptedPrivateKey,
    encryptedPrivateKeyIV,
    encryptedPrivateKeyTag,
    ip,
    userAgent
  }: TAdminSignUpDTO) => {
  const adminSignUp = async ({ lastName, firstName, email, password, ip, userAgent }: TAdminSignUpDTO) => {
    const appCfg = getConfig();

    const sanitizedEmail = email.trim().toLowerCase();
    const existingUser = await userDAL.findOne({ username: sanitizedEmail });
    if (existingUser) throw new BadRequestError({ name: "Admin sign up", message: "User already exists" });

    const privateKey = await getUserPrivateKey(password, {
      encryptionVersion: 2,
      salt,
      protectedKey,
      protectedKeyIV,
      protectedKeyTag,
      encryptedPrivateKey,
      iv: encryptedPrivateKeyIV,
      tag: encryptedPrivateKeyTag
    });

    const hashedPassword = await crypto.hashing().createHash(password, appCfg.SALT_ROUNDS);

    const { iv, tag, ciphertext, encoding } = crypto.encryption().symmetric().encryptWithRootEncryptionKey(privateKey);
    const userInfo = await userDAL.transaction(async (tx) => {
      const newUser = await userDAL.create(
        {
@@ -519,25 +490,13 @@ export const superAdminServiceFactory = ({
      );
      const userEnc = await userDAL.createUserEncryption(
        {
          salt,
          encryptionVersion: 2,
          protectedKey,
          protectedKeyIV,
          protectedKeyTag,
          publicKey,
          encryptedPrivateKey,
          iv: encryptedPrivateKeyIV,
          tag: encryptedPrivateKeyTag,
          verifier,
          userId: newUser.id,
          hashedPassword,
          serverEncryptedPrivateKey: ciphertext,
          serverEncryptedPrivateKeyIV: iv,
          serverEncryptedPrivateKeyTag: tag,
          serverEncryptedPrivateKeyEncoding: encoding
          hashedPassword
        },
        tx
      );

      return { user: newUser, enc: userEnc };
    });

@@ -587,26 +546,14 @@ export const superAdminServiceFactory = ({
      },
      tx
    );
    const { tag, encoding, ciphertext, iv } = crypto.encryption().symmetric().encryptWithRootEncryptionKey(password);
    const encKeys = await generateUserSrpKeys(sanitizedEmail, password);

    const hashedPassword = await crypto.hashing().createHash(password, appCfg.SALT_ROUNDS);

    const userEnc = await userDAL.createUserEncryption(
      {
        userId: newUser.id,
        encryptionVersion: 2,
        protectedKey: encKeys.protectedKey,
        protectedKeyIV: encKeys.protectedKeyIV,
        protectedKeyTag: encKeys.protectedKeyTag,
        publicKey: encKeys.publicKey,
        encryptedPrivateKey: encKeys.encryptedPrivateKey,
        iv: encKeys.encryptedPrivateKeyIV,
        tag: encKeys.encryptedPrivateKeyTag,
        salt: encKeys.salt,
        verifier: encKeys.verifier,
        serverEncryptedPrivateKeyEncoding: encoding,
        serverEncryptedPrivateKeyTag: tag,
        serverEncryptedPrivateKeyIV: iv,
        serverEncryptedPrivateKey: ciphertext
        hashedPassword
      },
      tx
    );

@@ -3,17 +3,8 @@ import { TEnvConfig } from "@app/lib/config/env";
export type TAdminSignUpDTO = {
  email: string;
  password: string;
  publicKey: string;
  salt: string;
  lastName?: string;
  verifier: string;
  firstName: string;
  protectedKey: string;
  protectedKeyIV: string;
  protectedKeyTag: string;
  encryptedPrivateKey: string;
  encryptedPrivateKeyIV: string;
  encryptedPrivateKeyTag: string;
  ip: string;
  userAgent: string;
};

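With the SRP and key-wrapping fields removed from TAdminSignUpDTO, an admin sign-up payload reduces to credentials plus request metadata. All values below are illustrative:

const adminSignUpBody: TAdminSignUpDTO = {
  email: "admin@example.com",
  password: "a-strong-password",
  firstName: "Ada",
  lastName: "Lovelace",
  ip: "203.0.113.10",
  userAgent: "curl/8.5.0"
};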
@@ -5,7 +5,10 @@ export default defineConfig({
  test: {
    globals: true,
    env: {
      NODE_ENV: "test"
      NODE_ENV: "test",
      E2E_TEST_ORACLE_DB_19_HOST: process.env.E2E_TEST_ORACLE_DB_19_HOST!,
      E2E_TEST_ORACLE_DB_19_USERNAME: process.env.E2E_TEST_ORACLE_DB_19_USERNAME!,
      E2E_TEST_ORACLE_DB_19_PASSWORD: process.env.E2E_TEST_ORACLE_DB_19_PASSWORD!
    },
    environment: "./e2e-test/vitest-environment-knex.ts",
    include: ["./e2e-test/**/*.spec.ts"],

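A hypothetical sketch of consuming the new vitest env vars from an e2e spec to reach the Oracle 19 test database; the port and service name (FREEPDB1) are assumptions, not defined by this diff:

import knex from "knex";

// Requires the `oracledb` driver to be installed alongside knex.
const oracleDb = knex({
  client: "oracledb",
  connection: {
    user: process.env.E2E_TEST_ORACLE_DB_19_USERNAME,
    password: process.env.E2E_TEST_ORACLE_DB_19_PASSWORD,
    connectString: `${process.env.E2E_TEST_ORACLE_DB_19_HOST}:1521/FREEPDB1`
  }
});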
docker-compose.e2e-dbs.yml (157 lines, new file)
@@ -0,0 +1,157 @@
version: '3.8'

services:
  # Oracle Databases
  oracle-db-23.8:
    image: container-registry.oracle.com/database/free:23.8.0.0
    container_name: oracle-db-23.8
    ports:
      - "1521:1521"
    environment:
      - ORACLE_PDB=pdb
      - ORACLE_PWD=pdb-password
    volumes:
      - oracle-data-23.8:/opt/oracle/oradata
    restart: unless-stopped
    healthcheck:
      test: ["CMD", "sqlplus", "-L", "system/pdb-password@//localhost:1521/FREEPDB1", "<<<", "SELECT 1 FROM DUAL;"]
      interval: 10s
      timeout: 10s
      retries: 30
      start_period: 30s

  # MySQL Databases
  mysql-8.4.6:
    image: mysql:8.4.6
    container_name: mysql-8.4.6
    ports:
      - "3306:3306"
    environment:
      - MYSQL_ROOT_PASSWORD=mysql-test
      - MYSQL_DATABASE=mysql-test
      - MYSQL_ROOT_HOST=%
      - MYSQL_USER=mysql-test
      - MYSQL_PASSWORD=mysql-test
    volumes:
      - mysql-data-8.4.6:/var/lib/mysql
    restart: unless-stopped
    healthcheck:
      test: ["CMD", "mysqladmin", "ping", "-h", "localhost", "-u", "mysql-test", "-pmysql-test"]
      interval: 10s
      timeout: 10s
      retries: 30
      start_period: 30s

  mysql-8.0.29:
    image: mysql:8.0.29
    container_name: mysql-8.0.29
    ports:
      - "3307:3306"
    environment:
      - MYSQL_ROOT_PASSWORD=mysql-test
      - MYSQL_DATABASE=mysql-test
      - MYSQL_ROOT_HOST=%
      - MYSQL_USER=mysql-test
      - MYSQL_PASSWORD=mysql-test
    volumes:
      - mysql-data-8.0.29:/var/lib/mysql
    restart: unless-stopped
    healthcheck:
      test: ["CMD", "mysqladmin", "ping", "-h", "localhost", "-u", "mysql-test", "-pmysql-test"]
      interval: 10s
      timeout: 10s
      retries: 30
      start_period: 30s

  mysql-5.7.31:
    image: mysql:5.7.31
    container_name: mysql-5.7.31
    platform: linux/amd64
    ports:
      - "3308:3306"
    environment:
      - MYSQL_ROOT_PASSWORD=mysql-test
      - MYSQL_DATABASE=mysql-test
      - MYSQL_ROOT_HOST=%
      - MYSQL_USER=mysql-test
      - MYSQL_PASSWORD=mysql-test
    volumes:
      - mysql-data-5.7.31:/var/lib/mysql
    restart: unless-stopped
    healthcheck:
      test: ["CMD", "mysqladmin", "ping", "-h", "localhost", "-u", "mysql-test", "-pmysql-test"]
      interval: 10s
      timeout: 10s
      retries: 30
      start_period: 30s

  # PostgreSQL Databases
  postgres-17:
    image: postgres:17
    platform: linux/amd64
    container_name: postgres-17
    ports:
      - "5433:5432"
    environment:
      - POSTGRES_DB=postgres-test
      - POSTGRES_USER=postgres-test
      - POSTGRES_PASSWORD=postgres-test
    volumes:
      - postgres-data-17:/var/lib/postgresql/data
    restart: unless-stopped
    healthcheck:
      test: ["CMD-SHELL", "pg_isready -U postgres-test -d postgres-test"]
      interval: 10s
      timeout: 10s
      retries: 30
      start_period: 30s

  postgres-16:
    image: postgres:16
    platform: linux/amd64
    container_name: postgres-16
    ports:
      - "5434:5432"
    environment:
      - POSTGRES_DB=postgres-test
      - POSTGRES_USER=postgres-test
      - POSTGRES_PASSWORD=postgres-test
    volumes:
      - postgres-data-16:/var/lib/postgresql/data
    restart: unless-stopped
    healthcheck:
      test: ["CMD-SHELL", "pg_isready -U postgres-test -d postgres-test"]
      interval: 10s
      timeout: 10s
      retries: 30
      start_period: 30s

  postgres-10.12:
    image: postgres:10.12
    platform: linux/amd64
    container_name: postgres-10.12
    ports:
      - "5435:5432"
    environment:
      - POSTGRES_DB=postgres-test
      - POSTGRES_USER=postgres-test
      - POSTGRES_PASSWORD=postgres-test
    volumes:
      - postgres-data-10.12:/var/lib/postgresql/data
    restart: unless-stopped
    healthcheck:
      test: ["CMD-SHELL", "pg_isready -U postgres-test -d postgres-test"]
      interval: 10s
      timeout: 10s
      retries: 30
      start_period: 30s

volumes:
  oracle-data-23.8:
  mysql-data-8.4.6:
  mysql-data-8.0.29:
  mysql-data-5.7.31:
  postgres-data-17:
  postgres-data-16:
  postgres-data-10.12:
Some files were not shown because too many files have changed in this diff.