Mirror of https://github.com/Infisical/infisical.git (synced 2025-08-03 20:23:35 +00:00)
Compare commits: daniel/cli ... maidul98-p (213 commits)
Commits (SHA1):
6333eccc4a 0af2b113df 63a7941047 edeac08cb5 019b0ae09a 1d00bb0a64 d96f1320ed 50dbefeb48
56ac2c6780 c2f16da411 8223aee2ef 5bd2af9621 b3df6ce6b5 e12eb5347d 83a4426d31 3fd1fbc355
306d2b4bd9 c2c66af1f9 7ae65478aa b1594e65c6 0bce5b1daa 207db93483 972f6a4887 6e1bece9d9
63e8bc1845 4f92663b66 a66a6790c0 bde853d280 acda627236 875afbb4d6 56f50a18dc 801c438d05
baba411502 4c20ac6564 4e8556dec2 2d7b9ec1e4 8bb9ed4394 e4246ae85f f24067542f a7f5a61f37
b5fd7698d8 61c3102573 d6a5bf9d50 70f63b3190 2b0670a409 cc25639157 5ff30aed10 656ec4bf16
0bac9a8e02 5142e6e5f6 49c735caf9 b4de2ea85d 8b8baf1ef2 2a89b872c5 2d2d9a5987 a20a60850b
35e38c23dd b79e61c86b e555d3129d a41883137c c414bf6c39 9b782a9da6 497c0cf63d 93761f37ea
68e530e5d2 20b1cdf909 4bae65cc55 6da5f12855 7a242c4976 b01d381993 1ac18fcf0c 8d5ef5f4d9
35b5253853 99d59a38d5 9ab1fce0e0 9992fbf3dd 3ca596d4af 1c95b3abe7 1f3c72b997 e55b981cea
49d4e67e07 a54d156bf0 f3fc898232 c61602370e 5178663797 f04f3aee25 e5333e2718 f27d9f8cee
cbd568b714 b330c5570d d222bbf131 961c6391a8 d68d7df0f8 c44c7810ce b7893a6a72 7a3d425b0e
bd570bd02f b94ffb8a82 246b8728a4 00415e1a87 ad354c106e 26778d92d3 b135ba263c 9b7ef55ad7
872f8bdad8 80b0dc6895 b067751027 f2b3b7b726 2d51445dd9 20898c00c6 2200bd646e fb69236f47
918734b26b 729c75112b 738e8cfc5c 1ba7a31e0d 233a4f7d77 44ff1abd74 08cb105fe4 62aebe2fd4
5c0542c5a3 6874bff302 e1b8aa8347 a041fd4762 1534ba516a f7183347dc 105b8d6493 b9d35058bf
22a3c46902 be8232dc93 8c566a5ff7 0a124093d6 088cb72621 6daeed68a0 31a499c9cd 04491ee1b7
ad79ee56e4 c72280e9ab 032c5b5620 aa5cd0fd0f 358ca3decd 5bad4adbdf 0899fdb7d5 b893c3e690
cee13a0e8b 3745b65148 a0f0593e2d ea6e739b46 12f4868957 4d43a77f6c 3f3c15d715 ca453df9e9
c959fa6fdd d11ded9abc 714a3186a9 20d1572220 21290d8e6c a339c473d5 718cabe49b a087deb1eb
7ce283e891 52cf38449b 8d6f76698a 71cc84c9a5 5d95d7f31d 2f15e0e767 6e1b29025b 1dd451f221
fcc18996d3 bcaafcb49f b4558981c1 64099908eb 98e0c1b4ca 4d1a41e24e 43f676b078 130ec68288
c4d5c1a454 e1407cc093 1b38d969df 6e3d5a8c7c e2a447dd05 2522cc1ede 56876a77e4 0111ee9efb
581ffc613c 03848b30a2 5537b00a26 d71d59e399 8f8553760a 708c2af979 fa7587900e afa1e7e139
2aea73861b 2002db2007 26148b633b 4b463c6fde e6823c520e ab83e61068 08420cc38d 62aa23a059
f9957e111c 1193e33890 ec64753795 c908310f6e ee2b8a594a 3ae27e088f 393c0c9e90 5e453ab8a6
273c78c0a5 1bcc742466 1fc9e60254 126e385046 2f932ad103
.github/resources/rename_migration_files.py (vendored, new file, 26 lines)
@@ -0,0 +1,26 @@
import os
from datetime import datetime, timedelta

def rename_migrations():
    migration_folder = "./backend/src/db/migrations"
    with open("added_files.txt", "r") as file:
        changed_files = file.readlines()

    # Find the latest file among the changed files
    latest_timestamp = datetime.now()  # utc time
    for file_path in changed_files:
        file_path = file_path.strip()
        # each new file bump by 1s
        latest_timestamp = latest_timestamp + timedelta(seconds=1)

        new_filename = os.path.join(migration_folder, latest_timestamp.strftime("%Y%m%d%H%M%S") + f"_{file_path.split('_')[1]}")
        old_filename = os.path.join(migration_folder, file_path)
        os.rename(old_filename, new_filename)
        print(f"Renamed {old_filename} to {new_filename}")

    if len(changed_files) == 0:
        print("No new files added to migration folder")

if __name__ == "__main__":
    rename_migrations()
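For context, a minimal sketch (not part of the diff) of the renaming scheme the script applies. The filenames below are hypothetical stand-ins for entries in added_files.txt; the real script renames files on disk under backend/src/db/migrations.

# Sketch only: each newly added migration filename is remapped onto a fresh,
# strictly increasing timestamp, one second apart per file.
from datetime import datetime, timedelta

added_files = [
    "20240301000000_add-secret-table.ts",   # hypothetical example entries
    "20240301000001_add-secret-index.ts",
]

latest_timestamp = datetime.now()
for name in added_files:
    latest_timestamp += timedelta(seconds=1)
    new_name = latest_timestamp.strftime("%Y%m%d%H%M%S") + f"_{name.split('_')[1]}"
    print(f"{name} -> {new_name}")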
.github/workflows/build-patroni-docker-img.yml (vendored, new file, 38 lines)
@@ -0,0 +1,38 @@
name: Build patroni
on: [workflow_dispatch]

jobs:
  patroni-image:
    name: Build patroni
    runs-on: ubuntu-latest
    steps:
      - name: ☁️ Checkout source
        uses: actions/checkout@v3
        with:
          repository: 'zalando/patroni'
      - name: Save commit hashes for tag
        id: commit
        uses: pr-mpt/actions-commit-hash@v2
      - name: 🔧 Set up Docker Buildx
        uses: docker/setup-buildx-action@v2
      - name: 🐋 Login to Docker Hub
        uses: docker/login-action@v2
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}
      - name: Set up Depot CLI
        uses: depot/setup-action@v1
      - name: 🏗️ Build backend and push to docker hub
        uses: depot/build-push-action@v1
        with:
          project: 64mmf0n610
          token: ${{ secrets.DEPOT_PROJECT_TOKEN }}
          push: true
          context: .
          file: Dockerfile
          tags: |
            infisical/patroni:${{ steps.commit.outputs.short }}
            infisical/patroni:latest
          platforms: linux/amd64,linux/arm64
@@ -5,6 +5,7 @@ on:
    types: [opened, synchronize]
    paths:
      - "backend/src/server/routes/**"
      - "backend/src/ee/routes/**"

jobs:
  check-be-api-changes:
@@ -1,6 +1,8 @@
name: Build and release CLI

on:
  workflow_dispatch:

  push:
    # run only against tags
    tags:
@@ -14,6 +16,12 @@ jobs:
  cli-integration-tests:
    name: Run tests before deployment
    uses: ./.github/workflows/run-cli-tests.yml
    secrets:
      CLI_TESTS_UA_CLIENT_ID: ${{ secrets.CLI_TESTS_UA_CLIENT_ID }}
      CLI_TESTS_UA_CLIENT_SECRET: ${{ secrets.CLI_TESTS_UA_CLIENT_SECRET }}
      CLI_TESTS_SERVICE_TOKEN: ${{ secrets.CLI_TESTS_SERVICE_TOKEN }}
      CLI_TESTS_PROJECT_ID: ${{ secrets.CLI_TESTS_PROJECT_ID }}
      CLI_TESTS_ENV_SLUG: ${{ secrets.CLI_TESTS_ENV_SLUG }}

  goreleaser:
    runs-on: ubuntu-20.04
.github/workflows/run-cli-tests.yml (vendored, 13 lines changed)
@@ -6,7 +6,20 @@ on:
    paths:
      - "cli/**"

  workflow_dispatch:

  workflow_call:
    secrets:
      CLI_TESTS_UA_CLIENT_ID:
        required: true
      CLI_TESTS_UA_CLIENT_SECRET:
        required: true
      CLI_TESTS_SERVICE_TOKEN:
        required: true
      CLI_TESTS_PROJECT_ID:
        required: true
      CLI_TESTS_ENV_SLUG:
        required: true

jobs:
  test:
.github/workflows/update-be-new-migration-latest-timestamp.yml (vendored, new file, 48 lines)
@@ -0,0 +1,48 @@
name: Rename Migrations

on:
  pull_request:
    types: [closed]
    paths:
      - 'backend/src/db/migrations/**'

jobs:
  rename:
    runs-on: ubuntu-latest
    if: github.event.pull_request.merged == true

    steps:
      - name: Check out repository
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Get list of newly added files in migration folder
        run: |
          git diff --name-status HEAD^ HEAD backend/src/db/migrations | grep '^A' | cut -f2 | xargs -n1 basename > added_files.txt
          if [ ! -s added_files.txt ]; then
            echo "No new files added. Skipping"
            echo "SKIP_RENAME=true" >> $GITHUB_ENV
          fi

      - name: Script to rename migrations
        if: env.SKIP_RENAME != 'true'
        run: python .github/resources/rename_migration_files.py

      - name: Commit and push changes
        if: env.SKIP_RENAME != 'true'
        run: |
          git config user.name github-actions
          git config user.email github-actions@github.com
          git add ./backend/src/db/migrations
          rm added_files.txt
          git commit -m "chore: renamed new migration files to latest timestamp (gh-action)"

      - name: Create Pull Request
        if: env.SKIP_RENAME != 'true'
        uses: peter-evans/create-pull-request@v6
        with:
          token: ${{ secrets.GITHUB_TOKEN }}
          commit-message: 'chore: renamed new migration files to latest UTC (gh-action)'
          title: 'GH Action: rename new migration file timestamp'
          branch-suffix: timestamp
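As a rough local equivalent (not part of the diff) of the detection step above, sketched in Python instead of the workflow's shell one-liner; it assumes a git checkout where HEAD^ exists, which is why the workflow itself checks out with fetch-depth: 0.

# Sketch only: reproduce the "newly added migration files" detection locally.
import os
import subprocess

MIGRATIONS_DIR = "backend/src/db/migrations"

# `git diff --name-status` prints lines like "A\tbackend/src/db/migrations/<timestamp>_<name>.ts"
out = subprocess.run(
    ["git", "diff", "--name-status", "HEAD^", "HEAD", MIGRATIONS_DIR],
    capture_output=True, text=True, check=True,
).stdout

added = [
    os.path.basename(line.split("\t", 1)[1])
    for line in out.splitlines()
    if line.startswith("A")
]

with open("added_files.txt", "w") as f:
    f.write("\n".join(added))

if not added:
    print("No new files added. Skipping")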
@@ -76,7 +76,7 @@ Check out the [Quickstart Guides](https://infisical.com/docs/getting-started/int

 | Use Infisical Cloud | Deploy Infisical on premise |
 | ------------------- | --------------------------- |
-| The fastest and most reliable way to <br> get started with Infisical is signing up <br> for free to [Infisical Cloud](https://app.infisical.com/login). | <a href="https://infisical.com/docs/self-hosting/deployment-options/aws-ec2"><img src=".github/images/deploy-to-aws.png" width="150" width="300" /></a> <a href="https://infisical.com/docs/self-hosting/deployment-options/digital-ocean-marketplace" alt="Deploy to DigitalOcean"> <img width="217" alt="Deploy to DO" src="https://www.deploytodo.com/do-btn-blue.svg"/> </a> <br> View all [deployment options](https://infisical.com/docs/self-hosting/overview) |
+| The fastest and most reliable way to <br> get started with Infisical is signing up <br> for free to [Infisical Cloud](https://app.infisical.com/login). | <br> View all [deployment options](https://infisical.com/docs/self-hosting/overview) |

 ### Run Infisical locally
@@ -942,6 +942,113 @@ describe.each([{ auth: AuthMode.JWT }, { auth: AuthMode.IDENTITY_ACCESS_TOKEN }]
    const secrets = await getSecrets(seedData1.environment.slug, path);
    expect(secrets).toEqual([]);
  });

  test.each(testRawSecrets)("Bulk create secret raw in path $path", async ({ path, secret }) => {
    const createSecretReqBody = {
      projectSlug: seedData1.project.slug,
      environment: seedData1.environment.slug,
      secretPath: path,
      secrets: [
        {
          secretKey: secret.key,
          secretValue: secret.value,
          secretComment: secret.comment
        }
      ]
    };
    const createSecRes = await testServer.inject({
      method: "POST",
      url: `/api/v3/secrets/batch/raw`,
      headers: {
        authorization: `Bearer ${authToken}`
      },
      body: createSecretReqBody
    });
    expect(createSecRes.statusCode).toBe(200);
    const createdSecretPayload = JSON.parse(createSecRes.payload);
    expect(createdSecretPayload).toHaveProperty("secrets");

    // fetch secrets
    const secrets = await getSecrets(seedData1.environment.slug, path);
    expect(secrets).toEqual(
      expect.arrayContaining([
        expect.objectContaining({
          key: secret.key,
          value: secret.value,
          type: SecretType.Shared
        })
      ])
    );

    await deleteRawSecret({ path, key: secret.key });
  });

  test.each(testRawSecrets)("Bulk update secret raw in path $path", async ({ secret, path }) => {
    await createRawSecret({ path, ...secret });
    const updateSecretReqBody = {
      projectSlug: seedData1.project.slug,
      environment: seedData1.environment.slug,
      secretPath: path,
      secrets: [
        {
          secretValue: "new-value",
          secretKey: secret.key
        }
      ]
    };
    const updateSecRes = await testServer.inject({
      method: "PATCH",
      url: `/api/v3/secrets/batch/raw`,
      headers: {
        authorization: `Bearer ${authToken}`
      },
      body: updateSecretReqBody
    });
    expect(updateSecRes.statusCode).toBe(200);
    const updatedSecretPayload = JSON.parse(updateSecRes.payload);
    expect(updatedSecretPayload).toHaveProperty("secrets");

    // fetch secrets
    const secrets = await getSecrets(seedData1.environment.slug, path);
    expect(secrets).toEqual(
      expect.arrayContaining([
        expect.objectContaining({
          key: secret.key,
          value: "new-value",
          version: 2,
          type: SecretType.Shared
        })
      ])
    );

    await deleteRawSecret({ path, key: secret.key });
  });

  test.each(testRawSecrets)("Bulk delete secret raw in path $path", async ({ path, secret }) => {
    await createRawSecret({ path, ...secret });

    const deletedSecretReqBody = {
      projectSlug: seedData1.project.slug,
      environment: seedData1.environment.slug,
      secretPath: path,
      secrets: [{ secretKey: secret.key }]
    };
    const deletedSecRes = await testServer.inject({
      method: "DELETE",
      url: `/api/v3/secrets/batch/raw`,
      headers: {
        authorization: `Bearer ${authToken}`
      },
      body: deletedSecretReqBody
    });
    expect(deletedSecRes.statusCode).toBe(200);
    const deletedSecretPayload = JSON.parse(deletedSecRes.payload);
    expect(deletedSecretPayload).toHaveProperty("secrets");

    // fetch secrets
    const secrets = await getSecrets(seedData1.environment.slug, path);
    expect(secrets).toEqual([]);
  });
  }
);
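As a rough usage sketch (not from the diff), the three bulk endpoints these tests exercise can be called along the following lines. The base URL, token, project slug, environment and secret names are placeholders, and the only response property assumed is "secrets", matching the assertions above.

# Sketch only: calling the bulk raw-secret endpoints covered by the tests above.
import requests

BASE_URL = "http://localhost:8080"          # placeholder
HEADERS = {"Authorization": "Bearer <auth-token>"}  # placeholder token

body = {
    "projectSlug": "example-project",
    "environment": "dev",
    "secretPath": "/",
    "secrets": [{"secretKey": "DB_URL", "secretValue": "postgres://example", "secretComment": ""}],
}

# Bulk create
r = requests.post(f"{BASE_URL}/api/v3/secrets/batch/raw", json=body, headers=HEADERS)
r.raise_for_status()

# Bulk update (the test payload sends only secretKey plus the new secretValue)
body["secrets"] = [{"secretKey": "DB_URL", "secretValue": "new-value"}]
r = requests.patch(f"{BASE_URL}/api/v3/secrets/batch/raw", json=body, headers=HEADERS)
r.raise_for_status()

# Bulk delete (only secretKey per entry)
body["secrets"] = [{"secretKey": "DB_URL"}]
r = requests.delete(f"{BASE_URL}/api/v3/secrets/batch/raw", json=body, headers=HEADERS)
r.raise_for_status()
print(r.json().get("secrets"))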
backend/package-lock.json (generated, 593 lines changed)
Condensed view of the lockfile changes (the full diff is mechanical: per-entry resolved URLs and integrity hashes):

New entries: cassandra-driver ^4.7.2 (bringing in @types/long 5.0.0, adm-zip 0.5.12 and long), ldapjs ^3.0.7 together with the @ldapjs/* packages (asn1 2.0.0, attribute 1.0.0, change 1.0.0, controls 2.1.0, dn 1.1.0, filter 2.1.1, messages 1.3.0, protocol 1.2.1), and oracledb ^6.4.0. The previous ldapjs 2.3.3 entry remains only as a nested dependency under node_modules/ldapauth-fork.

Version bumps: mysql2 ^3.9.1 -> ^3.9.4, body-parser 1.20.1 -> 1.20.2 (raw-body 2.5.1 -> 2.5.2, content-type ~1.0.4 -> ~1.0.5), express 4.18.2 -> 4.19.2 (cookie 0.5.0 -> 0.6.0), follow-redirects 1.15.4 -> 1.15.6, tar 6.2.0 -> 6.2.1, postcss 8.4.32 -> 8.4.38 (source-map-js 1.0.2 -> 1.2.0), vite 5.0.12 -> 5.2.9, rollup 4.8.0 -> 4.14.3 (adding the linux-arm-musleabihf, linux-powerpc64le-gnu and linux-s390x-gnu platform binaries), and esbuild 0.19.9 -> 0.20.2 (adding @esbuild/aix-ppc64).
|
||||
"@esbuild/linux-ia32": "0.20.2",
|
||||
"@esbuild/linux-loong64": "0.20.2",
|
||||
"@esbuild/linux-mips64el": "0.20.2",
|
||||
"@esbuild/linux-ppc64": "0.20.2",
|
||||
"@esbuild/linux-riscv64": "0.20.2",
|
||||
"@esbuild/linux-s390x": "0.20.2",
|
||||
"@esbuild/linux-x64": "0.20.2",
|
||||
"@esbuild/netbsd-x64": "0.20.2",
|
||||
"@esbuild/openbsd-x64": "0.20.2",
|
||||
"@esbuild/sunos-x64": "0.20.2",
|
||||
"@esbuild/win32-arm64": "0.20.2",
|
||||
"@esbuild/win32-ia32": "0.20.2",
|
||||
"@esbuild/win32-x64": "0.20.2"
|
||||
}
|
||||
},
|
||||
"node_modules/vitest": {
|
||||
|
@@ -96,6 +96,7 @@
    "axios-retry": "^4.0.0",
    "bcrypt": "^5.1.1",
    "bullmq": "^5.3.3",
    "cassandra-driver": "^4.7.2",
    "dotenv": "^16.4.1",
    "fastify": "^4.26.0",
    "fastify-plugin": "^4.5.1",
@@ -105,6 +106,7 @@
    "jsonwebtoken": "^9.0.2",
    "jsrp": "^0.2.4",
    "knex": "^3.0.1",
    "ldapjs": "^3.0.7",
    "libsodium-wrappers": "^0.7.13",
    "lodash.isequal": "^4.5.0",
    "ms": "^2.1.3",
@@ -112,6 +114,7 @@
    "nanoid": "^5.0.4",
    "nodemailer": "^6.9.9",
    "ora": "^7.0.1",
    "oracledb": "^6.4.0",
    "passport-github": "^1.1.0",
    "passport-gitlab2": "^5.0.0",
    "passport-google-oauth20": "^2.0.0",
backend/src/@types/knex.d.ts (4 changes, vendored)
@@ -74,6 +74,9 @@ import {
  TLdapConfigs,
  TLdapConfigsInsert,
  TLdapConfigsUpdate,
  TLdapGroupMaps,
  TLdapGroupMapsInsert,
  TLdapGroupMapsUpdate,
  TOrganizations,
  TOrganizationsInsert,
  TOrganizationsUpdate,
@@ -398,6 +401,7 @@ declare module "knex/types/tables" {
    >;
    [TableName.SamlConfig]: Knex.CompositeTableType<TSamlConfigs, TSamlConfigsInsert, TSamlConfigsUpdate>;
    [TableName.LdapConfig]: Knex.CompositeTableType<TLdapConfigs, TLdapConfigsInsert, TLdapConfigsUpdate>;
    [TableName.LdapGroupMap]: Knex.CompositeTableType<TLdapGroupMaps, TLdapGroupMapsInsert, TLdapGroupMapsUpdate>;
    [TableName.OrgBot]: Knex.CompositeTableType<TOrgBots, TOrgBotsInsert, TOrgBotsUpdate>;
    [TableName.AuditLog]: Knex.CompositeTableType<TAuditLogs, TAuditLogsInsert, TAuditLogsUpdate>;
    [TableName.GitAppInstallSession]: Knex.CompositeTableType<
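With the LdapGroupMap entry registered in the knex table map above, queries against that table become type-checked. A minimal sketch under that assumption; the helper below is illustrative and not part of the diff:

import { Knex } from "knex";

import { TableName } from "@app/db/schemas";

// Illustrative helper: returns the group maps attached to a given LDAP config.
export const findGroupMapsByConfigId = async (db: Knex, ldapConfigId: string) =>
  db(TableName.LdapGroupMap).where({ ldapConfigId }).select("id", "ldapGroupCN", "groupId");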
@@ -42,6 +42,7 @@ export async function up(knex: Knex): Promise<void> {
  await knex.transaction(async (tx) => {
    const duplicateRows = await tx(TableName.OrgMembership)
      .select("userId", "orgId") // Select the userId and orgId so we can group by them
      .whereNotNull("userId") // Ensure that the userId is not null
      .count("* as cnt") // Count the number of rows for each userId and orgId, so we can make sure there are more than 1 row (a duplicate)
      .groupBy("userId", "orgId")
      .havingRaw("count(*) > ?", [1]); // Using havingRaw for direct SQL expressions
@@ -0,0 +1,15 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  await knex.schema.alterTable(TableName.UserGroupMembership, (t) => {
    t.boolean("isPending").notNullable().defaultTo(false);
  });
}

export async function down(knex: Knex): Promise<void> {
  await knex.schema.alterTable(TableName.UserGroupMembership, (t) => {
    t.dropColumn("isPending");
  });
}
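The new isPending flag lets a membership row exist before the user has completed signup; flipping it later is a single update. A hedged sketch, with a hypothetical helper name:

import { Knex } from "knex";

import { TableName } from "@app/db/schemas";

// Hypothetical helper: marks all of a user's pending group memberships as active.
export const activatePendingGroupMemberships = async (db: Knex, userId: string) =>
  db(TableName.UserGroupMembership).where({ userId, isPending: true }).update({ isPending: false });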
@@ -0,0 +1,34 @@
import { Knex } from "knex";

import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";

export async function up(knex: Knex): Promise<void> {
  if (!(await knex.schema.hasTable(TableName.LdapGroupMap))) {
    await knex.schema.createTable(TableName.LdapGroupMap, (t) => {
      t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
      t.uuid("ldapConfigId").notNullable();
      t.foreign("ldapConfigId").references("id").inTable(TableName.LdapConfig).onDelete("CASCADE");
      t.string("ldapGroupCN").notNullable();
      t.uuid("groupId").notNullable();
      t.foreign("groupId").references("id").inTable(TableName.Groups).onDelete("CASCADE");
      t.unique(["ldapGroupCN", "groupId", "ldapConfigId"]);
    });
  }

  await createOnUpdateTrigger(knex, TableName.LdapGroupMap);

  await knex.schema.alterTable(TableName.LdapConfig, (t) => {
    t.string("groupSearchBase").notNullable().defaultTo("");
    t.string("groupSearchFilter").notNullable().defaultTo("");
  });
}

export async function down(knex: Knex): Promise<void> {
  await knex.schema.dropTableIfExists(TableName.LdapGroupMap);
  await dropOnUpdateTrigger(knex, TableName.LdapGroupMap);
  await knex.schema.alterTable(TableName.LdapConfig, (t) => {
    t.dropColumn("groupSearchBase");
    t.dropColumn("groupSearchFilter");
  });
}
@@ -0,0 +1,15 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  await knex.schema.alterTable(TableName.LdapConfig, (t) => {
    t.string("searchFilter").notNullable().defaultTo("");
  });
}

export async function down(knex: Knex): Promise<void> {
  await knex.schema.alterTable(TableName.LdapConfig, (t) => {
    t.dropColumn("searchFilter");
  });
}
backend/src/db/migrations/20240426171026_test.ts (new file, 1 line)
@@ -0,0 +1 @@
@@ -22,6 +22,7 @@ export * from "./incident-contacts";
export * from "./integration-auths";
export * from "./integrations";
export * from "./ldap-configs";
export * from "./ldap-group-maps";
export * from "./models";
export * from "./org-bots";
export * from "./org-memberships";
@@ -23,7 +23,10 @@ export const LdapConfigsSchema = z.object({
  caCertIV: z.string(),
  caCertTag: z.string(),
  createdAt: z.date(),
  updatedAt: z.date()
  updatedAt: z.date(),
  groupSearchBase: z.string().default(""),
  groupSearchFilter: z.string().default(""),
  searchFilter: z.string().default("")
});

export type TLdapConfigs = z.infer<typeof LdapConfigsSchema>;
backend/src/db/schemas/ldap-group-maps.ts (new file, 19 lines)
@@ -0,0 +1,19 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.

import { z } from "zod";

import { TImmutableDBKeys } from "./models";

export const LdapGroupMapsSchema = z.object({
  id: z.string().uuid(),
  ldapConfigId: z.string().uuid(),
  ldapGroupCN: z.string(),
  groupId: z.string().uuid()
});

export type TLdapGroupMaps = z.infer<typeof LdapGroupMapsSchema>;
export type TLdapGroupMapsInsert = Omit<z.input<typeof LdapGroupMapsSchema>, TImmutableDBKeys>;
export type TLdapGroupMapsUpdate = Partial<Omit<z.input<typeof LdapGroupMapsSchema>, TImmutableDBKeys>>;
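The generated schema can also validate rows at runtime before they cross an API boundary. A small illustrative sketch; the UUID values are placeholders:

import { LdapGroupMapsSchema } from "@app/db/schemas";

// parse() throws if the object does not match the generated schema.
const groupMap = LdapGroupMapsSchema.parse({
  id: "0b1e6a52-8b44-4c39-9a3e-111111111111",
  ldapConfigId: "7f3d2c10-5a6b-4d7e-8f90-222222222222",
  ldapGroupCN: "engineering",
  groupId: "c4a1b2d3-e5f6-4a7b-8c9d-333333333333"
});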
@@ -60,6 +60,7 @@ export enum TableName {
  SecretRotationOutput = "secret_rotation_outputs",
  SamlConfig = "saml_configs",
  LdapConfig = "ldap_configs",
  LdapGroupMap = "ldap_group_maps",
  AuditLog = "audit_logs",
  GitAppInstallSession = "git_app_install_sessions",
  GitAppOrg = "git_app_org",
@@ -12,7 +12,8 @@ export const UserGroupMembershipSchema = z.object({
  userId: z.string().uuid(),
  groupId: z.string().uuid(),
  createdAt: z.date(),
  updatedAt: z.date()
  updatedAt: z.date(),
  isPending: z.boolean().default(false)
});

export type TUserGroupMembership = z.infer<typeof UserGroupMembershipSchema>;
@@ -14,7 +14,9 @@ import { FastifyRequest } from "fastify";
|
||||
import LdapStrategy from "passport-ldapauth";
|
||||
import { z } from "zod";
|
||||
|
||||
import { LdapConfigsSchema } from "@app/db/schemas";
|
||||
import { LdapConfigsSchema, LdapGroupMapsSchema } from "@app/db/schemas";
|
||||
import { TLDAPConfig } from "@app/ee/services/ldap-config/ldap-config-types";
|
||||
import { isValidLdapFilter, searchGroups } from "@app/ee/services/ldap-config/ldap-fns";
|
||||
import { getConfig } from "@app/lib/config/env";
|
||||
import { logger } from "@app/lib/logger";
|
||||
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
|
||||
@@ -50,20 +52,38 @@ export const registerLdapRouter = async (server: FastifyZodProvider) => {
|
||||
// eslint-disable-next-line
|
||||
async (req: IncomingMessage, user, cb) => {
|
||||
try {
|
||||
const ldapConfig = (req as unknown as FastifyRequest).ldapConfig as TLDAPConfig;
|
||||
|
||||
let groups: { dn: string; cn: string }[] | undefined;
|
||||
if (ldapConfig.groupSearchBase) {
|
||||
const groupFilter = "(|(memberUid={{.Username}})(member={{.UserDN}})(uniqueMember={{.UserDN}}))";
|
||||
const groupSearchFilter = (ldapConfig.groupSearchFilter || groupFilter)
|
||||
.replace(/{{\.Username}}/g, user.uid)
|
||||
.replace(/{{\.UserDN}}/g, user.dn);
|
||||
|
||||
if (!isValidLdapFilter(groupSearchFilter)) {
|
||||
throw new Error("Generated LDAP search filter is invalid.");
|
||||
}
|
||||
|
||||
groups = await searchGroups(ldapConfig, groupSearchFilter, ldapConfig.groupSearchBase);
|
||||
}
|
||||
|
||||
const { isUserCompleted, providerAuthToken } = await server.services.ldap.ldapLogin({
|
||||
ldapConfigId: ldapConfig.id,
|
||||
externalId: user.uidNumber,
|
||||
username: user.uid,
|
||||
firstName: user.givenName,
|
||||
lastName: user.sn,
|
||||
firstName: user.givenName ?? user.cn ?? "",
|
||||
lastName: user.sn ?? "",
|
||||
emails: user.mail ? [user.mail] : [],
|
||||
groups,
|
||||
relayState: ((req as unknown as FastifyRequest).body as { RelayState?: string }).RelayState,
|
||||
orgId: (req as unknown as FastifyRequest).ldapConfig.organization
|
||||
});
|
||||
|
||||
return cb(null, { isUserCompleted, providerAuthToken });
|
||||
} catch (err) {
|
||||
logger.error(err);
|
||||
return cb(err, false);
|
||||
} catch (error) {
|
||||
logger.error(error);
|
||||
return cb(error, false);
|
||||
}
|
||||
}
|
||||
)
|
||||
@@ -117,6 +137,9 @@ export const registerLdapRouter = async (server: FastifyZodProvider) => {
|
||||
bindDN: z.string(),
|
||||
bindPass: z.string(),
|
||||
searchBase: z.string(),
|
||||
searchFilter: z.string(),
|
||||
groupSearchBase: z.string(),
|
||||
groupSearchFilter: z.string(),
|
||||
caCert: z.string()
|
||||
})
|
||||
}
|
||||
@@ -148,6 +171,12 @@ export const registerLdapRouter = async (server: FastifyZodProvider) => {
|
||||
bindDN: z.string().trim(),
|
||||
bindPass: z.string().trim(),
|
||||
searchBase: z.string().trim(),
|
||||
searchFilter: z.string().trim().default("(uid={{username}})"),
|
||||
groupSearchBase: z.string().trim(),
|
||||
groupSearchFilter: z
|
||||
.string()
|
||||
.trim()
|
||||
.default("(|(memberUid={{.Username}})(member={{.UserDN}})(uniqueMember={{.UserDN}}))"),
|
||||
caCert: z.string().trim().default("")
|
||||
}),
|
||||
response: {
|
||||
@@ -183,6 +212,9 @@ export const registerLdapRouter = async (server: FastifyZodProvider) => {
|
||||
bindDN: z.string().trim(),
|
||||
bindPass: z.string().trim(),
|
||||
searchBase: z.string().trim(),
|
||||
searchFilter: z.string().trim(),
|
||||
groupSearchBase: z.string().trim(),
|
||||
groupSearchFilter: z.string().trim(),
|
||||
caCert: z.string().trim()
|
||||
})
|
||||
.partial()
|
||||
@@ -204,4 +236,134 @@ export const registerLdapRouter = async (server: FastifyZodProvider) => {
|
||||
return ldap;
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "GET",
|
||||
url: "/config/:configId/group-maps",
|
||||
config: {
|
||||
rateLimit: readLimit
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT]),
|
||||
schema: {
|
||||
params: z.object({
|
||||
configId: z.string().trim()
|
||||
}),
|
||||
response: {
|
||||
200: z.array(
|
||||
z.object({
|
||||
id: z.string(),
|
||||
ldapConfigId: z.string(),
|
||||
ldapGroupCN: z.string(),
|
||||
group: z.object({
|
||||
id: z.string(),
|
||||
name: z.string(),
|
||||
slug: z.string()
|
||||
})
|
||||
})
|
||||
)
|
||||
}
|
||||
},
|
||||
handler: async (req) => {
|
||||
const ldapGroupMaps = await server.services.ldap.getLdapGroupMaps({
|
||||
actor: req.permission.type,
|
||||
actorId: req.permission.id,
|
||||
orgId: req.permission.orgId,
|
||||
actorAuthMethod: req.permission.authMethod,
|
||||
actorOrgId: req.permission.orgId,
|
||||
ldapConfigId: req.params.configId
|
||||
});
|
||||
return ldapGroupMaps;
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "POST",
|
||||
url: "/config/:configId/group-maps",
|
||||
config: {
|
||||
rateLimit: readLimit
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT]),
|
||||
schema: {
|
||||
params: z.object({
|
||||
configId: z.string().trim()
|
||||
}),
|
||||
body: z.object({
|
||||
ldapGroupCN: z.string().trim(),
|
||||
groupSlug: z.string().trim()
|
||||
}),
|
||||
response: {
|
||||
200: LdapGroupMapsSchema
|
||||
}
|
||||
},
|
||||
handler: async (req) => {
|
||||
const ldapGroupMap = await server.services.ldap.createLdapGroupMap({
|
||||
actor: req.permission.type,
|
||||
actorId: req.permission.id,
|
||||
orgId: req.permission.orgId,
|
||||
actorAuthMethod: req.permission.authMethod,
|
||||
actorOrgId: req.permission.orgId,
|
||||
ldapConfigId: req.params.configId,
|
||||
...req.body
|
||||
});
|
||||
return ldapGroupMap;
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "DELETE",
|
||||
url: "/config/:configId/group-maps/:groupMapId",
|
||||
config: {
|
||||
rateLimit: readLimit
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT]),
|
||||
schema: {
|
||||
params: z.object({
|
||||
configId: z.string().trim(),
|
||||
groupMapId: z.string().trim()
|
||||
}),
|
||||
response: {
|
||||
200: LdapGroupMapsSchema
|
||||
}
|
||||
},
|
||||
handler: async (req) => {
|
||||
const ldapGroupMap = await server.services.ldap.deleteLdapGroupMap({
|
||||
actor: req.permission.type,
|
||||
actorId: req.permission.id,
|
||||
orgId: req.permission.orgId,
|
||||
actorAuthMethod: req.permission.authMethod,
|
||||
actorOrgId: req.permission.orgId,
|
||||
ldapConfigId: req.params.configId,
|
||||
ldapGroupMapId: req.params.groupMapId
|
||||
});
|
||||
return ldapGroupMap;
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "POST",
|
||||
url: "/config/:configId/test-connection",
|
||||
config: {
|
||||
rateLimit: readLimit
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT]),
|
||||
schema: {
|
||||
params: z.object({
|
||||
configId: z.string().trim()
|
||||
}),
|
||||
response: {
|
||||
200: z.boolean()
|
||||
}
|
||||
},
|
||||
handler: async (req) => {
|
||||
const result = await server.services.ldap.testLDAPConnection({
|
||||
actor: req.permission.type,
|
||||
actorId: req.permission.id,
|
||||
orgId: req.permission.orgId,
|
||||
actorAuthMethod: req.permission.authMethod,
|
||||
actorOrgId: req.permission.orgId,
|
||||
ldapConfigId: req.params.configId
|
||||
});
|
||||
return result;
|
||||
}
|
||||
});
|
||||
};
|
||||
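The three routes added above expose full CRUD for LDAP group maps under /config/:configId/group-maps. A client-side sketch, assuming the router is mounted under an /api/v1/ldap prefix and that a JWT is at hand (both are assumptions, not shown in this diff):

// Sketch only: base URL, prefix and token handling are assumptions.
const createLdapGroupMap = async (baseUrl: string, token: string, configId: string) => {
  const res = await fetch(`${baseUrl}/api/v1/ldap/config/${configId}/group-maps`, {
    method: "POST",
    headers: { Authorization: `Bearer ${token}`, "Content-Type": "application/json" },
    body: JSON.stringify({ ldapGroupCN: "engineering", groupSlug: "engineering" })
  });
  return res.json(); // on success the shape follows LdapGroupMapsSchema
};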
|
@@ -157,7 +157,13 @@ export const registerProjectRoleRouter = async (server: FastifyZodProvider) => {
|
||||
response: {
|
||||
200: z.object({
|
||||
data: z.object({
|
||||
membership: ProjectMembershipsSchema,
|
||||
membership: ProjectMembershipsSchema.extend({
|
||||
roles: z
|
||||
.object({
|
||||
role: z.string()
|
||||
})
|
||||
.array()
|
||||
}),
|
||||
permissions: z.any().array()
|
||||
})
|
||||
})
|
||||
|
@@ -19,7 +19,6 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
|
||||
description: "Return project secret snapshots ids",
|
||||
security: [
|
||||
{
|
||||
apiKeyAuth: [],
|
||||
bearerAuth: []
|
||||
}
|
||||
],
|
||||
@@ -97,8 +96,7 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
|
||||
description: "Return audit logs",
|
||||
security: [
|
||||
{
|
||||
bearerAuth: [],
|
||||
apiKeyAuth: []
|
||||
bearerAuth: []
|
||||
}
|
||||
],
|
||||
params: z.object({
|
||||
|
@@ -289,14 +289,28 @@ export const registerScimRouter = async (server: FastifyZodProvider) => {
|
||||
body: z.object({
|
||||
schemas: z.array(z.string()),
|
||||
displayName: z.string().trim(),
|
||||
members: z.array(z.any()).length(0).optional() // okta-specific
|
||||
members: z
|
||||
.array(
|
||||
z.object({
|
||||
value: z.string(),
|
||||
display: z.string()
|
||||
})
|
||||
)
|
||||
.optional() // okta-specific
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
schemas: z.array(z.string()),
|
||||
id: z.string().trim(),
|
||||
displayName: z.string().trim(),
|
||||
members: z.array(z.any()).length(0),
|
||||
members: z
|
||||
.array(
|
||||
z.object({
|
||||
value: z.string(),
|
||||
display: z.string()
|
||||
})
|
||||
)
|
||||
.optional(),
|
||||
meta: z.object({
|
||||
resourceType: z.string().trim()
|
||||
})
|
||||
@@ -306,8 +320,8 @@ export const registerScimRouter = async (server: FastifyZodProvider) => {
|
||||
onRequest: verifyAuth([AuthMode.SCIM_TOKEN]),
|
||||
handler: async (req) => {
|
||||
const group = await req.server.services.scim.createScimGroup({
|
||||
displayName: req.body.displayName,
|
||||
orgId: req.permission.orgId
|
||||
orgId: req.permission.orgId,
|
||||
...req.body
|
||||
});
|
||||
|
||||
return group;
|
||||
@@ -400,7 +414,12 @@ export const registerScimRouter = async (server: FastifyZodProvider) => {
|
||||
schemas: z.array(z.string()),
|
||||
id: z.string().trim(),
|
||||
displayName: z.string().trim(),
|
||||
members: z.array(z.any()).length(0)
|
||||
members: z.array(
|
||||
z.object({
|
||||
value: z.string(), // infisical userId
|
||||
display: z.string()
|
||||
})
|
||||
) // note: is this where members are added to group?
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
@@ -424,7 +443,7 @@ export const registerScimRouter = async (server: FastifyZodProvider) => {
|
||||
const group = await req.server.services.scim.updateScimGroupNamePut({
|
||||
groupId: req.params.groupId,
|
||||
orgId: req.permission.orgId,
|
||||
displayName: req.body.displayName
|
||||
...req.body
|
||||
});
|
||||
|
||||
return group;
|
||||
@@ -482,8 +501,6 @@ export const registerScimRouter = async (server: FastifyZodProvider) => {
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.SCIM_TOKEN]),
|
||||
handler: async (req) => {
|
||||
// console.log("PATCH /Groups/:groupId req.body: ", req.body);
|
||||
// console.log("PATCH /Groups/:groupId req.body: ", req.body.Operations[0]);
|
||||
const group = await req.server.services.scim.updateScimGroupNamePatch({
|
||||
groupId: req.params.groupId,
|
||||
orgId: req.permission.orgId,
|
||||
|
@@ -69,7 +69,6 @@ export const registerSnapshotRouter = async (server: FastifyZodProvider) => {
|
||||
description: "Roll back project secrets to those captured in a secret snapshot version.",
|
||||
security: [
|
||||
{
|
||||
apiKeyAuth: [],
|
||||
bearerAuth: []
|
||||
}
|
||||
],
|
||||
|
backend/src/ee/services/dynamic-secret/providers/cassandra.ts (new file, 125 lines)
@@ -0,0 +1,125 @@
|
||||
import cassandra from "cassandra-driver";
|
||||
import handlebars from "handlebars";
|
||||
import { customAlphabet } from "nanoid";
|
||||
import { z } from "zod";
|
||||
|
||||
import { BadRequestError } from "@app/lib/errors";
|
||||
import { alphaNumericNanoId } from "@app/lib/nanoid";
|
||||
|
||||
import { DynamicSecretCassandraSchema, TDynamicProviderFns } from "./models";
|
||||
|
||||
const generatePassword = (size = 48) => {
|
||||
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*$#";
|
||||
return customAlphabet(charset, 48)(size);
|
||||
};
|
||||
|
||||
const generateUsername = () => {
|
||||
return alphaNumericNanoId(32);
|
||||
};
|
||||
|
||||
export const CassandraProvider = (): TDynamicProviderFns => {
|
||||
const validateProviderInputs = async (inputs: unknown) => {
|
||||
const providerInputs = await DynamicSecretCassandraSchema.parseAsync(inputs);
|
||||
if (providerInputs.host === "localhost" || providerInputs.host === "127.0.0.1") {
|
||||
throw new BadRequestError({ message: "Invalid db host" });
|
||||
}
|
||||
|
||||
return providerInputs;
|
||||
};
|
||||
|
||||
const getClient = async (providerInputs: z.infer<typeof DynamicSecretCassandraSchema>) => {
|
||||
const sslOptions = providerInputs.ca ? { rejectUnauthorized: false, ca: providerInputs.ca } : undefined;
|
||||
const client = new cassandra.Client({
|
||||
sslOptions,
|
||||
protocolOptions: {
|
||||
port: providerInputs.port
|
||||
},
|
||||
credentials: {
|
||||
username: providerInputs.username,
|
||||
password: providerInputs.password
|
||||
},
|
||||
keyspace: providerInputs.keyspace,
|
||||
localDataCenter: providerInputs?.localDataCenter,
|
||||
contactPoints: providerInputs.host.split(",").filter(Boolean)
|
||||
});
|
||||
return client;
|
||||
};
|
||||
|
||||
const validateConnection = async (inputs: unknown) => {
|
||||
const providerInputs = await validateProviderInputs(inputs);
|
||||
const client = await getClient(providerInputs);
|
||||
|
||||
const isConnected = await client.execute("SELECT * FROM system_schema.keyspaces").then(() => true);
|
||||
await client.shutdown();
|
||||
return isConnected;
|
||||
};
|
||||
|
||||
const create = async (inputs: unknown, expireAt: number) => {
|
||||
const providerInputs = await validateProviderInputs(inputs);
|
||||
const client = await getClient(providerInputs);
|
||||
|
||||
const username = generateUsername();
|
||||
const password = generatePassword();
|
||||
const { keyspace } = providerInputs;
|
||||
const expiration = new Date(expireAt).toISOString();
|
||||
|
||||
const creationStatement = handlebars.compile(providerInputs.creationStatement, { noEscape: true })({
|
||||
username,
|
||||
password,
|
||||
expiration,
|
||||
keyspace
|
||||
});
|
||||
|
||||
const queries = creationStatement.toString().split(";").filter(Boolean);
|
||||
for (const query of queries) {
|
||||
// eslint-disable-next-line
|
||||
await client.execute(query);
|
||||
}
|
||||
await client.shutdown();
|
||||
|
||||
return { entityId: username, data: { DB_USERNAME: username, DB_PASSWORD: password } };
|
||||
};
|
||||
|
||||
const revoke = async (inputs: unknown, entityId: string) => {
|
||||
const providerInputs = await validateProviderInputs(inputs);
|
||||
const client = await getClient(providerInputs);
|
||||
|
||||
const username = entityId;
|
||||
const { keyspace } = providerInputs;
|
||||
|
||||
const revokeStatement = handlebars.compile(providerInputs.revocationStatement)({ username, keyspace });
|
||||
const queries = revokeStatement.toString().split(";").filter(Boolean);
|
||||
for (const query of queries) {
|
||||
// eslint-disable-next-line
|
||||
await client.execute(query);
|
||||
}
|
||||
await client.shutdown();
|
||||
return { entityId: username };
|
||||
};
|
||||
|
||||
const renew = async (inputs: unknown, entityId: string, expireAt: number) => {
|
||||
const providerInputs = await validateProviderInputs(inputs);
|
||||
const client = await getClient(providerInputs);
|
||||
|
||||
const username = entityId;
|
||||
const expiration = new Date(expireAt).toISOString();
|
||||
const { keyspace } = providerInputs;
|
||||
|
||||
const renewStatement = handlebars.compile(providerInputs.revocationStatement)({ username, keyspace, expiration });
|
||||
const queries = renewStatement.toString().split(";").filter(Boolean);
|
||||
for (const query of queries) {
|
||||
// eslint-disable-next-line
|
||||
await client.execute(query);
|
||||
}
|
||||
await client.shutdown();
|
||||
return { entityId: username };
|
||||
};
|
||||
|
||||
return {
|
||||
validateProviderInputs,
|
||||
validateConnection,
|
||||
create,
|
||||
revoke,
|
||||
renew
|
||||
};
|
||||
};
|
@@ -1,6 +1,8 @@
|
||||
import { CassandraProvider } from "./cassandra";
|
||||
import { DynamicSecretProviders } from "./models";
|
||||
import { SqlDatabaseProvider } from "./sql-database";
|
||||
|
||||
export const buildDynamicSecretProviders = () => ({
|
||||
[DynamicSecretProviders.SqlDatabase]: SqlDatabaseProvider()
|
||||
[DynamicSecretProviders.SqlDatabase]: SqlDatabaseProvider(),
|
||||
[DynamicSecretProviders.Cassandra]: CassandraProvider()
|
||||
});
|
||||
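The Cassandra provider registered above compiles creationStatement and revocationStatement as handlebars templates with username, password, expiration and keyspace in scope, then executes each ;-separated statement. The templates below are illustrative examples only, not values shipped by this diff:

// Example inputs for DynamicSecretCassandraSchema; the CQL is illustrative.
const creationStatement = `CREATE ROLE "{{username}}" WITH PASSWORD = '{{password}}' AND LOGIN = true;
GRANT SELECT ON KEYSPACE {{keyspace}} TO "{{username}}";`;

const revocationStatement = `DROP ROLE "{{username}}";`;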
|
@@ -2,7 +2,8 @@ import { z } from "zod";
|
||||
|
||||
export enum SqlProviders {
|
||||
Postgres = "postgres",
|
||||
MySQL = "mysql2"
|
||||
MySQL = "mysql2",
|
||||
Oracle = "oracledb"
|
||||
}
|
||||
|
||||
export const DynamicSecretSqlDBSchema = z.object({
|
||||
@@ -18,12 +19,27 @@ export const DynamicSecretSqlDBSchema = z.object({
|
||||
ca: z.string().optional()
|
||||
});
|
||||
|
||||
export const DynamicSecretCassandraSchema = z.object({
|
||||
host: z.string().toLowerCase(),
|
||||
port: z.number(),
|
||||
localDataCenter: z.string().min(1),
|
||||
keyspace: z.string().optional(),
|
||||
username: z.string(),
|
||||
password: z.string(),
|
||||
creationStatement: z.string(),
|
||||
revocationStatement: z.string(),
|
||||
renewStatement: z.string().optional(),
|
||||
ca: z.string().optional()
|
||||
});
|
||||
|
||||
export enum DynamicSecretProviders {
|
||||
SqlDatabase = "sql-database"
|
||||
SqlDatabase = "sql-database",
|
||||
Cassandra = "cassandra"
|
||||
}
|
||||
|
||||
export const DynamicSecretProviderSchema = z.discriminatedUnion("type", [
|
||||
z.object({ type: z.literal(DynamicSecretProviders.SqlDatabase), inputs: DynamicSecretSqlDBSchema })
|
||||
z.object({ type: z.literal(DynamicSecretProviders.SqlDatabase), inputs: DynamicSecretSqlDBSchema }),
|
||||
z.object({ type: z.literal(DynamicSecretProviders.Cassandra), inputs: DynamicSecretCassandraSchema })
|
||||
]);
|
||||
|
||||
export type TDynamicProviderFns = {
|
||||
|
@@ -8,31 +8,46 @@ import { BadRequestError } from "@app/lib/errors";
|
||||
import { getDbConnectionHost } from "@app/lib/knex";
|
||||
import { alphaNumericNanoId } from "@app/lib/nanoid";
|
||||
|
||||
import { DynamicSecretSqlDBSchema, TDynamicProviderFns } from "./models";
|
||||
import { DynamicSecretSqlDBSchema, SqlProviders, TDynamicProviderFns } from "./models";
|
||||
|
||||
const EXTERNAL_REQUEST_TIMEOUT = 10 * 1000;
|
||||
|
||||
const generatePassword = (size?: number) => {
|
||||
const generatePassword = (provider: SqlProviders) => {
|
||||
// oracle has limit of 48 password length
|
||||
const size = provider === SqlProviders.Oracle ? 30 : 48;
|
||||
|
||||
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*$#";
|
||||
return customAlphabet(charset, 48)(size);
|
||||
};
|
||||
|
||||
const generateUsername = (provider: SqlProviders) => {
|
||||
// For oracle, the client assumes everything is upper case when not using quotes around the password
|
||||
if (provider === SqlProviders.Oracle) return alphaNumericNanoId(32).toUpperCase();
|
||||
|
||||
return alphaNumericNanoId(32);
|
||||
};
|
||||
|
||||
export const SqlDatabaseProvider = (): TDynamicProviderFns => {
|
||||
const validateProviderInputs = async (inputs: unknown) => {
|
||||
const appCfg = getConfig();
|
||||
const isCloud = Boolean(appCfg.LICENSE_SERVER_KEY); // quick and dirty way to check if its cloud or not
|
||||
const dbHost = appCfg.DB_HOST || getDbConnectionHost(appCfg.DB_CONNECTION_URI);
|
||||
|
||||
const providerInputs = await DynamicSecretSqlDBSchema.parseAsync(inputs);
|
||||
if (
|
||||
isCloud &&
|
||||
// localhost
|
||||
// internal ips
|
||||
(providerInputs.host === "host.docker.internal" ||
|
||||
providerInputs.host.match(/^10\.\d+\.\d+\.\d+/) ||
|
||||
providerInputs.host.match(/^192\.168\.\d+\.\d+/))
|
||||
)
|
||||
throw new BadRequestError({ message: "Invalid db host" });
|
||||
if (
|
||||
providerInputs.host === "localhost" ||
|
||||
providerInputs.host === "127.0.0.1" ||
|
||||
// database infisical uses
|
||||
dbHost === providerInputs.host ||
|
||||
// internal ips
|
||||
providerInputs.host === "host.docker.internal" ||
|
||||
providerInputs.host.match(/^10\.\d+\.\d+\.\d+/) ||
|
||||
providerInputs.host.match(/^192\.168\.\d+\.\d+/)
|
||||
dbHost === providerInputs.host
|
||||
)
|
||||
throw new BadRequestError({ message: "Invalid db host" });
|
||||
return providerInputs;
|
||||
@@ -59,10 +74,10 @@ export const SqlDatabaseProvider = (): TDynamicProviderFns => {
|
||||
const validateConnection = async (inputs: unknown) => {
|
||||
const providerInputs = await validateProviderInputs(inputs);
|
||||
const db = await getClient(providerInputs);
|
||||
const isConnected = await db
|
||||
.raw("SELECT NOW()")
|
||||
.then(() => true)
|
||||
.catch(() => false);
|
||||
// oracle needs from keyword
|
||||
const testStatement = providerInputs.client === SqlProviders.Oracle ? "SELECT 1 FROM DUAL" : "SELECT 1";
|
||||
|
||||
const isConnected = await db.raw(testStatement).then(() => true);
|
||||
await db.destroy();
|
||||
return isConnected;
|
||||
};
|
||||
@@ -71,8 +86,8 @@ export const SqlDatabaseProvider = (): TDynamicProviderFns => {
|
||||
const providerInputs = await validateProviderInputs(inputs);
|
||||
const db = await getClient(providerInputs);
|
||||
|
||||
const username = alphaNumericNanoId(32);
|
||||
const password = generatePassword();
|
||||
const username = generateUsername(providerInputs.client);
|
||||
const password = generatePassword(providerInputs.client);
|
||||
const { database } = providerInputs;
|
||||
const expiration = new Date(expireAt).toISOString();
|
||||
|
||||
@@ -83,15 +98,13 @@ export const SqlDatabaseProvider = (): TDynamicProviderFns => {
|
||||
database
|
||||
});
|
||||
|
||||
await db.transaction(async (tx) =>
|
||||
Promise.all(
|
||||
creationStatement
|
||||
.toString()
|
||||
.split(";")
|
||||
.filter(Boolean)
|
||||
.map((query) => tx.raw(query))
|
||||
)
|
||||
);
|
||||
const queries = creationStatement.toString().split(";").filter(Boolean);
|
||||
await db.transaction(async (tx) => {
|
||||
for (const query of queries) {
|
||||
// eslint-disable-next-line
|
||||
await tx.raw(query);
|
||||
}
|
||||
});
|
||||
await db.destroy();
|
||||
return { entityId: username, data: { DB_USERNAME: username, DB_PASSWORD: password } };
|
||||
};
|
||||
@@ -104,15 +117,13 @@ export const SqlDatabaseProvider = (): TDynamicProviderFns => {
|
||||
const { database } = providerInputs;
|
||||
|
||||
const revokeStatement = handlebars.compile(providerInputs.revocationStatement)({ username, database });
|
||||
await db.transaction(async (tx) =>
|
||||
Promise.all(
|
||||
revokeStatement
|
||||
.toString()
|
||||
.split(";")
|
||||
.filter(Boolean)
|
||||
.map((query) => tx.raw(query))
|
||||
)
|
||||
);
|
||||
const queries = revokeStatement.toString().split(";").filter(Boolean);
|
||||
await db.transaction(async (tx) => {
|
||||
for (const query of queries) {
|
||||
// eslint-disable-next-line
|
||||
await tx.raw(query);
|
||||
}
|
||||
});
|
||||
|
||||
await db.destroy();
|
||||
return { entityId: username };
|
||||
@@ -127,16 +138,15 @@ export const SqlDatabaseProvider = (): TDynamicProviderFns => {
|
||||
const { database } = providerInputs;
|
||||
|
||||
const renewStatement = handlebars.compile(providerInputs.renewStatement)({ username, expiration, database });
|
||||
if (renewStatement)
|
||||
await db.transaction(async (tx) =>
|
||||
Promise.all(
|
||||
renewStatement
|
||||
.toString()
|
||||
.split(";")
|
||||
.filter(Boolean)
|
||||
.map((query) => tx.raw(query))
|
||||
)
|
||||
);
|
||||
if (renewStatement) {
|
||||
const queries = renewStatement.toString().split(";").filter(Boolean);
|
||||
await db.transaction(async (tx) => {
|
||||
for (const query of queries) {
|
||||
// eslint-disable-next-line
|
||||
await tx.raw(query);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
await db.destroy();
|
||||
return { entityId: username };
|
||||
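With the oracledb client added above, the generated username is upper-cased and the generated password is shortened to 30 characters; user-supplied statements would typically follow Oracle syntax. Illustrative templates only, not values from this diff:

// Example inputs for the SQL provider when client is "oracledb"; purely illustrative.
const creationStatement = `CREATE USER {{username}} IDENTIFIED BY "{{password}}";
GRANT CONNECT TO {{username}};`;

const revocationStatement = `DROP USER {{username}} CASCADE;`;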
|
@@ -59,32 +59,6 @@ export const groupDALFactory = (db: TDbClient) => {
|
||||
}
|
||||
};
|
||||
|
||||
const countAllGroupMembers = async ({ orgId, groupId }: { orgId: string; groupId: string }) => {
|
||||
try {
|
||||
interface CountResult {
|
||||
count: string;
|
||||
}
|
||||
|
||||
const doc = await db<CountResult>(TableName.OrgMembership)
|
||||
.where(`${TableName.OrgMembership}.orgId`, orgId)
|
||||
.join(TableName.Users, `${TableName.OrgMembership}.userId`, `${TableName.Users}.id`)
|
||||
.leftJoin(TableName.UserGroupMembership, function () {
|
||||
this.on(`${TableName.UserGroupMembership}.userId`, "=", `${TableName.Users}.id`).andOn(
|
||||
`${TableName.UserGroupMembership}.groupId`,
|
||||
"=",
|
||||
db.raw("?", [groupId])
|
||||
);
|
||||
})
|
||||
.where({ isGhost: false })
|
||||
.count(`${TableName.Users}.id`)
|
||||
.first();
|
||||
|
||||
return parseInt((doc?.count as string) || "0", 10);
|
||||
} catch (err) {
|
||||
throw new DatabaseError({ error: err, name: "Count all group members" });
|
||||
}
|
||||
};
|
||||
|
||||
// special query
|
||||
const findAllGroupMembers = async ({
|
||||
orgId,
|
||||
@@ -150,7 +124,6 @@ export const groupDALFactory = (db: TDbClient) => {
|
||||
return {
|
||||
findGroups,
|
||||
findByOrgId,
|
||||
countAllGroupMembers,
|
||||
findAllGroupMembers,
|
||||
...groupOrm
|
||||
};
|
||||
|
backend/src/ee/services/group/group-fns.ts (new file, 450 lines)
@@ -0,0 +1,450 @@
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { SecretKeyEncoding, TUsers } from "@app/db/schemas";
|
||||
import { decryptAsymmetric, encryptAsymmetric, infisicalSymmetricDecrypt } from "@app/lib/crypto/encryption";
|
||||
import { BadRequestError, ScimRequestError } from "@app/lib/errors";
|
||||
|
||||
import {
|
||||
TAddUsersToGroup,
|
||||
TAddUsersToGroupByUserIds,
|
||||
TConvertPendingGroupAdditionsToGroupMemberships,
|
||||
TRemoveUsersFromGroupByUserIds
|
||||
} from "./group-types";
|
||||
|
||||
const addAcceptedUsersToGroup = async ({
|
||||
userIds,
|
||||
group,
|
||||
userGroupMembershipDAL,
|
||||
userDAL,
|
||||
groupProjectDAL,
|
||||
projectKeyDAL,
|
||||
projectDAL,
|
||||
projectBotDAL,
|
||||
tx
|
||||
}: TAddUsersToGroup) => {
|
||||
const users = await userDAL.findUserEncKeyByUserIdsBatch(
|
||||
{
|
||||
userIds
|
||||
},
|
||||
tx
|
||||
);
|
||||
|
||||
await userGroupMembershipDAL.insertMany(
|
||||
users.map((user) => ({
|
||||
userId: user.userId,
|
||||
groupId: group.id,
|
||||
isPending: false
|
||||
})),
|
||||
tx
|
||||
);
|
||||
|
||||
// check which projects the group is part of
|
||||
const projectIds = Array.from(
|
||||
new Set(
|
||||
(
|
||||
await groupProjectDAL.find(
|
||||
{
|
||||
groupId: group.id
|
||||
},
|
||||
{ tx }
|
||||
)
|
||||
).map((gp) => gp.projectId)
|
||||
)
|
||||
);
|
||||
|
||||
const keys = await projectKeyDAL.find(
|
||||
{
|
||||
$in: {
|
||||
projectId: projectIds,
|
||||
receiverId: users.map((u) => u.id)
|
||||
}
|
||||
},
|
||||
{ tx }
|
||||
);
|
||||
|
||||
const userKeysSet = new Set(keys.map((k) => `${k.projectId}-${k.receiverId}`));
|
||||
|
||||
for await (const projectId of projectIds) {
|
||||
const usersToAddProjectKeyFor = users.filter((u) => !userKeysSet.has(`${projectId}-${u.userId}`));
|
||||
|
||||
if (usersToAddProjectKeyFor.length) {
|
||||
// there are users who need to be shared keys
|
||||
// process adding bulk users to projects for each project individually
|
||||
const ghostUser = await projectDAL.findProjectGhostUser(projectId, tx);
|
||||
|
||||
if (!ghostUser) {
|
||||
throw new BadRequestError({
|
||||
message: "Failed to find sudo user"
|
||||
});
|
||||
}
|
||||
|
||||
const ghostUserLatestKey = await projectKeyDAL.findLatestProjectKey(ghostUser.id, projectId, tx);
|
||||
|
||||
if (!ghostUserLatestKey) {
|
||||
throw new BadRequestError({
|
||||
message: "Failed to find sudo user latest key"
|
||||
});
|
||||
}
|
||||
|
||||
const bot = await projectBotDAL.findOne({ projectId }, tx);
|
||||
|
||||
if (!bot) {
|
||||
throw new BadRequestError({
|
||||
message: "Failed to find bot"
|
||||
});
|
||||
}
|
||||
|
||||
const botPrivateKey = infisicalSymmetricDecrypt({
|
||||
keyEncoding: bot.keyEncoding as SecretKeyEncoding,
|
||||
iv: bot.iv,
|
||||
tag: bot.tag,
|
||||
ciphertext: bot.encryptedPrivateKey
|
||||
});
|
||||
|
||||
const plaintextProjectKey = decryptAsymmetric({
|
||||
ciphertext: ghostUserLatestKey.encryptedKey,
|
||||
nonce: ghostUserLatestKey.nonce,
|
||||
publicKey: ghostUserLatestKey.sender.publicKey,
|
||||
privateKey: botPrivateKey
|
||||
});
|
||||
|
||||
const projectKeysToAdd = usersToAddProjectKeyFor.map((user) => {
|
||||
const { ciphertext: encryptedKey, nonce } = encryptAsymmetric(
|
||||
plaintextProjectKey,
|
||||
user.publicKey,
|
||||
botPrivateKey
|
||||
);
|
||||
return {
|
||||
encryptedKey,
|
||||
nonce,
|
||||
senderId: ghostUser.id,
|
||||
receiverId: user.userId,
|
||||
projectId
|
||||
};
|
||||
});
|
||||
|
||||
await projectKeyDAL.insertMany(projectKeysToAdd, tx);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Add users with user ids [userIds] to group [group].
|
||||
* - Users may or may not have finished completing their accounts; this function will
|
||||
* handle both adding users to groups directly and via pending group additions.
|
||||
* @param {group} group - group to add user(s) to
|
||||
* @param {string[]} userIds - id(s) of user(s) to add to group
|
||||
*/
|
||||
export const addUsersToGroupByUserIds = async ({
|
||||
group,
|
||||
userIds,
|
||||
userDAL,
|
||||
userGroupMembershipDAL,
|
||||
orgDAL,
|
||||
groupProjectDAL,
|
||||
projectKeyDAL,
|
||||
projectDAL,
|
||||
projectBotDAL,
|
||||
tx: outerTx
|
||||
}: TAddUsersToGroupByUserIds) => {
|
||||
const processAddition = async (tx: Knex) => {
|
||||
const foundMembers = await userDAL.find(
|
||||
{
|
||||
$in: {
|
||||
id: userIds
|
||||
}
|
||||
},
|
||||
{ tx }
|
||||
);
|
||||
|
||||
const foundMembersIdsSet = new Set(foundMembers.map((member) => member.id));
|
||||
|
||||
const isCompleteMatch = userIds.every((userId) => foundMembersIdsSet.has(userId));
|
||||
|
||||
if (!isCompleteMatch) {
|
||||
throw new ScimRequestError({
|
||||
detail: "Members not found",
|
||||
status: 404
|
||||
});
|
||||
}
|
||||
|
||||
// check if user(s) group membership(s) already exists
|
||||
const existingUserGroupMemberships = await userGroupMembershipDAL.find(
|
||||
{
|
||||
groupId: group.id,
|
||||
$in: {
|
||||
userId: userIds
|
||||
}
|
||||
},
|
||||
{ tx }
|
||||
);
|
||||
|
||||
if (existingUserGroupMemberships.length) {
|
||||
throw new BadRequestError({
|
||||
message: `User(s) are already part of the group ${group.slug}`
|
||||
});
|
||||
}
|
||||
|
||||
// check if all user(s) are part of the organization
|
||||
const existingUserOrgMemberships = await orgDAL.findMembership(
|
||||
{
|
||||
orgId: group.orgId,
|
||||
$in: {
|
||||
userId: userIds
|
||||
}
|
||||
},
|
||||
{ tx }
|
||||
);
|
||||
|
||||
const existingUserOrgMembershipsUserIdsSet = new Set(existingUserOrgMemberships.map((u) => u.userId));
|
||||
|
||||
userIds.forEach((userId) => {
|
||||
if (!existingUserOrgMembershipsUserIdsSet.has(userId))
|
||||
throw new BadRequestError({
|
||||
message: `User with id ${userId} is not part of the organization`
|
||||
});
|
||||
});
|
||||
|
||||
const membersToAddToGroupNonPending: TUsers[] = [];
|
||||
const membersToAddToGroupPending: TUsers[] = [];
|
||||
|
||||
foundMembers.forEach((member) => {
|
||||
if (member.isAccepted) {
|
||||
// add accepted member to group
|
||||
membersToAddToGroupNonPending.push(member);
|
||||
} else {
|
||||
// add incomplete member to pending group addition
|
||||
membersToAddToGroupPending.push(member);
|
||||
}
|
||||
});
|
||||
|
||||
if (membersToAddToGroupNonPending.length) {
|
||||
await addAcceptedUsersToGroup({
|
||||
userIds: membersToAddToGroupNonPending.map((member) => member.id),
|
||||
group,
|
||||
userDAL,
|
||||
userGroupMembershipDAL,
|
||||
groupProjectDAL,
|
||||
projectKeyDAL,
|
||||
projectDAL,
|
||||
projectBotDAL,
|
||||
tx
|
||||
});
|
||||
}
|
||||
|
||||
if (membersToAddToGroupPending.length) {
|
||||
await userGroupMembershipDAL.insertMany(
|
||||
membersToAddToGroupPending.map((member) => ({
|
||||
userId: member.id,
|
||||
groupId: group.id,
|
||||
isPending: true
|
||||
})),
|
||||
tx
|
||||
);
|
||||
}
|
||||
|
||||
return membersToAddToGroupNonPending.concat(membersToAddToGroupPending);
|
||||
};
|
||||
|
||||
if (outerTx) {
|
||||
return processAddition(outerTx);
|
||||
}
|
||||
return userDAL.transaction(async (tx) => {
|
||||
return processAddition(tx);
|
||||
});
|
||||
};
|
||||
|
||||
/**
|
||||
* Remove users with user ids [userIds] from group [group].
|
||||
* - Users may be part of the group (non-pending + pending);
|
||||
* this function will handle both cases.
|
||||
* @param {group} group - group to remove user(s) from
|
||||
* @param {string[]} userIds - id(s) of user(s) to remove from group
|
||||
*/
|
||||
export const removeUsersFromGroupByUserIds = async ({
|
||||
group,
|
||||
userIds,
|
||||
userDAL,
|
||||
userGroupMembershipDAL,
|
||||
groupProjectDAL,
|
||||
projectKeyDAL,
|
||||
tx: outerTx
|
||||
}: TRemoveUsersFromGroupByUserIds) => {
|
||||
const processRemoval = async (tx: Knex) => {
|
||||
const foundMembers = await userDAL.find({
|
||||
$in: {
|
||||
id: userIds
|
||||
}
|
||||
});
|
||||
|
||||
const foundMembersIdsSet = new Set(foundMembers.map((member) => member.id));
|
||||
|
||||
const isCompleteMatch = userIds.every((userId) => foundMembersIdsSet.has(userId));
|
||||
|
||||
if (!isCompleteMatch) {
|
||||
throw new ScimRequestError({
|
||||
detail: "Members not found",
|
||||
status: 404
|
||||
});
|
||||
}
|
||||
|
||||
// check if user group membership already exists
|
||||
const existingUserGroupMemberships = await userGroupMembershipDAL.find(
|
||||
{
|
||||
groupId: group.id,
|
||||
$in: {
|
||||
userId: userIds
|
||||
}
|
||||
},
|
||||
{ tx }
|
||||
);
|
||||
|
||||
const existingUserGroupMembershipsUserIdsSet = new Set(existingUserGroupMemberships.map((u) => u.userId));
|
||||
|
||||
userIds.forEach((userId) => {
|
||||
if (!existingUserGroupMembershipsUserIdsSet.has(userId))
|
||||
throw new BadRequestError({
|
||||
message: `User(s) are not part of the group ${group.slug}`
|
||||
});
|
||||
});
|
||||
|
||||
const membersToRemoveFromGroupNonPending: TUsers[] = [];
|
||||
const membersToRemoveFromGroupPending: TUsers[] = [];
|
||||
|
||||
foundMembers.forEach((member) => {
|
||||
if (member.isAccepted) {
|
||||
// remove accepted member from group
|
||||
membersToRemoveFromGroupNonPending.push(member);
|
||||
} else {
|
||||
// remove incomplete member from pending group addition
|
||||
membersToRemoveFromGroupPending.push(member);
|
||||
}
|
||||
});
|
||||
|
||||
if (membersToRemoveFromGroupNonPending.length) {
|
||||
// check which projects the group is part of
|
||||
const projectIds = Array.from(
|
||||
new Set(
|
||||
(
|
||||
await groupProjectDAL.find(
|
||||
{
|
||||
groupId: group.id
|
||||
},
|
||||
{ tx }
|
||||
)
|
||||
).map((gp) => gp.projectId)
|
||||
)
|
||||
);
|
||||
|
||||
// TODO: this part can be optimized
|
||||
for await (const userId of userIds) {
|
||||
const t = await userGroupMembershipDAL.filterProjectsByUserMembership(userId, group.id, projectIds, tx);
|
||||
const projectsToDeleteKeyFor = projectIds.filter((p) => !t.has(p));
|
||||
|
||||
if (projectsToDeleteKeyFor.length) {
|
||||
await projectKeyDAL.delete(
|
||||
{
|
||||
receiverId: userId,
|
||||
$in: {
|
||||
projectId: projectsToDeleteKeyFor
|
||||
}
|
||||
},
|
||||
tx
|
||||
);
|
||||
}
|
||||
|
||||
await userGroupMembershipDAL.delete(
|
||||
{
|
||||
groupId: group.id,
|
||||
userId
|
||||
},
|
||||
tx
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
if (membersToRemoveFromGroupPending.length) {
|
||||
await userGroupMembershipDAL.delete({
|
||||
groupId: group.id,
|
||||
$in: {
|
||||
userId: membersToRemoveFromGroupPending.map((member) => member.id)
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
return membersToRemoveFromGroupNonPending.concat(membersToRemoveFromGroupPending);
|
||||
};
|
||||
|
||||
if (outerTx) {
|
||||
return processRemoval(outerTx);
|
||||
}
|
||||
return userDAL.transaction(async (tx) => {
|
||||
return processRemoval(tx);
|
||||
});
|
||||
};
|
||||
|
||||
/**
|
||||
* Convert pending group additions for users with ids [userIds] to group memberships.
|
||||
* @param {string[]} userIds - id(s) of user(s) to try to convert pending group additions to group memberships
|
||||
*/
|
||||
export const convertPendingGroupAdditionsToGroupMemberships = async ({
|
||||
userIds,
|
||||
userDAL,
|
||||
userGroupMembershipDAL,
|
||||
groupProjectDAL,
|
||||
projectKeyDAL,
|
||||
projectDAL,
|
||||
projectBotDAL,
|
||||
tx: outerTx
|
||||
}: TConvertPendingGroupAdditionsToGroupMemberships) => {
|
||||
const processConversion = async (tx: Knex) => {
|
||||
const users = await userDAL.find(
|
||||
{
|
||||
$in: {
|
||||
id: userIds
|
||||
}
|
||||
},
|
||||
{ tx }
|
||||
);
|
||||
|
||||
const usersUserIdsSet = new Set(users.map((u) => u.id));
|
||||
userIds.forEach((userId) => {
|
||||
if (!usersUserIdsSet.has(userId)) {
|
||||
throw new BadRequestError({
|
||||
message: `Failed to find user with id ${userId}`
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
users.forEach((user) => {
|
||||
if (!user.isAccepted) {
|
||||
throw new BadRequestError({
|
||||
message: `Failed to convert pending group additions to group memberships for user ${user.username} because they have not confirmed their account`
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
const pendingGroupAdditions = await userGroupMembershipDAL.deletePendingUserGroupMembershipsByUserIds(userIds, tx);
|
||||
|
||||
for await (const pendingGroupAddition of pendingGroupAdditions) {
|
||||
await addAcceptedUsersToGroup({
|
||||
userIds: [pendingGroupAddition.user.id],
|
||||
group: pendingGroupAddition.group,
|
||||
userDAL,
|
||||
userGroupMembershipDAL,
|
||||
groupProjectDAL,
|
||||
projectKeyDAL,
|
||||
projectDAL,
|
||||
projectBotDAL,
|
||||
tx
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
if (outerTx) {
|
||||
return processConversion(outerTx);
|
||||
}
|
||||
return userDAL.transaction(async (tx) => {
|
||||
await processConversion(tx);
|
||||
});
|
||||
};
|
@@ -1,22 +1,22 @@
|
||||
import { ForbiddenError } from "@casl/ability";
|
||||
import slugify from "@sindresorhus/slugify";
|
||||
|
||||
import { OrgMembershipRole, SecretKeyEncoding, TOrgRoles } from "@app/db/schemas";
|
||||
import { OrgMembershipRole, TOrgRoles } from "@app/db/schemas";
|
||||
import { isAtLeastAsPrivileged } from "@app/lib/casl";
|
||||
import { decryptAsymmetric, encryptAsymmetric, infisicalSymmetricDecrypt } from "@app/lib/crypto/encryption";
|
||||
import { BadRequestError, ForbiddenRequestError } from "@app/lib/errors";
|
||||
import { alphaNumericNanoId } from "@app/lib/nanoid";
|
||||
import { TGroupProjectDALFactory } from "@app/services/group-project/group-project-dal";
|
||||
import { TOrgDALFactory } from "@app/services/org/org-dal";
|
||||
import { TProjectDALFactory } from "@app/services/project/project-dal";
|
||||
import { TProjectBotDALFactory } from "@app/services/project-bot/project-bot-dal";
|
||||
import { TProjectKeyDALFactory } from "@app/services/project-key/project-key-dal";
|
||||
import { TUserDALFactory } from "@app/services/user/user-dal";
|
||||
|
||||
import { TGroupProjectDALFactory } from "../../../services/group-project/group-project-dal";
|
||||
import { TOrgDALFactory } from "../../../services/org/org-dal";
|
||||
import { TProjectDALFactory } from "../../../services/project/project-dal";
|
||||
import { TProjectBotDALFactory } from "../../../services/project-bot/project-bot-dal";
|
||||
import { TProjectKeyDALFactory } from "../../../services/project-key/project-key-dal";
|
||||
import { TUserDALFactory } from "../../../services/user/user-dal";
|
||||
import { TLicenseServiceFactory } from "../license/license-service";
|
||||
import { OrgPermissionActions, OrgPermissionSubjects } from "../permission/org-permission";
|
||||
import { TPermissionServiceFactory } from "../permission/permission-service";
|
||||
import { TGroupDALFactory } from "./group-dal";
|
||||
import { addUsersToGroupByUserIds, removeUsersFromGroupByUserIds } from "./group-fns";
|
||||
import {
|
||||
TAddUserToGroupDTO,
|
||||
TCreateGroupDTO,
|
||||
@@ -28,20 +28,17 @@ import {
|
||||
import { TUserGroupMembershipDALFactory } from "./user-group-membership-dal";
|
||||
|
||||
type TGroupServiceFactoryDep = {
|
||||
userDAL: Pick<TUserDALFactory, "findOne" | "findUserEncKeyByUsername">;
|
||||
groupDAL: Pick<
|
||||
TGroupDALFactory,
|
||||
"create" | "findOne" | "update" | "delete" | "findAllGroupMembers" | "countAllGroupMembers"
|
||||
>;
|
||||
userDAL: Pick<TUserDALFactory, "find" | "findUserEncKeyByUserIdsBatch" | "transaction" | "findOne">;
|
||||
groupDAL: Pick<TGroupDALFactory, "create" | "findOne" | "update" | "delete" | "findAllGroupMembers">;
|
||||
groupProjectDAL: Pick<TGroupProjectDALFactory, "find">;
|
||||
orgDAL: Pick<TOrgDALFactory, "findMembership">;
|
||||
orgDAL: Pick<TOrgDALFactory, "findMembership" | "countAllOrgMembers">;
|
||||
userGroupMembershipDAL: Pick<
|
||||
TUserGroupMembershipDALFactory,
|
||||
"findOne" | "create" | "delete" | "filterProjectsByUserMembership"
|
||||
"findOne" | "delete" | "filterProjectsByUserMembership" | "transaction" | "insertMany" | "find"
|
||||
>;
|
||||
projectDAL: Pick<TProjectDALFactory, "findProjectGhostUser">;
|
||||
projectBotDAL: Pick<TProjectBotDALFactory, "findOne">;
|
||||
projectKeyDAL: Pick<TProjectKeyDALFactory, "find" | "create" | "delete" | "findLatestProjectKey">;
|
||||
projectKeyDAL: Pick<TProjectKeyDALFactory, "find" | "delete" | "findLatestProjectKey" | "insertMany">;
|
||||
permissionService: Pick<TPermissionServiceFactory, "getOrgPermission" | "getOrgPermissionByRole">;
|
||||
licenseService: Pick<TLicenseServiceFactory, "getPlan">;
|
||||
};
|
||||
@@ -227,12 +224,9 @@ export const groupServiceFactory = ({
|
||||
username
|
||||
});
|
||||
|
||||
const totalCount = await groupDAL.countAllGroupMembers({
|
||||
orgId: group.orgId,
|
||||
groupId: group.id
|
||||
});
|
||||
const count = await orgDAL.countAllOrgMembers(group.orgId);
|
||||
|
||||
return { users, totalCount };
|
||||
return { users, totalCount: count };
|
||||
};
|
||||
|
||||
const addUserToGroup = async ({
|
||||
@@ -272,111 +266,22 @@ export const groupServiceFactory = ({
|
||||
if (!hasRequiredPriviledges)
|
||||
throw new ForbiddenRequestError({ message: "Failed to add user to more privileged group" });
|
||||
|
||||
// get user with username
|
||||
const user = await userDAL.findUserEncKeyByUsername({
|
||||
username
|
||||
const user = await userDAL.findOne({ username });
|
||||
if (!user) throw new BadRequestError({ message: `Failed to find user with username ${username}` });
|
||||
|
||||
const users = await addUsersToGroupByUserIds({
|
||||
group,
|
||||
userIds: [user.id],
|
||||
userDAL,
|
||||
userGroupMembershipDAL,
|
||||
orgDAL,
|
||||
groupProjectDAL,
|
||||
projectKeyDAL,
|
||||
projectDAL,
|
||||
projectBotDAL
|
||||
});
|
||||
|
||||
if (!user)
|
||||
throw new BadRequestError({
|
||||
message: `Failed to find user with username ${username}`
|
||||
});
|
||||
|
||||
// check if user group membership already exists
|
||||
const existingUserGroupMembership = await userGroupMembershipDAL.findOne({
|
||||
groupId: group.id,
|
||||
userId: user.userId
|
||||
});
|
||||
|
||||
if (existingUserGroupMembership)
|
||||
throw new BadRequestError({
|
||||
message: `User ${username} is already part of the group ${groupSlug}`
|
||||
});
|
||||
|
||||
// check if user is even part of the organization
|
||||
const existingUserOrgMembership = await orgDAL.findMembership({
|
||||
userId: user.userId,
|
||||
orgId: actorOrgId
|
||||
});
|
||||
|
||||
if (!existingUserOrgMembership)
|
||||
throw new BadRequestError({
|
||||
message: `User ${username} is not part of the organization`
|
||||
});
|
||||
|
||||
await userGroupMembershipDAL.create({
|
||||
userId: user.userId,
|
||||
groupId: group.id
|
||||
});
|
||||
|
||||
// check which projects the group is part of
|
||||
const projectIds = (
|
||||
await groupProjectDAL.find({
|
||||
groupId: group.id
|
||||
})
|
||||
).map((gp) => gp.projectId);
|
||||
|
||||
const keys = await projectKeyDAL.find({
|
||||
receiverId: user.userId,
|
||||
$in: {
|
||||
projectId: projectIds
|
||||
}
|
||||
});
|
||||
|
||||
const keysSet = new Set(keys.map((k) => k.projectId));
|
||||
const projectsToAddKeyFor = projectIds.filter((p) => !keysSet.has(p));
|
||||
|
||||
for await (const projectId of projectsToAddKeyFor) {
|
||||
const ghostUser = await projectDAL.findProjectGhostUser(projectId);
|
||||
|
||||
if (!ghostUser) {
|
||||
throw new BadRequestError({
|
||||
message: "Failed to find sudo user"
|
||||
});
|
||||
}
|
||||
|
||||
const ghostUserLatestKey = await projectKeyDAL.findLatestProjectKey(ghostUser.id, projectId);
|
||||
|
||||
if (!ghostUserLatestKey) {
|
||||
throw new BadRequestError({
|
||||
message: "Failed to find sudo user latest key"
|
||||
});
|
||||
}
|
||||
|
||||
const bot = await projectBotDAL.findOne({ projectId });
|
||||
|
||||
if (!bot) {
|
||||
throw new BadRequestError({
|
||||
message: "Failed to find bot"
|
||||
});
|
||||
}
|
||||
|
||||
const botPrivateKey = infisicalSymmetricDecrypt({
|
||||
keyEncoding: bot.keyEncoding as SecretKeyEncoding,
|
||||
iv: bot.iv,
|
||||
tag: bot.tag,
|
||||
ciphertext: bot.encryptedPrivateKey
|
||||
});
|
||||
|
||||
const plaintextProjectKey = decryptAsymmetric({
|
||||
ciphertext: ghostUserLatestKey.encryptedKey,
|
||||
nonce: ghostUserLatestKey.nonce,
|
||||
publicKey: ghostUserLatestKey.sender.publicKey,
|
||||
privateKey: botPrivateKey
|
||||
});
|
||||
|
||||
const { ciphertext: encryptedKey, nonce } = encryptAsymmetric(plaintextProjectKey, user.publicKey, botPrivateKey);
|
||||
|
||||
await projectKeyDAL.create({
|
||||
encryptedKey,
|
||||
nonce,
|
||||
senderId: ghostUser.id,
|
||||
receiverId: user.userId,
|
||||
projectId
|
||||
});
|
||||
}
|
||||
|
||||
return user;
|
||||
return users[0];
|
||||
};
|
||||
|
||||
const removeUserFromGroup = async ({
|
||||
@@ -416,51 +321,19 @@ export const groupServiceFactory = ({
|
||||
if (!hasRequiredPriviledges)
|
||||
throw new ForbiddenRequestError({ message: "Failed to delete user from more privileged group" });
|
||||
|
||||
const user = await userDAL.findOne({
|
||||
username
|
||||
const user = await userDAL.findOne({ username });
|
||||
if (!user) throw new BadRequestError({ message: `Failed to find user with username ${username}` });
|
||||
|
||||
const users = await removeUsersFromGroupByUserIds({
|
||||
group,
|
||||
userIds: [user.id],
|
||||
userDAL,
|
||||
userGroupMembershipDAL,
|
||||
groupProjectDAL,
|
||||
projectKeyDAL
|
||||
});
|
||||
|
||||
if (!user)
|
||||
throw new BadRequestError({
|
||||
message: `Failed to find user with username ${username}`
|
||||
});
|
||||
|
||||
// check if user group membership already exists
|
||||
const existingUserGroupMembership = await userGroupMembershipDAL.findOne({
|
||||
groupId: group.id,
|
||||
userId: user.id
|
||||
});
|
||||
|
||||
if (!existingUserGroupMembership)
|
||||
throw new BadRequestError({
|
||||
message: `User ${username} is not part of the group ${groupSlug}`
|
||||
});
|
||||
|
||||
const projectIds = (
|
||||
await groupProjectDAL.find({
|
||||
groupId: group.id
|
||||
})
|
||||
).map((gp) => gp.projectId);
|
||||
|
||||
const t = await userGroupMembershipDAL.filterProjectsByUserMembership(user.id, group.id, projectIds);
|
||||
|
||||
const projectsToDeleteKeyFor = projectIds.filter((p) => !t.has(p));
|
||||
|
||||
if (projectsToDeleteKeyFor.length) {
|
||||
await projectKeyDAL.delete({
|
||||
receiverId: user.id,
|
||||
$in: {
|
||||
projectId: projectsToDeleteKeyFor
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
await userGroupMembershipDAL.delete({
|
||||
groupId: group.id,
|
||||
userId: user.id
|
||||
});
|
||||
|
||||
return user;
|
||||
return users[0];
|
||||
};
|
||||
|
||||
return {
|
||||
|
@@ -1,4 +1,14 @@
import { Knex } from "knex";

import { TGroups } from "@app/db/schemas";
import { TUserGroupMembershipDALFactory } from "@app/ee/services/group/user-group-membership-dal";
import { TGenericPermission } from "@app/lib/types";
import { TGroupProjectDALFactory } from "@app/services/group-project/group-project-dal";
import { TOrgDALFactory } from "@app/services/org/org-dal";
import { TProjectDALFactory } from "@app/services/project/project-dal";
import { TProjectBotDALFactory } from "@app/services/project-bot/project-bot-dal";
import { TProjectKeyDALFactory } from "@app/services/project-key/project-key-dal";
import { TUserDALFactory } from "@app/services/user/user-dal";

export type TCreateGroupDTO = {
name: string;
@@ -35,3 +45,54 @@ export type TRemoveUserFromGroupDTO = {
groupSlug: string;
username: string;
} & TGenericPermission;

// group fns types

export type TAddUsersToGroup = {
userIds: string[];
group: TGroups;
userDAL: Pick<TUserDALFactory, "findUserEncKeyByUserIdsBatch">;
userGroupMembershipDAL: Pick<TUserGroupMembershipDALFactory, "find" | "transaction" | "insertMany">;
groupProjectDAL: Pick<TGroupProjectDALFactory, "find">;
projectKeyDAL: Pick<TProjectKeyDALFactory, "find" | "findLatestProjectKey" | "insertMany">;
projectDAL: Pick<TProjectDALFactory, "findProjectGhostUser">;
projectBotDAL: Pick<TProjectBotDALFactory, "findOne">;
tx: Knex;
};

export type TAddUsersToGroupByUserIds = {
group: TGroups;
userIds: string[];
userDAL: Pick<TUserDALFactory, "find" | "findUserEncKeyByUserIdsBatch" | "transaction">;
userGroupMembershipDAL: Pick<TUserGroupMembershipDALFactory, "find" | "transaction" | "insertMany">;
orgDAL: Pick<TOrgDALFactory, "findMembership">;
groupProjectDAL: Pick<TGroupProjectDALFactory, "find">;
projectKeyDAL: Pick<TProjectKeyDALFactory, "find" | "findLatestProjectKey" | "insertMany">;
projectDAL: Pick<TProjectDALFactory, "findProjectGhostUser">;
projectBotDAL: Pick<TProjectBotDALFactory, "findOne">;
tx?: Knex;
};

export type TRemoveUsersFromGroupByUserIds = {
group: TGroups;
userIds: string[];
userDAL: Pick<TUserDALFactory, "find" | "transaction">;
userGroupMembershipDAL: Pick<TUserGroupMembershipDALFactory, "find" | "filterProjectsByUserMembership" | "delete">;
groupProjectDAL: Pick<TGroupProjectDALFactory, "find">;
projectKeyDAL: Pick<TProjectKeyDALFactory, "delete">;
tx?: Knex;
};

export type TConvertPendingGroupAdditionsToGroupMemberships = {
userIds: string[];
userDAL: Pick<TUserDALFactory, "findUserEncKeyByUserIdsBatch" | "transaction" | "find" | "findById">;
userGroupMembershipDAL: Pick<
TUserGroupMembershipDALFactory,
"find" | "transaction" | "insertMany" | "deletePendingUserGroupMembershipsByUserIds"
>;
groupProjectDAL: Pick<TGroupProjectDALFactory, "find">;
projectKeyDAL: Pick<TProjectKeyDALFactory, "find" | "findLatestProjectKey" | "insertMany">;
projectDAL: Pick<TProjectDALFactory, "findProjectGhostUser">;
projectBotDAL: Pick<TProjectBotDALFactory, "findOne">;
tx?: Knex;
};
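For orientation, a minimal sketch of how these dependency bundles are meant to be consumed: a caller assembles the DAL instances (as registerRoutes does later in this diff) and hands them to addUsersToGroupByUserIds, optionally inside a transaction. The wrapper below is illustrative only and not part of the change.

// Hedged sketch; assumes the DAL instances referenced by TAddUsersToGroupByUserIds are already constructed.
import { addUsersToGroupByUserIds } from "@app/ee/services/group/group-fns";

const addMembersToGroup = async (deps: Omit<TAddUsersToGroupByUserIds, "tx">) =>
  // run the whole membership + project-key sharing flow atomically
  deps.userDAL.transaction(async (tx) => addUsersToGroupByUserIds({ ...deps, tx }));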
@@ -1,3 +1,5 @@
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { TDbClient } from "@app/db";
|
||||
import { TableName, TUserEncryptionKeys } from "@app/db/schemas";
|
||||
import { DatabaseError } from "@app/lib/errors";
|
||||
@@ -14,24 +16,28 @@ export const userGroupMembershipDALFactory = (db: TDbClient) => {
|
||||
* - The user is a member of a group that is a member of the project, excluding projects that they are part of
|
||||
* through the group with id [groupId].
|
||||
*/
|
||||
const filterProjectsByUserMembership = async (userId: string, groupId: string, projectIds: string[]) => {
|
||||
const userProjectMemberships: string[] = await db(TableName.ProjectMembership)
|
||||
.where(`${TableName.ProjectMembership}.userId`, userId)
|
||||
.whereIn(`${TableName.ProjectMembership}.projectId`, projectIds)
|
||||
.pluck(`${TableName.ProjectMembership}.projectId`);
|
||||
const filterProjectsByUserMembership = async (userId: string, groupId: string, projectIds: string[], tx?: Knex) => {
|
||||
try {
|
||||
const userProjectMemberships: string[] = await (tx || db)(TableName.ProjectMembership)
|
||||
.where(`${TableName.ProjectMembership}.userId`, userId)
|
||||
.whereIn(`${TableName.ProjectMembership}.projectId`, projectIds)
|
||||
.pluck(`${TableName.ProjectMembership}.projectId`);
|
||||
|
||||
const userGroupMemberships: string[] = await db(TableName.UserGroupMembership)
|
||||
.where(`${TableName.UserGroupMembership}.userId`, userId)
|
||||
.whereNot(`${TableName.UserGroupMembership}.groupId`, groupId)
|
||||
.join(
|
||||
TableName.GroupProjectMembership,
|
||||
`${TableName.UserGroupMembership}.groupId`,
|
||||
`${TableName.GroupProjectMembership}.groupId`
|
||||
)
|
||||
.whereIn(`${TableName.GroupProjectMembership}.projectId`, projectIds)
|
||||
.pluck(`${TableName.GroupProjectMembership}.projectId`);
|
||||
const userGroupMemberships: string[] = await (tx || db)(TableName.UserGroupMembership)
|
||||
.where(`${TableName.UserGroupMembership}.userId`, userId)
|
||||
.whereNot(`${TableName.UserGroupMembership}.groupId`, groupId)
|
||||
.join(
|
||||
TableName.GroupProjectMembership,
|
||||
`${TableName.UserGroupMembership}.groupId`,
|
||||
`${TableName.GroupProjectMembership}.groupId`
|
||||
)
|
||||
.whereIn(`${TableName.GroupProjectMembership}.projectId`, projectIds)
|
||||
.pluck(`${TableName.GroupProjectMembership}.projectId`);
|
||||
|
||||
return new Set(userProjectMemberships.concat(userGroupMemberships));
|
||||
return new Set(userProjectMemberships.concat(userGroupMemberships));
|
||||
} catch (error) {
|
||||
throw new DatabaseError({ error, name: "Filter projects by user membership" });
|
||||
}
|
||||
};
|
||||
|
||||
// special query
|
||||
@@ -45,7 +51,7 @@ export const userGroupMembershipDALFactory = (db: TDbClient) => {
|
||||
)
|
||||
.join(TableName.Users, `${TableName.UserGroupMembership}.userId`, `${TableName.Users}.id`)
|
||||
.where(`${TableName.GroupProjectMembership}.projectId`, projectId)
|
||||
.whereIn(`${TableName.Users}.username`, usernames) // TODO: pluck usernames
|
||||
.whereIn(`${TableName.Users}.username`, usernames)
|
||||
.pluck(`${TableName.Users}.id`);
|
||||
|
||||
return usernameDocs;
|
||||
@@ -55,7 +61,7 @@ export const userGroupMembershipDALFactory = (db: TDbClient) => {
|
||||
};
|
||||
|
||||
/**
|
||||
* Return list of users that are part of the group with id [groupId]
|
||||
* Return list of completed/accepted users that are part of the group with id [groupId]
|
||||
* that have not yet been added individually to project with id [projectId].
|
||||
*
|
||||
* Note: Filters out users that are part of other groups in the project.
|
||||
@@ -63,18 +69,19 @@ export const userGroupMembershipDALFactory = (db: TDbClient) => {
|
||||
* @param projectId
|
||||
* @returns
|
||||
*/
|
||||
const findGroupMembersNotInProject = async (groupId: string, projectId: string) => {
|
||||
const findGroupMembersNotInProject = async (groupId: string, projectId: string, tx?: Knex) => {
|
||||
try {
|
||||
// get list of groups in the project with id [projectId]
|
||||
// that that are not the group with id [groupId]
|
||||
const groups: string[] = await db(TableName.GroupProjectMembership)
|
||||
const groups: string[] = await (tx || db)(TableName.GroupProjectMembership)
|
||||
.where(`${TableName.GroupProjectMembership}.projectId`, projectId)
|
||||
.whereNot(`${TableName.GroupProjectMembership}.groupId`, groupId)
|
||||
.pluck(`${TableName.GroupProjectMembership}.groupId`);
|
||||
|
||||
// main query
|
||||
const members = await db(TableName.UserGroupMembership)
|
||||
const members = await (tx || db)(TableName.UserGroupMembership)
|
||||
.where(`${TableName.UserGroupMembership}.groupId`, groupId)
|
||||
.where(`${TableName.UserGroupMembership}.isPending`, false)
|
||||
.join(TableName.Users, `${TableName.UserGroupMembership}.userId`, `${TableName.Users}.id`)
|
||||
.leftJoin(TableName.ProjectMembership, function () {
|
||||
this.on(`${TableName.Users}.id`, "=", `${TableName.ProjectMembership}.userId`).andOn(
|
||||
@@ -116,10 +123,49 @@ export const userGroupMembershipDALFactory = (db: TDbClient) => {
|
||||
}
|
||||
};
|
||||
|
||||
const deletePendingUserGroupMembershipsByUserIds = async (userIds: string[], tx?: Knex) => {
|
||||
try {
|
||||
const members = await (tx || db)(TableName.UserGroupMembership)
|
||||
.whereIn(`${TableName.UserGroupMembership}.userId`, userIds)
|
||||
.where(`${TableName.UserGroupMembership}.isPending`, true)
|
||||
.join(TableName.Groups, `${TableName.UserGroupMembership}.groupId`, `${TableName.Groups}.id`)
|
||||
.join(TableName.Users, `${TableName.UserGroupMembership}.userId`, `${TableName.Users}.id`);
|
||||
|
||||
await userGroupMembershipOrm.delete(
|
||||
{
|
||||
$in: {
|
||||
userId: userIds
|
||||
}
|
||||
},
|
||||
tx
|
||||
);
|
||||
|
||||
return members.map(({ userId, username, groupId, orgId, name, slug, role, roleId }) => ({
|
||||
user: {
|
||||
id: userId,
|
||||
username
|
||||
},
|
||||
group: {
|
||||
id: groupId,
|
||||
orgId,
|
||||
name,
|
||||
slug,
|
||||
role,
|
||||
roleId,
|
||||
createdAt: new Date(),
|
||||
updatedAt: new Date()
|
||||
}
|
||||
}));
|
||||
} catch (error) {
|
||||
throw new DatabaseError({ error, name: "Delete pending user group memberships by user ids" });
|
||||
}
|
||||
};
|
||||
|
||||
return {
|
||||
...userGroupMembershipOrm,
|
||||
filterProjectsByUserMembership,
|
||||
findUserGroupMembershipsInProject,
|
||||
findGroupMembersNotInProject
|
||||
findGroupMembersNotInProject,
|
||||
deletePendingUserGroupMembershipsByUserIds
|
||||
};
|
||||
};
|
||||
|
@@ -2,6 +2,9 @@ import { ForbiddenError } from "@casl/ability";
|
||||
import jwt from "jsonwebtoken";
|
||||
|
||||
import { OrgMembershipRole, OrgMembershipStatus, SecretKeyEncoding, TLdapConfigsUpdate } from "@app/db/schemas";
|
||||
import { TGroupDALFactory } from "@app/ee/services/group/group-dal";
|
||||
import { addUsersToGroupByUserIds, removeUsersFromGroupByUserIds } from "@app/ee/services/group/group-fns";
|
||||
import { TUserGroupMembershipDALFactory } from "@app/ee/services/group/user-group-membership-dal";
|
||||
import { getConfig } from "@app/lib/config/env";
|
||||
import {
|
||||
decryptSymmetric,
|
||||
@@ -13,8 +16,12 @@ import {
|
||||
} from "@app/lib/crypto/encryption";
|
||||
import { BadRequestError } from "@app/lib/errors";
|
||||
import { AuthMethod, AuthTokenType } from "@app/services/auth/auth-type";
|
||||
import { TGroupProjectDALFactory } from "@app/services/group-project/group-project-dal";
|
||||
import { TOrgBotDALFactory } from "@app/services/org/org-bot-dal";
|
||||
import { TOrgDALFactory } from "@app/services/org/org-dal";
|
||||
import { TProjectDALFactory } from "@app/services/project/project-dal";
|
||||
import { TProjectBotDALFactory } from "@app/services/project-bot/project-bot-dal";
|
||||
import { TProjectKeyDALFactory } from "@app/services/project-key/project-key-dal";
|
||||
import { TUserDALFactory } from "@app/services/user/user-dal";
|
||||
import { normalizeUsername } from "@app/services/user/user-fns";
|
||||
import { TUserAliasDALFactory } from "@app/services/user-alias/user-alias-dal";
|
||||
@@ -23,16 +30,40 @@ import { TLicenseServiceFactory } from "../license/license-service";
|
||||
import { OrgPermissionActions, OrgPermissionSubjects } from "../permission/org-permission";
|
||||
import { TPermissionServiceFactory } from "../permission/permission-service";
|
||||
import { TLdapConfigDALFactory } from "./ldap-config-dal";
|
||||
import { TCreateLdapCfgDTO, TGetLdapCfgDTO, TLdapLoginDTO, TUpdateLdapCfgDTO } from "./ldap-config-types";
|
||||
import {
|
||||
TCreateLdapCfgDTO,
|
||||
TCreateLdapGroupMapDTO,
|
||||
TDeleteLdapGroupMapDTO,
|
||||
TGetLdapCfgDTO,
|
||||
TGetLdapGroupMapsDTO,
|
||||
TLdapLoginDTO,
|
||||
TTestLdapConnectionDTO,
|
||||
TUpdateLdapCfgDTO
|
||||
} from "./ldap-config-types";
|
||||
import { testLDAPConfig } from "./ldap-fns";
|
||||
import { TLdapGroupMapDALFactory } from "./ldap-group-map-dal";
|
||||
|
||||
type TLdapConfigServiceFactoryDep = {
|
||||
ldapConfigDAL: TLdapConfigDALFactory;
|
||||
ldapConfigDAL: Pick<TLdapConfigDALFactory, "create" | "update" | "findOne">;
|
||||
ldapGroupMapDAL: Pick<TLdapGroupMapDALFactory, "find" | "create" | "delete" | "findLdapGroupMapsByLdapConfigId">;
|
||||
orgDAL: Pick<
|
||||
TOrgDALFactory,
|
||||
"createMembership" | "updateMembershipById" | "findMembership" | "findOrgById" | "findOne" | "updateById"
|
||||
>;
|
||||
orgBotDAL: Pick<TOrgBotDALFactory, "findOne" | "create" | "transaction">;
|
||||
userDAL: Pick<TUserDALFactory, "create" | "findOne" | "transaction" | "updateById">;
|
||||
groupDAL: Pick<TGroupDALFactory, "find" | "findOne">;
|
||||
groupProjectDAL: Pick<TGroupProjectDALFactory, "find">;
|
||||
projectKeyDAL: Pick<TProjectKeyDALFactory, "find" | "findLatestProjectKey" | "insertMany" | "delete">;
|
||||
projectDAL: Pick<TProjectDALFactory, "findProjectGhostUser">;
|
||||
projectBotDAL: Pick<TProjectBotDALFactory, "findOne">;
|
||||
userGroupMembershipDAL: Pick<
|
||||
TUserGroupMembershipDALFactory,
|
||||
"find" | "transaction" | "insertMany" | "filterProjectsByUserMembership" | "delete"
|
||||
>;
|
||||
userDAL: Pick<
|
||||
TUserDALFactory,
|
||||
"create" | "findOne" | "transaction" | "updateById" | "findUserEncKeyByUserIdsBatch" | "find"
|
||||
>;
|
||||
userAliasDAL: Pick<TUserAliasDALFactory, "create" | "findOne">;
|
||||
permissionService: Pick<TPermissionServiceFactory, "getOrgPermission">;
|
||||
licenseService: Pick<TLicenseServiceFactory, "getPlan">;
|
||||
@@ -42,8 +73,15 @@ export type TLdapConfigServiceFactory = ReturnType<typeof ldapConfigServiceFacto
|
||||
|
||||
export const ldapConfigServiceFactory = ({
|
||||
ldapConfigDAL,
|
||||
ldapGroupMapDAL,
|
||||
orgDAL,
|
||||
orgBotDAL,
|
||||
groupDAL,
|
||||
groupProjectDAL,
|
||||
projectKeyDAL,
|
||||
projectDAL,
|
||||
projectBotDAL,
|
||||
userGroupMembershipDAL,
|
||||
userDAL,
|
||||
userAliasDAL,
|
||||
permissionService,
|
||||
@@ -60,6 +98,9 @@ export const ldapConfigServiceFactory = ({
|
||||
bindDN,
|
||||
bindPass,
|
||||
searchBase,
|
||||
searchFilter,
|
||||
groupSearchBase,
|
||||
groupSearchFilter,
|
||||
caCert
|
||||
}: TCreateLdapCfgDTO) => {
|
||||
const { permission } = await permissionService.getOrgPermission(actor, actorId, orgId, actorAuthMethod, actorOrgId);
|
||||
@@ -135,6 +176,9 @@ export const ldapConfigServiceFactory = ({
|
||||
bindPassIV,
|
||||
bindPassTag,
|
||||
searchBase,
|
||||
searchFilter,
|
||||
groupSearchBase,
|
||||
groupSearchFilter,
|
||||
encryptedCACert,
|
||||
caCertIV,
|
||||
caCertTag
|
||||
@@ -154,6 +198,9 @@ export const ldapConfigServiceFactory = ({
|
||||
bindDN,
|
||||
bindPass,
|
||||
searchBase,
|
||||
searchFilter,
|
||||
groupSearchBase,
|
||||
groupSearchFilter,
|
||||
caCert
|
||||
}: TUpdateLdapCfgDTO) => {
|
||||
const { permission } = await permissionService.getOrgPermission(actor, actorId, orgId, actorAuthMethod, actorOrgId);
|
||||
@@ -169,7 +216,10 @@ export const ldapConfigServiceFactory = ({
|
||||
const updateQuery: TLdapConfigsUpdate = {
|
||||
isActive,
|
||||
url,
|
||||
searchBase
|
||||
searchBase,
|
||||
searchFilter,
|
||||
groupSearchBase,
|
||||
groupSearchFilter
|
||||
};
|
||||
|
||||
const orgBot = await orgBotDAL.findOne({ orgId });
|
||||
@@ -271,6 +321,9 @@ export const ldapConfigServiceFactory = ({
|
||||
bindDN,
|
||||
bindPass,
|
||||
searchBase: ldapConfig.searchBase,
|
||||
searchFilter: ldapConfig.searchFilter,
|
||||
groupSearchBase: ldapConfig.groupSearchBase,
|
||||
groupSearchFilter: ldapConfig.groupSearchFilter,
|
||||
caCert
|
||||
};
|
||||
};
|
||||
@@ -304,8 +357,8 @@ export const ldapConfigServiceFactory = ({
|
||||
bindDN: ldapConfig.bindDN,
|
||||
bindCredentials: ldapConfig.bindPass,
|
||||
searchBase: ldapConfig.searchBase,
|
||||
searchFilter: "(uid={{username}})",
|
||||
searchAttributes: ["uid", "uidNumber", "givenName", "sn", "mail"],
|
||||
searchFilter: ldapConfig.searchFilter || "(uid={{username}})",
|
||||
// searchAttributes: ["uid", "uidNumber", "givenName", "sn", "mail"],
|
||||
...(ldapConfig.caCert !== ""
|
||||
? {
|
||||
tlsOptions: {
|
||||
@@ -320,7 +373,17 @@ export const ldapConfigServiceFactory = ({
|
||||
return { opts, ldapConfig };
|
||||
};
|
||||
|
||||
const ldapLogin = async ({ externalId, username, firstName, lastName, emails, orgId, relayState }: TLdapLoginDTO) => {
|
||||
const ldapLogin = async ({
|
||||
ldapConfigId,
|
||||
externalId,
|
||||
username,
|
||||
firstName,
|
||||
lastName,
|
||||
emails,
|
||||
groups,
|
||||
orgId,
|
||||
relayState
|
||||
}: TLdapLoginDTO) => {
|
||||
const appCfg = getConfig();
|
||||
let userAlias = await userAliasDAL.findOne({
|
||||
externalId,
|
||||
@@ -394,7 +457,84 @@ export const ldapConfigServiceFactory = ({
|
||||
});
|
||||
}
|
||||
|
||||
const user = await userDAL.findOne({ id: userAlias.userId });
|
||||
const user = await userDAL.transaction(async (tx) => {
|
||||
const newUser = await userDAL.findOne({ id: userAlias.userId }, tx);
|
||||
if (groups) {
|
||||
const ldapGroupIdsToBePartOf = (
|
||||
await ldapGroupMapDAL.find({
|
||||
ldapConfigId,
|
||||
$in: {
|
||||
ldapGroupCN: groups.map((group) => group.cn)
|
||||
}
|
||||
})
|
||||
).map((groupMap) => groupMap.groupId);
|
||||
|
||||
const groupsToBePartOf = await groupDAL.find({
|
||||
orgId,
|
||||
$in: {
|
||||
id: ldapGroupIdsToBePartOf
|
||||
}
|
||||
});
|
||||
const toBePartOfGroupIdsSet = new Set(groupsToBePartOf.map((groupToBePartOf) => groupToBePartOf.id));
|
||||
|
||||
const allLdapGroupMaps = await ldapGroupMapDAL.find({
|
||||
ldapConfigId
|
||||
});
|
||||
|
||||
const ldapGroupIdsCurrentlyPartOf = (
|
||||
await userGroupMembershipDAL.find({
|
||||
userId: newUser.id,
|
||||
$in: {
|
||||
groupId: allLdapGroupMaps.map((groupMap) => groupMap.groupId)
|
||||
}
|
||||
})
|
||||
).map((userGroupMembership) => userGroupMembership.groupId);
|
||||
|
||||
const userGroupMembershipGroupIdsSet = new Set(ldapGroupIdsCurrentlyPartOf);
|
||||
|
||||
for await (const group of groupsToBePartOf) {
|
||||
if (!userGroupMembershipGroupIdsSet.has(group.id)) {
|
||||
// add user to group that they should be part of
|
||||
await addUsersToGroupByUserIds({
|
||||
group,
|
||||
userIds: [newUser.id],
|
||||
userDAL,
|
||||
userGroupMembershipDAL,
|
||||
orgDAL,
|
||||
groupProjectDAL,
|
||||
projectKeyDAL,
|
||||
projectDAL,
|
||||
projectBotDAL,
|
||||
tx
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
const groupsCurrentlyPartOf = await groupDAL.find({
|
||||
orgId,
|
||||
$in: {
|
||||
id: ldapGroupIdsCurrentlyPartOf
|
||||
}
|
||||
});
|
||||
|
||||
for await (const group of groupsCurrentlyPartOf) {
|
||||
if (!toBePartOfGroupIdsSet.has(group.id)) {
|
||||
// remove user from group that they should no longer be part of
|
||||
await removeUsersFromGroupByUserIds({
|
||||
group,
|
||||
userIds: [newUser.id],
|
||||
userDAL,
|
||||
userGroupMembershipDAL,
|
||||
groupProjectDAL,
|
||||
projectKeyDAL,
|
||||
tx
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return newUser;
|
||||
});
|
||||
|
||||
const isUserCompleted = Boolean(user.isAccepted);
|
||||
|
||||
@@ -424,6 +564,116 @@ export const ldapConfigServiceFactory = ({
|
||||
return { isUserCompleted, providerAuthToken };
|
||||
};
|
||||
|
||||
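The group synchronization above reduces to a set difference between the LDAP-mapped groups the user should belong to and the mapped groups they currently belong to. A standalone sketch of that reconciliation (the helper name and shapes are illustrative, not part of the diff):

// Hypothetical helper mirroring the reconciliation performed inside ldapLogin.
const reconcileLdapGroups = (shouldBelongTo: string[], currentlyBelongsTo: string[]) => {
  const desired = new Set(shouldBelongTo);
  const current = new Set(currentlyBelongsTo);
  return {
    toAdd: shouldBelongTo.filter((id) => !current.has(id)), // handled via addUsersToGroupByUserIds
    toRemove: currentlyBelongsTo.filter((id) => !desired.has(id)) // handled via removeUsersFromGroupByUserIds
  };
};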
const getLdapGroupMaps = async ({
|
||||
ldapConfigId,
|
||||
actor,
|
||||
actorId,
|
||||
orgId,
|
||||
actorAuthMethod,
|
||||
actorOrgId
|
||||
}: TGetLdapGroupMapsDTO) => {
|
||||
const { permission } = await permissionService.getOrgPermission(actor, actorId, orgId, actorAuthMethod, actorOrgId);
|
||||
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Read, OrgPermissionSubjects.Ldap);
|
||||
|
||||
const ldapConfig = await ldapConfigDAL.findOne({
|
||||
id: ldapConfigId,
|
||||
orgId
|
||||
});
|
||||
|
||||
if (!ldapConfig) throw new BadRequestError({ message: "Failed to find organization LDAP data" });
|
||||
|
||||
const groupMaps = await ldapGroupMapDAL.findLdapGroupMapsByLdapConfigId(ldapConfigId);
|
||||
|
||||
return groupMaps;
|
||||
};
|
||||
|
||||
const createLdapGroupMap = async ({
|
||||
ldapConfigId,
|
||||
ldapGroupCN,
|
||||
groupSlug,
|
||||
actor,
|
||||
actorId,
|
||||
orgId,
|
||||
actorAuthMethod,
|
||||
actorOrgId
|
||||
}: TCreateLdapGroupMapDTO) => {
|
||||
const { permission } = await permissionService.getOrgPermission(actor, actorId, orgId, actorAuthMethod, actorOrgId);
|
||||
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Create, OrgPermissionSubjects.Ldap);
|
||||
|
||||
const plan = await licenseService.getPlan(orgId);
|
||||
if (!plan.ldap)
|
||||
throw new BadRequestError({
|
||||
message: "Failed to create LDAP group map due to plan restriction. Upgrade plan to create LDAP group map."
|
||||
});
|
||||
|
||||
const ldapConfig = await ldapConfigDAL.findOne({
|
||||
id: ldapConfigId,
|
||||
orgId
|
||||
});
|
||||
if (!ldapConfig) throw new BadRequestError({ message: "Failed to find organization LDAP data" });
|
||||
|
||||
const group = await groupDAL.findOne({ slug: groupSlug, orgId });
|
||||
if (!group) throw new BadRequestError({ message: "Failed to find group" });
|
||||
|
||||
const groupMap = await ldapGroupMapDAL.create({
|
||||
ldapConfigId,
|
||||
ldapGroupCN,
|
||||
groupId: group.id
|
||||
});
|
||||
|
||||
return groupMap;
|
||||
};
|
||||
|
||||
const deleteLdapGroupMap = async ({
|
||||
ldapConfigId,
|
||||
ldapGroupMapId,
|
||||
actor,
|
||||
actorId,
|
||||
orgId,
|
||||
actorAuthMethod,
|
||||
actorOrgId
|
||||
}: TDeleteLdapGroupMapDTO) => {
|
||||
const { permission } = await permissionService.getOrgPermission(actor, actorId, orgId, actorAuthMethod, actorOrgId);
|
||||
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Delete, OrgPermissionSubjects.Ldap);
|
||||
|
||||
const plan = await licenseService.getPlan(orgId);
|
||||
if (!plan.ldap)
|
||||
throw new BadRequestError({
|
||||
message: "Failed to delete LDAP group map due to plan restriction. Upgrade plan to delete LDAP group map."
|
||||
});
|
||||
|
||||
const ldapConfig = await ldapConfigDAL.findOne({
|
||||
id: ldapConfigId,
|
||||
orgId
|
||||
});
|
||||
|
||||
if (!ldapConfig) throw new BadRequestError({ message: "Failed to find organization LDAP data" });
|
||||
|
||||
const [deletedGroupMap] = await ldapGroupMapDAL.delete({
|
||||
ldapConfigId: ldapConfig.id,
|
||||
id: ldapGroupMapId
|
||||
});
|
||||
|
||||
return deletedGroupMap;
|
||||
};
|
||||
|
||||
const testLDAPConnection = async ({ actor, actorId, orgId, actorAuthMethod, actorOrgId }: TTestLdapConnectionDTO) => {
|
||||
const { permission } = await permissionService.getOrgPermission(actor, actorId, orgId, actorAuthMethod, actorOrgId);
|
||||
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Create, OrgPermissionSubjects.Ldap);
|
||||
|
||||
const plan = await licenseService.getPlan(orgId);
|
||||
if (!plan.ldap)
|
||||
throw new BadRequestError({
|
||||
message: "Failed to test LDAP connection due to plan restriction. Upgrade plan to test the LDAP connection."
|
||||
});
|
||||
|
||||
const ldapConfig = await getLdapCfg({
|
||||
orgId
|
||||
});
|
||||
|
||||
return testLDAPConfig(ldapConfig);
|
||||
};
|
||||
|
||||
return {
|
||||
createLdapCfg,
|
||||
updateLdapCfg,
|
||||
@@ -431,6 +681,10 @@ export const ldapConfigServiceFactory = ({
|
||||
getLdapCfg,
|
||||
// getLdapPassportOpts,
|
||||
ldapLogin,
|
||||
bootLdap
|
||||
bootLdap,
|
||||
getLdapGroupMaps,
|
||||
createLdapGroupMap,
|
||||
deleteLdapGroupMap,
|
||||
testLDAPConnection
|
||||
};
|
||||
};
|
||||
|
@@ -1,5 +1,18 @@
|
||||
import { TOrgPermission } from "@app/lib/types";
|
||||
|
||||
export type TLDAPConfig = {
|
||||
id: string;
|
||||
organization: string;
|
||||
isActive: boolean;
|
||||
url: string;
|
||||
bindDN: string;
|
||||
bindPass: string;
|
||||
searchBase: string;
|
||||
groupSearchBase: string;
|
||||
groupSearchFilter: string;
|
||||
caCert: string;
|
||||
};
|
||||
|
||||
export type TCreateLdapCfgDTO = {
|
||||
orgId: string;
|
||||
isActive: boolean;
|
||||
@@ -7,6 +20,9 @@ export type TCreateLdapCfgDTO = {
|
||||
bindDN: string;
|
||||
bindPass: string;
|
||||
searchBase: string;
|
||||
searchFilter: string;
|
||||
groupSearchBase: string;
|
||||
groupSearchFilter: string;
|
||||
caCert: string;
|
||||
} & TOrgPermission;
|
||||
|
||||
@@ -18,6 +34,9 @@ export type TUpdateLdapCfgDTO = {
|
||||
bindDN: string;
|
||||
bindPass: string;
|
||||
searchBase: string;
|
||||
searchFilter: string;
|
||||
groupSearchBase: string;
|
||||
groupSearchFilter: string;
|
||||
caCert: string;
|
||||
}> &
|
||||
TOrgPermission;
|
||||
@@ -27,11 +46,35 @@ export type TGetLdapCfgDTO = {
|
||||
} & TOrgPermission;
|
||||
|
||||
export type TLdapLoginDTO = {
|
||||
ldapConfigId: string;
|
||||
externalId: string;
|
||||
username: string;
|
||||
firstName: string;
|
||||
lastName: string;
|
||||
emails: string[];
|
||||
orgId: string;
|
||||
groups?: {
|
||||
dn: string;
|
||||
cn: string;
|
||||
}[];
|
||||
relayState?: string;
|
||||
};
|
||||
|
||||
export type TGetLdapGroupMapsDTO = {
|
||||
ldapConfigId: string;
|
||||
} & TOrgPermission;
|
||||
|
||||
export type TCreateLdapGroupMapDTO = {
|
||||
ldapConfigId: string;
|
||||
ldapGroupCN: string;
|
||||
groupSlug: string;
|
||||
} & TOrgPermission;
|
||||
|
||||
export type TDeleteLdapGroupMapDTO = {
|
||||
ldapConfigId: string;
|
||||
ldapGroupMapId: string;
|
||||
} & TOrgPermission;
|
||||
|
||||
export type TTestLdapConnectionDTO = {
|
||||
ldapConfigId: string;
|
||||
} & TOrgPermission;
|
||||
|
119  backend/src/ee/services/ldap-config/ldap-fns.ts  Normal file
@@ -0,0 +1,119 @@
import ldapjs from "ldapjs";

import { logger } from "@app/lib/logger";

import { TLDAPConfig } from "./ldap-config-types";

export const isValidLdapFilter = (filter: string) => {
try {
ldapjs.parseFilter(filter);
return true;
} catch (error) {
logger.error("Invalid LDAP filter");
logger.error(error);
return false;
}
};

/**
* Test the LDAP configuration by attempting to bind to the LDAP server
* @param ldapConfig - The LDAP configuration to test
* @returns {Boolean} isConnected - Whether or not the connection was successful
*/
export const testLDAPConfig = async (ldapConfig: TLDAPConfig): Promise<boolean> => {
return new Promise((resolve) => {
const ldapClient = ldapjs.createClient({
url: ldapConfig.url,
bindDN: ldapConfig.bindDN,
bindCredentials: ldapConfig.bindPass,
...(ldapConfig.caCert !== ""
? {
tlsOptions: {
ca: [ldapConfig.caCert]
}
}
: {})
});

ldapClient.on("error", (err) => {
logger.error("LDAP client error:", err);
logger.error(err);
resolve(false);
});

ldapClient.bind(ldapConfig.bindDN, ldapConfig.bindPass, (err) => {
if (err) {
logger.error("Error binding to LDAP");
logger.error(err);
ldapClient.unbind();
resolve(false);
} else {
logger.info("Successfully connected and bound to LDAP.");
ldapClient.unbind();
resolve(true);
}
});
});
};

/**
* Search for groups in the LDAP server
* @param ldapConfig - The LDAP configuration to use
* @param filter - The filter to use when searching for groups
* @param base - The base to search from
* @returns
*/
export const searchGroups = async (
ldapConfig: TLDAPConfig,
filter: string,
base: string
): Promise<{ dn: string; cn: string }[]> => {
return new Promise((resolve, reject) => {
const ldapClient = ldapjs.createClient({
url: ldapConfig.url,
bindDN: ldapConfig.bindDN,
bindCredentials: ldapConfig.bindPass,
...(ldapConfig.caCert !== ""
? {
tlsOptions: {
ca: [ldapConfig.caCert]
}
}
: {})
});

ldapClient.search(
base,
{
filter,
scope: "sub"
},
(err, res) => {
if (err) {
ldapClient.unbind();
return reject(err);
}

const groups: { dn: string; cn: string }[] = [];

res.on("searchEntry", (entry) => {
const dn = entry.dn.toString();
const regex = /cn=([^,]+)/;
const match = dn.match(regex);
// parse the cn from the dn
const cn = (match && match[1]) as string;

groups.push({ dn, cn });
});
res.on("error", (error) => {
ldapClient.unbind();
reject(error);
});
res.on("end", () => {
ldapClient.unbind();
resolve(groups);
});
}
);
});
};
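A short usage sketch for the helpers above; every connection value is a placeholder and the cast only keeps the snippet compact:

// Hedged example: validate a filter, then verify that the bind credentials work.
import { isValidLdapFilter, testLDAPConfig } from "@app/ee/services/ldap-config/ldap-fns";
import { TLDAPConfig } from "@app/ee/services/ldap-config/ldap-config-types";

const checkLdap = async (): Promise<boolean> => {
  if (!isValidLdapFilter("(uid={{username}})")) return false;
  // only the fields read by testLDAPConfig are populated here
  return testLDAPConfig({
    url: "ldap://ldap.example.com:389",
    bindDN: "cn=admin,dc=example,dc=com",
    bindPass: "changeme",
    caCert: ""
  } as TLDAPConfig);
};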
41  backend/src/ee/services/ldap-config/ldap-group-map-dal.ts  Normal file
@@ -0,0 +1,41 @@
import { TDbClient } from "@app/db";
import { TableName } from "@app/db/schemas";
import { DatabaseError } from "@app/lib/errors";
import { ormify, selectAllTableCols } from "@app/lib/knex";

export type TLdapGroupMapDALFactory = ReturnType<typeof ldapGroupMapDALFactory>;

export const ldapGroupMapDALFactory = (db: TDbClient) => {
const ldapGroupMapOrm = ormify(db, TableName.LdapGroupMap);

const findLdapGroupMapsByLdapConfigId = async (ldapConfigId: string) => {
try {
const docs = await db(TableName.LdapGroupMap)
.where(`${TableName.LdapGroupMap}.ldapConfigId`, ldapConfigId)
.join(TableName.Groups, `${TableName.LdapGroupMap}.groupId`, `${TableName.Groups}.id`)
.select(selectAllTableCols(TableName.LdapGroupMap))
.select(
db.ref("id").withSchema(TableName.Groups).as("groupId"),
db.ref("name").withSchema(TableName.Groups).as("groupName"),
db.ref("slug").withSchema(TableName.Groups).as("groupSlug")
);

return docs.map((doc) => {
return {
id: doc.id,
ldapConfigId: doc.ldapConfigId,
ldapGroupCN: doc.ldapGroupCN,
group: {
id: doc.groupId,
name: doc.groupName,
slug: doc.groupSlug
}
};
});
} catch (error) {
throw new DatabaseError({ error, name: "findGroupMaps" });
}
};

return { ...ldapGroupMapOrm, findLdapGroupMapsByLdapConfigId };
};
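And a hedged sketch of how the new DAL is consumed, mirroring getLdapGroupMaps in the service above; the db client is assumed to be in scope:

// Illustrative only: construct the factory once, then fetch joined group data for a config.
const ldapGroupMapDAL = ldapGroupMapDALFactory(db); // db: TDbClient

const listMaps = async (ldapConfigId: string) => {
  const maps = await ldapGroupMapDAL.findLdapGroupMapsByLdapConfigId(ldapConfigId);
  // each entry: { id, ldapConfigId, ldapGroupCN, group: { id, name, slug } }
  return maps;
};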
@@ -340,11 +340,12 @@ export const samlConfigServiceFactory = ({
|
||||
orgId,
|
||||
inviteEmail: email,
|
||||
role: OrgMembershipRole.Member,
|
||||
status: OrgMembershipStatus.Accepted
|
||||
status: user.isAccepted ? OrgMembershipStatus.Accepted : OrgMembershipStatus.Invited // if user is fully completed, then set status to accepted, otherwise set it to invited so we can update it later
|
||||
},
|
||||
tx
|
||||
);
|
||||
} else if (orgMembership.status === OrgMembershipStatus.Invited) {
|
||||
// Only update the membership to Accepted if the user account is already completed.
|
||||
} else if (orgMembership.status === OrgMembershipStatus.Invited && user.isAccepted) {
|
||||
await orgDAL.updateMembershipById(
|
||||
orgMembership.id,
|
||||
{
|
||||
|
@@ -4,15 +4,20 @@ import jwt from "jsonwebtoken";
|
||||
|
||||
import { OrgMembershipRole, OrgMembershipStatus, TableName, TGroups } from "@app/db/schemas";
|
||||
import { TGroupDALFactory } from "@app/ee/services/group/group-dal";
|
||||
import { addUsersToGroupByUserIds, removeUsersFromGroupByUserIds } from "@app/ee/services/group/group-fns";
|
||||
import { TUserGroupMembershipDALFactory } from "@app/ee/services/group/user-group-membership-dal";
|
||||
import { TScimDALFactory } from "@app/ee/services/scim/scim-dal";
|
||||
import { getConfig } from "@app/lib/config/env";
|
||||
import { BadRequestError, ScimRequestError, UnauthorizedError } from "@app/lib/errors";
|
||||
import { alphaNumericNanoId } from "@app/lib/nanoid";
|
||||
import { TOrgPermission } from "@app/lib/types";
|
||||
import { AuthMethod, AuthTokenType } from "@app/services/auth/auth-type";
|
||||
import { TGroupProjectDALFactory } from "@app/services/group-project/group-project-dal";
|
||||
import { TOrgDALFactory } from "@app/services/org/org-dal";
|
||||
import { deleteOrgMembership } from "@app/services/org/org-fns";
|
||||
import { TProjectDALFactory } from "@app/services/project/project-dal";
|
||||
import { TProjectBotDALFactory } from "@app/services/project-bot/project-bot-dal";
|
||||
import { TProjectKeyDALFactory } from "@app/services/project-key/project-key-dal";
|
||||
import { TProjectMembershipDALFactory } from "@app/services/project-membership/project-membership-dal";
|
||||
import { SmtpTemplates, TSmtpService } from "@app/services/smtp/smtp-service";
|
||||
import { TUserDALFactory } from "@app/services/user/user-dal";
|
||||
@@ -42,14 +47,21 @@ import {
|
||||
|
||||
type TScimServiceFactoryDep = {
|
||||
scimDAL: Pick<TScimDALFactory, "create" | "find" | "findById" | "deleteById">;
|
||||
userDAL: Pick<TUserDALFactory, "findOne" | "create" | "transaction">;
|
||||
userDAL: Pick<TUserDALFactory, "find" | "findOne" | "create" | "transaction" | "findUserEncKeyByUserIdsBatch">;
|
||||
orgDAL: Pick<
|
||||
TOrgDALFactory,
|
||||
"createMembership" | "findById" | "findMembership" | "deleteMembershipById" | "transaction"
|
||||
>;
|
||||
projectDAL: Pick<TProjectDALFactory, "find">;
|
||||
projectDAL: Pick<TProjectDALFactory, "find" | "findProjectGhostUser">;
|
||||
projectMembershipDAL: Pick<TProjectMembershipDALFactory, "find" | "delete">;
|
||||
groupDAL: Pick<TGroupDALFactory, "create" | "findOne" | "findAllGroupMembers" | "update" | "delete" | "findGroups">;
|
||||
groupDAL: Pick<
|
||||
TGroupDALFactory,
|
||||
"create" | "findOne" | "findAllGroupMembers" | "update" | "delete" | "findGroups" | "transaction"
|
||||
>;
|
||||
groupProjectDAL: Pick<TGroupProjectDALFactory, "find">;
|
||||
userGroupMembershipDAL: TUserGroupMembershipDALFactory; // TODO: Pick
|
||||
projectKeyDAL: Pick<TProjectKeyDALFactory, "find" | "findLatestProjectKey" | "insertMany" | "delete">;
|
||||
projectBotDAL: Pick<TProjectBotDALFactory, "findOne">;
|
||||
licenseService: Pick<TLicenseServiceFactory, "getPlan">;
|
||||
permissionService: Pick<TPermissionServiceFactory, "getOrgPermission">;
|
||||
smtpService: TSmtpService;
|
||||
@@ -65,6 +77,10 @@ export const scimServiceFactory = ({
|
||||
projectDAL,
|
||||
projectMembershipDAL,
|
||||
groupDAL,
|
||||
groupProjectDAL,
|
||||
userGroupMembershipDAL,
|
||||
projectKeyDAL,
|
||||
projectBotDAL,
|
||||
permissionService,
|
||||
smtpService
|
||||
}: TScimServiceFactoryDep) => {
|
||||
@@ -473,7 +489,19 @@ export const scimServiceFactory = ({
|
||||
};
|
||||
|
||||
const listScimGroups = async ({ orgId, offset, limit }: TListScimGroupsDTO) => {
|
||||
const plan = await licenseService.getPlan(orgId);
|
||||
if (!plan.groups)
|
||||
throw new BadRequestError({
|
||||
message: "Failed to list SCIM groups due to plan restriction. Upgrade plan to list SCIM groups."
|
||||
});
|
||||
|
||||
const org = await orgDAL.findById(orgId);
|
||||
if (!org) {
|
||||
throw new ScimRequestError({
|
||||
detail: "Organization Not Found",
|
||||
status: 404
|
||||
});
|
||||
}
|
||||
|
||||
if (!org.scimEnabled)
|
||||
throw new ScimRequestError({
|
||||
@@ -500,30 +528,76 @@ export const scimServiceFactory = ({
|
||||
});
|
||||
};
|
||||
|
||||
const createScimGroup = async ({ displayName, orgId }: TCreateScimGroupDTO) => {
|
||||
const createScimGroup = async ({ displayName, orgId, members }: TCreateScimGroupDTO) => {
|
||||
const plan = await licenseService.getPlan(orgId);
|
||||
if (!plan.groups)
|
||||
throw new BadRequestError({
|
||||
message: "Failed to create a SCIM group due to plan restriction. Upgrade plan to create a SCIM group."
|
||||
});
|
||||
|
||||
const org = await orgDAL.findById(orgId);
|
||||
|
||||
if (!org) {
|
||||
throw new ScimRequestError({
|
||||
detail: "Organization Not Found",
|
||||
status: 404
|
||||
});
|
||||
}
|
||||
|
||||
if (!org.scimEnabled)
|
||||
throw new ScimRequestError({
|
||||
detail: "SCIM is disabled for the organization",
|
||||
status: 403
|
||||
});
|
||||
|
||||
const group = await groupDAL.create({
|
||||
name: displayName,
|
||||
slug: slugify(`${displayName}-${alphaNumericNanoId(4)}`),
|
||||
orgId,
|
||||
role: OrgMembershipRole.NoAccess
|
||||
const newGroup = await groupDAL.transaction(async (tx) => {
|
||||
const group = await groupDAL.create(
|
||||
{
|
||||
name: displayName,
|
||||
slug: slugify(`${displayName}-${alphaNumericNanoId(4)}`),
|
||||
orgId,
|
||||
role: OrgMembershipRole.NoAccess
|
||||
},
|
||||
tx
|
||||
);
|
||||
|
||||
if (members && members.length) {
|
||||
const newMembers = await addUsersToGroupByUserIds({
|
||||
group,
|
||||
userIds: members.map((member) => member.value),
|
||||
userDAL,
|
||||
userGroupMembershipDAL,
|
||||
orgDAL,
|
||||
groupProjectDAL,
|
||||
projectKeyDAL,
|
||||
projectDAL,
|
||||
projectBotDAL,
|
||||
tx
|
||||
});
|
||||
|
||||
return { group, newMembers };
|
||||
}
|
||||
|
||||
return { group, newMembers: [] };
|
||||
});
|
||||
|
||||
return buildScimGroup({
|
||||
groupId: group.id,
|
||||
name: group.name,
|
||||
members: []
|
||||
groupId: newGroup.group.id,
|
||||
name: newGroup.group.name,
|
||||
members: newGroup.newMembers.map((member) => ({
|
||||
value: member.id,
|
||||
display: `${member.firstName} ${member.lastName}`
|
||||
}))
|
||||
});
|
||||
};
|
||||
|
||||
const getScimGroup = async ({ groupId, orgId }: TGetScimGroupDTO) => {
|
||||
const plan = await licenseService.getPlan(orgId);
|
||||
if (!plan.groups)
|
||||
throw new BadRequestError({
|
||||
message: "Failed to get SCIM group due to plan restriction. Upgrade plan to get SCIM group."
|
||||
});
|
||||
|
||||
const group = await groupDAL.findOne({
|
||||
id: groupId,
|
||||
orgId
|
||||
@@ -553,35 +627,123 @@ export const scimServiceFactory = ({
|
||||
});
|
||||
};
|
||||
|
||||
const updateScimGroupNamePut = async ({ groupId, orgId, displayName }: TUpdateScimGroupNamePutDTO) => {
|
||||
const [group] = await groupDAL.update(
|
||||
{
|
||||
id: groupId,
|
||||
orgId
|
||||
},
|
||||
{
|
||||
name: displayName
|
||||
}
|
||||
);
|
||||
const updateScimGroupNamePut = async ({ groupId, orgId, displayName, members }: TUpdateScimGroupNamePutDTO) => {
|
||||
const plan = await licenseService.getPlan(orgId);
|
||||
if (!plan.groups)
|
||||
throw new BadRequestError({
|
||||
message: "Failed to update SCIM group due to plan restriction. Upgrade plan to update SCIM group."
|
||||
});
|
||||
|
||||
if (!group) {
|
||||
const org = await orgDAL.findById(orgId);
|
||||
if (!org) {
|
||||
throw new ScimRequestError({
|
||||
detail: "Group Not Found",
|
||||
detail: "Organization Not Found",
|
||||
status: 404
|
||||
});
|
||||
}
|
||||
|
||||
if (!org.scimEnabled)
|
||||
throw new ScimRequestError({
|
||||
detail: "SCIM is disabled for the organization",
|
||||
status: 403
|
||||
});
|
||||
|
||||
const updatedGroup = await groupDAL.transaction(async (tx) => {
|
||||
const [group] = await groupDAL.update(
|
||||
{
|
||||
id: groupId,
|
||||
orgId
|
||||
},
|
||||
{
|
||||
name: displayName
|
||||
}
|
||||
);
|
||||
|
||||
if (!group) {
|
||||
throw new ScimRequestError({
|
||||
detail: "Group Not Found",
|
||||
status: 404
|
||||
});
|
||||
}
|
||||
|
||||
if (members) {
|
||||
const membersIdsSet = new Set(members.map((member) => member.value));
|
||||
|
||||
const directMemberUserIds = (
|
||||
await userGroupMembershipDAL.find({
|
||||
groupId: group.id,
|
||||
isPending: false
|
||||
})
|
||||
).map((membership) => membership.userId);
|
||||
|
||||
const pendingGroupAdditionsUserIds = (
|
||||
await userGroupMembershipDAL.find({
|
||||
groupId: group.id,
|
||||
isPending: true
|
||||
})
|
||||
).map((pendingGroupAddition) => pendingGroupAddition.userId);
|
||||
|
||||
const allMembersUserIds = directMemberUserIds.concat(pendingGroupAdditionsUserIds);
|
||||
const allMembersUserIdsSet = new Set(allMembersUserIds);
|
||||
|
||||
const toAddUserIds = members.filter((member) => !allMembersUserIdsSet.has(member.value));
|
||||
const toRemoveUserIds = allMembersUserIds.filter((userId) => !membersIdsSet.has(userId));
|
||||
|
||||
if (toAddUserIds.length) {
|
||||
await addUsersToGroupByUserIds({
|
||||
group,
|
||||
userIds: toAddUserIds.map((member) => member.value),
|
||||
userDAL,
|
||||
userGroupMembershipDAL,
|
||||
orgDAL,
|
||||
groupProjectDAL,
|
||||
projectKeyDAL,
|
||||
projectDAL,
|
||||
projectBotDAL,
|
||||
tx
|
||||
});
|
||||
}
|
||||
|
||||
if (toRemoveUserIds.length) {
|
||||
await removeUsersFromGroupByUserIds({
|
||||
group,
|
||||
userIds: toRemoveUserIds,
|
||||
userDAL,
|
||||
userGroupMembershipDAL,
|
||||
groupProjectDAL,
|
||||
projectKeyDAL,
|
||||
tx
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
return group;
|
||||
});
|
||||
|
||||
return buildScimGroup({
|
||||
groupId: group.id,
|
||||
name: group.name,
|
||||
members: []
|
||||
groupId: updatedGroup.id,
|
||||
name: updatedGroup.name,
|
||||
members
|
||||
});
|
||||
};
|
||||
|
||||
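For reference, the PUT handler above expects a members array shaped like TUpdateScimGroupNamePutDTO; a hypothetical payload (the ids are placeholders) looks like this:

// Hypothetical request body for the SCIM group PUT flow.
const putGroupBody: Pick<TUpdateScimGroupNamePutDTO, "displayName" | "members"> = {
  displayName: "Engineering",
  members: [
    { value: "user-id-1", display: "Jane Doe" },
    { value: "user-id-2", display: "John Smith" }
  ]
};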
// TODO: add support for add/remove op
|
||||
const updateScimGroupNamePatch = async ({ groupId, orgId, operations }: TUpdateScimGroupNamePatchDTO) => {
|
||||
const plan = await licenseService.getPlan(orgId);
|
||||
if (!plan.groups)
|
||||
throw new BadRequestError({
|
||||
message: "Failed to update SCIM group due to plan restriction. Upgrade plan to update SCIM group."
|
||||
});
|
||||
|
||||
const org = await orgDAL.findById(orgId);
|
||||
|
||||
if (!org) {
|
||||
throw new ScimRequestError({
|
||||
detail: "Organization Not Found",
|
||||
status: 404
|
||||
});
|
||||
}
|
||||
|
||||
if (!org.scimEnabled)
|
||||
throw new ScimRequestError({
|
||||
detail: "SCIM is disabled for the organization",
|
||||
@@ -635,6 +797,26 @@ export const scimServiceFactory = ({
|
||||
};
|
||||
|
||||
const deleteScimGroup = async ({ groupId, orgId }: TDeleteScimGroupDTO) => {
|
||||
const plan = await licenseService.getPlan(orgId);
|
||||
if (!plan.groups)
|
||||
throw new BadRequestError({
|
||||
message: "Failed to delete SCIM group due to plan restriction. Upgrade plan to delete SCIM group."
|
||||
});
|
||||
|
||||
const org = await orgDAL.findById(orgId);
|
||||
if (!org) {
|
||||
throw new ScimRequestError({
|
||||
detail: "Organization Not Found",
|
||||
status: 404
|
||||
});
|
||||
}
|
||||
|
||||
if (!org.scimEnabled)
|
||||
throw new ScimRequestError({
|
||||
detail: "SCIM is disabled for the organization",
|
||||
status: 403
|
||||
});
|
||||
|
||||
const [group] = await groupDAL.delete({
|
||||
id: groupId,
|
||||
orgId
|
||||
|
@@ -81,6 +81,11 @@ export type TListScimGroups = {
|
||||
export type TCreateScimGroupDTO = {
|
||||
displayName: string;
|
||||
orgId: string;
|
||||
members?: {
|
||||
// TODO: account for members with value and display (is this optional?)
|
||||
value: string;
|
||||
display: string;
|
||||
}[];
|
||||
};
|
||||
|
||||
export type TGetScimGroupDTO = {
|
||||
@@ -92,6 +97,10 @@ export type TUpdateScimGroupNamePutDTO = {
|
||||
groupId: string;
|
||||
orgId: string;
|
||||
displayName: string;
|
||||
members: {
|
||||
value: string;
|
||||
display: string;
|
||||
}[];
|
||||
};
|
||||
|
||||
export type TUpdateScimGroupNamePatchDTO = {
|
||||
|
@@ -495,7 +495,11 @@ export const secretApprovalRequestServiceFactory = ({
|
||||
await projectDAL.checkProjectUpgradeStatus(projectId);
|
||||
|
||||
const folder = await folderDAL.findBySecretPath(projectId, environment, secretPath);
|
||||
if (!folder) throw new BadRequestError({ message: "Folder not found", name: "GenSecretApproval" });
|
||||
if (!folder)
|
||||
throw new BadRequestError({
|
||||
message: "Folder not found for the given environment slug & secret path",
|
||||
name: "GenSecretApproval"
|
||||
});
|
||||
const folderId = folder.id;
|
||||
|
||||
const blindIndexCfg = await secretBlindIndexDAL.findOne({ projectId });
|
||||
|
@@ -90,16 +90,20 @@ export const secretRotationDbFn = async ({
const appCfg = getConfig();

const ssl = ca ? { rejectUnauthorized: false, ca } : undefined;
const isCloud = Boolean(appCfg.LICENSE_SERVER_KEY); // quick and dirty way to check if its cloud or not
const dbHost = appCfg.DB_HOST || getDbConnectionHost(appCfg.DB_CONNECTION_URI);

if (
isCloud &&
// internal ips
(host === "host.docker.internal" || host.match(/^10\.\d+\.\d+\.\d+/) || host.match(/^192\.168\.\d+\.\d+/))
)
throw new Error("Invalid db host");
if (
host === "localhost" ||
host === "127.0.0.1" ||
// database infisical uses
dbHost === host ||
// internal ips
host === "host.docker.internal" ||
host.match(/^10\.\d+\.\d+\.\d+/) ||
host.match(/^192\.168\.\d+\.\d+/)
dbHost === host
)
throw new Error("Invalid db host");
@@ -1,4 +1,4 @@
|
||||
import { ForbiddenError } from "@casl/ability";
|
||||
import { ForbiddenError, subject } from "@casl/ability";
|
||||
|
||||
import { TableName, TSecretTagJunctionInsert } from "@app/db/schemas";
|
||||
import { BadRequestError, InternalServerError } from "@app/lib/errors";
|
||||
@@ -23,6 +23,7 @@ import {
|
||||
import { TSnapshotDALFactory } from "./snapshot-dal";
|
||||
import { TSnapshotFolderDALFactory } from "./snapshot-folder-dal";
|
||||
import { TSnapshotSecretDALFactory } from "./snapshot-secret-dal";
|
||||
import { getFullFolderPath } from "./snapshot-service-fns";
|
||||
|
||||
type TSecretSnapshotServiceFactoryDep = {
|
||||
snapshotDAL: TSnapshotDALFactory;
|
||||
@@ -33,7 +34,7 @@ type TSecretSnapshotServiceFactoryDep = {
|
||||
secretDAL: Pick<TSecretDALFactory, "delete" | "insertMany">;
|
||||
secretTagDAL: Pick<TSecretTagDALFactory, "saveTagsToSecret">;
|
||||
secretVersionTagDAL: Pick<TSecretVersionTagDALFactory, "insertMany">;
|
||||
folderDAL: Pick<TSecretFolderDALFactory, "findById" | "findBySecretPath" | "delete" | "insertMany">;
|
||||
folderDAL: Pick<TSecretFolderDALFactory, "findById" | "findBySecretPath" | "delete" | "insertMany" | "find">;
|
||||
permissionService: Pick<TPermissionServiceFactory, "getProjectPermission">;
|
||||
licenseService: Pick<TLicenseServiceFactory, "isValidLicense">;
|
||||
};
|
||||
@@ -71,6 +72,12 @@ export const secretSnapshotServiceFactory = ({
|
||||
);
|
||||
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.SecretRollback);
|
||||
|
||||
// We need to check if the user has access to the secrets in the folder. If we don't do this, a user could theoretically access snapshot secret values even if they don't have read access to the secrets in the folder.
|
||||
ForbiddenError.from(permission).throwUnlessCan(
|
||||
ProjectPermissionActions.Read,
|
||||
subject(ProjectPermissionSub.Secrets, { environment, secretPath: path })
|
||||
);
|
||||
|
||||
const folder = await folderDAL.findBySecretPath(projectId, environment, path);
|
||||
if (!folder) throw new BadRequestError({ message: "Folder not found" });
|
||||
|
||||
@@ -98,6 +105,12 @@ export const secretSnapshotServiceFactory = ({
|
||||
);
|
||||
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.SecretRollback);
|
||||
|
||||
// We need to check if the user has access to the secrets in the folder. If we don't do this, a user could theoretically access snapshot secret values even if they don't have read access to the secrets in the folder.
|
||||
ForbiddenError.from(permission).throwUnlessCan(
|
||||
ProjectPermissionActions.Read,
|
||||
subject(ProjectPermissionSub.Secrets, { environment, secretPath: path })
|
||||
);
|
||||
|
||||
const folder = await folderDAL.findBySecretPath(projectId, environment, path);
|
||||
if (!folder) throw new BadRequestError({ message: "Folder not found" });
|
||||
|
||||
@@ -116,6 +129,19 @@ export const secretSnapshotServiceFactory = ({
|
||||
actorOrgId
|
||||
);
|
||||
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.SecretRollback);
|
||||
|
||||
const fullFolderPath = await getFullFolderPath({
|
||||
folderDAL,
|
||||
folderId: snapshot.folderId,
|
||||
envId: snapshot.environment.id
|
||||
});
|
||||
|
||||
// We need to check if the user has access to the secrets in the folder. If we don't do this, a user could theoretically access snapshot secret values even if they don't have read access to the secrets in the folder.
|
||||
ForbiddenError.from(permission).throwUnlessCan(
|
||||
ProjectPermissionActions.Read,
|
||||
subject(ProjectPermissionSub.Secrets, { environment: snapshot.environment.slug, secretPath: fullFolderPath })
|
||||
);
|
||||
|
||||
return snapshot;
|
||||
};
|
||||
|
||||
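Taken together, the snapshot read paths above now require two checks before any snapshot secrets are returned. A hypothetical consolidation of that pair, assuming the casl ability returned by getProjectPermission and the permission enums already imported by the service:

// Illustrative wrapper over the two checks now required to read snapshot data.
import { AnyAbility, ForbiddenError, subject } from "@casl/ability";

const assertCanReadSnapshotSecrets = (permission: AnyAbility, environment: string, secretPath: string) => {
  ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.SecretRollback);
  ForbiddenError.from(permission).throwUnlessCan(
    ProjectPermissionActions.Read,
    subject(ProjectPermissionSub.Secrets, { environment, secretPath })
  );
};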
|
@@ -101,6 +101,7 @@ export const snapshotDALFactory = (db: TDbClient) => {
|
||||
key: "snapshotId",
|
||||
parentMapper: ({
|
||||
snapshotId: id,
|
||||
folderId,
|
||||
projectId,
|
||||
envId,
|
||||
envSlug,
|
||||
@@ -109,6 +110,7 @@ export const snapshotDALFactory = (db: TDbClient) => {
|
||||
snapshotUpdatedAt: updatedAt
|
||||
}) => ({
|
||||
id,
|
||||
folderId,
|
||||
projectId,
|
||||
createdAt,
|
||||
updatedAt,
|
||||
|
@@ -0,0 +1,28 @@
import { TSecretFolderDALFactory } from "@app/services/secret-folder/secret-folder-dal";

type GetFullFolderPath = {
folderDAL: Pick<TSecretFolderDALFactory, "findById" | "find">; // Added findAllInEnv
folderId: string;
envId: string;
};

export const getFullFolderPath = async ({ folderDAL, folderId, envId }: GetFullFolderPath): Promise<string> => {
// Helper function to remove duplicate slashes
const removeDuplicateSlashes = (path: string) => path.replace(/\/{2,}/g, "/");

// Fetch all folders at once based on environment ID to avoid multiple queries
const folders = await folderDAL.find({ envId });
const folderMap = new Map(folders.map((folder) => [folder.id, folder]));

const buildPath = (currFolderId: string): string => {
const folder = folderMap.get(currFolderId);
if (!folder) return "";
const folderPathSegment = !folder.parentId && folder.name === "root" ? "/" : `/${folder.name}`;
if (folder.parentId) {
return removeDuplicateSlashes(`${buildPath(folder.parentId)}${folderPathSegment}`);
}
return removeDuplicateSlashes(folderPathSegment);
};

return buildPath(folderId);
};
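A small worked example of the helper, using an in-memory stand-in for the folder DAL; the rows and ids are made up:

// Sketch: resolving "/app/db" from three folder rows shaped like the DAL output.
const demoFolders = [
  { id: "root-id", name: "root", parentId: null },
  { id: "app-id", name: "app", parentId: "root-id" },
  { id: "db-id", name: "db", parentId: "app-id" }
];

const demo = async () =>
  getFullFolderPath({
    // structurally compatible mock of the "find" / "findById" picks used above
    folderDAL: {
      find: async () => demoFolders,
      findById: async (id: string) => demoFolders.find((f) => f.id === id)
    } as unknown as Pick<TSecretFolderDALFactory, "find" | "findById">,
    folderId: "db-id",
    envId: "env-id"
  }); // resolves to "/app/db"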
@@ -282,6 +282,7 @@ export const RAW_SECRETS = {
|
||||
},
|
||||
CREATE: {
|
||||
secretName: "The name of the secret to create.",
|
||||
projectSlug: "The slug of the project to create the secret in.",
|
||||
environment: "The slug of the environment to create the secret in.",
|
||||
secretComment: "Attach a comment to the secret.",
|
||||
secretPath: "The path to create the secret in.",
|
||||
@@ -301,11 +302,13 @@ export const RAW_SECRETS = {
|
||||
},
|
||||
UPDATE: {
|
||||
secretName: "The name of the secret to update.",
|
||||
secretComment: "Update comment to the secret.",
|
||||
environment: "The slug of the environment where the secret is located.",
|
||||
secretPath: "The path of the secret to update",
|
||||
secretValue: "The new value of the secret.",
|
||||
skipMultilineEncoding: "Skip multiline encoding for the secret value.",
|
||||
type: "The type of the secret to update.",
|
||||
projectSlug: "The slug of the project to update the secret in.",
|
||||
workspaceId: "The ID of the project to update the secret in."
|
||||
},
|
||||
DELETE: {
|
||||
@@ -313,6 +316,7 @@ export const RAW_SECRETS = {
|
||||
environment: "The slug of the environment where the secret is located.",
|
||||
secretPath: "The path of the secret.",
|
||||
type: "The type of the secret to delete.",
|
||||
projectSlug: "The slug of the project to delete the secret in.",
|
||||
workspaceId: "The ID of the project where the secret is located."
|
||||
}
|
||||
} as const;
|
||||
@@ -585,11 +589,13 @@ export const INTEGRATION = {
|
||||
region: "AWS region to sync secrets to.",
|
||||
scope: "Scope of the provider. Used by Github, Qovery",
|
||||
metadata: {
|
||||
secretPrefix: "The prefix for the saved secret. Used by GCP",
|
||||
secretSuffix: "The suffix for the saved secret. Used by GCP",
|
||||
initialSyncBehavoir: "Type of syncing behavoir with the integration",
|
||||
shouldAutoRedeploy: "Used by Render to trigger auto deploy",
|
||||
secretGCPLabel: "The label for the GCP secrets"
|
||||
secretPrefix: "The prefix for the saved secret. Used by GCP.",
|
||||
secretSuffix: "The suffix for the saved secret. Used by GCP.",
|
||||
initialSyncBehavoir: "Type of syncing behavoir with the integration.",
|
||||
shouldAutoRedeploy: "Used by Render to trigger auto deploy.",
|
||||
secretGCPLabel: "The label for GCP secrets.",
|
||||
secretAWSTag: "The tags for AWS secrets.",
|
||||
kmsKeyId: "The ID of the encryption key from AWS KMS."
|
||||
}
|
||||
},
|
||||
UPDATE: {
|
||||
|
@@ -30,12 +30,6 @@ export const fastifySwagger = fp(async (fastify) => {
|
||||
scheme: "bearer",
|
||||
bearerFormat: "JWT",
|
||||
description: "An access token in Infisical"
|
||||
},
|
||||
apiKeyAuth: {
|
||||
type: "apiKey",
|
||||
in: "header",
|
||||
name: "X-API-Key",
|
||||
description: "An API Key in Infisical"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@@ -18,6 +18,7 @@ import { identityProjectAdditionalPrivilegeDALFactory } from "@app/ee/services/i
|
||||
import { identityProjectAdditionalPrivilegeServiceFactory } from "@app/ee/services/identity-project-additional-privilege/identity-project-additional-privilege-service";
|
||||
import { ldapConfigDALFactory } from "@app/ee/services/ldap-config/ldap-config-dal";
|
||||
import { ldapConfigServiceFactory } from "@app/ee/services/ldap-config/ldap-config-service";
|
||||
import { ldapGroupMapDALFactory } from "@app/ee/services/ldap-config/ldap-group-map-dal";
|
||||
import { licenseDALFactory } from "@app/ee/services/license/license-dal";
|
||||
import { licenseServiceFactory } from "@app/ee/services/license/license-service";
|
||||
import { permissionDALFactory } from "@app/ee/services/permission/permission-dal";
|
||||
@@ -200,6 +201,7 @@ export const registerRoutes = async (
|
||||
const samlConfigDAL = samlConfigDALFactory(db);
|
||||
const scimDAL = scimDALFactory(db);
|
||||
const ldapConfigDAL = ldapConfigDALFactory(db);
|
||||
const ldapGroupMapDAL = ldapGroupMapDALFactory(db);
|
||||
const sapApproverDAL = secretApprovalPolicyApproverDALFactory(db);
|
||||
const secretApprovalPolicyDAL = secretApprovalPolicyDALFactory(db);
|
||||
const secretApprovalRequestDAL = secretApprovalRequestDALFactory(db);
|
||||
@@ -290,14 +292,25 @@ export const registerRoutes = async (
|
||||
projectDAL,
|
||||
projectMembershipDAL,
|
||||
groupDAL,
|
||||
groupProjectDAL,
|
||||
userGroupMembershipDAL,
|
||||
projectKeyDAL,
|
||||
projectBotDAL,
|
||||
permissionService,
|
||||
smtpService
|
||||
});
|
||||
|
||||
const ldapService = ldapConfigServiceFactory({
|
||||
ldapConfigDAL,
|
||||
ldapGroupMapDAL,
|
||||
orgDAL,
|
||||
orgBotDAL,
|
||||
groupDAL,
|
||||
groupProjectDAL,
|
||||
projectKeyDAL,
|
||||
projectDAL,
|
||||
projectBotDAL,
|
||||
userGroupMembershipDAL,
|
||||
userDAL,
|
||||
userAliasDAL,
|
||||
permissionService,
|
||||
@@ -344,6 +357,11 @@ export const registerRoutes = async (
|
||||
smtpService,
|
||||
authDAL,
|
||||
userDAL,
|
||||
userGroupMembershipDAL,
|
||||
projectKeyDAL,
|
||||
projectDAL,
|
||||
projectBotDAL,
|
||||
groupProjectDAL,
|
||||
orgDAL,
|
||||
orgService,
|
||||
licenseService
|
||||
|
@@ -511,6 +511,39 @@ export const registerIntegrationAuthRouter = async (server: FastifyZodProvider)
|
||||
}
|
||||
});
|
||||
|
||||
  server.route({
    method: "GET",
    url: "/:integrationAuthId/aws-secrets-manager/kms-keys",
    config: {
      rateLimit: readLimit
    },
    onRequest: verifyAuth([AuthMode.JWT]),
    schema: {
      params: z.object({
        integrationAuthId: z.string().trim()
      }),
      querystring: z.object({
        region: z.string().trim()
      }),
      response: {
        200: z.object({
          kmsKeys: z.object({ id: z.string(), alias: z.string() }).array()
        })
      }
    },
    handler: async (req) => {
      const kmsKeys = await server.services.integrationAuth.getAwsKmsKeys({
        actorId: req.permission.id,
        actor: req.permission.type,
        actorAuthMethod: req.permission.authMethod,
        actorOrgId: req.permission.orgId,
        id: req.params.integrationAuthId,
        region: req.query.region
      });
      return { kmsKeys };
    }
  });
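A sketch of calling the new KMS-keys endpoint from a client; the /api/v1/integration-auth prefix, base URL, token, and ID below are assumptions for illustration. The route is JWT-only and returns the id/alias pairs of keys usable for the AWS Secrets Manager integration:

// Hypothetical client call against the route registered above.
const integrationAuthId = "<integration-auth-id>"; // placeholder
const res = await fetch(
  `${baseUrl}/api/v1/integration-auth/${integrationAuthId}/aws-secrets-manager/kms-keys?region=us-east-1`,
  { headers: { Authorization: `Bearer ${jwtToken}` } }
);
const { kmsKeys } = (await res.json()) as { kmsKeys: { id: string; alias: string }[] };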
|
||||
|
||||
server.route({
|
||||
method: "GET",
|
||||
url: "/:integrationAuthId/qovery/projects",
|
||||
|
@@ -56,9 +56,19 @@ export const registerIntegrationRouter = async (server: FastifyZodProvider) => {
|
||||
labelValue: z.string()
|
||||
})
|
||||
.optional()
|
||||
.describe(INTEGRATION.CREATE.metadata.secretGCPLabel)
|
||||
.describe(INTEGRATION.CREATE.metadata.secretGCPLabel),
|
||||
secretAWSTag: z
|
||||
.array(
|
||||
z.object({
|
||||
key: z.string(),
|
||||
value: z.string()
|
||||
})
|
||||
)
|
||||
.optional()
|
||||
.describe(INTEGRATION.CREATE.metadata.secretAWSTag),
|
||||
kmsKeyId: z.string().optional().describe(INTEGRATION.CREATE.metadata.kmsKeyId)
|
||||
})
|
||||
.optional()
|
||||
.default({})
|
||||
}),
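With the schema extension above, a create-integration request for AWS Secrets Manager can now pass tags and a customer-managed KMS key in metadata; the values below are illustrative only:

// Illustrative metadata payload matching the extended schema (the key ID is a made-up placeholder).
const metadata = {
  secretAWSTag: [
    { key: "team", value: "platform" },
    { key: "managed-by", value: "infisical" }
  ],
  kmsKeyId: "1234abcd-12ab-34cd-56ef-1234567890ab"
};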
|
||||
response: {
|
||||
200: z.object({
|
||||
|
@@ -1,3 +1,4 @@
|
||||
import slugify from "@sindresorhus/slugify";
|
||||
import { z } from "zod";
|
||||
|
||||
import { ProjectEnvironmentsSchema } from "@app/db/schemas";
|
||||
@@ -18,8 +19,7 @@ export const registerProjectEnvRouter = async (server: FastifyZodProvider) => {
|
||||
description: "Create environment",
|
||||
security: [
|
||||
{
|
||||
bearerAuth: [],
|
||||
apiKeyAuth: []
|
||||
bearerAuth: []
|
||||
}
|
||||
],
|
||||
params: z.object({
|
||||
@@ -27,7 +27,13 @@ export const registerProjectEnvRouter = async (server: FastifyZodProvider) => {
|
||||
}),
|
||||
body: z.object({
|
||||
name: z.string().trim().describe(ENVIRONMENTS.CREATE.name),
|
||||
slug: z.string().trim().describe(ENVIRONMENTS.CREATE.slug)
|
||||
slug: z
|
||||
.string()
|
||||
.trim()
|
||||
.refine((v) => slugify(v) === v, {
|
||||
message: "Slug must be a valid slug"
|
||||
})
|
||||
.describe(ENVIRONMENTS.CREATE.slug)
|
||||
}),
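The added refine only accepts values that are already in slug form; assuming @sindresorhus/slugify semantics, the behavior looks like this:

// slugify("Staging East") === "staging-east", so a pre-slugified value passes and a raw label does not.
const envSlug = z
  .string()
  .trim()
  .refine((v) => slugify(v) === v, { message: "Slug must be a valid slug" });

envSlug.safeParse("staging-east"); // { success: true, data: "staging-east" }
envSlug.safeParse("Staging East"); // fails with "Slug must be a valid slug"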
|
||||
response: {
|
||||
200: z.object({
|
||||
@@ -77,8 +83,7 @@ export const registerProjectEnvRouter = async (server: FastifyZodProvider) => {
|
||||
description: "Update environment",
|
||||
security: [
|
||||
{
|
||||
bearerAuth: [],
|
||||
apiKeyAuth: []
|
||||
bearerAuth: []
|
||||
}
|
||||
],
|
||||
params: z.object({
|
||||
@@ -86,7 +91,14 @@ export const registerProjectEnvRouter = async (server: FastifyZodProvider) => {
|
||||
id: z.string().trim().describe(ENVIRONMENTS.UPDATE.id)
|
||||
}),
|
||||
body: z.object({
|
||||
slug: z.string().trim().optional().describe(ENVIRONMENTS.UPDATE.slug),
|
||||
slug: z
|
||||
.string()
|
||||
.trim()
|
||||
.optional()
|
||||
.refine((v) => !v || slugify(v) === v, {
|
||||
message: "Slug must be a valid slug"
|
||||
})
|
||||
.describe(ENVIRONMENTS.UPDATE.slug),
|
||||
name: z.string().trim().optional().describe(ENVIRONMENTS.UPDATE.name),
|
||||
position: z.number().optional().describe(ENVIRONMENTS.UPDATE.position)
|
||||
}),
|
||||
@@ -144,8 +156,7 @@ export const registerProjectEnvRouter = async (server: FastifyZodProvider) => {
|
||||
description: "Delete environment",
|
||||
security: [
|
||||
{
|
||||
bearerAuth: [],
|
||||
apiKeyAuth: []
|
||||
bearerAuth: []
|
||||
}
|
||||
],
|
||||
params: z.object({
|
||||
|
@@ -26,8 +26,7 @@ export const registerProjectMembershipRouter = async (server: FastifyZodProvider
|
||||
description: "Return project user memberships",
|
||||
security: [
|
||||
{
|
||||
bearerAuth: [],
|
||||
apiKeyAuth: []
|
||||
bearerAuth: []
|
||||
}
|
||||
],
|
||||
params: z.object({
|
||||
@@ -139,8 +138,7 @@ export const registerProjectMembershipRouter = async (server: FastifyZodProvider
|
||||
description: "Update project user membership",
|
||||
security: [
|
||||
{
|
||||
bearerAuth: [],
|
||||
apiKeyAuth: []
|
||||
bearerAuth: []
|
||||
}
|
||||
],
|
||||
params: z.object({
|
||||
@@ -213,8 +211,7 @@ export const registerProjectMembershipRouter = async (server: FastifyZodProvider
|
||||
description: "Delete project user membership",
|
||||
security: [
|
||||
{
|
||||
bearerAuth: [],
|
||||
apiKeyAuth: []
|
||||
bearerAuth: []
|
||||
}
|
||||
],
|
||||
params: z.object({
|
||||
|
@@ -138,6 +138,12 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
|
||||
rateLimit: readLimit
|
||||
},
|
||||
schema: {
|
||||
description: "Get project",
|
||||
security: [
|
||||
{
|
||||
bearerAuth: []
|
||||
}
|
||||
],
|
||||
params: z.object({
|
||||
workspaceId: z.string().trim().describe(PROJECTS.GET.workspaceId)
|
||||
}),
|
||||
@@ -170,6 +176,12 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
|
||||
rateLimit: writeLimit
|
||||
},
|
||||
schema: {
|
||||
description: "Delete project",
|
||||
security: [
|
||||
{
|
||||
bearerAuth: []
|
||||
}
|
||||
],
|
||||
params: z.object({
|
||||
workspaceId: z.string().trim().describe(PROJECTS.DELETE.workspaceId)
|
||||
}),
|
||||
@@ -239,6 +251,12 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
|
||||
rateLimit: writeLimit
|
||||
},
|
||||
schema: {
|
||||
description: "Update project",
|
||||
security: [
|
||||
{
|
||||
bearerAuth: []
|
||||
}
|
||||
],
|
||||
params: z.object({
|
||||
workspaceId: z.string().trim().describe(PROJECTS.UPDATE.workspaceId)
|
||||
}),
|
||||
|
@@ -19,8 +19,7 @@ export const registerSecretFolderRouter = async (server: FastifyZodProvider) =>
|
||||
description: "Create folders",
|
||||
security: [
|
||||
{
|
||||
bearerAuth: [],
|
||||
apiKeyAuth: []
|
||||
bearerAuth: []
|
||||
}
|
||||
],
|
||||
body: z.object({
|
||||
@@ -76,8 +75,7 @@ export const registerSecretFolderRouter = async (server: FastifyZodProvider) =>
|
||||
description: "Update folder",
|
||||
security: [
|
||||
{
|
||||
bearerAuth: [],
|
||||
apiKeyAuth: []
|
||||
bearerAuth: []
|
||||
}
|
||||
],
|
||||
params: z.object({
|
||||
@@ -140,8 +138,7 @@ export const registerSecretFolderRouter = async (server: FastifyZodProvider) =>
|
||||
description: "Delete a folder",
|
||||
security: [
|
||||
{
|
||||
bearerAuth: [],
|
||||
apiKeyAuth: []
|
||||
bearerAuth: []
|
||||
}
|
||||
],
|
||||
params: z.object({
|
||||
@@ -200,8 +197,7 @@ export const registerSecretFolderRouter = async (server: FastifyZodProvider) =>
|
||||
description: "Get folders",
|
||||
security: [
|
||||
{
|
||||
bearerAuth: [],
|
||||
apiKeyAuth: []
|
||||
bearerAuth: []
|
||||
}
|
||||
],
|
||||
querystring: z.object({
|
||||
|
@@ -19,8 +19,7 @@ export const registerSecretImportRouter = async (server: FastifyZodProvider) =>
|
||||
description: "Create secret imports",
|
||||
security: [
|
||||
{
|
||||
bearerAuth: [],
|
||||
apiKeyAuth: []
|
||||
bearerAuth: []
|
||||
}
|
||||
],
|
||||
body: z.object({
|
||||
@@ -84,8 +83,7 @@ export const registerSecretImportRouter = async (server: FastifyZodProvider) =>
|
||||
description: "Update secret imports",
|
||||
security: [
|
||||
{
|
||||
bearerAuth: [],
|
||||
apiKeyAuth: []
|
||||
bearerAuth: []
|
||||
}
|
||||
],
|
||||
params: z.object({
|
||||
@@ -159,8 +157,7 @@ export const registerSecretImportRouter = async (server: FastifyZodProvider) =>
|
||||
description: "Delete secret imports",
|
||||
security: [
|
||||
{
|
||||
bearerAuth: [],
|
||||
apiKeyAuth: []
|
||||
bearerAuth: []
|
||||
}
|
||||
],
|
||||
params: z.object({
|
||||
@@ -223,8 +220,7 @@ export const registerSecretImportRouter = async (server: FastifyZodProvider) =>
|
||||
description: "Get secret imports",
|
||||
security: [
|
||||
{
|
||||
bearerAuth: [],
|
||||
apiKeyAuth: []
|
||||
bearerAuth: []
|
||||
}
|
||||
],
|
||||
querystring: z.object({
|
||||
|
@@ -18,8 +18,7 @@ export const registerIdentityOrgRouter = async (server: FastifyZodProvider) => {
|
||||
description: "Return organization identity memberships",
|
||||
security: [
|
||||
{
|
||||
bearerAuth: [],
|
||||
apiKeyAuth: []
|
||||
bearerAuth: []
|
||||
}
|
||||
],
|
||||
params: z.object({
|
||||
|
@@ -17,8 +17,7 @@ export const registerOrgRouter = async (server: FastifyZodProvider) => {
|
||||
description: "Return organization user memberships",
|
||||
security: [
|
||||
{
|
||||
bearerAuth: [],
|
||||
apiKeyAuth: []
|
||||
bearerAuth: []
|
||||
}
|
||||
],
|
||||
params: z.object({
|
||||
@@ -45,7 +44,6 @@ export const registerOrgRouter = async (server: FastifyZodProvider) => {
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.API_KEY, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
handler: async (req) => {
|
||||
if (req.auth.actor !== ActorType.USER) return;
|
||||
|
||||
const users = await server.services.org.findAllOrgMembers(
|
||||
req.permission.id,
|
||||
req.params.organizationId,
|
||||
@@ -66,8 +64,7 @@ export const registerOrgRouter = async (server: FastifyZodProvider) => {
|
||||
description: "Return projects in organization that user is part of",
|
||||
security: [
|
||||
{
|
||||
bearerAuth: [],
|
||||
apiKeyAuth: []
|
||||
bearerAuth: []
|
||||
}
|
||||
],
|
||||
params: z.object({
|
||||
@@ -115,8 +112,7 @@ export const registerOrgRouter = async (server: FastifyZodProvider) => {
|
||||
description: "Update organization user memberships",
|
||||
security: [
|
||||
{
|
||||
bearerAuth: [],
|
||||
apiKeyAuth: []
|
||||
bearerAuth: []
|
||||
}
|
||||
],
|
||||
params: z.object({
|
||||
@@ -158,8 +154,7 @@ export const registerOrgRouter = async (server: FastifyZodProvider) => {
|
||||
description: "Delete organization user memberships",
|
||||
security: [
|
||||
{
|
||||
bearerAuth: [],
|
||||
apiKeyAuth: []
|
||||
bearerAuth: []
|
||||
}
|
||||
],
|
||||
params: z.object({
|
||||
|
@@ -15,6 +15,12 @@ export const registerProjectMembershipRouter = async (server: FastifyZodProvider
|
||||
rateLimit: writeLimit
|
||||
},
|
||||
schema: {
|
||||
description: "Invite members to project",
|
||||
security: [
|
||||
{
|
||||
bearerAuth: []
|
||||
}
|
||||
],
|
||||
params: z.object({
|
||||
projectId: z.string().describe(PROJECTS.INVITE_MEMBER.projectId)
|
||||
}),
|
||||
@@ -64,10 +70,15 @@ export const registerProjectMembershipRouter = async (server: FastifyZodProvider
|
||||
rateLimit: writeLimit
|
||||
},
|
||||
schema: {
|
||||
description: "Remove members from project",
|
||||
security: [
|
||||
{
|
||||
bearerAuth: []
|
||||
}
|
||||
],
|
||||
params: z.object({
|
||||
projectId: z.string().describe(PROJECTS.REMOVE_MEMBER.projectId)
|
||||
}),
|
||||
|
||||
body: z.object({
|
||||
emails: z.string().email().array().default([]).describe(PROJECTS.REMOVE_MEMBER.emails),
|
||||
usernames: z.string().array().default([]).describe(PROJECTS.REMOVE_MEMBER.usernames)
|
||||
|
@@ -36,11 +36,6 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
|
||||
},
|
||||
schema: {
|
||||
description: "Return encrypted project key",
|
||||
security: [
|
||||
{
|
||||
apiKeyAuth: []
|
||||
}
|
||||
],
|
||||
params: z.object({
|
||||
workspaceId: z.string().trim().describe(PROJECTS.GET_KEY.workspaceId)
|
||||
}),
|
||||
@@ -149,6 +144,12 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
|
||||
rateLimit: creationLimit
|
||||
},
|
||||
schema: {
|
||||
description: "Create a new project",
|
||||
security: [
|
||||
{
|
||||
bearerAuth: []
|
||||
}
|
||||
],
|
||||
body: z.object({
|
||||
projectName: z.string().trim().describe(PROJECTS.CREATE.projectName),
|
||||
slug: z
|
||||
@@ -200,6 +201,12 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
|
||||
rateLimit: writeLimit
|
||||
},
|
||||
schema: {
|
||||
description: "Delete project",
|
||||
security: [
|
||||
{
|
||||
bearerAuth: []
|
||||
}
|
||||
],
|
||||
params: z.object({
|
||||
slug: slugSchema.describe("The slug of the project to delete.")
|
||||
}),
|
||||
|
@@ -85,11 +85,6 @@ export const registerUserRouter = async (server: FastifyZodProvider) => {
|
||||
},
|
||||
schema: {
|
||||
description: "Return organizations that current user is part of",
|
||||
security: [
|
||||
{
|
||||
apiKeyAuth: []
|
||||
}
|
||||
],
|
||||
response: {
|
||||
200: z.object({
|
||||
organizations: OrganizationsSchema.array()
|
||||
@@ -217,11 +212,6 @@ export const registerUserRouter = async (server: FastifyZodProvider) => {
|
||||
},
|
||||
schema: {
|
||||
description: "Retrieve the current user on the request",
|
||||
security: [
|
||||
{
|
||||
apiKeyAuth: []
|
||||
}
|
||||
],
|
||||
response: {
|
||||
200: z.object({
|
||||
user: UsersSchema.merge(UserEncryptionKeysSchema.omit({ verifier: true }))
|
||||
|
@@ -158,8 +158,7 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
|
||||
description: "List secrets",
|
||||
security: [
|
||||
{
|
||||
bearerAuth: [],
|
||||
apiKeyAuth: []
|
||||
bearerAuth: []
|
||||
}
|
||||
],
|
||||
querystring: z.object({
|
||||
@@ -280,8 +279,7 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
|
||||
description: "Get a secret by name",
|
||||
security: [
|
||||
{
|
||||
bearerAuth: [],
|
||||
apiKeyAuth: []
|
||||
bearerAuth: []
|
||||
}
|
||||
],
|
||||
params: z.object({
|
||||
@@ -375,8 +373,7 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
|
||||
description: "Create secret",
|
||||
security: [
|
||||
{
|
||||
bearerAuth: [],
|
||||
apiKeyAuth: []
|
||||
bearerAuth: []
|
||||
}
|
||||
],
|
||||
params: z.object({
|
||||
@@ -464,8 +461,7 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
|
||||
description: "Update secret",
|
||||
security: [
|
||||
{
|
||||
bearerAuth: [],
|
||||
apiKeyAuth: []
|
||||
bearerAuth: []
|
||||
}
|
||||
],
|
||||
params: z.object({
|
||||
@@ -550,8 +546,7 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
|
||||
description: "Delete secret",
|
||||
security: [
|
||||
{
|
||||
bearerAuth: [],
|
||||
apiKeyAuth: []
|
||||
bearerAuth: []
|
||||
}
|
||||
],
|
||||
params: z.object({
|
||||
@@ -1661,4 +1656,263 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
|
||||
return { secrets };
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "POST",
|
||||
url: "/batch/raw",
|
||||
config: {
|
||||
rateLimit: secretsLimit
|
||||
},
|
||||
schema: {
|
||||
description: "Create many secrets",
|
||||
security: [
|
||||
{
|
||||
bearerAuth: []
|
||||
}
|
||||
],
|
||||
body: z.object({
|
||||
projectSlug: z.string().trim().describe(RAW_SECRETS.CREATE.projectSlug),
|
||||
environment: z.string().trim().describe(RAW_SECRETS.CREATE.environment),
|
||||
secretPath: z
|
||||
.string()
|
||||
.trim()
|
||||
.default("/")
|
||||
.transform(removeTrailingSlash)
|
||||
.describe(RAW_SECRETS.CREATE.secretPath),
|
||||
secrets: z
|
||||
.object({
|
||||
secretKey: z.string().trim().describe(RAW_SECRETS.CREATE.secretName),
|
||||
secretValue: z
|
||||
.string()
|
||||
.transform((val) => (val.at(-1) === "\n" ? `${val.trim()}\n` : val.trim()))
|
||||
.describe(RAW_SECRETS.CREATE.secretValue),
|
||||
secretComment: z.string().trim().optional().default("").describe(RAW_SECRETS.CREATE.secretComment),
|
||||
skipMultilineEncoding: z.boolean().optional().describe(RAW_SECRETS.CREATE.skipMultilineEncoding)
|
||||
})
|
||||
.array()
|
||||
.min(1)
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
secrets: secretRawSchema.array()
|
||||
})
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.API_KEY, AuthMode.SERVICE_TOKEN, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
handler: async (req) => {
|
||||
const { environment, projectSlug, secretPath, secrets: inputSecrets } = req.body;
|
||||
|
||||
const secrets = await server.services.secret.createManySecretsRaw({
|
||||
actorId: req.permission.id,
|
||||
actor: req.permission.type,
|
||||
actorAuthMethod: req.permission.authMethod,
|
||||
actorOrgId: req.permission.orgId,
|
||||
secretPath,
|
||||
environment,
|
||||
projectSlug,
|
||||
secrets: inputSecrets
|
||||
});
|
||||
|
||||
await server.services.auditLog.createAuditLog({
|
||||
projectId: secrets[0].workspace,
|
||||
...req.auditLogInfo,
|
||||
event: {
|
||||
type: EventType.CREATE_SECRETS,
|
||||
metadata: {
|
||||
environment: req.body.environment,
|
||||
secretPath: req.body.secretPath,
|
||||
secrets: secrets.map((secret, i) => ({
|
||||
secretId: secret.id,
|
||||
secretKey: inputSecrets[i].secretKey,
|
||||
secretVersion: secret.version
|
||||
}))
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
await server.services.telemetry.sendPostHogEvents({
|
||||
event: PostHogEventTypes.SecretCreated,
|
||||
distinctId: getTelemetryDistinctId(req),
|
||||
properties: {
|
||||
numberOfSecrets: secrets.length,
|
||||
workspaceId: secrets[0].workspace,
|
||||
environment: req.body.environment,
|
||||
secretPath: req.body.secretPath,
|
||||
channel: getUserAgentType(req.headers["user-agent"]),
|
||||
...req.auditLogInfo
|
||||
}
|
||||
});
|
||||
return { secrets };
|
||||
}
|
||||
});
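A sketch of a request body for the new bulk-create route (mounted under the raw secrets router; the URL prefix, project slug, and values below are assumptions). Every secret in the batch is created in the same project, environment, and path:

// Illustrative payload for POST <prefix>/batch/raw.
const body = {
  projectSlug: "my-project",
  environment: "dev",
  secretPath: "/backend",
  secrets: [
    { secretKey: "DB_HOST", secretValue: "localhost", secretComment: "local dev" },
    { secretKey: "DB_PASSWORD", secretValue: "changeme", skipMultilineEncoding: false }
  ]
};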
|
||||
|
||||
server.route({
|
||||
method: "PATCH",
|
||||
url: "/batch/raw",
|
||||
config: {
|
||||
rateLimit: secretsLimit
|
||||
},
|
||||
schema: {
|
||||
description: "Update many secrets",
|
||||
security: [
|
||||
{
|
||||
bearerAuth: []
|
||||
}
|
||||
],
|
||||
body: z.object({
|
||||
projectSlug: z.string().trim().describe(RAW_SECRETS.UPDATE.projectSlug),
|
||||
environment: z.string().trim().describe(RAW_SECRETS.UPDATE.environment),
|
||||
secretPath: z
|
||||
.string()
|
||||
.trim()
|
||||
.default("/")
|
||||
.transform(removeTrailingSlash)
|
||||
.describe(RAW_SECRETS.UPDATE.secretPath),
|
||||
secrets: z
|
||||
.object({
|
||||
secretKey: z.string().trim().describe(RAW_SECRETS.UPDATE.secretName),
|
||||
secretValue: z
|
||||
.string()
|
||||
.transform((val) => (val.at(-1) === "\n" ? `${val.trim()}\n` : val.trim()))
|
||||
.describe(RAW_SECRETS.UPDATE.secretValue),
|
||||
secretComment: z.string().trim().optional().describe(RAW_SECRETS.UPDATE.secretComment),
|
||||
skipMultilineEncoding: z.boolean().optional().describe(RAW_SECRETS.UPDATE.skipMultilineEncoding)
|
||||
})
|
||||
.array()
|
||||
.min(1)
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
secrets: secretRawSchema.array()
|
||||
})
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.API_KEY, AuthMode.SERVICE_TOKEN, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
handler: async (req) => {
|
||||
const { environment, projectSlug, secretPath, secrets: inputSecrets } = req.body;
|
||||
const secrets = await server.services.secret.updateManySecretsRaw({
|
||||
actorId: req.permission.id,
|
||||
actor: req.permission.type,
|
||||
actorAuthMethod: req.permission.authMethod,
|
||||
actorOrgId: req.permission.orgId,
|
||||
secretPath,
|
||||
environment,
|
||||
projectSlug,
|
||||
secrets: inputSecrets
|
||||
});
|
||||
|
||||
await server.services.auditLog.createAuditLog({
|
||||
projectId: secrets[0].workspace,
|
||||
...req.auditLogInfo,
|
||||
event: {
|
||||
type: EventType.UPDATE_SECRETS,
|
||||
metadata: {
|
||||
environment: req.body.environment,
|
||||
secretPath: req.body.secretPath,
|
||||
secrets: secrets.map((secret, i) => ({
|
||||
secretId: secret.id,
|
||||
secretKey: inputSecrets[i].secretKey,
|
||||
secretVersion: secret.version
|
||||
}))
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
await server.services.telemetry.sendPostHogEvents({
|
||||
event: PostHogEventTypes.SecretUpdated,
|
||||
distinctId: getTelemetryDistinctId(req),
|
||||
properties: {
|
||||
numberOfSecrets: secrets.length,
|
||||
workspaceId: secrets[0].workspace,
|
||||
environment: req.body.environment,
|
||||
secretPath: req.body.secretPath,
|
||||
channel: getUserAgentType(req.headers["user-agent"]),
|
||||
...req.auditLogInfo
|
||||
}
|
||||
});
|
||||
return { secrets };
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "DELETE",
|
||||
url: "/batch/raw",
|
||||
config: {
|
||||
rateLimit: secretsLimit
|
||||
},
|
||||
schema: {
|
||||
description: "Delete many secrets",
|
||||
security: [
|
||||
{
|
||||
bearerAuth: []
|
||||
}
|
||||
],
|
||||
body: z.object({
|
||||
projectSlug: z.string().trim().describe(RAW_SECRETS.DELETE.projectSlug),
|
||||
environment: z.string().trim().describe(RAW_SECRETS.DELETE.environment),
|
||||
secretPath: z
|
||||
.string()
|
||||
.trim()
|
||||
.default("/")
|
||||
.transform(removeTrailingSlash)
|
||||
.describe(RAW_SECRETS.DELETE.secretPath),
|
||||
secrets: z
|
||||
.object({
|
||||
secretKey: z.string().trim().describe(RAW_SECRETS.DELETE.secretName)
|
||||
})
|
||||
.array()
|
||||
.min(1)
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
secrets: secretRawSchema.array()
|
||||
})
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.API_KEY, AuthMode.SERVICE_TOKEN, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
handler: async (req) => {
|
||||
const { environment, projectSlug, secretPath, secrets: inputSecrets } = req.body;
|
||||
const secrets = await server.services.secret.deleteManySecretsRaw({
|
||||
actorId: req.permission.id,
|
||||
actor: req.permission.type,
|
||||
actorAuthMethod: req.permission.authMethod,
|
||||
actorOrgId: req.permission.orgId,
|
||||
environment,
|
||||
projectSlug,
|
||||
secretPath,
|
||||
secrets: inputSecrets
|
||||
});
|
||||
|
||||
await server.services.auditLog.createAuditLog({
|
||||
projectId: secrets[0].workspace,
|
||||
...req.auditLogInfo,
|
||||
event: {
|
||||
type: EventType.DELETE_SECRETS,
|
||||
metadata: {
|
||||
environment: req.body.environment,
|
||||
secretPath: req.body.secretPath,
|
||||
secrets: secrets.map((secret, i) => ({
|
||||
secretId: secret.id,
|
||||
secretKey: inputSecrets[i].secretKey,
|
||||
secretVersion: secret.version
|
||||
}))
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
await server.services.telemetry.sendPostHogEvents({
|
||||
event: PostHogEventTypes.SecretDeleted,
|
||||
distinctId: getTelemetryDistinctId(req),
|
||||
properties: {
|
||||
numberOfSecrets: secrets.length,
|
||||
workspaceId: secrets[0].workspace,
|
||||
environment: req.body.environment,
|
||||
secretPath: req.body.secretPath,
|
||||
channel: getUserAgentType(req.headers["user-agent"]),
|
||||
...req.auditLogInfo
|
||||
}
|
||||
});
|
||||
return { secrets };
|
||||
}
|
||||
});
|
||||
};
|
||||
|
@@ -191,7 +191,7 @@ export const authLoginServiceFactory = ({
|
||||
const decodedProviderToken = validateProviderAuthToken(providerAuthToken, email);
|
||||
|
||||
authMethod = decodedProviderToken.authMethod;
|
||||
if (isAuthMethodSaml(authMethod) && decodedProviderToken.orgId) {
|
||||
if ((isAuthMethodSaml(authMethod) || authMethod === AuthMethod.LDAP) && decodedProviderToken.orgId) {
|
||||
organizationId = decodedProviderToken.orgId;
|
||||
}
|
||||
}
|
||||
|
@@ -1,10 +1,17 @@
|
||||
import jwt from "jsonwebtoken";
|
||||
|
||||
import { OrgMembershipStatus } from "@app/db/schemas";
|
||||
import { convertPendingGroupAdditionsToGroupMemberships } from "@app/ee/services/group/group-fns";
|
||||
import { TUserGroupMembershipDALFactory } from "@app/ee/services/group/user-group-membership-dal";
|
||||
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
|
||||
import { isAuthMethodSaml } from "@app/ee/services/permission/permission-fns";
|
||||
import { getConfig } from "@app/lib/config/env";
|
||||
import { BadRequestError } from "@app/lib/errors";
|
||||
import { isDisposableEmail } from "@app/lib/validator";
|
||||
import { TGroupProjectDALFactory } from "@app/services/group-project/group-project-dal";
|
||||
import { TProjectDALFactory } from "@app/services/project/project-dal";
|
||||
import { TProjectBotDALFactory } from "@app/services/project-bot/project-bot-dal";
|
||||
import { TProjectKeyDALFactory } from "@app/services/project-key/project-key-dal";
|
||||
|
||||
import { TAuthTokenServiceFactory } from "../auth-token/auth-token-service";
|
||||
import { TokenType } from "../auth-token/auth-token-types";
|
||||
@@ -20,6 +27,14 @@ import { AuthMethod, AuthTokenType } from "./auth-type";
|
||||
type TAuthSignupDep = {
|
||||
authDAL: TAuthDALFactory;
|
||||
userDAL: TUserDALFactory;
|
||||
userGroupMembershipDAL: Pick<
|
||||
TUserGroupMembershipDALFactory,
|
||||
"find" | "transaction" | "insertMany" | "deletePendingUserGroupMembershipsByUserIds"
|
||||
>;
|
||||
projectKeyDAL: Pick<TProjectKeyDALFactory, "find" | "findLatestProjectKey" | "insertMany">;
|
||||
projectDAL: Pick<TProjectDALFactory, "findProjectGhostUser">;
|
||||
projectBotDAL: Pick<TProjectBotDALFactory, "findOne">;
|
||||
groupProjectDAL: Pick<TGroupProjectDALFactory, "find">;
|
||||
orgService: Pick<TOrgServiceFactory, "createOrganization">;
|
||||
orgDAL: TOrgDALFactory;
|
||||
tokenService: TAuthTokenServiceFactory;
|
||||
@@ -31,6 +46,11 @@ export type TAuthSignupFactory = ReturnType<typeof authSignupServiceFactory>;
|
||||
export const authSignupServiceFactory = ({
|
||||
authDAL,
|
||||
userDAL,
|
||||
userGroupMembershipDAL,
|
||||
projectKeyDAL,
|
||||
projectDAL,
|
||||
projectBotDAL,
|
||||
groupProjectDAL,
|
||||
tokenService,
|
||||
smtpService,
|
||||
orgService,
|
||||
@@ -120,9 +140,11 @@ export const authSignupServiceFactory = ({
|
||||
throw new Error("Failed to complete account for complete user");
|
||||
}
|
||||
|
||||
let organizationId;
|
||||
let organizationId: string | null = null;
|
||||
let authMethod: AuthMethod | null = null;
|
||||
if (providerAuthToken) {
|
||||
const { orgId } = validateProviderAuthToken(providerAuthToken, user.username);
|
||||
const { orgId, authMethod: userAuthMethod } = validateProviderAuthToken(providerAuthToken, user.username);
|
||||
authMethod = userAuthMethod;
|
||||
organizationId = orgId;
|
||||
} else {
|
||||
validateSignUpAuthorization(authorization, user.id);
|
||||
@@ -146,6 +168,26 @@ export const authSignupServiceFactory = ({
|
||||
},
|
||||
tx
|
||||
);
|
||||
// If it's SAML Auth and the organization ID is present, we should check if the user has a pending invite for this org, and accept it
|
||||
if (isAuthMethodSaml(authMethod) && organizationId) {
|
||||
const [pendingOrgMembership] = await orgDAL.findMembership({
|
||||
inviteEmail: email,
|
||||
userId: user.id,
|
||||
status: OrgMembershipStatus.Invited,
|
||||
orgId: organizationId
|
||||
});
|
||||
|
||||
if (pendingOrgMembership) {
|
||||
await orgDAL.updateMembershipById(
|
||||
pendingOrgMembership.id,
|
||||
{
|
||||
status: OrgMembershipStatus.Accepted
|
||||
},
|
||||
tx
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
return { info: us, key: userEncKey };
|
||||
});
|
||||
|
||||
@@ -168,6 +210,16 @@ export const authSignupServiceFactory = ({
|
||||
const uniqueOrgId = [...new Set(updatedMembersips.map(({ orgId }) => orgId))];
|
||||
await Promise.allSettled(uniqueOrgId.map((orgId) => licenseService.updateSubscriptionOrgMemberCount(orgId)));
|
||||
|
||||
await convertPendingGroupAdditionsToGroupMemberships({
|
||||
userIds: [user.id],
|
||||
userDAL,
|
||||
userGroupMembershipDAL,
|
||||
groupProjectDAL,
|
||||
projectKeyDAL,
|
||||
projectDAL,
|
||||
projectBotDAL
|
||||
});
|
||||
|
||||
const tokenSession = await tokenService.getUserTokenSession({
|
||||
userAgent,
|
||||
ip,
|
||||
@@ -270,6 +322,17 @@ export const authSignupServiceFactory = ({
|
||||
const uniqueOrgId = [...new Set(updatedMembersips.map(({ orgId }) => orgId))];
|
||||
await Promise.allSettled(uniqueOrgId.map((orgId) => licenseService.updateSubscriptionOrgMemberCount(orgId)));
|
||||
|
||||
await convertPendingGroupAdditionsToGroupMemberships({
|
||||
userIds: [user.id],
|
||||
userDAL,
|
||||
userGroupMembershipDAL,
|
||||
groupProjectDAL,
|
||||
projectKeyDAL,
|
||||
projectDAL,
|
||||
projectBotDAL,
|
||||
tx
|
||||
});
|
||||
|
||||
return { info: us, key: userEncKey };
|
||||
});
|
||||
|
||||
|
@@ -32,7 +32,7 @@ type TGroupProjectServiceFactoryDep = {
|
||||
TGroupProjectMembershipRoleDALFactory,
|
||||
"create" | "transaction" | "insertMany" | "delete"
|
||||
>;
|
||||
userGroupMembershipDAL: TUserGroupMembershipDALFactory;
|
||||
userGroupMembershipDAL: Pick<TUserGroupMembershipDALFactory, "findGroupMembersNotInProject">;
|
||||
projectDAL: Pick<TProjectDALFactory, "findOne" | "findProjectGhostUser">;
|
||||
projectKeyDAL: Pick<TProjectKeyDALFactory, "findLatestProjectKey" | "delete" | "insertMany" | "transaction">;
|
||||
projectRoleDAL: Pick<TProjectRoleDALFactory, "find">;
|
||||
@@ -116,68 +116,69 @@ export const groupProjectServiceFactory = ({
|
||||
},
|
||||
tx
|
||||
);
|
||||
|
||||
// share project key with users in group that have not
|
||||
// individually been added to the project and that are not part of
|
||||
// other groups that are in the project
|
||||
const groupMembers = await userGroupMembershipDAL.findGroupMembersNotInProject(group.id, project.id, tx);
|
||||
|
||||
if (groupMembers.length) {
|
||||
const ghostUser = await projectDAL.findProjectGhostUser(project.id, tx);
|
||||
|
||||
if (!ghostUser) {
|
||||
throw new BadRequestError({
|
||||
message: "Failed to find sudo user"
|
||||
});
|
||||
}
|
||||
|
||||
const ghostUserLatestKey = await projectKeyDAL.findLatestProjectKey(ghostUser.id, project.id, tx);
|
||||
|
||||
if (!ghostUserLatestKey) {
|
||||
throw new BadRequestError({
|
||||
message: "Failed to find sudo user latest key"
|
||||
});
|
||||
}
|
||||
|
||||
const bot = await projectBotDAL.findOne({ projectId: project.id }, tx);
|
||||
|
||||
if (!bot) {
|
||||
throw new BadRequestError({
|
||||
message: "Failed to find bot"
|
||||
});
|
||||
}
|
||||
|
||||
const botPrivateKey = infisicalSymmetricDecrypt({
|
||||
keyEncoding: bot.keyEncoding as SecretKeyEncoding,
|
||||
iv: bot.iv,
|
||||
tag: bot.tag,
|
||||
ciphertext: bot.encryptedPrivateKey
|
||||
});
|
||||
|
||||
const plaintextProjectKey = decryptAsymmetric({
|
||||
ciphertext: ghostUserLatestKey.encryptedKey,
|
||||
nonce: ghostUserLatestKey.nonce,
|
||||
publicKey: ghostUserLatestKey.sender.publicKey,
|
||||
privateKey: botPrivateKey
|
||||
});
|
||||
|
||||
const projectKeyData = groupMembers.map(({ user: { publicKey, id } }) => {
|
||||
const { ciphertext: encryptedKey, nonce } = encryptAsymmetric(plaintextProjectKey, publicKey, botPrivateKey);
|
||||
|
||||
return {
|
||||
encryptedKey,
|
||||
nonce,
|
||||
senderId: ghostUser.id,
|
||||
receiverId: id,
|
||||
projectId: project.id
|
||||
};
|
||||
});
|
||||
|
||||
await projectKeyDAL.insertMany(projectKeyData, tx);
|
||||
}
|
||||
|
||||
return groupProjectMembership;
|
||||
});
|
||||
|
||||
// share project key with users in group that have not
|
||||
// individually been added to the project and that are not part of
|
||||
// other groups that are in the project
|
||||
const groupMembers = await userGroupMembershipDAL.findGroupMembersNotInProject(group.id, project.id);
|
||||
|
||||
if (groupMembers.length) {
|
||||
const ghostUser = await projectDAL.findProjectGhostUser(project.id);
|
||||
|
||||
if (!ghostUser) {
|
||||
throw new BadRequestError({
|
||||
message: "Failed to find sudo user"
|
||||
});
|
||||
}
|
||||
|
||||
const ghostUserLatestKey = await projectKeyDAL.findLatestProjectKey(ghostUser.id, project.id);
|
||||
|
||||
if (!ghostUserLatestKey) {
|
||||
throw new BadRequestError({
|
||||
message: "Failed to find sudo user latest key"
|
||||
});
|
||||
}
|
||||
|
||||
const bot = await projectBotDAL.findOne({ projectId: project.id });
|
||||
|
||||
if (!bot) {
|
||||
throw new BadRequestError({
|
||||
message: "Failed to find bot"
|
||||
});
|
||||
}
|
||||
|
||||
const botPrivateKey = infisicalSymmetricDecrypt({
|
||||
keyEncoding: bot.keyEncoding as SecretKeyEncoding,
|
||||
iv: bot.iv,
|
||||
tag: bot.tag,
|
||||
ciphertext: bot.encryptedPrivateKey
|
||||
});
|
||||
|
||||
const plaintextProjectKey = decryptAsymmetric({
|
||||
ciphertext: ghostUserLatestKey.encryptedKey,
|
||||
nonce: ghostUserLatestKey.nonce,
|
||||
publicKey: ghostUserLatestKey.sender.publicKey,
|
||||
privateKey: botPrivateKey
|
||||
});
|
||||
|
||||
const projectKeyData = groupMembers.map(({ user: { publicKey, id } }) => {
|
||||
const { ciphertext: encryptedKey, nonce } = encryptAsymmetric(plaintextProjectKey, publicKey, botPrivateKey);
|
||||
|
||||
return {
|
||||
encryptedKey,
|
||||
nonce,
|
||||
senderId: ghostUser.id,
|
||||
receiverId: id,
|
||||
projectId: project.id
|
||||
};
|
||||
});
|
||||
|
||||
await projectKeyDAL.insertMany(projectKeyData);
|
||||
}
|
||||
|
||||
return projectGroup;
|
||||
};
|
||||
|
||||
@@ -287,20 +288,26 @@ export const groupProjectServiceFactory = ({
|
||||
);
|
||||
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Delete, ProjectPermissionSub.Groups);
|
||||
|
||||
const groupMembers = await userGroupMembershipDAL.findGroupMembersNotInProject(group.id, project.id);
|
||||
const deletedProjectGroup = await groupProjectDAL.transaction(async (tx) => {
|
||||
const groupMembers = await userGroupMembershipDAL.findGroupMembersNotInProject(group.id, project.id, tx);
|
||||
|
||||
if (groupMembers.length) {
|
||||
await projectKeyDAL.delete({
|
||||
projectId: project.id,
|
||||
$in: {
|
||||
receiverId: groupMembers.map(({ user: { id } }) => id)
|
||||
}
|
||||
});
|
||||
}
|
||||
if (groupMembers.length) {
|
||||
await projectKeyDAL.delete(
|
||||
{
|
||||
projectId: project.id,
|
||||
$in: {
|
||||
receiverId: groupMembers.map(({ user: { id } }) => id)
|
||||
}
|
||||
},
|
||||
tx
|
||||
);
|
||||
}
|
||||
|
||||
const [deletedGroup] = await groupProjectDAL.delete({ groupId: group.id, projectId: project.id });
|
||||
const [projectGroup] = await groupProjectDAL.delete({ groupId: group.id, projectId: project.id }, tx);
|
||||
return projectGroup;
|
||||
});
|
||||
|
||||
return deletedGroup;
|
||||
return deletedProjectGroup;
|
||||
};
|
||||
|
||||
const listGroupsInProject = async ({
|
||||
|
@@ -129,26 +129,55 @@ const getAppsHeroku = async ({ accessToken }: { accessToken: string }) => {
|
||||
* Return list of names of apps for Vercel integration
|
||||
*/
|
||||
const getAppsVercel = async ({ accessToken, teamId }: { teamId?: string | null; accessToken: string }) => {
|
||||
const res = (
|
||||
await request.get<{ projects: { name: string; id: string }[] }>(`${IntegrationUrls.VERCEL_API_URL}/v9/projects`, {
|
||||
const apps: Array<{ name: string; appId: string }> = [];
|
||||
|
||||
const limit = "20";
|
||||
let hasMorePages = true;
|
||||
let next: number | null = null;
|
||||
|
||||
interface Response {
|
||||
projects: { name: string; id: string }[];
|
||||
pagination: {
|
||||
count: number;
|
||||
next: number | null;
|
||||
prev: number;
|
||||
};
|
||||
}
|
||||
|
||||
while (hasMorePages) {
|
||||
const params: { [key: string]: string } = {
|
||||
limit
|
||||
};
|
||||
|
||||
if (teamId) {
|
||||
params.teamId = teamId;
|
||||
}
|
||||
|
||||
if (next) {
|
||||
params.until = String(next);
|
||||
}
|
||||
|
||||
const { data } = await request.get<Response>(`${IntegrationUrls.VERCEL_API_URL}/v9/projects`, {
|
||||
params: new URLSearchParams(params),
|
||||
headers: {
|
||||
Authorization: `Bearer ${accessToken}`,
|
||||
"Accept-Encoding": "application/json"
|
||||
},
|
||||
...(teamId
|
||||
? {
|
||||
params: {
|
||||
teamId
|
||||
}
|
||||
}
|
||||
: {})
|
||||
})
|
||||
).data;
|
||||
}
|
||||
});
|
||||
|
||||
const apps = res.projects.map((a) => ({
|
||||
name: a.name,
|
||||
appId: a.id
|
||||
}));
|
||||
data.projects.forEach((a) => {
|
||||
apps.push({
|
||||
name: a.name,
|
||||
appId: a.id
|
||||
});
|
||||
});
|
||||
|
||||
next = data.pagination.next;
|
||||
|
||||
if (data.pagination.next === null) {
|
||||
hasMorePages = false;
|
||||
}
|
||||
}
|
||||
|
||||
return apps;
|
||||
};
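For reference, one page returned by Vercel's GET /v9/projects has the shape consumed by the loop above (values made up); pagination.next is fed back as the until parameter, and a null next ends the loop:

// Illustrative page matching the Response interface declared above.
const examplePage = {
  projects: [
    { name: "web", id: "prj_abc123" },
    { name: "api", id: "prj_def456" }
  ],
  pagination: { count: 2, next: 1700000000000, prev: 1690000000000 }
};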
|
||||
|
@@ -1,5 +1,6 @@
|
||||
import { ForbiddenError } from "@casl/ability";
|
||||
import { Octokit } from "@octokit/rest";
|
||||
import AWS from "aws-sdk";
|
||||
|
||||
import { SecretEncryptionAlgo, SecretKeyEncoding, TIntegrationAuths, TIntegrationAuthsInsert } from "@app/db/schemas";
|
||||
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
|
||||
@@ -23,6 +24,7 @@ import {
|
||||
TGetIntegrationAuthTeamCityBuildConfigDTO,
|
||||
THerokuPipelineCoupling,
|
||||
TIntegrationAuthAppsDTO,
|
||||
TIntegrationAuthAwsKmsKeyDTO,
|
||||
TIntegrationAuthBitbucketWorkspaceDTO,
|
||||
TIntegrationAuthChecklyGroupsDTO,
|
||||
TIntegrationAuthGithubEnvsDTO,
|
||||
@@ -534,6 +536,52 @@ export const integrationAuthServiceFactory = ({
|
||||
return data.results.map(({ name, id: orgId }) => ({ name, orgId }));
|
||||
};
|
||||
|
||||
const getAwsKmsKeys = async ({
|
||||
actorId,
|
||||
actor,
|
||||
actorOrgId,
|
||||
actorAuthMethod,
|
||||
id,
|
||||
region
|
||||
}: TIntegrationAuthAwsKmsKeyDTO) => {
|
||||
const integrationAuth = await integrationAuthDAL.findById(id);
|
||||
if (!integrationAuth) throw new BadRequestError({ message: "Failed to find integration" });
|
||||
|
||||
const { permission } = await permissionService.getProjectPermission(
|
||||
actor,
|
||||
actorId,
|
||||
integrationAuth.projectId,
|
||||
actorAuthMethod,
|
||||
actorOrgId
|
||||
);
|
||||
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.Integrations);
|
||||
const botKey = await projectBotService.getBotKey(integrationAuth.projectId);
|
||||
const { accessId, accessToken } = await getIntegrationAccessToken(integrationAuth, botKey);
|
||||
|
||||
AWS.config.update({
|
||||
region,
|
||||
credentials: {
|
||||
accessKeyId: String(accessId),
|
||||
secretAccessKey: accessToken
|
||||
}
|
||||
});
|
||||
const kms = new AWS.KMS();
|
||||
|
||||
const aliases = await kms.listAliases({}).promise();
|
||||
const keys = await kms.listKeys({}).promise();
|
||||
const response = keys
|
||||
.Keys!.map((key) => {
|
||||
const keyAlias = aliases.Aliases!.find((alias) => key.KeyId === alias.TargetKeyId);
|
||||
if (!keyAlias?.AliasName?.includes("alias/aws/") || keyAlias?.AliasName?.includes("alias/aws/secretsmanager")) {
|
||||
return { id: String(key.KeyId), alias: String(keyAlias?.AliasName || key.KeyId) };
|
||||
}
|
||||
return { id: "null", alias: "null" };
|
||||
})
|
||||
.filter((elem) => elem.id !== "null");
|
||||
|
||||
return response;
|
||||
};
|
||||
|
||||
const getQoveryProjects = async ({
|
||||
actorId,
|
||||
actor,
|
||||
@@ -1133,6 +1181,7 @@ export const integrationAuthServiceFactory = ({
|
||||
getIntegrationApps,
|
||||
getVercelBranches,
|
||||
getApps,
|
||||
getAwsKmsKeys,
|
||||
getGithubOrgs,
|
||||
getGithubEnvs,
|
||||
getChecklyGroups,
|
||||
|
@@ -63,6 +63,11 @@ export type TIntegrationAuthQoveryProjectDTO = {
|
||||
orgId: string;
|
||||
} & Omit<TProjectPermission, "projectId">;
|
||||
|
||||
export type TIntegrationAuthAwsKmsKeyDTO = {
|
||||
id: string;
|
||||
region: string;
|
||||
} & Omit<TProjectPermission, "projectId">;
|
||||
|
||||
export type TIntegrationAuthQoveryEnvironmentsDTO = {
|
||||
id: string;
|
||||
} & TProjectPermission;
|
||||
|
@@ -1,3 +1,4 @@
|
||||
/* eslint-disable @typescript-eslint/no-unsafe-call */
|
||||
/* eslint-disable @typescript-eslint/no-unsafe-return */
|
||||
/* eslint-disable @typescript-eslint/no-unsafe-assignment */
|
||||
/* eslint-disable @typescript-eslint/no-unsafe-argument */
|
||||
@@ -457,6 +458,8 @@ const syncSecretsAWSParameterStore = async ({
|
||||
});
|
||||
ssm.config.update(config);
|
||||
|
||||
const metadata = z.record(z.any()).parse(integration.metadata || {});
|
||||
|
||||
const params = {
|
||||
Path: integration.path as string,
|
||||
Recursive: false,
|
||||
@@ -486,7 +489,10 @@ const syncSecretsAWSParameterStore = async ({
|
||||
Name: `${integration.path}${key}`,
|
||||
Type: "SecureString",
|
||||
Value: secrets[key].value,
|
||||
Overwrite: true
|
||||
// Overwrite: true,
|
||||
Tags: metadata.secretAWSTag
|
||||
? metadata.secretAWSTag.map((tag: { key: string; value: string }) => ({ Key: tag.key, Value: tag.value }))
|
||||
: []
|
||||
})
|
||||
.promise();
|
||||
// case: secret exists in AWS parameter store
|
||||
@@ -499,6 +505,7 @@ const syncSecretsAWSParameterStore = async ({
|
||||
Type: "SecureString",
|
||||
Value: secrets[key].value,
|
||||
Overwrite: true
|
||||
// Tags: metadata.secretAWSTag ? [{ Key: metadata.secretAWSTag.key, Value: metadata.secretAWSTag.value }] : []
|
||||
})
|
||||
.promise();
|
||||
}
|
||||
@@ -537,6 +544,7 @@ const syncSecretsAWSSecretManager = async ({
|
||||
}) => {
|
||||
let secretsManager;
|
||||
const secKeyVal = getSecretKeyValuePair(secrets);
|
||||
const metadata = z.record(z.any()).parse(integration.metadata || {});
|
||||
try {
|
||||
if (!accessId) return;
|
||||
|
||||
@@ -573,7 +581,11 @@ const syncSecretsAWSSecretManager = async ({
|
||||
await secretsManager.send(
|
||||
new CreateSecretCommand({
|
||||
Name: integration.app as string,
|
||||
SecretString: JSON.stringify(secKeyVal)
|
||||
SecretString: JSON.stringify(secKeyVal),
|
||||
KmsKeyId: metadata.kmsKeyId ? metadata.kmsKeyId : null,
|
||||
Tags: metadata.secretAWSTag
|
||||
? metadata.secretAWSTag.map((tag: { key: string; value: string }) => ({ Key: tag.key, Value: tag.value }))
|
||||
: []
|
||||
})
|
||||
);
|
||||
}
|
||||
@@ -2145,16 +2157,29 @@ const syncSecretsQovery = async ({
|
||||
* @param {String} obj.accessToken - access token for Terraform Cloud API
|
||||
*/
|
||||
const syncSecretsTerraformCloud = async ({
|
||||
createManySecretsRawFn,
|
||||
updateManySecretsRawFn,
|
||||
integration,
|
||||
secrets,
|
||||
accessToken
|
||||
accessToken,
|
||||
integrationDAL
|
||||
}: {
|
||||
integration: TIntegrations;
|
||||
secrets: Record<string, { value: string; comment?: string }>;
|
||||
createManySecretsRawFn: (params: TCreateManySecretsRawFn) => Promise<Array<TSecrets & { _id: string }>>;
|
||||
updateManySecretsRawFn: (params: TUpdateManySecretsRawFn) => Promise<Array<TSecrets & { _id: string }>>;
|
||||
integration: TIntegrations & {
|
||||
projectId: string;
|
||||
environment: {
|
||||
id: string;
|
||||
name: string;
|
||||
slug: string;
|
||||
};
|
||||
};
|
||||
secrets: Record<string, { value: string; comment?: string } | null>;
|
||||
accessToken: string;
|
||||
integrationDAL: Pick<TIntegrationDALFactory, "updateById">;
|
||||
}) => {
|
||||
// get secrets from Terraform Cloud
|
||||
const getSecretsRes = (
|
||||
const terraformSecrets = (
|
||||
await request.get<{ data: { attributes: { key: string; value: string }; id: string }[] }>(
|
||||
`${IntegrationUrls.TERRAFORM_CLOUD_API_URL}/api/v2/workspaces/${integration.appId}/vars`,
|
||||
{
|
||||
@@ -2172,9 +2197,74 @@ const syncSecretsTerraformCloud = async ({
|
||||
{} as Record<string, { attributes: { key: string; value: string }; id: string }>
|
||||
);
|
||||
|
||||
const secretsToAdd: { [key: string]: string } = {};
|
||||
const secretsToUpdate: { [key: string]: string } = {};
|
||||
|
||||
const metadata = z.record(z.any()).parse(integration.metadata);
|
||||
|
||||
Object.keys(terraformSecrets).forEach((key) => {
|
||||
if (!integration.lastUsed) {
|
||||
// first time using integration
|
||||
// -> apply initial sync behavior
|
||||
switch (metadata.initialSyncBehavior) {
|
||||
case IntegrationInitialSyncBehavior.PREFER_TARGET: {
|
||||
if (!(key in secrets)) {
|
||||
secretsToAdd[key] = terraformSecrets[key].attributes.value;
|
||||
} else if (secrets[key]?.value !== terraformSecrets[key].attributes.value) {
|
||||
secretsToUpdate[key] = terraformSecrets[key].attributes.value;
|
||||
}
|
||||
secrets[key] = {
|
||||
value: terraformSecrets[key].attributes.value
|
||||
};
|
||||
break;
|
||||
}
|
||||
case IntegrationInitialSyncBehavior.PREFER_SOURCE: {
|
||||
if (!(key in secrets)) {
|
||||
secrets[key] = {
|
||||
value: terraformSecrets[key].attributes.value
|
||||
};
|
||||
secretsToAdd[key] = terraformSecrets[key].attributes.value;
|
||||
}
|
||||
break;
|
||||
}
|
||||
default: {
|
||||
break;
|
||||
}
|
||||
}
|
||||
} else if (!(key in secrets)) secrets[key] = null;
|
||||
});
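In effect, on the first sync (no integration.lastUsed) the loop above merges existing Terraform Cloud variables into Infisical: PREFER_TARGET lets the Terraform value win for every key, PREFER_SOURCE only imports keys Infisical does not have yet, and any other behavior leaves Infisical untouched so Terraform-only keys are later removed by the cleanup loop. A compact restatement of that rule as a standalone sketch, using assumed string values rather than the real IntegrationInitialSyncBehavior enum:

// Pure sketch of the first-sync merge rule; not the integration code itself.
const mergeOnFirstSync = (
  behavior: "prefer-target" | "prefer-source" | "overwrite-target",
  infisicalValue: string | undefined,
  terraformValue: string
): string | undefined => {
  if (behavior === "prefer-target") return terraformValue; // Terraform Cloud wins on conflicts
  if (behavior === "prefer-source") return infisicalValue ?? terraformValue; // only fill keys Infisical lacks
  return infisicalValue; // otherwise keep Infisical as-is; Terraform-only keys end up removed from TFC
};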
|
||||
|
||||
if (Object.keys(secretsToAdd).length) {
|
||||
await createManySecretsRawFn({
|
||||
projectId: integration.projectId,
|
||||
environment: integration.environment.slug,
|
||||
path: integration.secretPath,
|
||||
secrets: Object.keys(secretsToAdd).map((key) => ({
|
||||
secretName: key,
|
||||
secretValue: secretsToAdd[key],
|
||||
type: SecretType.Shared,
|
||||
secretComment: ""
|
||||
}))
|
||||
});
|
||||
}
|
||||
|
||||
if (Object.keys(secretsToUpdate).length) {
|
||||
await updateManySecretsRawFn({
|
||||
projectId: integration.projectId,
|
||||
environment: integration.environment.slug,
|
||||
path: integration.secretPath,
|
||||
secrets: Object.keys(secretsToUpdate).map((key) => ({
|
||||
secretName: key,
|
||||
secretValue: secretsToUpdate[key],
|
||||
type: SecretType.Shared,
|
||||
secretComment: ""
|
||||
}))
|
||||
});
|
||||
}
|
||||
|
||||
// create or update secrets on Terraform Cloud
|
||||
for await (const key of Object.keys(secrets)) {
|
||||
if (!(key in getSecretsRes)) {
|
||||
if (!(key in terraformSecrets)) {
|
||||
// case: secret does not exist in Terraform Cloud
|
||||
// -> add secret
|
||||
await request.post(
|
||||
@@ -2184,7 +2274,7 @@ const syncSecretsTerraformCloud = async ({
|
||||
type: "vars",
|
||||
attributes: {
|
||||
key,
|
||||
value: secrets[key].value,
|
||||
value: secrets[key]?.value,
|
||||
category: integration.targetService
|
||||
}
|
||||
}
|
||||
@@ -2198,17 +2288,17 @@ const syncSecretsTerraformCloud = async ({
|
||||
}
|
||||
);
|
||||
// case: secret exists in Terraform Cloud
|
||||
} else if (secrets[key].value !== getSecretsRes[key].attributes.value) {
|
||||
} else if (secrets[key]?.value !== terraformSecrets[key].attributes.value) {
|
||||
// -> update secret
|
||||
await request.patch(
|
||||
`${IntegrationUrls.TERRAFORM_CLOUD_API_URL}/api/v2/workspaces/${integration.appId}/vars/${getSecretsRes[key].id}`,
|
||||
`${IntegrationUrls.TERRAFORM_CLOUD_API_URL}/api/v2/workspaces/${integration.appId}/vars/${terraformSecrets[key].id}`,
|
||||
{
|
||||
data: {
|
||||
type: "vars",
|
||||
id: getSecretsRes[key].id,
|
||||
id: terraformSecrets[key].id,
|
||||
attributes: {
|
||||
...getSecretsRes[key],
|
||||
value: secrets[key].value
|
||||
...terraformSecrets[key],
|
||||
value: secrets[key]?.value
|
||||
}
|
||||
}
|
||||
},
|
||||
@@ -2223,11 +2313,11 @@ const syncSecretsTerraformCloud = async ({
|
||||
}
|
||||
}
|
||||
|
||||
for await (const key of Object.keys(getSecretsRes)) {
|
||||
for await (const key of Object.keys(terraformSecrets)) {
|
||||
if (!(key in secrets)) {
|
||||
// case: delete secret
|
||||
await request.delete(
|
||||
`${IntegrationUrls.TERRAFORM_CLOUD_API_URL}/api/v2/workspaces/${integration.appId}/vars/${getSecretsRes[key].id}`,
|
||||
`${IntegrationUrls.TERRAFORM_CLOUD_API_URL}/api/v2/workspaces/${integration.appId}/vars/${terraformSecrets[key].id}`,
|
||||
{
|
||||
headers: {
|
||||
Authorization: `Bearer ${accessToken}`,
|
||||
@@ -2238,6 +2328,10 @@ const syncSecretsTerraformCloud = async ({
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
await integrationDAL.updateById(integration.id, {
|
||||
lastUsed: new Date()
|
||||
});
|
||||
};
|
||||
|
||||
/**
|
||||
@@ -3279,9 +3373,12 @@ export const syncIntegrationSecrets = async ({
|
||||
break;
|
||||
case Integrations.TERRAFORM_CLOUD:
|
||||
await syncSecretsTerraformCloud({
|
||||
createManySecretsRawFn,
|
||||
updateManySecretsRawFn,
|
||||
integration,
|
||||
secrets,
|
||||
accessToken
|
||||
accessToken,
|
||||
integrationDAL
|
||||
});
|
||||
break;
|
||||
case Integrations.HASHICORP_VAULT:
|
||||
|
@@ -22,6 +22,11 @@ export type TCreateIntegrationDTO = {
|
||||
labelName: string;
|
||||
labelValue: string;
|
||||
};
|
||||
secretAWSTag?: {
|
||||
key: string;
|
||||
value: string;
|
||||
}[];
|
||||
kmsKeyId?: string;
|
||||
};
|
||||
} & Omit<TProjectPermission, "projectId">;
|
||||
|
||||
|
@@ -89,6 +89,25 @@ export const orgDALFactory = (db: TDbClient) => {
|
||||
}
|
||||
};
|
||||
|
||||
  const countAllOrgMembers = async (orgId: string) => {
    try {
      interface CountResult {
        count: string;
      }

      const count = await db(TableName.OrgMembership)
        .where(`${TableName.OrgMembership}.orgId`, orgId)
        .count("*")
        .join(TableName.Users, `${TableName.OrgMembership}.userId`, `${TableName.Users}.id`)
        .where({ isGhost: false })
        .first();

      return parseInt((count as unknown as CountResult).count || "0", 10);
    } catch (error) {
      throw new DatabaseError({ error, name: "Count all org members" });
    }
  };
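The new countAllOrgMembers counts only non-ghost users, so project ghost/service accounts do not inflate seat counts. The Knex chain compiles to a single joined count; the SQL in the comment below is an approximation with assumed table names:

// Approximate SQL produced by the chain above (for illustration; column quoting simplified):
//   SELECT count(*) FROM org_memberships om
//   JOIN users u ON om."userId" = u.id
//   WHERE om."orgId" = :orgId AND u."isGhost" = false;
const memberCount = await orgDAL.countAllOrgMembers(orgId); // e.g. 42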
|
||||
|
||||
const findOrgMembersByUsername = async (orgId: string, usernames: string[]) => {
|
||||
try {
|
||||
const members = await db(TableName.OrgMembership)
|
||||
@@ -269,6 +288,7 @@ export const orgDALFactory = (db: TDbClient) => {
|
||||
...orgOrm,
|
||||
findOrgByProjectId,
|
||||
findAllOrgMembers,
|
||||
countAllOrgMembers,
|
||||
findOrgById,
|
||||
findAllOrgsByUserId,
|
||||
ghostUserExists,
|
||||
|
@@ -248,7 +248,7 @@ export const orgServiceFactory = ({
|
||||
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Edit, OrgPermissionSubjects.Scim);
|
||||
}
|
||||
|
||||
if (authEnforced || scimEnabled) {
|
||||
if (authEnforced) {
|
||||
const samlCfg = await samlConfigDAL.findEnforceableSamlCfg(orgId);
|
||||
if (!samlCfg)
|
||||
throw new BadRequestError({
|
||||
|
@@ -134,8 +134,7 @@ export const projectMembershipServiceFactory = ({
|
||||
const projectMemberships = await projectMembershipDAL.insertMany(
|
||||
orgMembers.map(({ userId }) => ({
|
||||
projectId,
|
||||
userId: userId as string,
|
||||
role: ProjectMembershipRole.Member
|
||||
userId: userId as string
|
||||
})),
|
||||
tx
|
||||
);
|
||||
@@ -267,8 +266,7 @@ export const projectMembershipServiceFactory = ({
|
||||
const projectMemberships = await projectMembershipDAL.insertMany(
|
||||
orgMembers.map(({ user }) => ({
|
||||
projectId,
|
||||
userId: user.id,
|
||||
role: ProjectMembershipRole.Member
|
||||
userId: user.id
|
||||
})),
|
||||
tx
|
||||
);
|
||||
|
@@ -81,9 +81,9 @@ export const projectDALFactory = (db: TDbClient) => {
|
||||
}
|
||||
};
|
||||
|
||||
const findProjectGhostUser = async (projectId: string) => {
|
||||
const findProjectGhostUser = async (projectId: string, tx?: Knex) => {
|
||||
try {
|
||||
const ghostUser = await db(TableName.ProjectMembership)
|
||||
const ghostUser = await (tx || db)(TableName.ProjectMembership)
|
||||
.where({ projectId })
|
||||
.join(TableName.Users, `${TableName.ProjectMembership}.userId`, `${TableName.Users}.id`)
|
||||
.select(selectAllTableCols(TableName.Users))
|
||||
|
@@ -1,33 +1,66 @@
|
||||
import { SecretType, TSecretImports } from "@app/db/schemas";
|
||||
import { SecretType, TSecretImports, TSecrets } from "@app/db/schemas";
|
||||
import { groupBy } from "@app/lib/fn";
|
||||
|
||||
import { TSecretDALFactory } from "../secret/secret-dal";
|
||||
import { TSecretFolderDALFactory } from "../secret-folder/secret-folder-dal";
|
||||
import { TSecretImportDALFactory } from "./secret-import-dal";
|
||||
|
||||
type TSecretImportSecrets = {
|
||||
secretPath: string;
|
||||
environment: string;
|
||||
environmentInfo: {
|
||||
id: string;
|
||||
slug: string;
|
||||
name: string;
|
||||
};
|
||||
folderId: string | undefined;
|
||||
importFolderId: string;
|
||||
secrets: (TSecrets & { workspace: string; environment: string; _id: string })[];
|
||||
};
|
||||
|
||||
const LEVEL_BREAK = 10;
|
||||
const getImportUniqKey = (envSlug: string, path: string) => `${envSlug}=${path}`;
|
||||
export const fnSecretsFromImports = async ({
|
||||
allowedImports,
|
||||
allowedImports: possibleCyclicImports,
|
||||
folderDAL,
|
||||
secretDAL
|
||||
secretDAL,
|
||||
secretImportDAL,
|
||||
depth = 0,
|
||||
cyclicDetector = new Set()
|
||||
}: {
|
||||
allowedImports: (Omit<TSecretImports, "importEnv"> & {
|
||||
importEnv: { id: string; slug: string; name: string };
|
||||
})[];
|
||||
folderDAL: Pick<TSecretFolderDALFactory, "findByManySecretPath">;
|
||||
secretDAL: Pick<TSecretDALFactory, "find">;
|
||||
secretImportDAL: Pick<TSecretImportDALFactory, "findByFolderIds">;
|
||||
depth?: number;
|
||||
cyclicDetector?: Set<string>;
|
||||
}) => {
|
||||
const importedFolders = await folderDAL.findByManySecretPath(
|
||||
allowedImports.map(({ importEnv, importPath }) => ({
|
||||
envId: importEnv.id,
|
||||
secretPath: importPath
|
||||
}))
|
||||
// avoid going more than a depth
|
||||
if (depth >= LEVEL_BREAK) return [];
|
||||
|
||||
const allowedImports = possibleCyclicImports.filter(
|
||||
({ importPath, importEnv }) => !cyclicDetector.has(getImportUniqKey(importEnv.slug, importPath))
|
||||
);
|
||||
const folderIds = importedFolders.map((el) => el?.id).filter(Boolean) as string[];
|
||||
if (!folderIds.length) {
|
||||
|
||||
const importedFolders = (
|
||||
await folderDAL.findByManySecretPath(
|
||||
allowedImports.map(({ importEnv, importPath }) => ({
|
||||
envId: importEnv.id,
|
||||
secretPath: importPath
|
||||
}))
|
||||
)
|
||||
).filter(Boolean); // remove undefined ones
|
||||
if (!importedFolders.length) {
|
||||
return [];
|
||||
}
|
||||
|
||||
const importedFolderIds = importedFolders.map((el) => el?.id) as string[];
|
||||
const importedFolderGroupBySourceImport = groupBy(importedFolders, (i) => `${i?.envId}-${i?.path}`);
|
||||
const importedSecrets = await secretDAL.find(
|
||||
{
|
||||
$in: { folderId: folderIds },
|
||||
$in: { folderId: importedFolderIds },
|
||||
type: SecretType.Shared
|
||||
},
|
||||
{
|
||||
@@ -35,18 +68,50 @@ export const fnSecretsFromImports = async ({
|
||||
}
|
||||
);
|
||||
|
||||
const importedSecsGroupByFolderId = groupBy(importedSecrets, (i) => i.folderId);
|
||||
return allowedImports.map(({ importPath, importEnv }, i) => ({
|
||||
secretPath: importPath,
|
||||
environment: importEnv.slug,
|
||||
environmentInfo: importEnv,
|
||||
folderId: importedFolders?.[i]?.id,
|
||||
// this handles cases when secrets are empty, either because the folder for a path is missing or because the path has no secrets
|
||||
secrets: (importedSecsGroupByFolderId?.[importedFolders?.[i]?.id as string] || []).map((item) => ({
|
||||
...item,
|
||||
const importedSecretsGroupByFolderId = groupBy(importedSecrets, (i) => i.folderId);
|
||||
|
||||
allowedImports.forEach(({ importPath, importEnv }) => {
|
||||
cyclicDetector.add(getImportUniqKey(importEnv.slug, importPath));
|
||||
});
|
||||
// now we need to recursively resolve deeper imports made inside other imports
|
||||
// we go level-wise: take all imports at one tree level, then descend to the next level
|
||||
const deeperImports = await secretImportDAL.findByFolderIds(importedFolderIds);
|
||||
let secretsFromDeeperImports: TSecretImportSecrets[] = [];
|
||||
if (deeperImports.length) {
|
||||
secretsFromDeeperImports = await fnSecretsFromImports({
|
||||
allowedImports: deeperImports,
|
||||
secretImportDAL,
|
||||
folderDAL,
|
||||
secretDAL,
|
||||
depth: depth + 1,
|
||||
cyclicDetector
|
||||
});
|
||||
}
|
||||
const secretsFromdeeperImportGroupedByFolderId = groupBy(secretsFromDeeperImports, (i) => i.importFolderId);
|
||||
|
||||
const secrets = allowedImports.map(({ importPath, importEnv, id, folderId }, i) => {
|
||||
const sourceImportFolder = importedFolderGroupBySourceImport[`${importEnv.id}-${importPath}`][0];
|
||||
const folderDeeperImportSecrets =
|
||||
secretsFromdeeperImportGroupedByFolderId?.[sourceImportFolder?.id || ""]?.[0]?.secrets || [];
|
||||
|
||||
return {
|
||||
secretPath: importPath,
|
||||
environment: importEnv.slug,
|
||||
workspace: "", // This field should not be used, it's only here to keep the older Python SDK versions backwards compatible with the new Postgres backend.
|
||||
_id: item.id // The old Python SDK depends on the _id field being returned. We return this to keep the older Python SDK versions backwards compatible with the new Postgres backend.
|
||||
}))
|
||||
}));
|
||||
environmentInfo: importEnv,
|
||||
folderId: importedFolders?.[i]?.id,
|
||||
id,
|
||||
importFolderId: folderId,
|
||||
// this handles cases when secrets are empty, either because the folder for a path is missing or because the path has no secrets
|
||||
secrets: (importedSecretsGroupByFolderId?.[importedFolders?.[i]?.id as string] || [])
|
||||
.map((item) => ({
|
||||
...item,
|
||||
environment: importEnv.slug,
|
||||
workspace: "", // This field should not be used, it's only here to keep the older Python SDK versions backwards compatible with the new Postgres backend.
|
||||
_id: item.id // The old Python SDK depends on the _id field being returned. We return this to keep the older Python SDK versions backwards compatible with the new Postgres backend.
|
||||
}))
|
||||
.concat(folderDeeperImportSecrets)
|
||||
};
|
||||
});
|
||||
|
||||
return secrets;
|
||||
};
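The recursion above is easier to see in isolation. Below is a minimal, self-contained sketch of the same pattern: resolve imports one tree level at a time, break cycles with a visited set keyed by `envSlug=path`, and stop at a hard depth cap. Types and names here are simplified stand-ins for illustration, not the actual DAL shapes.

```typescript
// Simplified sketch of depth-capped, cycle-guarded import resolution.
type ImportRef = { envSlug: string; path: string; children: ImportRef[] };

const MAX_DEPTH = 10;
const keyOf = (ref: ImportRef) => `${ref.envSlug}=${ref.path}`;

const resolveImports = (
  refs: ImportRef[],
  depth = 0,
  visited: Set<string> = new Set()
): ImportRef[] => {
  // stop once we have gone deeper than the allowed number of levels
  if (depth >= MAX_DEPTH) return [];

  // drop anything already visited so circular imports terminate
  const fresh = refs.filter((ref) => !visited.has(keyOf(ref)));
  fresh.forEach((ref) => visited.add(keyOf(ref)));

  // walk the next level and flatten the results into one list
  const deeper = fresh.flatMap((ref) => resolveImports(ref.children, depth + 1, visited));
  return [...fresh, ...deeper];
};
```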
|
||||
|
@@ -290,7 +290,7 @@ export const secretImportServiceFactory = ({
|
||||
})
|
||||
)
|
||||
);
|
||||
return fnSecretsFromImports({ allowedImports, folderDAL, secretDAL });
|
||||
return fnSecretsFromImports({ allowedImports, folderDAL, secretDAL, secretImportDAL });
|
||||
};
|
||||
|
||||
return {
|
||||
|
@@ -575,7 +575,11 @@ export const createManySecretsRawFnFactory = ({
|
||||
await projectDAL.checkProjectUpgradeStatus(projectId);
|
||||
|
||||
const folder = await folderDAL.findBySecretPath(projectId, environment, secretPath);
|
||||
if (!folder) throw new BadRequestError({ message: "Folder not found", name: "Create secret" });
|
||||
if (!folder)
|
||||
throw new BadRequestError({
|
||||
message: "Folder not found for the given environment slug & secret path",
|
||||
name: "Create secret"
|
||||
});
|
||||
const folderId = folder.id;
|
||||
|
||||
const blindIndexCfg = await secretBlindIndexDAL.findOne({ projectId });
|
||||
@@ -680,7 +684,11 @@ export const updateManySecretsRawFnFactory = ({
|
||||
await projectDAL.checkProjectUpgradeStatus(projectId);
|
||||
|
||||
const folder = await folderDAL.findBySecretPath(projectId, environment, secretPath);
|
||||
if (!folder) throw new BadRequestError({ message: "Folder not found", name: "Update secret" });
|
||||
if (!folder)
|
||||
throw new BadRequestError({
|
||||
message: "Folder not found for the given environment slug & secret path",
|
||||
name: "Update secret"
|
||||
});
|
||||
const folderId = folder.id;
|
||||
|
||||
const blindIndexCfg = await secretBlindIndexDAL.findOne({ projectId });
|
||||
|
@@ -33,9 +33,11 @@ import { TSecretQueueFactory } from "./secret-queue";
|
||||
import {
|
||||
TAttachSecretTagsDTO,
|
||||
TCreateBulkSecretDTO,
|
||||
TCreateManySecretRawDTO,
|
||||
TCreateSecretDTO,
|
||||
TCreateSecretRawDTO,
|
||||
TDeleteBulkSecretDTO,
|
||||
TDeleteManySecretRawDTO,
|
||||
TDeleteSecretDTO,
|
||||
TDeleteSecretRawDTO,
|
||||
TFnSecretBlindIndexCheckV2,
|
||||
@@ -46,6 +48,7 @@ import {
|
||||
TGetSecretsRawDTO,
|
||||
TGetSecretVersionsDTO,
|
||||
TUpdateBulkSecretDTO,
|
||||
TUpdateManySecretRawDTO,
|
||||
TUpdateSecretDTO,
|
||||
TUpdateSecretRawDTO
|
||||
} from "./secret-types";
|
||||
@@ -179,7 +182,11 @@ export const secretServiceFactory = ({
|
||||
await projectDAL.checkProjectUpgradeStatus(projectId);
|
||||
|
||||
const folder = await folderDAL.findBySecretPath(projectId, environment, path);
|
||||
if (!folder) throw new BadRequestError({ message: "Folder not found", name: "Create secret" });
|
||||
if (!folder)
|
||||
throw new BadRequestError({
|
||||
message: "Folder not found for the given environment slug & secret path",
|
||||
name: "Create secret"
|
||||
});
|
||||
const folderId = folder.id;
|
||||
|
||||
const blindIndexCfg = await secretBlindIndexDAL.findOne({ projectId });
|
||||
@@ -275,7 +282,11 @@ export const secretServiceFactory = ({
|
||||
}
|
||||
|
||||
const folder = await folderDAL.findBySecretPath(projectId, environment, path);
|
||||
if (!folder) throw new BadRequestError({ message: "Folder not found", name: "Create secret" });
|
||||
if (!folder)
|
||||
throw new BadRequestError({
|
||||
message: "Folder not found for the given environment slug & secret path",
|
||||
name: "Create secret"
|
||||
});
|
||||
const folderId = folder.id;
|
||||
|
||||
const blindIndexCfg = await secretBlindIndexDAL.findOne({ projectId });
|
||||
@@ -391,7 +402,11 @@ export const secretServiceFactory = ({
|
||||
await projectDAL.checkProjectUpgradeStatus(projectId);
|
||||
|
||||
const folder = await folderDAL.findBySecretPath(projectId, environment, path);
|
||||
if (!folder) throw new BadRequestError({ message: "Folder not found", name: "Create secret" });
|
||||
if (!folder)
|
||||
throw new BadRequestError({
|
||||
message: "Folder not found for the given environment slug & secret path",
|
||||
name: "Create secret"
|
||||
});
|
||||
const folderId = folder.id;
|
||||
|
||||
const blindIndexCfg = await secretBlindIndexDAL.findOne({ projectId });
|
||||
@@ -510,7 +525,8 @@ export const secretServiceFactory = ({
|
||||
const importedSecrets = await fnSecretsFromImports({
|
||||
allowedImports,
|
||||
secretDAL,
|
||||
folderDAL
|
||||
folderDAL,
|
||||
secretImportDAL
|
||||
});
|
||||
|
||||
return {
|
||||
@@ -559,7 +575,11 @@ export const secretServiceFactory = ({
|
||||
subject(ProjectPermissionSub.Secrets, { environment, secretPath: path })
|
||||
);
|
||||
const folder = await folderDAL.findBySecretPath(projectId, environment, path);
|
||||
if (!folder) throw new BadRequestError({ message: "Folder not found", name: "Create secret" });
|
||||
if (!folder)
|
||||
throw new BadRequestError({
|
||||
message: "Folder not found for the given environment slug & secret path",
|
||||
name: "Create secret"
|
||||
});
|
||||
const folderId = folder.id;
|
||||
|
||||
const secretBlindIndex = await interalGenSecBlindIndexByName(projectId, secretName);
|
||||
@@ -611,7 +631,8 @@ export const secretServiceFactory = ({
|
||||
const importedSecrets = await fnSecretsFromImports({
|
||||
allowedImports,
|
||||
secretDAL,
|
||||
folderDAL
|
||||
folderDAL,
|
||||
secretImportDAL
|
||||
});
|
||||
for (let i = importedSecrets.length - 1; i >= 0; i -= 1) {
|
||||
for (let j = 0; j < importedSecrets[i].secrets.length; j += 1) {
|
||||
@@ -655,7 +676,11 @@ export const secretServiceFactory = ({
|
||||
await projectDAL.checkProjectUpgradeStatus(projectId);
|
||||
|
||||
const folder = await folderDAL.findBySecretPath(projectId, environment, path);
|
||||
if (!folder) throw new BadRequestError({ message: "Folder not found", name: "Create secret" });
|
||||
if (!folder)
|
||||
throw new BadRequestError({
|
||||
message: "Folder not found for the given environment slug & secret path",
|
||||
name: "Create secret"
|
||||
});
|
||||
const folderId = folder.id;
|
||||
|
||||
const blindIndexCfg = await secretBlindIndexDAL.findOne({ projectId });
|
||||
@@ -724,7 +749,11 @@ export const secretServiceFactory = ({
|
||||
await projectDAL.checkProjectUpgradeStatus(projectId);
|
||||
|
||||
const folder = await folderDAL.findBySecretPath(projectId, environment, path);
|
||||
if (!folder) throw new BadRequestError({ message: "Folder not found", name: "Update secret" });
|
||||
if (!folder)
|
||||
throw new BadRequestError({
|
||||
message: "Folder not found for the given environment slug & secret path",
|
||||
name: "Update secret"
|
||||
});
|
||||
const folderId = folder.id;
|
||||
|
||||
const blindIndexCfg = await secretBlindIndexDAL.findOne({ projectId });
|
||||
@@ -810,7 +839,11 @@ export const secretServiceFactory = ({
|
||||
await projectDAL.checkProjectUpgradeStatus(projectId);
|
||||
|
||||
const folder = await folderDAL.findBySecretPath(projectId, environment, path);
|
||||
if (!folder) throw new BadRequestError({ message: "Folder not found", name: "Create secret" });
|
||||
if (!folder)
|
||||
throw new BadRequestError({
|
||||
message: "Folder not found for the given environment slug & secret path",
|
||||
name: "Create secret"
|
||||
});
|
||||
const folderId = folder.id;
|
||||
|
||||
const blindIndexCfg = await secretBlindIndexDAL.findOne({ projectId });
|
||||
@@ -1036,6 +1069,143 @@ export const secretServiceFactory = ({
|
||||
return decryptSecretRaw(secret, botKey);
|
||||
};
|
||||
|
||||
const createManySecretsRaw = async ({
|
||||
actorId,
|
||||
projectSlug,
|
||||
environment,
|
||||
actor,
|
||||
actorOrgId,
|
||||
actorAuthMethod,
|
||||
secretPath,
|
||||
secrets: inputSecrets = []
|
||||
}: TCreateManySecretRawDTO) => {
|
||||
const project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId);
|
||||
if (!project) throw new BadRequestError({ message: "Project not found" });
|
||||
const projectId = project.id;
|
||||
|
||||
const botKey = await projectBotService.getBotKey(projectId);
|
||||
if (!botKey) throw new BadRequestError({ message: "Project bot not found", name: "bot_not_found_error" });
|
||||
|
||||
const secrets = await createManySecret({
|
||||
projectId,
|
||||
environment,
|
||||
path: secretPath,
|
||||
actor,
|
||||
actorId,
|
||||
actorOrgId,
|
||||
actorAuthMethod,
|
||||
secrets: inputSecrets.map(({ secretComment, secretKey, secretValue, skipMultilineEncoding }) => {
|
||||
const secretKeyEncrypted = encryptSymmetric128BitHexKeyUTF8(secretKey, botKey);
|
||||
const secretValueEncrypted = encryptSymmetric128BitHexKeyUTF8(secretValue || "", botKey);
|
||||
const secretCommentEncrypted = encryptSymmetric128BitHexKeyUTF8(secretComment || "", botKey);
|
||||
return {
|
||||
secretName: secretKey,
|
||||
skipMultilineEncoding,
|
||||
secretKeyCiphertext: secretKeyEncrypted.ciphertext,
|
||||
secretKeyIV: secretKeyEncrypted.iv,
|
||||
secretKeyTag: secretKeyEncrypted.tag,
|
||||
secretValueCiphertext: secretValueEncrypted.ciphertext,
|
||||
secretValueIV: secretValueEncrypted.iv,
|
||||
secretValueTag: secretValueEncrypted.tag,
|
||||
secretCommentCiphertext: secretCommentEncrypted.ciphertext,
|
||||
secretCommentIV: secretCommentEncrypted.iv,
|
||||
secretCommentTag: secretCommentEncrypted.tag
|
||||
};
|
||||
})
|
||||
});
|
||||
|
||||
await snapshotService.performSnapshot(secrets[0].folderId);
|
||||
await secretQueueService.syncSecrets({ secretPath, projectId, environment });
|
||||
|
||||
return secrets.map((secret) => decryptSecretRaw({ ...secret, workspace: projectId, environment }, botKey));
|
||||
};
|
||||
|
||||
const updateManySecretsRaw = async ({
|
||||
actorId,
|
||||
projectSlug,
|
||||
environment,
|
||||
actor,
|
||||
actorOrgId,
|
||||
actorAuthMethod,
|
||||
secretPath,
|
||||
secrets: inputSecrets = []
|
||||
}: TUpdateManySecretRawDTO) => {
|
||||
const project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId);
|
||||
if (!project) throw new BadRequestError({ message: "Project not found" });
|
||||
const projectId = project.id;
|
||||
|
||||
const botKey = await projectBotService.getBotKey(projectId);
|
||||
if (!botKey) throw new BadRequestError({ message: "Project bot not found", name: "bot_not_found_error" });
|
||||
|
||||
const secrets = await updateManySecret({
|
||||
projectId,
|
||||
environment,
|
||||
path: secretPath,
|
||||
actor,
|
||||
actorId,
|
||||
actorOrgId,
|
||||
actorAuthMethod,
|
||||
secrets: inputSecrets.map(({ secretComment, secretKey, secretValue, skipMultilineEncoding }) => {
|
||||
const secretKeyEncrypted = encryptSymmetric128BitHexKeyUTF8(secretKey, botKey);
|
||||
const secretValueEncrypted = encryptSymmetric128BitHexKeyUTF8(secretValue || "", botKey);
|
||||
const secretCommentEncrypted = encryptSymmetric128BitHexKeyUTF8(secretComment || "", botKey);
|
||||
return {
|
||||
secretName: secretKey,
|
||||
type: SecretType.Shared,
|
||||
skipMultilineEncoding,
|
||||
secretKeyCiphertext: secretKeyEncrypted.ciphertext,
|
||||
secretKeyIV: secretKeyEncrypted.iv,
|
||||
secretKeyTag: secretKeyEncrypted.tag,
|
||||
secretValueCiphertext: secretValueEncrypted.ciphertext,
|
||||
secretValueIV: secretValueEncrypted.iv,
|
||||
secretValueTag: secretValueEncrypted.tag,
|
||||
secretCommentCiphertext: secretCommentEncrypted.ciphertext,
|
||||
secretCommentIV: secretCommentEncrypted.iv,
|
||||
secretCommentTag: secretCommentEncrypted.tag
|
||||
};
|
||||
})
|
||||
});
|
||||
|
||||
await snapshotService.performSnapshot(secrets[0].folderId);
|
||||
await secretQueueService.syncSecrets({ secretPath, projectId, environment });
|
||||
|
||||
return secrets.map((secret) => decryptSecretRaw({ ...secret, workspace: projectId, environment }, botKey));
|
||||
};
|
||||
|
||||
const deleteManySecretsRaw = async ({
|
||||
actorId,
|
||||
projectSlug,
|
||||
environment,
|
||||
actor,
|
||||
actorOrgId,
|
||||
actorAuthMethod,
|
||||
secretPath,
|
||||
secrets: inputSecrets = []
|
||||
}: TDeleteManySecretRawDTO) => {
|
||||
const project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId);
|
||||
if (!project) throw new BadRequestError({ message: "Project not found" });
|
||||
const projectId = project.id;
|
||||
|
||||
const botKey = await projectBotService.getBotKey(projectId);
|
||||
if (!botKey) throw new BadRequestError({ message: "Project bot not found", name: "bot_not_found_error" });
|
||||
|
||||
const secrets = await deleteManySecret({
|
||||
projectId,
|
||||
environment,
|
||||
path: secretPath,
|
||||
actor,
|
||||
actorId,
|
||||
actorOrgId,
|
||||
actorAuthMethod,
|
||||
secrets: inputSecrets.map(({ secretKey }) => ({ secretName: secretKey, type: SecretType.Shared }))
|
||||
});
|
||||
|
||||
await snapshotService.performSnapshot(secrets[0].folderId);
|
||||
await secretQueueService.syncSecrets({ secretPath, projectId, environment });
|
||||
|
||||
return secrets.map((secret) => decryptSecretRaw({ ...secret, workspace: projectId, environment }, botKey));
|
||||
};
|
||||
|
||||
const getSecretVersions = async ({
|
||||
actorId,
|
||||
actor,
|
||||
@@ -1280,6 +1450,9 @@ export const secretServiceFactory = ({
|
||||
createSecretRaw,
|
||||
updateSecretRaw,
|
||||
deleteSecretRaw,
|
||||
createManySecretsRaw,
|
||||
updateManySecretsRaw,
|
||||
deleteManySecretsRaw,
|
||||
getSecretVersions,
|
||||
// external services function
|
||||
fnSecretBulkDelete,
|
||||
|
@@ -181,6 +181,39 @@ export type TDeleteSecretRawDTO = TProjectPermission & {
|
||||
type: SecretType;
|
||||
};
|
||||
|
||||
export type TCreateManySecretRawDTO = Omit<TProjectPermission, "projectId"> & {
|
||||
secretPath: string;
|
||||
projectSlug: string;
|
||||
environment: string;
|
||||
secrets: {
|
||||
secretKey: string;
|
||||
secretValue: string;
|
||||
secretComment?: string;
|
||||
skipMultilineEncoding?: boolean;
|
||||
}[];
|
||||
};
|
||||
|
||||
export type TUpdateManySecretRawDTO = Omit<TProjectPermission, "projectId"> & {
|
||||
secretPath: string;
|
||||
projectSlug: string;
|
||||
environment: string;
|
||||
secrets: {
|
||||
secretKey: string;
|
||||
secretValue: string;
|
||||
secretComment?: string;
|
||||
skipMultilineEncoding?: boolean;
|
||||
}[];
|
||||
};
|
||||
|
||||
export type TDeleteManySecretRawDTO = Omit<TProjectPermission, "projectId"> & {
|
||||
secretPath: string;
|
||||
projectSlug: string;
|
||||
environment: string;
|
||||
secrets: {
|
||||
secretKey: string;
|
||||
}[];
|
||||
};
|
||||
|
||||
export type TGetSecretVersionsDTO = Omit<TProjectPermission, "projectId"> & {
|
||||
limit?: number;
|
||||
offset?: number;
|
||||
|
@@ -34,6 +34,19 @@ export const userDALFactory = (db: TDbClient) => {
|
||||
}
|
||||
};
|
||||
|
||||
const findUserEncKeyByUserIdsBatch = async ({ userIds }: { userIds: string[] }, tx?: Knex) => {
|
||||
try {
|
||||
return await (tx || db)(TableName.Users)
|
||||
.where({
|
||||
isGhost: false
|
||||
})
|
||||
.whereIn(`${TableName.Users}.id`, userIds)
|
||||
.join(TableName.UserEncryptionKey, `${TableName.Users}.id`, `${TableName.UserEncryptionKey}.userId`);
|
||||
} catch (error) {
|
||||
throw new DatabaseError({ error, name: "Find user enc by user ids batch" });
|
||||
}
|
||||
};
|
||||
|
||||
const findUserEncKeyByUserId = async (userId: string) => {
|
||||
try {
|
||||
const user = await db(TableName.Users)
|
||||
@@ -123,6 +136,7 @@ export const userDALFactory = (db: TDbClient) => {
|
||||
...userOrm,
|
||||
findUserByUsername,
|
||||
findUserEncKeyByUsername,
|
||||
findUserEncKeyByUserIdsBatch,
|
||||
findUserEncKeyByUserId,
|
||||
updateUserEncryptionByUserId,
|
||||
findUserByProjectMembershipId,
|
||||
|
@@ -381,6 +381,12 @@ func ProcessTemplate(templateId int, templatePath string, data interface{}, acce
|
||||
funcs := template.FuncMap{
|
||||
"secret": secretFunction,
|
||||
"dynamic_secret": dynamicSecretFunction,
|
||||
"minus": func(a, b int) int {
|
||||
return a - b
|
||||
},
|
||||
"add": func(a, b int) int {
|
||||
return a + b
|
||||
},
|
||||
}
|
||||
|
||||
templateName := path.Base(templatePath)
|
||||
|
@@ -7,6 +7,7 @@ import (
|
||||
"encoding/csv"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"os"
|
||||
"strings"
|
||||
|
||||
"github.com/Infisical/infisical-merge/packages/models"
|
||||
@@ -59,6 +60,11 @@ var exportCmd = &cobra.Command{
|
||||
util.HandleError(err)
|
||||
}
|
||||
|
||||
templatePath, err := cmd.Flags().GetString("template")
|
||||
if err != nil {
|
||||
util.HandleError(err)
|
||||
}
|
||||
|
||||
secretOverriding, err := cmd.Flags().GetBool("secret-overriding")
|
||||
if err != nil {
|
||||
util.HandleError(err, "Unable to parse flag")
|
||||
@@ -93,6 +99,31 @@ var exportCmd = &cobra.Command{
|
||||
request.UniversalAuthAccessToken = token.Token
|
||||
}
|
||||
|
||||
if templatePath != "" {
|
||||
sigChan := make(chan os.Signal, 1)
|
||||
dynamicSecretLeases := NewDynamicSecretLeaseManager(sigChan)
|
||||
newEtag := ""
|
||||
|
||||
accessToken := ""
|
||||
if token != nil {
|
||||
accessToken = token.Token
|
||||
} else {
|
||||
log.Debug().Msg("GetAllEnvironmentVariables: Trying to fetch secrets using logged in details")
|
||||
loggedInUserDetails, err := util.GetCurrentLoggedInUserDetails()
|
||||
if err != nil {
|
||||
util.HandleError(err)
|
||||
}
|
||||
accessToken = loggedInUserDetails.UserCredentials.JTWToken
|
||||
}
|
||||
|
||||
processedTemplate, err := ProcessTemplate(1, templatePath, nil, accessToken, "", &newEtag, dynamicSecretLeases)
|
||||
if err != nil {
|
||||
util.HandleError(err)
|
||||
}
|
||||
fmt.Print(processedTemplate.String())
|
||||
return
|
||||
}
|
||||
|
||||
secrets, err := util.GetAllEnvironmentVariables(request, "")
|
||||
if err != nil {
|
||||
util.HandleError(err, "Unable to fetch secrets")
|
||||
@@ -142,6 +173,7 @@ func init() {
|
||||
exportCmd.Flags().StringP("tags", "t", "", "filter secrets by tag slugs")
|
||||
exportCmd.Flags().String("projectId", "", "manually set the projectId to fetch secrets from")
|
||||
exportCmd.Flags().String("path", "/", "get secrets within a folder path")
|
||||
exportCmd.Flags().String("template", "", "The path to the template file used to render secrets")
|
||||
}
|
||||
|
||||
// Format according to the format flag
|
||||
|
@@ -22,10 +22,6 @@ var folderCmd = &cobra.Command{
|
||||
var getCmd = &cobra.Command{
|
||||
Use: "get",
|
||||
Short: "Get folders in a directory",
|
||||
PersistentPreRun: func(cmd *cobra.Command, args []string) {
|
||||
util.RequireLocalWorkspaceFile()
|
||||
util.RequireLogin()
|
||||
},
|
||||
Run: func(cmd *cobra.Command, args []string) {
|
||||
|
||||
environmentName, _ := cmd.Flags().GetString("env")
|
||||
|
@@ -7,6 +7,7 @@ import (
|
||||
"crypto/sha256"
|
||||
"encoding/base64"
|
||||
"fmt"
|
||||
"os"
|
||||
"regexp"
|
||||
"sort"
|
||||
"strings"
|
||||
@@ -204,8 +205,10 @@ var secretsSetCmd = &cobra.Command{
|
||||
// decrypt workspace key
|
||||
plainTextEncryptionKey := crypto.DecryptAsymmetric(encryptedWorkspaceKey, encryptedWorkspaceKeyNonce, encryptedWorkspaceKeySenderPublicKey, currentUsersPrivateKey)
|
||||
|
||||
infisicalTokenEnv := os.Getenv(util.INFISICAL_TOKEN_NAME)
|
||||
|
||||
// pull current secrets
|
||||
secrets, err := util.GetAllEnvironmentVariables(models.GetAllSecretsParameters{Environment: environmentName, SecretsPath: secretsPath}, "")
|
||||
secrets, err := util.GetAllEnvironmentVariables(models.GetAllSecretsParameters{Environment: environmentName, SecretsPath: secretsPath, InfisicalToken: infisicalTokenEnv}, "")
|
||||
if err != nil {
|
||||
util.HandleError(err, "unable to retrieve secrets")
|
||||
}
|
||||
|
@@ -2,7 +2,6 @@ package util
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"os"
|
||||
"strings"
|
||||
|
||||
"github.com/Infisical/infisical-merge/packages/api"
|
||||
@@ -13,13 +12,11 @@ import (
|
||||
|
||||
func GetAllFolders(params models.GetAllFoldersParameters) ([]models.SingleFolder, error) {
|
||||
|
||||
if params.InfisicalToken == "" {
|
||||
params.InfisicalToken = os.Getenv(INFISICAL_TOKEN_NAME)
|
||||
}
|
||||
|
||||
var foldersToReturn []models.SingleFolder
|
||||
var folderErr error
|
||||
if params.InfisicalToken == "" && params.UniversalAuthAccessToken == "" {
|
||||
RequireLogin()
|
||||
RequireLocalWorkspaceFile()
|
||||
|
||||
log.Debug().Msg("GetAllFolders: Trying to fetch folders using logged in details")
|
||||
|
||||
|
@@ -307,10 +307,6 @@ func FilterSecretsByTag(plainTextSecrets []models.SingleEnvironmentVariable, tag
|
||||
}
|
||||
|
||||
func GetAllEnvironmentVariables(params models.GetAllSecretsParameters, projectConfigFilePath string) ([]models.SingleEnvironmentVariable, error) {
|
||||
if params.InfisicalToken == "" {
|
||||
params.InfisicalToken = os.Getenv(INFISICAL_TOKEN_NAME)
|
||||
}
|
||||
|
||||
isConnected := CheckIsConnectedToInternet()
|
||||
var secretsToReturn []models.SingleEnvironmentVariable
|
||||
// var serviceTokenDetails api.GetServiceTokenDetailsResponse
|
||||
|
8
docs/api-reference/endpoints/secrets/create-many.mdx
Normal file
@@ -0,0 +1,8 @@
---
title: "Bulk Create"
openapi: "POST /api/v3/secrets/batch/raw"
---

<Tip>
This endpoint requires you to disable end-to-end encryption. For more information, you should consult this [note](https://infisical.com/docs/api-reference/overview/examples/note).
</Tip>
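For reference, a minimal request sketch is shown below. It mirrors the `TCreateManySecretRawDTO` fields introduced in this changeset (`projectSlug`, `environment`, `secretPath`, `secrets[]`); the base URL and bearer-token auth are assumptions, so adjust them to your instance and token type.

```typescript
// Hypothetical sketch: bulk-create secrets via POST /api/v3/secrets/batch/raw.
// Assumes INFISICAL_TOKEN holds a valid API token for the target project.
const res = await fetch("https://app.infisical.com/api/v3/secrets/batch/raw", {
  method: "POST",
  headers: {
    Authorization: `Bearer ${process.env.INFISICAL_TOKEN}`,
    "Content-Type": "application/json"
  },
  body: JSON.stringify({
    projectSlug: "my-project",
    environment: "dev",
    secretPath: "/",
    secrets: [
      { secretKey: "DB_HOST", secretValue: "localhost" },
      { secretKey: "DB_PASSWORD", secretValue: "changeme", secretComment: "local only" }
    ]
  })
});
if (!res.ok) throw new Error(`Bulk create failed: ${res.status}`);
```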
|
8
docs/api-reference/endpoints/secrets/delete-many.mdx
Normal file
@@ -0,0 +1,8 @@
---
title: "Bulk Delete"
openapi: "DELETE /api/v3/secrets/batch/raw"
---

<Tip>
This endpoint requires you to disable end-to-end encryption. For more information, you should consult this [note](https://infisical.com/docs/api-reference/overview/examples/note).
</Tip>
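As with bulk create, a hedged sketch follows; per `TDeleteManySecretRawDTO` in this changeset, each entry only needs a `secretKey`. The URL and auth header are illustrative assumptions.

```typescript
// Hypothetical sketch: bulk-delete shared secrets via DELETE /api/v3/secrets/batch/raw.
await fetch("https://app.infisical.com/api/v3/secrets/batch/raw", {
  method: "DELETE",
  headers: {
    Authorization: `Bearer ${process.env.INFISICAL_TOKEN}`, // assumed auth scheme
    "Content-Type": "application/json"
  },
  body: JSON.stringify({
    projectSlug: "my-project",
    environment: "dev",
    secretPath: "/",
    secrets: [{ secretKey: "DB_HOST" }, { secretKey: "DB_PASSWORD" }]
  })
});
```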
|
8
docs/api-reference/endpoints/secrets/update-many.mdx
Normal file
@@ -0,0 +1,8 @@
---
title: "Bulk Update"
openapi: "PATCH /api/v3/secrets/batch/raw"
---

<Tip>
This endpoint requires you to disable end-to-end encryption. For more information, you should consult this [note](https://infisical.com/docs/api-reference/overview/examples/note).
</Tip>
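The request shape matches the bulk create sketch but is sent with `PATCH`; per `TUpdateManySecretRawDTO`, each entry carries the new `secretValue` (and optionally `secretComment` / `skipMultilineEncoding`). The URL and auth header are assumptions.

```typescript
// Hypothetical sketch: bulk-update secrets via PATCH /api/v3/secrets/batch/raw.
await fetch("https://app.infisical.com/api/v3/secrets/batch/raw", {
  method: "PATCH",
  headers: {
    Authorization: `Bearer ${process.env.INFISICAL_TOKEN}`, // assumed auth scheme
    "Content-Type": "application/json"
  },
  body: JSON.stringify({
    projectSlug: "my-project",
    environment: "dev",
    secretPath: "/",
    secrets: [{ secretKey: "DB_PASSWORD", secretValue: "rotated-value" }]
  })
});
```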
|
@@ -33,6 +33,9 @@ Export environment variables from the platform into a file format.
|
||||
|
||||
# Export variables to a YAML file
|
||||
infisical export --format=yaml > secrets.yaml
|
||||
|
||||
# Render secrets using a custom template file
|
||||
infisical export --template=<path to template>
|
||||
```
|
||||
|
||||
### Environment variables
|
||||
@@ -57,6 +60,26 @@ Export environment variables from the platform into a file format.
|
||||
</Accordion>
|
||||
|
||||
### Flags
|
||||
<Accordion title="--template">
|
||||
The `--template` flag specifies the path to the template file used for rendering secrets. When using templates, you can omit the other format flags.
|
||||
|
||||
```text my-template-file
|
||||
{{$secrets := secret "<infisical-project-id>" "<environment-slug>" "<folder-path>"}}
|
||||
{{$length := len $secrets}}
|
||||
{{- "{"}}
|
||||
{{- with $secrets }}
|
||||
{{- range $index, $secret := . }}
|
||||
"{{ $secret.Key }}": "{{ $secret.Value }}"{{if lt $index (minus $length 1)}},{{end}}
|
||||
{{- end }}
|
||||
{{- end }}
|
||||
{{ "}" -}}
|
||||
```
|
||||
|
||||
```bash
|
||||
# Example
|
||||
infisical export --template="/path/to/template/file"
|
||||
```
|
||||
</Accordion>
|
||||
<Accordion title="--env">
|
||||
Used to set the environment that secrets are pulled from.
|
||||
|
||||
|
114
docs/documentation/guides/microsoft-power-apps.mdx
Normal file
@@ -0,0 +1,114 @@
|
||||
---
|
||||
title: "Microsoft Power Apps"
|
||||
description: "Learn how to manage secrets in Microsoft Power Apps with Infisical."
|
||||
---
|
||||
In recent years, there has been a shift towards so-called low-code and no-code platforms. These platforms are particularly appealing to businesses without internal development capabilities, yet teams often discover that some coding is necessary to fully satisfy their business needs.
|
||||
|
||||
Low-code platforms have become increasingly sophisticated and useful, leading to a rise in their adoption by businesses. A prime example is Microsoft Power Apps, which offers a range of data sources and service integrations right out of the box. However, even with advanced tools, you might not always find a ready-made solution for every challenge. This means that low-code doesn't equate to no-code, as some coding and customization are still required to cater to specific needs.
|
||||
|
||||
Consider the need for data integrations where an HTTP-based call to a web service might be necessary, typically requiring authentication through an API key or another type of secret.
|
||||
|
||||
Importantly, it's crucial to avoid hardcoding these secrets, as they would then be accessible to anyone with collaboration rights to the code. This underscores the importance of using a secret management solution like Infisical.
|
||||
|
||||
In this article, we'll demonstrate how to retrieve app secrets from Infisical for use in a Power Apps application. We'll create a simple application with a dedicated data connector to illustrate the ease of integrating Infisical with Power Apps. This tutorial assumes some prior programming experience in C#.
|
||||
|
||||
Prerequisites:
|
||||
- Created Microsoft Power App.
|
||||
|
||||
<Steps>
|
||||
<Step title="Integrate an Azure Function Call">
|
||||
First, let’s create a new Azure Function using the Azure Management Portal. Get the [Function App](https://azuremarketplace.microsoft.com/en-us/marketplace/apps/Microsoft.FunctionApp?tab=Overview) from the [Azure Marketplace](https://azuremarketplace.microsoft.com/en-us/).
|
||||

|
||||
|
||||
Place it in a subscription using any resource group. The name of the function is arbitrary. We'll use .NET as a runtime stack, but you can use whatever you're most comfortable with. The OS choice is also up to you. While Linux may look like a lightweight solution, Windows actually has more Azure Functions support. For instance, you cannot edit a Linux-based Azure Function within the Azure management portal.
|
||||
|
||||
By using a consumption plan, we'll only pay for the resources we use when they are requested. This is the classic “serverless” approach, where you do not pay for running servers, only for interactivity.
|
||||
|
||||
Once the new Azure Functions instance is ready, we add a function. In this case, we can do that already from the Azure Management Portal. Use the “HTTP trigger” template and choose the “function” authorization level.
|
||||
|
||||
The code for the first function can be as simple as:
|
||||
|
||||
```
|
||||
using System.Net;
|
||||
|
||||
public static async Task<HttpResponseMessage> Run(HttpRequestMessage req, TraceWriter log)
|
||||
{
|
||||
log.Info("C# HTTP trigger function processed a request.");
|
||||
return req.CreateResponse(HttpStatusCode.OK, "Hello World");
|
||||
}
|
||||
```
|
||||
|
||||
<Note>
|
||||
The code above is written for the older runtime. As a result, you may need to change the runtime version to 1 for the Azure Power Apps integration to work. If we start at a newer version (for example, 3) this triggers a warning before the migration.
|
||||
</Note>
|
||||
|
||||
Finally, we also need to publish the Swagger (or API) definitions and enable cross-origin resource sharing (CORS). While the API definitions are rather easy to set up, the correct CORS value may be tricky. For now, we can use the wildcard option to allow all hosts.
|
||||
|
||||
</Step>
|
||||
<Step title="Create Custom Connector">
|
||||
|
||||
Once we set all this up, it’s time to create the custom connector.
|
||||
|
||||
You can create the custom connector via the data pane. When we use “Create from Azure Service (Preview)”, this yields a dialog similar to the following:
|
||||
|
||||

|
||||
|
||||
We can now fill out the fields using the information for our created function. The combination boxes are automatically filled in order. Once we select one of the reachable subscriptions (tied to the same account we’ve used to log in to create a Power App), the available services are displayed. Once we select our Azure Functions service, we select the function for retrieving the secret.
|
||||
|
||||
</Step>
|
||||
<Step title="Set Up Infisical in an Azure Function">
|
||||
|
||||
You can add Infisical in an Azure Function quite easily using the [Infisical SDK for .NET](https://infisical.com/docs/sdks/languages/csharp) (or other languages). This enables the function to communicate with Infisical to obtain secrets, among other things.
|
||||
|
||||
In short, we can simply bring all the necessary classes over and start using the Client class. Essentially, this enables us to write code like this:
|
||||
|
||||
```
|
||||
var settings = new ClientSettings
|
||||
{
|
||||
ClientId = "CLIENT_ID",
|
||||
ClientSecret = "CLIENT_SECRET",
|
||||
// SiteUrl = "http://localhost:8080", <-- This line can be omitted if you're using Infisical Cloud.
|
||||
};
|
||||
var infisical = new InfisicalClient(settings);
|
||||
|
||||
var options = new GetSecretOptions
|
||||
{
|
||||
SecretName = "TEST",
|
||||
ProjectId = "PROJECT_ID",
|
||||
Environment = "dev",
|
||||
};
|
||||
var secret = infisical.GetSecret(options);
|
||||
```
|
||||
|
||||
Knowing the URL of Infisical as well as the Client Id and Client Secret, we can now access the desired values.
|
||||
|
||||
Now it’s time to actually use the secret within a Power App. There are two ways to request a desired target service with a secret retrieved from the function:
|
||||
|
||||
1. Call the function first, retrieve the secret, then call the target service, for example, via another custom connector with the secret as input.
|
||||
|
||||
2. Perform the final API request within the function call — not returning a secret at all, just the response from invoking the target service.
|
||||
|
||||
While the first option is more flexible (and presumably cheaper!), the second option is definitely easier. In the end, you should mostly decide based on whether the function should be reused for other purposes. If the single Power App is the only consumer of the function, it may make more sense to go with the second option. Otherwise, you should use the first option.
|
||||
|
||||
For our simple example, we don’t need to reuse the function. We also don’t want the additional complexity of maintaining two different custom connectors, where we only use one to pass data to the other one.
|
||||
|
||||
Based on the previous snippet, we create the following code (for proxying a GET request from an API accessible via the URL specified in the apiEndpoint variable).
|
||||
|
||||
```
|
||||
using (var client = new HttpClient())
|
||||
{
|
||||
client.DefaultRequestHeaders
|
||||
.Accept
|
||||
.Add(new MediaTypeWithQualityHeaderValue("application/json"));
|
||||
|
||||
client.DefaultRequestHeaders.Add("X-API-KEY", secret);
|
||||
|
||||
var result = await client.GetAsync(apiEndpoint);
|
||||
var resultContent = await result.Content.ReadAsStringAsync();
|
||||
req.CreateResponse(HttpStatusCode.OK, resultContent);
|
||||
}
|
||||
```
|
||||
This creates a request to the resource protected by an API key that is retrieved from Infisical.
|
||||
|
||||
</Step>
|
||||
</Steps>
|
129
docs/documentation/platform/dynamic-secrets/cassandra.mdx
Normal file
@@ -0,0 +1,129 @@
|
||||
---
|
||||
title: "Cassandra"
|
||||
description: "How to dynamically generate Cassandra database users."
|
||||
---
|
||||
|
||||
The Infisical Cassandra dynamic secret allows you to generate Cassandra database credentials on demand based on a configured role.
|
||||
|
||||
## Prerequisite
|
||||
|
||||
Infisical requires a Cassandra user in your instance with the necessary permissions. This user will facilitate the creation of new accounts as needed.
|
||||
Ensure the user has privileges to create roles, drop roles, and grant permissions to roles so that it can create dynamic secrets.
|
||||
|
||||
<Tip>
|
||||
In your Cassandra configuration file `cassandra.yaml`, make sure you have the following settings:
|
||||
|
||||
```yaml
|
||||
authenticator: PasswordAuthenticator
|
||||
authorizer: CassandraAuthorizer
|
||||
```
|
||||
</Tip>
|
||||
|
||||
The above configuration allows user creation and granting permissions.
|
||||
|
||||
## Set up Dynamic Secrets with Cassandra
|
||||
|
||||
<Steps>
|
||||
<Step title="Open Secret Overview Dashboard">
|
||||
Open the Secret Overview dashboard and select the environment in which you would like to add a dynamic secret.
|
||||
</Step>
|
||||
<Step title="Click on the 'Add Dynamic Secret' button">
|
||||

|
||||
</Step>
|
||||
<Step title="Select Cassandra">
|
||||

|
||||
</Step>
|
||||
<Step title="Provide the inputs for dynamic secret parameters">
|
||||
<ParamField path="Secret Name" type="string" required>
|
||||
Name by which you want the secret to be referenced
|
||||
</ParamField>
|
||||
|
||||
<ParamField path="Default TTL" type="string" required>
|
||||
Default time-to-live for a generated secret (it is possible to modify this value when a secret is generated)
|
||||
</ParamField>
|
||||
|
||||
<ParamField path="Max TTL" type="string" required>
|
||||
Maximum time-to-live for a generated secret
|
||||
</ParamField>
|
||||
|
||||
<ParamField path="Host" type="string" required>
|
||||
Cassandra Host. You can specify multiple Cassandra hosts by separating them with commas.
|
||||
</ParamField>
|
||||
|
||||
<ParamField path="Port" type="number" required>
|
||||
Cassandra port
|
||||
</ParamField>
|
||||
|
||||
<ParamField path="User" type="string" required>
|
||||
Username that will be used to create dynamic secrets
|
||||
</ParamField>
|
||||
|
||||
<ParamField path="Password" type="string" required>
|
||||
Password that will be used to create dynamic secrets
|
||||
</ParamField>
|
||||
|
||||
<ParamField path="Local Data Center" type="string" required>
|
||||
Specify the local data center in Cassandra that you want to use. This choice should align with your Cassandra cluster setup.
|
||||
</ParamField>
|
||||
|
||||
<ParamField path="Keyspace" type="string">
|
||||
Keyspace name where you want to create dynamic secrets. This ensures that the user is limited to that keyspace.
|
||||
</ParamField>
|
||||
|
||||
<ParamField path="CA(SSL)" type="string">
|
||||
A CA may be required if your Cassandra instance requires it for incoming connections.
|
||||
</ParamField>
|
||||
|
||||

|
||||
|
||||
</Step>
|
||||
<Step title="(Optional) Modify CQL Statements">
|
||||
If you want to provide specific privileges for the generated dynamic credentials, you can modify the CQL statement to your needs. This is useful if you want to only give access to a specific key-space(s).
|
||||
|
||||

|
||||
</Step>
|
||||
<Step title="Click 'Submit'">
|
||||
After submitting the form, you will see a dynamic secret created in the dashboard.
|
||||
|
||||
<Note>
|
||||
If this step fails, you may have to add the CA certificate.
|
||||
</Note>
|
||||
|
||||

|
||||
</Step>
|
||||
<Step title="Generate dynamic secrets">
|
||||
Once you've successfully configured the dynamic secret, you're ready to generate on-demand credentials.
|
||||
To do this, simply click on the 'Generate' button which appears when hovering over the dynamic secret item.
|
||||
Alternatively, you can initiate the creation of a new lease by selecting 'New Lease' from the dynamic secret lease list section.
|
||||
|
||||

|
||||

|
||||
|
||||
When generating these secrets, it's important to specify a Time-to-Live (TTL) duration. This will dictate how long the credentials are valid for.
|
||||
|
||||

|
||||
|
||||
<Tip>
|
||||
Ensure that the TTL for the lease falls within the maximum TTL defined when configuring the dynamic secret in step 4.
|
||||
</Tip>
|
||||
|
||||
|
||||
Once you click the `Submit` button, a new secret lease will be generated and the credentials for it will be shown to you.
|
||||
|
||||

|
||||
</Step>
|
||||
</Steps>
|
||||
|
||||
## Audit or Revoke Leases
|
||||
Once you have created one or more leases, you will be able to access them by clicking on the respective dynamic secret item on the dashboard.
|
||||
This will allow you to see the lease details and delete the lease ahead of its expiration time.
|
||||
|
||||

|
||||
|
||||
## Renew Leases
|
||||
To extend the life of a generated dynamic secret lease past its initial time to live, simply click on **Renew** as illustrated below.
|
||||

|
||||
|
||||
<Warning>
|
||||
Lease renewals cannot exceed the maximum TTL set when configuring the dynamic secret
|
||||
</Warning>
|
115
docs/documentation/platform/dynamic-secrets/mysql.mdx
Normal file
@@ -0,0 +1,115 @@
|
||||
---
|
||||
title: "MySQL"
|
||||
description: "Learn how to dynamically generate MySQL Database user passwords."
|
||||
---
|
||||
|
||||
The Infisical MySQL dynamic secret allows you to generate MySQL Database credentials on demand based on a configured role.
|
||||
|
||||
## Prerequisite
|
||||
Create a user with the required permission in your SQL instance. This user will be used to create new accounts on-demand.
|
||||
|
||||
|
||||
## Set up Dynamic Secrets with MySQL
|
||||
|
||||
<Steps>
|
||||
<Step title="Open Secret Overview Dashboard">
|
||||
Open the Secret Overview dashboard and select the environment in which you would like to add a dynamic secret.
|
||||
</Step>
|
||||
<Step title="Click on the 'Add Dynamic Secret' button">
|
||||

|
||||
</Step>
|
||||
<Step title="Select 'SQL Database'">
|
||||

|
||||
</Step>
|
||||
<Step title="Provide the inputs for dynamic secret parameters">
|
||||
<ParamField path="Secret Name" type="string" required>
|
||||
Name by which you want the secret to be referenced
|
||||
</ParamField>
|
||||
|
||||
<ParamField path="Default TTL" type="string" required>
|
||||
Default time-to-live for a generated secret (it is possible to modify this value when a secret is generated)
|
||||
</ParamField>
|
||||
|
||||
<ParamField path="Max TTL" type="string" required>
|
||||
Maximum time-to-live for a generated secret
|
||||
</ParamField>
|
||||
|
||||
<ParamField path="Service" type="string" required>
|
||||
Choose the service you want to generate dynamic secrets for. This must be selected as **MySQL**.
|
||||
</ParamField>
|
||||
|
||||
<ParamField path="Host" type="string" required>
|
||||
Database host
|
||||
</ParamField>
|
||||
|
||||
<ParamField path="Port" type="number" required>
|
||||
Database port
|
||||
</ParamField>
|
||||
|
||||
<ParamField path="User" type="string" required>
|
||||
Username that will be used to create dynamic secrets
|
||||
</ParamField>
|
||||
|
||||
<ParamField path="Password" type="string" required>
|
||||
Password that will be used to create dynamic secrets
|
||||
</ParamField>
|
||||
|
||||
<ParamField path="Database Name" type="string" required>
|
||||
Name of the database for which you want to create dynamic secrets
|
||||
</ParamField>
|
||||
|
||||
<ParamField path="CA(SSL)" type="string">
|
||||
A CA may be required if your DB requires it for incoming connections. AWS RDS instances with default settings will require a CA, which can be downloaded [here](https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/UsingWithRDS.SSL.html#UsingWithRDS.SSL.CertificatesAllRegions).
|
||||
</ParamField>
|
||||
|
||||
</Step>
|
||||
<Step title="(Optional) Modify SQL Statements">
|
||||
If you want to provide specific privileges for the generated dynamic credentials, you can modify the SQL statement to your needs. This is useful if you want to only give access to a specific table(s).
|
||||
|
||||

|
||||
</Step>
|
||||
<Step title="Click `Submit`">
|
||||
After submitting the form, you will see a dynamic secret created in the dashboard.
|
||||
|
||||
<Note>
|
||||
If this step fails, you may have to add the CA certificate.
|
||||
</Note>
|
||||
|
||||

|
||||
</Step>
|
||||
<Step title="Generate dynamic secrets">
|
||||
Once you've successfully configured the dynamic secret, you're ready to generate on-demand credentials.
|
||||
To do this, simply click on the 'Generate' button which appears when hovering over the dynamic secret item.
|
||||
Alternatively, you can initiate the creation of a new lease by selecting 'New Lease' from the dynamic secret lease list section.
|
||||
|
||||

|
||||

|
||||
|
||||
When generating these secrets, it's important to specify a Time-to-Live (TTL) duration. This will dictate how long the credentials are valid for.
|
||||
|
||||

|
||||
|
||||
<Tip>
|
||||
Ensure that the TTL for the lease falls within the maximum TTL defined when configuring the dynamic secret.
|
||||
</Tip>
|
||||
|
||||
|
||||
Once you click the `Submit` button, a new secret lease will be generated and the credentials from it will be shown to you.
|
||||
|
||||

|
||||
</Step>
|
||||
</Steps>
|
||||
|
||||
## Audit or Revoke Leases
|
||||
Once you have created one or more leases, you will be able to access them by clicking on the respective dynamic secret item on the dashboard.
|
||||
This will allow you to see the expiration time of the lease or delete a lease before its time to live is up.
|
||||
|
||||

|
||||
|
||||
## Renew Leases
|
||||
To extend the life of generated dynamic secret leases past their initial time to live, simply click on **Renew** as illustrated below.
|
||||

|
||||
|
||||
<Warning>
|
||||
Lease renewals cannot exceed the maximum TTL set when configuring the dynamic secret
|
||||
</Warning>
|
115
docs/documentation/platform/dynamic-secrets/oracle.mdx
Normal file
@@ -0,0 +1,115 @@
|
||||
---
|
||||
title: "Oracle"
|
||||
description: "Learn how to dynamically generate Oracle Database user passwords."
|
||||
---
|
||||
|
||||
The Infisical Oracle dynamic secret allows you to generate Oracle Database credentials on demand based on a configured role.
|
||||
|
||||
## Prerequisite
|
||||
Create a user with the required permission in your SQL instance. This user will be used to create new accounts on-demand.
|
||||
|
||||
|
||||
## Set up Dynamic Secrets with Oracle
|
||||
|
||||
<Steps>
|
||||
<Step title="Open Secret Overview Dashboard">
|
||||
Open the Secret Overview dashboard and select the environment in which you would like to add a dynamic secret.
|
||||
</Step>
|
||||
<Step title="Click on the 'Add Dynamic Secret' button">
|
||||

|
||||
</Step>
|
||||
<Step title="Select `SQL Database`">
|
||||

|
||||
</Step>
|
||||
<Step title="Provide the inputs for dynamic secret parameters">
|
||||
<ParamField path="Secret Name" type="string" required>
|
||||
Name by which you want the secret to be referenced
|
||||
</ParamField>
|
||||
|
||||
<ParamField path="Default TTL" type="string" required>
|
||||
Default time-to-live for a generated secret (it is possible to modify this value when a secret is generated)
|
||||
</ParamField>
|
||||
|
||||
<ParamField path="Max TTL" type="string" required>
|
||||
Maximum time-to-live for a generated secret
|
||||
</ParamField>
|
||||
|
||||
<ParamField path="Service" type="string" required>
|
||||
Choose the service you want to generate dynamic secrets for. This must be selected as **Oracle**.
|
||||
</ParamField>
|
||||
|
||||
<ParamField path="Host" type="string" required>
|
||||
Database host
|
||||
</ParamField>
|
||||
|
||||
<ParamField path="Port" type="number" required>
|
||||
Database port
|
||||
</ParamField>
|
||||
|
||||
<ParamField path="User" type="string" required>
|
||||
Username that will be used to create dynamic secrets
|
||||
</ParamField>
|
||||
|
||||
<ParamField path="Password" type="string" required>
|
||||
Password that will be used to create dynamic secrets
|
||||
</ParamField>
|
||||
|
||||
<ParamField path="Database Name" type="string" required>
|
||||
Name of the database for which you want to create dynamic secrets
|
||||
</ParamField>
|
||||
|
||||
<ParamField path="CA(SSL)" type="string">
|
||||
A CA may be required if your DB requires it for incoming connections. AWS RDS instances with default settings will require a CA, which can be downloaded [here](https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/UsingWithRDS.SSL.html#UsingWithRDS.SSL.CertificatesAllRegions).
|
||||
</ParamField>
|
||||
|
||||

|
||||
|
||||
</Step>
|
||||
<Step title="(Optional) Modify SQL Statements">
|
||||
If you want to provide specific privileges for the generated dynamic credentials, you can modify the SQL statement to your needs. This is useful if you want to only give access to a specific table(s).
|
||||
</Step>
|
||||
<Step title="Click 'Submit'">
|
||||
After submitting the form, you will see a dynamic secret created in the dashboard.
|
||||
|
||||
<Note>
|
||||
If this step fails, you may have to add the CA certificate.
|
||||
</Note>
|
||||
|
||||

|
||||
</Step>
|
||||
<Step title="Generate dynamic secrets">
|
||||
Once you've successfully configured the dynamic secret, you're ready to generate on-demand credentials.
|
||||
To do this, simply click on the 'Generate' button which appears when hovering over the dynamic secret item.
|
||||
Alternatively, you can initiate the creation of a new lease by selecting 'New Lease' from the dynamic secret lease list section.
|
||||
|
||||

|
||||

|
||||
|
||||
When generating these secrets, it's important to specify a Time-to-Live (TTL) duration. This will dictate how long the credentials are valid for.
|
||||
|
||||

|
||||
|
||||
<Tip>
|
||||
Ensure that the TTL for the lease falls within the maximum TTL defined when configuring the dynamic secret.
|
||||
</Tip>
|
||||
|
||||
|
||||
Once you click the `Submit` button, a new secret lease will be generated and the credentials for it will be shown to you.
|
||||
|
||||

|
||||
</Step>
|
||||
</Steps>
|
||||
|
||||
## Audit or Revoke Leases
|
||||
Once you have created one or more leases, you will be able to access them by clicking on the respective dynamic secret item on the dashboard.
|
||||
This will allow you to see the expiration time of the lease or delete a lease before its time to live is up.
|
||||
|
||||

|
||||
|
||||
## Renew Leases
|
||||
To extend the life of generated dynamic secret leases past their initial time to live, simply click on **Renew** as illustrated below.
|
||||

|
||||
|
||||
<Warning>
|
||||
Lease renewals cannot exceed the maximum TTL set when configuring the dynamic secret
|
||||
</Warning>
|
@@ -1,14 +1,13 @@
|
||||
---
|
||||
title: "PostgreSQL"
|
||||
description: "Learn how to dynamically generate PostgreSQL Database user passwords."
|
||||
description: "How to dynamically generate PostgreSQL database users."
|
||||
---
|
||||
|
||||
The Infisical PostgreSQL secret rotation allows you to automatically rotate your PostgreSQL database user's password at a predefined interval.
|
||||
|
||||
The Infisical PostgreSQL dynamic secret allows you to generate PostgreSQL database credentials on demand based on a configured role.
|
||||
|
||||
## Prerequisite
|
||||
|
||||
1. Create a user with the required permission in your SQL instance.
|
||||
Create a user with the required permission in your SQL instance. This user will be used to create new accounts on-demand.
|
||||
|
||||
|
||||
## Set up Dynamic Secrets with PostgreSQL
|
||||
@@ -17,7 +16,7 @@ The Infisical PostgreSQL secret rotation allows you to automatically rotate your
|
||||
<Step title="Open Secret Overview Dashboard">
|
||||
Open the Secret Overview dashboard and select the environment in which you would like to add a dynamic secret.
|
||||
</Step>
|
||||
<Step title="Click on the `Add Dynamic Secret` button">
|
||||
<Step title="Click on the 'Add Dynamic Secret' button">
|
||||

|
||||
</Step>
|
||||
<Step title="Select `SQL Database`">
|
||||
@@ -37,7 +36,7 @@ The Infisical PostgreSQL secret rotation allows you to automatically rotate your
|
||||
</ParamField>
|
||||
|
||||
<ParamField path="Service" type="string" required>
|
||||
Choose the service you want to generate dynamic secrets for
|
||||
Choose the service you want to generate dynamic secrets for. This must be selected as **PostgreSQL**.
|
||||
</ParamField>
|
||||
|
||||
<ParamField path="Host" type="string" required>
|
||||
@@ -68,31 +67,52 @@ The Infisical PostgreSQL secret rotation allows you to automatically rotate your
|
||||
|
||||
</Step>
|
||||
<Step title="(Optional) Modify SQL Statements">
|
||||
If you want to provide specific privileges for the future generated dynamic secrets, you are able to specify them as SQL statements.
|
||||
If you want to provide specific privileges for the generated dynamic credentials, you can modify the SQL statement to your needs. This is useful if you want to only give access to a specific table(s).
|
||||
|
||||

|
||||
</Step>
|
||||
<Step title="Click `Submit`">
|
||||
After submitting the form, you will see a dynamic secret creates in the dashboard.
|
||||
<Step title="Click 'Submit'">
|
||||
After submitting the form, you will see a dynamic secret created in the dashboard.
|
||||
|
||||
<Note>
|
||||
If this step fails, you might have to add the CA certficate.
|
||||
If this step fails, you may have to add the CA certificate.
|
||||
</Note>
|
||||
|
||||

|
||||
</Step>
|
||||
<Step title="Generate dynamic secrets">
|
||||
Now that the dynamic secret is created, you can start generating unique secret values by specifying the Time-to-live within the predefined range.
|
||||
Once you've successfully configured the dynamic secret, you're ready to generate on-demand credentials.
|
||||
To do this, simply click on the 'Generate' button which appears when hovering over the dynamic secret item.
|
||||
Alternatively, you can initiate the creation of a new lease by selecting 'New Lease' from the dynamic secret lease list section.
|
||||
|
||||

|
||||

|
||||

|
||||
|
||||
After you click the `Submit` button, a new secret lease will be generated and the Database User and Database Password will be shown.
|
||||
When generating these secrets, it's important to specify a Time-to-Live (TTL) duration. This will dictate how long the credentials are valid for.
|
||||
|
||||

|
||||
</Step>
|
||||
<Step title="Audit or Revoke Leases">
|
||||
As soon as you have generated a few secret leases, you will be able to access them by clicking `Generate` on the dynamic secret row. In this modal, you are able to see the expiration time or delete a secret preemptively.
|
||||

|
||||
|
||||

|
||||
<Tip>
|
||||
Ensure that the TTL for the lease falls within the maximum TTL defined when configuring the dynamic secret.
|
||||
</Tip>
|
||||
|
||||
|
||||
Once you click the `Submit` button, a new secret lease will be generated and the credentials for it will be shown to you.
|
||||
|
||||

|
||||
</Step>
|
||||
</Steps>
|
||||
|
||||
## Audit or Revoke Leases
|
||||
Once you have created one or more leases, you will be able to access them by clicking on the respective dynamic secret item on the dashboard.
|
||||
This will allow you to see the expiration time of the lease or delete the lease before its time to live is up.
|
||||
|
||||

|
||||
|
||||
## Renew Leases
|
||||
To extend the life of generated dynamic secret leases past their initial time to live, simply click on **Renew** as illustrated below.
|
||||

|
||||
|
||||
<Warning>
|
||||
Lease renewals cannot exceed the maximum TTL set when configuring the dynamic secret
|
||||
</Warning>
|
||||
|
@@ -9,9 +9,9 @@ A **user identity** (also known as **user**) represents a developer, admin, or a
|
||||
|
||||
Users can be added manually (through Web UI) or programmatically (e.g., API) to [organizations](../organization) and [projects](../projects).
|
||||
|
||||
Upon being added to an organizaztion and projects, users assume a certain set of roles and permissions that represents their identity.
|
||||
Upon being added to an organization and projects, users assume a certain set of roles and permissions that represents their identity.
|
||||
|
||||

|
||||

|
||||
|
||||
## Authentication methods
|
||||
|
||||
|
@@ -1,36 +0,0 @@
---
title: "LDAP"
description: "Log in to Infisical with LDAP"
---

<Info>
LDAP is a paid feature.

If you're using Infisical Cloud, then it is available under the **Enterprise Tier**. If you're self-hosting Infisical,
then you should contact sales@infisical.com to purchase an enterprise license to use it.
</Info>

You can configure your organization in Infisical to have members authenticate with the platform via [LDAP](https://en.wikipedia.org/wiki/Lightweight_Directory_Access_Protocol).

<Steps>
<Step title="Prepare the LDAP configuration in Infisical">
In Infisical, head to your Organization Settings > Authentication > LDAP Configuration and select **Set up LDAP**.

Next, input your LDAP server settings.

![LDAP configuration](/images/platform/ldap/ldap-config.png)

Here's some guidance for each field:

- URL: The LDAP server to connect to such as `ldap://ldap.your-org.com`, `ldaps://ldap.myorg.com:636` (for connection over SSL/TLS), etc.
- Bind DN: The distinguished name of the object to bind when performing the user search such as `cn=infisical,ou=Users,dc=acme,dc=com`.
- Bind Pass: The password to use along with `Bind DN` when performing the user search.
- Search Base / User DN: Base DN under which to perform the user search such as `ou=Users,dc=example,dc=com`.
- CA Certificate: The CA certificate to use when verifying the LDAP server certificate.
</Step>
<Step title="Enable LDAP in Infisical">
Enabling LDAP allows members in your organization to log into Infisical via LDAP.

![LDAP toggle](/images/platform/ldap/ldap-toggle.png)
</Step>
</Steps>
@@ -4,16 +4,17 @@ description: "Learn how to log in to Infisical with LDAP."
---

<Info>
LDAP is a paid feature. If you're using Infisical Cloud, then it is available
under the **Enterprise Tier**. If you're self-hosting Infisical, then you
should contact sales@infisical.com to purchase an enterprise license to use
it.
</Info>

You can configure your organization in Infisical to have members authenticate with the platform via [LDAP](https://en.wikipedia.org/wiki/Lightweight_Directory_Access_Protocol).

<Steps>
<Step title="Prepare the LDAP configuration in Infisical">
In Infisical, head to your Organization Settings > Security > LDAP and select **Manage**.

Next, input your LDAP server settings.
@@ -24,11 +25,50 @@ You can configure your organization in Infisical to have members authenticate wi

- URL: The LDAP server to connect to such as `ldap://ldap.your-org.com`, `ldaps://ldap.myorg.com:636` (for connection over SSL/TLS), etc.
- Bind DN: The distinguished name of the object to bind when performing the user search such as `cn=infisical,ou=Users,dc=acme,dc=com`.
- Bind Pass: The password to use along with `Bind DN` when performing the user search.
- User Search Base / User DN: Base DN under which to perform the user search such as `ou=Users,dc=acme,dc=com`.
- User Search Filter (optional): Template used to construct the LDAP user search filter such as `(uid={{username}})`; use literal `{{username}}` to have the given username used in the search. The default is `(uid={{username}})`, which is compatible with several common directory schemas.
- Group Search Base / Group DN (optional): LDAP search base to use for group membership search such as `ou=Groups,dc=acme,dc=com`.
- Group Filter (optional): Template used when constructing the group membership query such as `(&(objectClass=posixGroup)(memberUid={{.Username}}))`. The template can access the following context variables: [`UserDN`, `UserName`]. The default is `(|(memberUid={{.Username}})(member={{.UserDN}})(uniqueMember={{.UserDN}}))`, which is compatible with several common directory schemas; see the expansion sketch below.
- CA Certificate: The CA certificate to use when verifying the LDAP server certificate.

<Note>
The **Group Search Base / Group DN** and **Group Filter** fields are both required if you wish to sync LDAP groups to Infisical.
</Note>
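
For a clearer picture of how the `{{username}}`, `{{.Username}}`, and `{{.UserDN}}` placeholders are filled in, here is a minimal sketch that uses plain string substitution (Infisical's actual templating engine may differ):

```python
# Illustrative only: substitute the template placeholders the way the fields above describe.
def render_user_filter(template: str, username: str) -> str:
    return template.replace("{{username}}", username)

def render_group_filter(template: str, username: str, user_dn: str) -> str:
    return template.replace("{{.Username}}", username).replace("{{.UserDN}}", user_dn)

print(render_user_filter("(uid={{username}})", "alice"))
# (uid=alice)

print(render_group_filter(
    "(|(memberUid={{.Username}})(member={{.UserDN}})(uniqueMember={{.UserDN}}))",
    "alice",
    "uid=alice,ou=Users,dc=acme,dc=com",
))
# (|(memberUid=alice)(member=uid=alice,ou=Users,dc=acme,dc=com)(uniqueMember=uid=alice,ou=Users,dc=acme,dc=com))
```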

</Step>
<Step title="Test the LDAP connection">
Once you've filled out the LDAP configuration, you can test that part of the configuration is correct by pressing the **Test Connection** button.

Infisical will attempt to bind to the LDAP server using the provided **URL**, **Bind DN**, and **Bind Pass**. If the operation is successful, then Infisical will display a success message; if not, then Infisical will display an error message and provide a fuller error in the server logs.
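
If you'd like to reproduce a similar check outside of Infisical, the sketch below uses the Python `ldap3` library to bind with the same values and run a user search; the URL, Bind DN, password, and username are placeholders for your own settings:

```python
from ldap3 import Server, Connection

# Placeholders: substitute your own URL, Bind DN, Bind Pass, Search Base, and username.
server = Server("ldaps://ldap.your-org.com:636")

# Bind with the same service account Infisical will use (Bind DN / Bind Pass).
conn = Connection(
    server,
    user="cn=infisical,ou=Users,dc=acme,dc=com",
    password="your-bind-password",
    auto_bind=True,  # raises an exception if the bind is rejected
)

# Run the kind of user search Infisical performs, with {{username}} already substituted.
conn.search(
    search_base="ou=Users,dc=acme,dc=com",
    search_filter="(uid=alice)",
    attributes=["cn", "mail"],
)
print(conn.entries)
```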

![LDAP test connection](/images/platform/ldap/ldap-test-connection.png)
</Step>

<Step title="Define mappings from LDAP groups to groups in Infisical">
In order to sync LDAP groups to Infisical, head to the **LDAP Group Mappings** section to define mappings from LDAP groups to groups in Infisical.

![LDAP group mappings section](/images/platform/ldap/ldap-group-mappings-section.png)

Group mappings ensure that users who log into Infisical via LDAP are added to or removed from the Infisical group(s) that correspond to the LDAP group(s) they are a member of.

![LDAP group mappings table](/images/platform/ldap/ldap-group-mappings-table.png)

Each group mapping consists of two parts:
- LDAP Group CN: The common name of the LDAP group to map.
- Infisical Group: The Infisical group to map the LDAP group to.

For example, suppose you want to automatically add a user who is part of the LDAP group with CN `Engineers` to the Infisical group `Engineers` when the user sets up their account with Infisical.

In this case, you would specify a mapping from the LDAP group with CN `Engineers` to the Infisical group `Engineers`.
Now when the user logs into Infisical via LDAP, Infisical will check the LDAP groups that the user is a part of while referencing the group mappings you created earlier. Since the user is a member of the LDAP group with CN `Engineers`, they will be added to the Infisical group `Engineers`.
In the future, if the user is no longer part of the LDAP group with CN `Engineers`, they will be removed from the Infisical group `Engineers` upon their next login.
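
The add/remove behavior described above boils down to a set difference between the groups the mappings say the user should be in and the groups they are currently in. A minimal sketch, illustrative only and not Infisical's implementation:

```python
def sync_groups(
    ldap_group_cns: set[str],
    mappings: dict[str, str],
    current_infisical_groups: set[str],
) -> tuple[set[str], set[str]]:
    """Return the Infisical groups to add the user to and to remove them from."""
    desired = {mappings[cn] for cn in ldap_group_cns if cn in mappings}
    mapped = set(mappings.values())  # only touch groups that are managed by a mapping
    to_add = desired - current_infisical_groups
    to_remove = (current_infisical_groups & mapped) - desired
    return to_add, to_remove

# Example: the user left the LDAP `Engineers` group since their last login.
print(sync_groups({"Sales"}, {"Engineers": "Engineers", "Sales": "Sales"}, {"Engineers"}))
# ({'Sales'}, {'Engineers'})
```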

<Note>
Prior to defining any group mappings, ensure that you've created the Infisical groups that you want to map the LDAP groups to.
You can read more about creating (user) groups in Infisical [here](/documentation/platform/groups).
</Note>
</Step>
<Step title="Enable LDAP in Infisical">
Enabling LDAP allows members in your organization to log into Infisical via LDAP.

![LDAP toggle](/images/platform/ldap/ldap-toggle.png)
</Step>
</Steps>
@@ -4,9 +4,10 @@ description: "Learn how to configure JumpCloud LDAP for authenticating into Infi
---

<Info>
LDAP is a paid feature. If you're using Infisical Cloud, then it is available
under the **Enterprise Tier**. If you're self-hosting Infisical, then you
should contact sales@infisical.com to purchase an enterprise license to use
it.
</Info>

<Steps>
@@ -17,13 +18,12 @@ description: "Learn how to configure JumpCloud LDAP for authenticating into Infi
When creating the user, input their **First Name**, **Last Name**, **Username** (required), **Company Email** (required), and **Description**.
Also, create a password for the user.

Next, under User Security Settings and Permissions > Permission Settings, check the box next to **Enable as LDAP Bind DN**.

![JumpCloud LDAP](/images/platform/ldap/jumpcloud/ldap-jumpcloud-enable-bind-dn.png)

</Step>
<Step title="Prepare the LDAP configuration in Infisical">
In Infisical, head to your Organization Settings > Security > LDAP and select **Manage**.

Next, input your JumpCloud LDAP server settings.
@@ -34,21 +34,57 @@ description: "Learn how to configure JumpCloud LDAP for authenticating into Infi

- URL: The LDAP server to connect to (`ldaps://ldap.jumpcloud.com:636`).
- Bind DN: The distinguished name of the object to bind when performing the user search (`uid=<ldap-user-username>,ou=Users,o=<your-org-id>,dc=jumpcloud,dc=com`).
- Bind Pass: The password to use along with `Bind DN` when performing the user search.
- User Search Base / User DN: Base DN under which to perform the user search (`ou=Users,o=<your-org-id>,dc=jumpcloud,dc=com`).
- User Search Filter (optional): Template used to construct the LDAP user search filter (`(uid={{username}})`).
- Group Search Base / Group DN (optional): LDAP search base to use for group membership search (`ou=Users,o=<your-org-id>,dc=jumpcloud,dc=com`).
- Group Filter (optional): Template used when constructing the group membership query (`(&(objectClass=groupOfNames)(member=uid={{.Username}},ou=Users,o=<your-org-id>,dc=jumpcloud,dc=com))`).
- CA Certificate: The CA certificate to use when verifying the LDAP server certificate (instructions to obtain the certificate for JumpCloud [here](https://jumpcloud.com/support/connect-to-ldap-with-tls-ssl)); a quick way to verify the certificate yourself is sketched after the tip below.

<Tip>
When filling out the **Bind DN** and **Bind Pass** fields, refer to the username and password of the user created in Step 1.

Also, for the **Bind DN** and **User Search Base / User DN** fields, you'll want to use the organization ID that appears
in your LDAP instance **ORG DN**.
</Tip>
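
As referenced in the CA Certificate field above, you can check that the certificate you plan to use actually validates JumpCloud's LDAPS endpoint before pasting it into Infisical. A minimal sketch using the Python `ldap3` library, assuming the certificate has been saved locally as `jumpcloud-ca.pem` and using placeholder credentials:

```python
import ssl
from ldap3 import Server, Connection, Tls

# Require certificate validation against the JumpCloud CA bundle you downloaded.
tls = Tls(validate=ssl.CERT_REQUIRED, ca_certs_file="jumpcloud-ca.pem")

server = Server("ldap.jumpcloud.com", port=636, use_ssl=True, tls=tls)

conn = Connection(
    server,
    user="uid=<ldap-user-username>,ou=Users,o=<your-org-id>,dc=jumpcloud,dc=com",
    password="your-bind-password",
    auto_bind=True,  # fails here if the certificate or credentials are rejected
)
print(conn.bound)  # True when both TLS validation and the bind succeed
```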

</Step>
<Step title="Test the LDAP connection">
Once you've filled out the LDAP configuration, you can test that part of the configuration is correct by pressing the **Test Connection** button.

Infisical will attempt to bind to the LDAP server using the provided **URL**, **Bind DN**, and **Bind Pass**. If the operation is successful, then Infisical will display a success message; if not, then Infisical will display an error message and provide a fuller error in the server logs.

![LDAP test connection](/images/platform/ldap/ldap-test-connection.png)
</Step>
<Step title="Define mappings from LDAP groups to groups in Infisical">
In order to sync LDAP groups to Infisical, head to the **LDAP Group Mappings** section to define mappings from LDAP groups to groups in Infisical.

![LDAP group mappings section](/images/platform/ldap/ldap-group-mappings-section.png)

Group mappings ensure that users who log into Infisical via LDAP are added to or removed from the Infisical group(s) that correspond to the LDAP group(s) they are a member of.

![LDAP group mappings table](/images/platform/ldap/ldap-group-mappings-table.png)

Each group mapping consists of two parts:
- LDAP Group CN: The common name of the LDAP group to map.
- Infisical Group: The Infisical group to map the LDAP group to.

For example, suppose you want to automatically add a user who is part of the LDAP group with CN `Engineers` to the Infisical group `Engineers` when the user sets up their account with Infisical.

In this case, you would specify a mapping from the LDAP group with CN `Engineers` to the Infisical group `Engineers`.
Now when the user logs into Infisical via LDAP, Infisical will check the LDAP groups that the user is a part of while referencing the group mappings you created earlier. Since the user is a member of the LDAP group with CN `Engineers`, they will be added to the Infisical group `Engineers`.
In the future, if the user is no longer part of the LDAP group with CN `Engineers`, they will be removed from the Infisical group `Engineers` upon their next login.
<Note>
Prior to defining any group mappings, ensure that you've created the Infisical groups that you want to map the LDAP groups to.
You can read more about creating (user) groups in Infisical [here](/documentation/platform/groups).
</Note>

</Step>
<Step title="Enable LDAP in Infisical">
Enabling LDAP allows members in your organization to log into Infisical via LDAP.

![LDAP toggle](/images/platform/ldap/ldap-toggle.png)
</Step>

</Steps>

Resources:

- [JumpCloud Cloud LDAP Guide](https://jumpcloud.com/support/use-cloud-ldap)