Mirror of https://github.com/Infisical/infisical.git (synced 2025-04-06 22:14:48 +00:00)

Compare commits: infisical-...pki-teleme (446 commits)
.envrc (new file, 3 lines)
@@ -0,0 +1,3 @@
# Learn more at https://direnv.net
# We instruct direnv to use our Nix flake for a consistent development environment.
use flake
@@ -35,7 +35,20 @@ jobs:
echo "SECRET_SCANNING_GIT_APP_ID=793712" >> .env
echo "SECRET_SCANNING_PRIVATE_KEY=some-random" >> .env
echo "SECRET_SCANNING_WEBHOOK_SECRET=some-random" >> .env
docker run --name infisical-api -d -p 4000:4000 -e DB_CONNECTION_URI=$DB_CONNECTION_URI -e REDIS_URL=$REDIS_URL -e JWT_AUTH_SECRET=$JWT_AUTH_SECRET -e ENCRYPTION_KEY=$ENCRYPTION_KEY --env-file .env --entrypoint '/bin/sh' infisical-api

echo "Examining built image:"
docker image inspect infisical-api | grep -A 5 "Entrypoint"

docker run --name infisical-api -d -p 4000:4000 \
-e DB_CONNECTION_URI=$DB_CONNECTION_URI \
-e REDIS_URL=$REDIS_URL \
-e JWT_AUTH_SECRET=$JWT_AUTH_SECRET \
-e ENCRYPTION_KEY=$ENCRYPTION_KEY \
--env-file .env \
infisical-api

echo "Container status right after creation:"
docker ps -a | grep infisical-api
env:
REDIS_URL: redis://172.17.0.1:6379
DB_CONNECTION_URI: postgres://infisical:infisical@172.17.0.1:5432/infisical?sslmode=disable

@@ -49,29 +62,42 @@ jobs:
SECONDS=0
HEALTHY=0
while [ $SECONDS -lt 60 ]; do
if docker ps | grep infisical-api | grep -q healthy; then
echo "Container is healthy."
HEALTHY=1
# Check if container is running
if docker ps | grep infisical-api; then
# Try to access the API endpoint
if curl -s -f http://localhost:4000/api/docs/json > /dev/null 2>&1; then
echo "API endpoint is responding. Container seems healthy."
HEALTHY=1
break
fi
else
echo "Container is not running!"
docker ps -a | grep infisical-api
break
fi

echo "Waiting for container to be healthy... ($SECONDS seconds elapsed)"

docker logs infisical-api

sleep 2
SECONDS=$((SECONDS+2))
sleep 5
SECONDS=$((SECONDS+5))
done

if [ $HEALTHY -ne 1 ]; then
echo "Container did not become healthy in time"
echo "Container status:"
docker ps -a | grep infisical-api
echo "Container logs (if any):"
docker logs infisical-api || echo "No logs available"
echo "Container inspection:"
docker inspect infisical-api | grep -A 5 "State"
exit 1
fi
- name: Install openapi-diff
run: go install github.com/tufin/oasdiff@latest
run: go install github.com/oasdiff/oasdiff@latest
- name: Running OpenAPI Spec diff action
run: oasdiff breaking https://app.infisical.com/api/docs/json http://localhost:4000/api/docs/json --fail-on ERR
- name: cleanup
if: always()
run: |
docker compose -f "docker-compose.dev.yml" down
docker stop infisical-api
docker remove infisical-api
docker stop infisical-api || true
docker rm infisical-api || true
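The rewritten wait loop above stops trusting the `docker ps` health column and instead polls the API's own `/api/docs/json` endpoint until it answers or 60 seconds elapse, dumping container logs on every attempt. For reference, the same readiness probe expressed in TypeScript — a minimal sketch only; `waitForApi` is an illustrative helper, not code from this repository:

```ts
// Minimal sketch (not repository code): poll an HTTP endpoint until it responds or a deadline passes.
// Assumes Node 18+ for the global fetch; values mirror the workflow (60 s budget, 5 s interval).
async function waitForApi(url: string, timeoutMs = 60_000, intervalMs = 5_000): Promise<boolean> {
  const deadline = Date.now() + timeoutMs;
  while (Date.now() < deadline) {
    try {
      const res = await fetch(url);
      if (res.ok) return true; // the API answered with 2xx, treat the container as healthy
    } catch {
      // connection refused while the container is still booting: retry after the interval
    }
    await new Promise((resolve) => setTimeout(resolve, intervalMs));
  }
  return false;
}

// Usage: fail the job if the API never comes up.
waitForApi("http://localhost:4000/api/docs/json").then((healthy) => {
  if (!healthy) {
    console.error("Container did not become healthy in time");
    process.exit(1);
  }
});
```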
.github/workflows/release-k8-operator-helm.yml (vendored, new file, 27 lines)
@@ -0,0 +1,27 @@
name: Release K8 Operator Helm Chart
on:
  workflow_dispatch:

jobs:
  release-helm:
    name: Release Helm Chart
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v2

      - name: Install Helm
        uses: azure/setup-helm@v3
        with:
          version: v3.10.0

      - name: Install python
        uses: actions/setup-python@v4

      - name: Install Cloudsmith CLI
        run: pip install --upgrade cloudsmith-cli

      - name: Build and push helm package to CloudSmith
        run: cd helm-charts && sh upload-k8s-operator-cloudsmith.sh
        env:
          CLOUDSMITH_API_KEY: ${{ secrets.CLOUDSMITH_API_KEY }}
139
.github/workflows/release_docker_k8_operator.yaml
vendored
139
.github/workflows/release_docker_k8_operator.yaml
vendored
@ -1,52 +1,103 @@
|
||||
name: Release image + Helm chart K8s Operator
|
||||
name: Release K8 Operator Docker Image
|
||||
on:
|
||||
push:
|
||||
tags:
|
||||
- "infisical-k8-operator/v*.*.*"
|
||||
push:
|
||||
tags:
|
||||
- "infisical-k8-operator/v*.*.*"
|
||||
|
||||
jobs:
|
||||
release:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Extract version from tag
|
||||
id: extract_version
|
||||
run: echo "::set-output name=version::${GITHUB_REF_NAME#infisical-k8-operator/}"
|
||||
- uses: actions/checkout@v2
|
||||
release-image:
|
||||
name: Generate Helm Chart PR
|
||||
runs-on: ubuntu-latest
|
||||
outputs:
|
||||
pr_number: ${{ steps.create-pr.outputs.pull-request-number }}
|
||||
steps:
|
||||
- name: Extract version from tag
|
||||
id: extract_version
|
||||
run: echo "::set-output name=version::${GITHUB_REF_NAME#infisical-k8-operator/}"
|
||||
|
||||
- name: 🔧 Set up QEMU
|
||||
uses: docker/setup-qemu-action@v1
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v2
|
||||
|
||||
- name: 🔧 Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v1
|
||||
# Dependency for helm generation
|
||||
- name: Install Helm
|
||||
uses: azure/setup-helm@v3
|
||||
with:
|
||||
version: v3.10.0
|
||||
|
||||
- name: 🐋 Login to Docker Hub
|
||||
uses: docker/login-action@v1
|
||||
with:
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
# Dependency for helm generation
|
||||
- name: Install Go
|
||||
uses: actions/setup-go@v4
|
||||
with:
|
||||
go-version: 1.21
|
||||
|
||||
- name: Build and push
|
||||
id: docker_build
|
||||
uses: docker/build-push-action@v2
|
||||
with:
|
||||
context: k8-operator
|
||||
push: true
|
||||
platforms: linux/amd64,linux/arm64
|
||||
tags: |
|
||||
infisical/kubernetes-operator:latest
|
||||
infisical/kubernetes-operator:${{ steps.extract_version.outputs.version }}
|
||||
# Install binaries for helm generation
|
||||
- name: Install dependencies
|
||||
working-directory: k8-operator
|
||||
run: |
|
||||
make helmify
|
||||
make kustomize
|
||||
make controller-gen
|
||||
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v2
|
||||
- name: Install Helm
|
||||
uses: azure/setup-helm@v3
|
||||
with:
|
||||
version: v3.10.0
|
||||
- name: Install python
|
||||
uses: actions/setup-python@v4
|
||||
- name: Install Cloudsmith CLI
|
||||
run: pip install --upgrade cloudsmith-cli
|
||||
- name: Build and push helm package to Cloudsmith
|
||||
run: cd helm-charts && sh upload-k8s-operator-cloudsmith.sh
|
||||
env:
|
||||
CLOUDSMITH_API_KEY: ${{ secrets.CLOUDSMITH_API_KEY }}
|
||||
- name: Generate Helm Chart
|
||||
working-directory: k8-operator
|
||||
run: make helm
|
||||
|
||||
- name: Update Helm Chart Version
|
||||
run: ./k8-operator/scripts/update-version.sh ${{ steps.extract_version.outputs.version }}
|
||||
|
||||
- name: Debug - Check file changes
|
||||
run: |
|
||||
echo "Current git status:"
|
||||
git status
|
||||
echo ""
|
||||
echo "Modified files:"
|
||||
git diff --name-only
|
||||
|
||||
# If there is no diff, exit with error. Version should always be changed, so if there is no diff, something is wrong and we should exit.
|
||||
if [ -z "$(git diff --name-only)" ]; then
|
||||
echo "No helm changes or version changes. Invalid release detected, Exiting."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
- name: Create Helm Chart PR
|
||||
id: create-pr
|
||||
uses: peter-evans/create-pull-request@v5
|
||||
with:
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
commit-message: "Update Helm chart to version ${{ steps.extract_version.outputs.version }}"
|
||||
committer: GitHub <noreply@github.com>
|
||||
author: ${{ github.actor }} <${{ github.actor }}@users.noreply.github.com>
|
||||
branch: helm-update-${{ steps.extract_version.outputs.version }}
|
||||
delete-branch: true
|
||||
title: "Update Helm chart to version ${{ steps.extract_version.outputs.version }}"
|
||||
body: |
|
||||
This PR updates the Helm chart to version `${{ steps.extract_version.outputs.version }}`.
|
||||
Additionally the helm chart has been updated to match the latest operator code changes.
|
||||
|
||||
Associated Release Workflow: https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}
|
||||
|
||||
Once you have approved this PR, you can trigger the helm release workflow manually.
|
||||
base: main
|
||||
|
||||
- name: 🔧 Set up QEMU
|
||||
uses: docker/setup-qemu-action@v1
|
||||
|
||||
- name: 🔧 Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v1
|
||||
|
||||
- name: 🐋 Login to Docker Hub
|
||||
uses: docker/login-action@v1
|
||||
with:
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
|
||||
- name: Build and push
|
||||
id: docker_build
|
||||
uses: docker/build-push-action@v2
|
||||
with:
|
||||
context: k8-operator
|
||||
push: true
|
||||
platforms: linux/amd64,linux/arm64
|
||||
tags: |
|
||||
infisical/kubernetes-operator:latest
|
||||
infisical/kubernetes-operator:${{ steps.extract_version.outputs.version }}
|
||||
|
.github/workflows/run-backend-tests.yml (vendored, 8 changed lines)
@@ -34,7 +34,10 @@ jobs:
working-directory: backend
- name: Start postgres and redis
run: touch .env && docker compose -f docker-compose.dev.yml up -d db redis
- name: Start integration test
- name: Run unit test
run: npm run test:unit
working-directory: backend
- name: Run integration test
run: npm run test:e2e
working-directory: backend
env:
@@ -44,4 +47,5 @@ jobs:
ENCRYPTION_KEY: 4bnfe4e407b8921c104518903515b218
- name: cleanup
run: |
docker compose -f "docker-compose.dev.yml" down
docker compose -f "docker-compose.dev.yml" down
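The backend CI now runs unit tests as a separate step before the integration suite; per the package.json change later in this diff, `npm run test:unit` resolves to `vitest run -c vitest.unit.config.ts`. As a rough sketch only — the repository's actual vitest.unit.config.ts is not shown here and the include pattern below is an assumption — a dedicated unit config typically looks like this:

```ts
// Hypothetical sketch of a dedicated unit-test config (not the repository's file).
import { defineConfig } from "vitest/config";

export default defineConfig({
  test: {
    environment: "node",
    include: ["src/**/*.test.ts"] // assumption: unit specs live alongside the source
  }
});
```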
@@ -120,4 +120,3 @@ export default {
};
}
};
166
backend/package-lock.json
generated
166
backend/package-lock.json
generated
@ -31,7 +31,7 @@
|
||||
"@fastify/swagger-ui": "^2.1.0",
|
||||
"@google-cloud/kms": "^4.5.0",
|
||||
"@infisical/quic": "^1.0.8",
|
||||
"@node-saml/passport-saml": "^4.0.4",
|
||||
"@node-saml/passport-saml": "^5.0.1",
|
||||
"@octokit/auth-app": "^7.1.1",
|
||||
"@octokit/plugin-retry": "^5.0.5",
|
||||
"@octokit/rest": "^20.0.2",
|
||||
@ -6747,32 +6747,35 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@node-saml/node-saml": {
|
||||
"version": "4.0.5",
|
||||
"resolved": "https://registry.npmjs.org/@node-saml/node-saml/-/node-saml-4.0.5.tgz",
|
||||
"integrity": "sha512-J5DglElbY1tjOuaR1NPtjOXkXY5bpUhDoKVoeucYN98A3w4fwgjIOPqIGcb6cQsqFq2zZ6vTCeKn5C/hvefSaw==",
|
||||
"version": "5.0.1",
|
||||
"resolved": "https://registry.npmjs.org/@node-saml/node-saml/-/node-saml-5.0.1.tgz",
|
||||
"integrity": "sha512-YQzFPEC+CnsfO9AFYnwfYZKIzOLx3kITaC1HrjHVLTo6hxcQhc+LgHODOMvW4VCV95Gwrz1MshRUWCPzkDqmnA==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@types/debug": "^4.1.7",
|
||||
"@types/passport": "^1.0.11",
|
||||
"@types/xml-crypto": "^1.4.2",
|
||||
"@types/xml-encryption": "^1.2.1",
|
||||
"@types/xml2js": "^0.4.11",
|
||||
"@xmldom/xmldom": "^0.8.6",
|
||||
"@types/debug": "^4.1.12",
|
||||
"@types/qs": "^6.9.11",
|
||||
"@types/xml-encryption": "^1.2.4",
|
||||
"@types/xml2js": "^0.4.14",
|
||||
"@xmldom/is-dom-node": "^1.0.1",
|
||||
"@xmldom/xmldom": "^0.8.10",
|
||||
"debug": "^4.3.4",
|
||||
"xml-crypto": "^3.0.1",
|
||||
"xml-crypto": "^6.0.1",
|
||||
"xml-encryption": "^3.0.2",
|
||||
"xml2js": "^0.5.0",
|
||||
"xmlbuilder": "^15.1.1"
|
||||
"xml2js": "^0.6.2",
|
||||
"xmlbuilder": "^15.1.1",
|
||||
"xpath": "^0.0.34"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 14"
|
||||
"node": ">= 18"
|
||||
}
|
||||
},
|
||||
"node_modules/@node-saml/node-saml/node_modules/debug": {
|
||||
"version": "4.3.4",
|
||||
"resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz",
|
||||
"integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==",
|
||||
"version": "4.4.0",
|
||||
"resolved": "https://registry.npmjs.org/debug/-/debug-4.4.0.tgz",
|
||||
"integrity": "sha512-6WTZ/IxCY/T6BALoZHaE4ctp9xm+Z5kY/pzYaCHRFeyVhojxlrm+46y68HA6hr0TcwEssoxNiDEUJQjfPZ/RYA==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"ms": "2.1.2"
|
||||
"ms": "^2.1.3"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=6.0"
|
||||
@ -6783,25 +6786,43 @@
|
||||
}
|
||||
}
|
||||
},
|
||||
"node_modules/@node-saml/node-saml/node_modules/ms": {
|
||||
"version": "2.1.2",
|
||||
"resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz",
|
||||
"integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w=="
|
||||
"node_modules/@node-saml/node-saml/node_modules/xml2js": {
|
||||
"version": "0.6.2",
|
||||
"resolved": "https://registry.npmjs.org/xml2js/-/xml2js-0.6.2.tgz",
|
||||
"integrity": "sha512-T4rieHaC1EXcES0Kxxj4JWgaUQHDk+qwHcYOCFHfiwKz7tOVPLq7Hjq9dM1WCMhylqMEfP7hMcOIChvotiZegA==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"sax": ">=0.6.0",
|
||||
"xmlbuilder": "~11.0.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=4.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@node-saml/node-saml/node_modules/xml2js/node_modules/xmlbuilder": {
|
||||
"version": "11.0.1",
|
||||
"resolved": "https://registry.npmjs.org/xmlbuilder/-/xmlbuilder-11.0.1.tgz",
|
||||
"integrity": "sha512-fDlsI/kFEx7gLvbecc0/ohLG50fugQp8ryHzMTuW9vSa1GJ0XYWKnhsUx7oie3G98+r56aTQIUB4kht42R3JvA==",
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">=4.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@node-saml/passport-saml": {
|
||||
"version": "4.0.4",
|
||||
"resolved": "https://registry.npmjs.org/@node-saml/passport-saml/-/passport-saml-4.0.4.tgz",
|
||||
"integrity": "sha512-xFw3gw0yo+K1mzlkW15NeBF7cVpRHN/4vpjmBKzov5YFImCWh/G0LcTZ8krH3yk2/eRPc3Or8LRPudVJBjmYaw==",
|
||||
"version": "5.0.1",
|
||||
"resolved": "https://registry.npmjs.org/@node-saml/passport-saml/-/passport-saml-5.0.1.tgz",
|
||||
"integrity": "sha512-fMztg3zfSnjLEgxvpl6HaDMNeh0xeQX4QHiF9e2Lsie2dc4qFE37XYbQZhVmn8XJ2awPpSWLQ736UskYgGU8lQ==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@node-saml/node-saml": "^4.0.4",
|
||||
"@types/express": "^4.17.14",
|
||||
"@types/passport": "^1.0.11",
|
||||
"@types/passport-strategy": "^0.2.35",
|
||||
"passport": "^0.6.0",
|
||||
"@node-saml/node-saml": "^5.0.1",
|
||||
"@types/express": "^4.17.21",
|
||||
"@types/passport": "^1.0.16",
|
||||
"@types/passport-strategy": "^0.2.38",
|
||||
"passport": "^0.7.0",
|
||||
"passport-strategy": "^1.0.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 14"
|
||||
"node": ">= 18"
|
||||
}
|
||||
},
|
||||
"node_modules/@nodelib/fs.scandir": {
|
||||
@ -9606,6 +9627,7 @@
|
||||
"version": "4.1.12",
|
||||
"resolved": "https://registry.npmjs.org/@types/debug/-/debug-4.1.12.tgz",
|
||||
"integrity": "sha512-vIChWdVG3LG1SMxEvI/AK+FWJthlrqlTu7fbrlywTkkaONwk/UAGaULXRlf8vkzFBLVm0zkMdCquhL5aOjhXPQ==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@types/ms": "*"
|
||||
}
|
||||
@ -9725,9 +9747,10 @@
|
||||
"integrity": "sha512-/pyBZWSLD2n0dcHE3hq8s8ZvcETHtEuF+3E7XVt0Ig2nvsVQXdghHVcEkIWjy9A0wKfTn97a/PSDYohKIlnP/w=="
|
||||
},
|
||||
"node_modules/@types/ms": {
|
||||
"version": "0.7.34",
|
||||
"resolved": "https://registry.npmjs.org/@types/ms/-/ms-0.7.34.tgz",
|
||||
"integrity": "sha512-nG96G3Wp6acyAgJqGasjODb+acrI7KltPiRxzHPXnP3NgI28bpQDRv53olbqGXbfcgF5aiiHmO3xpwEpS5Ld9g=="
|
||||
"version": "2.1.0",
|
||||
"resolved": "https://registry.npmjs.org/@types/ms/-/ms-2.1.0.tgz",
|
||||
"integrity": "sha512-GsCCIZDE/p3i96vtEqx+7dBUGXrc7zeSK3wwPHIaRThS+9OhWIXRqzs4d6k1SVU8g91DrNRWxWUGhp5KXQb2VA==",
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/@types/node": {
|
||||
"version": "20.9.5",
|
||||
@ -9907,9 +9930,10 @@
|
||||
"dev": true
|
||||
},
|
||||
"node_modules/@types/qs": {
|
||||
"version": "6.9.10",
|
||||
"resolved": "https://registry.npmjs.org/@types/qs/-/qs-6.9.10.tgz",
|
||||
"integrity": "sha512-3Gnx08Ns1sEoCrWssEgTSJs/rsT2vhGP+Ja9cnnk9k4ALxinORlQneLXFeFKOTJMOeZUFD1s7w+w2AphTpvzZw=="
|
||||
"version": "6.9.18",
|
||||
"resolved": "https://registry.npmjs.org/@types/qs/-/qs-6.9.18.tgz",
|
||||
"integrity": "sha512-kK7dgTYDyGqS+e2Q4aK9X3D7q234CIZ1Bv0q/7Z5IwRDoADNU81xXJK/YVyLbLTZCoIwUoDoffFeF+p/eIklAA==",
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/@types/range-parser": {
|
||||
"version": "1.2.7",
|
||||
@ -10058,19 +10082,11 @@
|
||||
"@types/webidl-conversions": "*"
|
||||
}
|
||||
},
|
||||
"node_modules/@types/xml-crypto": {
|
||||
"version": "1.4.6",
|
||||
"resolved": "https://registry.npmjs.org/@types/xml-crypto/-/xml-crypto-1.4.6.tgz",
|
||||
"integrity": "sha512-A6jEW2FxLZo1CXsRWnZHUX2wzR3uDju2Bozt6rDbSmU/W8gkilaVbwFEVN0/NhnUdMVzwYobWtM6bU1QJJFb7Q==",
|
||||
"dependencies": {
|
||||
"@types/node": "*",
|
||||
"xpath": "0.0.27"
|
||||
}
|
||||
},
|
||||
"node_modules/@types/xml-encryption": {
|
||||
"version": "1.2.4",
|
||||
"resolved": "https://registry.npmjs.org/@types/xml-encryption/-/xml-encryption-1.2.4.tgz",
|
||||
"integrity": "sha512-I69K/WW1Dv7j6O3jh13z0X8sLWJRXbu5xnHDl9yHzUNDUBtUoBY058eb5s+x/WG6yZC1h8aKdI2EoyEPjyEh+Q==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@types/node": "*"
|
||||
}
|
||||
@ -10079,6 +10095,7 @@
|
||||
"version": "0.4.14",
|
||||
"resolved": "https://registry.npmjs.org/@types/xml2js/-/xml2js-0.4.14.tgz",
|
||||
"integrity": "sha512-4YnrRemBShWRO2QjvUin8ESA41rH+9nQGLUGZV/1IDhi3SL9OhdpNC/MrulTWuptXKwhx/aDxE7toV0f/ypIXQ==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@types/node": "*"
|
||||
}
|
||||
@ -10522,10 +10539,20 @@
|
||||
"url": "https://opencollective.com/vitest"
|
||||
}
|
||||
},
|
||||
"node_modules/@xmldom/is-dom-node": {
|
||||
"version": "1.0.1",
|
||||
"resolved": "https://registry.npmjs.org/@xmldom/is-dom-node/-/is-dom-node-1.0.1.tgz",
|
||||
"integrity": "sha512-CJDxIgE5I0FH+ttq/Fxy6nRpxP70+e2O048EPe85J2use3XKdatVM7dDVvFNjQudd9B49NPoZ+8PG49zj4Er8Q==",
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">= 16"
|
||||
}
|
||||
},
|
||||
"node_modules/@xmldom/xmldom": {
|
||||
"version": "0.8.10",
|
||||
"resolved": "https://registry.npmjs.org/@xmldom/xmldom/-/xmldom-0.8.10.tgz",
|
||||
"integrity": "sha512-2WALfTl4xo2SkGCYRt6rDTFfk9R1czmBvUQy12gK2KuRKIpWEhcbbzy8EZXtz/jkRqHX8bFEc6FC1HjX4TUWYw==",
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">=10.0.0"
|
||||
}
|
||||
@ -18222,9 +18249,10 @@
|
||||
}
|
||||
},
|
||||
"node_modules/passport": {
|
||||
"version": "0.6.0",
|
||||
"resolved": "https://registry.npmjs.org/passport/-/passport-0.6.0.tgz",
|
||||
"integrity": "sha512-0fe+p3ZnrWRW74fe8+SvCyf4a3Pb2/h7gFkQ8yTJpAO50gDzlfjZUZTO1k5Eg9kUct22OxHLqDZoKUWRHOh9ug==",
|
||||
"version": "0.7.0",
|
||||
"resolved": "https://registry.npmjs.org/passport/-/passport-0.7.0.tgz",
|
||||
"integrity": "sha512-cPLl+qZpSc+ireUvt+IzqbED1cHHkDoVYMo30jbJIdOOjQ1MQYZBPiNvmi8UM6lJuOpTPXJGZQk0DtC4y61MYQ==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"passport-strategy": "1.x.x",
|
||||
"pause": "0.0.1",
|
||||
@ -23692,42 +23720,44 @@
|
||||
}
|
||||
},
|
||||
"node_modules/xml-crypto": {
|
||||
"version": "3.2.0",
|
||||
"resolved": "https://registry.npmjs.org/xml-crypto/-/xml-crypto-3.2.0.tgz",
|
||||
"integrity": "sha512-qVurBUOQrmvlgmZqIVBqmb06TD2a/PpEUfFPgD7BuBfjmoH4zgkqaWSIJrnymlCvM2GGt9x+XtJFA+ttoAufqg==",
|
||||
"version": "6.0.1",
|
||||
"resolved": "https://registry.npmjs.org/xml-crypto/-/xml-crypto-6.0.1.tgz",
|
||||
"integrity": "sha512-v05aU7NS03z4jlZ0iZGRFeZsuKO1UfEbbYiaeRMiATBFs6Jq9+wqKquEMTn4UTrYZ9iGD8yz3KT4L9o2iF682w==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@xmldom/xmldom": "^0.8.8",
|
||||
"xpath": "0.0.32"
|
||||
"@xmldom/is-dom-node": "^1.0.1",
|
||||
"@xmldom/xmldom": "^0.8.10",
|
||||
"xpath": "^0.0.33"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=4.0.0"
|
||||
"node": ">=16"
|
||||
}
|
||||
},
|
||||
"node_modules/xml-crypto/node_modules/xpath": {
|
||||
"version": "0.0.32",
|
||||
"resolved": "https://registry.npmjs.org/xpath/-/xpath-0.0.32.tgz",
|
||||
"integrity": "sha512-rxMJhSIoiO8vXcWvSifKqhvV96GjiD5wYb8/QHdoRyQvraTpp4IEv944nhGausZZ3u7dhQXteZuZbaqfpB7uYw==",
|
||||
"version": "0.0.33",
|
||||
"resolved": "https://registry.npmjs.org/xpath/-/xpath-0.0.33.tgz",
|
||||
"integrity": "sha512-NNXnzrkDrAzalLhIUc01jO2mOzXGXh1JwPgkihcLLzw98c0WgYDmmjSh1Kl3wzaxSVWMuA+fe0WTWOBDWCBmNA==",
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">=0.6.0"
|
||||
}
|
||||
},
|
||||
"node_modules/xml-encryption": {
|
||||
"version": "3.0.2",
|
||||
"resolved": "https://registry.npmjs.org/xml-encryption/-/xml-encryption-3.0.2.tgz",
|
||||
"integrity": "sha512-VxYXPvsWB01/aqVLd6ZMPWZ+qaj0aIdF+cStrVJMcFj3iymwZeI0ABzB3VqMYv48DkSpRhnrXqTUkR34j+UDyg==",
|
||||
"version": "3.1.0",
|
||||
"resolved": "https://registry.npmjs.org/xml-encryption/-/xml-encryption-3.1.0.tgz",
|
||||
"integrity": "sha512-PV7qnYpoAMXbf1kvQkqMScLeQpjCMixddAKq9PtqVrho8HnYbBOWNfG0kA4R7zxQDo7w9kiYAyzS/ullAyO55Q==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@xmldom/xmldom": "^0.8.5",
|
||||
"escape-html": "^1.0.3",
|
||||
"xpath": "0.0.32"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=12"
|
||||
}
|
||||
},
|
||||
"node_modules/xml-encryption/node_modules/xpath": {
|
||||
"version": "0.0.32",
|
||||
"resolved": "https://registry.npmjs.org/xpath/-/xpath-0.0.32.tgz",
|
||||
"integrity": "sha512-rxMJhSIoiO8vXcWvSifKqhvV96GjiD5wYb8/QHdoRyQvraTpp4IEv944nhGausZZ3u7dhQXteZuZbaqfpB7uYw==",
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">=0.6.0"
|
||||
}
|
||||
@ -23764,6 +23794,7 @@
|
||||
"version": "15.1.1",
|
||||
"resolved": "https://registry.npmjs.org/xmlbuilder/-/xmlbuilder-15.1.1.tgz",
|
||||
"integrity": "sha512-yMqGBqtXyeN1e3TGYvgNgDVZ3j84W4cwkOXQswghol6APgZWaff9lnbvN7MHYJOiXsvGPXtjTYJEiC9J2wv9Eg==",
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">=8.0"
|
||||
}
|
||||
@ -23774,9 +23805,10 @@
|
||||
"integrity": "sha512-JZnDKK8B0RCDw84FNdDAIpZK+JuJw+s7Lz8nksI7SIuU3UXJJslUthsi+uWBUYOwPFwW7W7PRLRfUKpxjtjFCw=="
|
||||
},
|
||||
"node_modules/xpath": {
|
||||
"version": "0.0.27",
|
||||
"resolved": "https://registry.npmjs.org/xpath/-/xpath-0.0.27.tgz",
|
||||
"integrity": "sha512-fg03WRxtkCV6ohClePNAECYsmpKKTv5L8y/X3Dn1hQrec3POx2jHZ/0P2qQ6HvsrU1BmeqXcof3NGGueG6LxwQ==",
|
||||
"version": "0.0.34",
|
||||
"resolved": "https://registry.npmjs.org/xpath/-/xpath-0.0.34.tgz",
|
||||
"integrity": "sha512-FxF6+rkr1rNSQrhUNYrAFJpRXNzlDoMxeXN5qI84939ylEv3qqPFKa85Oxr6tDaJKqwW6KKyo2v26TSv3k6LeA==",
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">=0.6.0"
|
||||
}
|
||||
|
@@ -40,6 +40,7 @@
"type:check": "tsc --noEmit",
"lint:fix": "eslint --fix --ext js,ts ./src",
"lint": "eslint 'src/**/*.ts'",
"test:unit": "vitest run -c vitest.unit.config.ts",
"test:e2e": "vitest run -c vitest.e2e.config.ts --bail=1",
"test:e2e-watch": "vitest -c vitest.e2e.config.ts --bail=1",
"test:e2e-coverage": "vitest run --coverage -c vitest.e2e.config.ts",
@@ -70,6 +71,7 @@
"migrate:org": "tsx ./scripts/migrate-organization.ts",
"seed:new": "tsx ./scripts/create-seed-file.ts",
"seed": "knex --knexfile ./dist/db/knexfile.ts --client pg seed:run",
"seed-dev": "knex --knexfile ./src/db/knexfile.ts --client pg seed:run",
"db:reset": "npm run migration:rollback -- --all && npm run migration:latest"
},
"keywords": [],
@@ -146,7 +148,7 @@
"@fastify/swagger-ui": "^2.1.0",
"@google-cloud/kms": "^4.5.0",
"@infisical/quic": "^1.0.8",
"@node-saml/passport-saml": "^4.0.4",
"@node-saml/passport-saml": "^5.0.1",
"@octokit/auth-app": "^7.1.1",
"@octokit/plugin-retry": "^5.0.5",
"@octokit/rest": "^20.0.2",
@@ -1,7 +0,0 @@ (removed)
import "@fastify/request-context";

declare module "@fastify/request-context" {
  interface RequestContextData {
    reqId: string;
  }
}
backend/src/@types/fastify.d.ts (vendored, 7 changed lines)
@@ -100,6 +100,13 @@ import { TWorkflowIntegrationServiceFactory } from "@app/services/workflow-integ
declare module "@fastify/request-context" {
  interface RequestContextData {
    reqId: string;
    identityAuthInfo?: {
      identityId: string;
      oidc?: {
        claims: Record<string, string>;
      };
    };
    identityPermissionMetadata?: Record<string, unknown>; // filled by permission service
  }
}
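The augmentation above extends @fastify/request-context so authentication data can ride along with each request. A hedged sketch of how the new `identityAuthInfo` slot might be written and read through that package's `requestContext` store — the surrounding auth and permission wiring is illustrative, not part of this diff:

```ts
import { requestContext } from "@fastify/request-context";

// Illustrative only: helpers that would run inside a request's lifecycle
// (e.g. an auth hook and the permission layer), where the context store is active.
export function rememberIdentity(identityId: string, claims: Record<string, string>) {
  requestContext.set("identityAuthInfo", { identityId, oidc: { claims } });
}

export function readOidcClaims(): Record<string, string> | undefined {
  return requestContext.get("identityAuthInfo")?.oidc?.claims;
}
```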
@@ -85,7 +85,7 @@ export async function up(knex: Knex): Promise<void> {
  }

  if (await knex.schema.hasTable(TableName.DynamicSecret)) {
    const doesGatewayColExist = await knex.schema.hasColumn(TableName.DynamicSecret, "gatewayId");
    const doesGatewayColExist = await knex.schema.hasColumn(TableName.DynamicSecret, "projectGatewayId");
    await knex.schema.alterTable(TableName.DynamicSecret, (t) => {
      // not setting a foreign constraint so that cascade effects are not triggered
      if (!doesGatewayColExist) {
@@ -0,0 +1,19 @@ (new file)
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  if (!(await knex.schema.hasColumn(TableName.SecretApprovalRequestReviewer, "comment"))) {
    await knex.schema.alterTable(TableName.SecretApprovalRequestReviewer, (t) => {
      t.string("comment");
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  if (await knex.schema.hasColumn(TableName.SecretApprovalRequestReviewer, "comment")) {
    await knex.schema.alterTable(TableName.SecretApprovalRequestReviewer, (t) => {
      t.dropColumn("comment");
    });
  }
}
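The new migrations in this set all follow the same guard pattern: check `hasColumn`/`hasTable` before altering, so a partially applied or re-run migration stays safe. A hedged sketch of that pattern factored into a reusable helper — the helper itself is illustrative and not part of this PR; the migrations above inline the checks:

```ts
import type { Knex } from "knex";

// Illustrative helper (not repository code): add a column only if it is missing,
// mirroring the hasColumn guards used by the migrations above.
export async function addColumnIfMissing(
  knex: Knex,
  table: string,
  column: string,
  addColumn: (t: Knex.AlterTableBuilder) => void
): Promise<void> {
  if (!(await knex.schema.hasColumn(table, column))) {
    await knex.schema.alterTable(table, addColumn);
  }
}

// Usage, equivalent to the "comment" migration's up():
// await addColumnIfMissing(knex, TableName.SecretApprovalRequestReviewer, "comment", (t) => t.string("comment"));
```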
@@ -0,0 +1,45 @@ (new file)
import { Knex } from "knex";

import { TableName } from "@app/db/schemas";

export async function up(knex: Knex): Promise<void> {
  if (await knex.schema.hasTable(TableName.SecretVersionV2)) {
    const hasSecretVersionV2UserActorId = await knex.schema.hasColumn(TableName.SecretVersionV2, "userActorId");
    const hasSecretVersionV2IdentityActorId = await knex.schema.hasColumn(TableName.SecretVersionV2, "identityActorId");
    const hasSecretVersionV2ActorType = await knex.schema.hasColumn(TableName.SecretVersionV2, "actorType");

    await knex.schema.alterTable(TableName.SecretVersionV2, (t) => {
      if (!hasSecretVersionV2UserActorId) {
        t.uuid("userActorId");
        t.foreign("userActorId").references("id").inTable(TableName.Users);
      }
      if (!hasSecretVersionV2IdentityActorId) {
        t.uuid("identityActorId");
        t.foreign("identityActorId").references("id").inTable(TableName.Identity);
      }
      if (!hasSecretVersionV2ActorType) {
        t.string("actorType");
      }
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  if (await knex.schema.hasTable(TableName.SecretVersionV2)) {
    const hasSecretVersionV2UserActorId = await knex.schema.hasColumn(TableName.SecretVersionV2, "userActorId");
    const hasSecretVersionV2IdentityActorId = await knex.schema.hasColumn(TableName.SecretVersionV2, "identityActorId");
    const hasSecretVersionV2ActorType = await knex.schema.hasColumn(TableName.SecretVersionV2, "actorType");

    await knex.schema.alterTable(TableName.SecretVersionV2, (t) => {
      if (hasSecretVersionV2UserActorId) {
        t.dropColumn("userActorId");
      }
      if (hasSecretVersionV2IdentityActorId) {
        t.dropColumn("identityActorId");
      }
      if (hasSecretVersionV2ActorType) {
        t.dropColumn("actorType");
      }
    });
  }
}
@@ -0,0 +1,32 @@ (new file)
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  if (await knex.schema.hasTable(TableName.Organization)) {
    const hasSecretShareToAnyoneCol = await knex.schema.hasColumn(
      TableName.Organization,
      "allowSecretSharingOutsideOrganization"
    );

    if (!hasSecretShareToAnyoneCol) {
      await knex.schema.alterTable(TableName.Organization, (t) => {
        t.boolean("allowSecretSharingOutsideOrganization").defaultTo(true);
      });
    }
  }
}

export async function down(knex: Knex): Promise<void> {
  if (await knex.schema.hasTable(TableName.Organization)) {
    const hasSecretShareToAnyoneCol = await knex.schema.hasColumn(
      TableName.Organization,
      "allowSecretSharingOutsideOrganization"
    );
    if (hasSecretShareToAnyoneCol) {
      await knex.schema.alterTable(TableName.Organization, (t) => {
        t.dropColumn("allowSecretSharingOutsideOrganization");
      });
    }
  }
}
@@ -0,0 +1,21 @@ (new file)
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  const hasMappingField = await knex.schema.hasColumn(TableName.IdentityOidcAuth, "claimMetadataMapping");
  if (!hasMappingField) {
    await knex.schema.alterTable(TableName.IdentityOidcAuth, (t) => {
      t.jsonb("claimMetadataMapping");
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  const hasMappingField = await knex.schema.hasColumn(TableName.IdentityOidcAuth, "claimMetadataMapping");
  if (hasMappingField) {
    await knex.schema.alterTable(TableName.IdentityOidcAuth, (t) => {
      t.dropColumn("claimMetadataMapping");
    });
  }
}
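The new `claimMetadataMapping` jsonb column pairs with the `oidc.claims` request-context field added earlier: an identity's OIDC auth configuration can map selected token claims into identity metadata. A hedged sketch of how such a mapping could be applied — the function and shapes are illustrative; the actual service logic is not shown in this diff:

```ts
// Illustrative only: apply a configured claim mapping to the claims of a verified OIDC token.
// claimMetadataMapping is assumed to map a metadata key to the claim name it should copy.
type ClaimMapping = Record<string, string>;

export function mapOidcClaims(
  mapping: ClaimMapping | null | undefined,
  tokenClaims: Record<string, string>
): Record<string, string> {
  if (!mapping) return {};
  const metadata: Record<string, string> = {};
  for (const [metadataKey, claimName] of Object.entries(mapping)) {
    if (claimName in tokenClaims) {
      metadata[metadataKey] = tokenClaims[claimName];
    }
  }
  return metadata;
}

// Example: { team: "department" } copies the token's "department" claim into metadata.team.
// mapOidcClaims({ team: "department" }, { sub: "ci-runner", department: "platform" })
```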
@@ -0,0 +1,19 @@ (new file)
import { Knex } from "knex";

import { TableName } from "../schemas/models";

export async function up(knex: Knex): Promise<void> {
  if (!(await knex.schema.hasColumn(TableName.SuperAdmin, "adminIdentityIds"))) {
    await knex.schema.alterTable(TableName.SuperAdmin, (t) => {
      t.specificType("adminIdentityIds", "text[]");
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  if (await knex.schema.hasColumn(TableName.SuperAdmin, "adminIdentityIds")) {
    await knex.schema.alterTable(TableName.SuperAdmin, (t) => {
      t.dropColumn("adminIdentityIds");
    });
  }
}
@@ -0,0 +1,23 @@ (new file)
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  const doesParentColumExist = await knex.schema.hasColumn(TableName.SecretFolder, "parentId");
  const doesNameColumnExist = await knex.schema.hasColumn(TableName.SecretFolder, "name");
  if (doesParentColumExist && doesNameColumnExist) {
    await knex.schema.alterTable(TableName.SecretFolder, (t) => {
      t.index(["parentId", "name"]);
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  const doesParentColumExist = await knex.schema.hasColumn(TableName.SecretFolder, "parentId");
  const doesNameColumnExist = await knex.schema.hasColumn(TableName.SecretFolder, "name");
  if (doesParentColumExist && doesNameColumnExist) {
    await knex.schema.alterTable(TableName.SecretFolder, (t) => {
      t.dropIndex(["parentId", "name"]);
    });
  }
}
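The composite (parentId, name) index targets the lookup that resolves one folder level at a time when walking a secret path. A hedged sketch of the kind of query it accelerates — the query and the physical table name below are illustrative, not taken from this diff:

```ts
import type { Knex } from "knex";

// Illustrative query: resolving the child folder named `segment` under `parentId`
// is an equality lookup on (parentId, name), which the new composite index covers.
// "secret_folders" is an assumed physical table name for TableName.SecretFolder.
export async function findChildFolder(db: Knex, parentId: string, segment: string) {
  return db("secret_folders").where({ parentId, name: segment }).first();
}
```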
@@ -0,0 +1,19 @@ (new file)
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  const hasReviewerJwtCol = await knex.schema.hasColumn(
    TableName.IdentityKubernetesAuth,
    "encryptedKubernetesTokenReviewerJwt"
  );
  if (hasReviewerJwtCol) {
    await knex.schema.alterTable(TableName.IdentityKubernetesAuth, (t) => {
      t.binary("encryptedKubernetesTokenReviewerJwt").nullable().alter();
    });
  }
}

export async function down(): Promise<void> {
  // we can't make it back to non nullable, it will fail
}
@@ -0,0 +1,29 @@ (new file)
import { Knex } from "knex";

import { TableName } from "../schemas/models";

export async function up(knex: Knex): Promise<void> {
  if (!(await knex.schema.hasColumn(TableName.SecretApprovalPolicy, "allowedSelfApprovals"))) {
    await knex.schema.alterTable(TableName.SecretApprovalPolicy, (t) => {
      t.boolean("allowedSelfApprovals").notNullable().defaultTo(true);
    });
  }
  if (!(await knex.schema.hasColumn(TableName.AccessApprovalPolicy, "allowedSelfApprovals"))) {
    await knex.schema.alterTable(TableName.AccessApprovalPolicy, (t) => {
      t.boolean("allowedSelfApprovals").notNullable().defaultTo(true);
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  if (await knex.schema.hasColumn(TableName.SecretApprovalPolicy, "allowedSelfApprovals")) {
    await knex.schema.alterTable(TableName.SecretApprovalPolicy, (t) => {
      t.dropColumn("allowedSelfApprovals");
    });
  }
  if (await knex.schema.hasColumn(TableName.AccessApprovalPolicy, "allowedSelfApprovals")) {
    await knex.schema.alterTable(TableName.AccessApprovalPolicy, (t) => {
      t.dropColumn("allowedSelfApprovals");
    });
  }
}
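`allowedSelfApprovals` surfaces later in the approval routers as a policy flag. A hedged sketch of the check such a flag implies — the function is illustrative; the actual enforcement lives in the approval services, which this diff does not show:

```ts
// Illustrative guard: when a policy disallows self-approval, the user who
// committed the change must not be accepted as one of its reviewers.
interface ApprovalPolicyLike {
  allowedSelfApprovals: boolean;
}

export function canReview(policy: ApprovalPolicyLike, committerUserId: string, reviewerUserId: string): boolean {
  if (!policy.allowedSelfApprovals && committerUserId === reviewerUserId) {
    return false;
  }
  return true;
}

// canReview({ allowedSelfApprovals: false }, "user-a", "user-a") === false
```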
@@ -16,7 +16,8 @@ export const AccessApprovalPoliciesSchema = z.object({
  createdAt: z.date(),
  updatedAt: z.date(),
  enforcementLevel: z.string().default("hard"),
  deletedAt: z.date().nullable().optional()
  deletedAt: z.date().nullable().optional(),
  allowedSelfApprovals: z.boolean().default(true)
});

export type TAccessApprovalPolicies = z.infer<typeof AccessApprovalPoliciesSchema>;

@@ -28,7 +28,7 @@ export const IdentityKubernetesAuthsSchema = z.object({
  allowedNamespaces: z.string(),
  allowedNames: z.string(),
  allowedAudience: z.string(),
  encryptedKubernetesTokenReviewerJwt: zodBuffer,
  encryptedKubernetesTokenReviewerJwt: zodBuffer.nullable().optional(),
  encryptedKubernetesCaCertificate: zodBuffer.nullable().optional()
});

@@ -26,7 +26,8 @@ export const IdentityOidcAuthsSchema = z.object({
  boundSubject: z.string().nullable().optional(),
  createdAt: z.date(),
  updatedAt: z.date(),
  encryptedCaCertificate: zodBuffer.nullable().optional()
  encryptedCaCertificate: zodBuffer.nullable().optional(),
  claimMetadataMapping: z.unknown().nullable().optional()
});

export type TIdentityOidcAuths = z.infer<typeof IdentityOidcAuthsSchema>;

@@ -22,7 +22,8 @@ export const OrganizationsSchema = z.object({
  kmsEncryptedDataKey: zodBuffer.nullable().optional(),
  defaultMembershipRole: z.string().default("member"),
  enforceMfa: z.boolean().default(false),
  selectedMfaMethod: z.string().nullable().optional()
  selectedMfaMethod: z.string().nullable().optional(),
  allowSecretSharingOutsideOrganization: z.boolean().default(true).nullable().optional()
});

export type TOrganizations = z.infer<typeof OrganizationsSchema>;

@@ -16,7 +16,8 @@ export const SecretApprovalPoliciesSchema = z.object({
  createdAt: z.date(),
  updatedAt: z.date(),
  enforcementLevel: z.string().default("hard"),
  deletedAt: z.date().nullable().optional()
  deletedAt: z.date().nullable().optional(),
  allowedSelfApprovals: z.boolean().default(true)
});

export type TSecretApprovalPolicies = z.infer<typeof SecretApprovalPoliciesSchema>;

@@ -13,7 +13,8 @@ export const SecretApprovalRequestsReviewersSchema = z.object({
  requestId: z.string().uuid(),
  createdAt: z.date(),
  updatedAt: z.date(),
  reviewerUserId: z.string().uuid()
  reviewerUserId: z.string().uuid(),
  comment: z.string().nullable().optional()
});

export type TSecretApprovalRequestsReviewers = z.infer<typeof SecretApprovalRequestsReviewersSchema>;

@@ -12,7 +12,6 @@ import { TImmutableDBKeys } from "./models";
export const SecretSharingSchema = z.object({
  id: z.string().uuid(),
  encryptedValue: z.string().nullable().optional(),
  type: z.string(),
  iv: z.string().nullable().optional(),
  tag: z.string().nullable().optional(),
  hashedHex: z.string().nullable().optional(),
@@ -27,7 +26,8 @@ export const SecretSharingSchema = z.object({
  lastViewedAt: z.date().nullable().optional(),
  password: z.string().nullable().optional(),
  encryptedSecret: zodBuffer.nullable().optional(),
  identifier: z.string().nullable().optional()
  identifier: z.string().nullable().optional(),
  type: z.string().default("share")
});

export type TSecretSharing = z.infer<typeof SecretSharingSchema>;

@@ -25,7 +25,10 @@ export const SecretVersionsV2Schema = z.object({
  folderId: z.string().uuid(),
  userId: z.string().uuid().nullable().optional(),
  createdAt: z.date(),
  updatedAt: z.date()
  updatedAt: z.date(),
  userActorId: z.string().uuid().nullable().optional(),
  identityActorId: z.string().uuid().nullable().optional(),
  actorType: z.string().nullable().optional()
});

export type TSecretVersionsV2 = z.infer<typeof SecretVersionsV2Schema>;

@@ -25,7 +25,8 @@ export const SuperAdminSchema = z.object({
  encryptedSlackClientId: zodBuffer.nullable().optional(),
  encryptedSlackClientSecret: zodBuffer.nullable().optional(),
  authConsentContent: z.string().nullable().optional(),
  pageFrameContent: z.string().nullable().optional()
  pageFrameContent: z.string().nullable().optional(),
  adminIdentityIds: z.string().array().nullable().optional()
});

export type TSuperAdmin = z.infer<typeof SuperAdminSchema>;
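`adminIdentityIds` lets the server-admin configuration grant instance-admin rights to machine identities, not only users. A hedged sketch of the check this enables — illustrative only; the real check lives in the super-admin service, which is not part of this diff:

```ts
import { z } from "zod";

// Shape of the relevant slice of the SuperAdmin config row (see schema above).
const ServerCfgSlice = z.object({
  adminIdentityIds: z.string().array().nullable().optional()
});

// Illustrative: decide whether a machine identity should be treated as an instance admin.
export function isInstanceAdminIdentity(cfg: z.infer<typeof ServerCfgSlice>, identityId: string): boolean {
  return Boolean(cfg.adminIdentityIds?.includes(identityId));
}
```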
@ -29,7 +29,8 @@ export const registerAccessApprovalPolicyRouter = async (server: FastifyZodProvi
|
||||
.array()
|
||||
.min(1, { message: "At least one approver should be provided" }),
|
||||
approvals: z.number().min(1).default(1),
|
||||
enforcementLevel: z.nativeEnum(EnforcementLevel).default(EnforcementLevel.Hard)
|
||||
enforcementLevel: z.nativeEnum(EnforcementLevel).default(EnforcementLevel.Hard),
|
||||
allowedSelfApprovals: z.boolean().default(true)
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
@ -147,7 +148,8 @@ export const registerAccessApprovalPolicyRouter = async (server: FastifyZodProvi
|
||||
.array()
|
||||
.min(1, { message: "At least one approver should be provided" }),
|
||||
approvals: z.number().min(1).optional(),
|
||||
enforcementLevel: z.nativeEnum(EnforcementLevel).default(EnforcementLevel.Hard)
|
||||
enforcementLevel: z.nativeEnum(EnforcementLevel).default(EnforcementLevel.Hard),
|
||||
allowedSelfApprovals: z.boolean().default(true)
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
|
@ -110,7 +110,8 @@ export const registerAccessApprovalRequestRouter = async (server: FastifyZodProv
|
||||
secretPath: z.string().nullish(),
|
||||
envId: z.string(),
|
||||
enforcementLevel: z.string(),
|
||||
deletedAt: z.date().nullish()
|
||||
deletedAt: z.date().nullish(),
|
||||
allowedSelfApprovals: z.boolean()
|
||||
}),
|
||||
reviewers: z
|
||||
.object({
|
||||
|
@ -1,10 +1,10 @@
|
||||
import ms from "ms";
|
||||
import { z } from "zod";
|
||||
|
||||
import { DynamicSecretLeasesSchema } from "@app/db/schemas";
|
||||
import { DYNAMIC_SECRET_LEASES } from "@app/lib/api-docs";
|
||||
import { daysToMillisecond } from "@app/lib/dates";
|
||||
import { removeTrailingSlash } from "@app/lib/fn";
|
||||
import { ms } from "@app/lib/ms";
|
||||
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
|
||||
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
|
||||
import { SanitizedDynamicSecretSchema } from "@app/server/routes/sanitizedSchemas";
|
||||
|
@ -1,4 +1,3 @@
|
||||
import ms from "ms";
|
||||
import { z } from "zod";
|
||||
|
||||
import { DynamicSecretLeasesSchema } from "@app/db/schemas";
|
||||
@ -6,6 +5,7 @@ import { DynamicSecretProviderSchema } from "@app/ee/services/dynamic-secret/pro
|
||||
import { DYNAMIC_SECRETS } from "@app/lib/api-docs";
|
||||
import { daysToMillisecond } from "@app/lib/dates";
|
||||
import { removeTrailingSlash } from "@app/lib/fn";
|
||||
import { ms } from "@app/lib/ms";
|
||||
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
|
||||
import { slugSchema } from "@app/server/lib/schemas";
|
||||
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
|
||||
|
@ -1,11 +1,11 @@
|
||||
import slugify from "@sindresorhus/slugify";
|
||||
import ms from "ms";
|
||||
import { z } from "zod";
|
||||
|
||||
import { IdentityProjectAdditionalPrivilegeTemporaryMode } from "@app/ee/services/identity-project-additional-privilege/identity-project-additional-privilege-types";
|
||||
import { backfillPermissionV1SchemaToV2Schema } from "@app/ee/services/permission/project-permission";
|
||||
import { IDENTITY_ADDITIONAL_PRIVILEGE } from "@app/lib/api-docs";
|
||||
import { UnauthorizedError } from "@app/lib/errors";
|
||||
import { ms } from "@app/lib/ms";
|
||||
import { alphaNumericNanoId } from "@app/lib/nanoid";
|
||||
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
|
||||
import { slugSchema } from "@app/server/lib/schemas";
|
||||
|
@ -1,10 +1,10 @@
|
||||
import ms from "ms";
|
||||
import { z } from "zod";
|
||||
|
||||
import { KmipClientsSchema } from "@app/db/schemas";
|
||||
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
|
||||
import { KmipPermission } from "@app/ee/services/kmip/kmip-enum";
|
||||
import { KmipClientOrderBy } from "@app/ee/services/kmip/kmip-types";
|
||||
import { ms } from "@app/lib/ms";
|
||||
import { OrderByDirection } from "@app/lib/types";
|
||||
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
|
||||
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
|
||||
|
@ -25,7 +25,7 @@ type TSAMLConfig = {
|
||||
callbackUrl: string;
|
||||
entryPoint: string;
|
||||
issuer: string;
|
||||
cert: string;
|
||||
idpCert: string;
|
||||
audience: string;
|
||||
wantAuthnResponseSigned?: boolean;
|
||||
wantAssertionsSigned?: boolean;
|
||||
@ -72,7 +72,7 @@ export const registerSamlRouter = async (server: FastifyZodProvider) => {
|
||||
callbackUrl: `${appCfg.SITE_URL}/api/v1/sso/saml2/${ssoConfig.id}`,
|
||||
entryPoint: ssoConfig.entryPoint,
|
||||
issuer: ssoConfig.issuer,
|
||||
cert: ssoConfig.cert,
|
||||
idpCert: ssoConfig.cert,
|
||||
audience: appCfg.SITE_URL || ""
|
||||
};
|
||||
if (ssoConfig.authProvider === SamlProviders.JUMPCLOUD_SAML) {
|
||||
@ -302,15 +302,21 @@ export const registerSamlRouter = async (server: FastifyZodProvider) => {
|
||||
}
|
||||
},
|
||||
handler: async (req) => {
|
||||
const saml = await server.services.saml.createSamlCfg({
|
||||
actor: req.permission.type,
|
||||
actorId: req.permission.id,
|
||||
actorAuthMethod: req.permission.authMethod,
|
||||
actorOrgId: req.permission.orgId,
|
||||
orgId: req.body.organizationId,
|
||||
...req.body
|
||||
const { isActive, authProvider, issuer, entryPoint, cert } = req.body;
|
||||
const { permission } = req;
|
||||
|
||||
return server.services.saml.createSamlCfg({
|
||||
isActive,
|
||||
authProvider,
|
||||
issuer,
|
||||
entryPoint,
|
||||
idpCert: cert,
|
||||
actor: permission.type,
|
||||
actorId: permission.id,
|
||||
actorAuthMethod: permission.authMethod,
|
||||
actorOrgId: permission.orgId,
|
||||
orgId: req.body.organizationId
|
||||
});
|
||||
return saml;
|
||||
}
|
||||
});
|
||||
|
||||
@ -337,15 +343,21 @@ export const registerSamlRouter = async (server: FastifyZodProvider) => {
|
||||
}
|
||||
},
|
||||
handler: async (req) => {
|
||||
const saml = await server.services.saml.updateSamlCfg({
|
||||
actor: req.permission.type,
|
||||
actorId: req.permission.id,
|
||||
actorAuthMethod: req.permission.authMethod,
|
||||
actorOrgId: req.permission.orgId,
|
||||
orgId: req.body.organizationId,
|
||||
...req.body
|
||||
const { isActive, authProvider, issuer, entryPoint, cert } = req.body;
|
||||
const { permission } = req;
|
||||
|
||||
return server.services.saml.updateSamlCfg({
|
||||
isActive,
|
||||
authProvider,
|
||||
issuer,
|
||||
entryPoint,
|
||||
idpCert: cert,
|
||||
actor: permission.type,
|
||||
actorId: permission.id,
|
||||
actorAuthMethod: permission.authMethod,
|
||||
actorOrgId: permission.orgId,
|
||||
orgId: req.body.organizationId
|
||||
});
|
||||
return saml;
|
||||
}
|
||||
});
|
||||
};
|
||||
|
@ -35,7 +35,8 @@ export const registerSecretApprovalPolicyRouter = async (server: FastifyZodProvi
|
||||
.array()
|
||||
.min(1, { message: "At least one approver should be provided" }),
|
||||
approvals: z.number().min(1).default(1),
|
||||
enforcementLevel: z.nativeEnum(EnforcementLevel).default(EnforcementLevel.Hard)
|
||||
enforcementLevel: z.nativeEnum(EnforcementLevel).default(EnforcementLevel.Hard),
|
||||
allowedSelfApprovals: z.boolean().default(true)
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
@ -85,7 +86,8 @@ export const registerSecretApprovalPolicyRouter = async (server: FastifyZodProvi
|
||||
.nullable()
|
||||
.transform((val) => (val ? removeTrailingSlash(val) : val))
|
||||
.transform((val) => (val === "" ? "/" : val)),
|
||||
enforcementLevel: z.nativeEnum(EnforcementLevel).optional()
|
||||
enforcementLevel: z.nativeEnum(EnforcementLevel).optional(),
|
||||
allowedSelfApprovals: z.boolean().default(true)
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
|
@ -1,16 +1,11 @@
|
||||
import { z } from "zod";
|
||||
|
||||
import {
|
||||
SecretApprovalRequestsReviewersSchema,
|
||||
SecretApprovalRequestsSchema,
|
||||
SecretTagsSchema,
|
||||
UsersSchema
|
||||
} from "@app/db/schemas";
|
||||
import { SecretApprovalRequestsReviewersSchema, SecretApprovalRequestsSchema, UsersSchema } from "@app/db/schemas";
|
||||
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
|
||||
import { ApprovalStatus, RequestState } from "@app/ee/services/secret-approval-request/secret-approval-request-types";
|
||||
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
|
||||
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
|
||||
import { secretRawSchema } from "@app/server/routes/sanitizedSchemas";
|
||||
import { SanitizedTagSchema, secretRawSchema } from "@app/server/routes/sanitizedSchemas";
|
||||
import { AuthMode } from "@app/services/auth/auth-type";
|
||||
import { ResourceMetadataSchema } from "@app/services/resource-metadata/resource-metadata-schema";
|
||||
|
||||
@ -54,7 +49,8 @@ export const registerSecretApprovalRequestRouter = async (server: FastifyZodProv
|
||||
.array(),
|
||||
secretPath: z.string().optional().nullable(),
|
||||
enforcementLevel: z.string(),
|
||||
deletedAt: z.date().nullish()
|
||||
deletedAt: z.date().nullish(),
|
||||
allowedSelfApprovals: z.boolean()
|
||||
}),
|
||||
committerUser: approvalRequestUser,
|
||||
commits: z.object({ op: z.string(), secretId: z.string().nullable().optional() }).array(),
|
||||
@ -159,7 +155,8 @@ export const registerSecretApprovalRequestRouter = async (server: FastifyZodProv
|
||||
id: z.string()
|
||||
}),
|
||||
body: z.object({
|
||||
status: z.enum([ApprovalStatus.APPROVED, ApprovalStatus.REJECTED])
|
||||
status: z.enum([ApprovalStatus.APPROVED, ApprovalStatus.REJECTED]),
|
||||
comment: z.string().optional()
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
@ -175,8 +172,25 @@ export const registerSecretApprovalRequestRouter = async (server: FastifyZodProv
|
||||
actorAuthMethod: req.permission.authMethod,
|
||||
actorOrgId: req.permission.orgId,
|
||||
approvalId: req.params.id,
|
||||
status: req.body.status
|
||||
status: req.body.status,
|
||||
comment: req.body.comment
|
||||
});
|
||||
|
||||
await server.services.auditLog.createAuditLog({
|
||||
...req.auditLogInfo,
|
||||
orgId: req.permission.orgId,
|
||||
projectId: review.projectId,
|
||||
event: {
|
||||
type: EventType.SECRET_APPROVAL_REQUEST_REVIEW,
|
||||
metadata: {
|
||||
secretApprovalRequestId: review.requestId,
|
||||
reviewedBy: review.reviewerUserId,
|
||||
status: review.status as ApprovalStatus,
|
||||
comment: review.comment || ""
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
return { review };
|
||||
}
|
||||
});
|
||||
@ -232,14 +246,6 @@ export const registerSecretApprovalRequestRouter = async (server: FastifyZodProv
|
||||
}
|
||||
});
|
||||
|
||||
const tagSchema = SecretTagsSchema.pick({
|
||||
id: true,
|
||||
slug: true,
|
||||
color: true
|
||||
})
|
||||
.array()
|
||||
.optional();
|
||||
|
||||
server.route({
|
||||
method: "GET",
|
||||
url: "/:id",
|
||||
@ -262,18 +268,19 @@ export const registerSecretApprovalRequestRouter = async (server: FastifyZodProv
|
||||
approvers: approvalRequestUser.array(),
|
||||
secretPath: z.string().optional().nullable(),
|
||||
enforcementLevel: z.string(),
|
||||
deletedAt: z.date().nullish()
|
||||
deletedAt: z.date().nullish(),
|
||||
allowedSelfApprovals: z.boolean()
|
||||
}),
|
||||
environment: z.string(),
|
||||
statusChangedByUser: approvalRequestUser.optional(),
|
||||
committerUser: approvalRequestUser,
|
||||
reviewers: approvalRequestUser.extend({ status: z.string() }).array(),
|
||||
reviewers: approvalRequestUser.extend({ status: z.string(), comment: z.string().optional() }).array(),
|
||||
secretPath: z.string(),
|
||||
commits: secretRawSchema
|
||||
.omit({ _id: true, environment: true, workspace: true, type: true, version: true })
|
||||
.extend({
|
||||
op: z.string(),
|
||||
tags: tagSchema,
|
||||
tags: SanitizedTagSchema.array().optional(),
|
||||
secretMetadata: ResourceMetadataSchema.nullish(),
|
||||
secret: z
|
||||
.object({
|
||||
@ -292,7 +299,7 @@ export const registerSecretApprovalRequestRouter = async (server: FastifyZodProv
|
||||
secretKey: z.string(),
|
||||
secretValue: z.string().optional(),
|
||||
secretComment: z.string().optional(),
|
||||
tags: tagSchema,
|
||||
tags: SanitizedTagSchema.array().optional(),
|
||||
secretMetadata: ResourceMetadataSchema.nullish()
|
||||
})
|
||||
.optional()
|
||||
|
@ -1,6 +1,6 @@
|
||||
import z from "zod";
|
||||
|
||||
import { ProjectPermissionActions } from "@app/ee/services/permission/project-permission";
|
||||
import { ProjectPermissionSecretActions } from "@app/ee/services/permission/project-permission";
|
||||
import { RAW_SECRETS } from "@app/lib/api-docs";
|
||||
import { removeTrailingSlash } from "@app/lib/fn";
|
||||
import { readLimit } from "@app/server/config/rateLimiter";
|
||||
@ -9,7 +9,7 @@ import { AuthMode } from "@app/services/auth/auth-type";
|
||||
|
||||
const AccessListEntrySchema = z
|
||||
.object({
|
||||
allowedActions: z.nativeEnum(ProjectPermissionActions).array(),
|
||||
allowedActions: z.nativeEnum(ProjectPermissionSecretActions).array(),
|
||||
id: z.string(),
|
||||
membershipId: z.string(),
|
||||
name: z.string()
|
||||
|
@ -22,7 +22,11 @@ export const registerSecretVersionRouter = async (server: FastifyZodProvider) =>
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
secretVersions: secretRawSchema.array()
|
||||
secretVersions: secretRawSchema
|
||||
.extend({
|
||||
secretValueHidden: z.boolean()
|
||||
})
|
||||
.array()
|
||||
})
|
||||
}
|
||||
},
|
||||
@ -37,6 +41,7 @@ export const registerSecretVersionRouter = async (server: FastifyZodProvider) =>
|
||||
offset: req.query.offset,
|
||||
secretId: req.params.secretId
|
||||
});
|
||||
|
||||
return { secretVersions };
|
||||
}
|
||||
});
|
||||
|
@ -1,10 +1,10 @@
|
||||
import { z } from "zod";
|
||||
|
||||
import { SecretSnapshotsSchema, SecretTagsSchema } from "@app/db/schemas";
|
||||
import { SecretSnapshotsSchema } from "@app/db/schemas";
|
||||
import { PROJECTS } from "@app/lib/api-docs";
|
||||
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
|
||||
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
|
||||
import { secretRawSchema } from "@app/server/routes/sanitizedSchemas";
|
||||
import { SanitizedTagSchema, secretRawSchema } from "@app/server/routes/sanitizedSchemas";
|
||||
import { AuthMode } from "@app/services/auth/auth-type";
|
||||
|
||||
export const registerSnapshotRouter = async (server: FastifyZodProvider) => {
|
||||
@ -31,12 +31,9 @@ export const registerSnapshotRouter = async (server: FastifyZodProvider) => {
|
||||
secretVersions: secretRawSchema
|
||||
.omit({ _id: true, environment: true, workspace: true, type: true })
|
||||
.extend({
|
||||
secretValueHidden: z.boolean(),
|
||||
secretId: z.string(),
|
||||
tags: SecretTagsSchema.pick({
|
||||
id: true,
|
||||
slug: true,
|
||||
color: true
|
||||
}).array()
|
||||
tags: SanitizedTagSchema.array()
|
||||
})
|
||||
.array(),
|
||||
folderVersion: z.object({ id: z.string(), name: z.string() }).array(),
|
||||
@ -55,6 +52,7 @@ export const registerSnapshotRouter = async (server: FastifyZodProvider) => {
|
||||
actorOrgId: req.permission.orgId,
|
||||
id: req.params.secretSnapshotId
|
||||
});
|
||||
|
||||
return { secretSnapshot };
|
||||
}
|
||||
});
|
||||
|
@ -1,13 +1,15 @@
|
||||
import ms from "ms";
|
||||
import { z } from "zod";
|
||||
|
||||
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
|
||||
import { SshCertType } from "@app/ee/services/ssh/ssh-certificate-authority-types";
|
||||
import { SSH_CERTIFICATE_AUTHORITIES } from "@app/lib/api-docs";
|
||||
import { ms } from "@app/lib/ms";
|
||||
import { writeLimit } from "@app/server/config/rateLimiter";
|
||||
import { getTelemetryDistinctId } from "@app/server/lib/telemetry";
|
||||
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
|
||||
import { AuthMode } from "@app/services/auth/auth-type";
|
||||
import { CertKeyAlgorithm } from "@app/services/certificate/certificate-types";
|
||||
import { PostHogEventTypes } from "@app/services/telemetry/telemetry-types";
|
||||
|
||||
export const registerSshCertRouter = async (server: FastifyZodProvider) => {
|
||||
server.route({
|
||||
@ -73,6 +75,16 @@ export const registerSshCertRouter = async (server: FastifyZodProvider) => {
|
||||
}
|
||||
});
|
||||
|
||||
await server.services.telemetry.sendPostHogEvents({
|
||||
event: PostHogEventTypes.SignSshKey,
|
||||
distinctId: getTelemetryDistinctId(req),
|
||||
properties: {
|
||||
certificateTemplateId: req.body.certificateTemplateId,
|
||||
principals: req.body.principals,
|
||||
...req.auditLogInfo
|
||||
}
|
||||
});
|
||||
|
||||
return {
|
||||
serialNumber,
|
||||
signedKey: signedPublicKey
|
||||
@ -152,6 +164,16 @@ export const registerSshCertRouter = async (server: FastifyZodProvider) => {
|
||||
}
|
||||
});
|
||||
|
||||
await server.services.telemetry.sendPostHogEvents({
|
||||
event: PostHogEventTypes.IssueSshCreds,
|
||||
distinctId: getTelemetryDistinctId(req),
|
||||
properties: {
|
||||
certificateTemplateId: req.body.certificateTemplateId,
|
||||
principals: req.body.principals,
|
||||
...req.auditLogInfo
|
||||
}
|
||||
});
|
||||
|
||||
return {
|
||||
serialNumber,
|
||||
signedKey: signedPublicKey,
|
||||
|
@ -1,5 +1,4 @@
|
||||
import slugify from "@sindresorhus/slugify";
|
||||
import ms from "ms";
|
||||
import { z } from "zod";
|
||||
|
||||
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
|
||||
@ -10,6 +9,7 @@ import {
|
||||
isValidUserPattern
|
||||
} from "@app/ee/services/ssh-certificate-template/ssh-certificate-template-validators";
|
||||
import { SSH_CERTIFICATE_TEMPLATES } from "@app/lib/api-docs";
|
||||
import { ms } from "@app/lib/ms";
|
||||
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
|
||||
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
|
||||
import { AuthMode } from "@app/services/auth/auth-type";
|
||||
|
@ -1,10 +1,11 @@
|
||||
import slugify from "@sindresorhus/slugify";
|
||||
import ms from "ms";
|
||||
import { z } from "zod";
|
||||
|
||||
import { checkForInvalidPermissionCombination } from "@app/ee/services/permission/permission-fns";
|
||||
import { ProjectPermissionV2Schema } from "@app/ee/services/permission/project-permission";
|
||||
import { ProjectUserAdditionalPrivilegeTemporaryMode } from "@app/ee/services/project-user-additional-privilege/project-user-additional-privilege-types";
|
||||
import { PROJECT_USER_ADDITIONAL_PRIVILEGE } from "@app/lib/api-docs";
|
||||
import { ms } from "@app/lib/ms";
|
||||
import { alphaNumericNanoId } from "@app/lib/nanoid";
|
||||
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
|
||||
import { slugSchema } from "@app/server/lib/schemas";
|
||||
@ -23,7 +24,9 @@ export const registerUserAdditionalPrivilegeRouter = async (server: FastifyZodPr
|
||||
body: z.object({
|
||||
projectMembershipId: z.string().min(1).describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.CREATE.projectMembershipId),
|
||||
slug: slugSchema({ min: 1, max: 60 }).optional().describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.CREATE.slug),
|
||||
permissions: ProjectPermissionV2Schema.array().describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.CREATE.permissions),
|
||||
permissions: ProjectPermissionV2Schema.array()
|
||||
.describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.CREATE.permissions)
|
||||
.refine(checkForInvalidPermissionCombination),
|
||||
type: z.discriminatedUnion("isTemporary", [
|
||||
z.object({
|
||||
isTemporary: z.literal(false)
|
||||
@ -81,7 +84,8 @@ export const registerUserAdditionalPrivilegeRouter = async (server: FastifyZodPr
|
||||
slug: slugSchema({ min: 1, max: 60 }).describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.UPDATE.slug),
|
||||
permissions: ProjectPermissionV2Schema.array()
|
||||
.optional()
|
||||
.describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.UPDATE.permissions),
|
||||
.describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.UPDATE.permissions)
|
||||
.refine(checkForInvalidPermissionCombination),
|
||||
type: z.discriminatedUnion("isTemporary", [
|
||||
z.object({ isTemporary: z.literal(false).describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.UPDATE.isTemporary) }),
|
||||
z.object({
|
||||
|
@ -1,10 +1,11 @@
|
||||
import slugify from "@sindresorhus/slugify";
|
||||
import ms from "ms";
|
||||
import { z } from "zod";
|
||||
|
||||
import { IdentityProjectAdditionalPrivilegeTemporaryMode } from "@app/ee/services/identity-project-additional-privilege-v2/identity-project-additional-privilege-v2-types";
|
||||
import { checkForInvalidPermissionCombination } from "@app/ee/services/permission/permission-fns";
|
||||
import { ProjectPermissionV2Schema } from "@app/ee/services/permission/project-permission";
|
||||
import { IDENTITY_ADDITIONAL_PRIVILEGE_V2 } from "@app/lib/api-docs";
|
||||
import { ms } from "@app/lib/ms";
|
||||
import { alphaNumericNanoId } from "@app/lib/nanoid";
|
||||
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
|
||||
import { slugSchema } from "@app/server/lib/schemas";
|
||||
@ -30,7 +31,9 @@ export const registerIdentityProjectAdditionalPrivilegeRouter = async (server: F
|
||||
identityId: z.string().min(1).describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.CREATE.identityId),
|
||||
projectId: z.string().min(1).describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.CREATE.projectId),
|
||||
slug: slugSchema({ min: 1, max: 60 }).optional().describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.CREATE.slug),
|
||||
permissions: ProjectPermissionV2Schema.array().describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.CREATE.permission),
|
||||
permissions: ProjectPermissionV2Schema.array()
|
||||
.describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.CREATE.permission)
|
||||
.refine(checkForInvalidPermissionCombination),
|
||||
type: z.discriminatedUnion("isTemporary", [
|
||||
z.object({
|
||||
isTemporary: z.literal(false)
|
||||
@ -94,7 +97,8 @@ export const registerIdentityProjectAdditionalPrivilegeRouter = async (server: F
|
||||
slug: slugSchema({ min: 1, max: 60 }).describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.UPDATE.slug),
|
||||
permissions: ProjectPermissionV2Schema.array()
|
||||
.optional()
|
||||
.describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.UPDATE.privilegePermission),
|
||||
.describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.UPDATE.privilegePermission)
|
||||
.refine(checkForInvalidPermissionCombination),
|
||||
type: z.discriminatedUnion("isTemporary", [
|
||||
z.object({ isTemporary: z.literal(false).describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.UPDATE.isTemporary) }),
|
||||
z.object({
|
||||
|
@ -2,6 +2,7 @@ import { packRules } from "@casl/ability/extra";
|
||||
import { z } from "zod";
|
||||
|
||||
import { ProjectMembershipRole, ProjectRolesSchema } from "@app/db/schemas";
|
||||
import { checkForInvalidPermissionCombination } from "@app/ee/services/permission/permission-fns";
|
||||
import { ProjectPermissionV2Schema } from "@app/ee/services/permission/project-permission";
|
||||
import { PROJECT_ROLE } from "@app/lib/api-docs";
|
||||
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
|
||||
@ -37,7 +38,9 @@ export const registerProjectRoleRouter = async (server: FastifyZodProvider) => {
|
||||
.describe(PROJECT_ROLE.CREATE.slug),
|
||||
name: z.string().min(1).trim().describe(PROJECT_ROLE.CREATE.name),
|
||||
description: z.string().trim().nullish().describe(PROJECT_ROLE.CREATE.description),
|
||||
permissions: ProjectPermissionV2Schema.array().describe(PROJECT_ROLE.CREATE.permissions)
|
||||
permissions: ProjectPermissionV2Schema.array()
|
||||
.describe(PROJECT_ROLE.CREATE.permissions)
|
||||
.refine(checkForInvalidPermissionCombination)
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
@ -92,7 +95,10 @@ export const registerProjectRoleRouter = async (server: FastifyZodProvider) => {
|
||||
.describe(PROJECT_ROLE.UPDATE.slug),
|
||||
name: z.string().trim().optional().describe(PROJECT_ROLE.UPDATE.name),
|
||||
description: z.string().trim().nullish().describe(PROJECT_ROLE.UPDATE.description),
|
||||
permissions: ProjectPermissionV2Schema.array().describe(PROJECT_ROLE.UPDATE.permissions).optional()
|
||||
permissions: ProjectPermissionV2Schema.array()
|
||||
.describe(PROJECT_ROLE.UPDATE.permissions)
|
||||
.optional()
|
||||
.superRefine(checkForInvalidPermissionCombination)
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
|
@ -65,7 +65,8 @@ export const accessApprovalPolicyServiceFactory = ({
|
||||
approvers,
|
||||
projectSlug,
|
||||
environment,
|
||||
enforcementLevel
|
||||
enforcementLevel,
|
||||
allowedSelfApprovals
|
||||
}: TCreateAccessApprovalPolicy) => {
|
||||
const project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId);
|
||||
if (!project) throw new NotFoundError({ message: `Project with slug '${projectSlug}' not found` });
|
||||
@ -153,7 +154,8 @@ export const accessApprovalPolicyServiceFactory = ({
|
||||
approvals,
|
||||
secretPath,
|
||||
name,
|
||||
enforcementLevel
|
||||
enforcementLevel,
|
||||
allowedSelfApprovals
|
||||
},
|
||||
tx
|
||||
);
|
||||
@ -216,7 +218,8 @@ export const accessApprovalPolicyServiceFactory = ({
|
||||
actorOrgId,
|
||||
actorAuthMethod,
|
||||
approvals,
|
||||
enforcementLevel
|
||||
enforcementLevel,
|
||||
allowedSelfApprovals
|
||||
}: TUpdateAccessApprovalPolicy) => {
|
||||
const groupApprovers = approvers
|
||||
.filter((approver) => approver.type === ApproverType.Group)
|
||||
@ -262,7 +265,8 @@ export const accessApprovalPolicyServiceFactory = ({
|
||||
approvals,
|
||||
secretPath,
|
||||
name,
|
||||
enforcementLevel
|
||||
enforcementLevel,
|
||||
allowedSelfApprovals
|
||||
},
|
||||
tx
|
||||
);
|
||||
|
@ -26,6 +26,7 @@ export type TCreateAccessApprovalPolicy = {
projectSlug: string;
name: string;
enforcementLevel: EnforcementLevel;
allowedSelfApprovals: boolean;
} & Omit<TProjectPermission, "projectId">;

export type TUpdateAccessApprovalPolicy = {

@ -35,6 +36,7 @@ export type TUpdateAccessApprovalPolicy = {
secretPath?: string;
name?: string;
enforcementLevel?: EnforcementLevel;
allowedSelfApprovals: boolean;
} & Omit<TProjectPermission, "projectId">;

export type TDeleteAccessApprovalPolicy = {
@ -61,6 +61,7 @@ export const accessApprovalRequestDALFactory = (db: TDbClient) => {
|
||||
db.ref("approvals").withSchema(TableName.AccessApprovalPolicy).as("policyApprovals"),
|
||||
db.ref("secretPath").withSchema(TableName.AccessApprovalPolicy).as("policySecretPath"),
|
||||
db.ref("enforcementLevel").withSchema(TableName.AccessApprovalPolicy).as("policyEnforcementLevel"),
|
||||
db.ref("allowedSelfApprovals").withSchema(TableName.AccessApprovalPolicy).as("policyAllowedSelfApprovals"),
|
||||
db.ref("envId").withSchema(TableName.AccessApprovalPolicy).as("policyEnvId"),
|
||||
db.ref("deletedAt").withSchema(TableName.AccessApprovalPolicy).as("policyDeletedAt")
|
||||
)
|
||||
@ -119,6 +120,7 @@ export const accessApprovalRequestDALFactory = (db: TDbClient) => {
|
||||
approvals: doc.policyApprovals,
|
||||
secretPath: doc.policySecretPath,
|
||||
enforcementLevel: doc.policyEnforcementLevel,
|
||||
allowedSelfApprovals: doc.policyAllowedSelfApprovals,
|
||||
envId: doc.policyEnvId,
|
||||
deletedAt: doc.policyDeletedAt
|
||||
},
|
||||
@ -254,6 +256,7 @@ export const accessApprovalRequestDALFactory = (db: TDbClient) => {
|
||||
tx.ref("slug").withSchema(TableName.Environment).as("environment"),
|
||||
tx.ref("secretPath").withSchema(TableName.AccessApprovalPolicy).as("policySecretPath"),
|
||||
tx.ref("enforcementLevel").withSchema(TableName.AccessApprovalPolicy).as("policyEnforcementLevel"),
|
||||
tx.ref("allowedSelfApprovals").withSchema(TableName.AccessApprovalPolicy).as("policyAllowedSelfApprovals"),
|
||||
tx.ref("approvals").withSchema(TableName.AccessApprovalPolicy).as("policyApprovals"),
|
||||
tx.ref("deletedAt").withSchema(TableName.AccessApprovalPolicy).as("policyDeletedAt")
|
||||
);
|
||||
@ -275,6 +278,7 @@ export const accessApprovalRequestDALFactory = (db: TDbClient) => {
|
||||
approvals: el.policyApprovals,
|
||||
secretPath: el.policySecretPath,
|
||||
enforcementLevel: el.policyEnforcementLevel,
|
||||
allowedSelfApprovals: el.policyAllowedSelfApprovals,
|
||||
deletedAt: el.policyDeletedAt
|
||||
},
|
||||
requestedByUser: {
|
||||
|
@ -1,9 +1,10 @@
|
||||
import slugify from "@sindresorhus/slugify";
|
||||
import ms from "ms";
|
||||
import msFn from "ms";
|
||||
|
||||
import { ActionProjectType, ProjectMembershipRole } from "@app/db/schemas";
|
||||
import { getConfig } from "@app/lib/config/env";
|
||||
import { BadRequestError, ForbiddenRequestError, NotFoundError } from "@app/lib/errors";
|
||||
import { ms } from "@app/lib/ms";
|
||||
import { alphaNumericNanoId } from "@app/lib/nanoid";
|
||||
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
|
||||
import { TProjectDALFactory } from "@app/services/project/project-dal";
|
||||
@ -246,7 +247,7 @@ export const accessApprovalRequestServiceFactory = ({
|
||||
requesterEmail: requestedByUser.email,
|
||||
isTemporary,
|
||||
...(isTemporary && {
|
||||
expiresIn: ms(ms(temporaryRange || ""), { long: true })
|
||||
expiresIn: msFn(ms(temporaryRange || ""), { long: true })
|
||||
}),
|
||||
secretPath,
|
||||
environment: envSlug,
|
||||
@ -319,6 +320,11 @@ export const accessApprovalRequestServiceFactory = ({
|
||||
message: "The policy associated with this access request has been deleted."
|
||||
});
|
||||
}
|
||||
if (!policy.allowedSelfApprovals && actorId === accessApprovalRequest.requestedByUserId) {
|
||||
throw new BadRequestError({
|
||||
message: "Failed to review access approval request. Users are not authorized to review their own request."
|
||||
});
|
||||
}
|
||||
|
||||
const { membership, hasRole } = await permissionService.getProjectPermission({
|
||||
actor,
|
||||
|
@ -1,8 +1,10 @@
import { ForbiddenError } from "@casl/ability";
import { requestContext } from "@fastify/request-context";

import { ActionProjectType } from "@app/db/schemas";
import { getConfig } from "@app/lib/config/env";
import { BadRequestError } from "@app/lib/errors";
import { ActorType } from "@app/services/auth/auth-type";

import { OrgPermissionActions, OrgPermissionSubjects } from "../permission/org-permission";
import { TPermissionServiceFactory } from "../permission/permission-service";

@ -81,8 +83,12 @@ export const auditLogServiceFactory = ({
if (!data.projectId && !data.orgId)
throw new BadRequestError({ message: "Must specify either project id or org id" });
}

return auditLogQueue.pushToLog(data);
const el = { ...data };
if (el.actor.type === ActorType.USER || el.actor.type === ActorType.IDENTITY) {
const permissionMetadata = requestContext.get("identityPermissionMetadata");
el.actor.metadata.permission = permissionMetadata;
}
return auditLogQueue.pushToLog(el);
};

return {
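(Illustrative note, not part of the diff.) The audit log service now copies request-scoped permission metadata onto user and identity actors before queueing the entry. A minimal sketch of where that metadata could be stored; the helper name and metadata shape are assumptions, only the "identityPermissionMetadata" key comes from the diff:

```ts
// Sketch: populate the request-scoped key that auditLogServiceFactory reads above.
import { requestContext } from "@fastify/request-context";

const rememberPermissionMetadata = (metadata: Record<string, unknown>) => {
  // later copied onto actor.metadata.permission when the audit log entry is pushed
  requestContext.set("identityPermissionMetadata", metadata);
};
```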
@ -22,6 +22,7 @@ import {
|
||||
} from "@app/services/secret-sync/secret-sync-types";
|
||||
|
||||
import { KmipPermission } from "../kmip/kmip-enum";
|
||||
import { ApprovalStatus } from "../secret-approval-request/secret-approval-request-types";
|
||||
|
||||
export type TListProjectAuditLogDTO = {
|
||||
filter: {
|
||||
@ -165,6 +166,7 @@ export enum EventType {
|
||||
SECRET_APPROVAL_REQUEST = "secret-approval-request",
|
||||
SECRET_APPROVAL_CLOSED = "secret-approval-closed",
|
||||
SECRET_APPROVAL_REOPENED = "secret-approval-reopened",
|
||||
SECRET_APPROVAL_REQUEST_REVIEW = "secret-approval-request-review",
|
||||
SIGN_SSH_KEY = "sign-ssh-key",
|
||||
ISSUE_SSH_CREDS = "issue-ssh-creds",
|
||||
CREATE_SSH_CA = "create-ssh-certificate-authority",
|
||||
@ -288,6 +290,7 @@ interface UserActorMetadata {
|
||||
userId: string;
|
||||
email?: string | null;
|
||||
username: string;
|
||||
permission?: Record<string, unknown>;
|
||||
}
|
||||
|
||||
interface ServiceActorMetadata {
|
||||
@ -298,6 +301,7 @@ interface ServiceActorMetadata {
|
||||
interface IdentityActorMetadata {
|
||||
identityId: string;
|
||||
name: string;
|
||||
permission?: Record<string, unknown>;
|
||||
}
|
||||
|
||||
interface ScimClientActorMetadata {}
|
||||
@ -976,6 +980,7 @@ interface AddIdentityOidcAuthEvent {
|
||||
boundIssuer: string;
|
||||
boundAudiences: string;
|
||||
boundClaims: Record<string, string>;
|
||||
claimMetadataMapping: Record<string, string>;
|
||||
boundSubject: string;
|
||||
accessTokenTTL: number;
|
||||
accessTokenMaxTTL: number;
|
||||
@ -1000,6 +1005,7 @@ interface UpdateIdentityOidcAuthEvent {
|
||||
boundIssuer?: string;
|
||||
boundAudiences?: string;
|
||||
boundClaims?: Record<string, string>;
|
||||
claimMetadataMapping?: Record<string, string>;
|
||||
boundSubject?: string;
|
||||
accessTokenTTL?: number;
|
||||
accessTokenMaxTTL?: number;
|
||||
@ -1314,6 +1320,16 @@ interface SecretApprovalRequest {
|
||||
};
|
||||
}
|
||||
|
||||
interface SecretApprovalRequestReview {
|
||||
type: EventType.SECRET_APPROVAL_REQUEST_REVIEW;
|
||||
metadata: {
|
||||
secretApprovalRequestId: string;
|
||||
reviewedBy: string;
|
||||
status: ApprovalStatus;
|
||||
comment: string;
|
||||
};
|
||||
}
|
||||
|
||||
interface SignSshKey {
|
||||
type: EventType.SIGN_SSH_KEY;
|
||||
metadata: {
|
||||
@ -2482,4 +2498,5 @@ export type Event =
|
||||
| KmipOperationRevokeEvent
|
||||
| KmipOperationLocateEvent
|
||||
| KmipOperationRegisterEvent
|
||||
| CreateSecretRequestEvent;
|
||||
| CreateSecretRequestEvent
|
||||
| SecretApprovalRequestReview;
|
||||
|
@ -1,5 +1,4 @@
import { ForbiddenError, subject } from "@casl/ability";
import ms from "ms";

import { ActionProjectType } from "@app/db/schemas";
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";

@ -11,6 +10,7 @@ import {
import { getConfig } from "@app/lib/config/env";
import { BadRequestError, NotFoundError } from "@app/lib/errors";
import { logger } from "@app/lib/logger";
import { ms } from "@app/lib/ms";
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
import { KmsDataKey } from "@app/services/kms/kms-types";
import { TProjectDALFactory } from "@app/services/project/project-dal";
@ -1,31 +1,51 @@
import crypto from "node:crypto";
import dns from "node:dns/promises";
import net from "node:net";

import { getConfig } from "@app/lib/config/env";
import { BadRequestError } from "@app/lib/errors";
import { isPrivateIp } from "@app/lib/ip/ipRange";
import { getDbConnectionHost } from "@app/lib/knex";

export const verifyHostInputValidity = (host: string, isGateway = false) => {
export const verifyHostInputValidity = async (host: string, isGateway = false) => {
const appCfg = getConfig();
const dbHost = appCfg.DB_HOST || getDbConnectionHost(appCfg.DB_CONNECTION_URI);
// no need for validation when it's dev
if (appCfg.NODE_ENV === "development") return;
// if (appCfg.NODE_ENV === "development") return; // incase you want to remove this check in dev

if (host === "host.docker.internal") throw new BadRequestError({ message: "Invalid db host" });
const reservedHosts = [appCfg.DB_HOST || getDbConnectionHost(appCfg.DB_CONNECTION_URI)].concat(
(appCfg.DB_READ_REPLICAS || []).map((el) => getDbConnectionHost(el.DB_CONNECTION_URI)),
getDbConnectionHost(appCfg.REDIS_URL)
);

if (
appCfg.isCloud &&
!isGateway &&
// localhost
// internal ips
(host.match(/^10\.\d+\.\d+\.\d+/) || host.match(/^192\.168\.\d+\.\d+/))
)
throw new BadRequestError({ message: "Invalid db host" });

if (
host === "localhost" ||
host === "127.0.0.1" ||
(dbHost?.length === host.length && crypto.timingSafeEqual(Buffer.from(dbHost || ""), Buffer.from(host)))
) {
throw new BadRequestError({ message: "Invalid db host" });
// get host db ip
const exclusiveIps: string[] = [];
for await (const el of reservedHosts) {
if (el) {
if (net.isIPv4(el)) {
exclusiveIps.push(el);
} else {
const resolvedIps = await dns.resolve4(el);
exclusiveIps.push(...resolvedIps);
}
}
}

const normalizedHost = host.split(":")[0];
const inputHostIps: string[] = [];
if (net.isIPv4(host)) {
inputHostIps.push(host);
} else {
if (normalizedHost === "localhost" || normalizedHost === "host.docker.internal") {
throw new BadRequestError({ message: "Invalid db host" });
}
const resolvedIps = await dns.resolve4(host);
inputHostIps.push(...resolvedIps);
}

if (!isGateway) {
const isInternalIp = inputHostIps.some((el) => isPrivateIp(el));
if (isInternalIp) throw new BadRequestError({ message: "Invalid db host" });
}

const isAppUsedIps = inputHostIps.some((el) => exclusiveIps.includes(el));
if (isAppUsedIps) throw new BadRequestError({ message: "Invalid db host" });
return inputHostIps;
};
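(Illustrative note, not part of the diff.) `verifyHostInputValidity` is now async: it resolves the supplied host to IPv4 addresses, rejects private ranges and the app's own DB, read-replica, and Redis hosts, and returns the resolved IPs. A minimal usage sketch with a placeholder hostname:

```ts
// Sketch: callers now await the helper and connect to the resolved IP.
const [hostIp] = await verifyHostInputValidity("db.example.com"); // throws BadRequestError for reserved or private hosts
console.log(hostIp); // e.g. "203.0.113.10" (placeholder)
```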
@ -13,6 +13,7 @@ import { customAlphabet } from "nanoid";
import { z } from "zod";

import { BadRequestError } from "@app/lib/errors";
import { validateHandlebarTemplate } from "@app/lib/template/validate-handlebars";

import { DynamicSecretAwsElastiCacheSchema, TDynamicProviderFns } from "./models";

@ -144,6 +145,14 @@ export const AwsElastiCacheDatabaseProvider = (): TDynamicProviderFns => {
// We can't return the parsed statements here because we need to use the handlebars template to generate the username and password, before we can use the parsed statements.
CreateElastiCacheUserSchema.parse(JSON.parse(providerInputs.creationStatement));
DeleteElasticCacheUserSchema.parse(JSON.parse(providerInputs.revocationStatement));
validateHandlebarTemplate("AWS ElastiCache creation", providerInputs.creationStatement, {
allowedExpressions: (val) => ["username", "password", "expiration"].includes(val)
});
if (providerInputs.revocationStatement) {
validateHandlebarTemplate("AWS ElastiCache revoke", providerInputs.revocationStatement, {
allowedExpressions: (val) => ["username"].includes(val)
});
}

return providerInputs;
};
@ -3,9 +3,10 @@ import handlebars from "handlebars";
import { customAlphabet } from "nanoid";
import { z } from "zod";

import { BadRequestError } from "@app/lib/errors";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { validateHandlebarTemplate } from "@app/lib/template/validate-handlebars";

import { verifyHostInputValidity } from "../dynamic-secret-fns";
import { DynamicSecretCassandraSchema, TDynamicProviderFns } from "./models";

const generatePassword = (size = 48) => {

@ -20,14 +21,28 @@ const generateUsername = () => {
export const CassandraProvider = (): TDynamicProviderFns => {
const validateProviderInputs = async (inputs: unknown) => {
const providerInputs = await DynamicSecretCassandraSchema.parseAsync(inputs);
if (providerInputs.host === "localhost" || providerInputs.host === "127.0.0.1") {
throw new BadRequestError({ message: "Invalid db host" });
const hostIps = await Promise.all(
providerInputs.host
.split(",")
.filter(Boolean)
.map((el) => verifyHostInputValidity(el).then((ip) => ip[0]))
);
validateHandlebarTemplate("Cassandra creation", providerInputs.creationStatement, {
allowedExpressions: (val) => ["username", "password", "expiration", "keyspace"].includes(val)
});
if (providerInputs.renewStatement) {
validateHandlebarTemplate("Cassandra renew", providerInputs.renewStatement, {
allowedExpressions: (val) => ["username", "expiration", "keyspace"].includes(val)
});
}
validateHandlebarTemplate("Cassandra revoke", providerInputs.revocationStatement, {
allowedExpressions: (val) => ["username"].includes(val)
});

return providerInputs;
return { ...providerInputs, hostIps };
};

const $getClient = async (providerInputs: z.infer<typeof DynamicSecretCassandraSchema>) => {
const $getClient = async (providerInputs: z.infer<typeof DynamicSecretCassandraSchema> & { hostIps: string[] }) => {
const sslOptions = providerInputs.ca ? { rejectUnauthorized: false, ca: providerInputs.ca } : undefined;
const client = new cassandra.Client({
sslOptions,

@ -40,7 +55,7 @@ export const CassandraProvider = (): TDynamicProviderFns => {
},
keyspace: providerInputs.keyspace,
localDataCenter: providerInputs?.localDataCenter,
contactPoints: providerInputs.host.split(",").filter(Boolean)
contactPoints: providerInputs.hostIps
});
return client;
};
@ -19,15 +19,14 @@ const generateUsername = () => {
export const ElasticSearchProvider = (): TDynamicProviderFns => {
const validateProviderInputs = async (inputs: unknown) => {
const providerInputs = await DynamicSecretElasticSearchSchema.parseAsync(inputs);
verifyHostInputValidity(providerInputs.host);

return providerInputs;
const [hostIp] = await verifyHostInputValidity(providerInputs.host);
return { ...providerInputs, hostIp };
};

const $getClient = async (providerInputs: z.infer<typeof DynamicSecretElasticSearchSchema>) => {
const $getClient = async (providerInputs: z.infer<typeof DynamicSecretElasticSearchSchema> & { hostIp: string }) => {
const connection = new ElasticSearchClient({
node: {
url: new URL(`${providerInputs.host}:${providerInputs.port}`),
url: new URL(`${providerInputs.hostIp}:${providerInputs.port}`),
...(providerInputs.ca && {
ssl: {
rejectUnauthorized: false,
@ -1,5 +1,16 @@
import { z } from "zod";

export type PasswordRequirements = {
length: number;
required: {
lowercase: number;
uppercase: number;
digits: number;
symbols: number;
};
allowedSymbols?: string;
};

export enum SqlProviders {
Postgres = "postgres",
MySQL = "mysql2",

@ -100,6 +111,28 @@ export const DynamicSecretSqlDBSchema = z.object({
database: z.string().trim(),
username: z.string().trim(),
password: z.string().trim(),
passwordRequirements: z
.object({
length: z.number().min(1).max(250),
required: z
.object({
lowercase: z.number().min(0),
uppercase: z.number().min(0),
digits: z.number().min(0),
symbols: z.number().min(0)
})
.refine((data) => {
const total = Object.values(data).reduce((sum, count) => sum + count, 0);
return total <= 250;
}, "Sum of required characters cannot exceed 250"),
allowedSymbols: z.string().optional()
})
.refine((data) => {
const total = Object.values(data.required).reduce((sum, count) => sum + count, 0);
return total <= data.length;
}, "Sum of required characters cannot exceed the total length")
.optional()
.describe("Password generation requirements"),
creationStatement: z.string().trim(),
revocationStatement: z.string().trim(),
renewStatement: z.string().trim().optional(),
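(Illustrative note, not part of the diff.) The new optional `passwordRequirements` field is validated so that the per-class minimums sum to no more than 250 and no more than the total `length`. An example value that passes both refinements; the numbers are illustrative:

```ts
// Example input accepted by the passwordRequirements schema above.
const passwordRequirements = {
  length: 32,                                                      // 1..250
  required: { lowercase: 2, uppercase: 2, digits: 2, symbols: 1 }, // sums to 7 <= 32
  allowedSymbols: "-_.~!*"                                         // optional symbol pool
};
```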
@ -19,15 +19,15 @@ const generateUsername = () => {
export const MongoDBProvider = (): TDynamicProviderFns => {
const validateProviderInputs = async (inputs: unknown) => {
const providerInputs = await DynamicSecretMongoDBSchema.parseAsync(inputs);
verifyHostInputValidity(providerInputs.host);
return providerInputs;
const [hostIp] = await verifyHostInputValidity(providerInputs.host);
return { ...providerInputs, hostIp };
};

const $getClient = async (providerInputs: z.infer<typeof DynamicSecretMongoDBSchema>) => {
const $getClient = async (providerInputs: z.infer<typeof DynamicSecretMongoDBSchema> & { hostIp: string }) => {
const isSrv = !providerInputs.port;
const uri = isSrv
? `mongodb+srv://${providerInputs.host}`
: `mongodb://${providerInputs.host}:${providerInputs.port}`;
? `mongodb+srv://${providerInputs.hostIp}`
: `mongodb://${providerInputs.hostIp}:${providerInputs.port}`;

const client = new MongoClient(uri, {
auth: {
@ -3,7 +3,6 @@ import https from "https";
import { customAlphabet } from "nanoid";
import { z } from "zod";

import { removeTrailingSlash } from "@app/lib/fn";
import { logger } from "@app/lib/logger";
import { alphaNumericNanoId } from "@app/lib/nanoid";

@ -79,14 +78,13 @@ async function deleteRabbitMqUser({ axiosInstance, usernameToDelete }: TDeleteRa
export const RabbitMqProvider = (): TDynamicProviderFns => {
const validateProviderInputs = async (inputs: unknown) => {
const providerInputs = await DynamicSecretRabbitMqSchema.parseAsync(inputs);
verifyHostInputValidity(providerInputs.host);

return providerInputs;
const [hostIp] = await verifyHostInputValidity(providerInputs.host);
return { ...providerInputs, hostIp };
};

const $getClient = async (providerInputs: z.infer<typeof DynamicSecretRabbitMqSchema>) => {
const $getClient = async (providerInputs: z.infer<typeof DynamicSecretRabbitMqSchema> & { hostIp: string }) => {
const axiosInstance = axios.create({
baseURL: `${removeTrailingSlash(providerInputs.host)}:${providerInputs.port}/api`,
baseURL: `${providerInputs.hostIp}:${providerInputs.port}/api`,
auth: {
username: providerInputs.username,
password: providerInputs.password
@ -5,6 +5,7 @@ import { z } from "zod";

import { BadRequestError } from "@app/lib/errors";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { validateHandlebarTemplate } from "@app/lib/template/validate-handlebars";

import { verifyHostInputValidity } from "../dynamic-secret-fns";
import { DynamicSecretRedisDBSchema, TDynamicProviderFns } from "./models";

@ -51,16 +52,28 @@ const executeTransactions = async (connection: Redis, commands: string[]): Promi
export const RedisDatabaseProvider = (): TDynamicProviderFns => {
const validateProviderInputs = async (inputs: unknown) => {
const providerInputs = await DynamicSecretRedisDBSchema.parseAsync(inputs);
verifyHostInputValidity(providerInputs.host);
return providerInputs;
const [hostIp] = await verifyHostInputValidity(providerInputs.host);
validateHandlebarTemplate("Redis creation", providerInputs.creationStatement, {
allowedExpressions: (val) => ["username", "password", "expiration"].includes(val)
});
if (providerInputs.renewStatement) {
validateHandlebarTemplate("Redis renew", providerInputs.renewStatement, {
allowedExpressions: (val) => ["username", "expiration"].includes(val)
});
}
validateHandlebarTemplate("Redis revoke", providerInputs.revocationStatement, {
allowedExpressions: (val) => ["username"].includes(val)
});

return { ...providerInputs, hostIp };
};

const $getClient = async (providerInputs: z.infer<typeof DynamicSecretRedisDBSchema>) => {
const $getClient = async (providerInputs: z.infer<typeof DynamicSecretRedisDBSchema> & { hostIp: string }) => {
let connection: Redis | null = null;
try {
connection = new Redis({
username: providerInputs.username,
host: providerInputs.host,
host: providerInputs.hostIp,
port: providerInputs.port,
password: providerInputs.password,
...(providerInputs.ca && {
@ -5,6 +5,7 @@ import { z } from "zod";

import { BadRequestError } from "@app/lib/errors";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { validateHandlebarTemplate } from "@app/lib/template/validate-handlebars";

import { verifyHostInputValidity } from "../dynamic-secret-fns";
import { DynamicSecretSapAseSchema, TDynamicProviderFns } from "./models";

@ -27,14 +28,25 @@ export const SapAseProvider = (): TDynamicProviderFns => {
const validateProviderInputs = async (inputs: unknown) => {
const providerInputs = await DynamicSecretSapAseSchema.parseAsync(inputs);

verifyHostInputValidity(providerInputs.host);
return providerInputs;
const [hostIp] = await verifyHostInputValidity(providerInputs.host);
validateHandlebarTemplate("SAP ASE creation", providerInputs.creationStatement, {
allowedExpressions: (val) => ["username", "password"].includes(val)
});
if (providerInputs.revocationStatement) {
validateHandlebarTemplate("SAP ASE revoke", providerInputs.revocationStatement, {
allowedExpressions: (val) => ["username"].includes(val)
});
}
return { ...providerInputs, hostIp };
};

const $getClient = async (providerInputs: z.infer<typeof DynamicSecretSapAseSchema>, useMaster?: boolean) => {
const $getClient = async (
providerInputs: z.infer<typeof DynamicSecretSapAseSchema> & { hostIp: string },
useMaster?: boolean
) => {
const connectionString =
`DRIVER={FreeTDS};` +
`SERVER=${providerInputs.host};` +
`SERVER=${providerInputs.hostIp};` +
`PORT=${providerInputs.port};` +
`DATABASE=${useMaster ? "master" : providerInputs.database};` +
`UID=${providerInputs.username};` +
@ -11,6 +11,7 @@ import { z } from "zod";

import { BadRequestError } from "@app/lib/errors";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { validateHandlebarTemplate } from "@app/lib/template/validate-handlebars";

import { verifyHostInputValidity } from "../dynamic-secret-fns";
import { DynamicSecretSapHanaSchema, TDynamicProviderFns } from "./models";

@ -28,13 +29,24 @@ export const SapHanaProvider = (): TDynamicProviderFns => {
const validateProviderInputs = async (inputs: unknown) => {
const providerInputs = await DynamicSecretSapHanaSchema.parseAsync(inputs);

verifyHostInputValidity(providerInputs.host);
return providerInputs;
const [hostIp] = await verifyHostInputValidity(providerInputs.host);
validateHandlebarTemplate("SAP Hana creation", providerInputs.creationStatement, {
allowedExpressions: (val) => ["username", "password", "expiration"].includes(val)
});
if (providerInputs.renewStatement) {
validateHandlebarTemplate("SAP Hana renew", providerInputs.renewStatement, {
allowedExpressions: (val) => ["username", "expiration"].includes(val)
});
}
validateHandlebarTemplate("SAP Hana revoke", providerInputs.revocationStatement, {
allowedExpressions: (val) => ["username"].includes(val)
});
return { ...providerInputs, hostIp };
};

const $getClient = async (providerInputs: z.infer<typeof DynamicSecretSapHanaSchema>) => {
const $getClient = async (providerInputs: z.infer<typeof DynamicSecretSapHanaSchema> & { hostIp: string }) => {
const client = hdb.createClient({
host: providerInputs.host,
host: providerInputs.hostIp,
port: providerInputs.port,
user: providerInputs.username,
password: providerInputs.password,
@ -5,6 +5,7 @@ import { z } from "zod";

import { BadRequestError } from "@app/lib/errors";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { validateHandlebarTemplate } from "@app/lib/template/validate-handlebars";

import { DynamicSecretSnowflakeSchema, TDynamicProviderFns } from "./models";

@ -31,6 +32,18 @@ const getDaysToExpiry = (expiryDate: Date) => {
export const SnowflakeProvider = (): TDynamicProviderFns => {
const validateProviderInputs = async (inputs: unknown) => {
const providerInputs = await DynamicSecretSnowflakeSchema.parseAsync(inputs);
validateHandlebarTemplate("Snowflake creation", providerInputs.creationStatement, {
allowedExpressions: (val) => ["username", "password", "expiration"].includes(val)
});
if (providerInputs.renewStatement) {
validateHandlebarTemplate("Snowflake renew", providerInputs.renewStatement, {
allowedExpressions: (val) => ["username", "expiration"].includes(val)
});
}
validateHandlebarTemplate("Snowflake revoke", providerInputs.revocationStatement, {
allowedExpressions: (val) => ["username"].includes(val)
});

return providerInputs;
};
@ -1,23 +1,107 @@
|
||||
import { randomInt } from "crypto";
|
||||
import handlebars from "handlebars";
|
||||
import knex from "knex";
|
||||
import { customAlphabet } from "nanoid";
|
||||
import { z } from "zod";
|
||||
|
||||
import { withGatewayProxy } from "@app/lib/gateway";
|
||||
import { alphaNumericNanoId } from "@app/lib/nanoid";
|
||||
import { validateHandlebarTemplate } from "@app/lib/template/validate-handlebars";
|
||||
|
||||
import { TGatewayServiceFactory } from "../../gateway/gateway-service";
|
||||
import { verifyHostInputValidity } from "../dynamic-secret-fns";
|
||||
import { DynamicSecretSqlDBSchema, SqlProviders, TDynamicProviderFns } from "./models";
|
||||
import { DynamicSecretSqlDBSchema, PasswordRequirements, SqlProviders, TDynamicProviderFns } from "./models";
|
||||
|
||||
const EXTERNAL_REQUEST_TIMEOUT = 10 * 1000;
|
||||
|
||||
const generatePassword = (provider: SqlProviders) => {
|
||||
// oracle has limit of 48 password length
|
||||
const size = provider === SqlProviders.Oracle ? 30 : 48;
|
||||
const DEFAULT_PASSWORD_REQUIREMENTS = {
|
||||
length: 48,
|
||||
required: {
|
||||
lowercase: 1,
|
||||
uppercase: 1,
|
||||
digits: 1,
|
||||
symbols: 0
|
||||
},
|
||||
allowedSymbols: "-_.~!*"
|
||||
};
|
||||
|
||||
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*";
|
||||
return customAlphabet(charset, 48)(size);
|
||||
const ORACLE_PASSWORD_REQUIREMENTS = {
|
||||
...DEFAULT_PASSWORD_REQUIREMENTS,
|
||||
length: 30
|
||||
};
|
||||
|
||||
const generatePassword = (provider: SqlProviders, requirements?: PasswordRequirements) => {
|
||||
const defaultReqs = provider === SqlProviders.Oracle ? ORACLE_PASSWORD_REQUIREMENTS : DEFAULT_PASSWORD_REQUIREMENTS;
|
||||
const finalReqs = requirements || defaultReqs;
|
||||
|
||||
try {
|
||||
const { length, required, allowedSymbols } = finalReqs;
|
||||
|
||||
const chars = {
|
||||
lowercase: "abcdefghijklmnopqrstuvwxyz",
|
||||
uppercase: "ABCDEFGHIJKLMNOPQRSTUVWXYZ",
|
||||
digits: "0123456789",
|
||||
symbols: allowedSymbols || "-_.~!*"
|
||||
};
|
||||
|
||||
const parts: string[] = [];
|
||||
|
||||
if (required.lowercase > 0) {
|
||||
parts.push(
|
||||
...Array(required.lowercase)
|
||||
.fill(0)
|
||||
.map(() => chars.lowercase[randomInt(chars.lowercase.length)])
|
||||
);
|
||||
}
|
||||
|
||||
if (required.uppercase > 0) {
|
||||
parts.push(
|
||||
...Array(required.uppercase)
|
||||
.fill(0)
|
||||
.map(() => chars.uppercase[randomInt(chars.uppercase.length)])
|
||||
);
|
||||
}
|
||||
|
||||
if (required.digits > 0) {
|
||||
parts.push(
|
||||
...Array(required.digits)
|
||||
.fill(0)
|
||||
.map(() => chars.digits[randomInt(chars.digits.length)])
|
||||
);
|
||||
}
|
||||
|
||||
if (required.symbols > 0) {
|
||||
parts.push(
|
||||
...Array(required.symbols)
|
||||
.fill(0)
|
||||
.map(() => chars.symbols[randomInt(chars.symbols.length)])
|
||||
);
|
||||
}
|
||||
|
||||
const requiredTotal = Object.values(required).reduce<number>((a, b) => a + b, 0);
|
||||
const remainingLength = Math.max(length - requiredTotal, 0);
|
||||
|
||||
const allowedChars = Object.entries(chars)
|
||||
.filter(([key]) => required[key as keyof typeof required] > 0)
|
||||
.map(([, value]) => value)
|
||||
.join("");
|
||||
|
||||
parts.push(
|
||||
...Array(remainingLength)
|
||||
.fill(0)
|
||||
.map(() => allowedChars[randomInt(allowedChars.length)])
|
||||
);
|
||||
|
||||
// shuffle the array to mix up the characters
|
||||
for (let i = parts.length - 1; i > 0; i -= 1) {
|
||||
const j = randomInt(i + 1);
|
||||
[parts[i], parts[j]] = [parts[j], parts[i]];
|
||||
}
|
||||
|
||||
return parts.join("");
|
||||
} catch (error: unknown) {
|
||||
const message = error instanceof Error ? error.message : "Unknown error";
|
||||
throw new Error(`Failed to generate password: ${message}`);
|
||||
}
|
||||
};
|
||||
|
||||
const generateUsername = (provider: SqlProviders) => {
|
||||
@ -34,8 +118,21 @@ type TSqlDatabaseProviderDTO = {
|
||||
export const SqlDatabaseProvider = ({ gatewayService }: TSqlDatabaseProviderDTO): TDynamicProviderFns => {
|
||||
const validateProviderInputs = async (inputs: unknown) => {
|
||||
const providerInputs = await DynamicSecretSqlDBSchema.parseAsync(inputs);
|
||||
verifyHostInputValidity(providerInputs.host, Boolean(providerInputs.projectGatewayId));
|
||||
return providerInputs;
|
||||
|
||||
const [hostIp] = await verifyHostInputValidity(providerInputs.host, Boolean(providerInputs.projectGatewayId));
|
||||
validateHandlebarTemplate("SQL creation", providerInputs.creationStatement, {
|
||||
allowedExpressions: (val) => ["username", "password", "expiration", "database"].includes(val)
|
||||
});
|
||||
if (providerInputs.renewStatement) {
|
||||
validateHandlebarTemplate("SQL renew", providerInputs.renewStatement, {
|
||||
allowedExpressions: (val) => ["username", "expiration", "database"].includes(val)
|
||||
});
|
||||
}
|
||||
validateHandlebarTemplate("SQL revoke", providerInputs.revocationStatement, {
|
||||
allowedExpressions: (val) => ["username", "database"].includes(val)
|
||||
});
|
||||
|
||||
return { ...providerInputs, hostIp };
|
||||
};
|
||||
|
||||
const $getClient = async (providerInputs: z.infer<typeof DynamicSecretSqlDBSchema>) => {
|
||||
@ -61,7 +158,8 @@ export const SqlDatabaseProvider = ({ gatewayService }: TSqlDatabaseProviderDTO)
|
||||
}
|
||||
: undefined
|
||||
},
|
||||
acquireConnectionTimeout: EXTERNAL_REQUEST_TIMEOUT
|
||||
acquireConnectionTimeout: EXTERNAL_REQUEST_TIMEOUT,
|
||||
pool: { min: 0, max: 7 }
|
||||
});
|
||||
return db;
|
||||
};
|
||||
@ -95,7 +193,7 @@ export const SqlDatabaseProvider = ({ gatewayService }: TSqlDatabaseProviderDTO)
|
||||
const validateConnection = async (inputs: unknown) => {
|
||||
const providerInputs = await validateProviderInputs(inputs);
|
||||
let isConnected = false;
|
||||
const gatewayCallback = async (host = providerInputs.host, port = providerInputs.port) => {
|
||||
const gatewayCallback = async (host = providerInputs.hostIp, port = providerInputs.port) => {
|
||||
const db = await $getClient({ ...providerInputs, port, host });
|
||||
// oracle needs from keyword
|
||||
const testStatement = providerInputs.client === SqlProviders.Oracle ? "SELECT 1 FROM DUAL" : "SELECT 1";
|
||||
@ -115,7 +213,7 @@ export const SqlDatabaseProvider = ({ gatewayService }: TSqlDatabaseProviderDTO)
|
||||
const create = async (inputs: unknown, expireAt: number) => {
|
||||
const providerInputs = await validateProviderInputs(inputs);
|
||||
const username = generateUsername(providerInputs.client);
|
||||
const password = generatePassword(providerInputs.client);
|
||||
const password = generatePassword(providerInputs.client, providerInputs.passwordRequirements);
|
||||
const gatewayCallback = async (host = providerInputs.host, port = providerInputs.port) => {
|
||||
const db = await $getClient({ ...providerInputs, port, host });
|
||||
try {
|
||||
|
@ -3,7 +3,7 @@ import slugify from "@sindresorhus/slugify";
|
||||
|
||||
import { OrgMembershipRole, TOrgRoles } from "@app/db/schemas";
|
||||
import { TOidcConfigDALFactory } from "@app/ee/services/oidc/oidc-config-dal";
|
||||
import { isAtLeastAsPrivileged } from "@app/lib/casl";
|
||||
import { validatePermissionBoundary } from "@app/lib/casl/boundary";
|
||||
import { BadRequestError, ForbiddenRequestError, NotFoundError, UnauthorizedError } from "@app/lib/errors";
|
||||
import { alphaNumericNanoId } from "@app/lib/nanoid";
|
||||
import { TGroupProjectDALFactory } from "@app/services/group-project/group-project-dal";
|
||||
@ -87,9 +87,14 @@ export const groupServiceFactory = ({
|
||||
actorOrgId
|
||||
);
|
||||
const isCustomRole = Boolean(customRole);
|
||||
const hasRequiredPriviledges = isAtLeastAsPrivileged(permission, rolePermission);
|
||||
if (!hasRequiredPriviledges)
|
||||
throw new ForbiddenRequestError({ message: "Failed to create a more privileged group" });
|
||||
|
||||
const permissionBoundary = validatePermissionBoundary(permission, rolePermission);
|
||||
if (!permissionBoundary.isValid)
|
||||
throw new ForbiddenRequestError({
|
||||
name: "PermissionBoundaryError",
|
||||
message: "Failed to create a more privileged group",
|
||||
details: { missingPermissions: permissionBoundary.missingPermissions }
|
||||
});
|
||||
|
||||
const group = await groupDAL.transaction(async (tx) => {
|
||||
const existingGroup = await groupDAL.findOne({ orgId: actorOrgId, name }, tx);
|
||||
@ -156,9 +161,13 @@ export const groupServiceFactory = ({
|
||||
);
|
||||
|
||||
const isCustomRole = Boolean(customOrgRole);
|
||||
const hasRequiredNewRolePermission = isAtLeastAsPrivileged(permission, rolePermission);
|
||||
if (!hasRequiredNewRolePermission)
|
||||
throw new ForbiddenRequestError({ message: "Failed to create a more privileged group" });
|
||||
const permissionBoundary = validatePermissionBoundary(permission, rolePermission);
|
||||
if (!permissionBoundary.isValid)
|
||||
throw new ForbiddenRequestError({
|
||||
name: "PermissionBoundaryError",
|
||||
message: "Failed to update a more privileged group",
|
||||
details: { missingPermissions: permissionBoundary.missingPermissions }
|
||||
});
|
||||
if (isCustomRole) customRole = customOrgRole;
|
||||
}
|
||||
|
||||
@ -329,9 +338,13 @@ export const groupServiceFactory = ({
|
||||
const { permission: groupRolePermission } = await permissionService.getOrgPermissionByRole(group.role, actorOrgId);
|
||||
|
||||
// check if user has broader or equal to privileges than group
|
||||
const hasRequiredPrivileges = isAtLeastAsPrivileged(permission, groupRolePermission);
|
||||
if (!hasRequiredPrivileges)
|
||||
throw new ForbiddenRequestError({ message: "Failed to add user to more privileged group" });
|
||||
const permissionBoundary = validatePermissionBoundary(permission, groupRolePermission);
|
||||
if (!permissionBoundary.isValid)
|
||||
throw new ForbiddenRequestError({
|
||||
name: "PermissionBoundaryError",
|
||||
message: "Failed to add user to more privileged group",
|
||||
details: { missingPermissions: permissionBoundary.missingPermissions }
|
||||
});
|
||||
|
||||
const user = await userDAL.findOne({ username });
|
||||
if (!user) throw new NotFoundError({ message: `Failed to find user with username ${username}` });
|
||||
@ -396,9 +409,13 @@ export const groupServiceFactory = ({
|
||||
const { permission: groupRolePermission } = await permissionService.getOrgPermissionByRole(group.role, actorOrgId);
|
||||
|
||||
// check if user has broader or equal to privileges than group
|
||||
const hasRequiredPrivileges = isAtLeastAsPrivileged(permission, groupRolePermission);
|
||||
if (!hasRequiredPrivileges)
|
||||
throw new ForbiddenRequestError({ message: "Failed to delete user from more privileged group" });
|
||||
const permissionBoundary = validatePermissionBoundary(permission, groupRolePermission);
|
||||
if (!permissionBoundary.isValid)
|
||||
throw new ForbiddenRequestError({
|
||||
name: "PermissionBoundaryError",
|
||||
message: "Failed to delete user from more privileged group",
|
||||
details: { missingPermissions: permissionBoundary.missingPermissions }
|
||||
});
|
||||
|
||||
const user = await userDAL.findOne({ username });
|
||||
if (!user) throw new NotFoundError({ message: `Failed to find user with username ${username}` });
|
||||
|
@ -1,10 +1,11 @@
|
||||
import { ForbiddenError, subject } from "@casl/ability";
|
||||
import { packRules } from "@casl/ability/extra";
|
||||
import ms from "ms";
|
||||
|
||||
import { ActionProjectType, TableName } from "@app/db/schemas";
|
||||
import { isAtLeastAsPrivileged } from "@app/lib/casl";
|
||||
import { validatePermissionBoundary } from "@app/lib/casl/boundary";
|
||||
import { BadRequestError, ForbiddenRequestError, NotFoundError } from "@app/lib/errors";
|
||||
import { ms } from "@app/lib/ms";
|
||||
import { validateHandlebarTemplate } from "@app/lib/template/validate-handlebars";
|
||||
import { unpackPermissions } from "@app/server/routes/sanitizedSchema/permission";
|
||||
import { ActorType } from "@app/services/auth/auth-type";
|
||||
import { TIdentityProjectDALFactory } from "@app/services/identity-project/identity-project-dal";
|
||||
@ -79,9 +80,16 @@ export const identityProjectAdditionalPrivilegeV2ServiceFactory = ({
|
||||
// we need to validate that the privilege given is not higher than the assigning users permission
|
||||
// @ts-expect-error this is expected error because of one being really accurate rule definition other being a bit more broader. Both are valid casl rules
|
||||
targetIdentityPermission.update(targetIdentityPermission.rules.concat(customPermission));
|
||||
const hasRequiredPriviledges = isAtLeastAsPrivileged(permission, targetIdentityPermission);
|
||||
if (!hasRequiredPriviledges)
|
||||
throw new ForbiddenRequestError({ message: "Failed to update more privileged identity" });
|
||||
const permissionBoundary = validatePermissionBoundary(permission, targetIdentityPermission);
|
||||
if (!permissionBoundary.isValid)
|
||||
throw new ForbiddenRequestError({
|
||||
name: "PermissionBoundaryError",
|
||||
message: "Failed to update more privileged identity",
|
||||
details: { missingPermissions: permissionBoundary.missingPermissions }
|
||||
});
|
||||
validateHandlebarTemplate("Identity Additional Privilege Create", JSON.stringify(customPermission || []), {
|
||||
allowedExpressions: (val) => val.includes("identity.")
|
||||
});
|
||||
|
||||
const existingSlug = await identityProjectAdditionalPrivilegeDAL.findOne({
|
||||
slug,
|
||||
@ -161,9 +169,17 @@ export const identityProjectAdditionalPrivilegeV2ServiceFactory = ({
|
||||
// we need to validate that the privilege given is not higher than the assigning users permission
|
||||
// @ts-expect-error this is expected error because of one being really accurate rule definition other being a bit more broader. Both are valid casl rules
|
||||
targetIdentityPermission.update(targetIdentityPermission.rules.concat(data.permissions || []));
|
||||
const hasRequiredPriviledges = isAtLeastAsPrivileged(permission, targetIdentityPermission);
|
||||
if (!hasRequiredPriviledges)
|
||||
throw new ForbiddenRequestError({ message: "Failed to update more privileged identity" });
|
||||
const permissionBoundary = validatePermissionBoundary(permission, targetIdentityPermission);
|
||||
if (!permissionBoundary.isValid)
|
||||
throw new ForbiddenRequestError({
|
||||
name: "PermissionBoundaryError",
|
||||
message: "Failed to update more privileged identity",
|
||||
details: { missingPermissions: permissionBoundary.missingPermissions }
|
||||
});
|
||||
|
||||
validateHandlebarTemplate("Identity Additional Privilege Update", JSON.stringify(data.permissions || []), {
|
||||
allowedExpressions: (val) => val.includes("identity.")
|
||||
});
|
||||
|
||||
if (data?.slug) {
|
||||
const existingSlug = await identityProjectAdditionalPrivilegeDAL.findOne({
|
||||
@ -239,9 +255,13 @@ export const identityProjectAdditionalPrivilegeV2ServiceFactory = ({
|
||||
actorOrgId,
|
||||
actionProjectType: ActionProjectType.Any
|
||||
});
|
||||
const hasRequiredPriviledges = isAtLeastAsPrivileged(permission, identityRolePermission);
|
||||
if (!hasRequiredPriviledges)
|
||||
throw new ForbiddenRequestError({ message: "Failed to update more privileged identity" });
|
||||
const permissionBoundary = validatePermissionBoundary(permission, identityRolePermission);
|
||||
if (!permissionBoundary.isValid)
|
||||
throw new ForbiddenRequestError({
|
||||
name: "PermissionBoundaryError",
|
||||
message: "Failed to update more privileged identity",
|
||||
details: { missingPermissions: permissionBoundary.missingPermissions }
|
||||
});
|
||||
|
||||
const deletedPrivilege = await identityProjectAdditionalPrivilegeDAL.deleteById(identityPrivilege.id);
|
||||
return {
|
||||
|
@ -1,10 +1,11 @@
|
||||
import { ForbiddenError, MongoAbility, RawRuleOf, subject } from "@casl/ability";
|
||||
import { PackRule, packRules, unpackRules } from "@casl/ability/extra";
|
||||
import ms from "ms";
|
||||
|
||||
import { ActionProjectType } from "@app/db/schemas";
|
||||
import { isAtLeastAsPrivileged } from "@app/lib/casl";
|
||||
import { validatePermissionBoundary } from "@app/lib/casl/boundary";
|
||||
import { BadRequestError, ForbiddenRequestError, NotFoundError } from "@app/lib/errors";
|
||||
import { ms } from "@app/lib/ms";
|
||||
import { validateHandlebarTemplate } from "@app/lib/template/validate-handlebars";
|
||||
import { UnpackedPermissionSchema } from "@app/server/routes/sanitizedSchema/permission";
|
||||
import { ActorType } from "@app/services/auth/auth-type";
|
||||
import { TIdentityProjectDALFactory } from "@app/services/identity-project/identity-project-dal";
|
||||
@ -88,9 +89,13 @@ export const identityProjectAdditionalPrivilegeServiceFactory = ({
|
||||
// we need to validate that the privilege given is not higher than the assigning users permission
|
||||
// @ts-expect-error this is expected error because of one being really accurate rule definition other being a bit more broader. Both are valid casl rules
|
||||
targetIdentityPermission.update(targetIdentityPermission.rules.concat(customPermission));
|
||||
const hasRequiredPriviledges = isAtLeastAsPrivileged(permission, targetIdentityPermission);
|
||||
if (!hasRequiredPriviledges)
|
||||
throw new ForbiddenRequestError({ message: "Failed to update more privileged identity" });
|
||||
const permissionBoundary = validatePermissionBoundary(permission, targetIdentityPermission);
|
||||
if (!permissionBoundary.isValid)
|
||||
throw new ForbiddenRequestError({
|
||||
name: "PermissionBoundaryError",
|
||||
message: "Failed to update more privileged identity",
|
||||
details: { missingPermissions: permissionBoundary.missingPermissions }
|
||||
});
|
||||
|
||||
const existingSlug = await identityProjectAdditionalPrivilegeDAL.findOne({
|
||||
slug,
|
||||
@ -98,6 +103,10 @@ export const identityProjectAdditionalPrivilegeServiceFactory = ({
|
||||
});
|
||||
if (existingSlug) throw new BadRequestError({ message: "Additional privilege of provided slug exist" });
|
||||
|
||||
validateHandlebarTemplate("Identity Additional Privilege Create", JSON.stringify(customPermission || []), {
|
||||
allowedExpressions: (val) => val.includes("identity.")
|
||||
});
|
||||
|
||||
const packedPermission = JSON.stringify(packRules(customPermission));
|
||||
if (!dto.isTemporary) {
|
||||
const additionalPrivilege = await identityProjectAdditionalPrivilegeDAL.create({
|
||||
@ -172,9 +181,13 @@ export const identityProjectAdditionalPrivilegeServiceFactory = ({
|
||||
// we need to validate that the privilege given is not higher than the assigning users permission
|
||||
// @ts-expect-error this is expected error because of one being really accurate rule definition other being a bit more broader. Both are valid casl rules
|
||||
targetIdentityPermission.update(targetIdentityPermission.rules.concat(data.permissions || []));
|
||||
const hasRequiredPriviledges = isAtLeastAsPrivileged(permission, targetIdentityPermission);
|
||||
if (!hasRequiredPriviledges)
|
||||
throw new ForbiddenRequestError({ message: "Failed to update more privileged identity" });
|
||||
const permissionBoundary = validatePermissionBoundary(permission, targetIdentityPermission);
|
||||
if (!permissionBoundary.isValid)
|
||||
throw new ForbiddenRequestError({
|
||||
name: "PermissionBoundaryError",
|
||||
message: "Failed to update more privileged identity",
|
||||
details: { missingPermissions: permissionBoundary.missingPermissions }
|
||||
});
|
||||
|
||||
const identityPrivilege = await identityProjectAdditionalPrivilegeDAL.findOne({
|
||||
slug,
|
||||
@ -195,6 +208,9 @@ export const identityProjectAdditionalPrivilegeServiceFactory = ({
|
||||
}
|
||||
|
||||
const isTemporary = typeof data?.isTemporary !== "undefined" ? data.isTemporary : identityPrivilege.isTemporary;
|
||||
validateHandlebarTemplate("Identity Additional Privilege Update", JSON.stringify(data.permissions || []), {
|
||||
allowedExpressions: (val) => val.includes("identity.")
|
||||
});
|
||||
|
||||
const packedPermission = data.permissions ? JSON.stringify(packRules(data.permissions)) : undefined;
|
||||
if (isTemporary) {
|
||||
@ -268,9 +284,13 @@ export const identityProjectAdditionalPrivilegeServiceFactory = ({
|
||||
actorOrgId,
|
||||
actionProjectType: ActionProjectType.Any
|
||||
});
|
||||
const hasRequiredPriviledges = isAtLeastAsPrivileged(permission, identityRolePermission);
|
||||
if (!hasRequiredPriviledges)
|
||||
throw new ForbiddenRequestError({ message: "Failed to edit more privileged identity" });
|
||||
const permissionBoundary = validatePermissionBoundary(permission, identityRolePermission);
|
||||
if (!permissionBoundary.isValid)
|
||||
throw new ForbiddenRequestError({
|
||||
name: "PermissionBoundaryError",
|
||||
message: "Failed to edit more privileged identity",
|
||||
details: { missingPermissions: permissionBoundary.missingPermissions }
|
||||
});
|
||||
|
||||
const identityPrivilege = await identityProjectAdditionalPrivilegeDAL.findOne({
|
||||
slug,
|
||||
|
@ -1,11 +1,11 @@
|
||||
import { ForbiddenError } from "@casl/ability";
|
||||
import * as x509 from "@peculiar/x509";
|
||||
import crypto, { KeyObject } from "crypto";
|
||||
import ms from "ms";
|
||||
|
||||
import { ActionProjectType } from "@app/db/schemas";
|
||||
import { BadRequestError, InternalServerError, NotFoundError } from "@app/lib/errors";
|
||||
import { isValidHostname, isValidIp } from "@app/lib/ip";
|
||||
import { ms } from "@app/lib/ms";
|
||||
import { constructPemChainFromCerts } from "@app/services/certificate/certificate-fns";
|
||||
import { CertExtendedKeyUsage, CertKeyAlgorithm, CertKeyUsage } from "@app/services/certificate/certificate-types";
|
||||
import {
|
||||
|
24 backend/src/ee/services/license/licence-enums.ts Normal file
@@ -0,0 +1,24 @@
export const BillingPlanRows = {
  MemberLimit: { name: "Organization member limit", field: "memberLimit" },
  IdentityLimit: { name: "Organization identity limit", field: "identityLimit" },
  WorkspaceLimit: { name: "Project limit", field: "workspaceLimit" },
  EnvironmentLimit: { name: "Environment limit", field: "environmentLimit" },
  SecretVersioning: { name: "Secret versioning", field: "secretVersioning" },
  PitRecovery: { name: "Point in time recovery", field: "pitRecovery" },
  Rbac: { name: "RBAC", field: "rbac" },
  CustomRateLimits: { name: "Custom rate limits", field: "customRateLimits" },
  CustomAlerts: { name: "Custom alerts", field: "customAlerts" },
  AuditLogs: { name: "Audit logs", field: "auditLogs" },
  SamlSSO: { name: "SAML SSO", field: "samlSSO" },
  Hsm: { name: "Hardware Security Module (HSM)", field: "hsm" },
  OidcSSO: { name: "OIDC SSO", field: "oidcSSO" },
  SecretApproval: { name: "Secret approvals", field: "secretApproval" },
  SecretRotation: { name: "Secret rotation", field: "secretRotation" },
  InstanceUserManagement: { name: "Instance User Management", field: "instanceUserManagement" },
  ExternalKms: { name: "External KMS", field: "externalKms" }
} as const;

export const BillingPlanTableHead = {
  Allowed: { name: "Allowed" },
  Used: { name: "Used" }
} as const;
|
@ -12,10 +12,13 @@ import { getConfig } from "@app/lib/config/env";
|
||||
import { verifyOfflineLicense } from "@app/lib/crypto";
|
||||
import { NotFoundError } from "@app/lib/errors";
|
||||
import { logger } from "@app/lib/logger";
|
||||
import { TIdentityOrgDALFactory } from "@app/services/identity/identity-org-dal";
|
||||
import { TOrgDALFactory } from "@app/services/org/org-dal";
|
||||
import { TProjectDALFactory } from "@app/services/project/project-dal";
|
||||
|
||||
import { OrgPermissionActions, OrgPermissionSubjects } from "../permission/org-permission";
|
||||
import { TPermissionServiceFactory } from "../permission/permission-service";
|
||||
import { BillingPlanRows, BillingPlanTableHead } from "./licence-enums";
|
||||
import { TLicenseDALFactory } from "./license-dal";
|
||||
import { getDefaultOnPremFeatures, setupLicenseRequestWithStore } from "./license-fns";
|
||||
import {
|
||||
@ -28,6 +31,7 @@ import {
|
||||
TFeatureSet,
|
||||
TGetOrgBillInfoDTO,
|
||||
TGetOrgTaxIdDTO,
|
||||
TOfflineLicense,
|
||||
TOfflineLicenseContents,
|
||||
TOrgInvoiceDTO,
|
||||
TOrgLicensesDTO,
|
||||
@ -39,10 +43,12 @@ import {
|
||||
} from "./license-types";
|
||||
|
||||
type TLicenseServiceFactoryDep = {
|
||||
orgDAL: Pick<TOrgDALFactory, "findOrgById">;
|
||||
orgDAL: Pick<TOrgDALFactory, "findOrgById" | "countAllOrgMembers">;
|
||||
permissionService: Pick<TPermissionServiceFactory, "getOrgPermission">;
|
||||
licenseDAL: TLicenseDALFactory;
|
||||
keyStore: Pick<TKeyStoreFactory, "setItemWithExpiry" | "getItem" | "deleteItem">;
|
||||
identityOrgMembershipDAL: TIdentityOrgDALFactory;
|
||||
projectDAL: TProjectDALFactory;
|
||||
};
|
||||
|
||||
export type TLicenseServiceFactory = ReturnType<typeof licenseServiceFactory>;
|
||||
@ -50,18 +56,21 @@ export type TLicenseServiceFactory = ReturnType<typeof licenseServiceFactory>;
|
||||
const LICENSE_SERVER_CLOUD_LOGIN = "/api/auth/v1/license-server-login";
|
||||
const LICENSE_SERVER_ON_PREM_LOGIN = "/api/auth/v1/license-login";
|
||||
|
||||
const LICENSE_SERVER_CLOUD_PLAN_TTL = 30; // 30 second
|
||||
const LICENSE_SERVER_CLOUD_PLAN_TTL = 5 * 60; // 5 mins
|
||||
const FEATURE_CACHE_KEY = (orgId: string) => `infisical-cloud-plan-${orgId}`;
|
||||
|
||||
export const licenseServiceFactory = ({
|
||||
orgDAL,
|
||||
permissionService,
|
||||
licenseDAL,
|
||||
keyStore
|
||||
keyStore,
|
||||
identityOrgMembershipDAL,
|
||||
projectDAL
|
||||
}: TLicenseServiceFactoryDep) => {
|
||||
let isValidLicense = false;
|
||||
let instanceType = InstanceType.OnPrem;
|
||||
let onPremFeatures: TFeatureSet = getDefaultOnPremFeatures();
|
||||
let selfHostedLicense: TOfflineLicense | null = null;
|
||||
|
||||
const appCfg = getConfig();
|
||||
const licenseServerCloudApi = setupLicenseRequestWithStore(
|
||||
@ -125,6 +134,7 @@ export const licenseServiceFactory = ({
|
||||
instanceType = InstanceType.EnterpriseOnPremOffline;
|
||||
logger.info(`Instance type: ${InstanceType.EnterpriseOnPremOffline}`);
|
||||
isValidLicense = true;
|
||||
selfHostedLicense = contents.license;
|
||||
return;
|
||||
}
|
||||
}
|
||||
@ -142,7 +152,10 @@ export const licenseServiceFactory = ({
|
||||
try {
|
||||
if (instanceType === InstanceType.Cloud) {
|
||||
const cachedPlan = await keyStore.getItem(FEATURE_CACHE_KEY(orgId));
|
||||
if (cachedPlan) return JSON.parse(cachedPlan) as TFeatureSet;
|
||||
if (cachedPlan) {
|
||||
logger.info(`getPlan: plan fetched from cache [orgId=${orgId}] [projectId=${projectId}]`);
|
||||
return JSON.parse(cachedPlan) as TFeatureSet;
|
||||
}
|
||||
|
||||
const org = await orgDAL.findOrgById(orgId);
|
||||
if (!org) throw new NotFoundError({ message: `Organization with ID '${orgId}' not found` });
|
||||
@ -170,6 +183,8 @@ export const licenseServiceFactory = ({
|
||||
JSON.stringify(onPremFeatures)
|
||||
);
|
||||
return onPremFeatures;
|
||||
} finally {
|
||||
logger.info(`getPlan: Process done for [orgId=${orgId}] [projectId=${projectId}]`);
|
||||
}
|
||||
return onPremFeatures;
|
||||
};
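For context on the cache change above (the cloud plan TTL is bumped from 30 seconds to 5 minutes and a log line is emitted on cache hits), here is a small sketch of the read-through pattern; the key-value store interface below is an assumption for illustration, not Infisical's actual keystore API:

```ts
// Read-through cache for the cloud plan: serve from the store when present,
// otherwise fetch from the license server and cache the result with a TTL.
type KeyStore = {
  getItem(key: string): Promise<string | null>;
  setItemWithExpiry(key: string, ttlSeconds: number, value: string): Promise<void>;
};

const PLAN_TTL_SECONDS = 5 * 60; // matches the bumped 5-minute TTL in the diff
const planCacheKey = (orgId: string) => `infisical-cloud-plan-${orgId}`;

async function getCachedPlan<T>(keyStore: KeyStore, orgId: string, fetchPlan: () => Promise<T>): Promise<T> {
  const cached = await keyStore.getItem(planCacheKey(orgId));
  if (cached) return JSON.parse(cached) as T; // cache hit: skip the license server round-trip
  const plan = await fetchPlan();
  await keyStore.setItemWithExpiry(planCacheKey(orgId), PLAN_TTL_SECONDS, JSON.stringify(plan));
  return plan;
}
```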
|
||||
@ -343,10 +358,21 @@ export const licenseServiceFactory = ({
|
||||
message: `Organization with ID '${orgId}' not found`
|
||||
});
|
||||
}
|
||||
const { data } = await licenseServerCloudApi.request.get(
|
||||
`/api/license-server/v1/customers/${organization.customerId}/cloud-plan/billing`
|
||||
);
|
||||
return data;
|
||||
if (instanceType !== InstanceType.OnPrem && instanceType !== InstanceType.EnterpriseOnPremOffline) {
|
||||
const { data } = await licenseServerCloudApi.request.get(
|
||||
`/api/license-server/v1/customers/${organization.customerId}/cloud-plan/billing`
|
||||
);
|
||||
return data;
|
||||
}
|
||||
|
||||
return {
|
||||
currentPeriodStart: selfHostedLicense?.issuedAt ? Date.parse(selfHostedLicense?.issuedAt) / 1000 : undefined,
|
||||
currentPeriodEnd: selfHostedLicense?.expiresAt ? Date.parse(selfHostedLicense?.expiresAt) / 1000 : undefined,
|
||||
interval: "month",
|
||||
intervalCount: 1,
|
||||
amount: 0,
|
||||
quantity: 1
|
||||
};
|
||||
};
|
||||
|
||||
// returns org current plan feature table
|
||||
@ -360,10 +386,41 @@ export const licenseServiceFactory = ({
|
||||
message: `Organization with ID '${orgId}' not found`
|
||||
});
|
||||
}
|
||||
const { data } = await licenseServerCloudApi.request.get(
|
||||
`/api/license-server/v1/customers/${organization.customerId}/cloud-plan/table`
|
||||
if (instanceType !== InstanceType.OnPrem && instanceType !== InstanceType.EnterpriseOnPremOffline) {
|
||||
const { data } = await licenseServerCloudApi.request.get(
|
||||
`/api/license-server/v1/customers/${organization.customerId}/cloud-plan/table`
|
||||
);
|
||||
return data;
|
||||
}
|
||||
|
||||
const mappedRows = await Promise.all(
|
||||
Object.values(BillingPlanRows).map(async ({ name, field }: { name: string; field: string }) => {
|
||||
const allowed = onPremFeatures[field as keyof TFeatureSet];
|
||||
let used = "-";
|
||||
|
||||
if (field === BillingPlanRows.MemberLimit.field) {
|
||||
const orgMemberships = await orgDAL.countAllOrgMembers(orgId);
|
||||
used = orgMemberships.toString();
|
||||
} else if (field === BillingPlanRows.WorkspaceLimit.field) {
|
||||
const projects = await projectDAL.find({ orgId });
|
||||
used = projects.length.toString();
|
||||
} else if (field === BillingPlanRows.IdentityLimit.field) {
|
||||
const identities = await identityOrgMembershipDAL.countAllOrgIdentities({ orgId });
|
||||
used = identities.toString();
|
||||
}
|
||||
|
||||
return {
|
||||
name,
|
||||
allowed,
|
||||
used
|
||||
};
|
||||
})
|
||||
);
|
||||
return data;
|
||||
|
||||
return {
|
||||
head: Object.values(BillingPlanTableHead),
|
||||
rows: mappedRows
|
||||
};
|
||||
};
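The new on-prem branch above builds the billing table locally from `BillingPlanRows` instead of calling the cloud license server. A condensed, self-contained sketch of that mapping; the count callbacks stand in for the DAL queries and only three rows are shown:

```ts
// Build the { head, rows } plan table for self-hosted instances from the static
// row definitions plus a few usage counts queried from the database.
type FeatureSet = Record<string, number | boolean | null>;
type CountFns = { members(): Promise<number>; projects(): Promise<number>; identities(): Promise<number> };

const Rows = {
  MemberLimit: { name: "Organization member limit", field: "memberLimit" },
  WorkspaceLimit: { name: "Project limit", field: "workspaceLimit" },
  IdentityLimit: { name: "Organization identity limit", field: "identityLimit" }
} as const;

async function buildOnPremPlanTable(features: FeatureSet, counts: CountFns) {
  const rows = await Promise.all(
    Object.values(Rows).map(async ({ name, field }) => {
      let used = "-";
      if (field === Rows.MemberLimit.field) used = String(await counts.members());
      else if (field === Rows.WorkspaceLimit.field) used = String(await counts.projects());
      else if (field === Rows.IdentityLimit.field) used = String(await counts.identities());
      return { name, allowed: features[field] ?? null, used };
    })
  );
  return { head: [{ name: "Allowed" }, { name: "Used" }], rows };
}
```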
|
||||
|
||||
const getOrgBillingDetails = async ({ orgId, actor, actorId, actorAuthMethod, actorOrgId }: TGetOrgBillInfoDTO) => {
|
||||
|
@ -32,6 +32,10 @@ export enum OrgPermissionAdminConsoleAction {
|
||||
AccessAllProjects = "access-all-projects"
|
||||
}
|
||||
|
||||
export enum OrgPermissionSecretShareAction {
|
||||
ManageSettings = "manage-settings"
|
||||
}
|
||||
|
||||
export enum OrgPermissionGatewayActions {
|
||||
// is there a better word for this. This mean can an identity be a gateway
|
||||
CreateGateways = "create-gateways",
|
||||
@ -59,7 +63,8 @@ export enum OrgPermissionSubjects {
|
||||
ProjectTemplates = "project-templates",
|
||||
AppConnections = "app-connections",
|
||||
Kmip = "kmip",
|
||||
Gateway = "gateway"
|
||||
Gateway = "gateway",
|
||||
SecretShare = "secret-share"
|
||||
}
|
||||
|
||||
export type AppConnectionSubjectFields = {
|
||||
@ -91,7 +96,8 @@ export type OrgPermissionSet =
|
||||
)
|
||||
]
|
||||
| [OrgPermissionAdminConsoleAction, OrgPermissionSubjects.AdminConsole]
|
||||
| [OrgPermissionKmipActions, OrgPermissionSubjects.Kmip];
|
||||
| [OrgPermissionKmipActions, OrgPermissionSubjects.Kmip]
|
||||
| [OrgPermissionSecretShareAction, OrgPermissionSubjects.SecretShare];
|
||||
|
||||
const AppConnectionConditionSchema = z
|
||||
.object({
|
||||
@ -185,6 +191,12 @@ export const OrgPermissionSchema = z.discriminatedUnion("subject", [
|
||||
"Describe what action an entity can take."
|
||||
)
|
||||
}),
|
||||
z.object({
|
||||
subject: z.literal(OrgPermissionSubjects.SecretShare).describe("The entity this permission pertains to."),
|
||||
action: CASL_ACTION_SCHEMA_NATIVE_ENUM(OrgPermissionSecretShareAction).describe(
|
||||
"Describe what action an entity can take."
|
||||
)
|
||||
}),
|
||||
z.object({
|
||||
subject: z.literal(OrgPermissionSubjects.Kmip).describe("The entity this permission pertains to."),
|
||||
action: CASL_ACTION_SCHEMA_NATIVE_ENUM(OrgPermissionKmipActions).describe(
|
||||
@ -292,6 +304,8 @@ const buildAdminPermission = () => {
|
||||
// the proxy assignment is temporary in order to prevent "more privilege" error during role assignment to MI
|
||||
can(OrgPermissionKmipActions.Proxy, OrgPermissionSubjects.Kmip);
|
||||
|
||||
can(OrgPermissionSecretShareAction.ManageSettings, OrgPermissionSubjects.SecretShare);
|
||||
|
||||
return rules;
|
||||
};
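The `SecretShare` subject added above follows the usual pattern for introducing an org-level permission: declare an action enum and subject string, extend the permission tuple union, then grant the action in the role builder. A minimal sketch with plain @casl/ability; the enum and subject names mirror the diff, the builder is simplified:

```ts
import { AbilityBuilder, createMongoAbility } from "@casl/ability";

enum SecretShareAction {
  ManageSettings = "manage-settings"
}
const SecretShareSubject = "secret-share";

// Grant the new action as part of a role's rule set, as buildAdminPermission does.
const { can, rules } = new AbilityBuilder(createMongoAbility);
can(SecretShareAction.ManageSettings, SecretShareSubject);
console.log(rules); // [{ action: "manage-settings", subject: "secret-share" }]
```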
|
||||
|
||||
|
@ -1,7 +1,109 @@
|
||||
/* eslint-disable no-nested-ternary */
|
||||
import { ForbiddenError, MongoAbility, PureAbility, subject } from "@casl/ability";
|
||||
import { z } from "zod";
|
||||
|
||||
import { TOrganizations } from "@app/db/schemas";
|
||||
import { ForbiddenRequestError, UnauthorizedError } from "@app/lib/errors";
|
||||
import { BadRequestError, ForbiddenRequestError, UnauthorizedError } from "@app/lib/errors";
|
||||
import { ActorAuthMethod, AuthMethod } from "@app/services/auth/auth-type";
|
||||
|
||||
import {
|
||||
ProjectPermissionSecretActions,
|
||||
ProjectPermissionSet,
|
||||
ProjectPermissionSub,
|
||||
ProjectPermissionV2Schema,
|
||||
SecretSubjectFields
|
||||
} from "./project-permission";
|
||||
|
||||
export function throwIfMissingSecretReadValueOrDescribePermission(
|
||||
permission: MongoAbility<ProjectPermissionSet> | PureAbility,
|
||||
action: Extract<
|
||||
ProjectPermissionSecretActions,
|
||||
ProjectPermissionSecretActions.ReadValue | ProjectPermissionSecretActions.DescribeSecret
|
||||
>,
|
||||
subjectFields?: SecretSubjectFields
|
||||
) {
|
||||
try {
|
||||
if (subjectFields) {
|
||||
ForbiddenError.from(permission).throwUnlessCan(
|
||||
ProjectPermissionSecretActions.DescribeAndReadValue,
|
||||
subject(ProjectPermissionSub.Secrets, subjectFields)
|
||||
);
|
||||
} else {
|
||||
ForbiddenError.from(permission).throwUnlessCan(
|
||||
ProjectPermissionSecretActions.DescribeAndReadValue,
|
||||
ProjectPermissionSub.Secrets
|
||||
);
|
||||
}
|
||||
} catch {
|
||||
if (subjectFields) {
|
||||
ForbiddenError.from(permission).throwUnlessCan(action, subject(ProjectPermissionSub.Secrets, subjectFields));
|
||||
} else {
|
||||
ForbiddenError.from(permission).throwUnlessCan(action, ProjectPermissionSub.Secrets);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export function hasSecretReadValueOrDescribePermission(
|
||||
permission: MongoAbility<ProjectPermissionSet>,
|
||||
action: Extract<
|
||||
ProjectPermissionSecretActions,
|
||||
ProjectPermissionSecretActions.DescribeSecret | ProjectPermissionSecretActions.ReadValue
|
||||
>,
|
||||
subjectFields?: SecretSubjectFields
|
||||
) {
|
||||
let canNewPermission = false;
|
||||
let canOldPermission = false;
|
||||
|
||||
if (subjectFields) {
|
||||
canNewPermission = permission.can(action, subject(ProjectPermissionSub.Secrets, subjectFields));
|
||||
canOldPermission = permission.can(
|
||||
ProjectPermissionSecretActions.DescribeAndReadValue,
|
||||
subject(ProjectPermissionSub.Secrets, subjectFields)
|
||||
);
|
||||
} else {
|
||||
canNewPermission = permission.can(action, ProjectPermissionSub.Secrets);
|
||||
canOldPermission = permission.can(
|
||||
ProjectPermissionSecretActions.DescribeAndReadValue,
|
||||
ProjectPermissionSub.Secrets
|
||||
);
|
||||
}
|
||||
|
||||
return canNewPermission || canOldPermission;
|
||||
}
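`hasSecretReadValueOrDescribePermission` above accepts either the new granular action or the legacy combined `read` action, so older roles keep working. A stripped-down sketch with plain @casl/ability showing why a legacy role still passes; the action and subject strings are illustrative:

```ts
import { AbilityBuilder, createMongoAbility } from "@casl/ability";

const LEGACY_READ = "read"; // the combined describe-and-read action kept for old roles

// Works against anything exposing CASL's `can`; names here are illustrative only.
const canReadOrDescribe = (
  ability: { can: (action: string, subjectType: string) => boolean },
  action: "describeSecret" | "readValue"
) => ability.can(action, "Secrets") || ability.can(LEGACY_READ, "Secrets");

// A legacy role that only ever granted the combined "read" action still passes the new check.
const { can, build } = new AbilityBuilder(createMongoAbility);
can(LEGACY_READ, "Secrets");
console.log(canReadOrDescribe(build(), "readValue")); // true
```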
|
||||
|
||||
const OptionalArrayPermissionSchema = ProjectPermissionV2Schema.array().optional();
|
||||
export function checkForInvalidPermissionCombination(permissions: z.infer<typeof OptionalArrayPermissionSchema>) {
|
||||
if (!permissions) return;
|
||||
|
||||
for (const permission of permissions) {
|
||||
if (permission.subject === ProjectPermissionSub.Secrets) {
|
||||
if (permission.action.includes(ProjectPermissionSecretActions.DescribeAndReadValue)) {
|
||||
const hasReadValue = permission.action.includes(ProjectPermissionSecretActions.ReadValue);
|
||||
const hasDescribeSecret = permission.action.includes(ProjectPermissionSecretActions.DescribeSecret);
|
||||
|
||||
// eslint-disable-next-line no-continue
|
||||
if (!hasReadValue && !hasDescribeSecret) continue;
|
||||
|
||||
const hasBothDescribeAndReadValue = hasReadValue && hasDescribeSecret;
|
||||
|
||||
throw new BadRequestError({
|
||||
message: `You have selected Read, and ${
|
||||
hasBothDescribeAndReadValue
|
||||
? "both Read Value and Describe Secret"
|
||||
: hasReadValue
|
||||
? "Read Value"
|
||||
: hasDescribeSecret
|
||||
? "Describe Secret"
|
||||
: ""
|
||||
}. You cannot select Read Value or Describe Secret if you have selected Read. The Read permission is a legacy action which has been replaced by Describe Secret and Read Value.`
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return true;
|
||||
}
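`checkForInvalidPermissionCombination` above rejects policies that mix the legacy combined `read` action with the granular actions that replace it. A tiny illustrative guard over plain data, not the real zod-typed input:

```ts
// Flag a secrets rule that selects legacy "read" together with a granular replacement.
type SecretRule = { subject: "secrets"; action: string[] };

const mixesLegacyAndGranularRead = (rule: SecretRule) =>
  rule.action.includes("read") && (rule.action.includes("readValue") || rule.action.includes("describeSecret"));

console.log(mixesLegacyAndGranularRead({ subject: "secrets", action: ["read", "readValue"] })); // true -> reject
console.log(mixesLegacyAndGranularRead({ subject: "secrets", action: ["describeSecret", "readValue"] })); // false -> ok
```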
|
||||
|
||||
function isAuthMethodSaml(actorAuthMethod: ActorAuthMethod) {
|
||||
if (!actorAuthMethod) return false;
|
||||
|
||||
@ -29,12 +131,12 @@ function validateOrgSSO(actorAuthMethod: ActorAuthMethod, isOrgSsoEnforced: TOrg
|
||||
}
|
||||
}
|
||||
|
||||
const escapeHandlebarsMissingMetadata = (obj: Record<string, string>) => {
const escapeHandlebarsMissingDict = (obj: Record<string, string>, key: string) => {
  const handler = {
    get(target: Record<string, string>, prop: string) {
      if (!(prop in target)) {
      if (!Object.hasOwn(target, prop)) {
        // eslint-disable-next-line no-param-reassign
        target[prop] = `{{identity.metadata.${prop}}}`; // Add missing key as an "own" property
        target[prop] = `{{${key}.${prop}}}`; // Add missing key as an "own" property
      }
      return target[prop];
    }
@@ -43,4 +145,4 @@ const escapeHandlebarsMissingMetadata = (obj: Record<string, string>) => {
  return new Proxy(obj, handler);
};

export { escapeHandlebarsMissingMetadata, isAuthMethodSaml, validateOrgSSO };
export { escapeHandlebarsMissingDict, isAuthMethodSaml, validateOrgSSO };
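To see what the renamed, prefix-parameterised helper buys, here is how the proxied dict behaves when a rules template references a key the caller's metadata does not contain: handlebars then emits the placeholder literally instead of dropping it. The template string below is a made-up example:

```ts
import handlebars from "handlebars";

// Same proxy idea as above: unknown keys come back as literal `{{<prefix>.<key>}}` tokens.
const escapeMissing = (obj: Record<string, string>, prefix: string) =>
  new Proxy(obj, {
    get(target, prop) {
      const p = String(prop);
      if (!Object.hasOwn(target, p)) target[p] = `{{${prefix}.${p}}}`;
      return target[p];
    }
  });

const template = handlebars.compile('{"secretPath": "/{{identity.metadata.team}}/{{identity.metadata.region}}"}', {
  data: false
});
const rendered = template({ identity: { metadata: escapeMissing({ team: "payments" }, "identity.metadata") } });
console.log(rendered); // {"secretPath": "/payments/{{identity.metadata.region}}"}
```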
|
||||
|
@ -1,5 +1,6 @@
|
||||
import { createMongoAbility, MongoAbility, RawRuleOf } from "@casl/ability";
|
||||
import { PackRule, unpackRules } from "@casl/ability/extra";
|
||||
import { requestContext } from "@fastify/request-context";
|
||||
import { MongoQuery } from "@ucast/mongo2js";
|
||||
import handlebars from "handlebars";
|
||||
|
||||
@ -22,7 +23,7 @@ import { TServiceTokenDALFactory } from "@app/services/service-token/service-tok
|
||||
|
||||
import { orgAdminPermissions, orgMemberPermissions, orgNoAccessPermissions, OrgPermissionSet } from "./org-permission";
|
||||
import { TPermissionDALFactory } from "./permission-dal";
|
||||
import { escapeHandlebarsMissingMetadata, validateOrgSSO } from "./permission-fns";
|
||||
import { escapeHandlebarsMissingDict, validateOrgSSO } from "./permission-fns";
|
||||
import {
|
||||
TBuildOrgPermissionDTO,
|
||||
TBuildProjectPermissionDTO,
|
||||
@ -243,13 +244,13 @@ export const permissionServiceFactory = ({
|
||||
|
||||
const rules = buildProjectPermissionRules(rolePermissions.concat(additionalPrivileges));
|
||||
const templatedRules = handlebars.compile(JSON.stringify(rules), { data: false });
|
||||
const metadataKeyValuePair = escapeHandlebarsMissingMetadata(
|
||||
objectify(
|
||||
userProjectPermission.metadata,
|
||||
(i) => i.key,
|
||||
(i) => i.value
|
||||
)
|
||||
const unescapedMetadata = objectify(
|
||||
userProjectPermission.metadata,
|
||||
(i) => i.key,
|
||||
(i) => i.value
|
||||
);
|
||||
const metadataKeyValuePair = escapeHandlebarsMissingDict(unescapedMetadata, "identity.metadata");
|
||||
requestContext.set("identityPermissionMetadata", { metadata: unescapedMetadata });
|
||||
const interpolateRules = templatedRules(
|
||||
{
|
||||
identity: {
|
||||
@ -317,20 +318,26 @@ export const permissionServiceFactory = ({
|
||||
|
||||
const rules = buildProjectPermissionRules(rolePermissions.concat(additionalPrivileges));
|
||||
const templatedRules = handlebars.compile(JSON.stringify(rules), { data: false });
|
||||
const metadataKeyValuePair = escapeHandlebarsMissingMetadata(
|
||||
objectify(
|
||||
identityProjectPermission.metadata,
|
||||
(i) => i.key,
|
||||
(i) => i.value
|
||||
)
|
||||
const unescapedIdentityAuthInfo = requestContext.get("identityAuthInfo");
|
||||
const unescapedMetadata = objectify(
|
||||
identityProjectPermission.metadata,
|
||||
(i) => i.key,
|
||||
(i) => i.value
|
||||
);
|
||||
const identityAuthInfo =
|
||||
unescapedIdentityAuthInfo?.identityId === identityId && unescapedIdentityAuthInfo
|
||||
? escapeHandlebarsMissingDict(unescapedIdentityAuthInfo as never, "identity.auth")
|
||||
: {};
|
||||
const metadataKeyValuePair = escapeHandlebarsMissingDict(unescapedMetadata, "identity.metadata");
|
||||
|
||||
requestContext.set("identityPermissionMetadata", { metadata: unescapedMetadata, auth: unescapedIdentityAuthInfo });
|
||||
const interpolateRules = templatedRules(
|
||||
{
|
||||
identity: {
|
||||
id: identityProjectPermission.identityId,
|
||||
username: identityProjectPermission.username,
|
||||
metadata: metadataKeyValuePair
|
||||
metadata: metadataKeyValuePair,
|
||||
auth: identityAuthInfo
|
||||
}
|
||||
},
|
||||
{ data: false }
|
||||
@ -424,12 +431,13 @@ export const permissionServiceFactory = ({
|
||||
|
||||
const rules = buildProjectPermissionRules(rolePermissions.concat(additionalPrivileges));
|
||||
const templatedRules = handlebars.compile(JSON.stringify(rules), { data: false });
|
||||
const metadataKeyValuePair = escapeHandlebarsMissingMetadata(
|
||||
const metadataKeyValuePair = escapeHandlebarsMissingDict(
|
||||
objectify(
|
||||
userProjectPermission.metadata,
|
||||
(i) => i.key,
|
||||
(i) => i.value
|
||||
)
|
||||
),
|
||||
"identity.metadata"
|
||||
);
|
||||
const interpolateRules = templatedRules(
|
||||
{
|
||||
@ -469,14 +477,14 @@ export const permissionServiceFactory = ({
|
||||
|
||||
const rules = buildProjectPermissionRules(rolePermissions.concat(additionalPrivileges));
|
||||
const templatedRules = handlebars.compile(JSON.stringify(rules), { data: false });
|
||||
const metadataKeyValuePair = escapeHandlebarsMissingMetadata(
|
||||
const metadataKeyValuePair = escapeHandlebarsMissingDict(
|
||||
objectify(
|
||||
identityProjectPermission.metadata,
|
||||
(i) => i.key,
|
||||
(i) => i.value
|
||||
)
|
||||
),
|
||||
"identity.metadata"
|
||||
);
|
||||
|
||||
const interpolateRules = templatedRules(
|
||||
{
|
||||
identity: {
|
||||
|
@ -5,22 +5,6 @@ import { PermissionConditionOperators } from "@app/lib/casl";
|
||||
|
||||
export const PermissionConditionSchema = {
|
||||
[PermissionConditionOperators.$IN]: z.string().trim().min(1).array(),
|
||||
[PermissionConditionOperators.$ALL]: z.string().trim().min(1).array(),
|
||||
[PermissionConditionOperators.$REGEX]: z
|
||||
.string()
|
||||
.min(1)
|
||||
.refine(
|
||||
(el) => {
|
||||
try {
|
||||
// eslint-disable-next-line no-new
|
||||
new RegExp(el);
|
||||
return true;
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
},
|
||||
{ message: "Invalid regex pattern" }
|
||||
),
|
||||
[PermissionConditionOperators.$EQ]: z.string().min(1),
|
||||
[PermissionConditionOperators.$NEQ]: z.string().min(1),
|
||||
[PermissionConditionOperators.$GLOB]: z
|
||||
|
@ -17,6 +17,15 @@ export enum ProjectPermissionActions {
|
||||
Delete = "delete"
|
||||
}
|
||||
|
||||
export enum ProjectPermissionSecretActions {
|
||||
DescribeAndReadValue = "read",
|
||||
DescribeSecret = "describeSecret",
|
||||
ReadValue = "readValue",
|
||||
Create = "create",
|
||||
Edit = "edit",
|
||||
Delete = "delete"
|
||||
}
|
||||
|
||||
export enum ProjectPermissionCmekActions {
|
||||
Read = "read",
|
||||
Create = "create",
|
||||
@ -115,7 +124,7 @@ export type IdentityManagementSubjectFields = {
|
||||
|
||||
export type ProjectPermissionSet =
|
||||
| [
|
||||
ProjectPermissionActions,
|
||||
ProjectPermissionSecretActions,
|
||||
ProjectPermissionSub.Secrets | (ForcedSubject<ProjectPermissionSub.Secrets> & SecretSubjectFields)
|
||||
]
|
||||
| [
|
||||
@ -429,6 +438,7 @@ const GeneralPermissionSchema = [
|
||||
})
|
||||
];
|
||||
|
||||
// Do not update this schema anymore, as it's kept purely for backwards compatability. Update V2 schema only.
|
||||
export const ProjectPermissionV1Schema = z.discriminatedUnion("subject", [
|
||||
z.object({
|
||||
subject: z.literal(ProjectPermissionSub.Secrets).describe("The entity this permission pertains to."),
|
||||
@ -460,7 +470,7 @@ export const ProjectPermissionV2Schema = z.discriminatedUnion("subject", [
|
||||
z.object({
|
||||
subject: z.literal(ProjectPermissionSub.Secrets).describe("The entity this permission pertains to."),
|
||||
inverted: z.boolean().optional().describe("Whether rule allows or forbids."),
|
||||
action: CASL_ACTION_SCHEMA_NATIVE_ENUM(ProjectPermissionActions).describe(
|
||||
action: CASL_ACTION_SCHEMA_NATIVE_ENUM(ProjectPermissionSecretActions).describe(
|
||||
"Describe what action an entity can take."
|
||||
),
|
||||
conditions: SecretConditionV2Schema.describe(
|
||||
@ -517,7 +527,6 @@ const buildAdminPermissionRules = () => {
|
||||
|
||||
// Admins get full access to everything
|
||||
[
|
||||
ProjectPermissionSub.Secrets,
|
||||
ProjectPermissionSub.SecretFolders,
|
||||
ProjectPermissionSub.SecretImports,
|
||||
ProjectPermissionSub.SecretApproval,
|
||||
@ -550,10 +559,22 @@ const buildAdminPermissionRules = () => {
|
||||
ProjectPermissionActions.Create,
|
||||
ProjectPermissionActions.Delete
|
||||
],
|
||||
el as ProjectPermissionSub
|
||||
el
|
||||
);
|
||||
});
|
||||
|
||||
can(
|
||||
[
|
||||
ProjectPermissionSecretActions.DescribeAndReadValue,
|
||||
ProjectPermissionSecretActions.DescribeSecret,
|
||||
ProjectPermissionSecretActions.ReadValue,
|
||||
ProjectPermissionSecretActions.Create,
|
||||
ProjectPermissionSecretActions.Edit,
|
||||
ProjectPermissionSecretActions.Delete
|
||||
],
|
||||
ProjectPermissionSub.Secrets
|
||||
);
|
||||
|
||||
can(
|
||||
[
|
||||
ProjectPermissionDynamicSecretActions.ReadRootCredential,
|
||||
@ -613,10 +634,12 @@ const buildMemberPermissionRules = () => {
|
||||
|
||||
can(
|
||||
[
|
||||
ProjectPermissionActions.Read,
|
||||
ProjectPermissionActions.Edit,
|
||||
ProjectPermissionActions.Create,
|
||||
ProjectPermissionActions.Delete
|
||||
ProjectPermissionSecretActions.DescribeAndReadValue,
|
||||
ProjectPermissionSecretActions.DescribeSecret,
|
||||
ProjectPermissionSecretActions.ReadValue,
|
||||
ProjectPermissionSecretActions.Edit,
|
||||
ProjectPermissionSecretActions.Create,
|
||||
ProjectPermissionSecretActions.Delete
|
||||
],
|
||||
ProjectPermissionSub.Secrets
|
||||
);
|
||||
@ -788,7 +811,9 @@ export const projectMemberPermissions = buildMemberPermissionRules();
|
||||
const buildViewerPermissionRules = () => {
|
||||
const { can, rules } = new AbilityBuilder<MongoAbility<ProjectPermissionSet>>(createMongoAbility);
|
||||
|
||||
can(ProjectPermissionActions.Read, ProjectPermissionSub.Secrets);
|
||||
can(ProjectPermissionSecretActions.DescribeAndReadValue, ProjectPermissionSub.Secrets);
|
||||
can(ProjectPermissionSecretActions.DescribeSecret, ProjectPermissionSub.Secrets);
|
||||
can(ProjectPermissionSecretActions.ReadValue, ProjectPermissionSub.Secrets);
|
||||
can(ProjectPermissionActions.Read, ProjectPermissionSub.SecretFolders);
|
||||
can(ProjectPermissionDynamicSecretActions.ReadRootCredential, ProjectPermissionSub.DynamicSecrets);
|
||||
can(ProjectPermissionActions.Read, ProjectPermissionSub.SecretImports);
|
||||
@ -837,7 +862,6 @@ export const buildServiceTokenProjectPermission = (
|
||||
(subject) => {
|
||||
if (canWrite) {
|
||||
can(ProjectPermissionActions.Edit, subject, {
|
||||
// TODO: @Akhi
|
||||
// @ts-expect-error type
|
||||
secretPath: { $glob: secretPath },
|
||||
environment
|
||||
@ -916,7 +940,17 @@ export const backfillPermissionV1SchemaToV2Schema = (
|
||||
subject: ProjectPermissionSub.SecretImports as const
|
||||
}));
|
||||
|
||||
const secretPolicies = secretSubjects.map(({ subject, ...el }) => ({
|
||||
subject: ProjectPermissionSub.Secrets as const,
|
||||
...el,
|
||||
action:
|
||||
el.action.includes(ProjectPermissionActions.Read) && !el.action.includes(ProjectPermissionSecretActions.ReadValue)
|
||||
? el.action.concat(ProjectPermissionSecretActions.ReadValue)
|
||||
: el.action
|
||||
}));
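The backfill above widens any legacy V1 secrets rule that granted `read` so it also carries the new `readValue` action, keeping old roles readable under the V2 action set. A standalone sketch of that transformation with a simplified rule shape:

```ts
// Simplified backfill: legacy rules that include "read" but predate "readValue"
// get the granular action appended; everything else passes through unchanged.
type V1SecretRule = { subject: "secrets"; action: string[]; conditions?: Record<string, unknown> };

const backfillReadValue = (rules: V1SecretRule[]) =>
  rules.map((rule) =>
    rule.action.includes("read") && !rule.action.includes("readValue")
      ? { ...rule, action: [...rule.action, "readValue"] }
      : rule
  );

console.log(backfillReadValue([{ subject: "secrets", action: ["read", "edit"] }]));
// [{ subject: "secrets", action: ["read", "edit", "readValue"] }]
```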
|
||||
|
||||
const secretFolderPolicies = secretSubjects
|
||||
|
||||
.map(({ subject, ...el }) => ({
|
||||
...el,
|
||||
// read permission is not needed anymore
|
||||
@ -958,6 +992,7 @@ export const backfillPermissionV1SchemaToV2Schema = (
|
||||
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
|
||||
// @ts-ignore-error this is valid ts
|
||||
secretImportPolicies,
|
||||
secretPolicies,
|
||||
dynamicSecretPolicies,
|
||||
hasReadOnlyFolder.length ? [] : secretFolderPolicies
|
||||
);
|
||||
|
@ -1,10 +1,11 @@
|
||||
import { ForbiddenError, MongoAbility, RawRuleOf } from "@casl/ability";
|
||||
import { PackRule, packRules, unpackRules } from "@casl/ability/extra";
|
||||
import ms from "ms";
|
||||
|
||||
import { ActionProjectType, TableName } from "@app/db/schemas";
|
||||
import { isAtLeastAsPrivileged } from "@app/lib/casl";
|
||||
import { validatePermissionBoundary } from "@app/lib/casl/boundary";
|
||||
import { BadRequestError, ForbiddenRequestError, NotFoundError } from "@app/lib/errors";
|
||||
import { ms } from "@app/lib/ms";
|
||||
import { validateHandlebarTemplate } from "@app/lib/template/validate-handlebars";
|
||||
import { UnpackedPermissionSchema } from "@app/server/routes/sanitizedSchema/permission";
|
||||
import { ActorType } from "@app/services/auth/auth-type";
|
||||
import { TProjectMembershipDALFactory } from "@app/services/project-membership/project-membership-dal";
|
||||
@ -76,9 +77,13 @@ export const projectUserAdditionalPrivilegeServiceFactory = ({
|
||||
// we need to validate that the privilege given is not higher than the assigning users permission
|
||||
// @ts-expect-error this is expected error because of one being really accurate rule definition other being a bit more broader. Both are valid casl rules
|
||||
targetUserPermission.update(targetUserPermission.rules.concat(customPermission));
|
||||
const hasRequiredPriviledges = isAtLeastAsPrivileged(permission, targetUserPermission);
|
||||
if (!hasRequiredPriviledges)
|
||||
throw new ForbiddenRequestError({ message: "Failed to update more privileged identity" });
|
||||
const permissionBoundary = validatePermissionBoundary(permission, targetUserPermission);
|
||||
if (!permissionBoundary.isValid)
|
||||
throw new ForbiddenRequestError({
|
||||
name: "PermissionBoundaryError",
|
||||
message: "Failed to update more privileged user",
|
||||
details: { missingPermissions: permissionBoundary.missingPermissions }
|
||||
});
|
||||
|
||||
const existingSlug = await projectUserAdditionalPrivilegeDAL.findOne({
|
||||
slug,
|
||||
@ -88,6 +93,10 @@ export const projectUserAdditionalPrivilegeServiceFactory = ({
|
||||
if (existingSlug)
|
||||
throw new BadRequestError({ message: `Additional privilege with provided slug ${slug} already exists` });
|
||||
|
||||
validateHandlebarTemplate("User Additional Privilege Create", JSON.stringify(customPermission || []), {
|
||||
allowedExpressions: (val) => val.includes("identity.")
|
||||
});
|
||||
|
||||
const packedPermission = JSON.stringify(packRules(customPermission));
|
||||
if (!dto.isTemporary) {
|
||||
const additionalPrivilege = await projectUserAdditionalPrivilegeDAL.create({
|
||||
@ -163,9 +172,13 @@ export const projectUserAdditionalPrivilegeServiceFactory = ({
|
||||
// we need to validate that the privilege given is not higher than the assigning users permission
|
||||
// @ts-expect-error this is expected error because of one being really accurate rule definition other being a bit more broader. Both are valid casl rules
|
||||
targetUserPermission.update(targetUserPermission.rules.concat(dto.permissions || []));
|
||||
const hasRequiredPriviledges = isAtLeastAsPrivileged(permission, targetUserPermission);
|
||||
if (!hasRequiredPriviledges)
|
||||
throw new ForbiddenRequestError({ message: "Failed to update more privileged identity" });
|
||||
const permissionBoundary = validatePermissionBoundary(permission, targetUserPermission);
|
||||
if (!permissionBoundary.isValid)
|
||||
throw new ForbiddenRequestError({
|
||||
name: "PermissionBoundaryError",
|
||||
message: "Failed to update more privileged identity",
|
||||
details: { missingPermissions: permissionBoundary.missingPermissions }
|
||||
});
|
||||
|
||||
if (dto?.slug) {
|
||||
const existingSlug = await projectUserAdditionalPrivilegeDAL.findOne({
|
||||
@ -177,6 +190,10 @@ export const projectUserAdditionalPrivilegeServiceFactory = ({
|
||||
throw new BadRequestError({ message: `Additional privilege with provided slug ${dto.slug} already exists` });
|
||||
}
|
||||
|
||||
validateHandlebarTemplate("User Additional Privilege Update", JSON.stringify(dto.permissions || []), {
|
||||
allowedExpressions: (val) => val.includes("identity.")
|
||||
});
|
||||
|
||||
const isTemporary = typeof dto?.isTemporary !== "undefined" ? dto.isTemporary : userPrivilege.isTemporary;
|
||||
|
||||
const packedPermission = dto.permissions && JSON.stringify(packRules(dto.permissions));
|
||||
|
@ -63,7 +63,7 @@ export const samlConfigServiceFactory = ({
|
||||
kmsService
|
||||
}: TSamlConfigServiceFactoryDep) => {
|
||||
const createSamlCfg = async ({
|
||||
cert,
|
||||
idpCert,
|
||||
actor,
|
||||
actorAuthMethod,
|
||||
actorOrgId,
|
||||
@ -93,9 +93,9 @@ export const samlConfigServiceFactory = ({
|
||||
orgId,
|
||||
authProvider,
|
||||
isActive,
|
||||
encryptedSamlIssuer: encryptor({ plainText: Buffer.from(issuer) }).cipherTextBlob,
|
||||
encryptedSamlCertificate: encryptor({ plainText: Buffer.from(idpCert) }).cipherTextBlob,
|
||||
encryptedSamlEntryPoint: encryptor({ plainText: Buffer.from(entryPoint) }).cipherTextBlob,
|
||||
encryptedSamlCertificate: encryptor({ plainText: Buffer.from(cert) }).cipherTextBlob
|
||||
encryptedSamlIssuer: encryptor({ plainText: Buffer.from(issuer) }).cipherTextBlob
|
||||
});
|
||||
|
||||
return samlConfig;
|
||||
@ -106,7 +106,7 @@ export const samlConfigServiceFactory = ({
|
||||
actor,
|
||||
actorOrgId,
|
||||
actorAuthMethod,
|
||||
cert,
|
||||
idpCert,
|
||||
actorId,
|
||||
issuer,
|
||||
isActive,
|
||||
@ -136,8 +136,8 @@ export const samlConfigServiceFactory = ({
|
||||
updateQuery.encryptedSamlIssuer = encryptor({ plainText: Buffer.from(issuer) }).cipherTextBlob;
|
||||
}
|
||||
|
||||
if (cert !== undefined) {
|
||||
updateQuery.encryptedSamlCertificate = encryptor({ plainText: Buffer.from(cert) }).cipherTextBlob;
|
||||
if (idpCert !== undefined) {
|
||||
updateQuery.encryptedSamlCertificate = encryptor({ plainText: Buffer.from(idpCert) }).cipherTextBlob;
|
||||
}
|
||||
|
||||
const [ssoConfig] = await samlConfigDAL.update({ orgId }, updateQuery);
|
||||
|
@ -15,7 +15,7 @@ export type TCreateSamlCfgDTO = {
|
||||
isActive: boolean;
|
||||
entryPoint: string;
|
||||
issuer: string;
|
||||
cert: string;
|
||||
idpCert: string;
|
||||
} & TOrgPermission;
|
||||
|
||||
export type TUpdateSamlCfgDTO = Partial<{
|
||||
@ -23,7 +23,7 @@ export type TUpdateSamlCfgDTO = Partial<{
|
||||
isActive: boolean;
|
||||
entryPoint: string;
|
||||
issuer: string;
|
||||
cert: string;
|
||||
idpCert: string;
|
||||
}> &
|
||||
TOrgPermission;
|
||||
|
||||
|
@ -62,7 +62,8 @@ export const secretApprovalPolicyServiceFactory = ({
|
||||
projectId,
|
||||
secretPath,
|
||||
environment,
|
||||
enforcementLevel
|
||||
enforcementLevel,
|
||||
allowedSelfApprovals
|
||||
}: TCreateSapDTO) => {
|
||||
const groupApprovers = approvers
|
||||
?.filter((approver) => approver.type === ApproverType.Group)
|
||||
@ -113,7 +114,8 @@ export const secretApprovalPolicyServiceFactory = ({
|
||||
approvals,
|
||||
secretPath,
|
||||
name,
|
||||
enforcementLevel
|
||||
enforcementLevel,
|
||||
allowedSelfApprovals
|
||||
},
|
||||
tx
|
||||
);
|
||||
@ -172,7 +174,8 @@ export const secretApprovalPolicyServiceFactory = ({
|
||||
actorAuthMethod,
|
||||
approvals,
|
||||
secretPolicyId,
|
||||
enforcementLevel
|
||||
enforcementLevel,
|
||||
allowedSelfApprovals
|
||||
}: TUpdateSapDTO) => {
|
||||
const groupApprovers = approvers
|
||||
?.filter((approver) => approver.type === ApproverType.Group)
|
||||
@ -218,7 +221,8 @@ export const secretApprovalPolicyServiceFactory = ({
|
||||
approvals,
|
||||
secretPath,
|
||||
name,
|
||||
enforcementLevel
|
||||
enforcementLevel,
|
||||
allowedSelfApprovals
|
||||
},
|
||||
tx
|
||||
);
|
||||
|
@ -10,6 +10,7 @@ export type TCreateSapDTO = {
|
||||
projectId: string;
|
||||
name: string;
|
||||
enforcementLevel: EnforcementLevel;
|
||||
allowedSelfApprovals: boolean;
|
||||
} & Omit<TProjectPermission, "projectId">;
|
||||
|
||||
export type TUpdateSapDTO = {
|
||||
@ -19,6 +20,7 @@ export type TUpdateSapDTO = {
|
||||
approvers: ({ type: ApproverType.Group; id: string } | { type: ApproverType.User; id?: string; name?: string })[];
|
||||
name?: string;
|
||||
enforcementLevel?: EnforcementLevel;
|
||||
allowedSelfApprovals?: boolean;
|
||||
} & Omit<TProjectPermission, "projectId">;
|
||||
|
||||
export type TDeleteSapDTO = {
|
||||
|
@ -100,6 +100,7 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
|
||||
tx.ref("lastName").withSchema("committerUser").as("committerUserLastName"),
|
||||
tx.ref("reviewerUserId").withSchema(TableName.SecretApprovalRequestReviewer),
|
||||
tx.ref("status").withSchema(TableName.SecretApprovalRequestReviewer).as("reviewerStatus"),
|
||||
tx.ref("comment").withSchema(TableName.SecretApprovalRequestReviewer).as("reviewerComment"),
|
||||
tx.ref("email").withSchema("secretApprovalReviewerUser").as("reviewerEmail"),
|
||||
tx.ref("username").withSchema("secretApprovalReviewerUser").as("reviewerUsername"),
|
||||
tx.ref("firstName").withSchema("secretApprovalReviewerUser").as("reviewerFirstName"),
|
||||
@ -111,6 +112,7 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
|
||||
tx.ref("secretPath").withSchema(TableName.SecretApprovalPolicy).as("policySecretPath"),
|
||||
tx.ref("envId").withSchema(TableName.SecretApprovalPolicy).as("policyEnvId"),
|
||||
tx.ref("enforcementLevel").withSchema(TableName.SecretApprovalPolicy).as("policyEnforcementLevel"),
|
||||
tx.ref("allowedSelfApprovals").withSchema(TableName.SecretApprovalPolicy).as("policyAllowedSelfApprovals"),
|
||||
tx.ref("approvals").withSchema(TableName.SecretApprovalPolicy).as("policyApprovals"),
|
||||
tx.ref("deletedAt").withSchema(TableName.SecretApprovalPolicy).as("policyDeletedAt")
|
||||
);
|
||||
@ -149,7 +151,8 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
|
||||
secretPath: el.policySecretPath,
|
||||
enforcementLevel: el.policyEnforcementLevel,
|
||||
envId: el.policyEnvId,
|
||||
deletedAt: el.policyDeletedAt
|
||||
deletedAt: el.policyDeletedAt,
|
||||
allowedSelfApprovals: el.policyAllowedSelfApprovals
|
||||
}
|
||||
}),
|
||||
childrenMapper: [
|
||||
@ -162,8 +165,10 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
|
||||
reviewerEmail: email,
|
||||
reviewerLastName: lastName,
|
||||
reviewerUsername: username,
|
||||
reviewerFirstName: firstName
|
||||
}) => (userId ? { userId, status, email, firstName, lastName, username } : undefined)
|
||||
reviewerFirstName: firstName,
|
||||
reviewerComment: comment
|
||||
}) =>
|
||||
userId ? { userId, status, email, firstName, lastName, username, comment: comment ?? "" } : undefined
|
||||
},
|
||||
{
|
||||
key: "approverUserId",
|
||||
@ -333,6 +338,7 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
|
||||
),
|
||||
db.ref("secretPath").withSchema(TableName.SecretApprovalPolicy).as("policySecretPath"),
|
||||
db.ref("enforcementLevel").withSchema(TableName.SecretApprovalPolicy).as("policyEnforcementLevel"),
|
||||
db.ref("allowedSelfApprovals").withSchema(TableName.SecretApprovalPolicy).as("policyAllowedSelfApprovals"),
|
||||
db.ref("approvals").withSchema(TableName.SecretApprovalPolicy).as("policyApprovals"),
|
||||
db.ref("approverUserId").withSchema(TableName.SecretApprovalPolicyApprover),
|
||||
db.ref("userId").withSchema(TableName.UserGroupMembership).as("approverGroupUserId"),
|
||||
@ -361,7 +367,8 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
|
||||
name: el.policyName,
|
||||
approvals: el.policyApprovals,
|
||||
secretPath: el.policySecretPath,
|
||||
enforcementLevel: el.policyEnforcementLevel
|
||||
enforcementLevel: el.policyEnforcementLevel,
|
||||
allowedSelfApprovals: el.policyAllowedSelfApprovals
|
||||
},
|
||||
committerUser: {
|
||||
userId: el.committerUserId,
|
||||
@ -479,6 +486,7 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
|
||||
`DENSE_RANK() OVER (partition by ${TableName.Environment}."projectId" ORDER BY ${TableName.SecretApprovalRequest}."id" DESC) as rank`
|
||||
),
|
||||
db.ref("secretPath").withSchema(TableName.SecretApprovalPolicy).as("policySecretPath"),
|
||||
db.ref("allowedSelfApprovals").withSchema(TableName.SecretApprovalPolicy).as("policyAllowedSelfApprovals"),
|
||||
db.ref("approvals").withSchema(TableName.SecretApprovalPolicy).as("policyApprovals"),
|
||||
db.ref("enforcementLevel").withSchema(TableName.SecretApprovalPolicy).as("policyEnforcementLevel"),
|
||||
db.ref("approverUserId").withSchema(TableName.SecretApprovalPolicyApprover),
|
||||
@ -508,7 +516,8 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
|
||||
name: el.policyName,
|
||||
approvals: el.policyApprovals,
|
||||
secretPath: el.policySecretPath,
|
||||
enforcementLevel: el.policyEnforcementLevel
|
||||
enforcementLevel: el.policyEnforcementLevel,
|
||||
allowedSelfApprovals: el.policyAllowedSelfApprovals
|
||||
},
|
||||
committerUser: {
|
||||
userId: el.committerUserId,
|
||||
|
@ -6,6 +6,7 @@ import {
|
||||
SecretEncryptionAlgo,
|
||||
SecretKeyEncoding,
|
||||
SecretType,
|
||||
TableName,
|
||||
TSecretApprovalRequestsSecretsInsert,
|
||||
TSecretApprovalRequestsSecretsV2Insert
|
||||
} from "@app/db/schemas";
|
||||
@ -57,8 +58,9 @@ import { SmtpTemplates, TSmtpService } from "@app/services/smtp/smtp-service";
|
||||
import { TUserDALFactory } from "@app/services/user/user-dal";
|
||||
|
||||
import { TLicenseServiceFactory } from "../license/license-service";
|
||||
import { throwIfMissingSecretReadValueOrDescribePermission } from "../permission/permission-fns";
|
||||
import { TPermissionServiceFactory } from "../permission/permission-service";
|
||||
import { ProjectPermissionActions, ProjectPermissionSub } from "../permission/project-permission";
|
||||
import { ProjectPermissionSecretActions, ProjectPermissionSub } from "../permission/project-permission";
|
||||
import { TSecretApprovalPolicyDALFactory } from "../secret-approval-policy/secret-approval-policy-dal";
|
||||
import { TSecretSnapshotServiceFactory } from "../secret-snapshot/secret-snapshot-service";
|
||||
import { TSecretApprovalRequestDALFactory } from "./secret-approval-request-dal";
|
||||
@ -88,7 +90,12 @@ type TSecretApprovalRequestServiceFactoryDep = {
|
||||
secretDAL: TSecretDALFactory;
|
||||
secretTagDAL: Pick<
|
||||
TSecretTagDALFactory,
|
||||
"findManyTagsById" | "saveTagsToSecret" | "deleteTagsManySecret" | "saveTagsToSecretV2" | "deleteTagsToSecretV2"
|
||||
| "findManyTagsById"
|
||||
| "saveTagsToSecret"
|
||||
| "deleteTagsManySecret"
|
||||
| "saveTagsToSecretV2"
|
||||
| "deleteTagsToSecretV2"
|
||||
| "find"
|
||||
>;
|
||||
secretBlindIndexDAL: Pick<TSecretBlindIndexDALFactory, "findOne">;
|
||||
snapshotService: Pick<TSecretSnapshotServiceFactory, "performSnapshot">;
|
||||
@ -106,7 +113,7 @@ type TSecretApprovalRequestServiceFactoryDep = {
|
||||
kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey" | "encryptWithInputKey" | "decryptWithInputKey">;
|
||||
secretV2BridgeDAL: Pick<
|
||||
TSecretV2BridgeDALFactory,
|
||||
"insertMany" | "upsertSecretReferences" | "findBySecretKeys" | "bulkUpdate" | "deleteMany"
|
||||
"insertMany" | "upsertSecretReferences" | "findBySecretKeys" | "bulkUpdate" | "deleteMany" | "find"
|
||||
>;
|
||||
secretVersionV2BridgeDAL: Pick<TSecretVersionV2DALFactory, "insertMany" | "findLatestVersionMany">;
|
||||
secretVersionTagV2BridgeDAL: Pick<TSecretVersionV2TagDALFactory, "insertMany">;
|
||||
@ -320,6 +327,7 @@ export const secretApprovalRequestServiceFactory = ({
|
||||
approvalId,
|
||||
actor,
|
||||
status,
|
||||
comment,
|
||||
actorId,
|
||||
actorAuthMethod,
|
||||
actorOrgId
|
||||
@ -344,6 +352,11 @@ export const secretApprovalRequestServiceFactory = ({
|
||||
message: "The policy associated with this secret approval request has been deleted."
|
||||
});
|
||||
}
|
||||
if (!policy.allowedSelfApprovals && actorId === secretApprovalRequest.committerUserId) {
  throw new BadRequestError({
    message: "Failed to review secret approval request. Users are not authorized to review their own request."
  });
}
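The new `allowedSelfApprovals` policy flag blocks a committer from reviewing their own request. A compact sketch of the guard as a reusable helper, with types trimmed to the fields involved:

```ts
type ApprovalPolicy = { allowedSelfApprovals: boolean };
type ApprovalRequest = { committerUserId: string };

// Throws when the policy forbids self-approval and the reviewer is the committer.
function assertNotSelfReview(policy: ApprovalPolicy, request: ApprovalRequest, reviewerUserId: string) {
  if (!policy.allowedSelfApprovals && reviewerUserId === request.committerUserId) {
    throw new Error("Users are not authorized to review their own request.");
  }
}

assertNotSelfReview({ allowedSelfApprovals: true }, { committerUserId: "u1" }, "u1"); // ok
// assertNotSelfReview({ allowedSelfApprovals: false }, { committerUserId: "u1" }, "u1"); // throws
```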
|
||||
|
||||
const { hasRole } = await permissionService.getProjectPermission({
|
||||
actor: ActorType.USER,
|
||||
@ -372,15 +385,18 @@ export const secretApprovalRequestServiceFactory = ({
|
||||
return secretApprovalRequestReviewerDAL.create(
|
||||
{
|
||||
status,
|
||||
comment,
|
||||
requestId: secretApprovalRequest.id,
|
||||
reviewerUserId: actorId
|
||||
},
|
||||
tx
|
||||
);
|
||||
}
|
||||
return secretApprovalRequestReviewerDAL.updateById(review.id, { status }, tx);
|
||||
|
||||
return secretApprovalRequestReviewerDAL.updateById(review.id, { status, comment }, tx);
|
||||
});
|
||||
return reviewStatus;
|
||||
|
||||
return { ...reviewStatus, projectId: secretApprovalRequest.projectId };
|
||||
};
|
||||
|
||||
const updateApprovalStatus = async ({
|
||||
@ -499,7 +515,7 @@ export const secretApprovalRequestServiceFactory = ({
|
||||
if (!hasMinApproval && !isSoftEnforcement)
|
||||
throw new BadRequestError({ message: "Doesn't have minimum approvals needed" });
|
||||
|
||||
const { botKey, shouldUseSecretV2Bridge } = await projectBotService.getBotKey(projectId);
|
||||
const { botKey, shouldUseSecretV2Bridge, project } = await projectBotService.getBotKey(projectId);
|
||||
let mergeStatus;
|
||||
if (shouldUseSecretV2Bridge) {
|
||||
// this cycle if for bridged secrets
|
||||
@ -857,7 +873,6 @@ export const secretApprovalRequestServiceFactory = ({
|
||||
|
||||
if (isSoftEnforcement) {
|
||||
const cfg = getConfig();
|
||||
const project = await projectDAL.findProjectById(projectId);
|
||||
const env = await projectEnvDAL.findOne({ id: policy.envId });
|
||||
const requestedByUser = await userDAL.findOne({ id: actorId });
|
||||
const approverUsers = await userDAL.find({
|
||||
@ -909,10 +924,11 @@ export const secretApprovalRequestServiceFactory = ({
|
||||
actorOrgId,
|
||||
actionProjectType: ActionProjectType.SecretManager
|
||||
});
|
||||
ForbiddenError.from(permission).throwUnlessCan(
|
||||
ProjectPermissionActions.Read,
|
||||
subject(ProjectPermissionSub.Secrets, { environment, secretPath })
|
||||
);
|
||||
|
||||
throwIfMissingSecretReadValueOrDescribePermission(permission, ProjectPermissionSecretActions.ReadValue, {
|
||||
environment,
|
||||
secretPath
|
||||
});
|
||||
|
||||
await projectDAL.checkProjectUpgradeStatus(projectId);
|
||||
|
||||
@ -997,6 +1013,7 @@ export const secretApprovalRequestServiceFactory = ({
|
||||
: keyName2BlindIndex[secretName];
|
||||
// add tags
|
||||
if (tagIds?.length) commitTagIds[keyName2BlindIndex[secretName]] = tagIds;
|
||||
|
||||
return {
|
||||
...latestSecretVersions[secretId],
|
||||
...el,
|
||||
@ -1152,7 +1169,8 @@ export const secretApprovalRequestServiceFactory = ({
|
||||
environment: env.name,
|
||||
secretPath,
|
||||
projectId,
|
||||
requestId: secretApprovalRequest.id
|
||||
requestId: secretApprovalRequest.id,
|
||||
secretKeys: [...new Set(Object.values(data).flatMap((arr) => arr?.map((item) => item.secretName) ?? []))]
|
||||
}
|
||||
}
|
||||
});
|
||||
@ -1323,17 +1341,48 @@ export const secretApprovalRequestServiceFactory = ({
|
||||
// deleted secrets
|
||||
const deletedSecrets = data[SecretOperations.Delete];
|
||||
if (deletedSecrets && deletedSecrets.length) {
|
||||
const secretsToDeleteInDB = await secretV2BridgeDAL.findBySecretKeys(
|
||||
const secretsToDeleteInDB = await secretV2BridgeDAL.find({
|
||||
folderId,
|
||||
deletedSecrets.map((el) => ({
|
||||
key: el.secretKey,
|
||||
type: SecretType.Shared
|
||||
}))
|
||||
);
|
||||
$complex: {
|
||||
operator: "and",
|
||||
value: [
|
||||
{
|
||||
operator: "or",
|
||||
value: deletedSecrets.map((el) => ({
|
||||
operator: "and",
|
||||
value: [
|
||||
{
|
||||
operator: "eq",
|
||||
field: `${TableName.SecretV2}.key` as "key",
|
||||
value: el.secretKey
|
||||
},
|
||||
{
|
||||
operator: "eq",
|
||||
field: "type",
|
||||
value: SecretType.Shared
|
||||
}
|
||||
]
|
||||
}))
|
||||
}
|
||||
]
|
||||
}
|
||||
});
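The replacement query above expresses "key equals X and type is shared, for any of the deleted keys" as an or-of-ands tree rather than a list of key objects. A rough in-memory illustration of that filter shape; the `$complex` operator itself is Infisical's DAL syntax, and this sketch only mirrors the structure:

```ts
// Build the same or-of-ands structure and evaluate it against plain rows,
// purely to illustrate the shape the DAL filter takes.
type Leaf = { operator: "eq"; field: string; value: string };
type Node = { operator: "and" | "or"; value: (Node | Leaf)[] };

const buildDeleteFilter = (keys: string[]): Node => ({
  operator: "or",
  value: keys.map(
    (key): Node => ({
      operator: "and",
      value: [
        { operator: "eq", field: "key", value: key },
        { operator: "eq", field: "type", value: "shared" }
      ]
    })
  )
});

const matches = (row: Record<string, string>, node: Node | Leaf): boolean =>
  node.operator === "eq"
    ? row[node.field] === node.value
    : node.operator === "and"
      ? node.value.every((child) => matches(row, child))
      : node.value.some((child) => matches(row, child));

console.log(matches({ key: "DB_PASSWORD", type: "shared" }, buildDeleteFilter(["DB_PASSWORD", "API_KEY"]))); // true
```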
|
||||
if (secretsToDeleteInDB.length !== deletedSecrets.length)
|
||||
throw new NotFoundError({
|
||||
message: `Secret does not exist: ${secretsToDeleteInDB.map((el) => el.key).join(",")}`
|
||||
});
|
||||
secretsToDeleteInDB.forEach((el) => {
|
||||
ForbiddenError.from(permission).throwUnlessCan(
|
||||
ProjectPermissionSecretActions.Delete,
|
||||
subject(ProjectPermissionSub.Secrets, {
|
||||
environment,
|
||||
secretPath,
|
||||
secretName: el.key,
|
||||
secretTags: el.tags?.map((i) => i.slug)
|
||||
})
|
||||
);
|
||||
});
|
||||
|
||||
const secretsGroupedByKey = groupBy(secretsToDeleteInDB, (i) => i.key);
|
||||
const deletedSecretIds = deletedSecrets.map((el) => secretsGroupedByKey[el.secretKey][0].id);
|
||||
const latestSecretVersions = await secretVersionV2BridgeDAL.findLatestVersionMany(folderId, deletedSecretIds);
|
||||
@ -1359,9 +1408,9 @@ export const secretApprovalRequestServiceFactory = ({
|
||||
const tagsGroupById = groupBy(tags, (i) => i.id);
|
||||
|
||||
commits.forEach((commit) => {
|
||||
let action = ProjectPermissionActions.Create;
|
||||
if (commit.op === SecretOperations.Update) action = ProjectPermissionActions.Edit;
|
||||
if (commit.op === SecretOperations.Delete) action = ProjectPermissionActions.Delete;
|
||||
let action = ProjectPermissionSecretActions.Create;
|
||||
if (commit.op === SecretOperations.Update) action = ProjectPermissionSecretActions.Edit;
|
||||
if (commit.op === SecretOperations.Delete) return; // we do the validation on top
|
||||
|
||||
ForbiddenError.from(permission).throwUnlessCan(
|
||||
action,
|
||||
@ -1452,7 +1501,8 @@ export const secretApprovalRequestServiceFactory = ({
|
||||
environment: env.name,
|
||||
secretPath,
|
||||
projectId,
|
||||
requestId: secretApprovalRequest.id
|
||||
requestId: secretApprovalRequest.id,
|
||||
secretKeys: [...new Set(Object.values(data).flatMap((arr) => arr?.map((item) => item.secretKey) ?? []))]
|
||||
}
|
||||
}
|
||||
});
|
||||
|
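For reference, a minimal sketch of the nested filter the new secretV2BridgeDAL.find call builds for the keys slated for deletion, assuming the DAL's $complex option accepts the and/or/eq nodes exactly as shown in the hunk above. This is not code from the PR: the helper name is hypothetical, and the string literals stand in for the TableName.SecretV2 and SecretType.Shared enum values used in the real file.

// Hypothetical helper mirroring the hunk above: match every shared secret
// whose key appears in the approval request's delete operations.
const buildDeleteLookupFilter = (folderId: string, deletedSecrets: { secretKey: string }[]) => ({
  folderId,
  $complex: {
    operator: "and" as const,
    value: [
      {
        operator: "or" as const,
        value: deletedSecrets.map((el) => ({
          operator: "and" as const,
          value: [
            { operator: "eq" as const, field: "key", value: el.secretKey }, // `${TableName.SecretV2}.key` in the PR
            { operator: "eq" as const, field: "type", value: "shared" } // SecretType.Shared in the PR
          ]
        }))
      }
    ]
  }
});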
@@ -80,6 +80,7 @@ export type TStatusChangeDTO = {
export type TReviewRequestDTO = {
approvalId: string;
status: ApprovalStatus;
comment?: string;
} & Omit<TProjectPermission, "projectId">;

export type TApprovalRequestCountDTO = TProjectPermission;

@@ -265,6 +265,7 @@ export const secretReplicationServiceFactory = ({
folderDAL,
secretImportDAL,
decryptor: (value) => (value ? secretManagerDecryptor({ cipherTextBlob: value }).toString() : ""),
viewSecretValue: true,
hasSecretAccess: () => true
});
// secrets that gets replicated across imports
@@ -8,10 +8,9 @@ import axios from "axios";
import jmespath from "jmespath";
import knex from "knex";

import { getConfig } from "@app/lib/config/env";
import { getDbConnectionHost } from "@app/lib/knex";
import { alphaNumericNanoId } from "@app/lib/nanoid";

import { verifyHostInputValidity } from "../../dynamic-secret/dynamic-secret-fns";
import { TAssignOp, TDbProviderClients, TDirectAssignOp, THttpProviderFunction } from "../templates/types";
import { TSecretRotationData, TSecretRotationDbFn } from "./secret-rotation-queue-types";

@@ -88,32 +87,14 @@ export const secretRotationDbFn = async ({
variables,
options
}: TSecretRotationDbFn) => {
const appCfg = getConfig();

const ssl = ca ? { rejectUnauthorized: false, ca } : undefined;
const isCloud = Boolean(appCfg.LICENSE_SERVER_KEY); // quick and dirty way to check if its cloud or not
const dbHost = appCfg.DB_HOST || getDbConnectionHost(appCfg.DB_CONNECTION_URI);

if (
isCloud &&
// internal ips
(host === "host.docker.internal" || host.match(/^10\.\d+\.\d+\.\d+/) || host.match(/^192\.168\.\d+\.\d+/))
)
throw new Error("Invalid db host");
if (
host === "localhost" ||
host === "127.0.0.1" ||
// database infisical uses
dbHost === host
)
throw new Error("Invalid db host");

const [hostIp] = await verifyHostInputValidity(host);
const db = knex({
client,
connection: {
database,
port,
host,
host: hostIp,
user: username,
password,
connectionTimeoutMillis: EXTERNAL_REQUEST_TIMEOUT,
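As a usage sketch of the pattern this hunk moves to — validate the caller-supplied host through verifyHostInputValidity and hand the resolved IP to knex instead of the raw input — assuming the surrounding variables keep the meanings shown above. Everything here is illustrative; the function name and the options object are placeholders, not code from the PR.

import knex from "knex";

import { verifyHostInputValidity } from "../../dynamic-secret/dynamic-secret-fns";

// Sketch only: validate first, then connect to the resolved IP.
const connectForRotation = async (opts: {
  client: string;
  host: string;
  port: number;
  database: string;
  username: string;
  password: string;
  ca?: string;
}) => {
  const [hostIp] = await verifyHostInputValidity(opts.host); // expected to throw for disallowed hosts
  return knex({
    client: opts.client,
    connection: {
      database: opts.database,
      port: opts.port,
      host: hostIp, // connect to the validated IP rather than the raw user input
      user: opts.username,
      password: opts.password,
      ssl: opts.ca ? { rejectUnauthorized: false, ca: opts.ca } : undefined
    }
  });
};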
@@ -13,6 +13,7 @@ import { NotFoundError } from "@app/lib/errors";
import { logger } from "@app/lib/logger";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { QueueJobs, QueueName, TQueueServiceFactory } from "@app/queue";
import { ActorType } from "@app/services/auth/auth-type";
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
import { KmsDataKey } from "@app/services/kms/kms-types";
import { TProjectBotServiceFactory } from "@app/services/project-bot/project-bot-service";

@@ -332,6 +333,7 @@ export const secretRotationQueueFactory = ({
await secretVersionV2BridgeDAL.insertMany(
updatedSecrets.map(({ id, updatedAt, createdAt, ...el }) => ({
...el,
actorType: ActorType.PLATFORM,
secretId: id
})),
tx
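The same actor-stamping idea reappears later in the snapshot rollback hunk. Condensed into one helper as a sketch — ActorType is the enum imported above, while the helper name and its shape are placeholders, not code from the PR:

import { ActorType } from "@app/services/auth/auth-type";

// Sketch: derive the audit fields written onto new secret versions,
// defaulting to PLATFORM for system-triggered writes such as rotations.
const toVersionActor = (actor?: ActorType, actorId?: string) => ({
  userActorId: actor === ActorType.USER ? actorId : undefined,
  identityActorId: actor && actor !== ActorType.USER ? actorId : undefined,
  actorType: actor ?? ActorType.PLATFORM
});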
@@ -15,7 +15,11 @@ import { TSecretV2BridgeDALFactory } from "@app/services/secret-v2-bridge/secret

import { TLicenseServiceFactory } from "../license/license-service";
import { TPermissionServiceFactory } from "../permission/permission-service";
import { ProjectPermissionActions, ProjectPermissionSub } from "../permission/project-permission";
import {
ProjectPermissionActions,
ProjectPermissionSecretActions,
ProjectPermissionSub
} from "../permission/project-permission";
import { TSecretRotationDALFactory } from "./secret-rotation-dal";
import { TSecretRotationQueueFactory } from "./secret-rotation-queue";
import { TSecretRotationEncData } from "./secret-rotation-queue/secret-rotation-queue-types";

@@ -106,7 +110,7 @@ export const secretRotationServiceFactory = ({
});
}
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionActions.Edit,
ProjectPermissionSecretActions.Edit,
subject(ProjectPermissionSub.Secrets, { environment, secretPath })
);
@@ -1,16 +1,18 @@
/* eslint-disable @typescript-eslint/no-unsafe-assignment,@typescript-eslint/no-unsafe-member-access,@typescript-eslint/no-unsafe-argument */
// akhilmhdh: I did this, quite strange bug with eslint. Everything do have a type stil has this error
import { ForbiddenError, subject } from "@casl/ability";
import { ForbiddenError } from "@casl/ability";

import { ActionProjectType, TableName, TSecretTagJunctionInsert, TSecretV2TagJunctionInsert } from "@app/db/schemas";
import { decryptSymmetric128BitHexKeyUTF8 } from "@app/lib/crypto";
import { InternalServerError, NotFoundError } from "@app/lib/errors";
import { groupBy } from "@app/lib/fn";
import { logger } from "@app/lib/logger";
import { ActorType } from "@app/services/auth/auth-type";
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
import { KmsDataKey } from "@app/services/kms/kms-types";
import { TProjectBotServiceFactory } from "@app/services/project-bot/project-bot-service";
import { TSecretDALFactory } from "@app/services/secret/secret-dal";
import { INFISICAL_SECRET_VALUE_HIDDEN_MASK } from "@app/services/secret/secret-fns";
import { TSecretVersionDALFactory } from "@app/services/secret/secret-version-dal";
import { TSecretVersionTagDALFactory } from "@app/services/secret/secret-version-tag-dal";
import { TSecretFolderDALFactory } from "@app/services/secret-folder/secret-folder-dal";

@@ -21,8 +23,16 @@ import { TSecretVersionV2DALFactory } from "@app/services/secret-v2-bridge/secre
import { TSecretVersionV2TagDALFactory } from "@app/services/secret-v2-bridge/secret-version-tag-dal";

import { TLicenseServiceFactory } from "../license/license-service";
import {
hasSecretReadValueOrDescribePermission,
throwIfMissingSecretReadValueOrDescribePermission
} from "../permission/permission-fns";
import { TPermissionServiceFactory } from "../permission/permission-service";
import { ProjectPermissionActions, ProjectPermissionSub } from "../permission/project-permission";
import {
ProjectPermissionActions,
ProjectPermissionSecretActions,
ProjectPermissionSub
} from "../permission/project-permission";
import {
TGetSnapshotDataDTO,
TProjectSnapshotCountDTO,

@@ -96,10 +106,10 @@ export const secretSnapshotServiceFactory = ({
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.SecretRollback);

// We need to check if the user has access to the secrets in the folder. If we don't do this, a user could theoretically access snapshot secret values even if they don't have read access to the secrets in the folder.
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionActions.Read,
subject(ProjectPermissionSub.Secrets, { environment, secretPath: path })
);
throwIfMissingSecretReadValueOrDescribePermission(permission, ProjectPermissionSecretActions.DescribeSecret, {
environment,
secretPath: path
});

const folder = await folderDAL.findBySecretPath(projectId, environment, path);
if (!folder) {

@@ -133,10 +143,10 @@ export const secretSnapshotServiceFactory = ({
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.SecretRollback);

// We need to check if the user has access to the secrets in the folder. If we don't do this, a user could theoretically access snapshot secret values even if they don't have read access to the secrets in the folder.
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionActions.Read,
subject(ProjectPermissionSub.Secrets, { environment, secretPath: path })
);
throwIfMissingSecretReadValueOrDescribePermission(permission, ProjectPermissionSecretActions.DescribeSecret, {
environment,
secretPath: path
});

const folder = await folderDAL.findBySecretPath(projectId, environment, path);
if (!folder)

@@ -161,6 +171,7 @@ export const secretSnapshotServiceFactory = ({
});

ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.SecretRollback);

const shouldUseBridge = snapshot.projectVersion === 3;
let snapshotDetails;
if (shouldUseBridge) {

@@ -169,68 +180,112 @@ export const secretSnapshotServiceFactory = ({
projectId: snapshot.projectId
});
const encryptedSnapshotDetails = await snapshotDAL.findSecretSnapshotV2DataById(id);

const fullFolderPath = await getFullFolderPath({
folderDAL,
folderId: encryptedSnapshotDetails.folderId,
envId: encryptedSnapshotDetails.environment.id
});

snapshotDetails = {
...encryptedSnapshotDetails,
secretVersions: encryptedSnapshotDetails.secretVersions.map((el) => ({
...el,
secretKey: el.key,
secretValue: el.encryptedValue
? secretManagerDecryptor({ cipherTextBlob: el.encryptedValue }).toString()
: "",
secretComment: el.encryptedComment
? secretManagerDecryptor({ cipherTextBlob: el.encryptedComment }).toString()
: ""
}))
secretVersions: encryptedSnapshotDetails.secretVersions.map((el) => {
const canReadValue = hasSecretReadValueOrDescribePermission(
permission,
ProjectPermissionSecretActions.ReadValue,
{
environment: encryptedSnapshotDetails.environment.slug,
secretPath: fullFolderPath,
secretName: el.key,
secretTags: el.tags.length ? el.tags.map((tag) => tag.slug) : undefined
}
);

let secretValue = "";
if (canReadValue) {
secretValue = el.encryptedValue
? secretManagerDecryptor({ cipherTextBlob: el.encryptedValue }).toString()
: "";
} else {
secretValue = INFISICAL_SECRET_VALUE_HIDDEN_MASK;
}

return {
...el,
secretKey: el.key,
secretValueHidden: !canReadValue,
secretValue,
secretComment: el.encryptedComment
? secretManagerDecryptor({ cipherTextBlob: el.encryptedComment }).toString()
: ""
};
})
};
} else {
const encryptedSnapshotDetails = await snapshotDAL.findSecretSnapshotDataById(id);

const fullFolderPath = await getFullFolderPath({
folderDAL,
folderId: encryptedSnapshotDetails.folderId,
envId: encryptedSnapshotDetails.environment.id
});

const { botKey } = await projectBotService.getBotKey(snapshot.projectId);
if (!botKey)
throw new NotFoundError({ message: `Project bot key not found for project with ID '${snapshot.projectId}'` });
snapshotDetails = {
...encryptedSnapshotDetails,
secretVersions: encryptedSnapshotDetails.secretVersions.map((el) => ({
...el,
secretKey: decryptSymmetric128BitHexKeyUTF8({
secretVersions: encryptedSnapshotDetails.secretVersions.map((el) => {
const secretKey = decryptSymmetric128BitHexKeyUTF8({
ciphertext: el.secretKeyCiphertext,
iv: el.secretKeyIV,
tag: el.secretKeyTag,
key: botKey
}),
secretValue: decryptSymmetric128BitHexKeyUTF8({
ciphertext: el.secretValueCiphertext,
iv: el.secretValueIV,
tag: el.secretValueTag,
key: botKey
}),
secretComment:
el.secretCommentTag && el.secretCommentIV && el.secretCommentCiphertext
? decryptSymmetric128BitHexKeyUTF8({
ciphertext: el.secretCommentCiphertext,
iv: el.secretCommentIV,
tag: el.secretCommentTag,
key: botKey
})
: ""
}))
});

const canReadValue = hasSecretReadValueOrDescribePermission(
permission,
ProjectPermissionSecretActions.ReadValue,
{
environment: encryptedSnapshotDetails.environment.slug,
secretPath: fullFolderPath,
secretName: secretKey,
secretTags: el.tags.length ? el.tags.map((tag) => tag.slug) : undefined
}
);

let secretValue = "";

if (canReadValue) {
secretValue = decryptSymmetric128BitHexKeyUTF8({
ciphertext: el.secretValueCiphertext,
iv: el.secretValueIV,
tag: el.secretValueTag,
key: botKey
});
} else {
secretValue = INFISICAL_SECRET_VALUE_HIDDEN_MASK;
}

return {
...el,
secretKey,
secretValueHidden: !canReadValue,
secretValue,
secretComment:
el.secretCommentTag && el.secretCommentIV && el.secretCommentCiphertext
? decryptSymmetric128BitHexKeyUTF8({
ciphertext: el.secretCommentCiphertext,
iv: el.secretCommentIV,
tag: el.secretCommentTag,
key: botKey
})
: ""
};
})
};
}

const fullFolderPath = await getFullFolderPath({
folderDAL,
folderId: snapshotDetails.folderId,
envId: snapshotDetails.environment.id
});

// We need to check if the user has access to the secrets in the folder. If we don't do this, a user could theoretically access snapshot secret values even if they don't have read access to the secrets in the folder.
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionActions.Read,
subject(ProjectPermissionSub.Secrets, {
environment: snapshotDetails.environment.slug,
secretPath: fullFolderPath
})
);

return snapshotDetails;
};

@@ -370,7 +425,21 @@ export const secretSnapshotServiceFactory = ({
const secrets = await secretV2BridgeDAL.insertMany(
rollbackSnaps.flatMap(({ secretVersions, folderId }) =>
secretVersions.map(
({ latestSecretVersion, version, updatedAt, createdAt, secretId, envId, id, tags, ...el }) => ({
({
latestSecretVersion,
version,
updatedAt,
createdAt,
secretId,
envId,
id,
tags,
// exclude the bottom fields from the secret - they are for versioning only.
userActorId,
identityActorId,
actorType,
...el
}) => ({
...el,
id: secretId,
version: deletedTopLevelSecsGroupById[secretId] ? latestSecretVersion + 1 : latestSecretVersion,

@@ -401,8 +470,18 @@ export const secretSnapshotServiceFactory = ({
})),
tx
);
const userActorId = actor === ActorType.USER ? actorId : undefined;
const identityActorId = actor !== ActorType.USER ? actorId : undefined;
const actorType = actor || ActorType.PLATFORM;

const secretVersions = await secretVersionV2BridgeDAL.insertMany(
secrets.map(({ id, updatedAt, createdAt, ...el }) => ({ ...el, secretId: id })),
secrets.map(({ id, updatedAt, createdAt, ...el }) => ({
...el,
secretId: id,
userActorId,
identityActorId,
actorType
})),
tx
);
await secretVersionV2TagBridgeDAL.insertMany(
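Both snapshot branches above now apply the same gate: check hasSecretReadValueOrDescribePermission for each secret and substitute the hidden mask when the caller may only describe it. Reduced to its core as a sketch — the mask constant is the one imported in the hunk, while the helper name and the decrypt callback are placeholders:

import { INFISICAL_SECRET_VALUE_HIDDEN_MASK } from "@app/services/secret/secret-fns";

// Sketch: only decrypt when the caller can read the value; otherwise return the mask.
const resolveSnapshotSecretValue = (canReadValue: boolean, decrypt: () => string) => ({
  secretValueHidden: !canReadValue,
  secretValue: canReadValue ? decrypt() : INFISICAL_SECRET_VALUE_HIDDEN_MASK
});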
@@ -1,10 +1,10 @@
import { ForbiddenError } from "@casl/ability";
import ms from "ms";

import { ActionProjectType } from "@app/db/schemas";
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
import { ProjectPermissionActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission";
import { BadRequestError, NotFoundError } from "@app/lib/errors";
import { ms } from "@app/lib/ms";

import { TSshCertificateAuthorityDALFactory } from "../ssh/ssh-certificate-authority-dal";
import { TSshCertificateTemplateDALFactory } from "./ssh-certificate-template-dal";

@@ -1,13 +1,13 @@
import { execFile } from "child_process";
import crypto from "crypto";
import { promises as fs } from "fs";
import ms from "ms";
import os from "os";
import path from "path";
import { promisify } from "util";

import { TSshCertificateTemplates } from "@app/db/schemas";
import { BadRequestError } from "@app/lib/errors";
import { ms } from "@app/lib/ms";
import { CertKeyAlgorithm } from "@app/services/certificate/certificate-types";

import {
@@ -244,7 +244,7 @@ export const KUBERNETES_AUTH = {
kubernetesHost: "The host string, host:port pair, or URL to the base of the Kubernetes API server.",
caCert: "The PEM-encoded CA cert for the Kubernetes API server.",
tokenReviewerJwt:
"The long-lived service account JWT token for Infisical to access the TokenReview API to validate other service account JWT tokens submitted by applications/pods.",
"Optional JWT token for accessing Kubernetes TokenReview API. If provided, this long-lived token will be used to validate service account tokens during authentication. If omitted, the client's own JWT will be used instead, which requires the client to have the system:auth-delegator ClusterRole binding.",
allowedNamespaces:
"The comma-separated list of trusted namespaces that service accounts must belong to authenticate with Infisical.",
allowedNames: "The comma-separated list of trusted service account names that can authenticate with Infisical.",

@@ -260,7 +260,7 @@ export const KUBERNETES_AUTH = {
kubernetesHost: "The new host string, host:port pair, or URL to the base of the Kubernetes API server.",
caCert: "The new PEM-encoded CA cert for the Kubernetes API server.",
tokenReviewerJwt:
"The new long-lived service account JWT token for Infisical to access the TokenReview API to validate other service account JWT tokens submitted by applications/pods.",
"Optional JWT token for accessing Kubernetes TokenReview API. If provided, this long-lived token will be used to validate service account tokens during authentication. If omitted, the client's own JWT will be used instead, which requires the client to have the system:auth-delegator ClusterRole binding.",
allowedNamespaces:
"The new comma-separated list of trusted namespaces that service accounts must belong to authenticate with Infisical.",
allowedNames: "The new comma-separated list of trusted service account names that can authenticate with Infisical.",

@@ -329,6 +329,7 @@ export const OIDC_AUTH = {
boundIssuer: "The unique identifier of the identity provider issuing the JWT.",
boundAudiences: "The list of intended recipients.",
boundClaims: "The attributes that should be present in the JWT for it to be valid.",
claimMetadataMapping: "The attributes that should be present in the permission metadata from the JWT.",
boundSubject: "The expected principal that is the subject of the JWT.",
accessTokenTrustedIps: "The IPs or CIDR ranges that access tokens can be used from.",
accessTokenTTL: "The lifetime for an access token in seconds.",

@@ -342,6 +343,7 @@ export const OIDC_AUTH = {
boundIssuer: "The new unique identifier of the identity provider issuing the JWT.",
boundAudiences: "The new list of intended recipients.",
boundClaims: "The new attributes that should be present in the JWT for it to be valid.",
claimMetadataMapping: "The new attributes that should be present in the permission metadata from the JWT.",
boundSubject: "The new expected principal that is the subject of the JWT.",
accessTokenTrustedIps: "The new IPs or CIDR ranges that access tokens can be used from.",
accessTokenTTL: "The new lifetime for an access token in seconds.",

@@ -459,7 +461,8 @@ export const PROJECTS = {
workspaceId: "The ID of the project to update.",
name: "The new name of the project.",
projectDescription: "An optional description label for the project.",
autoCapitalization: "Disable or enable auto-capitalization for the project."
autoCapitalization: "Disable or enable auto-capitalization for the project.",
slug: "An optional slug for the project. (must be unique within the organization)"
},
GET_KEY: {
workspaceId: "The ID of the project to get the key from."

@@ -628,7 +631,8 @@ export const FOLDERS = {
workspaceId: "The ID of the project to list folders from.",
environment: "The slug of the environment to list folders from.",
path: "The path to list folders from.",
directory: "The directory to list folders from. (Deprecated in favor of path)"
directory: "The directory to list folders from. (Deprecated in favor of path)",
recursive: "Whether or not to fetch all folders from the specified base path, and all of its subdirectories."
},
GET_BY_ID: {
folderId: "The ID of the folder to get details."

@@ -666,6 +670,7 @@ export const SECRETS = {
secretPath: "The path of the secret to attach tags to.",
type: "The type of the secret to attach tags to. (shared/personal)",
environment: "The slug of the environment where the secret is located",
viewSecretValue: "Whether or not to retrieve the secret value.",
projectSlug: "The slug of the project where the secret is located.",
tagSlugs: "An array of existing tag slugs to attach to the secret."
},

@@ -689,6 +694,7 @@ export const RAW_SECRETS = {
"The slug of the project to list secrets from. This parameter is only applicable by machine identities.",
environment: "The slug of the environment to list secrets from.",
secretPath: "The secret path to list secrets from.",
viewSecretValue: "Whether or not to retrieve the secret value.",
includeImports: "Weather to include imported secrets or not.",
tagSlugs: "The comma separated tag slugs to filter secrets.",
metadataFilter:

@@ -717,6 +723,7 @@ export const RAW_SECRETS = {
secretPath: "The path of the secret to get.",
version: "The version of the secret to get.",
type: "The type of the secret to get.",
viewSecretValue: "Whether or not to retrieve the secret value.",
includeImports: "Weather to include imported secrets or not."
},
UPDATE: {

@@ -809,7 +816,8 @@ export const DASHBOARD = {
search: "The text string to filter secret keys and folder names by.",
includeSecrets: "Whether to include project secrets in the response.",
includeFolders: "Whether to include project folders in the response.",
includeDynamicSecrets: "Whether to include dynamic project secrets in the response."
includeDynamicSecrets: "Whether to include dynamic project secrets in the response.",
includeImports: "Whether to include project secret imports in the response."
},
SECRET_DETAILS_LIST: {
projectId: "The ID of the project to list secrets/folders from.",

@@ -1721,7 +1729,8 @@ export const SecretSyncs = {
SYNC_OPTIONS: (destination: SecretSync) => {
const destinationName = SECRET_SYNC_NAME_MAP[destination];
return {
initialSyncBehavior: `Specify how Infisical should resolve the initial sync to the ${destinationName} destination.`
initialSyncBehavior: `Specify how Infisical should resolve the initial sync to the ${destinationName} destination.`,
disableSecretDeletion: `Enable this flag to prevent removal of secrets from the ${destinationName} destination when syncing.`
};
},
ADDITIONAL_SYNC_OPTIONS: {

@@ -1767,6 +1776,12 @@ export const SecretSyncs = {
},
DATABRICKS: {
scope: "The Databricks secret scope that secrets should be synced to."
},
HUMANITEC: {
app: "The ID of the Humanitec app to sync secrets to.",
org: "The ID of the Humanitec org to sync secrets to.",
env: "The ID of the Humanitec environment to sync secrets to.",
scope: "The Humanitec scope that secrets should be synced to."
}
}
};
backend/src/lib/casl/boundary.test.ts (new file, 669 lines)
@@ -0,0 +1,669 @@
import { createMongoAbility } from "@casl/ability";

import { PermissionConditionOperators } from ".";
import { validatePermissionBoundary } from "./boundary";

describe("Validate Permission Boundary Function", () => {
test.each([
{
title: "child with equal privilege",
parentPermission: createMongoAbility([
{
action: ["create", "edit", "delete", "read"],
subject: "secrets"
}
]),
childPermission: createMongoAbility([
{
action: ["create", "edit", "delete", "read"],
subject: "secrets"
}
]),
expectValid: true,
missingPermissions: []
},
{
title: "child with less privilege",
parentPermission: createMongoAbility([
{
action: ["create", "edit", "delete", "read"],
subject: "secrets"
}
]),
childPermission: createMongoAbility([
{
action: ["create", "edit"],
subject: "secrets"
}
]),
expectValid: true,
missingPermissions: []
},
{
title: "child with more privilege",
parentPermission: createMongoAbility([
{
action: ["create"],
subject: "secrets"
}
]),
childPermission: createMongoAbility([
{
action: ["create", "edit"],
subject: "secrets"
}
]),
expectValid: false,
missingPermissions: [{ action: "edit", subject: "secrets" }]
},
{
title: "parent with multiple and child with multiple",
parentPermission: createMongoAbility([
{
action: ["create"],
subject: "secrets"
},
{
action: ["create", "edit"],
subject: "members"
}
]),
childPermission: createMongoAbility([
{
action: ["create"],
subject: "members"
},
{
action: ["create"],
subject: "secrets"
}
]),
expectValid: true,
missingPermissions: []
},
{
title: "Child with no access",
parentPermission: createMongoAbility([
{
action: ["create"],
subject: "secrets"
},
{
action: ["create", "edit"],
subject: "members"
}
]),
childPermission: createMongoAbility([]),
expectValid: true,
missingPermissions: []
},
{
title: "Parent and child disjoint set",
parentPermission: createMongoAbility([
{
action: ["create", "edit", "delete", "read"],
subject: "secrets",
conditions: {
environment: { [PermissionConditionOperators.$EQ]: "dev" }
}
}
]),
childPermission: createMongoAbility([
{
action: ["create", "edit", "delete", "read"],
subject: "secrets",
conditions: {
secretPath: { [PermissionConditionOperators.$EQ]: "dev" }
}
}
]),
expectValid: false,
missingPermissions: ["create", "edit", "delete", "read"].map((el) => ({
action: el,
subject: "secrets",
conditions: {
secretPath: { [PermissionConditionOperators.$EQ]: "dev" }
}
}))
},
{
title: "Parent with inverted rules",
parentPermission: createMongoAbility([
{
action: ["create", "edit", "delete", "read"],
subject: "secrets",
conditions: {
environment: { [PermissionConditionOperators.$EQ]: "dev" }
}
},
{
action: "read",
subject: "secrets",
inverted: true,
conditions: {
environment: { [PermissionConditionOperators.$EQ]: "dev" },
secretPath: { [PermissionConditionOperators.$GLOB]: "/hello/**" }
}
}
]),
childPermission: createMongoAbility([
{
action: "read",
subject: "secrets",
conditions: {
environment: { [PermissionConditionOperators.$EQ]: "dev" },
secretPath: { [PermissionConditionOperators.$EQ]: "/" }
}
}
]),
expectValid: true,
missingPermissions: []
},
{
title: "Parent with inverted rules - child accessing invalid one",
parentPermission: createMongoAbility([
{
action: ["create", "edit", "delete", "read"],
subject: "secrets",
conditions: {
environment: { [PermissionConditionOperators.$EQ]: "dev" }
}
},
{
action: "read",
subject: "secrets",
inverted: true,
conditions: {
environment: { [PermissionConditionOperators.$EQ]: "dev" },
secretPath: { [PermissionConditionOperators.$GLOB]: "/hello/**" }
}
}
]),
childPermission: createMongoAbility([
{
action: "read",
subject: "secrets",
conditions: {
environment: { [PermissionConditionOperators.$EQ]: "dev" },
secretPath: { [PermissionConditionOperators.$EQ]: "/hello/world" }
}
}
]),
expectValid: false,
missingPermissions: [
{
action: "read",
subject: "secrets",
conditions: {
environment: { [PermissionConditionOperators.$EQ]: "dev" },
secretPath: { [PermissionConditionOperators.$EQ]: "/hello/world" }
}
}
]
}
])("Check permission: $title", ({ parentPermission, childPermission, expectValid, missingPermissions }) => {
const permissionBoundary = validatePermissionBoundary(parentPermission, childPermission);
if (expectValid) {
expect(permissionBoundary.isValid).toBeTruthy();
} else {
expect(permissionBoundary.isValid).toBeFalsy();
expect(permissionBoundary.missingPermissions).toEqual(expect.arrayContaining(missingPermissions));
}
});
});

describe("Validate Permission Boundary: Checking Parent $eq operator", () => {
const parentPermission = createMongoAbility([
{
action: ["create", "read"],
subject: "secrets",
conditions: {
environment: { [PermissionConditionOperators.$EQ]: "dev" }
}
}
]);

test.each([
{
operator: PermissionConditionOperators.$EQ,
childPermission: createMongoAbility([
{
action: ["create"],
subject: "secrets",
conditions: {
environment: { [PermissionConditionOperators.$EQ]: "dev" }
}
}
])
},
{
operator: PermissionConditionOperators.$IN,
childPermission: createMongoAbility([
{
action: ["create"],
subject: "secrets",
conditions: {
environment: { [PermissionConditionOperators.$IN]: ["dev"] }
}
}
])
},
{
operator: PermissionConditionOperators.$GLOB,
childPermission: createMongoAbility([
{
action: ["create"],
subject: "secrets",
conditions: {
environment: { [PermissionConditionOperators.$GLOB]: "dev" }
}
}
])
}
])("Child $operator truthy cases", ({ childPermission }) => {
const permissionBoundary = validatePermissionBoundary(parentPermission, childPermission);
expect(permissionBoundary.isValid).toBeTruthy();
});

test.each([
{
operator: PermissionConditionOperators.$EQ,
childPermission: createMongoAbility([
{
action: ["create"],
subject: "secrets",
conditions: {
environment: { [PermissionConditionOperators.$EQ]: "prod" }
}
}
])
},
{
operator: PermissionConditionOperators.$IN,
childPermission: createMongoAbility([
{
action: ["create"],
subject: "secrets",
conditions: {
environment: { [PermissionConditionOperators.$IN]: ["dev", "prod"] }
}
}
])
},
{
operator: PermissionConditionOperators.$GLOB,
childPermission: createMongoAbility([
{
action: ["create"],
subject: "secrets",
conditions: {
environment: { [PermissionConditionOperators.$GLOB]: "dev**" }
}
}
])
},
{
operator: PermissionConditionOperators.$NEQ,
childPermission: createMongoAbility([
{
action: ["create"],
subject: "secrets",
conditions: {
environment: { [PermissionConditionOperators.$GLOB]: "staging" }
}
}
])
}
])("Child $operator falsy cases", ({ childPermission }) => {
const permissionBoundary = validatePermissionBoundary(parentPermission, childPermission);
expect(permissionBoundary.isValid).toBeFalsy();
});
});

describe("Validate Permission Boundary: Checking Parent $neq operator", () => {
const parentPermission = createMongoAbility([
{
action: ["create", "read"],
subject: "secrets",
conditions: {
secretPath: { [PermissionConditionOperators.$NEQ]: "/hello" }
}
}
]);

test.each([
{
operator: PermissionConditionOperators.$EQ,
childPermission: createMongoAbility([
{
action: ["create"],
subject: "secrets",
conditions: {
secretPath: { [PermissionConditionOperators.$EQ]: "/" }
}
}
])
},
{
operator: PermissionConditionOperators.$NEQ,
childPermission: createMongoAbility([
{
action: ["create"],
subject: "secrets",
conditions: {
secretPath: { [PermissionConditionOperators.$NEQ]: "/hello" }
}
}
])
},
{
operator: PermissionConditionOperators.$IN,
childPermission: createMongoAbility([
{
action: ["create"],
subject: "secrets",
conditions: {
secretPath: { [PermissionConditionOperators.$IN]: ["/", "/staging"] }
}
}
])
},
{
operator: PermissionConditionOperators.$GLOB,
childPermission: createMongoAbility([
{
action: ["create"],
subject: "secrets",
conditions: {
secretPath: { [PermissionConditionOperators.$GLOB]: "/dev**" }
}
}
])
}
])("Child $operator truthy cases", ({ childPermission }) => {
const permissionBoundary = validatePermissionBoundary(parentPermission, childPermission);
expect(permissionBoundary.isValid).toBeTruthy();
});

test.each([
{
operator: PermissionConditionOperators.$EQ,
childPermission: createMongoAbility([
{
action: ["create"],
subject: "secrets",
conditions: {
secretPath: { [PermissionConditionOperators.$EQ]: "/hello" }
}
}
])
},
{
operator: PermissionConditionOperators.$NEQ,
childPermission: createMongoAbility([
{
action: ["create"],
subject: "secrets",
conditions: {
secretPath: { [PermissionConditionOperators.$NEQ]: "/" }
}
}
])
},
{
operator: PermissionConditionOperators.$IN,
childPermission: createMongoAbility([
{
action: ["create"],
subject: "secrets",
conditions: {
secretPath: { [PermissionConditionOperators.$IN]: ["/", "/hello"] }
}
}
])
},
{
operator: PermissionConditionOperators.$GLOB,
childPermission: createMongoAbility([
{
action: ["create"],
subject: "secrets",
conditions: {
secretPath: { [PermissionConditionOperators.$GLOB]: "/hello**" }
}
}
])
}
])("Child $operator falsy cases", ({ childPermission }) => {
const permissionBoundary = validatePermissionBoundary(parentPermission, childPermission);
expect(permissionBoundary.isValid).toBeFalsy();
});
});

describe("Validate Permission Boundary: Checking Parent $IN operator", () => {
const parentPermission = createMongoAbility([
{
action: ["edit"],
subject: "secrets",
conditions: {
environment: { [PermissionConditionOperators.$IN]: ["dev", "staging"] }
}
}
]);

test.each([
{
operator: PermissionConditionOperators.$EQ,
childPermission: createMongoAbility([
{
action: ["edit"],
subject: "secrets",
conditions: {
environment: { [PermissionConditionOperators.$EQ]: "dev" }
}
}
])
},
{
operator: PermissionConditionOperators.$IN,
childPermission: createMongoAbility([
{
action: ["edit"],
subject: "secrets",
conditions: {
environment: { [PermissionConditionOperators.$IN]: ["dev"] }
}
}
])
},
{
operator: `${PermissionConditionOperators.$IN} - 2`,
childPermission: createMongoAbility([
{
action: ["edit"],
subject: "secrets",
conditions: {
environment: { [PermissionConditionOperators.$IN]: ["dev", "staging"] }
}
}
])
},
{
operator: PermissionConditionOperators.$GLOB,
childPermission: createMongoAbility([
{
action: ["edit"],
subject: "secrets",
conditions: {
environment: { [PermissionConditionOperators.$GLOB]: "dev" }
}
}
])
}
])("Child $operator truthy cases", ({ childPermission }) => {
const permissionBoundary = validatePermissionBoundary(parentPermission, childPermission);
expect(permissionBoundary.isValid).toBeTruthy();
});

test.each([
{
operator: PermissionConditionOperators.$EQ,
childPermission: createMongoAbility([
{
action: ["edit"],
subject: "secrets",
conditions: {
environment: { [PermissionConditionOperators.$EQ]: "prod" }
}
}
])
},
{
operator: PermissionConditionOperators.$NEQ,
childPermission: createMongoAbility([
{
action: ["edit"],
subject: "secrets",
conditions: {
environment: { [PermissionConditionOperators.$NEQ]: "dev" }
}
}
])
},
{
operator: PermissionConditionOperators.$IN,
childPermission: createMongoAbility([
{
action: ["edit"],
subject: "secrets",
conditions: {
environment: { [PermissionConditionOperators.$IN]: ["dev", "prod"] }
}
}
])
},
{
operator: PermissionConditionOperators.$GLOB,
childPermission: createMongoAbility([
{
action: ["edit"],
subject: "secrets",
conditions: {
environment: { [PermissionConditionOperators.$GLOB]: "dev**" }
}
}
])
}
])("Child $operator falsy cases", ({ childPermission }) => {
const permissionBoundary = validatePermissionBoundary(parentPermission, childPermission);
expect(permissionBoundary.isValid).toBeFalsy();
});
});

describe("Validate Permission Boundary: Checking Parent $GLOB operator", () => {
const parentPermission = createMongoAbility([
{
action: ["create", "read"],
subject: "secrets",
conditions: {
secretPath: { [PermissionConditionOperators.$GLOB]: "/hello/**" }
}
}
]);

test.each([
{
operator: PermissionConditionOperators.$EQ,
childPermission: createMongoAbility([
{
action: ["create"],
subject: "secrets",
conditions: {
secretPath: { [PermissionConditionOperators.$EQ]: "/hello/world" }
}
}
])
},
{
operator: PermissionConditionOperators.$IN,
childPermission: createMongoAbility([
{
action: ["create"],
subject: "secrets",
conditions: {
secretPath: { [PermissionConditionOperators.$IN]: ["/hello/world", "/hello/world2"] }
}
}
])
},
{
operator: PermissionConditionOperators.$GLOB,
childPermission: createMongoAbility([
{
action: ["create"],
subject: "secrets",
conditions: {
secretPath: { [PermissionConditionOperators.$GLOB]: "/hello/**/world" }
}
}
])
}
])("Child $operator truthy cases", ({ childPermission }) => {
const permissionBoundary = validatePermissionBoundary(parentPermission, childPermission);
expect(permissionBoundary.isValid).toBeTruthy();
});

test.each([
{
operator: PermissionConditionOperators.$EQ,
childPermission: createMongoAbility([
{
action: ["create"],
subject: "secrets",
conditions: {
secretPath: { [PermissionConditionOperators.$EQ]: "/print" }
}
}
])
},
{
operator: PermissionConditionOperators.$NEQ,
childPermission: createMongoAbility([
{
action: ["create"],
subject: "secrets",
conditions: {
secretPath: { [PermissionConditionOperators.$NEQ]: "/hello/world" }
}
}
])
},
{
operator: PermissionConditionOperators.$IN,
childPermission: createMongoAbility([
{
action: ["create"],
subject: "secrets",
conditions: {
secretPath: { [PermissionConditionOperators.$IN]: ["/", "/hello"] }
}
}
])
},
{
operator: PermissionConditionOperators.$GLOB,
childPermission: createMongoAbility([
{
action: ["create"],
subject: "secrets",
conditions: {
secretPath: { [PermissionConditionOperators.$GLOB]: "/hello**" }
}
}
])
}
])("Child $operator falsy cases", ({ childPermission }) => {
const permissionBoundary = validatePermissionBoundary(parentPermission, childPermission);
expect(permissionBoundary.isValid).toBeFalsy();
});
});
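These cases rely only on the describe/test.each/expect globals, so, assuming the backend's existing vitest setup, the file can be run on its own with something like `npx vitest run src/lib/casl/boundary.test.ts` (adjust to whatever test script the repo actually defines).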
backend/src/lib/casl/boundary.ts (new file, 249 lines)
@@ -0,0 +1,249 @@
import { MongoAbility } from "@casl/ability";
import { MongoQuery } from "@ucast/mongo2js";
import picomatch from "picomatch";

import { PermissionConditionOperators } from "./index";

type TMissingPermission = {
action: string;
subject: string;
conditions?: MongoQuery;
};

type TPermissionConditionShape = {
[PermissionConditionOperators.$EQ]: string;
[PermissionConditionOperators.$NEQ]: string;
[PermissionConditionOperators.$GLOB]: string;
[PermissionConditionOperators.$IN]: string[];
};

const getPermissionSetID = (action: string, subject: string) => `${action}:${subject}`;
const invertTheOperation = (shouldInvert: boolean, operation: boolean) => (shouldInvert ? !operation : operation);
const formatConditionOperator = (condition: TPermissionConditionShape | string) => {
return (
typeof condition === "string" ? { [PermissionConditionOperators.$EQ]: condition } : condition
) as TPermissionConditionShape;
};

const isOperatorsASubset = (parentSet: TPermissionConditionShape, subset: TPermissionConditionShape) => {
// we compute each operator against each other in left hand side and right hand side
if (subset[PermissionConditionOperators.$EQ] || subset[PermissionConditionOperators.$NEQ]) {
const subsetOperatorValue = subset[PermissionConditionOperators.$EQ] || subset[PermissionConditionOperators.$NEQ];
const isInverted = !subset[PermissionConditionOperators.$EQ];
if (
parentSet[PermissionConditionOperators.$EQ] &&
invertTheOperation(isInverted, parentSet[PermissionConditionOperators.$EQ] !== subsetOperatorValue)
) {
return false;
}
if (
parentSet[PermissionConditionOperators.$NEQ] &&
invertTheOperation(isInverted, parentSet[PermissionConditionOperators.$NEQ] === subsetOperatorValue)
) {
return false;
}
if (
parentSet[PermissionConditionOperators.$IN] &&
invertTheOperation(isInverted, !parentSet[PermissionConditionOperators.$IN].includes(subsetOperatorValue))
) {
return false;
}
// ne and glob cannot match each other
if (parentSet[PermissionConditionOperators.$GLOB] && isInverted) {
return false;
}
if (
parentSet[PermissionConditionOperators.$GLOB] &&
!picomatch.isMatch(subsetOperatorValue, parentSet[PermissionConditionOperators.$GLOB], { strictSlashes: false })
) {
return false;
}
}
if (subset[PermissionConditionOperators.$IN]) {
const subsetOperatorValue = subset[PermissionConditionOperators.$IN];
if (
parentSet[PermissionConditionOperators.$EQ] &&
(subsetOperatorValue.length !== 1 || subsetOperatorValue[0] !== parentSet[PermissionConditionOperators.$EQ])
) {
return false;
}
if (
parentSet[PermissionConditionOperators.$NEQ] &&
subsetOperatorValue.includes(parentSet[PermissionConditionOperators.$NEQ])
) {
return false;
}
if (
parentSet[PermissionConditionOperators.$IN] &&
!subsetOperatorValue.every((el) => parentSet[PermissionConditionOperators.$IN].includes(el))
) {
return false;
}
if (
parentSet[PermissionConditionOperators.$GLOB] &&
!subsetOperatorValue.every((el) =>
picomatch.isMatch(el, parentSet[PermissionConditionOperators.$GLOB], {
strictSlashes: false
})
)
) {
return false;
}
}
if (subset[PermissionConditionOperators.$GLOB]) {
const subsetOperatorValue = subset[PermissionConditionOperators.$GLOB];
const { isGlob } = picomatch.scan(subsetOperatorValue);
// if it's glob, all other fixed operators would make this superset because glob is powerful. like eq
// example: $in [dev, prod] => glob: dev** could mean anything starting with dev: thus is bigger
if (
isGlob &&
Object.keys(parentSet).some(
(el) => el !== PermissionConditionOperators.$GLOB && el !== PermissionConditionOperators.$NEQ
)
) {
return false;
}

if (
parentSet[PermissionConditionOperators.$EQ] &&
parentSet[PermissionConditionOperators.$EQ] !== subsetOperatorValue
) {
return false;
}
if (
parentSet[PermissionConditionOperators.$NEQ] &&
picomatch.isMatch(parentSet[PermissionConditionOperators.$NEQ], subsetOperatorValue, {
strictSlashes: false
})
) {
return false;
}
// if parent set is IN, glob cannot be used for children - It's a bigger scope
if (
parentSet[PermissionConditionOperators.$IN] &&
!parentSet[PermissionConditionOperators.$IN].includes(subsetOperatorValue)
) {
return false;
}
if (
parentSet[PermissionConditionOperators.$GLOB] &&
!picomatch.isMatch(subsetOperatorValue, parentSet[PermissionConditionOperators.$GLOB], {
strictSlashes: false
})
) {
return false;
}
}
return true;
};

const isSubsetForSamePermissionSubjectAction = (
parentSetRules: ReturnType<MongoAbility["possibleRulesFor"]>,
subsetRules: ReturnType<MongoAbility["possibleRulesFor"]>,
appendToMissingPermission: (condition?: MongoQuery) => void
) => {
const isMissingConditionInParent = parentSetRules.every((el) => !el.conditions);
if (isMissingConditionInParent) return true;

// all subset rules must pass in comparison to parent rul
return subsetRules.every((subsetRule) => {
const subsetRuleConditions = subsetRule.conditions as Record<string, TPermissionConditionShape | string>;
// compare subset rule with all parent rules
const isSubsetOfNonInvertedParentSet = parentSetRules
.filter((el) => !el.inverted)
.some((parentSetRule) => {
// get conditions and iterate
const parentSetRuleConditions = parentSetRule?.conditions as Record<string, TPermissionConditionShape | string>;
if (!parentSetRuleConditions) return true;
return Object.keys(parentSetRuleConditions).every((parentConditionField) => {
// if parent condition is missing then it's never a subset
if (!subsetRuleConditions?.[parentConditionField]) return false;

// standardize the conditions plain string operator => $eq function
const parentRuleConditionOperators = formatConditionOperator(parentSetRuleConditions[parentConditionField]);
const selectedSubsetRuleCondition = subsetRuleConditions?.[parentConditionField];
const subsetRuleConditionOperators = formatConditionOperator(selectedSubsetRuleCondition);
return isOperatorsASubset(parentRuleConditionOperators, subsetRuleConditionOperators);
});
});

const invertedParentSetRules = parentSetRules.filter((el) => el.inverted);
const isNotSubsetOfInvertedParentSet = invertedParentSetRules.length
? !invertedParentSetRules.some((parentSetRule) => {
// get conditions and iterate
const parentSetRuleConditions = parentSetRule?.conditions as Record<
string,
TPermissionConditionShape | string
>;
if (!parentSetRuleConditions) return true;
return Object.keys(parentSetRuleConditions).every((parentConditionField) => {
// if parent condition is missing then it's never a subset
if (!subsetRuleConditions?.[parentConditionField]) return false;

// standardize the conditions plain string operator => $eq function
const parentRuleConditionOperators = formatConditionOperator(parentSetRuleConditions[parentConditionField]);
const selectedSubsetRuleCondition = subsetRuleConditions?.[parentConditionField];
const subsetRuleConditionOperators = formatConditionOperator(selectedSubsetRuleCondition);
return isOperatorsASubset(parentRuleConditionOperators, subsetRuleConditionOperators);
});
})
: true;
const isSubset = isSubsetOfNonInvertedParentSet && isNotSubsetOfInvertedParentSet;
if (!isSubset) {
appendToMissingPermission(subsetRule.conditions);
}
return isSubset;
});
};

export const validatePermissionBoundary = (parentSetPermissions: MongoAbility, subsetPermissions: MongoAbility) => {
const checkedPermissionRules = new Set<string>();
const missingPermissions: TMissingPermission[] = [];

subsetPermissions.rules.forEach((subsetPermissionRules) => {
const subsetPermissionSubject = subsetPermissionRules.subject.toString();
let subsetPermissionActions: string[] = [];

// actions can be string or string[]
if (typeof subsetPermissionRules.action === "string") {
subsetPermissionActions.push(subsetPermissionRules.action);
} else {
subsetPermissionRules.action.forEach((subsetPermissionAction) => {
subsetPermissionActions.push(subsetPermissionAction);
});
}

// if action is already processed ignore
subsetPermissionActions = subsetPermissionActions.filter(
(el) => !checkedPermissionRules.has(getPermissionSetID(el, subsetPermissionSubject))
);

if (!subsetPermissionActions.length) return;
subsetPermissionActions.forEach((subsetPermissionAction) => {
const parentSetRulesOfSubset = parentSetPermissions.possibleRulesFor(
subsetPermissionAction,
subsetPermissionSubject
);
const nonInveretedOnes = parentSetRulesOfSubset.filter((el) => !el.inverted);
if (!nonInveretedOnes.length) {
missingPermissions.push({ action: subsetPermissionAction, subject: subsetPermissionSubject });
return;
}

const subsetRules = subsetPermissions.possibleRulesFor(subsetPermissionAction, subsetPermissionSubject);
isSubsetForSamePermissionSubjectAction(parentSetRulesOfSubset, subsetRules, (conditions) => {
missingPermissions.push({ action: subsetPermissionAction, subject: subsetPermissionSubject, conditions });
});
});

subsetPermissionActions.forEach((el) =>
checkedPermissionRules.add(getPermissionSetID(el, subsetPermissionSubject))
);
});

if (missingPermissions.length) {
return { isValid: false as const, missingPermissions };
}

return { isValid: true };
};
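Pulling the new module and its tests together, a minimal usage sketch — the rules are borrowed from the test cases above, and the console.log is only illustrative:

import { createMongoAbility } from "@casl/ability";

import { PermissionConditionOperators } from "./index";
import { validatePermissionBoundary } from "./boundary";

// Parent allows create/edit on "secrets" only in dev or staging.
const parent = createMongoAbility([
  {
    action: ["create", "edit"],
    subject: "secrets",
    conditions: { environment: { [PermissionConditionOperators.$IN]: ["dev", "staging"] } }
  }
]);

// Child asks for create on "secrets" in prod, which the parent never granted.
const child = createMongoAbility([
  {
    action: ["create"],
    subject: "secrets",
    conditions: { environment: { [PermissionConditionOperators.$EQ]: "prod" } }
  }
]);

const boundary = validatePermissionBoundary(parent, child);
if (!boundary.isValid) {
  // The missing rules could, for example, be surfaced to callers via the new
  // `details` field added to ForbiddenRequestError further down in this diff.
  console.log(boundary.missingPermissions);
}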
@ -1,5 +1,5 @@
|
||||
/* eslint-disable @typescript-eslint/no-unsafe-assignment */
|
||||
import { buildMongoQueryMatcher, MongoAbility } from "@casl/ability";
|
||||
import { buildMongoQueryMatcher } from "@casl/ability";
|
||||
import { FieldCondition, FieldInstruction, JsInterpreter } from "@ucast/mongo2js";
|
||||
import picomatch from "picomatch";
|
||||
|
||||
@ -20,45 +20,8 @@ const glob: JsInterpreter<FieldCondition<string>> = (node, object, context) => {
|
||||
|
||||
export const conditionsMatcher = buildMongoQueryMatcher({ $glob }, { glob });
|
||||
|
||||
/**
|
||||
* Extracts and formats permissions from a CASL Ability object or a raw permission set.
|
||||
*/
|
||||
const extractPermissions = (ability: MongoAbility) => {
|
||||
const permissions: string[] = [];
|
||||
ability.rules.forEach((permission) => {
|
||||
if (typeof permission.action === "string") {
|
||||
permissions.push(`${permission.action}_${permission.subject as string}`);
|
||||
} else {
|
||||
permission.action.forEach((permissionAction) => {
|
||||
permissions.push(`${permissionAction}_${permission.subject as string}`);
|
||||
});
|
||||
}
|
||||
});
|
||||
return permissions;
|
||||
};
|
||||
|
||||
/**
|
||||
* Compares two sets of permissions to determine if the first set is at least as privileged as the second set.
|
||||
* The function checks if all permissions in the second set are contained within the first set and if the first set has equal or more permissions.
|
||||
*
|
||||
*/
|
||||
export const isAtLeastAsPrivileged = (permissions1: MongoAbility, permissions2: MongoAbility) => {
|
||||
const set1 = new Set(extractPermissions(permissions1));
|
||||
const set2 = new Set(extractPermissions(permissions2));
|
||||
|
||||
for (const perm of set2) {
|
||||
if (!set1.has(perm)) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
return set1.size >= set2.size;
|
||||
};
|
||||
|
||||
export enum PermissionConditionOperators {
|
||||
$IN = "$in",
|
||||
$ALL = "$all",
|
||||
$REGEX = "$regex",
|
||||
$EQ = "$eq",
|
||||
$NEQ = "$ne",
|
||||
$GLOB = "$glob"
|
||||
|
@ -56,6 +56,7 @@ const envSchema = z
|
||||
// TODO(akhilmhdh): will be changed to one
|
||||
ENCRYPTION_KEY: zpStr(z.string().optional()),
|
||||
ROOT_ENCRYPTION_KEY: zpStr(z.string().optional()),
|
||||
QUEUE_WORKERS_ENABLED: zodStrBool.default("true"),
|
||||
HTTPS_ENABLED: zodStrBool,
|
||||
// smtp options
|
||||
SMTP_HOST: zpStr(z.string().optional()),
|
||||
|
@@ -1,4 +1,5 @@
/* eslint-disable max-classes-per-file */

export class DatabaseError extends Error {
  name: string;

@@ -52,10 +53,18 @@ export class ForbiddenRequestError extends Error {

  error: unknown;

  constructor({ name, error, message }: { message?: string; name?: string; error?: unknown } = {}) {
  details?: unknown;

  constructor({
    name,
    error,
    message,
    details
  }: { message?: string; name?: string; error?: unknown; details?: unknown } = {}) {
    super(message ?? "You are not allowed to access this resource");
    this.name = name || "ForbiddenError";
    this.error = error;
    this.details = details;
  }
}
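ForbiddenRequestError now carries an optional details payload alongside error, so callers can attach structured context (for example, which permissions are missing) instead of encoding it into the message string. A small usage sketch; the error name and payload contents are assumptions:

// Hypothetical throw site inside a permission check.
throw new ForbiddenRequestError({
  name: "PermissionBoundaryError",
  message: "Failed to escalate privileges beyond the current identity",
  details: {
    missingPermissions: [{ action: "edit", subject: "secrets", conditions: { environment: "prod" } }]
  }
});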
@@ -2,7 +2,7 @@
import crypto from "node:crypto";
import net from "node:net";

import * as quic from "@infisical/quic";
import quicDefault, * as quicModule from "@infisical/quic";

import { BadRequestError } from "../errors";
import { logger } from "../logger";
@@ -10,6 +10,8 @@ import { logger } from "../logger";
const DEFAULT_MAX_RETRIES = 3;
const DEFAULT_RETRY_DELAY = 1000; // 1 second

const quic = quicDefault || quicModule;

const parseSubjectDetails = (data: string) => {
  const values: Record<string, string> = {};
  data.split("\n").forEach((el) => {
@@ -91,9 +93,11 @@ export const pingGatewayAndVerify = async ({
  let lastError: Error | null = null;
  const quicClient = await createQuicConnection(relayHost, relayPort, tlsOptions, identityId, orgId).catch((err) => {
    throw new BadRequestError({
      message: (err as Error)?.message,
      error: err as Error
    });
  });

  for (let attempt = 1; attempt <= maxRetries; attempt += 1) {
    try {
      const stream = quicClient.connection.newStream("bidi");
@@ -106,17 +110,13 @@
      const { value, done } = await reader.read();

      if (done) {
        throw new BadRequestError({
          message: "Gateway closed before receiving PONG"
        });
        throw new Error("Gateway closed before receiving PONG");
      }

      const response = Buffer.from(value).toString();

      if (response !== "PONG\n" && response !== "PONG") {
        throw new BadRequestError({
          message: `Failed to Ping. Unexpected response: ${response}`
        });
        throw new Error(`Failed to Ping. Unexpected response: ${response}`);
      }

      reader.releaseLock();
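Inside the retry loop the ping check now throws plain Errors; the connection setup above is the place that appears to translate failures into a caller-facing BadRequestError. A minimal sketch of that pattern under stated assumptions: pingOnce is a hypothetical stand-in for the loop body, which is not part of the hunk shown, and this is not the repository's actual loop:

const pingWithRetries = async (pingOnce: () => Promise<void>, maxRetries = DEFAULT_MAX_RETRIES) => {
  let lastError: Error | null = null;
  for (let attempt = 1; attempt <= maxRetries; attempt += 1) {
    try {
      // Internal failures stay as plain Errors so every attempt is retried the same way.
      await pingOnce();
      return true;
    } catch (err) {
      lastError = err as Error;
      if (attempt < maxRetries) {
        await new Promise((resolve) => {
          setTimeout(resolve, DEFAULT_RETRY_DELAY);
        });
      }
    }
  }
  // Only the outermost boundary converts the failure into an HTTP-facing error.
  throw new BadRequestError({ message: lastError?.message || "Gateway ping failed" });
};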
@@ -144,6 +144,7 @@ interface TProxyServer {
  server: net.Server;
  port: number;
  cleanup: () => Promise<void>;
  getProxyError: () => string;
}

const setupProxyServer = async ({
@@ -168,6 +169,7 @@
      error: err as Error
    });
  });
  const proxyErrorMsg = [""];

  return new Promise((resolve, reject) => {
    const server = net.createServer();
@@ -183,31 +185,33 @@
      const forwardWriter = stream.writable.getWriter();
      await forwardWriter.write(Buffer.from(`FORWARD-TCP ${targetHost}:${targetPort}\n`));
      forwardWriter.releaseLock();
      /* eslint-disable @typescript-eslint/no-misused-promises */

      // Set up bidirectional copy
      const setupCopy = async () => {
      const setupCopy = () => {
        // Client to QUIC
        // eslint-disable-next-line
        (async () => {
          try {
            const writer = stream.writable.getWriter();
            const writer = stream.writable.getWriter();

            // Create a handler for client data
            clientConn.on("data", async (chunk) => {
              await writer.write(chunk);
            // Create a handler for client data
            clientConn.on("data", (chunk) => {
              writer.write(chunk).catch((err) => {
                proxyErrorMsg.push((err as Error)?.message);
              });
            });

            // Handle client connection close
            clientConn.on("end", async () => {
              await writer.close();
            // Handle client connection close
            clientConn.on("end", () => {
              writer.close().catch((err) => {
                logger.error(err);
              });
            });

            clientConn.on("error", async (err) => {
              await writer.abort(err);
            clientConn.on("error", (clientConnErr) => {
              writer.abort(clientConnErr?.message).catch((err) => {
                proxyErrorMsg.push((err as Error)?.message);
              });
          } catch (err) {
            clientConn.destroy();
          }
            });
        })();

        // QUIC to Client
@@ -236,15 +240,18 @@
            }
          }
        } catch (err) {
          proxyErrorMsg.push((err as Error)?.message);
          clientConn.destroy();
        }
      })();
    };
    await setupCopy();
    //

    setupCopy();
    // Handle connection closure
    clientConn.on("close", async () => {
      await stream.destroy();
    clientConn.on("close", () => {
      stream.destroy().catch((err) => {
        proxyErrorMsg.push((err as Error)?.message);
      });
    });

    const cleanup = async () => {
@@ -252,13 +259,18 @@
      await stream.destroy();
    };

    clientConn.on("error", (err) => {
      logger.error(err, "Client socket error");
      void cleanup();
      reject(err);
    clientConn.on("error", (clientConnErr) => {
      logger.error(clientConnErr, "Client socket error");
      cleanup().catch((err) => {
        logger.error(err, "Client conn cleanup");
      });
    });

    clientConn.on("end", cleanup);
    clientConn.on("end", () => {
      cleanup().catch((err) => {
        logger.error(err, "Client conn end");
      });
    });
  } catch (err) {
    logger.error(err, "Failed to establish target connection:");
    clientConn.end();
@@ -270,12 +282,12 @@
    reject(err);
  });

  server.on("close", async () => {
    await quicClient?.destroy();
  server.on("close", () => {
    quicClient?.destroy().catch((err) => {
      logger.error(err, "Failed to destroy quic client");
    });
  });

  /* eslint-enable */

  server.listen(0, () => {
    const address = server.address();
    if (!address || typeof address === "string") {
@@ -291,7 +303,8 @@
      cleanup: async () => {
        server.close();
        await quicClient?.destroy();
      }
      },
      getProxyError: () => proxyErrorMsg.join(",")
    });
  });
});
@@ -314,7 +327,7 @@ export const withGatewayProxy = async (
  const { relayHost, relayPort, targetHost, targetPort, tlsOptions, identityId, orgId } = options;

  // Setup the proxy server
  const { port, cleanup } = await setupProxyServer({
  const { port, cleanup, getProxyError } = await setupProxyServer({
    targetHost,
    targetPort,
    relayPort,
@@ -328,8 +341,12 @@
    // Execute the callback with the allocated port
    await callback(port);
  } catch (err) {
    logger.error(err, "Failed to proxy");
    throw new BadRequestError({ message: (err as Error)?.message });
    const proxyErrorMessage = getProxyError();
    if (proxyErrorMessage) {
      logger.error(new Error(proxyErrorMessage), "Failed to proxy");
    }
    logger.error(err, "Failed to do gateway");
    throw new BadRequestError({ message: proxyErrorMessage || (err as Error)?.message });
  } finally {
    // Ensure cleanup happens regardless of success or failure
    await cleanup();
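Proxy-side failures are now accumulated in proxyErrorMsg and surfaced through getProxyError(), so the error thrown from withGatewayProxy can prefer the proxy's own message over the callback's. A usage sketch; the (callback, options) call shape, the tlsOptions shape, and the concrete values are assumptions, since the full signature is outside this hunk:

// Hypothetical: reach a Postgres instance that is only routable behind the gateway relay.
const checkPrivateDb = async (gatewayConnectionDetails: {
  relayHost: string;
  relayPort: number;
  tlsOptions: { ca: string; cert: string; key: string }; // assumed shape
  identityId: string;
  orgId: string;
}) =>
  withGatewayProxy(
    async (port) => {
      // The target is reachable on localhost:<port>; everything written here is
      // forwarded as "FORWARD-TCP <targetHost>:<targetPort>" over the QUIC stream.
      logger.info(`Gateway proxy listening on 127.0.0.1:${port}`);
    },
    {
      ...gatewayConnectionDetails,
      targetHost: "10.0.12.5", // assumed private address behind the gateway
      targetPort: 5432
    }
  );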
61  backend/src/lib/ip/ipRange.ts  Normal file
@@ -0,0 +1,61 @@
import { BlockList } from "node:net";

import { BadRequestError } from "../errors";
// Define BlockList instances for each range type
const ipv4RangeLists: Record<string, BlockList> = {
  unspecified: new BlockList(),
  broadcast: new BlockList(),
  multicast: new BlockList(),
  linkLocal: new BlockList(),
  loopback: new BlockList(),
  carrierGradeNat: new BlockList(),
  private: new BlockList(),
  reserved: new BlockList()
};

// Add IPv4 CIDR ranges to each BlockList
ipv4RangeLists.unspecified.addSubnet("0.0.0.0", 8);
ipv4RangeLists.broadcast.addAddress("255.255.255.255");
ipv4RangeLists.multicast.addSubnet("224.0.0.0", 4);
ipv4RangeLists.linkLocal.addSubnet("169.254.0.0", 16);
ipv4RangeLists.loopback.addSubnet("127.0.0.0", 8);
ipv4RangeLists.carrierGradeNat.addSubnet("100.64.0.0", 10);

// IPv4 Private ranges
ipv4RangeLists.private.addSubnet("10.0.0.0", 8);
ipv4RangeLists.private.addSubnet("172.16.0.0", 12);
ipv4RangeLists.private.addSubnet("192.168.0.0", 16);

// IPv4 Reserved ranges
ipv4RangeLists.reserved.addSubnet("192.0.0.0", 24);
ipv4RangeLists.reserved.addSubnet("192.0.2.0", 24);
ipv4RangeLists.reserved.addSubnet("192.88.99.0", 24);
ipv4RangeLists.reserved.addSubnet("198.18.0.0", 15);
ipv4RangeLists.reserved.addSubnet("198.51.100.0", 24);
ipv4RangeLists.reserved.addSubnet("203.0.113.0", 24);
ipv4RangeLists.reserved.addSubnet("240.0.0.0", 4);

/**
 * Checks if an IP address (IPv4) is private or public
 * inspired by: https://github.com/whitequark/ipaddr.js/blob/main/lib/ipaddr.js
 */
export const getIpRange = (ip: string): string => {
  try {
    const rangeLists = ipv4RangeLists;
    // Check each range type
    for (const rangeName in rangeLists) {
      if (Object.hasOwn(rangeLists, rangeName)) {
        if (rangeLists[rangeName].check(ip)) {
          return rangeName;
        }
      }
    }

    // If no range matched, it's a public address
    return "unicast";
  } catch (error) {
    throw new BadRequestError({ message: "Invalid IP address", error });
  }
};

export const isPrivateIp = (ip: string) => getIpRange(ip) !== "unicast";
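Usage sketch for the new helpers (the addresses below are illustrative values, not taken from the diff):

getIpRange("10.1.2.3");      // "private"
getIpRange("127.0.0.1");     // "loopback"
getIpRange("8.8.8.8");       // "unicast" (no special-purpose range matched)
isPrivateIp("192.168.1.10"); // true, since anything that is not "unicast" counts as private
isPrivateIp("1.1.1.1");      // false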
15  backend/src/lib/ms/index.ts  Normal file
@@ -0,0 +1,15 @@
import msFn, { StringValue } from "ms";

import { BadRequestError } from "../errors";

export const ms = (val: string) => {
  if (typeof val !== "string") {
    throw new BadRequestError({ message: `Date must be string` });
  }

  try {
    return msFn(val as StringValue);
  } catch {
    throw new BadRequestError({ message: `Invalid date format string: ${val}` });
  }
};
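A quick usage sketch of the wrapper (return values are those of the underlying ms package):

ms("1h");     // 3600000
ms("2 days"); // 172800000
// A non-string value smuggled in through a loosely typed payload is rejected
// up front with a BadRequestError instead of surfacing the ms() internals.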
34  backend/src/lib/template/dot-access.ts  Normal file
@@ -0,0 +1,34 @@
/**
 * Safely retrieves a value from a nested object using dot notation path
 */
export const getStringValueByDot = (
  obj: Record<string, unknown> | null | undefined,
  path: string,
  defaultValue?: string
): string | undefined => {
  // Handle null or undefined input
  if (!obj) {
    return defaultValue;
  }

  const parts = path.split(".");
  let current: unknown = obj;

  for (const part of parts) {
    const isObject = typeof current === "object" && !Array.isArray(current) && current !== null;
    if (!isObject) {
      return defaultValue;
    }
    if (!Object.hasOwn(current as object, part)) {
      // Check if the property exists as an own property
      return defaultValue;
    }
    current = (current as Record<string, unknown>)[part];
  }

  if (typeof current !== "string") {
    return defaultValue;
  }

  return current;
};
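Usage sketch for the accessor (the object literal is illustrative):

const identity = { metadata: { labels: { env: "prod" } }, name: "ci-runner" };

getStringValueByDot(identity, "metadata.labels.env");                 // "prod"
getStringValueByDot(identity, "metadata.labels.region", "us-east-1"); // "us-east-1" (fallback used)
getStringValueByDot(identity, "metadata.labels");                     // undefined (value is an object, not a string)
getStringValueByDot(null, "anything", "fallback");                    // "fallback"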
Some files were not shown because too many files have changed in this diff.