mirror of https://github.com/Infisical/infisical.git synced 2025-03-22 02:36:14 +00:00

Compare commits


1 Commit

Author SHA1 Message Date
925742daf7 fix: upgrade @aws-sdk/client-secrets-manager from 3.504.0 to 3.509.0
Snyk has created this PR to upgrade @aws-sdk/client-secrets-manager from 3.504.0 to 3.509.0.

See this package in npm:
https://www.npmjs.com/package/@aws-sdk/client-secrets-manager

See this project in Snyk:
https://app.snyk.io/org/maidul98/project/35057e82-ed7d-4e19-ba4d-719a42135cd6?utm_source=github&utm_medium=referral&page=upgrade-pr
2024-02-29 21:47:27 +00:00
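For context, @aws-sdk/client-secrets-manager is the AWS SDK v3 Secrets Manager client listed in the backend's dependencies. Below is a minimal TypeScript sketch of typical usage of the upgraded client; the region and secret name are illustrative placeholders, not values taken from this repository.

import { GetSecretValueCommand, SecretsManagerClient } from "@aws-sdk/client-secrets-manager";

// Placeholder region for illustration; a real deployment supplies its own configuration.
const client = new SecretsManagerClient({ region: "us-east-1" });

// Fetch a secret's string value by its identifier.
async function readSecret(secretId: string): Promise<string | undefined> {
  const response = await client.send(new GetSecretValueCommand({ SecretId: secretId }));
  return response.SecretString;
}

// Hypothetical secret name, used only to show the call shape.
void readSecret("example/app-secret").then((value) => console.log(value !== undefined));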
1089 changed files with 11934 additions and 44687 deletions
.env.example
.github
.gitignore
.goreleaser.yaml
.infisicalignore
Dockerfile.standalone-infisical
Makefile
README.md
backend
.eslintrc.js
e2e-test
package-lock.json
package.json
scripts
src
@types
db
ee
routes/v1
services
audit-log
dynamic-secret-lease
dynamic-secret
group
identity-project-additional-privilege
ldap-config
license
permission
project-user-additional-privilege
saml-config
scim
secret-approval-policy
secret-approval-request
secret-rotation
secret-scanning
secret-snapshot
trusted-ip
lib
queue
server
services
auth
group-project
identity-access-token
identity-project
identity-ua
identity
integration-auth
integration
org
project-bot
project-env
project-key
project-membership
project-role
project
secret-blind-index
secret-folder
secret-import
secret-tag
secret
service-token
smtp/templates
super-admin
telemetry
user-alias
user
webhook
tsup.config.js
cli
docker-compose.dev.yml
docs
api-reference
changelog
cli
documentation
images
agent
auth-methods
docker-swarm-secrets-complete.png
guides/microsoft-power-apps
integrations
organization-members.png
platform
access-controls
dynamic-secrets
groups
organization
project
scim/okta
secret-rotation/aws-iam
secret-versioning.png
secret-rotation
self-hosting
sso
infisical-agent
integrations
internals
mint.json
sdks
self-hosting
style.css
frontend
.eslintrc.js
.storybook
Dockerfile
package-lock.json
package.json
public/images/secretRotation
scripts
src
components
analytics
basic
context/Notifications
dashboard
features
integrations
navigation
notifications
permissions
signup
tags/CreateTagModal
utilities
v2
config
context
AuthContext
OrgPermissionContext
ProjectPermissionContext
ServerConfigContext
index.tsx
ee
helpers
hoc/withPermission
hooks
i18n.ts
layouts
AdminLayout
AppLayout
lib/fn
pages
reactQuery.ts
services
styles
views
IntegrationsPage
Login
Org
Project
SecretApprovalPage/components
SecretMainPage
SecretOverviewPage
SecretRotationPage
SecretScanning/components
Settings
BillingSettingsPage
OrgSettingsPage
PersonalSettingsPage
ProjectSettingsPage
Signup
admin
DashboardPage
SignUpPage
SignUpPage.tsx
components/DownloadBackupKeys
helm-charts
k8-operator
package-lock.json
package.json
pg-migrator/src/models/integration
sink

@@ -3,6 +3,9 @@
# THIS IS A SAMPLE ENCRYPTION KEY AND SHOULD NEVER BE USED FOR PRODUCTION
ENCRYPTION_KEY=6c1fe4e407b8911c104518103505b218
# Required
DB_CONNECTION_URI=postgres://infisical:infisical@db:5432/infisical
# JWT
# Required secrets to sign JWT tokens
# THIS IS A SAMPLE AUTH_SECRET KEY AND SHOULD NEVER BE USED FOR PRODUCTION
@@ -13,9 +16,6 @@ POSTGRES_PASSWORD=infisical
POSTGRES_USER=infisical
POSTGRES_DB=infisical
# Required
DB_CONNECTION_URI=postgres://${POSTGRES_USER}:${POSTGRES_PASSWORD}@db:5432/${POSTGRES_DB}
# Redis
REDIS_URL=redis://redis:6379

@@ -1,190 +0,0 @@
# inspired by https://www.photoroom.com/inside-photoroom/how-we-automated-our-changelog-thanks-to-chatgpt
import os
import requests
import re
from openai import OpenAI
import subprocess
from datetime import datetime
import uuid
# Constants
REPO_OWNER = "infisical"
REPO_NAME = "infisical"
TOKEN = os.environ["GITHUB_TOKEN"]
SLACK_WEBHOOK_URL = os.environ["SLACK_WEBHOOK_URL"]
OPENAI_API_KEY = os.environ["OPENAI_API_KEY"]
SLACK_MSG_COLOR = "#36a64f"
headers = {
"Authorization": f"Bearer {TOKEN}",
"Accept": "application/vnd.github+json",
"X-GitHub-Api-Version": "2022-11-28",
}
def set_multiline_output(name, value):
with open(os.environ['GITHUB_OUTPUT'], 'a') as fh:
delimiter = uuid.uuid1()
print(f'{name}<<{delimiter}', file=fh)
print(value, file=fh)
print(delimiter, file=fh)
def post_changelog_to_slack(changelog, tag):
slack_payload = {
"text": "Hey team, it's changelog time! :wave:",
"attachments": [
{
"color": SLACK_MSG_COLOR,
"title": f"🗓Infisical Changelog - {tag}",
"text": changelog,
}
],
}
response = requests.post(SLACK_WEBHOOK_URL, json=slack_payload)
if response.status_code != 200:
raise Exception("Failed to post changelog to Slack.")
def find_previous_release_tag(release_tag:str):
previous_tag = subprocess.check_output(["git", "describe", "--tags", "--abbrev=0", f"{release_tag}^"]).decode("utf-8").strip()
while not(previous_tag.startswith("infisical/")):
previous_tag = subprocess.check_output(["git", "describe", "--tags", "--abbrev=0", f"{previous_tag}^"]).decode("utf-8").strip()
return previous_tag
def get_tag_creation_date(tag_name):
url = f"https://api.github.com/repos/{REPO_OWNER}/{REPO_NAME}/git/refs/tags/{tag_name}"
response = requests.get(url, headers=headers)
response.raise_for_status()
commit_sha = response.json()['object']['sha']
commit_url = f"https://api.github.com/repos/{REPO_OWNER}/{REPO_NAME}/commits/{commit_sha}"
commit_response = requests.get(commit_url, headers=headers)
commit_response.raise_for_status()
creation_date = commit_response.json()['commit']['author']['date']
return datetime.strptime(creation_date, '%Y-%m-%dT%H:%M:%SZ')
def fetch_prs_between_tags(previous_tag_date:datetime, release_tag_date:datetime):
# Use GitHub API to fetch PRs merged between the commits
url = f"https://api.github.com/repos/{REPO_OWNER}/{REPO_NAME}/pulls?state=closed&merged=true"
response = requests.get(url, headers=headers)
if response.status_code != 200:
raise Exception("Error fetching PRs from GitHub API!")
prs = []
for pr in response.json():
# the idea is as tags happen recently we get last 100 closed PRs and then filter by tag creation date
if pr["merged_at"] and datetime.strptime(pr["merged_at"],'%Y-%m-%dT%H:%M:%SZ') < release_tag_date and datetime.strptime(pr["merged_at"],'%Y-%m-%dT%H:%M:%SZ') > previous_tag_date:
prs.append(pr)
return prs
def extract_commit_details_from_prs(prs):
commit_details = []
for pr in prs:
commit_message = pr["title"]
commit_url = pr["html_url"]
pr_number = pr["number"]
branch_name = pr["head"]["ref"]
issue_numbers = re.findall(r"(www-\d+|web-\d+)", branch_name)
# If no issue numbers are found, add the PR details without issue numbers and URLs
if not issue_numbers:
commit_details.append(
{
"message": commit_message,
"pr_number": pr_number,
"pr_url": commit_url,
"issue_number": None,
"issue_url": None,
}
)
continue
for issue in issue_numbers:
commit_details.append(
{
"message": commit_message,
"pr_number": pr_number,
"pr_url": commit_url,
"issue_number": issue,
}
)
return commit_details
# Function to generate changelog using OpenAI
def generate_changelog_with_openai(commit_details):
commit_messages = []
for details in commit_details:
base_message = f"{details['pr_url']} - {details['message']}"
# Add the issue URL if available
# if details["issue_url"]:
# base_message += f" (Linear Issue: {details['issue_url']})"
commit_messages.append(base_message)
commit_list = "\n".join(commit_messages)
prompt = """
Generate a changelog for Infisical, opensource secretops
The changelog should:
1. Be Informative: Using the provided list of GitHub commits, break them down into categories such as Features, Fixes & Improvements, and Technical Updates. Summarize each commit concisely, ensuring the key points are highlighted.
2. Have a Professional yet Friendly tone: The tone should be balanced, not too corporate or too informal.
3. Celebratory Introduction and Conclusion: Start the changelog with a celebratory note acknowledging the team's hard work and progress. End with a shoutout to the team and wishes for a pleasant weekend.
4. Formatting: you cannot use Markdown formatting, and you can only use emojis for the introductory paragraph or the conclusion paragraph, nowhere else.
5. Links: the syntax to create links is the following: `<http://www.example.com|This message is a link>`.
6. Linear Links: note that the Linear link is optional, include it only if provided.
7. Do not wrap your answer in a codeblock. Just output the text, nothing else
Here's a good example to follow, please try to match the formatting as closely as possible, only changing the content of the changelog and have some liberty with the introduction. Notice the importance of the formatting of a changelog item:
- <https://github.com/facebook/react/pull/27304/%7C#27304>: We optimize our ci to strip comments and minify production builds. (<https://linear.app/example/issue/WEB-1234/%7CWEB-1234>))
And here's an example of the full changelog:
*Features*
• <https://github.com/facebook/react/pull/27304/%7C#27304>: We optimize our ci to strip comments and minify production builds. (<https://linear.app/example/issue/WEB-1234/%7CWEB-1234>)
*Fixes & Improvements*
• <https://github.com/facebook/react/pull/27304/%7C#27304>: We optimize our ci to strip comments and minify production builds. (<https://linear.app/example/issue/WEB-1234/%7CWEB-1234>)
*Technical Updates*
• <https://github.com/facebook/react/pull/27304/%7C#27304>: We optimize our ci to strip comments and minify production builds. (<https://linear.app/example/issue/WEB-1234/%7CWEB-1234>)
Stay tuned for more exciting updates coming soon!
And here are the commits:
{}
""".format(
commit_list
)
client = OpenAI(api_key=OPENAI_API_KEY)
messages = [{"role": "user", "content": prompt}]
response = client.chat.completions.create(model="gpt-3.5-turbo", messages=messages)
if "error" in response.choices[0].message:
raise Exception("Error generating changelog with OpenAI!")
return response.choices[0].message.content.strip()
if __name__ == "__main__":
try:
# Get the latest and previous release tags
latest_tag = subprocess.check_output(["git", "describe", "--tags", "--abbrev=0"]).decode("utf-8").strip()
previous_tag = find_previous_release_tag(latest_tag)
latest_tag_date = get_tag_creation_date(latest_tag)
previous_tag_date = get_tag_creation_date(previous_tag)
prs = fetch_prs_between_tags(previous_tag_date,latest_tag_date)
pr_details = extract_commit_details_from_prs(prs)
# Generate changelog
changelog = generate_changelog_with_openai(pr_details)
post_changelog_to_slack(changelog,latest_tag)
# Print or post changelog to Slack
# set_multiline_output("changelog", changelog)
except Exception as e:
print(str(e))

.github/values.yaml (vendored, 2 changed lines)

@@ -27,7 +27,7 @@ infisical:
deploymentAnnotations:
secrets.infisical.com/auto-reload: "true"
kubeSecretRef: "managed-secret"
kubeSecretRef: "infisical-gamma-secrets"
ingress:
## @param ingress.enabled Enable ingress

@@ -41,7 +41,6 @@ jobs:
load: true
context: backend
tags: infisical/infisical:test
platforms: linux/amd64,linux/arm64
- name: ⏻ Spawn backend container and dependencies
run: |
docker compose -f .github/resources/docker-compose.be-test.yml up --wait --quiet-pull
@@ -93,7 +92,6 @@ jobs:
project: 64mmf0n610
context: frontend
tags: infisical/frontend:test
platforms: linux/amd64,linux/arm64
build-args: |
POSTHOG_API_KEY=${{ secrets.PUBLIC_POSTHOG_API_KEY }}
NEXT_INFISICAL_PLATFORM_VERSION=${{ steps.extract_version.outputs.version }}

@@ -1,38 +0,0 @@
name: Build patroni
on: [workflow_dispatch]
jobs:
patroni-image:
name: Build patroni
runs-on: ubuntu-latest
steps:
- name: ☁️ Checkout source
uses: actions/checkout@v3
with:
repository: 'zalando/patroni'
- name: Save commit hashes for tag
id: commit
uses: pr-mpt/actions-commit-hash@v2
- name: 🔧 Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- name: 🐋 Login to Docker Hub
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Set up Depot CLI
uses: depot/setup-action@v1
- name: 🏗️ Build backend and push to docker hub
uses: depot/build-push-action@v1
with:
project: 64mmf0n610
token: ${{ secrets.DEPOT_PROJECT_TOKEN }}
push: true
context: .
file: Dockerfile
tags: |
infisical/patroni:${{ steps.commit.outputs.short }}
infisical/patroni:latest
platforms: linux/amd64,linux/arm64

@@ -1,140 +0,0 @@
name: Deployment pipeline
on: [workflow_dispatch]
permissions:
id-token: write
contents: read
jobs:
infisical-image:
name: Build backend image
runs-on: ubuntu-latest
steps:
- name: ☁️ Checkout source
uses: actions/checkout@v3
- name: 📦 Install dependencies to test all dependencies
run: npm ci --only-production
working-directory: backend
- name: Save commit hashes for tag
id: commit
uses: pr-mpt/actions-commit-hash@v2
- name: 🔧 Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- name: 🐋 Login to Docker Hub
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Set up Depot CLI
uses: depot/setup-action@v1
- name: 🏗️ Build backend and push to docker hub
uses: depot/build-push-action@v1
with:
project: 64mmf0n610
token: ${{ secrets.DEPOT_PROJECT_TOKEN }}
push: true
context: .
file: Dockerfile.standalone-infisical
tags: |
infisical/staging_infisical:${{ steps.commit.outputs.short }}
infisical/staging_infisical:latest
platforms: linux/amd64,linux/arm64
build-args: |
POSTHOG_API_KEY=${{ secrets.PUBLIC_POSTHOG_API_KEY }}
INFISICAL_PLATFORM_VERSION=${{ steps.commit.outputs.short }}
gamma-deployment:
name: Deploy to gamma
runs-on: ubuntu-latest
needs: [infisical-image]
environment:
name: Gamma
steps:
- name: Checkout code
uses: actions/checkout@v2
- name: Setup Node.js environment
uses: actions/setup-node@v2
with:
node-version: "20"
- name: Change directory to backend and install dependencies
env:
DB_CONNECTION_URI: ${{ secrets.DB_CONNECTION_URI }}
run: |
cd backend
npm install
npm run migration:latest
- name: Configure AWS Credentials
uses: aws-actions/configure-aws-credentials@v4
with:
audience: sts.amazonaws.com
aws-region: us-east-1
role-to-assume: arn:aws:iam::905418227878:role/deploy-new-ecs-img
- name: Save commit hashes for tag
id: commit
uses: pr-mpt/actions-commit-hash@v2
- name: Download task definition
run: |
aws ecs describe-task-definition --task-definition infisical-prod-platform --query taskDefinition > task-definition.json
- name: Render Amazon ECS task definition
id: render-web-container
uses: aws-actions/amazon-ecs-render-task-definition@v1
with:
task-definition: task-definition.json
container-name: infisical-prod-platform
image: infisical/staging_infisical:${{ steps.commit.outputs.short }}
environment-variables: "LOG_LEVEL=info"
- name: Deploy to Amazon ECS service
uses: aws-actions/amazon-ecs-deploy-task-definition@v1
with:
task-definition: ${{ steps.render-web-container.outputs.task-definition }}
service: infisical-prod-platform
cluster: infisical-prod-platform
wait-for-service-stability: true
production-postgres-deployment:
name: Deploy to production
runs-on: ubuntu-latest
needs: [gamma-deployment]
environment:
name: Production
steps:
- name: Checkout code
uses: actions/checkout@v2
- name: Setup Node.js environment
uses: actions/setup-node@v2
with:
node-version: "20"
- name: Change directory to backend and install dependencies
env:
DB_CONNECTION_URI: ${{ secrets.DB_CONNECTION_URI }}
run: |
cd backend
npm install
npm run migration:latest
- name: Configure AWS Credentials
uses: aws-actions/configure-aws-credentials@v4
with:
audience: sts.amazonaws.com
aws-region: us-east-1
role-to-assume: arn:aws:iam::381492033652:role/gha-make-prod-deployment
- name: Save commit hashes for tag
id: commit
uses: pr-mpt/actions-commit-hash@v2
- name: Download task definition
run: |
aws ecs describe-task-definition --task-definition infisical-prod-platform --query taskDefinition > task-definition.json
- name: Render Amazon ECS task definition
id: render-web-container
uses: aws-actions/amazon-ecs-render-task-definition@v1
with:
task-definition: task-definition.json
container-name: infisical-prod-platform
image: infisical/staging_infisical:${{ steps.commit.outputs.short }}
environment-variables: "LOG_LEVEL=info"
- name: Deploy to Amazon ECS service
uses: aws-actions/amazon-ecs-deploy-task-definition@v1
with:
task-definition: ${{ steps.render-web-container.outputs.task-definition }}
service: infisical-prod-platform
cluster: infisical-prod-platform
wait-for-service-stability: true

@@ -0,0 +1,120 @@
name: Build, Publish and Deploy to Gamma
on: [workflow_dispatch]
jobs:
infisical-image:
name: Build backend image
runs-on: ubuntu-latest
steps:
- name: ☁️ Checkout source
uses: actions/checkout@v3
- name: 📦 Install dependencies to test all dependencies
run: npm ci --only-production
working-directory: backend
# - name: 🧪 Run tests
# run: npm run test:ci
# working-directory: backend
- name: Save commit hashes for tag
id: commit
uses: pr-mpt/actions-commit-hash@v2
- name: 🔧 Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- name: 🐋 Login to Docker Hub
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Set up Depot CLI
uses: depot/setup-action@v1
- name: 📦 Build backend and export to Docker
uses: depot/build-push-action@v1
with:
project: 64mmf0n610
token: ${{ secrets.DEPOT_PROJECT_TOKEN }}
load: true
context: .
file: Dockerfile.standalone-infisical
tags: infisical/infisical:test
# - name: ⏻ Spawn backend container and dependencies
# run: |
# docker compose -f .github/resources/docker-compose.be-test.yml up --wait --quiet-pull
# - name: 🧪 Test backend image
# run: |
# ./.github/resources/healthcheck.sh infisical-backend-test
# - name: ⏻ Shut down backend container and dependencies
# run: |
# docker compose -f .github/resources/docker-compose.be-test.yml down
- name: 🏗️ Build backend and push
uses: depot/build-push-action@v1
with:
project: 64mmf0n610
token: ${{ secrets.DEPOT_PROJECT_TOKEN }}
push: true
context: .
file: Dockerfile.standalone-infisical
tags: |
infisical/staging_infisical:${{ steps.commit.outputs.short }}
infisical/staging_infisical:latest
platforms: linux/amd64,linux/arm64
build-args: |
POSTHOG_API_KEY=${{ secrets.PUBLIC_POSTHOG_API_KEY }}
INFISICAL_PLATFORM_VERSION=${{ steps.extract_version.outputs.version }}
postgres-migration:
name: Run latest migration files
runs-on: ubuntu-latest
needs: [infisical-image]
steps:
- name: Checkout code
uses: actions/checkout@v2
- name: Setup Node.js environment
uses: actions/setup-node@v2
with:
node-version: "20"
- name: Change directory to backend and install dependencies
env:
DB_CONNECTION_URI: ${{ secrets.DB_CONNECTION_URI }}
run: |
cd backend
npm install
npm run migration:latest
# - name: Run postgres DB migration files
# env:
# DB_CONNECTION_URI: ${{ secrets.DB_CONNECTION_URI }}
# run: npm run migration:latest
gamma-deployment:
name: Deploy to gamma
runs-on: ubuntu-latest
needs: [postgres-migration]
steps:
- name: ☁️ Checkout source
uses: actions/checkout@v3
- name: Install Helm
uses: azure/setup-helm@v3
with:
version: v3.10.0
- name: Install infisical helm chart
run: |
helm repo add infisical-helm-charts 'https://dl.cloudsmith.io/public/infisical/helm-charts/helm/charts/'
helm repo update
- name: Install kubectl
uses: azure/setup-kubectl@v3
- name: Install doctl
uses: digitalocean/action-doctl@v2
with:
token: ${{ secrets.DIGITALOCEAN_ACCESS_TOKEN }}
- name: Save DigitalOcean kubeconfig with short-lived credentials
run: doctl kubernetes cluster kubeconfig save --expiry-seconds 600 infisical-gamma-postgres
- name: switch to gamma namespace
run: kubectl config set-context --current --namespace=gamma
- name: test kubectl
run: kubectl get ingress
- name: Download helm values to file and upgrade gamma deploy
run: |
wget https://raw.githubusercontent.com/Infisical/infisical/main/.github/values.yaml
helm upgrade infisical infisical-helm-charts/infisical-standalone --values values.yaml --wait --install
if [[ $(helm status infisical) == *"FAILED"* ]]; then
echo "Helm upgrade failed"
exit 1
else
echo "Helm upgrade was successful"
fi

@@ -5,7 +5,6 @@ on:
types: [opened, synchronize]
paths:
- "backend/src/server/routes/**"
- "backend/src/ee/routes/**"
jobs:
check-be-api-changes:

@@ -1,34 +0,0 @@
name: Generate Changelog
permissions:
contents: write
on:
workflow_dispatch:
push:
tags:
- "infisical/v*.*.*-postgres"
jobs:
generate_changelog:
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v4
with:
fetch-tags: true
fetch-depth: 0
- name: Set up Python
uses: actions/setup-python@v5
with:
python-version: "3.12.0"
- name: Install dependencies
run: |
python -m pip install --upgrade pip
pip install requests openai
- name: Generate Changelog and Post to Slack
id: gen-changelog
run: python .github/resources/changelog-generator.py
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}

@@ -1,64 +1,58 @@
name: Build and release CLI
on:
push:
# run only against tags
tags:
- "infisical-cli/v*.*.*"
push:
# run only against tags
tags:
- "infisical-cli/v*.*.*"
permissions:
contents: write
# packages: write
# issues: write
jobs:
cli-integration-tests:
name: Run tests before deployment
uses: ./.github/workflows/run-cli-tests.yml
contents: write
# packages: write
# issues: write
goreleaser:
runs-on: ubuntu-20.04
needs: [cli-integration-tests]
steps:
- uses: actions/checkout@v3
with:
fetch-depth: 0
- name: 🐋 Login to Docker Hub
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: 🔧 Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- run: git fetch --force --tags
- run: echo "Ref name ${{github.ref_name}}"
- uses: actions/setup-go@v3
with:
go-version: ">=1.19.3"
cache: true
cache-dependency-path: cli/go.sum
- name: libssl1.1 => libssl1.0-dev for OSXCross
run: |
echo 'deb http://security.ubuntu.com/ubuntu bionic-security main' | sudo tee -a /etc/apt/sources.list
sudo apt update && apt-cache policy libssl1.0-dev
sudo apt-get install libssl1.0-dev
- name: OSXCross for CGO Support
run: |
mkdir ../../osxcross
git clone https://github.com/plentico/osxcross-target.git ../../osxcross/target
- uses: goreleaser/goreleaser-action@v4
with:
distribution: goreleaser-pro
version: latest
args: release --clean
env:
GITHUB_TOKEN: ${{ secrets.GO_RELEASER_GITHUB_TOKEN }}
POSTHOG_API_KEY_FOR_CLI: ${{ secrets.POSTHOG_API_KEY_FOR_CLI }}
FURY_TOKEN: ${{ secrets.FURYPUSHTOKEN }}
AUR_KEY: ${{ secrets.AUR_KEY }}
GORELEASER_KEY: ${{ secrets.GORELEASER_KEY }}
- uses: actions/setup-python@v4
- run: pip install --upgrade cloudsmith-cli
- name: Publish to CloudSmith
run: sh cli/upload_to_cloudsmith.sh
env:
CLOUDSMITH_API_KEY: ${{ secrets.CLOUDSMITH_API_KEY }}
jobs:
goreleaser:
runs-on: ubuntu-20.04
steps:
- uses: actions/checkout@v3
with:
fetch-depth: 0
- name: 🐋 Login to Docker Hub
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- run: git fetch --force --tags
- run: echo "Ref name ${{github.ref_name}}"
- uses: actions/setup-go@v3
with:
go-version: ">=1.19.3"
cache: true
cache-dependency-path: cli/go.sum
- name: libssl1.1 => libssl1.0-dev for OSXCross
run: |
echo 'deb http://security.ubuntu.com/ubuntu bionic-security main' | sudo tee -a /etc/apt/sources.list
sudo apt update && apt-cache policy libssl1.0-dev
sudo apt-get install libssl1.0-dev
- name: OSXCross for CGO Support
run: |
mkdir ../../osxcross
git clone https://github.com/plentico/osxcross-target.git ../../osxcross/target
- uses: goreleaser/goreleaser-action@v4
with:
distribution: goreleaser-pro
version: latest
args: release --clean
env:
GITHUB_TOKEN: ${{ secrets.GO_RELEASER_GITHUB_TOKEN }}
POSTHOG_API_KEY_FOR_CLI: ${{ secrets.POSTHOG_API_KEY_FOR_CLI }}
FURY_TOKEN: ${{ secrets.FURYPUSHTOKEN }}
AUR_KEY: ${{ secrets.AUR_KEY }}
GORELEASER_KEY: ${{ secrets.GORELEASER_KEY }}
- uses: actions/setup-python@v4
- run: pip install --upgrade cloudsmith-cli
- name: Publish to CloudSmith
run: sh cli/upload_to_cloudsmith.sh
env:
CLOUDSMITH_API_KEY: ${{ secrets.CLOUDSMITH_API_KEY }}

@@ -1,34 +0,0 @@
name: Go CLI Tests
on:
pull_request:
types: [opened, synchronize]
paths:
- "cli/**"
workflow_call:
jobs:
test:
defaults:
run:
working-directory: ./cli
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Setup Go
uses: actions/setup-go@v4
with:
go-version: "1.21.x"
- name: Install dependencies
run: go get .
- name: Test with the Go CLI
env:
CLI_TESTS_UA_CLIENT_ID: ${{ secrets.CLI_TESTS_UA_CLIENT_ID }}
CLI_TESTS_UA_CLIENT_SECRET: ${{ secrets.CLI_TESTS_UA_CLIENT_SECRET }}
CLI_TESTS_SERVICE_TOKEN: ${{ secrets.CLI_TESTS_SERVICE_TOKEN }}
CLI_TESTS_PROJECT_ID: ${{ secrets.CLI_TESTS_PROJECT_ID }}
CLI_TESTS_ENV_SLUG: ${{ secrets.CLI_TESTS_ENV_SLUG }}
run: go test -v -count=1 ./test

.gitignore (vendored, 4 changed lines)

@@ -59,13 +59,9 @@ yarn-error.log*
# Infisical init
.infisical.json
.infisicalignore
# Editor specific
.vscode/*
frontend-build
*.tgz
cli/infisical-merge
cli/test/infisical-merge

@@ -190,34 +190,10 @@ dockers:
- dockerfile: docker/alpine
goos: linux
goarch: amd64
use: buildx
ids:
- all-other-builds
image_templates:
- "infisical/cli:{{ .Major }}.{{ .Minor }}.{{ .Patch }}-amd64"
- "infisical/cli:latest-amd64"
build_flag_templates:
- "--pull"
- "--platform=linux/amd64"
- dockerfile: docker/alpine
goos: linux
goarch: amd64
use: buildx
ids:
- all-other-builds
image_templates:
- "infisical/cli:{{ .Major }}.{{ .Minor }}.{{ .Patch }}-arm64"
- "infisical/cli:latest-arm64"
build_flag_templates:
- "--pull"
- "--platform=linux/arm64"
docker_manifests:
- name_template: "infisical/cli:{{ .Major }}.{{ .Minor }}.{{ .Patch }}"
image_templates:
- "infisical/cli:{{ .Major }}.{{ .Minor }}.{{ .Patch }}-amd64"
- "infisical/cli:{{ .Major }}.{{ .Minor }}.{{ .Patch }}-arm64"
- name_template: "infisical/cli:latest"
image_templates:
- "infisical/cli:latest-amd64"
- "infisical/cli:latest-arm64"
- "infisical/cli:{{ .Version }}"
- "infisical/cli:{{ .Major }}.{{ .Minor }}"
- "infisical/cli:{{ .Major }}"
- "infisical/cli:latest"

@@ -1,5 +1 @@
.github/resources/docker-compose.be-test.yml:generic-api-key:16
frontend/src/views/Project/MembersPage/components/IdentityTab/components/IdentityRoleForm/IdentityRbacSection.tsx:generic-api-key:206
frontend/src/views/Project/MembersPage/components/IdentityTab/components/IdentityRoleForm/SpecificPrivilegeSection.tsx:generic-api-key:304
frontend/src/views/Project/MembersPage/components/MemberListTab/MemberRoleForm/MemberRbacSection.tsx:generic-api-key:206
frontend/src/views/Project/MembersPage/components/MemberListTab/MemberRoleForm/SpecificPrivilegeSection.tsx:generic-api-key:292

@@ -1,7 +1,6 @@
ARG POSTHOG_HOST=https://app.posthog.com
ARG POSTHOG_API_KEY=posthog-api-key
ARG INTERCOM_ID=intercom-id
ARG SAML_ORG_SLUG=saml-org-slug-default
FROM node:20-alpine AS base
@@ -36,8 +35,6 @@ ARG INTERCOM_ID
ENV NEXT_PUBLIC_INTERCOM_ID $INTERCOM_ID
ARG INFISICAL_PLATFORM_VERSION
ENV NEXT_PUBLIC_INFISICAL_PLATFORM_VERSION $INFISICAL_PLATFORM_VERSION
ARG SAML_ORG_SLUG
ENV NEXT_PUBLIC_SAML_ORG_SLUG=$SAML_ORG_SLUG
# Build
RUN npm run build
@@ -103,9 +100,6 @@ ENV NEXT_PUBLIC_POSTHOG_API_KEY=$POSTHOG_API_KEY \
ARG INTERCOM_ID=intercom-id
ENV NEXT_PUBLIC_INTERCOM_ID=$INTERCOM_ID \
BAKED_NEXT_PUBLIC_INTERCOM_ID=$INTERCOM_ID
ARG SAML_ORG_SLUG
ENV NEXT_PUBLIC_SAML_ORG_SLUG=$SAML_ORG_SLUG \
BAKED_NEXT_PUBLIC_SAML_ORG_SLUG=$SAML_ORG_SLUG
WORKDIR /
@@ -124,6 +118,9 @@ WORKDIR /backend
ENV TELEMETRY_ENABLED true
HEALTHCHECK --interval=10s --timeout=3s --start-period=10s \
CMD node healthcheck.js
EXPOSE 8080
EXPOSE 443

@@ -7,9 +7,6 @@ push:
up-dev:
docker compose -f docker-compose.dev.yml up --build
up-dev-ldap:
docker compose -f docker-compose.dev.yml --profile ldap up --build
up-prod:
docker-compose -f docker-compose.prod.yml up --build

@@ -10,8 +10,7 @@
<a href="https://infisical.com/">Infisical Cloud</a> |
<a href="https://infisical.com/docs/self-hosting/overview">Self-Hosting</a> |
<a href="https://infisical.com/docs/documentation/getting-started/introduction">Docs</a> |
<a href="https://www.infisical.com">Website</a> |
<a href="https://infisical.com/careers">Hiring (Remote/SF)</a>
<a href="https://www.infisical.com">Website</a>
</h4>
<p align="center">

@@ -23,17 +23,16 @@ module.exports = {
root: true,
overrides: [
{
files: ["./e2e-test/**/*", "./src/db/migrations/**/*"],
files: ["./e2e-test/**/*"],
rules: {
"@typescript-eslint/no-unsafe-member-access": "off",
"@typescript-eslint/no-unsafe-assignment": "off",
"@typescript-eslint/no-unsafe-argument": "off",
"@typescript-eslint/no-unsafe-return": "off",
"@typescript-eslint/no-unsafe-call": "off"
"@typescript-eslint/no-unsafe-call": "off",
}
}
],
rules: {
"@typescript-eslint/no-empty-function": "off",
"@typescript-eslint/no-unsafe-enum-comparison": "off",

@@ -46,7 +46,7 @@ const deleteSecretImport = async (id: string) => {
describe("Secret Import Router", async () => {
test.each([
{ importEnv: "prod", importPath: "/" }, // one in root
{ importEnv: "dev", importPath: "/" }, // one in root
{ importEnv: "staging", importPath: "/" } // then create a deep one creating intermediate ones
])("Create secret import $importEnv with path $importPath", async ({ importPath, importEnv }) => {
// check for default environments
@@ -66,7 +66,7 @@ describe("Secret Import Router", async () => {
});
test("Get secret imports", async () => {
const createdImport1 = await createSecretImport("/", "prod");
const createdImport1 = await createSecretImport("/", "dev");
const createdImport2 = await createSecretImport("/", "staging");
const res = await testServer.inject({
method: "GET",
@@ -103,10 +103,10 @@ describe("Secret Import Router", async () => {
});
test("Update secret import position", async () => {
const prodImportDetails = { path: "/", envSlug: "prod" };
const devImportDetails = { path: "/", envSlug: "dev" };
const stagingImportDetails = { path: "/", envSlug: "staging" };
const createdImport1 = await createSecretImport(prodImportDetails.path, prodImportDetails.envSlug);
const createdImport1 = await createSecretImport(devImportDetails.path, devImportDetails.envSlug);
const createdImport2 = await createSecretImport(stagingImportDetails.path, stagingImportDetails.envSlug);
const updateImportRes = await testServer.inject({
@@ -136,7 +136,7 @@ describe("Secret Import Router", async () => {
position: 2,
importEnv: expect.objectContaining({
name: expect.any(String),
slug: expect.stringMatching(prodImportDetails.envSlug),
slug: expect.stringMatching(devImportDetails.envSlug),
id: expect.any(String)
})
})
@@ -166,7 +166,7 @@ describe("Secret Import Router", async () => {
});
test("Delete secret import position", async () => {
const createdImport1 = await createSecretImport("/", "prod");
const createdImport1 = await createSecretImport("/", "dev");
const createdImport2 = await createSecretImport("/", "staging");
const deletedImport = await deleteSecretImport(createdImport1.id);
// check for default environments

@@ -942,113 +942,6 @@ describe.each([{ auth: AuthMode.JWT }, { auth: AuthMode.IDENTITY_ACCESS_TOKEN }]
const secrets = await getSecrets(seedData1.environment.slug, path);
expect(secrets).toEqual([]);
});
test.each(testRawSecrets)("Bulk create secret raw in path $path", async ({ path, secret }) => {
const createSecretReqBody = {
projectSlug: seedData1.project.slug,
environment: seedData1.environment.slug,
secretPath: path,
secrets: [
{
secretKey: secret.key,
secretValue: secret.value,
secretComment: secret.comment
}
]
};
const createSecRes = await testServer.inject({
method: "POST",
url: `/api/v3/secrets/batch/raw`,
headers: {
authorization: `Bearer ${authToken}`
},
body: createSecretReqBody
});
expect(createSecRes.statusCode).toBe(200);
const createdSecretPayload = JSON.parse(createSecRes.payload);
expect(createdSecretPayload).toHaveProperty("secrets");
// fetch secrets
const secrets = await getSecrets(seedData1.environment.slug, path);
expect(secrets).toEqual(
expect.arrayContaining([
expect.objectContaining({
key: secret.key,
value: secret.value,
type: SecretType.Shared
})
])
);
await deleteRawSecret({ path, key: secret.key });
});
test.each(testRawSecrets)("Bulk update secret raw in path $path", async ({ secret, path }) => {
await createRawSecret({ path, ...secret });
const updateSecretReqBody = {
projectSlug: seedData1.project.slug,
environment: seedData1.environment.slug,
secretPath: path,
secrets: [
{
secretValue: "new-value",
secretKey: secret.key
}
]
};
const updateSecRes = await testServer.inject({
method: "PATCH",
url: `/api/v3/secrets/batch/raw`,
headers: {
authorization: `Bearer ${authToken}`
},
body: updateSecretReqBody
});
expect(updateSecRes.statusCode).toBe(200);
const updatedSecretPayload = JSON.parse(updateSecRes.payload);
expect(updatedSecretPayload).toHaveProperty("secrets");
// fetch secrets
const secrets = await getSecrets(seedData1.environment.slug, path);
expect(secrets).toEqual(
expect.arrayContaining([
expect.objectContaining({
key: secret.key,
value: "new-value",
version: 2,
type: SecretType.Shared
})
])
);
await deleteRawSecret({ path, key: secret.key });
});
test.each(testRawSecrets)("Bulk delete secret raw in path $path", async ({ path, secret }) => {
await createRawSecret({ path, ...secret });
const deletedSecretReqBody = {
projectSlug: seedData1.project.slug,
environment: seedData1.environment.slug,
secretPath: path,
secrets: [{ secretKey: secret.key }]
};
const deletedSecRes = await testServer.inject({
method: "DELETE",
url: `/api/v3/secrets/batch/raw`,
headers: {
authorization: `Bearer ${authToken}`
},
body: deletedSecretReqBody
});
expect(deletedSecRes.statusCode).toBe(200);
const deletedSecretPayload = JSON.parse(deletedSecRes.payload);
expect(deletedSecretPayload).toHaveProperty("secrets");
// fetch secrets
const secrets = await getSecrets(seedData1.environment.slug, path);
expect(secrets).toEqual([]);
});
}
);

@@ -10,7 +10,7 @@ import { seedData1 } from "@app/db/seed-data";
import { initEnvConfig } from "@app/lib/config/env";
import { initLogger } from "@app/lib/logger";
import { main } from "@app/server/app";
import { AuthMethod, AuthTokenType } from "@app/services/auth/auth-type";
import { AuthTokenType } from "@app/services/auth/auth-type";
import { mockQueue } from "./mocks/queue";
import { mockSmtpServer } from "./mocks/smtp";
@@ -52,8 +52,6 @@ export default {
authTokenType: AuthTokenType.ACCESS_TOKEN,
userId: seedData1.id,
tokenVersionId: seedData1.token.id,
authMethod: AuthMethod.EMAIL,
organizationId: seedData1.organization.id,
accessVersion: 1
},
cfg.AUTH_SECRET,

backend/package-lock.json (generated, 1619 changed lines)

File diff suppressed because it is too large.

@@ -70,8 +70,7 @@
"vitest": "^1.2.2"
},
"dependencies": {
"@aws-sdk/client-iam": "^3.525.0",
"@aws-sdk/client-secrets-manager": "^3.504.0",
"@aws-sdk/client-secrets-manager": "^3.509.0",
"@casl/ability": "^6.5.0",
"@fastify/cookie": "^9.3.1",
"@fastify/cors": "^8.5.0",
@@ -96,7 +95,6 @@
"axios-retry": "^4.0.0",
"bcrypt": "^5.1.1",
"bullmq": "^5.3.3",
"cassandra-driver": "^4.7.2",
"dotenv": "^16.4.1",
"fastify": "^4.26.0",
"fastify-plugin": "^4.5.1",
@@ -108,21 +106,18 @@
"knex": "^3.0.1",
"libsodium-wrappers": "^0.7.13",
"lodash.isequal": "^4.5.0",
"ms": "^2.1.3",
"mysql2": "^3.9.4",
"mysql2": "^3.9.1",
"nanoid": "^5.0.4",
"node-cache": "^5.1.2",
"nodemailer": "^6.9.9",
"ora": "^7.0.1",
"oracledb": "^6.4.0",
"passport-github": "^1.1.0",
"passport-gitlab2": "^5.0.0",
"passport-google-oauth20": "^2.0.0",
"passport-ldapauth": "^3.0.1",
"pg": "^8.11.3",
"pg-query-stream": "^4.5.3",
"picomatch": "^3.0.1",
"pino": "^8.16.2",
"posthog-node": "^3.6.2",
"posthog-node": "^3.6.0",
"probot": "^13.0.0",
"smee-client": "^2.0.0",
"tweetnacl": "^1.0.3",

@@ -103,15 +103,11 @@ export const ${dalName} = (db: TDbClient) => {
`import { z } from "zod";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
import { readLimit } from "@app/server/config/rateLimiter";
export const register${pascalCase}Router = async (server: FastifyZodProvider) => {
server.route({
method: "GET",
url: "/",
config: {
rateLimit: readLimit
},
method: "GET",
schema: {
params: z.object({}),
response: {

@@ -7,10 +7,10 @@ const prompt = promptSync({ sigint: true });
const migrationName = prompt("Enter name for migration: ");
// Remove spaces from migration name and replace with hyphens
const formattedMigrationName = migrationName.replace(/\s+/g, "-");
execSync(
`npx knex migrate:make --knexfile ${path.join(__dirname, "../src/db/knexfile.ts")} -x ts ${formattedMigrationName}`,
`npx knex migrate:make --knexfile ${path.join(
__dirname,
"../src/db/knexfile.ts"
)} -x ts ${migrationName}`,
{ stdio: "inherit" }
);

@@ -3,14 +3,8 @@ import "fastify";
import { TUsers } from "@app/db/schemas";
import { TAuditLogServiceFactory } from "@app/ee/services/audit-log/audit-log-service";
import { TCreateAuditLogDTO } from "@app/ee/services/audit-log/audit-log-types";
import { TDynamicSecretServiceFactory } from "@app/ee/services/dynamic-secret/dynamic-secret-service";
import { TDynamicSecretLeaseServiceFactory } from "@app/ee/services/dynamic-secret-lease/dynamic-secret-lease-service";
import { TGroupServiceFactory } from "@app/ee/services/group/group-service";
import { TIdentityProjectAdditionalPrivilegeServiceFactory } from "@app/ee/services/identity-project-additional-privilege/identity-project-additional-privilege-service";
import { TLdapConfigServiceFactory } from "@app/ee/services/ldap-config/ldap-config-service";
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
import { TProjectUserAdditionalPrivilegeServiceFactory } from "@app/ee/services/project-user-additional-privilege/project-user-additional-privilege-service";
import { TSamlConfigServiceFactory } from "@app/ee/services/saml-config/saml-config-service";
import { TScimServiceFactory } from "@app/ee/services/scim/scim-service";
import { TSecretApprovalPolicyServiceFactory } from "@app/ee/services/secret-approval-policy/secret-approval-policy-service";
@@ -24,9 +18,8 @@ import { TApiKeyServiceFactory } from "@app/services/api-key/api-key-service";
import { TAuthLoginFactory } from "@app/services/auth/auth-login-service";
import { TAuthPasswordFactory } from "@app/services/auth/auth-password-service";
import { TAuthSignupFactory } from "@app/services/auth/auth-signup-service";
import { ActorAuthMethod, ActorType } from "@app/services/auth/auth-type";
import { ActorType } from "@app/services/auth/auth-type";
import { TAuthTokenServiceFactory } from "@app/services/auth-token/auth-token-service";
import { TGroupProjectServiceFactory } from "@app/services/group-project/group-project-service";
import { TIdentityServiceFactory } from "@app/services/identity/identity-service";
import { TIdentityAccessTokenServiceFactory } from "@app/services/identity-access-token/identity-access-token-service";
import { TIdentityProjectServiceFactory } from "@app/services/identity-project/identity-project-service";
@@ -65,10 +58,9 @@ declare module "fastify" {
// identity injection. depending on which kinda of token the information is filled in auth
auth: TAuthMode;
permission: {
authMethod: ActorAuthMethod;
type: ActorType;
id: string;
orgId: string;
orgId?: string;
};
// passport data
passportUser: {
@@ -77,7 +69,6 @@ declare module "fastify" {
};
auditLogInfo: Pick<TCreateAuditLogDTO, "userAgent" | "userAgentType" | "ipAddress" | "actor">;
ssoConfig: Awaited<ReturnType<TSamlConfigServiceFactory["getSaml"]>>;
ldapConfig: Awaited<ReturnType<TLdapConfigServiceFactory["getLdapCfg"]>>;
}
interface FastifyInstance {
@@ -91,8 +82,6 @@ declare module "fastify" {
orgRole: TOrgRoleServiceFactory;
superAdmin: TSuperAdminServiceFactory;
user: TUserServiceFactory;
group: TGroupServiceFactory;
groupProject: TGroupProjectServiceFactory;
apiKey: TApiKeyServiceFactory;
project: TProjectServiceFactory;
projectMembership: TProjectMembershipServiceFactory;
@@ -118,17 +107,12 @@ declare module "fastify" {
snapshot: TSecretSnapshotServiceFactory;
saml: TSamlConfigServiceFactory;
scim: TScimServiceFactory;
ldap: TLdapConfigServiceFactory;
auditLog: TAuditLogServiceFactory;
secretScanning: TSecretScanningServiceFactory;
license: TLicenseServiceFactory;
trustedIp: TTrustedIpServiceFactory;
secretBlindIndex: TSecretBlindIndexServiceFactory;
telemetry: TTelemetryServiceFactory;
dynamicSecret: TDynamicSecretServiceFactory;
dynamicSecretLease: TDynamicSecretLeaseServiceFactory;
projectUserAdditionalPrivilege: TProjectUserAdditionalPrivilegeServiceFactory;
identityProjectAdditionalPrivilege: TIdentityProjectAdditionalPrivilegeServiceFactory;
};
// this is exclusive use for middlewares in which we need to inject data
// everywhere else access using service layer

@@ -17,27 +17,12 @@ import {
TBackupPrivateKey,
TBackupPrivateKeyInsert,
TBackupPrivateKeyUpdate,
TDynamicSecretLeases,
TDynamicSecretLeasesInsert,
TDynamicSecretLeasesUpdate,
TDynamicSecrets,
TDynamicSecretsInsert,
TDynamicSecretsUpdate,
TGitAppInstallSessions,
TGitAppInstallSessionsInsert,
TGitAppInstallSessionsUpdate,
TGitAppOrg,
TGitAppOrgInsert,
TGitAppOrgUpdate,
TGroupProjectMembershipRoles,
TGroupProjectMembershipRolesInsert,
TGroupProjectMembershipRolesUpdate,
TGroupProjectMemberships,
TGroupProjectMembershipsInsert,
TGroupProjectMembershipsUpdate,
TGroups,
TGroupsInsert,
TGroupsUpdate,
TIdentities,
TIdentitiesInsert,
TIdentitiesUpdate,
@@ -47,12 +32,6 @@ import {
TIdentityOrgMemberships,
TIdentityOrgMembershipsInsert,
TIdentityOrgMembershipsUpdate,
TIdentityProjectAdditionalPrivilege,
TIdentityProjectAdditionalPrivilegeInsert,
TIdentityProjectAdditionalPrivilegeUpdate,
TIdentityProjectMembershipRole,
TIdentityProjectMembershipRoleInsert,
TIdentityProjectMembershipRoleUpdate,
TIdentityProjectMemberships,
TIdentityProjectMembershipsInsert,
TIdentityProjectMembershipsUpdate,
@@ -71,9 +50,6 @@ import {
TIntegrations,
TIntegrationsInsert,
TIntegrationsUpdate,
TLdapConfigs,
TLdapConfigsInsert,
TLdapConfigsUpdate,
TOrganizations,
TOrganizationsInsert,
TOrganizationsUpdate,
@@ -104,12 +80,6 @@ import {
TProjects,
TProjectsInsert,
TProjectsUpdate,
TProjectUserAdditionalPrivilege,
TProjectUserAdditionalPrivilegeInsert,
TProjectUserAdditionalPrivilegeUpdate,
TProjectUserMembershipRoles,
TProjectUserMembershipRolesInsert,
TProjectUserMembershipRolesUpdate,
TSamlConfigs,
TSamlConfigsInsert,
TSamlConfigsUpdate,
@@ -191,15 +161,9 @@ import {
TUserActions,
TUserActionsInsert,
TUserActionsUpdate,
TUserAliases,
TUserAliasesInsert,
TUserAliasesUpdate,
TUserEncryptionKeys,
TUserEncryptionKeysInsert,
TUserEncryptionKeysUpdate,
TUserGroupMembership,
TUserGroupMembershipInsert,
TUserGroupMembershipUpdate,
TUsers,
TUsersInsert,
TUsersUpdate,
@@ -211,23 +175,6 @@ import {
declare module "knex/types/tables" {
interface Tables {
[TableName.Users]: Knex.CompositeTableType<TUsers, TUsersInsert, TUsersUpdate>;
[TableName.Groups]: Knex.CompositeTableType<TGroups, TGroupsInsert, TGroupsUpdate>;
[TableName.UserGroupMembership]: Knex.CompositeTableType<
TUserGroupMembership,
TUserGroupMembershipInsert,
TUserGroupMembershipUpdate
>;
[TableName.GroupProjectMembership]: Knex.CompositeTableType<
TGroupProjectMemberships,
TGroupProjectMembershipsInsert,
TGroupProjectMembershipsUpdate
>;
[TableName.GroupProjectMembershipRole]: Knex.CompositeTableType<
TGroupProjectMembershipRoles,
TGroupProjectMembershipRolesInsert,
TGroupProjectMembershipRolesUpdate
>;
[TableName.UserAliases]: Knex.CompositeTableType<TUserAliases, TUserAliasesInsert, TUserAliasesUpdate>;
[TableName.UserEncryptionKey]: Knex.CompositeTableType<
TUserEncryptionKeys,
TUserEncryptionKeysInsert,
@@ -267,17 +214,7 @@ declare module "knex/types/tables" {
TProjectEnvironmentsUpdate
>;
[TableName.ProjectBot]: Knex.CompositeTableType<TProjectBots, TProjectBotsInsert, TProjectBotsUpdate>;
[TableName.ProjectUserMembershipRole]: Knex.CompositeTableType<
TProjectUserMembershipRoles,
TProjectUserMembershipRolesInsert,
TProjectUserMembershipRolesUpdate
>;
[TableName.ProjectRoles]: Knex.CompositeTableType<TProjectRoles, TProjectRolesInsert, TProjectRolesUpdate>;
[TableName.ProjectUserAdditionalPrivilege]: Knex.CompositeTableType<
TProjectUserAdditionalPrivilege,
TProjectUserAdditionalPrivilegeInsert,
TProjectUserAdditionalPrivilegeUpdate
>;
[TableName.ProjectKeys]: Knex.CompositeTableType<TProjectKeys, TProjectKeysInsert, TProjectKeysUpdate>;
[TableName.Secret]: Knex.CompositeTableType<TSecrets, TSecretsInsert, TSecretsUpdate>;
[TableName.SecretBlindIndex]: Knex.CompositeTableType<
@@ -328,16 +265,6 @@ declare module "knex/types/tables" {
TIdentityProjectMembershipsInsert,
TIdentityProjectMembershipsUpdate
>;
[TableName.IdentityProjectMembershipRole]: Knex.CompositeTableType<
TIdentityProjectMembershipRole,
TIdentityProjectMembershipRoleInsert,
TIdentityProjectMembershipRoleUpdate
>;
[TableName.IdentityProjectAdditionalPrivilege]: Knex.CompositeTableType<
TIdentityProjectAdditionalPrivilege,
TIdentityProjectAdditionalPrivilegeInsert,
TIdentityProjectAdditionalPrivilegeUpdate
>;
[TableName.ScimToken]: Knex.CompositeTableType<TScimTokens, TScimTokensInsert, TScimTokensUpdate>;
[TableName.SecretApprovalPolicy]: Knex.CompositeTableType<
TSecretApprovalPolicies,
@@ -390,14 +317,7 @@ declare module "knex/types/tables" {
TSecretSnapshotFoldersInsert,
TSecretSnapshotFoldersUpdate
>;
[TableName.DynamicSecret]: Knex.CompositeTableType<TDynamicSecrets, TDynamicSecretsInsert, TDynamicSecretsUpdate>;
[TableName.DynamicSecretLease]: Knex.CompositeTableType<
TDynamicSecretLeases,
TDynamicSecretLeasesInsert,
TDynamicSecretLeasesUpdate
>;
[TableName.SamlConfig]: Knex.CompositeTableType<TSamlConfigs, TSamlConfigsInsert, TSamlConfigsUpdate>;
[TableName.LdapConfig]: Knex.CompositeTableType<TLdapConfigs, TLdapConfigsInsert, TLdapConfigsUpdate>;
[TableName.OrgBot]: Knex.CompositeTableType<TOrgBots, TOrgBotsInsert, TOrgBotsUpdate>;
[TableName.AuditLog]: Knex.CompositeTableType<TAuditLogs, TAuditLogsInsert, TAuditLogsUpdate>;
[TableName.GitAppInstallSession]: Knex.CompositeTableType<

@@ -6,13 +6,6 @@ export const initDbConnection = ({ dbConnectionUri, dbRootCert }: { dbConnection
client: "pg",
connection: {
connectionString: dbConnectionUri,
host: process.env.DB_HOST,
// @ts-expect-error I have no clue why only for the port there is a type error
// eslint-disable-next-line
port: process.env.DB_PORT,
user: process.env.DB_USER,
database: process.env.DB_NAME,
password: process.env.DB_PASSWORD,
ssl: dbRootCert
? {
rejectUnauthorized: true,

@@ -7,22 +7,18 @@ import path from "path";
// Update with your config settings. .
dotenv.config({
path: path.join(__dirname, "../../../.env.migration")
path: path.join(__dirname, "../../../.env.migration"),
debug: true
});
dotenv.config({
path: path.join(__dirname, "../../../.env")
path: path.join(__dirname, "../../../.env"),
debug: true
});
export default {
development: {
client: "postgres",
connection: {
connectionString: process.env.DB_CONNECTION_URI,
host: process.env.DB_HOST,
port: process.env.DB_PORT,
user: process.env.DB_USER,
database: process.env.DB_NAME,
password: process.env.DB_PASSWORD,
ssl: process.env.DB_ROOT_CERT
? {
rejectUnauthorized: true,
@@ -45,11 +41,6 @@ export default {
client: "postgres",
connection: {
connectionString: process.env.DB_CONNECTION_URI,
host: process.env.DB_HOST,
port: process.env.DB_PORT,
user: process.env.DB_USER,
database: process.env.DB_NAME,
password: process.env.DB_PASSWORD,
ssl: process.env.DB_ROOT_CERT
? {
rejectUnauthorized: true,

@@ -10,12 +10,12 @@ export async function up(knex: Knex): Promise<void> {
await knex.schema.alterTable(TableName.SuperAdmin, (t) => {
t.uuid("instanceId").notNullable().defaultTo(knex.fn.uuid());
});
const superUserConfigExists = await knex(TableName.SuperAdmin).where("id", ADMIN_CONFIG_UUID).first();
if (!superUserConfigExists) {
// eslint-disable-next-line
await knex(TableName.SuperAdmin).update({ id: ADMIN_CONFIG_UUID }).whereNotNull("id").limit(1);
}
// eslint-disable-next-line
await knex(TableName.SuperAdmin)
.update({ id: ADMIN_CONFIG_UUID })
.whereNotNull("id")
.andWhere("id", "<>", ADMIN_CONFIG_UUID)
.limit(1);
}
export async function down(knex: Knex): Promise<void> {

@@ -1,15 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
await knex.schema.alterTable(TableName.Integration, (t) => {
t.datetime("lastUsed");
});
}
export async function down(knex: Knex): Promise<void> {
await knex.schema.alterTable(TableName.Integration, (t) => {
t.dropColumn("lastUsed");
});
}

@@ -1,68 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasTable(TableName.LdapConfig))) {
await knex.schema.createTable(TableName.LdapConfig, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.uuid("orgId").notNullable().unique();
t.foreign("orgId").references("id").inTable(TableName.Organization).onDelete("CASCADE");
t.boolean("isActive").notNullable();
t.string("url").notNullable();
t.string("encryptedBindDN").notNullable();
t.string("bindDNIV").notNullable();
t.string("bindDNTag").notNullable();
t.string("encryptedBindPass").notNullable();
t.string("bindPassIV").notNullable();
t.string("bindPassTag").notNullable();
t.string("searchBase").notNullable();
t.text("encryptedCACert").notNullable();
t.string("caCertIV").notNullable();
t.string("caCertTag").notNullable();
t.timestamps(true, true, true);
});
}
await createOnUpdateTrigger(knex, TableName.LdapConfig);
if (!(await knex.schema.hasTable(TableName.UserAliases))) {
await knex.schema.createTable(TableName.UserAliases, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.uuid("userId").notNullable();
t.foreign("userId").references("id").inTable(TableName.Users).onDelete("CASCADE");
t.string("username").notNullable();
t.string("aliasType").notNullable();
t.string("externalId").notNullable();
t.specificType("emails", "text[]");
t.uuid("orgId").nullable();
t.foreign("orgId").references("id").inTable(TableName.Organization).onDelete("CASCADE");
t.timestamps(true, true, true);
});
}
await createOnUpdateTrigger(knex, TableName.UserAliases);
await knex.schema.alterTable(TableName.Users, (t) => {
t.string("username").unique();
t.string("email").nullable().alter();
t.dropUnique(["email"]);
});
await knex(TableName.Users).update("username", knex.ref("email"));
await knex.schema.alterTable(TableName.Users, (t) => {
t.string("username").notNullable().alter();
});
}
export async function down(knex: Knex): Promise<void> {
await knex.schema.dropTableIfExists(TableName.LdapConfig);
await knex.schema.dropTableIfExists(TableName.UserAliases);
await knex.schema.alterTable(TableName.Users, (t) => {
t.dropColumn("username");
// t.string("email").notNullable().alter();
});
await dropOnUpdateTrigger(knex, TableName.LdapConfig);
}

@@ -1,50 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";
export async function up(knex: Knex): Promise<void> {
const doesTableExist = await knex.schema.hasTable(TableName.ProjectUserMembershipRole);
if (!doesTableExist) {
await knex.schema.createTable(TableName.ProjectUserMembershipRole, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.string("role").notNullable();
t.uuid("projectMembershipId").notNullable();
t.foreign("projectMembershipId").references("id").inTable(TableName.ProjectMembership).onDelete("CASCADE");
// until role is changed/removed the role should not deleted
t.uuid("customRoleId");
t.foreign("customRoleId").references("id").inTable(TableName.ProjectRoles);
t.boolean("isTemporary").notNullable().defaultTo(false);
t.string("temporaryMode");
t.string("temporaryRange"); // could be cron or relative time like 1H or 1minute etc
t.datetime("temporaryAccessStartTime");
t.datetime("temporaryAccessEndTime");
t.timestamps(true, true, true);
});
}
await createOnUpdateTrigger(knex, TableName.ProjectUserMembershipRole);
const projectMemberships = await knex(TableName.ProjectMembership).select(
"id",
"role",
"createdAt",
"updatedAt",
knex.ref("roleId").withSchema(TableName.ProjectMembership).as("customRoleId")
);
if (projectMemberships.length)
await knex.batchInsert(
TableName.ProjectUserMembershipRole,
projectMemberships.map((data) => ({ ...data, projectMembershipId: data.id }))
);
// will be dropped later
// await knex.schema.alterTable(TableName.ProjectMembership, (t) => {
// t.dropColumn("roleId");
// t.dropColumn("role");
// });
}
export async function down(knex: Knex): Promise<void> {
await knex.schema.dropTableIfExists(TableName.ProjectUserMembershipRole);
await dropOnUpdateTrigger(knex, TableName.ProjectUserMembershipRole);
}

@@ -1,52 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";
export async function up(knex: Knex): Promise<void> {
const doesTableExist = await knex.schema.hasTable(TableName.IdentityProjectMembershipRole);
if (!doesTableExist) {
await knex.schema.createTable(TableName.IdentityProjectMembershipRole, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.string("role").notNullable();
t.uuid("projectMembershipId").notNullable();
t.foreign("projectMembershipId")
.references("id")
.inTable(TableName.IdentityProjectMembership)
.onDelete("CASCADE");
// until role is changed/removed the role should not deleted
t.uuid("customRoleId");
t.foreign("customRoleId").references("id").inTable(TableName.ProjectRoles);
t.boolean("isTemporary").notNullable().defaultTo(false);
t.string("temporaryMode");
t.string("temporaryRange"); // could be cron or relative time like 1H or 1minute etc
t.datetime("temporaryAccessStartTime");
t.datetime("temporaryAccessEndTime");
t.timestamps(true, true, true);
});
}
await createOnUpdateTrigger(knex, TableName.IdentityProjectMembershipRole);
const identityMemberships = await knex(TableName.IdentityProjectMembership).select(
"id",
"role",
"createdAt",
"updatedAt",
knex.ref("roleId").withSchema(TableName.IdentityProjectMembership).as("customRoleId")
);
if (identityMemberships.length)
await knex.batchInsert(
TableName.IdentityProjectMembershipRole,
identityMemberships.map((data) => ({ ...data, projectMembershipId: data.id }))
);
// await knex.schema.alterTable(TableName.IdentityProjectMembership, (t) => {
// t.dropColumn("roleId");
// t.dropColumn("role");
// });
}
export async function down(knex: Knex): Promise<void> {
await knex.schema.dropTableIfExists(TableName.IdentityProjectMembershipRole);
await dropOnUpdateTrigger(knex, TableName.IdentityProjectMembershipRole);
}

@@ -1,58 +0,0 @@
import { Knex } from "knex";
import { SecretEncryptionAlgo, SecretKeyEncoding, TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";
export async function up(knex: Knex): Promise<void> {
const doesTableExist = await knex.schema.hasTable(TableName.DynamicSecret);
if (!doesTableExist) {
await knex.schema.createTable(TableName.DynamicSecret, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.string("name").notNullable();
t.integer("version").notNullable();
t.string("type").notNullable();
t.string("defaultTTL").notNullable();
t.string("maxTTL");
t.string("inputIV").notNullable();
t.text("inputCiphertext").notNullable();
t.string("inputTag").notNullable();
t.string("algorithm").notNullable().defaultTo(SecretEncryptionAlgo.AES_256_GCM);
t.string("keyEncoding").notNullable().defaultTo(SecretKeyEncoding.UTF8);
t.uuid("folderId").notNullable();
// for background process communication
t.string("status");
t.string("statusDetails");
t.foreign("folderId").references("id").inTable(TableName.SecretFolder).onDelete("CASCADE");
t.unique(["name", "folderId"]);
t.timestamps(true, true, true);
});
}
await createOnUpdateTrigger(knex, TableName.DynamicSecret);
const doesTableDynamicSecretLease = await knex.schema.hasTable(TableName.DynamicSecretLease);
if (!doesTableDynamicSecretLease) {
await knex.schema.createTable(TableName.DynamicSecretLease, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.integer("version").notNullable();
t.string("externalEntityId").notNullable();
t.datetime("expireAt").notNullable();
// for background process communication
t.string("status");
t.string("statusDetails");
t.uuid("dynamicSecretId").notNullable();
t.foreign("dynamicSecretId").references("id").inTable(TableName.DynamicSecret).onDelete("CASCADE");
t.timestamps(true, true, true);
});
}
await createOnUpdateTrigger(knex, TableName.DynamicSecretLease);
}
export async function down(knex: Knex): Promise<void> {
await dropOnUpdateTrigger(knex, TableName.DynamicSecretLease);
await knex.schema.dropTableIfExists(TableName.DynamicSecretLease);
await dropOnUpdateTrigger(knex, TableName.DynamicSecret);
await knex.schema.dropTableIfExists(TableName.DynamicSecret);
}

@ -1,29 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasTable(TableName.ProjectUserAdditionalPrivilege))) {
await knex.schema.createTable(TableName.ProjectUserAdditionalPrivilege, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.string("slug", 60).notNullable();
t.uuid("projectMembershipId").notNullable();
t.foreign("projectMembershipId").references("id").inTable(TableName.ProjectMembership).onDelete("CASCADE");
t.boolean("isTemporary").notNullable().defaultTo(false);
t.string("temporaryMode");
t.string("temporaryRange"); // could be cron or relative time like 1H or 1minute etc
t.datetime("temporaryAccessStartTime");
t.datetime("temporaryAccessEndTime");
t.jsonb("permissions").notNullable();
t.timestamps(true, true, true);
});
}
await createOnUpdateTrigger(knex, TableName.ProjectUserAdditionalPrivilege);
}
export async function down(knex: Knex): Promise<void> {
await dropOnUpdateTrigger(knex, TableName.ProjectUserAdditionalPrivilege);
await knex.schema.dropTableIfExists(TableName.ProjectUserAdditionalPrivilege);
}

@ -1,32 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasTable(TableName.IdentityProjectAdditionalPrivilege))) {
await knex.schema.createTable(TableName.IdentityProjectAdditionalPrivilege, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.string("slug", 60).notNullable();
t.uuid("projectMembershipId").notNullable();
t.foreign("projectMembershipId")
.references("id")
.inTable(TableName.IdentityProjectMembership)
.onDelete("CASCADE");
t.boolean("isTemporary").notNullable().defaultTo(false);
t.string("temporaryMode");
t.string("temporaryRange"); // could be cron or relative time like 1H or 1minute etc
t.datetime("temporaryAccessStartTime");
t.datetime("temporaryAccessEndTime");
t.jsonb("permissions").notNullable();
t.timestamps(true, true, true);
});
}
await createOnUpdateTrigger(knex, TableName.IdentityProjectAdditionalPrivilege);
}
export async function down(knex: Knex): Promise<void> {
await dropOnUpdateTrigger(knex, TableName.IdentityProjectAdditionalPrivilege);
await knex.schema.dropTableIfExists(TableName.IdentityProjectAdditionalPrivilege);
}

@ -1,112 +0,0 @@
import { Knex } from "knex";
import { z } from "zod";
import { TableName, TOrgMemberships } from "../schemas";
const validateOrgMembership = (membershipToValidate: TOrgMemberships, firstMembership: TOrgMemberships) => {
const firstOrgId = firstMembership.orgId;
const firstUserId = firstMembership.userId;
if (membershipToValidate.id === firstMembership.id) {
return;
}
if (membershipToValidate.inviteEmail !== firstMembership.inviteEmail) {
throw new Error(`Invite emails are different for the same userId and orgId: ${firstUserId}, ${firstOrgId}`);
}
if (membershipToValidate.orgId !== firstMembership.orgId) {
throw new Error(`OrgIds are different for the same userId and orgId: ${firstUserId}, ${firstOrgId}`);
}
if (membershipToValidate.role !== firstMembership.role) {
throw new Error(`Roles are different for the same userId and orgId: ${firstUserId}, ${firstOrgId}`);
}
if (membershipToValidate.roleId !== firstMembership.roleId) {
throw new Error(`RoleIds are different for the same userId and orgId: ${firstUserId}, ${firstOrgId}`);
}
if (membershipToValidate.status !== firstMembership.status) {
throw new Error(`Statuses are different for the same userId and orgId: ${firstUserId}, ${firstOrgId}`);
}
if (membershipToValidate.userId !== firstMembership.userId) {
throw new Error(`UserIds are different for the same userId and orgId: ${firstUserId}, ${firstOrgId}`);
}
};
export async function up(knex: Knex): Promise<void> {
const RowSchema = z.object({
userId: z.string(),
orgId: z.string(),
cnt: z.string()
});
// Transactional find and delete duplicate rows
await knex.transaction(async (tx) => {
const duplicateRows = await tx(TableName.OrgMembership)
.select("userId", "orgId") // Select the userId and orgId so we can group by them
.whereNotNull("userId") // Ensure that the userId is not null
.count("* as cnt") // Count the number of rows for each userId and orgId, so we can make sure there are more than 1 row (a duplicate)
.groupBy("userId", "orgId")
.havingRaw("count(*) > ?", [1]); // Using havingRaw for direct SQL expressions
// Parse the rows to ensure they are in the correct format, and for type safety
const parsedRows = RowSchema.array().parse(duplicateRows);
// For each of the duplicate rows, loop through and find the actual memberships to delete
for (const row of parsedRows) {
const count = Number(row.cnt);
// An extra check to ensure that the count is actually a number and that there are at least 2 rows (i.e. a duplicate exists)
if (typeof count !== "number" || count < 2) {
// eslint-disable-next-line no-continue
continue;
}
// Find all the organization memberships that have the same userId and orgId
// eslint-disable-next-line no-await-in-loop
const rowsToDelete = await tx(TableName.OrgMembership).where({
userId: row.userId,
orgId: row.orgId
});
// Ensure that all the rows have exactly the same value, except id, createdAt, updatedAt
for (const rowToDelete of rowsToDelete) {
validateOrgMembership(rowToDelete, rowsToDelete[0]);
}
// Pick the single row to keep (the one with the lowest createdAt)
let lowestCreatedAt: number | null = null;
let latestCreatedRow: TOrgMemberships | null = null;
for (const rowToDelete of rowsToDelete) {
if (lowestCreatedAt === null || rowToDelete.createdAt.getTime() < lowestCreatedAt) {
lowestCreatedAt = rowToDelete.createdAt.getTime();
latestCreatedRow = rowToDelete;
}
}
if (!latestCreatedRow) {
throw new Error("Failed to find last created membership");
}
// Filter the row we are keeping out of the rows to delete
const membershipIdsToDelete = rowsToDelete.map((r) => r.id).filter((id) => id !== latestCreatedRow!.id);
// eslint-disable-next-line no-await-in-loop
const numberOfRowsDeleted = await tx(TableName.OrgMembership).whereIn("id", membershipIdsToDelete).delete();
// eslint-disable-next-line no-console
console.log(
`Deleted ${numberOfRowsDeleted} duplicate organization memberships for ${row.userId} and ${row.orgId}`
);
}
});
await knex.schema.alterTable(TableName.OrgMembership, (table) => {
table.unique(["userId", "orgId"]);
});
}
export async function down(knex: Knex): Promise<void> {
await knex.schema.alterTable(TableName.OrgMembership, (table) => {
table.dropUnique(["userId", "orgId"]);
});
}
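For reference only, the duplicate-detection query in the transaction above corresponds roughly to the following raw query, assuming TableName.OrgMembership resolves to the "org_memberships" table:

// Equivalent check expressed with knex.raw (sketch, not part of the migration):
const duplicatePairs = await knex.raw(`
  SELECT "userId", "orgId", count(*) AS cnt
  FROM ${TableName.OrgMembership}
  WHERE "userId" IS NOT NULL
  GROUP BY "userId", "orgId"
  HAVING count(*) > 1
`);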

@ -1,82 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasTable(TableName.Groups))) {
await knex.schema.createTable(TableName.Groups, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.uuid("orgId").notNullable();
t.foreign("orgId").references("id").inTable(TableName.Organization).onDelete("CASCADE");
t.string("name").notNullable();
t.string("slug").notNullable();
t.unique(["orgId", "slug"]);
t.string("role").notNullable();
t.uuid("roleId");
t.foreign("roleId").references("id").inTable(TableName.OrgRoles);
t.timestamps(true, true, true);
});
}
await createOnUpdateTrigger(knex, TableName.Groups);
if (!(await knex.schema.hasTable(TableName.UserGroupMembership))) {
await knex.schema.createTable(TableName.UserGroupMembership, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid()); // link to user and link to groups cascade on groups
t.uuid("userId").notNullable();
t.foreign("userId").references("id").inTable(TableName.Users).onDelete("CASCADE");
t.uuid("groupId").notNullable();
t.foreign("groupId").references("id").inTable(TableName.Groups).onDelete("CASCADE");
t.timestamps(true, true, true);
});
}
await createOnUpdateTrigger(knex, TableName.UserGroupMembership);
if (!(await knex.schema.hasTable(TableName.GroupProjectMembership))) {
await knex.schema.createTable(TableName.GroupProjectMembership, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.string("projectId").notNullable();
t.foreign("projectId").references("id").inTable(TableName.Project).onDelete("CASCADE");
t.uuid("groupId").notNullable();
t.foreign("groupId").references("id").inTable(TableName.Groups).onDelete("CASCADE");
t.timestamps(true, true, true);
});
}
await createOnUpdateTrigger(knex, TableName.GroupProjectMembership);
if (!(await knex.schema.hasTable(TableName.GroupProjectMembershipRole))) {
await knex.schema.createTable(TableName.GroupProjectMembershipRole, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.string("role").notNullable();
t.uuid("projectMembershipId").notNullable();
t.foreign("projectMembershipId").references("id").inTable(TableName.GroupProjectMembership).onDelete("CASCADE");
// until the role is changed/removed, the custom role should not be deleted
t.uuid("customRoleId");
t.foreign("customRoleId").references("id").inTable(TableName.ProjectRoles);
t.boolean("isTemporary").notNullable().defaultTo(false);
t.string("temporaryMode");
t.string("temporaryRange"); // could be cron or relative time like 1H or 1minute etc
t.datetime("temporaryAccessStartTime");
t.datetime("temporaryAccessEndTime");
t.timestamps(true, true, true);
});
}
await createOnUpdateTrigger(knex, TableName.GroupProjectMembershipRole);
}
export async function down(knex: Knex): Promise<void> {
await knex.schema.dropTableIfExists(TableName.GroupProjectMembershipRole);
await dropOnUpdateTrigger(knex, TableName.GroupProjectMembershipRole);
await knex.schema.dropTableIfExists(TableName.UserGroupMembership);
await dropOnUpdateTrigger(knex, TableName.UserGroupMembership);
await knex.schema.dropTableIfExists(TableName.GroupProjectMembership);
await dropOnUpdateTrigger(knex, TableName.GroupProjectMembership);
await knex.schema.dropTableIfExists(TableName.Groups);
await dropOnUpdateTrigger(knex, TableName.Groups);
}
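A rough usage sketch (not part of the migration) showing how the junction table can be joined to list the groups a given user belongs to; userId is a hypothetical variable:

const groupsForUser = await knex(TableName.UserGroupMembership)
  .join(TableName.Groups, `${TableName.Groups}.id`, `${TableName.UserGroupMembership}.groupId`)
  .where(`${TableName.UserGroupMembership}.userId`, userId)
  .select(`${TableName.Groups}.*`);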

@ -1,47 +0,0 @@
import { Knex } from "knex";
import { ProjectMembershipRole, TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const doesProjectRoleFieldExist = await knex.schema.hasColumn(TableName.ProjectMembership, "role");
const doesProjectRoleIdFieldExist = await knex.schema.hasColumn(TableName.ProjectMembership, "roleId");
await knex.schema.alterTable(TableName.ProjectMembership, (t) => {
if (doesProjectRoleFieldExist) t.dropColumn("roleId");
if (doesProjectRoleIdFieldExist) t.dropColumn("role");
});
const doesIdentityProjectRoleFieldExist = await knex.schema.hasColumn(TableName.IdentityProjectMembership, "role");
const doesIdentityProjectRoleIdFieldExist = await knex.schema.hasColumn(
TableName.IdentityProjectMembership,
"roleId"
);
await knex.schema.alterTable(TableName.IdentityProjectMembership, (t) => {
if (doesIdentityProjectRoleFieldExist) t.dropColumn("roleId");
if (doesIdentityProjectRoleIdFieldExist) t.dropColumn("role");
});
}
export async function down(knex: Knex): Promise<void> {
const doesProjectRoleFieldExist = await knex.schema.hasColumn(TableName.ProjectMembership, "role");
const doesProjectRoleIdFieldExist = await knex.schema.hasColumn(TableName.ProjectMembership, "roleId");
await knex.schema.alterTable(TableName.ProjectMembership, (t) => {
if (!doesProjectRoleFieldExist) t.string("role").defaultTo(ProjectMembershipRole.Member);
if (!doesProjectRoleIdFieldExist) {
t.uuid("roleId");
t.foreign("roleId").references("id").inTable(TableName.ProjectRoles);
}
});
const doesIdentityProjectRoleFieldExist = await knex.schema.hasColumn(TableName.IdentityProjectMembership, "role");
const doesIdentityProjectRoleIdFieldExist = await knex.schema.hasColumn(
TableName.IdentityProjectMembership,
"roleId"
);
await knex.schema.alterTable(TableName.IdentityProjectMembership, (t) => {
if (!doesIdentityProjectRoleFieldExist) t.string("role").defaultTo(ProjectMembershipRole.Member);
if (!doesIdentityProjectRoleIdFieldExist) {
t.uuid("roleId");
t.foreign("roleId").references("id").inTable(TableName.ProjectRoles);
}
});
}

@ -1,15 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
await knex.schema.alterTable(TableName.UserGroupMembership, (t) => {
t.boolean("isPending").notNullable().defaultTo(false);
});
}
export async function down(knex: Knex): Promise<void> {
await knex.schema.alterTable(TableName.UserGroupMembership, (t) => {
t.dropColumn("isPending");
});
}

@ -1,24 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const DynamicSecretLeasesSchema = z.object({
id: z.string().uuid(),
version: z.number(),
externalEntityId: z.string(),
expireAt: z.date(),
status: z.string().nullable().optional(),
statusDetails: z.string().nullable().optional(),
dynamicSecretId: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date()
});
export type TDynamicSecretLeases = z.infer<typeof DynamicSecretLeasesSchema>;
export type TDynamicSecretLeasesInsert = Omit<z.input<typeof DynamicSecretLeasesSchema>, TImmutableDBKeys>;
export type TDynamicSecretLeasesUpdate = Partial<Omit<z.input<typeof DynamicSecretLeasesSchema>, TImmutableDBKeys>>;
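For illustration, and assuming TImmutableDBKeys covers the generated id and timestamp columns, a typical insert payload matching this schema would look like the following (values are hypothetical):

const leaseToInsert: TDynamicSecretLeasesInsert = {
  version: 1,
  externalEntityId: "generated-db-user-1",
  expireAt: new Date(Date.now() + 60 * 60 * 1000), // one hour from now
  dynamicSecretId: "33333333-3333-3333-3333-333333333333"
};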

@ -1,31 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const DynamicSecretsSchema = z.object({
id: z.string().uuid(),
name: z.string(),
version: z.number(),
type: z.string(),
defaultTTL: z.string(),
maxTTL: z.string().nullable().optional(),
inputIV: z.string(),
inputCiphertext: z.string(),
inputTag: z.string(),
algorithm: z.string().default("aes-256-gcm"),
keyEncoding: z.string().default("utf8"),
folderId: z.string().uuid(),
status: z.string().nullable().optional(),
statusDetails: z.string().nullable().optional(),
createdAt: z.date(),
updatedAt: z.date()
});
export type TDynamicSecrets = z.infer<typeof DynamicSecretsSchema>;
export type TDynamicSecretsInsert = Omit<z.input<typeof DynamicSecretsSchema>, TImmutableDBKeys>;
export type TDynamicSecretsUpdate = Partial<Omit<z.input<typeof DynamicSecretsSchema>, TImmutableDBKeys>>;

@ -1,31 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const GroupProjectMembershipRolesSchema = z.object({
id: z.string().uuid(),
role: z.string(),
projectMembershipId: z.string().uuid(),
customRoleId: z.string().uuid().nullable().optional(),
isTemporary: z.boolean().default(false),
temporaryMode: z.string().nullable().optional(),
temporaryRange: z.string().nullable().optional(),
temporaryAccessStartTime: z.date().nullable().optional(),
temporaryAccessEndTime: z.date().nullable().optional(),
createdAt: z.date(),
updatedAt: z.date()
});
export type TGroupProjectMembershipRoles = z.infer<typeof GroupProjectMembershipRolesSchema>;
export type TGroupProjectMembershipRolesInsert = Omit<
z.input<typeof GroupProjectMembershipRolesSchema>,
TImmutableDBKeys
>;
export type TGroupProjectMembershipRolesUpdate = Partial<
Omit<z.input<typeof GroupProjectMembershipRolesSchema>, TImmutableDBKeys>
>;

@ -1,22 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const GroupProjectMembershipsSchema = z.object({
id: z.string().uuid(),
projectId: z.string(),
groupId: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date()
});
export type TGroupProjectMemberships = z.infer<typeof GroupProjectMembershipsSchema>;
export type TGroupProjectMembershipsInsert = Omit<z.input<typeof GroupProjectMembershipsSchema>, TImmutableDBKeys>;
export type TGroupProjectMembershipsUpdate = Partial<
Omit<z.input<typeof GroupProjectMembershipsSchema>, TImmutableDBKeys>
>;

@ -1,23 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const GroupsSchema = z.object({
id: z.string().uuid(),
orgId: z.string().uuid(),
name: z.string(),
slug: z.string(),
role: z.string(),
roleId: z.string().uuid().nullable().optional(),
createdAt: z.date(),
updatedAt: z.date()
});
export type TGroups = z.infer<typeof GroupsSchema>;
export type TGroupsInsert = Omit<z.input<typeof GroupsSchema>, TImmutableDBKeys>;
export type TGroupsUpdate = Partial<Omit<z.input<typeof GroupsSchema>, TImmutableDBKeys>>;

@ -1,31 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const IdentityProjectAdditionalPrivilegeSchema = z.object({
id: z.string().uuid(),
slug: z.string(),
projectMembershipId: z.string().uuid(),
isTemporary: z.boolean().default(false),
temporaryMode: z.string().nullable().optional(),
temporaryRange: z.string().nullable().optional(),
temporaryAccessStartTime: z.date().nullable().optional(),
temporaryAccessEndTime: z.date().nullable().optional(),
permissions: z.unknown(),
createdAt: z.date(),
updatedAt: z.date()
});
export type TIdentityProjectAdditionalPrivilege = z.infer<typeof IdentityProjectAdditionalPrivilegeSchema>;
export type TIdentityProjectAdditionalPrivilegeInsert = Omit<
z.input<typeof IdentityProjectAdditionalPrivilegeSchema>,
TImmutableDBKeys
>;
export type TIdentityProjectAdditionalPrivilegeUpdate = Partial<
Omit<z.input<typeof IdentityProjectAdditionalPrivilegeSchema>, TImmutableDBKeys>
>;

@ -1,31 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const IdentityProjectMembershipRoleSchema = z.object({
id: z.string().uuid(),
role: z.string(),
projectMembershipId: z.string().uuid(),
customRoleId: z.string().uuid().nullable().optional(),
isTemporary: z.boolean().default(false),
temporaryMode: z.string().nullable().optional(),
temporaryRange: z.string().nullable().optional(),
temporaryAccessStartTime: z.date().nullable().optional(),
temporaryAccessEndTime: z.date().nullable().optional(),
createdAt: z.date(),
updatedAt: z.date()
});
export type TIdentityProjectMembershipRole = z.infer<typeof IdentityProjectMembershipRoleSchema>;
export type TIdentityProjectMembershipRoleInsert = Omit<
z.input<typeof IdentityProjectMembershipRoleSchema>,
TImmutableDBKeys
>;
export type TIdentityProjectMembershipRoleUpdate = Partial<
Omit<z.input<typeof IdentityProjectMembershipRoleSchema>, TImmutableDBKeys>
>;

@ -9,6 +9,8 @@ import { TImmutableDBKeys } from "./models";
export const IdentityProjectMembershipsSchema = z.object({
id: z.string().uuid(),
role: z.string(),
roleId: z.string().uuid().nullable().optional(),
projectId: z.string(),
identityId: z.string().uuid(),
createdAt: z.date(),

@ -3,25 +3,17 @@ export * from "./audit-logs";
export * from "./auth-token-sessions";
export * from "./auth-tokens";
export * from "./backup-private-key";
export * from "./dynamic-secret-leases";
export * from "./dynamic-secrets";
export * from "./git-app-install-sessions";
export * from "./git-app-org";
export * from "./group-project-membership-roles";
export * from "./group-project-memberships";
export * from "./groups";
export * from "./identities";
export * from "./identity-access-tokens";
export * from "./identity-org-memberships";
export * from "./identity-project-additional-privilege";
export * from "./identity-project-membership-role";
export * from "./identity-project-memberships";
export * from "./identity-ua-client-secrets";
export * from "./identity-universal-auths";
export * from "./incident-contacts";
export * from "./integration-auths";
export * from "./integrations";
export * from "./ldap-configs";
export * from "./models";
export * from "./org-bots";
export * from "./org-memberships";
@ -32,8 +24,6 @@ export * from "./project-environments";
export * from "./project-keys";
export * from "./project-memberships";
export * from "./project-roles";
export * from "./project-user-additional-privilege";
export * from "./project-user-membership-roles";
export * from "./projects";
export * from "./saml-configs";
export * from "./scim-tokens";
@ -62,8 +52,6 @@ export * from "./service-tokens";
export * from "./super-admin";
export * from "./trusted-ips";
export * from "./user-actions";
export * from "./user-aliases";
export * from "./user-encryption-keys";
export * from "./user-group-membership";
export * from "./users";
export * from "./webhooks";

@ -27,8 +27,7 @@ export const IntegrationsSchema = z.object({
envId: z.string().uuid(),
secretPath: z.string().default("/"),
createdAt: z.date(),
updatedAt: z.date(),
lastUsed: z.date().nullable().optional()
updatedAt: z.date()
});
export type TIntegrations = z.infer<typeof IntegrationsSchema>;

@ -1,31 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const LdapConfigsSchema = z.object({
id: z.string().uuid(),
orgId: z.string().uuid(),
isActive: z.boolean(),
url: z.string(),
encryptedBindDN: z.string(),
bindDNIV: z.string(),
bindDNTag: z.string(),
encryptedBindPass: z.string(),
bindPassIV: z.string(),
bindPassTag: z.string(),
searchBase: z.string(),
encryptedCACert: z.string(),
caCertIV: z.string(),
caCertTag: z.string(),
createdAt: z.date(),
updatedAt: z.date()
});
export type TLdapConfigs = z.infer<typeof LdapConfigsSchema>;
export type TLdapConfigsInsert = Omit<z.input<typeof LdapConfigsSchema>, TImmutableDBKeys>;
export type TLdapConfigsUpdate = Partial<Omit<z.input<typeof LdapConfigsSchema>, TImmutableDBKeys>>;

@ -2,11 +2,6 @@ import { z } from "zod";
export enum TableName {
Users = "users",
Groups = "groups",
GroupProjectMembership = "group_project_memberships",
GroupProjectMembershipRole = "group_project_membership_roles",
UserGroupMembership = "user_group_membership",
UserAliases = "user_aliases",
UserEncryptionKey = "user_encryption_keys",
AuthTokens = "auth_tokens",
AuthTokenSession = "auth_token_sessions",
@ -24,8 +19,6 @@ export enum TableName {
Environment = "project_environments",
ProjectMembership = "project_memberships",
ProjectRoles = "project_roles",
ProjectUserAdditionalPrivilege = "project_user_additional_privilege",
ProjectUserMembershipRole = "project_user_membership_roles",
ProjectKeys = "project_keys",
Secret = "secrets",
SecretBlindIndex = "secret_blind_indexes",
@ -47,8 +40,6 @@ export enum TableName {
IdentityUaClientSecret = "identity_ua_client_secrets",
IdentityOrgMembership = "identity_org_memberships",
IdentityProjectMembership = "identity_project_memberships",
IdentityProjectMembershipRole = "identity_project_membership_role",
IdentityProjectAdditionalPrivilege = "identity_project_additional_privilege",
ScimToken = "scim_tokens",
SecretApprovalPolicy = "secret_approval_policies",
SecretApprovalPolicyApprover = "secret_approval_policies_approvers",
@ -59,14 +50,11 @@ export enum TableName {
SecretRotation = "secret_rotations",
SecretRotationOutput = "secret_rotation_outputs",
SamlConfig = "saml_configs",
LdapConfig = "ldap_configs",
AuditLog = "audit_logs",
GitAppInstallSession = "git_app_install_sessions",
GitAppOrg = "git_app_org",
SecretScanningGitRisk = "secret_scanning_git_risks",
TrustedIps = "trusted_ips",
DynamicSecret = "dynamic_secrets",
DynamicSecretLease = "dynamic_secret_leases",
// junction tables with tags
JnSecretTag = "secret_tag_junction",
SecretVersionTag = "secret_version_tag_junction"

@ -9,10 +9,12 @@ import { TImmutableDBKeys } from "./models";
export const ProjectMembershipsSchema = z.object({
id: z.string().uuid(),
role: z.string(),
createdAt: z.date(),
updatedAt: z.date(),
userId: z.string().uuid(),
projectId: z.string()
projectId: z.string(),
roleId: z.string().uuid().nullable().optional()
});
export type TProjectMemberships = z.infer<typeof ProjectMembershipsSchema>;

@ -1,31 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const ProjectUserAdditionalPrivilegeSchema = z.object({
id: z.string().uuid(),
slug: z.string(),
projectMembershipId: z.string().uuid(),
isTemporary: z.boolean().default(false),
temporaryMode: z.string().nullable().optional(),
temporaryRange: z.string().nullable().optional(),
temporaryAccessStartTime: z.date().nullable().optional(),
temporaryAccessEndTime: z.date().nullable().optional(),
permissions: z.unknown(),
createdAt: z.date(),
updatedAt: z.date()
});
export type TProjectUserAdditionalPrivilege = z.infer<typeof ProjectUserAdditionalPrivilegeSchema>;
export type TProjectUserAdditionalPrivilegeInsert = Omit<
z.input<typeof ProjectUserAdditionalPrivilegeSchema>,
TImmutableDBKeys
>;
export type TProjectUserAdditionalPrivilegeUpdate = Partial<
Omit<z.input<typeof ProjectUserAdditionalPrivilegeSchema>, TImmutableDBKeys>
>;

@ -1,31 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const ProjectUserMembershipRolesSchema = z.object({
id: z.string().uuid(),
role: z.string(),
projectMembershipId: z.string().uuid(),
customRoleId: z.string().uuid().nullable().optional(),
isTemporary: z.boolean().default(false),
temporaryMode: z.string().nullable().optional(),
temporaryRange: z.string().nullable().optional(),
temporaryAccessStartTime: z.date().nullable().optional(),
temporaryAccessEndTime: z.date().nullable().optional(),
createdAt: z.date(),
updatedAt: z.date()
});
export type TProjectUserMembershipRoles = z.infer<typeof ProjectUserMembershipRolesSchema>;
export type TProjectUserMembershipRolesInsert = Omit<
z.input<typeof ProjectUserMembershipRolesSchema>,
TImmutableDBKeys
>;
export type TProjectUserMembershipRolesUpdate = Partial<
Omit<z.input<typeof ProjectUserMembershipRolesSchema>, TImmutableDBKeys>
>;

@ -1,24 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const UserAliasesSchema = z.object({
id: z.string().uuid(),
userId: z.string().uuid(),
username: z.string(),
aliasType: z.string(),
externalId: z.string(),
emails: z.string().array().nullable().optional(),
orgId: z.string().uuid().nullable().optional(),
createdAt: z.date(),
updatedAt: z.date()
});
export type TUserAliases = z.infer<typeof UserAliasesSchema>;
export type TUserAliasesInsert = Omit<z.input<typeof UserAliasesSchema>, TImmutableDBKeys>;
export type TUserAliasesUpdate = Partial<Omit<z.input<typeof UserAliasesSchema>, TImmutableDBKeys>>;

@ -1,21 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const UserGroupMembershipSchema = z.object({
id: z.string().uuid(),
userId: z.string().uuid(),
groupId: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date(),
isPending: z.boolean().default(false)
});
export type TUserGroupMembership = z.infer<typeof UserGroupMembershipSchema>;
export type TUserGroupMembershipInsert = Omit<z.input<typeof UserGroupMembershipSchema>, TImmutableDBKeys>;
export type TUserGroupMembershipUpdate = Partial<Omit<z.input<typeof UserGroupMembershipSchema>, TImmutableDBKeys>>;

@ -9,7 +9,7 @@ import { TImmutableDBKeys } from "./models";
export const UsersSchema = z.object({
id: z.string().uuid(),
email: z.string().nullable().optional(),
email: z.string(),
authMethods: z.string().array().nullable().optional(),
superAdmin: z.boolean().default(false).nullable().optional(),
firstName: z.string().nullable().optional(),
@ -20,8 +20,7 @@ export const UsersSchema = z.object({
devices: z.unknown().nullable().optional(),
createdAt: z.date(),
updatedAt: z.date(),
isGhost: z.boolean().default(false),
username: z.string()
isGhost: z.boolean().default(false)
});
export type TUsers = z.infer<typeof UsersSchema>;

@ -21,7 +21,6 @@ export let userPublicKey: string | undefined;
export const seedData1 = {
id: "3dafd81d-4388-432b-a4c5-f735616868c1",
username: process.env.TEST_USER_USERNAME || "test@localhost.local",
email: process.env.TEST_USER_EMAIL || "test@localhost.local",
password: process.env.TEST_USER_PASSWORD || "testInfisical@1",
organization: {

@ -9,12 +9,7 @@ export async function seed(knex: Knex): Promise<void> {
await knex(TableName.Users).del();
await knex(TableName.UserEncryptionKey).del();
await knex(TableName.SuperAdmin).del();
await knex(TableName.SuperAdmin).insert([
// eslint-disable-next-line
// @ts-ignore
{ id: "00000000-0000-0000-0000-000000000000", initialized: true, allowSignUp: true }
]);
await knex(TableName.SuperAdmin).insert([{ initialized: true, allowSignUp: true }]);
// Inserts seed entries
const [user] = await knex(TableName.Users)
.insert([
@ -22,7 +17,6 @@ export async function seed(knex: Knex): Promise<void> {
// eslint-disable-next-line
// @ts-ignore
id: seedData1.id,
username: seedData1.username,
email: seedData1.email,
superAdmin: true,
firstName: "test",

@ -4,7 +4,7 @@ import { Knex } from "knex";
import { encryptSymmetric128BitHexKeyUTF8 } from "@app/lib/crypto";
import { ProjectMembershipRole, SecretEncryptionAlgo, SecretKeyEncoding, TableName } from "../schemas";
import { OrgMembershipRole, SecretEncryptionAlgo, SecretKeyEncoding, TableName } from "../schemas";
import { buildUserProjectKey, getUserPrivateKey, seedData1 } from "../seed-data";
export const DEFAULT_PROJECT_ENVS = [
@ -30,15 +30,10 @@ export async function seed(knex: Knex): Promise<void> {
})
.returning("*");
const projectMembership = await knex(TableName.ProjectMembership)
.insert({
projectId: project.id,
userId: seedData1.id
})
.returning("*");
await knex(TableName.ProjectUserMembershipRole).insert({
role: ProjectMembershipRole.Admin,
projectMembershipId: projectMembership[0].id
await knex(TableName.ProjectMembership).insert({
projectId: project.id,
role: OrgMembershipRole.Admin,
userId: seedData1.id
});
const user = await knex(TableName.UserEncryptionKey).where({ userId: seedData1.id }).first();

@ -75,15 +75,9 @@ export async function seed(knex: Knex): Promise<void> {
}
]);
const identityProjectMembership = await knex(TableName.IdentityProjectMembership)
.insert({
identityId: seedData1.machineIdentity.id,
projectId: seedData1.project.id
})
.returning("*");
await knex(TableName.IdentityProjectMembershipRole).insert({
await knex(TableName.IdentityProjectMembership).insert({
identityId: seedData1.machineIdentity.id,
role: ProjectMembershipRole.Admin,
projectMembershipId: identityProjectMembership[0].id
projectId: seedData1.project.id
});
}

@ -1,197 +0,0 @@
import ms from "ms";
import { z } from "zod";
import { DynamicSecretLeasesSchema } from "@app/db/schemas";
import { DYNAMIC_SECRET_LEASES } from "@app/lib/api-docs";
import { daysToMillisecond } from "@app/lib/dates";
import { removeTrailingSlash } from "@app/lib/fn";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { SanitizedDynamicSecretSchema } from "@app/server/routes/sanitizedSchemas";
import { AuthMode } from "@app/services/auth/auth-type";
export const registerDynamicSecretLeaseRouter = async (server: FastifyZodProvider) => {
server.route({
method: "POST",
url: "/",
config: {
rateLimit: writeLimit
},
schema: {
body: z.object({
dynamicSecretName: z.string().min(1).describe(DYNAMIC_SECRET_LEASES.CREATE.dynamicSecretName).toLowerCase(),
projectSlug: z.string().min(1).describe(DYNAMIC_SECRET_LEASES.CREATE.projectSlug),
ttl: z
.string()
.optional()
.describe(DYNAMIC_SECRET_LEASES.CREATE.ttl)
.superRefine((val, ctx) => {
if (!val) return;
const valMs = ms(val);
if (valMs < 60 * 1000)
ctx.addIssue({ code: z.ZodIssueCode.custom, message: "TTL must be greater than 1 min" });
if (valMs > daysToMillisecond(1))
ctx.addIssue({ code: z.ZodIssueCode.custom, message: "TTL must be less than a day" });
}),
path: z.string().trim().default("/").transform(removeTrailingSlash).describe(DYNAMIC_SECRET_LEASES.CREATE.path),
environmentSlug: z.string().min(1).describe(DYNAMIC_SECRET_LEASES.CREATE.environmentSlug)
}),
response: {
200: z.object({
lease: DynamicSecretLeasesSchema,
dynamicSecret: SanitizedDynamicSecretSchema,
data: z.unknown()
})
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const { data, lease, dynamicSecret } = await server.services.dynamicSecretLease.create({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
name: req.body.dynamicSecretName,
...req.body
});
return { lease, data, dynamicSecret };
}
});
server.route({
method: "DELETE",
url: "/:leaseId",
config: {
rateLimit: writeLimit
},
schema: {
params: z.object({
leaseId: z.string().min(1).describe(DYNAMIC_SECRET_LEASES.DELETE.leaseId)
}),
body: z.object({
projectSlug: z.string().min(1).describe(DYNAMIC_SECRET_LEASES.DELETE.projectSlug),
path: z
.string()
.min(1)
.trim()
.default("/")
.transform(removeTrailingSlash)
.describe(DYNAMIC_SECRET_LEASES.DELETE.path),
environmentSlug: z.string().min(1).describe(DYNAMIC_SECRET_LEASES.DELETE.environmentSlug),
isForced: z.boolean().default(false).describe(DYNAMIC_SECRET_LEASES.DELETE.isForced)
}),
response: {
200: z.object({
lease: DynamicSecretLeasesSchema
})
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const lease = await server.services.dynamicSecretLease.revokeLease({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
leaseId: req.params.leaseId,
...req.body
});
return { lease };
}
});
server.route({
method: "POST",
url: "/:leaseId/renew",
config: {
rateLimit: writeLimit
},
schema: {
params: z.object({
leaseId: z.string().min(1).describe(DYNAMIC_SECRET_LEASES.RENEW.leaseId)
}),
body: z.object({
ttl: z
.string()
.describe(DYNAMIC_SECRET_LEASES.RENEW.ttl)
.optional()
.superRefine((val, ctx) => {
if (!val) return;
const valMs = ms(val);
if (valMs < 60 * 1000)
ctx.addIssue({ code: z.ZodIssueCode.custom, message: "TTL must be greater than 1 min" });
if (valMs > daysToMillisecond(1))
ctx.addIssue({ code: z.ZodIssueCode.custom, message: "TTL must be less than a day" });
}),
projectSlug: z.string().min(1).describe(DYNAMIC_SECRET_LEASES.RENEW.projectSlug),
path: z
.string()
.min(1)
.trim()
.default("/")
.transform(removeTrailingSlash)
.describe(DYNAMIC_SECRET_LEASES.RENEW.path),
environmentSlug: z.string().min(1).describe(DYNAMIC_SECRET_LEASES.RENEW.environmentSlug)
}),
response: {
200: z.object({
lease: DynamicSecretLeasesSchema
})
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const lease = await server.services.dynamicSecretLease.renewLease({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
leaseId: req.params.leaseId,
...req.body
});
return { lease };
}
});
server.route({
url: "/:leaseId",
method: "GET",
config: {
rateLimit: readLimit
},
schema: {
params: z.object({
leaseId: z.string().min(1).describe(DYNAMIC_SECRET_LEASES.GET_BY_LEASEID.leaseId)
}),
querystring: z.object({
projectSlug: z.string().min(1).describe(DYNAMIC_SECRET_LEASES.GET_BY_LEASEID.projectSlug),
path: z
.string()
.trim()
.default("/")
.transform(removeTrailingSlash)
.describe(DYNAMIC_SECRET_LEASES.GET_BY_LEASEID.path),
environmentSlug: z.string().min(1).describe(DYNAMIC_SECRET_LEASES.GET_BY_LEASEID.environmentSlug)
}),
response: {
200: z.object({
lease: DynamicSecretLeasesSchema.extend({
dynamicSecret: SanitizedDynamicSecretSchema
})
})
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const lease = await server.services.dynamicSecretLease.getLeaseDetails({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
leaseId: req.params.leaseId,
...req.query
});
return { lease };
}
});
};
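A small illustration of the TTL bounds enforced by the superRefine checks above, assuming the standard behaviour of the ms package:

import ms from "ms";

ms("30s"); // 30000     -> rejected, below 60 * 1000 (one minute)
ms("15m"); // 900000    -> accepted, between one minute and one day
ms("2d");  // 172800000 -> rejected, above daysToMillisecond(1)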

@ -1,290 +0,0 @@
import slugify from "@sindresorhus/slugify";
import ms from "ms";
import { z } from "zod";
import { DynamicSecretLeasesSchema } from "@app/db/schemas";
import { DynamicSecretProviderSchema } from "@app/ee/services/dynamic-secret/providers/models";
import { DYNAMIC_SECRETS } from "@app/lib/api-docs";
import { daysToMillisecond } from "@app/lib/dates";
import { removeTrailingSlash } from "@app/lib/fn";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { SanitizedDynamicSecretSchema } from "@app/server/routes/sanitizedSchemas";
import { AuthMode } from "@app/services/auth/auth-type";
export const registerDynamicSecretRouter = async (server: FastifyZodProvider) => {
server.route({
method: "POST",
url: "/",
config: {
rateLimit: writeLimit
},
schema: {
body: z.object({
projectSlug: z.string().min(1).describe(DYNAMIC_SECRETS.CREATE.projectSlug),
provider: DynamicSecretProviderSchema.describe(DYNAMIC_SECRETS.CREATE.provider),
defaultTTL: z
.string()
.describe(DYNAMIC_SECRETS.CREATE.defaultTTL)
.superRefine((val, ctx) => {
const valMs = ms(val);
if (valMs < 60 * 1000)
ctx.addIssue({ code: z.ZodIssueCode.custom, message: "TTL must be greater than 1 min" });
if (valMs > daysToMillisecond(1))
ctx.addIssue({ code: z.ZodIssueCode.custom, message: "TTL must be less than a day" });
}),
maxTTL: z
.string()
.describe(DYNAMIC_SECRETS.CREATE.maxTTL)
.optional()
.superRefine((val, ctx) => {
if (!val) return;
const valMs = ms(val);
if (valMs < 60 * 1000)
ctx.addIssue({ code: z.ZodIssueCode.custom, message: "TTL must be greater than 1 min" });
if (valMs > daysToMillisecond(1))
ctx.addIssue({ code: z.ZodIssueCode.custom, message: "TTL must be less than a day" });
})
.nullable(),
path: z.string().describe(DYNAMIC_SECRETS.CREATE.path).trim().default("/").transform(removeTrailingSlash),
environmentSlug: z.string().describe(DYNAMIC_SECRETS.CREATE.environmentSlug).min(1),
name: z
.string()
.describe(DYNAMIC_SECRETS.CREATE.name)
.min(1)
.toLowerCase()
.max(64)
.refine((v) => slugify(v) === v, {
message: "Slug must be a valid"
})
}),
response: {
200: z.object({
dynamicSecret: SanitizedDynamicSecretSchema
})
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const dynamicSecretCfg = await server.services.dynamicSecret.create({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
...req.body
});
return { dynamicSecret: dynamicSecretCfg };
}
});
server.route({
method: "PATCH",
url: "/:name",
config: {
rateLimit: writeLimit
},
schema: {
params: z.object({
name: z.string().toLowerCase().describe(DYNAMIC_SECRETS.UPDATE.name)
}),
body: z.object({
projectSlug: z.string().min(1).describe(DYNAMIC_SECRETS.UPDATE.projectSlug),
path: z.string().trim().default("/").transform(removeTrailingSlash).describe(DYNAMIC_SECRETS.UPDATE.path),
environmentSlug: z.string().min(1).describe(DYNAMIC_SECRETS.UPDATE.environmentSlug),
data: z.object({
inputs: z.any().optional().describe(DYNAMIC_SECRETS.UPDATE.inputs),
defaultTTL: z
.string()
.describe(DYNAMIC_SECRETS.UPDATE.defaultTTL)
.optional()
.superRefine((val, ctx) => {
if (!val) return;
const valMs = ms(val);
if (valMs < 60 * 1000)
ctx.addIssue({ code: z.ZodIssueCode.custom, message: "TTL must be greater than 1 min" });
if (valMs > daysToMillisecond(1))
ctx.addIssue({ code: z.ZodIssueCode.custom, message: "TTL must be less than a day" });
}),
maxTTL: z
.string()
.describe(DYNAMIC_SECRETS.UPDATE.maxTTL)
.optional()
.superRefine((val, ctx) => {
if (!val) return;
const valMs = ms(val);
if (valMs < 60 * 1000)
ctx.addIssue({ code: z.ZodIssueCode.custom, message: "TTL must be greater than 1 min" });
if (valMs > daysToMillisecond(1))
ctx.addIssue({ code: z.ZodIssueCode.custom, message: "TTL must be less than a day" });
})
.nullable(),
newName: z.string().describe(DYNAMIC_SECRETS.UPDATE.newName).optional()
})
}),
response: {
200: z.object({
dynamicSecret: SanitizedDynamicSecretSchema
})
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const dynamicSecretCfg = await server.services.dynamicSecret.updateByName({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
name: req.params.name,
path: req.body.path,
projectSlug: req.body.projectSlug,
environmentSlug: req.body.environmentSlug,
...req.body.data
});
return { dynamicSecret: dynamicSecretCfg };
}
});
server.route({
method: "DELETE",
url: "/:name",
config: {
rateLimit: writeLimit
},
schema: {
params: z.object({
name: z.string().toLowerCase().describe(DYNAMIC_SECRETS.DELETE.name)
}),
body: z.object({
projectSlug: z.string().min(1).describe(DYNAMIC_SECRETS.DELETE.projectSlug),
path: z.string().trim().default("/").transform(removeTrailingSlash).describe(DYNAMIC_SECRETS.DELETE.path),
environmentSlug: z.string().min(1).describe(DYNAMIC_SECRETS.DELETE.environmentSlug),
isForced: z.boolean().default(false).describe(DYNAMIC_SECRETS.DELETE.isForced)
}),
response: {
200: z.object({
dynamicSecret: SanitizedDynamicSecretSchema
})
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const dynamicSecretCfg = await server.services.dynamicSecret.deleteByName({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
name: req.params.name,
...req.body
});
return { dynamicSecret: dynamicSecretCfg };
}
});
server.route({
url: "/:name",
method: "GET",
config: {
rateLimit: readLimit
},
schema: {
params: z.object({
name: z.string().min(1).describe(DYNAMIC_SECRETS.GET_BY_NAME.name)
}),
querystring: z.object({
projectSlug: z.string().min(1).describe(DYNAMIC_SECRETS.GET_BY_NAME.projectSlug),
path: z.string().trim().default("/").transform(removeTrailingSlash).describe(DYNAMIC_SECRETS.GET_BY_NAME.path),
environmentSlug: z.string().min(1).describe(DYNAMIC_SECRETS.GET_BY_NAME.environmentSlug)
}),
response: {
200: z.object({
dynamicSecret: SanitizedDynamicSecretSchema.extend({
inputs: z.unknown()
})
})
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const dynamicSecretCfg = await server.services.dynamicSecret.getDetails({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
name: req.params.name,
...req.query
});
return { dynamicSecret: dynamicSecretCfg };
}
});
server.route({
url: "/",
method: "GET",
config: {
rateLimit: readLimit
},
schema: {
querystring: z.object({
projectSlug: z.string().min(1).describe(DYNAMIC_SECRETS.LIST.projectSlug),
path: z.string().trim().default("/").transform(removeTrailingSlash).describe(DYNAMIC_SECRETS.LIST.path),
environmentSlug: z.string().min(1).describe(DYNAMIC_SECRETS.LIST.environmentSlug)
}),
response: {
200: z.object({
dynamicSecrets: SanitizedDynamicSecretSchema.array()
})
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const dynamicSecretCfgs = await server.services.dynamicSecret.list({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
...req.query
});
return { dynamicSecrets: dynamicSecretCfgs };
}
});
server.route({
url: "/:name/leases",
method: "GET",
config: {
rateLimit: readLimit
},
schema: {
params: z.object({
name: z.string().min(1).describe(DYNAMIC_SECRETS.LIST_LEAES_BY_NAME.name)
}),
querystring: z.object({
projectSlug: z.string().min(1).describe(DYNAMIC_SECRETS.LIST_LEAES_BY_NAME.projectSlug),
path: z
.string()
.trim()
.default("/")
.transform(removeTrailingSlash)
.describe(DYNAMIC_SECRETS.LIST_LEAES_BY_NAME.path),
environmentSlug: z.string().min(1).describe(DYNAMIC_SECRETS.LIST_LEAES_BY_NAME.environmentSlug)
}),
response: {
200: z.object({
leases: DynamicSecretLeasesSchema.array()
})
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const leases = await server.services.dynamicSecretLease.listLeases({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
name: req.params.name,
...req.query
});
return { leases };
}
});
};

@ -1,220 +0,0 @@
import slugify from "@sindresorhus/slugify";
import { z } from "zod";
import { GroupsSchema, OrgMembershipRole, UsersSchema } from "@app/db/schemas";
import { GROUPS } from "@app/lib/api-docs";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
export const registerGroupRouter = async (server: FastifyZodProvider) => {
server.route({
url: "/",
method: "POST",
onRequest: verifyAuth([AuthMode.JWT]),
schema: {
body: z.object({
name: z.string().trim().min(1).max(50).describe(GROUPS.CREATE.name),
slug: z
.string()
.min(5)
.max(36)
.refine((v) => slugify(v) === v, {
message: "Slug must be a valid slug"
})
.optional()
.describe(GROUPS.CREATE.slug),
role: z.string().trim().min(1).default(OrgMembershipRole.NoAccess).describe(GROUPS.CREATE.role)
}),
response: {
200: GroupsSchema
}
},
handler: async (req) => {
const group = await server.services.group.createGroup({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
...req.body
});
return group;
}
});
server.route({
url: "/:currentSlug",
method: "PATCH",
onRequest: verifyAuth([AuthMode.JWT]),
schema: {
params: z.object({
currentSlug: z.string().trim().describe(GROUPS.UPDATE.currentSlug)
}),
body: z
.object({
name: z.string().trim().min(1).describe(GROUPS.UPDATE.name),
slug: z
.string()
.min(5)
.max(36)
.refine((v) => slugify(v) === v, {
message: "Slug must be a valid slug"
})
.describe(GROUPS.UPDATE.slug),
role: z.string().trim().min(1).describe(GROUPS.UPDATE.role)
})
.partial(),
response: {
200: GroupsSchema
}
},
handler: async (req) => {
const group = await server.services.group.updateGroup({
currentSlug: req.params.currentSlug,
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
...req.body
});
return group;
}
});
server.route({
url: "/:slug",
method: "DELETE",
onRequest: verifyAuth([AuthMode.JWT]),
schema: {
params: z.object({
slug: z.string().trim().describe(GROUPS.DELETE.slug)
}),
response: {
200: GroupsSchema
}
},
handler: async (req) => {
const group = await server.services.group.deleteGroup({
groupSlug: req.params.slug,
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId
});
return group;
}
});
server.route({
method: "GET",
url: "/:slug/users",
onRequest: verifyAuth([AuthMode.JWT]),
schema: {
params: z.object({
slug: z.string().trim().describe(GROUPS.LIST_USERS.slug)
}),
querystring: z.object({
offset: z.coerce.number().min(0).max(100).default(0).describe(GROUPS.LIST_USERS.offset),
limit: z.coerce.number().min(1).max(100).default(10).describe(GROUPS.LIST_USERS.limit),
username: z.string().optional().describe(GROUPS.LIST_USERS.username)
}),
response: {
200: z.object({
users: UsersSchema.pick({
email: true,
username: true,
firstName: true,
lastName: true,
id: true
})
.merge(
z.object({
isPartOfGroup: z.boolean()
})
)
.array(),
totalCount: z.number()
})
}
},
handler: async (req) => {
const { users, totalCount } = await server.services.group.listGroupUsers({
groupSlug: req.params.slug,
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
...req.query
});
return { users, totalCount };
}
});
server.route({
method: "POST",
url: "/:slug/users/:username",
onRequest: verifyAuth([AuthMode.JWT]),
schema: {
params: z.object({
slug: z.string().trim().describe(GROUPS.ADD_USER.slug),
username: z.string().trim().describe(GROUPS.ADD_USER.username)
}),
response: {
200: UsersSchema.pick({
email: true,
username: true,
firstName: true,
lastName: true,
id: true
})
}
},
handler: async (req) => {
const user = await server.services.group.addUserToGroup({
groupSlug: req.params.slug,
username: req.params.username,
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId
});
return user;
}
});
server.route({
method: "DELETE",
url: "/:slug/users/:username",
onRequest: verifyAuth([AuthMode.JWT]),
schema: {
params: z.object({
slug: z.string().trim().describe(GROUPS.DELETE_USER.slug),
username: z.string().trim().describe(GROUPS.DELETE_USER.username)
}),
response: {
200: UsersSchema.pick({
email: true,
username: true,
firstName: true,
lastName: true,
id: true
})
}
},
handler: async (req) => {
const user = await server.services.group.removeUserFromGroup({
groupSlug: req.params.slug,
username: req.params.username,
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId
});
return user;
}
});
};
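A quick illustration of the slug refinement used throughout this router, assuming the default behaviour of @sindresorhus/slugify:

import slugify from "@sindresorhus/slugify";

slugify("platform-team") === "platform-team"; // true  -> passes the refine
slugify("Platform Team") === "Platform Team"; // false -> rejected with "Slug must be a valid slug"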

@ -1,329 +0,0 @@
import { MongoAbility, RawRuleOf } from "@casl/ability";
import { PackRule, packRules, unpackRules } from "@casl/ability/extra";
import slugify from "@sindresorhus/slugify";
import ms from "ms";
import { z } from "zod";
import { IdentityProjectAdditionalPrivilegeSchema } from "@app/db/schemas";
import { IdentityProjectAdditionalPrivilegeTemporaryMode } from "@app/ee/services/identity-project-additional-privilege/identity-project-additional-privilege-types";
import { ProjectPermissionSet } from "@app/ee/services/permission/project-permission";
import { IDENTITY_ADDITIONAL_PRIVILEGE } from "@app/lib/api-docs";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
export const registerIdentityProjectAdditionalPrivilegeRouter = async (server: FastifyZodProvider) => {
server.route({
method: "POST",
url: "/permanent",
config: {
rateLimit: writeLimit
},
schema: {
description: "Create a permanent or a non expiry specific privilege for identity.",
security: [
{
bearerAuth: []
}
],
body: z.object({
identityId: z.string().min(1).describe(IDENTITY_ADDITIONAL_PRIVILEGE.CREATE.identityId),
projectSlug: z.string().min(1).describe(IDENTITY_ADDITIONAL_PRIVILEGE.CREATE.projectSlug),
slug: z
.string()
.min(1)
.max(60)
.trim()
.refine((val) => val.toLowerCase() === val, "Must be lowercase")
.refine((v) => slugify(v) === v, {
message: "Slug must be a valid slug"
})
.optional()
.describe(IDENTITY_ADDITIONAL_PRIVILEGE.CREATE.slug),
permissions: z.any().array().describe(IDENTITY_ADDITIONAL_PRIVILEGE.CREATE.permissions)
}),
response: {
200: z.object({
privilege: IdentityProjectAdditionalPrivilegeSchema
})
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const privilege = await server.services.identityProjectAdditionalPrivilege.create({
actorId: req.permission.id,
actor: req.permission.type,
actorOrgId: req.permission.orgId,
actorAuthMethod: req.permission.authMethod,
...req.body,
slug: req.body.slug ? slugify(req.body.slug) : slugify(alphaNumericNanoId(12)),
isTemporary: false,
permissions: JSON.stringify(packRules(req.body.permissions))
});
return { privilege };
}
});
server.route({
method: "POST",
url: "/temporary",
config: {
rateLimit: writeLimit
},
schema: {
description: "Create a temporary or a expiring specific privilege for identity.",
security: [
{
bearerAuth: []
}
],
body: z.object({
identityId: z.string().min(1).describe(IDENTITY_ADDITIONAL_PRIVILEGE.CREATE.identityId),
projectSlug: z.string().min(1).describe(IDENTITY_ADDITIONAL_PRIVILEGE.CREATE.projectSlug),
slug: z
.string()
.min(1)
.max(60)
.trim()
.refine((val) => val.toLowerCase() === val, "Must be lowercase")
.refine((v) => slugify(v) === v, {
message: "Slug must be a valid slug"
})
.optional()
.describe(IDENTITY_ADDITIONAL_PRIVILEGE.CREATE.slug),
permissions: z.any().array().describe(IDENTITY_ADDITIONAL_PRIVILEGE.CREATE.permissions),
temporaryMode: z
.nativeEnum(IdentityProjectAdditionalPrivilegeTemporaryMode)
.describe(IDENTITY_ADDITIONAL_PRIVILEGE.CREATE.temporaryMode),
temporaryRange: z
.string()
.refine((val) => ms(val) > 0, "Temporary range must be a positive number")
.describe(IDENTITY_ADDITIONAL_PRIVILEGE.CREATE.temporaryRange),
temporaryAccessStartTime: z
.string()
.datetime()
.describe(IDENTITY_ADDITIONAL_PRIVILEGE.CREATE.temporaryAccessStartTime)
}),
response: {
200: z.object({
privilege: IdentityProjectAdditionalPrivilegeSchema
})
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const privilege = await server.services.identityProjectAdditionalPrivilege.create({
actorId: req.permission.id,
actor: req.permission.type,
actorOrgId: req.permission.orgId,
actorAuthMethod: req.permission.authMethod,
...req.body,
slug: req.body.slug ? slugify(req.body.slug) : slugify(alphaNumericNanoId(12)),
isTemporary: true,
permissions: JSON.stringify(packRules(req.body.permissions))
});
return { privilege };
}
});
server.route({
method: "PATCH",
url: "/",
config: {
rateLimit: writeLimit
},
schema: {
description: "Update a specific privilege of an identity.",
security: [
{
bearerAuth: []
}
],
body: z.object({
// disallow empty string
privilegeSlug: z.string().min(1).describe(IDENTITY_ADDITIONAL_PRIVILEGE.UPDATE.slug),
identityId: z.string().min(1).describe(IDENTITY_ADDITIONAL_PRIVILEGE.UPDATE.identityId),
projectSlug: z.string().min(1).describe(IDENTITY_ADDITIONAL_PRIVILEGE.UPDATE.projectSlug),
privilegeDetails: z
.object({
slug: z
.string()
.min(1)
.max(60)
.trim()
.refine((val) => val.toLowerCase() === val, "Must be lowercase")
.refine((v) => slugify(v) === v, {
message: "Slug must be a valid slug"
})
.describe(IDENTITY_ADDITIONAL_PRIVILEGE.UPDATE.newSlug),
permissions: z.any().array().describe(IDENTITY_ADDITIONAL_PRIVILEGE.UPDATE.permissions),
isTemporary: z.boolean().describe(IDENTITY_ADDITIONAL_PRIVILEGE.UPDATE.isTemporary),
temporaryMode: z
.nativeEnum(IdentityProjectAdditionalPrivilegeTemporaryMode)
.describe(IDENTITY_ADDITIONAL_PRIVILEGE.UPDATE.temporaryMode),
temporaryRange: z
.string()
.refine((val) => ms(val) > 0, "Temporary range must be a positive number")
.describe(IDENTITY_ADDITIONAL_PRIVILEGE.UPDATE.temporaryRange),
temporaryAccessStartTime: z
.string()
.datetime()
.describe(IDENTITY_ADDITIONAL_PRIVILEGE.UPDATE.temporaryAccessStartTime)
})
.partial()
}),
response: {
200: z.object({
privilege: IdentityProjectAdditionalPrivilegeSchema
})
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const updatedInfo = req.body.privilegeDetails;
const privilege = await server.services.identityProjectAdditionalPrivilege.updateBySlug({
actorId: req.permission.id,
actor: req.permission.type,
actorOrgId: req.permission.orgId,
actorAuthMethod: req.permission.authMethod,
slug: req.body.privilegeSlug,
identityId: req.body.identityId,
projectSlug: req.body.projectSlug,
data: {
...updatedInfo,
permissions: updatedInfo?.permissions ? JSON.stringify(packRules(updatedInfo.permissions)) : undefined
}
});
return { privilege };
}
});
server.route({
method: "DELETE",
url: "/",
config: {
rateLimit: writeLimit
},
schema: {
description: "Delete a specific privilege of an identity.",
security: [
{
bearerAuth: []
}
],
body: z.object({
privilegeSlug: z.string().min(1).describe(IDENTITY_ADDITIONAL_PRIVILEGE.DELETE.slug),
identityId: z.string().min(1).describe(IDENTITY_ADDITIONAL_PRIVILEGE.DELETE.identityId),
projectSlug: z.string().min(1).describe(IDENTITY_ADDITIONAL_PRIVILEGE.DELETE.projectSlug)
}),
response: {
200: z.object({
privilege: IdentityProjectAdditionalPrivilegeSchema
})
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const privilege = await server.services.identityProjectAdditionalPrivilege.deleteBySlug({
actorId: req.permission.id,
actor: req.permission.type,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
slug: req.body.privilegeSlug,
identityId: req.body.identityId,
projectSlug: req.body.projectSlug
});
return { privilege };
}
});
server.route({
method: "GET",
url: "/:privilegeSlug",
config: {
rateLimit: readLimit
},
schema: {
description: "Retrieve details of a specific privilege by privilege slug.",
security: [
{
bearerAuth: []
}
],
params: z.object({
privilegeSlug: z.string().min(1).describe(IDENTITY_ADDITIONAL_PRIVILEGE.GET_BY_SLUG.slug)
}),
querystring: z.object({
identityId: z.string().min(1).describe(IDENTITY_ADDITIONAL_PRIVILEGE.GET_BY_SLUG.identityId),
projectSlug: z.string().min(1).describe(IDENTITY_ADDITIONAL_PRIVILEGE.GET_BY_SLUG.projectSlug)
}),
response: {
200: z.object({
privilege: IdentityProjectAdditionalPrivilegeSchema
})
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const privilege = await server.services.identityProjectAdditionalPrivilege.getPrivilegeDetailsBySlug({
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actor: req.permission.type,
actorOrgId: req.permission.orgId,
slug: req.params.privilegeSlug,
...req.query
});
return { privilege };
}
});
server.route({
method: "GET",
url: "/",
config: {
rateLimit: readLimit
},
schema: {
description: "List of a specific privilege of an identity in a project.",
security: [
{
bearerAuth: []
}
],
querystring: z.object({
identityId: z.string().min(1).describe(IDENTITY_ADDITIONAL_PRIVILEGE.LIST.identityId),
projectSlug: z.string().min(1).describe(IDENTITY_ADDITIONAL_PRIVILEGE.LIST.projectSlug),
unpacked: z
.enum(["false", "true"])
.transform((el) => el === "true")
.default("true")
.describe(IDENTITY_ADDITIONAL_PRIVILEGE.LIST.unpacked)
}),
response: {
200: z.object({
privileges: IdentityProjectAdditionalPrivilegeSchema.array()
})
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const privileges = await server.services.identityProjectAdditionalPrivilege.listIdentityProjectPrivileges({
actorId: req.permission.id,
actor: req.permission.type,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
...req.query
});
if (req.query.unpacked) {
return {
privileges: privileges.map(({ permissions, ...el }) => ({
...el,
permissions: unpackRules(permissions as PackRule<RawRuleOf<MongoAbility<ProjectPermissionSet>>>[])
}))
};
}
return { privileges };
}
});
};
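
// A minimal, self-contained sketch (not from this file) of the permission round trip the routes
// above rely on: CASL's packRules compacts rules before they are persisted as JSON, and
// unpackRules restores them when "?unpacked=true" is requested. The sample rule is hypothetical.
import { packRules, unpackRules } from "@casl/ability/extra";

const sampleRules = [{ action: "read", subject: "secrets" }];
const packed = packRules(sampleRules); // compact tuple form, the shape stored by the routes above
const restored = unpackRules(packed); // back to { action, subject } rule objects for API consumers
console.log(JSON.stringify(packed), JSON.stringify(restored));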

@ -1,8 +1,3 @@
import { registerDynamicSecretLeaseRouter } from "./dynamic-secret-lease-router";
import { registerDynamicSecretRouter } from "./dynamic-secret-router";
import { registerGroupRouter } from "./group-router";
import { registerIdentityProjectAdditionalPrivilegeRouter } from "./identity-project-additional-privilege-router";
import { registerLdapRouter } from "./ldap-router";
import { registerLicenseRouter } from "./license-router";
import { registerOrgRoleRouter } from "./org-role-router";
import { registerProjectRoleRouter } from "./project-role-router";
@ -17,7 +12,6 @@ import { registerSecretScanningRouter } from "./secret-scanning-router";
import { registerSecretVersionRouter } from "./secret-version-router";
import { registerSnapshotRouter } from "./snapshot-router";
import { registerTrustedIpRouter } from "./trusted-ip-router";
import { registerUserAdditionalPrivilegeRouter } from "./user-additional-privilege-router";
export const registerV1EERoutes = async (server: FastifyZodProvider) => {
// org role starts with organization
@ -39,27 +33,9 @@ export const registerV1EERoutes = async (server: FastifyZodProvider) => {
await server.register(registerSecretRotationProviderRouter, {
prefix: "/secret-rotation-providers"
});
await server.register(
async (dynamicSecretRouter) => {
await dynamicSecretRouter.register(registerDynamicSecretRouter);
await dynamicSecretRouter.register(registerDynamicSecretLeaseRouter, { prefix: "/leases" });
},
{ prefix: "/dynamic-secrets" }
);
await server.register(registerSamlRouter, { prefix: "/sso" });
await server.register(registerScimRouter, { prefix: "/scim" });
await server.register(registerLdapRouter, { prefix: "/ldap" });
await server.register(registerSecretScanningRouter, { prefix: "/secret-scanning" });
await server.register(registerSecretRotationRouter, { prefix: "/secret-rotations" });
await server.register(registerSecretVersionRouter, { prefix: "/secret" });
await server.register(registerGroupRouter, { prefix: "/groups" });
await server.register(
async (privilegeRouter) => {
await privilegeRouter.register(registerUserAdditionalPrivilegeRouter, { prefix: "/users" });
await privilegeRouter.register(registerIdentityProjectAdditionalPrivilegeRouter, { prefix: "/identity" });
},
{ prefix: "/additional-privilege" }
);
};

@ -1,207 +0,0 @@
/* eslint-disable @typescript-eslint/no-explicit-any */
/* eslint-disable @typescript-eslint/no-unsafe-return */
/* eslint-disable @typescript-eslint/no-unsafe-member-access */
/* eslint-disable @typescript-eslint/no-unsafe-assignment */
/* eslint-disable @typescript-eslint/no-unsafe-call */
/* eslint-disable @typescript-eslint/no-unsafe-argument */
// All the any rules are disabled because passport's type support with fastify is really poor
import { IncomingMessage } from "node:http";
import { Authenticator } from "@fastify/passport";
import fastifySession from "@fastify/session";
import { FastifyRequest } from "fastify";
import LdapStrategy from "passport-ldapauth";
import { z } from "zod";
import { LdapConfigsSchema } from "@app/db/schemas";
import { getConfig } from "@app/lib/config/env";
import { logger } from "@app/lib/logger";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
export const registerLdapRouter = async (server: FastifyZodProvider) => {
const appCfg = getConfig();
const passport = new Authenticator({ key: "ldap", userProperty: "passportUser" });
await server.register(fastifySession, { secret: appCfg.COOKIE_SECRET_SIGN_KEY });
await server.register(passport.initialize());
await server.register(passport.secureSession());
const getLdapPassportOpts = (req: FastifyRequest, done: any) => {
const { organizationSlug } = req.body as {
organizationSlug: string;
};
process.nextTick(async () => {
try {
const { opts, ldapConfig } = await server.services.ldap.bootLdap(organizationSlug);
req.ldapConfig = ldapConfig;
done(null, opts);
} catch (err) {
done(err);
}
});
};
passport.use(
new LdapStrategy(
getLdapPassportOpts as any,
// eslint-disable-next-line
async (req: IncomingMessage, user, cb) => {
try {
const { isUserCompleted, providerAuthToken } = await server.services.ldap.ldapLogin({
externalId: user.uidNumber,
username: user.uid,
firstName: user.givenName,
lastName: user.sn,
emails: user.mail ? [user.mail] : [],
relayState: ((req as unknown as FastifyRequest).body as { RelayState?: string }).RelayState,
orgId: (req as unknown as FastifyRequest).ldapConfig.organization
});
return cb(null, { isUserCompleted, providerAuthToken });
} catch (err) {
logger.error(err);
return cb(err, false);
}
}
)
);
server.route({
url: "/login",
method: "POST",
schema: {
body: z.object({
organizationSlug: z.string().trim()
})
},
preValidation: passport.authenticate("ldapauth", {
session: false
// failureFlash: true,
// failureRedirect: "/login/provider/error"
// this is due to zod type difference
}) as any,
handler: (req, res) => {
let nextUrl;
if (req.passportUser.isUserCompleted) {
nextUrl = `${appCfg.SITE_URL}/login/sso?token=${encodeURIComponent(req.passportUser.providerAuthToken)}`;
} else {
nextUrl = `${appCfg.SITE_URL}/signup/sso?token=${encodeURIComponent(req.passportUser.providerAuthToken)}`;
}
return res.status(200).send({
nextUrl
});
}
});
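  // Illustrative response from the login handler above (the host and token are fabricated;
  // SITE_URL comes from deployment config):
  // { "nextUrl": "https://app.example.com/login/sso?token=<provider-auth-token>" }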
server.route({
method: "GET",
url: "/config",
config: {
rateLimit: readLimit
},
onRequest: verifyAuth([AuthMode.JWT]),
schema: {
querystring: z.object({
organizationId: z.string().trim()
}),
response: {
200: z.object({
id: z.string(),
organization: z.string(),
isActive: z.boolean(),
url: z.string(),
bindDN: z.string(),
bindPass: z.string(),
searchBase: z.string(),
caCert: z.string()
})
}
},
handler: async (req) => {
const ldap = await server.services.ldap.getLdapCfgWithPermissionCheck({
actor: req.permission.type,
actorId: req.permission.id,
orgId: req.query.organizationId,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId
});
return ldap;
}
});
server.route({
method: "POST",
url: "/config",
config: {
rateLimit: writeLimit
},
onRequest: verifyAuth([AuthMode.JWT]),
schema: {
body: z.object({
organizationId: z.string().trim(),
isActive: z.boolean(),
url: z.string().trim(),
bindDN: z.string().trim(),
bindPass: z.string().trim(),
searchBase: z.string().trim(),
caCert: z.string().trim().default("")
}),
response: {
200: LdapConfigsSchema
}
},
handler: async (req) => {
const ldap = await server.services.ldap.createLdapCfg({
actor: req.permission.type,
actorId: req.permission.id,
orgId: req.body.organizationId,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
...req.body
});
return ldap;
}
});
server.route({
url: "/config",
method: "PATCH",
config: {
rateLimit: writeLimit
},
onRequest: verifyAuth([AuthMode.JWT]),
schema: {
body: z
.object({
isActive: z.boolean(),
url: z.string().trim(),
bindDN: z.string().trim(),
bindPass: z.string().trim(),
searchBase: z.string().trim(),
caCert: z.string().trim()
})
.partial()
.merge(z.object({ organizationId: z.string() })),
response: {
200: LdapConfigsSchema
}
},
handler: async (req) => {
const ldap = await server.services.ldap.updateLdapCfg({
actor: req.permission.type,
actorId: req.permission.id,
orgId: req.body.organizationId,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
...req.body
});
return ldap;
}
});
};
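
// Illustrative payload for the POST /config route above (not part of the source); all values are
// hypothetical:
// {
//   "organizationId": "<org-id>",
//   "isActive": true,
//   "url": "ldaps://ldap.example.com:636",
//   "bindDN": "cn=admin,dc=example,dc=com",
//   "bindPass": "<bind-password>",
//   "searchBase": "ou=people,dc=example,dc=com",
//   "caCert": ""
// }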

@ -3,17 +3,13 @@
// TODO(akhilmhdh): Fix this when the license service gets its types
import { z } from "zod";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
export const registerLicenseRouter = async (server: FastifyZodProvider) => {
server.route({
method: "GET",
url: "/:organizationId/plans/table",
config: {
rateLimit: readLimit
},
method: "GET",
schema: {
querystring: z.object({ billingCycle: z.enum(["monthly", "yearly"]) }),
params: z.object({ organizationId: z.string().trim() }),
@ -28,7 +24,6 @@ export const registerLicenseRouter = async (server: FastifyZodProvider) => {
actor: req.permission.type,
actorOrgId: req.permission.orgId,
orgId: req.params.organizationId,
actorAuthMethod: req.permission.authMethod,
billingCycle: req.query.billingCycle
});
return data;
@ -36,11 +31,8 @@ export const registerLicenseRouter = async (server: FastifyZodProvider) => {
});
server.route({
method: "GET",
url: "/:organizationId/plan",
config: {
rateLimit: readLimit
},
method: "GET",
schema: {
params: z.object({ organizationId: z.string().trim() }),
response: {
@ -53,7 +45,6 @@ export const registerLicenseRouter = async (server: FastifyZodProvider) => {
actorId: req.permission.id,
actor: req.permission.type,
actorOrgId: req.permission.orgId,
actorAuthMethod: req.permission.authMethod,
orgId: req.params.organizationId
});
return { plan };
@ -61,11 +52,8 @@ export const registerLicenseRouter = async (server: FastifyZodProvider) => {
});
server.route({
method: "GET",
url: "/:organizationId/plans",
config: {
rateLimit: readLimit
},
method: "GET",
schema: {
params: z.object({ organizationId: z.string().trim() }),
querystring: z.object({ workspaceId: z.string().trim().optional() }),
@ -78,8 +66,6 @@ export const registerLicenseRouter = async (server: FastifyZodProvider) => {
const data = await server.services.license.getOrgPlan({
actorId: req.permission.id,
actor: req.permission.type,
actorOrgId: req.permission.orgId,
actorAuthMethod: req.permission.authMethod,
orgId: req.params.organizationId
});
return data;
@ -87,11 +73,8 @@ export const registerLicenseRouter = async (server: FastifyZodProvider) => {
});
server.route({
method: "POST",
url: "/:organizationId/session/trial",
config: {
rateLimit: writeLimit
},
method: "POST",
schema: {
params: z.object({ organizationId: z.string().trim() }),
body: z.object({ success_url: z.string().trim() }),
@ -106,7 +89,6 @@ export const registerLicenseRouter = async (server: FastifyZodProvider) => {
actor: req.permission.type,
actorOrgId: req.permission.orgId,
orgId: req.params.organizationId,
actorAuthMethod: req.permission.authMethod,
success_url: req.body.success_url
});
return data;
@ -116,9 +98,6 @@ export const registerLicenseRouter = async (server: FastifyZodProvider) => {
server.route({
url: "/:organizationId/customer-portal-session",
method: "POST",
config: {
rateLimit: writeLimit
},
schema: {
params: z.object({ organizationId: z.string().trim() }),
response: {
@ -131,7 +110,6 @@ export const registerLicenseRouter = async (server: FastifyZodProvider) => {
actorId: req.permission.id,
actor: req.permission.type,
actorOrgId: req.permission.orgId,
actorAuthMethod: req.permission.authMethod,
orgId: req.params.organizationId
});
return data;
@ -139,11 +117,8 @@ export const registerLicenseRouter = async (server: FastifyZodProvider) => {
});
server.route({
method: "GET",
url: "/:organizationId/plan/billing",
config: {
rateLimit: readLimit
},
method: "GET",
schema: {
params: z.object({ organizationId: z.string().trim() }),
response: {
@ -156,7 +131,6 @@ export const registerLicenseRouter = async (server: FastifyZodProvider) => {
actorId: req.permission.id,
actor: req.permission.type,
actorOrgId: req.permission.orgId,
actorAuthMethod: req.permission.authMethod,
orgId: req.params.organizationId
});
return data;
@ -164,11 +138,8 @@ export const registerLicenseRouter = async (server: FastifyZodProvider) => {
});
server.route({
method: "GET",
url: "/:organizationId/plan/table",
config: {
rateLimit: readLimit
},
method: "GET",
schema: {
params: z.object({ organizationId: z.string().trim() }),
response: {
@ -181,7 +152,6 @@ export const registerLicenseRouter = async (server: FastifyZodProvider) => {
actorId: req.permission.id,
actor: req.permission.type,
actorOrgId: req.permission.orgId,
actorAuthMethod: req.permission.authMethod,
orgId: req.params.organizationId
});
return data;
@ -189,11 +159,8 @@ export const registerLicenseRouter = async (server: FastifyZodProvider) => {
});
server.route({
method: "GET",
url: "/:organizationId/billing-details",
config: {
rateLimit: readLimit
},
method: "GET",
schema: {
params: z.object({ organizationId: z.string().trim() }),
response: {
@ -206,7 +173,6 @@ export const registerLicenseRouter = async (server: FastifyZodProvider) => {
actorId: req.permission.id,
actor: req.permission.type,
actorOrgId: req.permission.orgId,
actorAuthMethod: req.permission.authMethod,
orgId: req.params.organizationId
});
return data;
@ -214,11 +180,8 @@ export const registerLicenseRouter = async (server: FastifyZodProvider) => {
});
server.route({
method: "PATCH",
url: "/:organizationId/billing-details",
config: {
rateLimit: writeLimit
},
method: "PATCH",
schema: {
params: z.object({ organizationId: z.string().trim() }),
body: z.object({
@ -235,7 +198,6 @@ export const registerLicenseRouter = async (server: FastifyZodProvider) => {
actorId: req.permission.id,
actor: req.permission.type,
actorOrgId: req.permission.orgId,
actorAuthMethod: req.permission.authMethod,
orgId: req.params.organizationId,
name: req.body.name,
email: req.body.email
@ -245,11 +207,8 @@ export const registerLicenseRouter = async (server: FastifyZodProvider) => {
});
server.route({
method: "GET",
url: "/:organizationId/billing-details/payment-methods",
config: {
rateLimit: readLimit
},
method: "GET",
schema: {
params: z.object({ organizationId: z.string().trim() }),
response: {
@ -262,7 +221,6 @@ export const registerLicenseRouter = async (server: FastifyZodProvider) => {
actorId: req.permission.id,
actor: req.permission.type,
actorOrgId: req.permission.orgId,
actorAuthMethod: req.permission.authMethod,
orgId: req.params.organizationId
});
return data;
@ -270,11 +228,8 @@ export const registerLicenseRouter = async (server: FastifyZodProvider) => {
});
server.route({
method: "POST",
url: "/:organizationId/billing-details/payment-methods",
config: {
rateLimit: writeLimit
},
method: "POST",
schema: {
params: z.object({ organizationId: z.string().trim() }),
body: z.object({
@ -291,7 +246,6 @@ export const registerLicenseRouter = async (server: FastifyZodProvider) => {
actorId: req.permission.id,
actor: req.permission.type,
actorOrgId: req.permission.orgId,
actorAuthMethod: req.permission.authMethod,
orgId: req.params.organizationId,
success_url: req.body.success_url,
cancel_url: req.body.cancel_url
@ -301,11 +255,8 @@ export const registerLicenseRouter = async (server: FastifyZodProvider) => {
});
server.route({
method: "DELETE",
url: "/:organizationId/billing-details/payment-methods/:pmtMethodId",
config: {
rateLimit: writeLimit
},
method: "DELETE",
schema: {
params: z.object({
organizationId: z.string().trim(),
@ -320,7 +271,6 @@ export const registerLicenseRouter = async (server: FastifyZodProvider) => {
const data = await server.services.license.delOrgPmtMethods({
actorId: req.permission.id,
actor: req.permission.type,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
orgId: req.params.organizationId,
pmtMethodId: req.params.pmtMethodId
@ -330,11 +280,8 @@ export const registerLicenseRouter = async (server: FastifyZodProvider) => {
});
server.route({
method: "GET",
url: "/:organizationId/billing-details/tax-ids",
config: {
rateLimit: readLimit
},
method: "GET",
schema: {
params: z.object({
organizationId: z.string().trim()
@ -348,7 +295,6 @@ export const registerLicenseRouter = async (server: FastifyZodProvider) => {
const data = await server.services.license.getOrgTaxIds({
actorId: req.permission.id,
actor: req.permission.type,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
orgId: req.params.organizationId
});
@ -357,11 +303,8 @@ export const registerLicenseRouter = async (server: FastifyZodProvider) => {
});
server.route({
method: "POST",
url: "/:organizationId/billing-details/tax-ids",
config: {
rateLimit: writeLimit
},
method: "POST",
schema: {
params: z.object({
organizationId: z.string().trim()
@ -379,7 +322,6 @@ export const registerLicenseRouter = async (server: FastifyZodProvider) => {
const data = await server.services.license.addOrgTaxId({
actorId: req.permission.id,
actor: req.permission.type,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
orgId: req.params.organizationId,
type: req.body.type,
@ -390,11 +332,8 @@ export const registerLicenseRouter = async (server: FastifyZodProvider) => {
});
server.route({
method: "DELETE",
url: "/:organizationId/billing-details/tax-ids/:taxId",
config: {
rateLimit: writeLimit
},
method: "DELETE",
schema: {
params: z.object({
organizationId: z.string().trim(),
@ -409,7 +348,6 @@ export const registerLicenseRouter = async (server: FastifyZodProvider) => {
const data = await server.services.license.delOrgTaxId({
actorId: req.permission.id,
actor: req.permission.type,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
orgId: req.params.organizationId,
taxId: req.params.taxId
@ -419,11 +357,8 @@ export const registerLicenseRouter = async (server: FastifyZodProvider) => {
});
server.route({
method: "GET",
url: "/:organizationId/invoices",
config: {
rateLimit: readLimit
},
method: "GET",
schema: {
params: z.object({
organizationId: z.string().trim()
@ -438,19 +373,15 @@ export const registerLicenseRouter = async (server: FastifyZodProvider) => {
actorId: req.permission.id,
actor: req.permission.type,
actorOrgId: req.permission.orgId,
orgId: req.params.organizationId,
actorAuthMethod: req.permission.authMethod
orgId: req.params.organizationId
});
return data;
}
});
server.route({
method: "GET",
url: "/:organizationId/licenses",
config: {
rateLimit: readLimit
},
method: "GET",
schema: {
params: z.object({
organizationId: z.string().trim()
@ -465,7 +396,6 @@ export const registerLicenseRouter = async (server: FastifyZodProvider) => {
actorId: req.permission.id,
actor: req.permission.type,
actorOrgId: req.permission.orgId,
actorAuthMethod: req.permission.authMethod,
orgId: req.params.organizationId
});
return data;

@ -1,8 +1,6 @@
import slugify from "@sindresorhus/slugify";
import { z } from "zod";
import { OrgMembershipRole, OrgMembershipsSchema, OrgRolesSchema } from "@app/db/schemas";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { OrgMembershipsSchema, OrgRolesSchema } from "@app/db/schemas";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
@ -10,25 +8,12 @@ export const registerOrgRoleRouter = async (server: FastifyZodProvider) => {
server.route({
method: "POST",
url: "/:organizationId/roles",
config: {
rateLimit: writeLimit
},
schema: {
params: z.object({
organizationId: z.string().trim()
}),
body: z.object({
slug: z
.string()
.min(1)
.trim()
.refine(
(val) => !Object.keys(OrgMembershipRole).includes(val),
"Please choose a different slug, the slug you have entered is reserved"
)
.refine((v) => slugify(v) === v, {
message: "Slug must be a valid"
}),
slug: z.string().trim(),
name: z.string().trim(),
description: z.string().trim().optional(),
permissions: z.any().array()
@ -45,7 +30,6 @@ export const registerOrgRoleRouter = async (server: FastifyZodProvider) => {
req.permission.id,
req.params.organizationId,
req.body,
req.permission.authMethod,
req.permission.orgId
);
return { role };
@ -55,26 +39,13 @@ export const registerOrgRoleRouter = async (server: FastifyZodProvider) => {
server.route({
method: "PATCH",
url: "/:organizationId/roles/:roleId",
config: {
rateLimit: writeLimit
},
schema: {
params: z.object({
organizationId: z.string().trim(),
roleId: z.string().trim()
}),
body: z.object({
slug: z
.string()
.trim()
.optional()
.refine(
(val) => typeof val === "undefined" || Object.keys(OrgMembershipRole).includes(val),
"Please choose a different slug, the slug you have entered is reserved."
)
.refine((val) => typeof val === "undefined" || slugify(val) === val, {
message: "Slug must be a valid"
}),
slug: z.string().trim().optional(),
name: z.string().trim().optional(),
description: z.string().trim().optional(),
permissions: z.any().array()
@ -92,7 +63,6 @@ export const registerOrgRoleRouter = async (server: FastifyZodProvider) => {
req.params.organizationId,
req.params.roleId,
req.body,
req.permission.authMethod,
req.permission.orgId
);
return { role };
@ -102,9 +72,6 @@ export const registerOrgRoleRouter = async (server: FastifyZodProvider) => {
server.route({
method: "DELETE",
url: "/:organizationId/roles/:roleId",
config: {
rateLimit: writeLimit
},
schema: {
params: z.object({
organizationId: z.string().trim(),
@ -122,7 +89,6 @@ export const registerOrgRoleRouter = async (server: FastifyZodProvider) => {
req.permission.id,
req.params.organizationId,
req.params.roleId,
req.permission.authMethod,
req.permission.orgId
);
return { role };
@ -132,9 +98,6 @@ export const registerOrgRoleRouter = async (server: FastifyZodProvider) => {
server.route({
method: "GET",
url: "/:organizationId/roles",
config: {
rateLimit: readLimit
},
schema: {
params: z.object({
organizationId: z.string().trim()
@ -154,7 +117,6 @@ export const registerOrgRoleRouter = async (server: FastifyZodProvider) => {
const roles = await server.services.orgRole.listRoles(
req.permission.id,
req.params.organizationId,
req.permission.authMethod,
req.permission.orgId
);
return { data: { roles } };
@ -164,9 +126,6 @@ export const registerOrgRoleRouter = async (server: FastifyZodProvider) => {
server.route({
method: "GET",
url: "/:organizationId/permissions",
config: {
rateLimit: readLimit
},
schema: {
params: z.object({
organizationId: z.string().trim()
@ -183,7 +142,6 @@ export const registerOrgRoleRouter = async (server: FastifyZodProvider) => {
const { permissions, membership } = await server.services.orgRole.getUserPermission(
req.permission.id,
req.params.organizationId,
req.permission.authMethod,
req.permission.orgId
);
return { permissions, membership };

@ -1,7 +1,6 @@
import { z } from "zod";
import { ProjectMembershipsSchema, ProjectRolesSchema } from "@app/db/schemas";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
@ -9,9 +8,6 @@ export const registerProjectRoleRouter = async (server: FastifyZodProvider) => {
server.route({
method: "POST",
url: "/:projectId/roles",
config: {
rateLimit: writeLimit
},
schema: {
params: z.object({
projectId: z.string().trim()
@ -35,7 +31,6 @@ export const registerProjectRoleRouter = async (server: FastifyZodProvider) => {
req.permission.id,
req.params.projectId,
req.body,
req.permission.authMethod,
req.permission.orgId
);
return { role };
@ -45,9 +40,6 @@ export const registerProjectRoleRouter = async (server: FastifyZodProvider) => {
server.route({
method: "PATCH",
url: "/:projectId/roles/:roleId",
config: {
rateLimit: writeLimit
},
schema: {
params: z.object({
projectId: z.string().trim(),
@ -73,7 +65,6 @@ export const registerProjectRoleRouter = async (server: FastifyZodProvider) => {
req.params.projectId,
req.params.roleId,
req.body,
req.permission.authMethod,
req.permission.orgId
);
return { role };
@ -83,9 +74,6 @@ export const registerProjectRoleRouter = async (server: FastifyZodProvider) => {
server.route({
method: "DELETE",
url: "/:projectId/roles/:roleId",
config: {
rateLimit: writeLimit
},
schema: {
params: z.object({
projectId: z.string().trim(),
@ -104,7 +92,6 @@ export const registerProjectRoleRouter = async (server: FastifyZodProvider) => {
req.permission.id,
req.params.projectId,
req.params.roleId,
req.permission.authMethod,
req.permission.orgId
);
return { role };
@ -114,9 +101,6 @@ export const registerProjectRoleRouter = async (server: FastifyZodProvider) => {
server.route({
method: "GET",
url: "/:projectId/roles",
config: {
rateLimit: readLimit
},
schema: {
params: z.object({
projectId: z.string().trim()
@ -137,7 +121,6 @@ export const registerProjectRoleRouter = async (server: FastifyZodProvider) => {
req.permission.type,
req.permission.id,
req.params.projectId,
req.permission.authMethod,
req.permission.orgId
);
return { data: { roles } };
@ -147,9 +130,6 @@ export const registerProjectRoleRouter = async (server: FastifyZodProvider) => {
server.route({
method: "GET",
url: "/:projectId/permissions",
config: {
rateLimit: readLimit
},
schema: {
params: z.object({
projectId: z.string().trim()
@ -157,13 +137,7 @@ export const registerProjectRoleRouter = async (server: FastifyZodProvider) => {
response: {
200: z.object({
data: z.object({
membership: ProjectMembershipsSchema.extend({
roles: z
.object({
role: z.string()
})
.array()
}),
membership: ProjectMembershipsSchema,
permissions: z.any().array()
})
})
@ -174,10 +148,8 @@ export const registerProjectRoleRouter = async (server: FastifyZodProvider) => {
const { permissions, membership } = await server.services.projectRole.getUserPermission(
req.permission.id,
req.params.projectId,
req.permission.authMethod,
req.permission.orgId
);
return { data: { permissions, membership } };
}
});

@ -2,9 +2,7 @@ import { z } from "zod";
import { AuditLogsSchema, SecretSnapshotsSchema } from "@app/db/schemas";
import { EventType, UserAgentType } from "@app/ee/services/audit-log/audit-log-types";
import { AUDIT_LOGS, PROJECTS } from "@app/lib/api-docs";
import { getLastMidnightDateISO, removeTrailingSlash } from "@app/lib/fn";
import { readLimit } from "@app/server/config/rateLimiter";
import { removeTrailingSlash } from "@app/lib/fn";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
@ -12,24 +10,22 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
server.route({
method: "GET",
url: "/:workspaceId/secret-snapshots",
config: {
rateLimit: readLimit
},
schema: {
description: "Return project secret snapshots ids",
security: [
{
apiKeyAuth: [],
bearerAuth: []
}
],
params: z.object({
workspaceId: z.string().trim().describe(PROJECTS.GET_SNAPSHOTS.workspaceId)
workspaceId: z.string().trim()
}),
querystring: z.object({
environment: z.string().trim().describe(PROJECTS.GET_SNAPSHOTS.environment),
path: z.string().trim().default("/").transform(removeTrailingSlash).describe(PROJECTS.GET_SNAPSHOTS.path),
offset: z.coerce.number().default(0).describe(PROJECTS.GET_SNAPSHOTS.offset),
limit: z.coerce.number().default(20).describe(PROJECTS.GET_SNAPSHOTS.limit)
environment: z.string().trim(),
path: z.string().trim().default("/").transform(removeTrailingSlash),
offset: z.coerce.number().default(0),
limit: z.coerce.number().default(20)
}),
response: {
200: z.object({
@ -41,7 +37,6 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
handler: async (req) => {
const secretSnapshots = await server.services.snapshot.listSnapshots({
actor: req.permission.type,
actorAuthMethod: req.permission.authMethod,
actorId: req.permission.id,
actorOrgId: req.permission.orgId,
projectId: req.params.workspaceId,
@ -54,9 +49,6 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
server.route({
method: "GET",
url: "/:workspaceId/secret-snapshots/count",
config: {
rateLimit: readLimit
},
schema: {
params: z.object({
workspaceId: z.string().trim()
@ -76,7 +68,6 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
const count = await server.services.snapshot.projectSecretSnapshotCount({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
projectId: req.params.workspaceId,
environment: req.query.environment,
@ -89,27 +80,25 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
server.route({
method: "GET",
url: "/:workspaceId/audit-logs",
config: {
rateLimit: readLimit
},
schema: {
description: "Return audit logs",
security: [
{
bearerAuth: []
bearerAuth: [],
apiKeyAuth: []
}
],
params: z.object({
workspaceId: z.string().trim().describe(AUDIT_LOGS.EXPORT.workspaceId)
workspaceId: z.string().trim()
}),
querystring: z.object({
eventType: z.nativeEnum(EventType).optional().describe(AUDIT_LOGS.EXPORT.eventType),
userAgentType: z.nativeEnum(UserAgentType).optional().describe(AUDIT_LOGS.EXPORT.userAgentType),
startDate: z.string().datetime().optional().describe(AUDIT_LOGS.EXPORT.startDate),
endDate: z.string().datetime().optional().describe(AUDIT_LOGS.EXPORT.endDate),
offset: z.coerce.number().default(0).describe(AUDIT_LOGS.EXPORT.offset),
limit: z.coerce.number().default(20).describe(AUDIT_LOGS.EXPORT.limit),
actor: z.string().optional().describe(AUDIT_LOGS.EXPORT.actor)
eventType: z.nativeEnum(EventType).optional(),
userAgentType: z.nativeEnum(UserAgentType).optional(),
startDate: z.string().datetime().optional(),
endDate: z.string().datetime().optional(),
offset: z.coerce.number().default(0),
limit: z.coerce.number().default(20),
actor: z.string().optional()
}),
response: {
200: z.object({
@ -140,10 +129,8 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
const auditLogs = await server.services.auditLog.listProjectAuditLogs({
actorId: req.permission.id,
actorOrgId: req.permission.orgId,
actorAuthMethod: req.permission.authMethod,
projectId: req.params.workspaceId,
...req.query,
startDate: req.query.endDate || getLastMidnightDateISO(),
auditLogActor: req.query.actor,
actor: req.permission.type
});
@ -154,9 +141,6 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
server.route({
method: "GET",
url: "/:workspaceId/audit-logs/filters/actors",
config: {
rateLimit: readLimit
},
schema: {
params: z.object({
workspaceId: z.string().trim()

@ -17,7 +17,6 @@ import { SamlProviders, TGetSamlCfgDTO } from "@app/ee/services/saml-config/saml
import { getConfig } from "@app/lib/config/env";
import { BadRequestError } from "@app/lib/errors";
import { logger } from "@app/lib/logger";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
@ -28,7 +27,6 @@ type TSAMLConfig = {
cert: string;
audience: string;
wantAuthnResponseSigned?: boolean;
wantAssertionsSigned?: boolean;
disableRequestedAuthnContext?: boolean;
};
@ -84,10 +82,6 @@ export const registerSamlRouter = async (server: FastifyZodProvider) => {
samlConfig.audience = `spn:${ssoConfig.issuer}`;
}
}
if (ssoConfig.authProvider === SamlProviders.GOOGLE_SAML) {
samlConfig.wantAssertionsSigned = false;
}
(req as unknown as FastifyRequest).ssoConfig = ssoConfig;
done(null, samlConfig);
} catch (error) {
@ -100,14 +94,14 @@ export const registerSamlRouter = async (server: FastifyZodProvider) => {
async (req, profile, cb) => {
try {
if (!profile) throw new BadRequestError({ message: "Missing profile" });
const { firstName } = profile;
        const email = profile?.email ?? (profile?.emailAddress as string); // emailAddress is used as a fallback because in Rippling the field `email` is reserved
if (!profile.email || !profile.firstName) {
if (!email || !firstName) {
throw new BadRequestError({ message: "Invalid request. Missing email or first name" });
}
const { isUserCompleted, providerAuthToken } = await server.services.saml.samlLogin({
username: profile.nameID ?? email,
email,
firstName: profile.firstName as string,
lastName: profile.lastName as string,
@ -204,11 +198,8 @@ export const registerSamlRouter = async (server: FastifyZodProvider) => {
});
server.route({
method: "GET",
url: "/config",
config: {
rateLimit: readLimit
},
method: "GET",
onRequest: verifyAuth([AuthMode.JWT]),
schema: {
querystring: z.object({
@ -235,7 +226,6 @@ export const registerSamlRouter = async (server: FastifyZodProvider) => {
actor: req.permission.type,
actorId: req.permission.id,
actorOrgId: req.permission.orgId,
actorAuthMethod: req.permission.authMethod,
orgId: req.query.organizationId,
type: "org"
});
@ -244,11 +234,8 @@ export const registerSamlRouter = async (server: FastifyZodProvider) => {
});
server.route({
method: "POST",
url: "/config",
config: {
rateLimit: writeLimit
},
method: "POST",
onRequest: verifyAuth([AuthMode.JWT]),
schema: {
body: z.object({
@ -267,7 +254,6 @@ export const registerSamlRouter = async (server: FastifyZodProvider) => {
const saml = await server.services.saml.createSamlCfg({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
orgId: req.body.organizationId,
...req.body
@ -277,11 +263,8 @@ export const registerSamlRouter = async (server: FastifyZodProvider) => {
});
server.route({
method: "PATCH",
url: "/config",
config: {
rateLimit: writeLimit
},
method: "PATCH",
onRequest: verifyAuth([AuthMode.JWT]),
schema: {
body: z
@ -302,7 +285,6 @@ export const registerSamlRouter = async (server: FastifyZodProvider) => {
const saml = await server.services.saml.updateSamlCfg({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
orgId: req.body.organizationId,
...req.body

@ -1,7 +1,6 @@
import { z } from "zod";
import { ScimTokensSchema } from "@app/db/schemas";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
@ -21,9 +20,6 @@ export const registerScimRouter = async (server: FastifyZodProvider) => {
server.route({
url: "/scim-tokens",
method: "POST",
config: {
rateLimit: writeLimit
},
onRequest: verifyAuth([AuthMode.JWT]),
schema: {
body: z.object({
@ -43,7 +39,6 @@ export const registerScimRouter = async (server: FastifyZodProvider) => {
actorId: req.permission.id,
actorOrgId: req.permission.orgId,
orgId: req.body.organizationId,
actorAuthMethod: req.permission.authMethod,
description: req.body.description,
ttlDays: req.body.ttlDays
});
@ -55,9 +50,6 @@ export const registerScimRouter = async (server: FastifyZodProvider) => {
server.route({
url: "/scim-tokens",
method: "GET",
config: {
rateLimit: readLimit
},
onRequest: verifyAuth([AuthMode.JWT]),
schema: {
querystring: z.object({
@ -73,7 +65,6 @@ export const registerScimRouter = async (server: FastifyZodProvider) => {
const scimTokens = await server.services.scim.listScimTokens({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
orgId: req.query.organizationId
});
@ -85,9 +76,6 @@ export const registerScimRouter = async (server: FastifyZodProvider) => {
server.route({
url: "/scim-tokens/:scimTokenId",
method: "DELETE",
config: {
rateLimit: writeLimit
},
onRequest: verifyAuth([AuthMode.JWT]),
schema: {
params: z.object({
@ -104,7 +92,6 @@ export const registerScimRouter = async (server: FastifyZodProvider) => {
scimTokenId: req.params.scimTokenId,
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId
});
@ -135,7 +122,7 @@ export const registerScimRouter = async (server: FastifyZodProvider) => {
emails: z.array(
z.object({
primary: z.boolean(),
value: z.string(),
value: z.string().email(),
type: z.string().trim()
})
),
@ -156,7 +143,7 @@ export const registerScimRouter = async (server: FastifyZodProvider) => {
offset: req.query.startIndex,
limit: req.query.count,
filter: req.query.filter,
orgId: req.permission.orgId
orgId: req.permission.orgId as string
});
return users;
}
@ -181,7 +168,7 @@ export const registerScimRouter = async (server: FastifyZodProvider) => {
emails: z.array(
z.object({
primary: z.boolean(),
value: z.string(),
value: z.string().email(),
type: z.string().trim()
})
),
@ -194,7 +181,7 @@ export const registerScimRouter = async (server: FastifyZodProvider) => {
handler: async (req) => {
const user = await req.server.services.scim.getScimUser({
userId: req.params.userId,
orgId: req.permission.orgId
orgId: req.permission.orgId as string
});
return user;
}
@ -206,20 +193,18 @@ export const registerScimRouter = async (server: FastifyZodProvider) => {
schema: {
body: z.object({
schemas: z.array(z.string()),
userName: z.string().trim(),
userName: z.string().trim().email(),
name: z.object({
familyName: z.string().trim(),
givenName: z.string().trim()
}),
emails: z
.array(
z.object({
primary: z.boolean(),
value: z.string().email(),
type: z.string().trim()
})
)
.optional(),
// emails: z.array( // optional?
// z.object({
// primary: z.boolean(),
// value: z.string().email(),
// type: z.string().trim()
// })
// ),
// displayName: z.string().trim(),
active: z.boolean()
}),
@ -227,7 +212,7 @@ export const registerScimRouter = async (server: FastifyZodProvider) => {
200: z.object({
schemas: z.array(z.string()),
id: z.string().trim(),
userName: z.string().trim(),
userName: z.string().trim().email(),
name: z.object({
familyName: z.string().trim(),
givenName: z.string().trim()
@ -246,14 +231,11 @@ export const registerScimRouter = async (server: FastifyZodProvider) => {
},
onRequest: verifyAuth([AuthMode.SCIM_TOKEN]),
handler: async (req) => {
const primaryEmail = req.body.emails?.find((email) => email.primary)?.value;
const user = await req.server.services.scim.createScimUser({
username: req.body.userName,
email: primaryEmail,
email: req.body.userName,
firstName: req.body.name.givenName,
lastName: req.body.name.familyName,
orgId: req.permission.orgId
orgId: req.permission.orgId as string
});
return user;
@ -262,261 +244,25 @@ export const registerScimRouter = async (server: FastifyZodProvider) => {
server.route({
url: "/Users/:userId",
method: "DELETE",
method: "PATCH",
schema: {
params: z.object({
userId: z.string().trim()
}),
response: {
200: z.object({})
}
},
onRequest: verifyAuth([AuthMode.SCIM_TOKEN]),
handler: async (req) => {
const user = await req.server.services.scim.deleteScimUser({
userId: req.params.userId,
orgId: req.permission.orgId
});
return user;
}
});
server.route({
url: "/Groups",
method: "POST",
schema: {
body: z.object({
schemas: z.array(z.string()),
displayName: z.string().trim(),
members: z
.array(
z.object({
value: z.string(),
display: z.string()
})
)
.optional() // okta-specific
}),
response: {
200: z.object({
schemas: z.array(z.string()),
id: z.string().trim(),
displayName: z.string().trim(),
members: z
.array(
z.object({
value: z.string(),
display: z.string()
})
)
.optional(),
meta: z.object({
resourceType: z.string().trim()
})
})
}
},
onRequest: verifyAuth([AuthMode.SCIM_TOKEN]),
handler: async (req) => {
const group = await req.server.services.scim.createScimGroup({
orgId: req.permission.orgId,
...req.body
});
return group;
}
});
server.route({
url: "/Groups",
method: "GET",
schema: {
querystring: z.object({
startIndex: z.coerce.number().default(1),
count: z.coerce.number().default(20),
filter: z.string().trim().optional()
}),
response: {
200: z.object({
Resources: z.array(
z.object({
schemas: z.array(z.string()),
id: z.string().trim(),
displayName: z.string().trim(),
members: z.array(z.any()).length(0),
meta: z.object({
resourceType: z.string().trim()
})
})
),
itemsPerPage: z.number(),
schemas: z.array(z.string()),
startIndex: z.number(),
totalResults: z.number()
})
}
},
onRequest: verifyAuth([AuthMode.SCIM_TOKEN]),
handler: async (req) => {
const groups = await req.server.services.scim.listScimGroups({
orgId: req.permission.orgId,
offset: req.query.startIndex,
limit: req.query.count
});
return groups;
}
});
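  // Illustrative shape of the SCIM ListResponse returned above (identifiers and counts are
  // hypothetical):
  // {
  //   "schemas": ["urn:ietf:params:scim:api:messages:2.0:ListResponse"],
  //   "totalResults": 1,
  //   "itemsPerPage": 20,
  //   "startIndex": 1,
  //   "Resources": [
  //     {
  //       "schemas": ["urn:ietf:params:scim:schemas:core:2.0:Group"],
  //       "id": "<group-id>",
  //       "displayName": "engineering",
  //       "members": [],
  //       "meta": { "resourceType": "Group" }
  //     }
  //   ]
  // }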
server.route({
url: "/Groups/:groupId",
method: "GET",
schema: {
params: z.object({
groupId: z.string().trim()
}),
response: {
200: z.object({
schemas: z.array(z.string()),
id: z.string().trim(),
displayName: z.string().trim(),
members: z.array(
z.object({
value: z.string(),
display: z.string()
})
),
meta: z.object({
resourceType: z.string().trim()
})
})
}
},
onRequest: verifyAuth([AuthMode.SCIM_TOKEN]),
handler: async (req) => {
const group = await req.server.services.scim.getScimGroup({
groupId: req.params.groupId,
orgId: req.permission.orgId
});
return group;
}
});
server.route({
url: "/Groups/:groupId",
method: "PUT",
schema: {
params: z.object({
groupId: z.string().trim()
}),
body: z.object({
schemas: z.array(z.string()),
id: z.string().trim(),
displayName: z.string().trim(),
members: z.array(
z.object({
value: z.string(), // infisical userId
display: z.string()
})
) // note: is this where members are added to group?
}),
response: {
200: z.object({
schemas: z.array(z.string()),
id: z.string().trim(),
displayName: z.string().trim(),
members: z.array(
z.object({
value: z.string(),
display: z.string()
})
),
meta: z.object({
resourceType: z.string().trim()
})
})
}
},
onRequest: verifyAuth([AuthMode.SCIM_TOKEN]),
handler: async (req) => {
const group = await req.server.services.scim.updateScimGroupNamePut({
groupId: req.params.groupId,
orgId: req.permission.orgId,
...req.body
});
return group;
}
});
server.route({
url: "/Groups/:groupId",
method: "PATCH",
schema: {
params: z.object({
groupId: z.string().trim()
}),
body: z.object({
schemas: z.array(z.string()),
Operations: z.array(
z.union([
z.object({
op: z.literal("replace"),
value: z.object({
id: z.string().trim(),
displayName: z.string().trim()
})
}),
z.object({
op: z.literal("remove"),
path: z.string().trim()
}),
z.object({
op: z.literal("add"),
value: z.object({
value: z.string().trim(),
display: z.string().trim().optional()
})
})
])
)
}),
response: {
200: z.object({
schemas: z.array(z.string()),
id: z.string().trim(),
displayName: z.string().trim(),
members: z.array(
z.object({
value: z.string(),
display: z.string()
})
),
meta: z.object({
resourceType: z.string().trim()
z.object({
op: z.string().trim(),
path: z.string().trim().optional(),
value: z.union([
z.object({
active: z.boolean()
}),
z.string().trim()
])
})
})
}
},
onRequest: verifyAuth([AuthMode.SCIM_TOKEN]),
handler: async (req) => {
const group = await req.server.services.scim.updateScimGroupNamePatch({
groupId: req.params.groupId,
orgId: req.permission.orgId,
operations: req.body.Operations
});
return group;
}
});
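  // Illustrative PATCH body matching the Operations union defined earlier in this route (all
  // identifiers are hypothetical):
  // {
  //   "schemas": ["urn:ietf:params:scim:api:messages:2.0:PatchOp"],
  //   "Operations": [
  //     { "op": "replace", "value": { "id": "<group-id>", "displayName": "platform-team" } },
  //     { "op": "add", "value": { "value": "<user-id>", "display": "jane@example.com" } },
  //     { "op": "remove", "path": "members[value eq \"<user-id>\"]" }
  //   ]
  // }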
server.route({
url: "/Groups/:groupId",
method: "DELETE",
schema: {
params: z.object({
groupId: z.string().trim()
)
}),
response: {
200: z.object({})
@ -524,12 +270,12 @@ export const registerScimRouter = async (server: FastifyZodProvider) => {
},
onRequest: verifyAuth([AuthMode.SCIM_TOKEN]),
handler: async (req) => {
const group = await req.server.services.scim.deleteScimGroup({
groupId: req.params.groupId,
orgId: req.permission.orgId
const user = await req.server.services.scim.updateScimUser({
userId: req.params.userId,
orgId: req.permission.orgId as string,
operations: req.body.Operations
});
return group;
return user;
}
});
@ -576,7 +322,7 @@ export const registerScimRouter = async (server: FastifyZodProvider) => {
handler: async (req) => {
const user = await req.server.services.scim.replaceScimUser({
userId: req.params.userId,
orgId: req.permission.orgId,
orgId: req.permission.orgId as string,
active: req.body.active
});
return user;

@ -1,7 +1,6 @@
import { nanoid } from "nanoid";
import { z } from "zod";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { sapPubSchema } from "@app/server/routes/sanitizedSchemas";
import { AuthMode } from "@app/services/auth/auth-type";
@ -10,9 +9,6 @@ export const registerSecretApprovalPolicyRouter = async (server: FastifyZodProvi
server.route({
url: "/",
method: "POST",
config: {
rateLimit: writeLimit
},
schema: {
body: z
.object({
@ -38,7 +34,6 @@ export const registerSecretApprovalPolicyRouter = async (server: FastifyZodProvi
const approval = await server.services.secretApprovalPolicy.createSecretApprovalPolicy({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
projectId: req.body.workspaceId,
...req.body,
@ -51,9 +46,6 @@ export const registerSecretApprovalPolicyRouter = async (server: FastifyZodProvi
server.route({
url: "/:sapId",
method: "PATCH",
config: {
rateLimit: writeLimit
},
schema: {
params: z.object({
sapId: z.string()
@ -80,7 +72,6 @@ export const registerSecretApprovalPolicyRouter = async (server: FastifyZodProvi
const approval = await server.services.secretApprovalPolicy.updateSecretApprovalPolicy({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
...req.body,
secretPolicyId: req.params.sapId
@ -92,9 +83,6 @@ export const registerSecretApprovalPolicyRouter = async (server: FastifyZodProvi
server.route({
url: "/:sapId",
method: "DELETE",
config: {
rateLimit: writeLimit
},
schema: {
params: z.object({
sapId: z.string()
@ -110,7 +98,6 @@ export const registerSecretApprovalPolicyRouter = async (server: FastifyZodProvi
const approval = await server.services.secretApprovalPolicy.deleteSecretApprovalPolicy({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
secretPolicyId: req.params.sapId
});
@ -121,9 +108,6 @@ export const registerSecretApprovalPolicyRouter = async (server: FastifyZodProvi
server.route({
url: "/",
method: "GET",
config: {
rateLimit: readLimit
},
schema: {
querystring: z.object({
workspaceId: z.string().trim()
@ -139,7 +123,6 @@ export const registerSecretApprovalPolicyRouter = async (server: FastifyZodProvi
const approvals = await server.services.secretApprovalPolicy.getSecretApprovalPolicyByProjectId({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
projectId: req.query.workspaceId
});
@ -150,9 +133,6 @@ export const registerSecretApprovalPolicyRouter = async (server: FastifyZodProvi
server.route({
url: "/board",
method: "GET",
config: {
rateLimit: readLimit
},
schema: {
querystring: z.object({
workspaceId: z.string().trim(),
@ -170,7 +150,6 @@ export const registerSecretApprovalPolicyRouter = async (server: FastifyZodProvi
const policy = await server.services.secretApprovalPolicy.getSecretApprovalPolicyOfFolder({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
projectId: req.query.workspaceId,
...req.query

@ -10,17 +10,13 @@ import {
} from "@app/db/schemas";
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import { ApprovalStatus, RequestState } from "@app/ee/services/secret-approval-request/secret-approval-request-types";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
export const registerSecretApprovalRequestRouter = async (server: FastifyZodProvider) => {
server.route({
method: "GET",
url: "/",
config: {
rateLimit: readLimit
},
method: "GET",
schema: {
querystring: z.object({
workspaceId: z.string().trim(),
@ -56,7 +52,6 @@ export const registerSecretApprovalRequestRouter = async (server: FastifyZodProv
const approvals = await server.services.secretApprovalRequest.getSecretApprovals({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
...req.query,
projectId: req.query.workspaceId
@ -66,11 +61,8 @@ export const registerSecretApprovalRequestRouter = async (server: FastifyZodProv
});
server.route({
method: "GET",
url: "/count",
config: {
rateLimit: readLimit
},
method: "GET",
schema: {
querystring: z.object({
workspaceId: z.string().trim()
@ -89,7 +81,6 @@ export const registerSecretApprovalRequestRouter = async (server: FastifyZodProv
const approvals = await server.services.secretApprovalRequest.requestCount({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
projectId: req.query.workspaceId
});
@ -100,9 +91,6 @@ export const registerSecretApprovalRequestRouter = async (server: FastifyZodProv
server.route({
url: "/:id/merge",
method: "POST",
config: {
rateLimit: writeLimit
},
schema: {
params: z.object({
id: z.string()
@ -118,7 +106,6 @@ export const registerSecretApprovalRequestRouter = async (server: FastifyZodProv
const { approval } = await server.services.secretApprovalRequest.mergeSecretApprovalRequest({
actorId: req.permission.id,
actor: req.permission.type,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
approvalId: req.params.id
});
@ -127,11 +114,8 @@ export const registerSecretApprovalRequestRouter = async (server: FastifyZodProv
});
server.route({
method: "POST",
url: "/:id/review",
config: {
rateLimit: writeLimit
},
method: "POST",
schema: {
params: z.object({
id: z.string()
@ -150,7 +134,6 @@ export const registerSecretApprovalRequestRouter = async (server: FastifyZodProv
const review = await server.services.secretApprovalRequest.reviewApproval({
actorId: req.permission.id,
actor: req.permission.type,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
approvalId: req.params.id,
status: req.body.status
@ -160,11 +143,8 @@ export const registerSecretApprovalRequestRouter = async (server: FastifyZodProv
});
server.route({
method: "POST",
url: "/:id/status",
config: {
rateLimit: writeLimit
},
method: "POST",
schema: {
params: z.object({
id: z.string()
@ -183,7 +163,6 @@ export const registerSecretApprovalRequestRouter = async (server: FastifyZodProv
const approval = await server.services.secretApprovalRequest.updateApprovalStatus({
actorId: req.permission.id,
actor: req.permission.type,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
approvalId: req.params.id,
status: req.body.status
@ -219,11 +198,8 @@ export const registerSecretApprovalRequestRouter = async (server: FastifyZodProv
.array()
.optional();
server.route({
method: "GET",
url: "/:id",
config: {
rateLimit: readLimit
},
method: "GET",
schema: {
params: z.object({
id: z.string()
@ -295,7 +271,6 @@ export const registerSecretApprovalRequestRouter = async (server: FastifyZodProv
const approval = await server.services.secretApprovalRequest.getSecretApprovalDetails({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
id: req.params.id
});

@ -1,16 +1,12 @@
import { z } from "zod";
import { readLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
export const registerSecretRotationProviderRouter = async (server: FastifyZodProvider) => {
server.route({
method: "GET",
url: "/:workspaceId",
config: {
rateLimit: readLimit
},
method: "GET",
schema: {
params: z.object({
workspaceId: z.string().trim()
@ -34,7 +30,6 @@ export const registerSecretRotationProviderRouter = async (server: FastifyZodPro
const providers = await server.services.secretRotation.getProviderTemplates({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
projectId: req.params.workspaceId
});

@ -2,17 +2,13 @@ import { z } from "zod";
import { SecretRotationOutputsSchema, SecretRotationsSchema, SecretsSchema } from "@app/db/schemas";
import { removeTrailingSlash } from "@app/lib/fn";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
export const registerSecretRotationRouter = async (server: FastifyZodProvider) => {
server.route({
method: "POST",
url: "/",
config: {
rateLimit: writeLimit
},
method: "POST",
schema: {
body: z.object({
workspaceId: z.string().trim(),
@ -43,7 +39,6 @@ export const registerSecretRotationRouter = async (server: FastifyZodProvider) =
handler: async (req) => {
const secretRotation = await server.services.secretRotation.createRotation({
actor: req.permission.type,
actorAuthMethod: req.permission.authMethod,
actorId: req.permission.id,
actorOrgId: req.permission.orgId,
...req.body,
@ -56,9 +51,6 @@ export const registerSecretRotationRouter = async (server: FastifyZodProvider) =
server.route({
url: "/restart",
method: "POST",
config: {
rateLimit: writeLimit
},
schema: {
body: z.object({
id: z.string().trim()
@ -82,7 +74,6 @@ export const registerSecretRotationRouter = async (server: FastifyZodProvider) =
const secretRotation = await server.services.secretRotation.restartById({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
rotationId: req.body.id
});
@ -93,9 +84,6 @@ export const registerSecretRotationRouter = async (server: FastifyZodProvider) =
server.route({
url: "/",
method: "GET",
config: {
rateLimit: readLimit
},
schema: {
querystring: z.object({
workspaceId: z.string().trim()
@ -137,7 +125,6 @@ export const registerSecretRotationRouter = async (server: FastifyZodProvider) =
const secretRotations = await server.services.secretRotation.getByProjectId({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
projectId: req.query.workspaceId
});
@ -146,11 +133,8 @@ export const registerSecretRotationRouter = async (server: FastifyZodProvider) =
});
server.route({
method: "DELETE",
url: "/:id",
config: {
rateLimit: writeLimit
},
method: "DELETE",
schema: {
params: z.object({
id: z.string().trim()
@ -174,7 +158,6 @@ export const registerSecretRotationRouter = async (server: FastifyZodProvider) =
const secretRotation = await server.services.secretRotation.deleteById({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
rotationId: req.params.id
});

@ -2,17 +2,13 @@ import { z } from "zod";
import { GitAppOrgSchema, SecretScanningGitRisksSchema } from "@app/db/schemas";
import { SecretScanningRiskStatus } from "@app/ee/services/secret-scanning/secret-scanning-types";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
export const registerSecretScanningRouter = async (server: FastifyZodProvider) => {
server.route({
method: "POST",
url: "/create-installation-session/organization",
config: {
rateLimit: writeLimit
},
method: "POST",
schema: {
body: z.object({ organizationId: z.string().trim() }),
response: {
@ -26,7 +22,6 @@ export const registerSecretScanningRouter = async (server: FastifyZodProvider) =
const session = await server.services.secretScanning.createInstallationSession({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
orgId: req.body.organizationId
});
@ -35,11 +30,8 @@ export const registerSecretScanningRouter = async (server: FastifyZodProvider) =
});
server.route({
method: "POST",
url: "/link-installation",
config: {
rateLimit: writeLimit
},
method: "POST",
schema: {
body: z.object({
installationId: z.string(),
@ -54,7 +46,6 @@ export const registerSecretScanningRouter = async (server: FastifyZodProvider) =
const { installatedApp } = await server.services.secretScanning.linkInstallationToOrg({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
...req.body
});
@ -63,11 +54,8 @@ export const registerSecretScanningRouter = async (server: FastifyZodProvider) =
});
server.route({
method: "GET",
url: "/installation-status/organization/:organizationId",
config: {
rateLimit: readLimit
},
method: "GET",
schema: {
params: z.object({ organizationId: z.string().trim() }),
response: {
@ -79,7 +67,6 @@ export const registerSecretScanningRouter = async (server: FastifyZodProvider) =
const appInstallationCompleted = await server.services.secretScanning.getOrgInstallationStatus({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
orgId: req.params.organizationId
});
@ -90,9 +77,6 @@ export const registerSecretScanningRouter = async (server: FastifyZodProvider) =
server.route({
url: "/organization/:organizationId/risks",
method: "GET",
config: {
rateLimit: readLimit
},
schema: {
params: z.object({ organizationId: z.string().trim() }),
response: {
@ -104,7 +88,6 @@ export const registerSecretScanningRouter = async (server: FastifyZodProvider) =
const { risks } = await server.services.secretScanning.getRisksByOrg({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
orgId: req.params.organizationId
});
@ -113,11 +96,8 @@ export const registerSecretScanningRouter = async (server: FastifyZodProvider) =
});
server.route({
method: "POST",
url: "/organization/:organizationId/risks/:riskId/status",
config: {
rateLimit: writeLimit
},
method: "POST",
schema: {
params: z.object({ organizationId: z.string().trim(), riskId: z.string().trim() }),
body: z.object({ status: z.nativeEnum(SecretScanningRiskStatus) }),
@ -130,7 +110,6 @@ export const registerSecretScanningRouter = async (server: FastifyZodProvider) =
const { risk } = await server.services.secretScanning.updateRiskStatus({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
orgId: req.params.organizationId,
riskId: req.params.riskId,

@ -1,17 +1,13 @@
import { z } from "zod";
import { SecretVersionsSchema } from "@app/db/schemas";
import { readLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
export const registerSecretVersionRouter = async (server: FastifyZodProvider) => {
server.route({
method: "GET",
url: "/:secretId/secret-versions",
config: {
rateLimit: readLimit
},
method: "GET",
schema: {
params: z.object({
secretId: z.string()
@ -31,7 +27,6 @@ export const registerSecretVersionRouter = async (server: FastifyZodProvider) =>
const secretVersions = await server.services.secret.getSecretVersions({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
limit: req.query.limit,
offset: req.query.offset,

@ -1,8 +1,6 @@
import { z } from "zod";
import { SecretSnapshotsSchema, SecretTagsSchema, SecretVersionsSchema } from "@app/db/schemas";
import { PROJECTS } from "@app/lib/api-docs";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
@ -10,9 +8,6 @@ export const registerSnapshotRouter = async (server: FastifyZodProvider) => {
server.route({
method: "GET",
url: "/:secretSnapshotId",
config: {
rateLimit: readLimit
},
schema: {
params: z.object({
secretSnapshotId: z.string().trim()
@ -51,7 +46,6 @@ export const registerSnapshotRouter = async (server: FastifyZodProvider) => {
const secretSnapshot = await server.services.snapshot.getSnapshotData({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
id: req.params.secretSnapshotId
});
@ -62,18 +56,16 @@ export const registerSnapshotRouter = async (server: FastifyZodProvider) => {
server.route({
method: "POST",
url: "/:secretSnapshotId/rollback",
config: {
rateLimit: writeLimit
},
schema: {
description: "Roll back project secrets to those captured in a secret snapshot version.",
security: [
{
apiKeyAuth: [],
bearerAuth: []
}
],
params: z.object({
secretSnapshotId: z.string().trim().describe(PROJECTS.ROLLBACK_TO_SNAPSHOT.secretSnapshotId)
secretSnapshotId: z.string().trim()
}),
response: {
200: z.object({
@ -86,7 +78,6 @@ export const registerSnapshotRouter = async (server: FastifyZodProvider) => {
const secretSnapshot = await server.services.snapshot.rollbackSnapshot({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
id: req.params.secretSnapshotId
});

@ -2,17 +2,13 @@ import { z } from "zod";
import { TrustedIpsSchema } from "@app/db/schemas";
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
export const registerTrustedIpRouter = async (server: FastifyZodProvider) => {
server.route({
method: "GET",
url: "/:workspaceId/trusted-ips",
config: {
rateLimit: readLimit
},
method: "GET",
schema: {
params: z.object({
workspaceId: z.string().trim()
@ -26,7 +22,6 @@ export const registerTrustedIpRouter = async (server: FastifyZodProvider) => {
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const trustedIps = await server.services.trustedIp.listIpsByProjectId({
actorAuthMethod: req.permission.authMethod,
projectId: req.params.workspaceId,
actor: req.permission.type,
actorId: req.permission.id,
@ -37,11 +32,8 @@ export const registerTrustedIpRouter = async (server: FastifyZodProvider) => {
});
server.route({
method: "POST",
url: "/:workspaceId/trusted-ips",
config: {
rateLimit: writeLimit
},
method: "POST",
schema: {
params: z.object({
workspaceId: z.string().trim()
@ -60,7 +52,6 @@ export const registerTrustedIpRouter = async (server: FastifyZodProvider) => {
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const { trustedIp, project } = await server.services.trustedIp.addProjectIp({
actorAuthMethod: req.permission.authMethod,
projectId: req.params.workspaceId,
actor: req.permission.type,
actorId: req.permission.id,
@ -85,11 +76,8 @@ export const registerTrustedIpRouter = async (server: FastifyZodProvider) => {
});
server.route({
method: "PATCH",
url: "/:workspaceId/trusted-ips/:trustedIpId",
config: {
rateLimit: writeLimit
},
method: "PATCH",
schema: {
params: z.object({
workspaceId: z.string().trim(),
@ -111,7 +99,6 @@ export const registerTrustedIpRouter = async (server: FastifyZodProvider) => {
projectId: req.params.workspaceId,
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
trustedIpId: req.params.trustedIpId,
...req.body
@ -134,11 +121,8 @@ export const registerTrustedIpRouter = async (server: FastifyZodProvider) => {
});
server.route({
method: "DELETE",
url: "/:workspaceId/trusted-ips/:trustedIpId",
config: {
rateLimit: writeLimit
},
method: "DELETE",
schema: {
params: z.object({
workspaceId: z.string().trim(),
@ -156,7 +140,6 @@ export const registerTrustedIpRouter = async (server: FastifyZodProvider) => {
projectId: req.params.workspaceId,
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
trustedIpId: req.params.trustedIpId
});

@ -1,256 +0,0 @@
import slugify from "@sindresorhus/slugify";
import ms from "ms";
import { z } from "zod";
import { ProjectUserAdditionalPrivilegeSchema } from "@app/db/schemas";
import { ProjectUserAdditionalPrivilegeTemporaryMode } from "@app/ee/services/project-user-additional-privilege/project-user-additional-privilege-types";
import { PROJECT_USER_ADDITIONAL_PRIVILEGE } from "@app/lib/api-docs";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
export const registerUserAdditionalPrivilegeRouter = async (server: FastifyZodProvider) => {
server.route({
url: "/permanent",
method: "POST",
config: {
rateLimit: writeLimit
},
schema: {
body: z.object({
projectMembershipId: z.string().min(1).describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.CREATE.projectMembershipId),
slug: z
.string()
.min(1)
.max(60)
.trim()
.refine((v) => v.toLowerCase() === v, "Slug must be lowercase")
.refine((v) => slugify(v) === v, {
message: "Slug must be a valid slug"
})
.optional()
.describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.CREATE.slug),
permissions: z.any().array().describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.CREATE.permissions)
}),
response: {
200: z.object({
privilege: ProjectUserAdditionalPrivilegeSchema
})
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const privilege = await server.services.projectUserAdditionalPrivilege.create({
actorId: req.permission.id,
actor: req.permission.type,
actorOrgId: req.permission.orgId,
actorAuthMethod: req.permission.authMethod,
...req.body,
slug: req.body.slug ? slugify(req.body.slug) : slugify(alphaNumericNanoId(12)),
isTemporary: false,
permissions: JSON.stringify(req.body.permissions)
});
return { privilege };
}
});
server.route({
method: "POST",
url: "/temporary",
config: {
rateLimit: writeLimit
},
schema: {
body: z.object({
projectMembershipId: z.string().min(1).describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.CREATE.projectMembershipId),
slug: z
.string()
.min(1)
.max(60)
.trim()
.refine((v) => v.toLowerCase() === v, "Slug must be lowercase")
.refine((v) => slugify(v) === v, {
message: "Slug must be a valid slug"
})
.optional()
.describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.CREATE.slug),
permissions: z.any().array().describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.CREATE.permissions),
temporaryMode: z
.nativeEnum(ProjectUserAdditionalPrivilegeTemporaryMode)
.describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.CREATE.temporaryMode),
temporaryRange: z
.string()
.refine((val) => ms(val) > 0, "Temporary range must be a positive number")
.describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.CREATE.temporaryRange),
temporaryAccessStartTime: z
.string()
.datetime()
.describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.CREATE.temporaryAccessStartTime)
}),
response: {
200: z.object({
privilege: ProjectUserAdditionalPrivilegeSchema
})
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const privilege = await server.services.projectUserAdditionalPrivilege.create({
actorId: req.permission.id,
actor: req.permission.type,
actorOrgId: req.permission.orgId,
actorAuthMethod: req.permission.authMethod,
...req.body,
slug: req.body.slug ? slugify(req.body.slug) : `privilege-${slugify(alphaNumericNanoId(12))}`,
isTemporary: true,
permissions: JSON.stringify(req.body.permissions)
});
return { privilege };
}
});
server.route({
method: "PATCH",
url: "/:privilegeId",
config: {
rateLimit: writeLimit
},
schema: {
params: z.object({
privilegeId: z.string().min(1).describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.UPDATE.privilegeId)
}),
body: z
.object({
slug: z
.string()
.max(60)
.trim()
.refine((v) => v.toLowerCase() === v, "Slug must be lowercase")
.refine((v) => slugify(v) === v, {
message: "Slug must be a valid slug"
})
.describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.UPDATE.slug),
permissions: z.any().array().describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.UPDATE.permissions),
isTemporary: z.boolean().describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.UPDATE.isTemporary),
temporaryMode: z
.nativeEnum(ProjectUserAdditionalPrivilegeTemporaryMode)
.describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.UPDATE.temporaryMode),
temporaryRange: z
.string()
.refine((val) => ms(val) > 0, "Temporary range must be a positive number")
.describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.UPDATE.temporaryRange),
temporaryAccessStartTime: z
.string()
.datetime()
.describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.UPDATE.temporaryAccessStartTime)
})
.partial(),
response: {
200: z.object({
privilege: ProjectUserAdditionalPrivilegeSchema
})
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const privilege = await server.services.projectUserAdditionalPrivilege.updateById({
actorId: req.permission.id,
actor: req.permission.type,
actorOrgId: req.permission.orgId,
actorAuthMethod: req.permission.authMethod,
...req.body,
permissions: req.body.permissions ? JSON.stringify(req.body.permissions) : undefined,
privilegeId: req.params.privilegeId
});
return { privilege };
}
});
server.route({
method: "DELETE",
url: "/:privilegeId",
config: {
rateLimit: writeLimit
},
schema: {
params: z.object({
privilegeId: z.string().describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.DELETE.privilegeId)
}),
response: {
200: z.object({
privilege: ProjectUserAdditionalPrivilegeSchema
})
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const privilege = await server.services.projectUserAdditionalPrivilege.deleteById({
actorId: req.permission.id,
actor: req.permission.type,
actorOrgId: req.permission.orgId,
actorAuthMethod: req.permission.authMethod,
privilegeId: req.params.privilegeId
});
return { privilege };
}
});
server.route({
method: "GET",
url: "/",
config: {
rateLimit: readLimit
},
schema: {
querystring: z.object({
projectMembershipId: z.string().describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.LIST.projectMembershipId)
}),
response: {
200: z.object({
privileges: ProjectUserAdditionalPrivilegeSchema.array()
})
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const privileges = await server.services.projectUserAdditionalPrivilege.listPrivileges({
actorId: req.permission.id,
actor: req.permission.type,
actorOrgId: req.permission.orgId,
actorAuthMethod: req.permission.authMethod,
projectMembershipId: req.query.projectMembershipId
});
return { privileges };
}
});
server.route({
method: "GET",
url: "/:privilegeId",
config: {
rateLimit: readLimit
},
schema: {
params: z.object({
privilegeId: z.string().describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.GET_BY_PRIVILEGEID.privilegeId)
}),
response: {
200: z.object({
privilege: ProjectUserAdditionalPrivilegeSchema
})
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const privilege = await server.services.projectUserAdditionalPrivilege.getPrivilegeDetailsById({
actorId: req.permission.id,
actor: req.permission.type,
actorOrgId: req.permission.orgId,
actorAuthMethod: req.permission.authMethod,
privilegeId: req.params.privilegeId
});
return { privilege };
}
});
};

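For orientation, a minimal sketch of what a client call against the permanent-privilege route above could look like. The base URL, route prefix, and token handling are assumptions, and the permissions array is purely illustrative since the schema accepts arbitrary objects.

// Hypothetical call against the POST /permanent route above; host, route prefix and token
// are assumptions, and the permissions array is illustrative (the zod schema allows any objects).
async function createPermanentPrivilege() {
  const res = await fetch("https://app.infisical.com/api/v1/additional-privilege/users/permanent", {
    method: "POST",
    headers: {
      Authorization: `Bearer ${process.env.INFISICAL_TOKEN}`,
      "Content-Type": "application/json"
    },
    body: JSON.stringify({
      projectMembershipId: "<project-membership-id>",
      slug: "read-only-prod", // optional; must be a lowercase, slugified value
      permissions: [{ action: "read", subject: "secrets" }]
    })
  });
  const { privilege } = await res.json();
  return privilege;
}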
@ -24,7 +24,7 @@ export const auditLogQueueServiceFactory = ({
const pushToLog = async (data: TCreateAuditLogDTO) => {
await queueService.queue(QueueName.AuditLog, QueueJobs.AuditLog, data, {
removeOnFail: {
count: 3
count: 5
},
removeOnComplete: true
});
@ -46,7 +46,6 @@ export const auditLogQueueServiceFactory = ({
const ttl = plan.auditLogsRetentionDays * MS_IN_DAY;
// skip inserting if audit log retention is 0, meaning it's not supported on the plan
if (ttl === 0) return;
await auditLogDAL.create({
actor: actor.type,
actorMetadata: actor.metadata,

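The retention hunk above boils down to simple date arithmetic; a small sketch, assuming MS_IN_DAY is one day in milliseconds and that the computed TTL becomes an expiry timestamp on the inserted row (the expiry column name is not shown in the hunk).

// Sketch of the retention arithmetic; values and the expiry field are illustrative.
const MS_IN_DAY = 24 * 60 * 60 * 1000;
const auditLogsRetentionDays = 30; // example plan value
const ttl = auditLogsRetentionDays * MS_IN_DAY;
// a 0-day retention means audit logs are not supported on the plan, so nothing is inserted
if (ttl !== 0) {
  const expiresAt = new Date(Date.now() + ttl); // hypothetical expiry value, 30 days out
  console.log(expiresAt.toISOString());
}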
@ -31,17 +31,10 @@ export const auditLogServiceFactory = ({
actor,
actorId,
actorOrgId,
actorAuthMethod,
projectId,
auditLogActor
}: TListProjectAuditLogDTO) => {
const { permission } = await permissionService.getProjectPermission(
actor,
actorId,
projectId,
actorAuthMethod,
actorOrgId
);
const { permission } = await permissionService.getProjectPermission(actor, actorId, projectId, actorOrgId);
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.AuditLogs);
const auditLogs = await auditLogDAL.find({
startDate,

@ -92,8 +92,7 @@ export enum EventType {
interface UserActorMetadata {
userId: string;
email?: string | null;
username: string;
email: string;
}
interface ServiceActorMetadata {

@ -1,80 +0,0 @@
import { Knex } from "knex";
import { TDbClient } from "@app/db";
import { DynamicSecretLeasesSchema, TableName } from "@app/db/schemas";
import { DatabaseError } from "@app/lib/errors";
import { ormify, selectAllTableCols } from "@app/lib/knex";
export type TDynamicSecretLeaseDALFactory = ReturnType<typeof dynamicSecretLeaseDALFactory>;
export const dynamicSecretLeaseDALFactory = (db: TDbClient) => {
const orm = ormify(db, TableName.DynamicSecretLease);
const countLeasesForDynamicSecret = async (dynamicSecretId: string, tx?: Knex) => {
try {
const doc = await (tx || db)(TableName.DynamicSecretLease).count("*").where({ dynamicSecretId }).first();
return parseInt((doc?.count as string) || "0", 10);
} catch (error) {
throw new DatabaseError({ error, name: "DynamicSecretCountLeases" });
}
};
const findById = async (id: string, tx?: Knex) => {
try {
const doc = await (tx || db)(TableName.DynamicSecretLease)
.where({ [`${TableName.DynamicSecretLease}.id` as "id"]: id })
.first()
.join(
TableName.DynamicSecret,
`${TableName.DynamicSecretLease}.dynamicSecretId`,
`${TableName.DynamicSecret}.id`
)
.select(selectAllTableCols(TableName.DynamicSecretLease))
.select(
db.ref("id").withSchema(TableName.DynamicSecret).as("dynId"),
db.ref("name").withSchema(TableName.DynamicSecret).as("dynName"),
db.ref("version").withSchema(TableName.DynamicSecret).as("dynVersion"),
db.ref("type").withSchema(TableName.DynamicSecret).as("dynType"),
db.ref("defaultTTL").withSchema(TableName.DynamicSecret).as("dynDefaultTTL"),
db.ref("maxTTL").withSchema(TableName.DynamicSecret).as("dynMaxTTL"),
db.ref("inputIV").withSchema(TableName.DynamicSecret).as("dynInputIV"),
db.ref("inputTag").withSchema(TableName.DynamicSecret).as("dynInputTag"),
db.ref("inputCiphertext").withSchema(TableName.DynamicSecret).as("dynInputCiphertext"),
db.ref("algorithm").withSchema(TableName.DynamicSecret).as("dynAlgorithm"),
db.ref("keyEncoding").withSchema(TableName.DynamicSecret).as("dynKeyEncoding"),
db.ref("folderId").withSchema(TableName.DynamicSecret).as("dynFolderId"),
db.ref("status").withSchema(TableName.DynamicSecret).as("dynStatus"),
db.ref("statusDetails").withSchema(TableName.DynamicSecret).as("dynStatusDetails"),
db.ref("createdAt").withSchema(TableName.DynamicSecret).as("dynCreatedAt"),
db.ref("updatedAt").withSchema(TableName.DynamicSecret).as("dynUpdatedAt")
);
if (!doc) return;
return {
...DynamicSecretLeasesSchema.parse(doc),
dynamicSecret: {
id: doc.dynId,
name: doc.dynName,
version: doc.dynVersion,
type: doc.dynType,
defaultTTL: doc.dynDefaultTTL,
maxTTL: doc.dynMaxTTL,
inputIV: doc.dynInputIV,
inputTag: doc.dynInputTag,
inputCiphertext: doc.dynInputCiphertext,
algorithm: doc.dynAlgorithm,
keyEncoding: doc.dynKeyEncoding,
folderId: doc.dynFolderId,
status: doc.dynStatus,
statusDetails: doc.dynStatusDetails,
createdAt: doc.dynCreatedAt,
updatedAt: doc.dynUpdatedAt
}
};
} catch (error) {
throw new DatabaseError({ error, name: "DynamicSecretLeaseFindById" });
}
};
return { ...orm, findById, countLeasesForDynamicSecret };
};

@ -1,159 +0,0 @@
import { SecretKeyEncoding } from "@app/db/schemas";
import { DisableRotationErrors } from "@app/ee/services/secret-rotation/secret-rotation-queue";
import { infisicalSymmetricDecrypt } from "@app/lib/crypto/encryption";
import { logger } from "@app/lib/logger";
import { QueueJobs, QueueName, TQueueServiceFactory } from "@app/queue";
import { TDynamicSecretDALFactory } from "../dynamic-secret/dynamic-secret-dal";
import { DynamicSecretStatus } from "../dynamic-secret/dynamic-secret-types";
import { DynamicSecretProviders, TDynamicProviderFns } from "../dynamic-secret/providers/models";
import { TDynamicSecretLeaseDALFactory } from "./dynamic-secret-lease-dal";
type TDynamicSecretLeaseQueueServiceFactoryDep = {
queueService: TQueueServiceFactory;
dynamicSecretLeaseDAL: Pick<TDynamicSecretLeaseDALFactory, "findById" | "deleteById" | "find" | "updateById">;
dynamicSecretDAL: Pick<TDynamicSecretDALFactory, "findById" | "deleteById" | "updateById">;
dynamicSecretProviders: Record<DynamicSecretProviders, TDynamicProviderFns>;
};
export type TDynamicSecretLeaseQueueServiceFactory = ReturnType<typeof dynamicSecretLeaseQueueServiceFactory>;
export const dynamicSecretLeaseQueueServiceFactory = ({
queueService,
dynamicSecretDAL,
dynamicSecretProviders,
dynamicSecretLeaseDAL
}: TDynamicSecretLeaseQueueServiceFactoryDep) => {
const pruneDynamicSecret = async (dynamicSecretCfgId: string) => {
await queueService.queue(
QueueName.DynamicSecretRevocation,
QueueJobs.DynamicSecretPruning,
{ dynamicSecretCfgId },
{
jobId: dynamicSecretCfgId,
backoff: {
type: "exponential",
delay: 3000
},
removeOnFail: {
count: 3
},
removeOnComplete: true
}
);
};
const setLeaseRevocation = async (leaseId: string, expiry: number) => {
await queueService.queue(
QueueName.DynamicSecretRevocation,
QueueJobs.DynamicSecretRevocation,
{ leaseId },
{
jobId: leaseId,
backoff: {
type: "exponential",
delay: 3000
},
delay: expiry,
removeOnFail: {
count: 3
},
removeOnComplete: true
}
);
};
const unsetLeaseRevocation = async (leaseId: string) => {
await queueService.stopJobById(QueueName.DynamicSecretRevocation, leaseId);
};
queueService.start(QueueName.DynamicSecretRevocation, async (job) => {
try {
if (job.name === QueueJobs.DynamicSecretRevocation) {
const { leaseId } = job.data as { leaseId: string };
logger.info("Dynamic secret lease revocation started: ", leaseId, job.id);
const dynamicSecretLease = await dynamicSecretLeaseDAL.findById(leaseId);
if (!dynamicSecretLease) throw new DisableRotationErrors({ message: "Dynamic secret lease not found" });
const dynamicSecretCfg = dynamicSecretLease.dynamicSecret;
const selectedProvider = dynamicSecretProviders[dynamicSecretCfg.type as DynamicSecretProviders];
const decryptedStoredInput = JSON.parse(
infisicalSymmetricDecrypt({
keyEncoding: dynamicSecretCfg.keyEncoding as SecretKeyEncoding,
ciphertext: dynamicSecretCfg.inputCiphertext,
tag: dynamicSecretCfg.inputTag,
iv: dynamicSecretCfg.inputIV
})
) as object;
await selectedProvider.revoke(decryptedStoredInput, dynamicSecretLease.externalEntityId);
await dynamicSecretLeaseDAL.deleteById(dynamicSecretLease.id);
return;
}
if (job.name === QueueJobs.DynamicSecretPruning) {
const { dynamicSecretCfgId } = job.data as { dynamicSecretCfgId: string };
logger.info("Dynamic secret pruning started: ", dynamicSecretCfgId, job.id);
const dynamicSecretCfg = await dynamicSecretDAL.findById(dynamicSecretCfgId);
if (!dynamicSecretCfg) throw new DisableRotationErrors({ message: "Dynamic secret not found" });
if ((dynamicSecretCfg.status as DynamicSecretStatus) !== DynamicSecretStatus.Deleting)
throw new DisableRotationErrors({ message: "Document not deleted" });
const dynamicSecretLeases = await dynamicSecretLeaseDAL.find({ dynamicSecretId: dynamicSecretCfgId });
if (dynamicSecretLeases.length) {
const selectedProvider = dynamicSecretProviders[dynamicSecretCfg.type as DynamicSecretProviders];
const decryptedStoredInput = JSON.parse(
infisicalSymmetricDecrypt({
keyEncoding: dynamicSecretCfg.keyEncoding as SecretKeyEncoding,
ciphertext: dynamicSecretCfg.inputCiphertext,
tag: dynamicSecretCfg.inputTag,
iv: dynamicSecretCfg.inputIV
})
) as object;
await Promise.all(dynamicSecretLeases.map(({ id }) => unsetLeaseRevocation(id)));
await Promise.all(
dynamicSecretLeases.map(({ externalEntityId }) =>
selectedProvider.revoke(decryptedStoredInput, externalEntityId)
)
);
}
await dynamicSecretDAL.deleteById(dynamicSecretCfgId);
}
logger.info("Finished dynamic secret job", job.id);
} catch (error) {
logger.error(error);
if (job?.name === QueueJobs.DynamicSecretPruning) {
const { dynamicSecretCfgId } = job.data as { dynamicSecretCfgId: string };
await dynamicSecretDAL.updateById(dynamicSecretCfgId, {
status: DynamicSecretStatus.FailedDeletion,
statusDetails: (error as Error)?.message?.slice(0, 255)
});
}
if (job?.name === QueueJobs.DynamicSecretRevocation) {
const { leaseId } = job.data as { leaseId: string };
await dynamicSecretLeaseDAL.updateById(leaseId, {
status: DynamicSecretStatus.FailedDeletion,
statusDetails: (error as Error)?.message?.slice(0, 255)
});
}
if (error instanceof DisableRotationErrors) {
if (job.id) {
await queueService.stopRepeatableJobByJobId(QueueName.DynamicSecretRevocation, job.id);
}
}
// propagate to next part
throw error;
}
});
return {
pruneDynamicSecret,
setLeaseRevocation,
unsetLeaseRevocation
};
};

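The queue above relies on delayed jobs keyed by the lease id, so a pending revocation can be cancelled and re-scheduled when a lease is renewed. A minimal sketch of that pattern with BullMQ directly; the queue and job names and the Redis connection are illustrative, since the project reaches the queue through its own queueService wrapper rather than this exact API.

// Minimal sketch of the delayed-job pattern, assuming a plain BullMQ queue.
import { Queue } from "bullmq";

const revocationQueue = new Queue("dynamic-secret-revocation", {
  connection: { host: "localhost", port: 6379 } // assumed Redis connection
});

// Schedule revocation to fire when the lease expires; jobId = leaseId makes it addressable later.
async function scheduleRevocation(leaseId: string, expireAt: Date) {
  await revocationQueue.add(
    "dynamic-secret-revocation",
    { leaseId },
    {
      jobId: leaseId,
      delay: expireAt.getTime() - Date.now(),
      backoff: { type: "exponential", delay: 3000 },
      removeOnComplete: true,
      removeOnFail: { count: 3 }
    }
  );
}

// Renewal cancels the pending job by id and re-schedules it with the new expiry.
async function rescheduleRevocation(leaseId: string, newExpireAt: Date) {
  const pending = await revocationQueue.getJob(leaseId);
  if (pending) await pending.remove();
  await scheduleRevocation(leaseId, newExpireAt);
}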
@ -1,343 +0,0 @@
import { ForbiddenError, subject } from "@casl/ability";
import ms from "ms";
import { SecretKeyEncoding } from "@app/db/schemas";
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
import { ProjectPermissionActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission";
import { getConfig } from "@app/lib/config/env";
import { infisicalSymmetricDecrypt } from "@app/lib/crypto/encryption";
import { BadRequestError } from "@app/lib/errors";
import { logger } from "@app/lib/logger";
import { TProjectDALFactory } from "@app/services/project/project-dal";
import { TSecretFolderDALFactory } from "@app/services/secret-folder/secret-folder-dal";
import { TDynamicSecretDALFactory } from "../dynamic-secret/dynamic-secret-dal";
import { DynamicSecretProviders, TDynamicProviderFns } from "../dynamic-secret/providers/models";
import { TDynamicSecretLeaseDALFactory } from "./dynamic-secret-lease-dal";
import { TDynamicSecretLeaseQueueServiceFactory } from "./dynamic-secret-lease-queue";
import {
DynamicSecretLeaseStatus,
TCreateDynamicSecretLeaseDTO,
TDeleteDynamicSecretLeaseDTO,
TDetailsDynamicSecretLeaseDTO,
TListDynamicSecretLeasesDTO,
TRenewDynamicSecretLeaseDTO
} from "./dynamic-secret-lease-types";
type TDynamicSecretLeaseServiceFactoryDep = {
dynamicSecretLeaseDAL: TDynamicSecretLeaseDALFactory;
dynamicSecretDAL: Pick<TDynamicSecretDALFactory, "findOne">;
dynamicSecretProviders: Record<DynamicSecretProviders, TDynamicProviderFns>;
dynamicSecretQueueService: TDynamicSecretLeaseQueueServiceFactory;
licenseService: Pick<TLicenseServiceFactory, "getPlan">;
folderDAL: Pick<TSecretFolderDALFactory, "findBySecretPath">;
permissionService: Pick<TPermissionServiceFactory, "getProjectPermission">;
projectDAL: Pick<TProjectDALFactory, "findProjectBySlug">;
};
export type TDynamicSecretLeaseServiceFactory = ReturnType<typeof dynamicSecretLeaseServiceFactory>;
export const dynamicSecretLeaseServiceFactory = ({
dynamicSecretLeaseDAL,
dynamicSecretProviders,
dynamicSecretDAL,
folderDAL,
permissionService,
dynamicSecretQueueService,
projectDAL,
licenseService
}: TDynamicSecretLeaseServiceFactoryDep) => {
const create = async ({
environmentSlug,
path,
name,
projectSlug,
actor,
actorId,
actorOrgId,
actorAuthMethod,
ttl
}: TCreateDynamicSecretLeaseDTO) => {
const appCfg = getConfig();
const project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId);
if (!project) throw new BadRequestError({ message: "Project not found" });
const projectId = project.id;
const { permission } = await permissionService.getProjectPermission(
actor,
actorId,
projectId,
actorAuthMethod,
actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionActions.Read,
subject(ProjectPermissionSub.Secrets, { environment: environmentSlug, secretPath: path })
);
const plan = await licenseService.getPlan(actorOrgId);
if (!plan?.dynamicSecret) {
throw new BadRequestError({
message: "Failed to create lease due to plan restriction. Upgrade plan to create dynamic secret."
});
}
const folder = await folderDAL.findBySecretPath(projectId, environmentSlug, path);
if (!folder) throw new BadRequestError({ message: "Folder not found" });
const dynamicSecretCfg = await dynamicSecretDAL.findOne({ name, folderId: folder.id });
if (!dynamicSecretCfg) throw new BadRequestError({ message: "Dynamic secret not found" });
const totalLeasesTaken = await dynamicSecretLeaseDAL.countLeasesForDynamicSecret(dynamicSecretCfg.id);
if (totalLeasesTaken >= appCfg.MAX_LEASE_LIMIT)
throw new BadRequestError({ message: `Max lease limit reached. Limit: ${appCfg.MAX_LEASE_LIMIT}` });
const selectedProvider = dynamicSecretProviders[dynamicSecretCfg.type as DynamicSecretProviders];
const decryptedStoredInput = JSON.parse(
infisicalSymmetricDecrypt({
keyEncoding: dynamicSecretCfg.keyEncoding as SecretKeyEncoding,
ciphertext: dynamicSecretCfg.inputCiphertext,
tag: dynamicSecretCfg.inputTag,
iv: dynamicSecretCfg.inputIV
})
) as object;
const selectedTTL = ttl ?? dynamicSecretCfg.defaultTTL;
const { maxTTL } = dynamicSecretCfg;
const expireAt = new Date(new Date().getTime() + ms(selectedTTL));
if (maxTTL) {
const maxExpiryDate = new Date(new Date().getTime() + ms(maxTTL));
if (expireAt > maxExpiryDate) throw new BadRequestError({ message: "TTL cannot be larger than max TTL" });
}
const { entityId, data } = await selectedProvider.create(decryptedStoredInput, expireAt.getTime());
const dynamicSecretLease = await dynamicSecretLeaseDAL.create({
expireAt,
version: 1,
dynamicSecretId: dynamicSecretCfg.id,
externalEntityId: entityId
});
await dynamicSecretQueueService.setLeaseRevocation(dynamicSecretLease.id, Number(expireAt) - Number(new Date()));
return { lease: dynamicSecretLease, dynamicSecret: dynamicSecretCfg, data };
};
const renewLease = async ({
ttl,
actorAuthMethod,
actorOrgId,
actorId,
actor,
projectSlug,
path,
environmentSlug,
leaseId
}: TRenewDynamicSecretLeaseDTO) => {
const project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId);
if (!project) throw new BadRequestError({ message: "Project not found" });
const projectId = project.id;
const { permission } = await permissionService.getProjectPermission(
actor,
actorId,
projectId,
actorAuthMethod,
actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionActions.Edit,
subject(ProjectPermissionSub.Secrets, { environment: environmentSlug, secretPath: path })
);
const plan = await licenseService.getPlan(actorOrgId);
if (!plan?.dynamicSecret) {
throw new BadRequestError({
message: "Failed to renew lease due to plan restriction. Upgrade plan to create dynamic secret."
});
}
const folder = await folderDAL.findBySecretPath(projectId, environmentSlug, path);
if (!folder) throw new BadRequestError({ message: "Folder not found" });
const dynamicSecretLease = await dynamicSecretLeaseDAL.findById(leaseId);
if (!dynamicSecretLease) throw new BadRequestError({ message: "Dynamic secret lease not found" });
const dynamicSecretCfg = dynamicSecretLease.dynamicSecret;
const selectedProvider = dynamicSecretProviders[dynamicSecretCfg.type as DynamicSecretProviders];
const decryptedStoredInput = JSON.parse(
infisicalSymmetricDecrypt({
keyEncoding: dynamicSecretCfg.keyEncoding as SecretKeyEncoding,
ciphertext: dynamicSecretCfg.inputCiphertext,
tag: dynamicSecretCfg.inputTag,
iv: dynamicSecretCfg.inputIV
})
) as object;
const selectedTTL = ttl ?? dynamicSecretCfg.defaultTTL;
const { maxTTL } = dynamicSecretCfg;
const expireAt = new Date(dynamicSecretLease.expireAt.getTime() + ms(selectedTTL));
if (maxTTL) {
const maxExpiryDate = new Date(dynamicSecretLease.createdAt.getTime() + ms(maxTTL));
if (expireAt > maxExpiryDate) throw new BadRequestError({ message: "TTL cannot be larger than max TTL" });
}
const { entityId } = await selectedProvider.renew(
decryptedStoredInput,
dynamicSecretLease.externalEntityId,
expireAt.getTime()
);
await dynamicSecretQueueService.unsetLeaseRevocation(dynamicSecretLease.id);
await dynamicSecretQueueService.setLeaseRevocation(dynamicSecretLease.id, Number(expireAt) - Number(new Date()));
const updatedDynamicSecretLease = await dynamicSecretLeaseDAL.updateById(dynamicSecretLease.id, {
expireAt,
externalEntityId: entityId
});
return updatedDynamicSecretLease;
};
const revokeLease = async ({
leaseId,
environmentSlug,
path,
projectSlug,
actor,
actorId,
actorOrgId,
actorAuthMethod,
isForced
}: TDeleteDynamicSecretLeaseDTO) => {
const project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId);
if (!project) throw new BadRequestError({ message: "Project not found" });
const projectId = project.id;
const { permission } = await permissionService.getProjectPermission(
actor,
actorId,
projectId,
actorAuthMethod,
actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionActions.Delete,
subject(ProjectPermissionSub.Secrets, { environment: environmentSlug, secretPath: path })
);
const folder = await folderDAL.findBySecretPath(projectId, environmentSlug, path);
if (!folder) throw new BadRequestError({ message: "Folder not found" });
const dynamicSecretLease = await dynamicSecretLeaseDAL.findById(leaseId);
if (!dynamicSecretLease) throw new BadRequestError({ message: "Dynamic secret lease not found" });
const dynamicSecretCfg = dynamicSecretLease.dynamicSecret;
const selectedProvider = dynamicSecretProviders[dynamicSecretCfg.type as DynamicSecretProviders];
const decryptedStoredInput = JSON.parse(
infisicalSymmetricDecrypt({
keyEncoding: dynamicSecretCfg.keyEncoding as SecretKeyEncoding,
ciphertext: dynamicSecretCfg.inputCiphertext,
tag: dynamicSecretCfg.inputTag,
iv: dynamicSecretCfg.inputIV
})
) as object;
const revokeResponse = await selectedProvider
.revoke(decryptedStoredInput, dynamicSecretLease.externalEntityId)
.catch(async (err) => {
// only propagate this error if forced is false
if (!isForced) return { error: err as Error };
});
if ((revokeResponse as { error?: Error })?.error) {
const { error } = revokeResponse as { error?: Error };
logger.error(error?.message, "Failed to revoke lease");
const deletedDynamicSecretLease = await dynamicSecretLeaseDAL.updateById(dynamicSecretLease.id, {
status: DynamicSecretLeaseStatus.FailedDeletion,
statusDetails: error?.message?.slice(0, 255)
});
return deletedDynamicSecretLease;
}
await dynamicSecretQueueService.unsetLeaseRevocation(dynamicSecretLease.id);
const deletedDynamicSecretLease = await dynamicSecretLeaseDAL.deleteById(dynamicSecretLease.id);
return deletedDynamicSecretLease;
};
const listLeases = async ({
path,
name,
actor,
actorId,
projectSlug,
actorOrgId,
environmentSlug,
actorAuthMethod
}: TListDynamicSecretLeasesDTO) => {
const project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId);
if (!project) throw new BadRequestError({ message: "Project not found" });
const projectId = project.id;
const { permission } = await permissionService.getProjectPermission(
actor,
actorId,
projectId,
actorAuthMethod,
actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionActions.Read,
subject(ProjectPermissionSub.Secrets, { environment: environmentSlug, secretPath: path })
);
const folder = await folderDAL.findBySecretPath(projectId, environmentSlug, path);
if (!folder) throw new BadRequestError({ message: "Folder not found" });
const dynamicSecretCfg = await dynamicSecretDAL.findOne({ name, folderId: folder.id });
if (!dynamicSecretCfg) throw new BadRequestError({ message: "Dynamic secret not found" });
const dynamicSecretLeases = await dynamicSecretLeaseDAL.find({ dynamicSecretId: dynamicSecretCfg.id });
return dynamicSecretLeases;
};
const getLeaseDetails = async ({
projectSlug,
actorOrgId,
path,
environmentSlug,
actor,
actorId,
leaseId,
actorAuthMethod
}: TDetailsDynamicSecretLeaseDTO) => {
const project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId);
if (!project) throw new BadRequestError({ message: "Project not found" });
const projectId = project.id;
const { permission } = await permissionService.getProjectPermission(
actor,
actorId,
projectId,
actorAuthMethod,
actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionActions.Read,
subject(ProjectPermissionSub.Secrets, { environment: environmentSlug, secretPath: path })
);
const folder = await folderDAL.findBySecretPath(projectId, environmentSlug, path);
if (!folder) throw new BadRequestError({ message: "Folder not found" });
const dynamicSecretLease = await dynamicSecretLeaseDAL.findById(leaseId);
if (!dynamicSecretLease) throw new BadRequestError({ message: "Dynamic secret lease not found" });
return dynamicSecretLease;
};
return {
create,
listLeases,
revokeLease,
renewLease,
getLeaseDetails
};
};

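The TTL handling in create and renewLease above is plain date arithmetic with ms(); a worked sketch with illustrative values:

import ms from "ms";

// Worked example of the TTL / maxTTL checks above (values are illustrative).
const defaultTTL = "1h";
const maxTTL = "4h";
const requestedTTL: string | undefined = "2h";

const selectedTTL = requestedTTL ?? defaultTTL;
const now = Date.now();
const expireAt = new Date(now + ms(selectedTTL)); // lease creation: now + 2h

const maxExpiryDate = new Date(now + ms(maxTTL)); // cap at creation time + 4h
if (expireAt > maxExpiryDate) {
  throw new Error("TTL cannot be larger than max TTL");
}

// On renewal the new expiry extends the current one, but the cap is still measured
// from the lease's original createdAt, so repeated renewals cannot exceed maxTTL overall.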
@ -1,43 +0,0 @@
import { TProjectPermission } from "@app/lib/types";
export enum DynamicSecretLeaseStatus {
FailedDeletion = "Failed to delete"
}
export type TCreateDynamicSecretLeaseDTO = {
name: string;
path: string;
environmentSlug: string;
ttl?: string;
projectSlug: string;
} & Omit<TProjectPermission, "projectId">;
export type TDetailsDynamicSecretLeaseDTO = {
leaseId: string;
path: string;
environmentSlug: string;
projectSlug: string;
} & Omit<TProjectPermission, "projectId">;
export type TListDynamicSecretLeasesDTO = {
name: string;
path: string;
environmentSlug: string;
projectSlug: string;
} & Omit<TProjectPermission, "projectId">;
export type TDeleteDynamicSecretLeaseDTO = {
leaseId: string;
path: string;
environmentSlug: string;
projectSlug: string;
isForced?: boolean;
} & Omit<TProjectPermission, "projectId">;
export type TRenewDynamicSecretLeaseDTO = {
leaseId: string;
path: string;
environmentSlug: string;
ttl?: string;
projectSlug: string;
} & Omit<TProjectPermission, "projectId">;

@ -1,10 +0,0 @@
import { TDbClient } from "@app/db";
import { TableName } from "@app/db/schemas";
import { ormify } from "@app/lib/knex";
export type TDynamicSecretDALFactory = ReturnType<typeof dynamicSecretDALFactory>;
export const dynamicSecretDALFactory = (db: TDbClient) => {
const orm = ormify(db, TableName.DynamicSecret);
return orm;
};

@ -1,341 +0,0 @@
import { ForbiddenError, subject } from "@casl/ability";
import { SecretKeyEncoding } from "@app/db/schemas";
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
import { ProjectPermissionActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission";
import { infisicalSymmetricDecrypt, infisicalSymmetricEncypt } from "@app/lib/crypto/encryption";
import { BadRequestError } from "@app/lib/errors";
import { TProjectDALFactory } from "@app/services/project/project-dal";
import { TSecretFolderDALFactory } from "@app/services/secret-folder/secret-folder-dal";
import { TDynamicSecretLeaseDALFactory } from "../dynamic-secret-lease/dynamic-secret-lease-dal";
import { TDynamicSecretLeaseQueueServiceFactory } from "../dynamic-secret-lease/dynamic-secret-lease-queue";
import { TDynamicSecretDALFactory } from "./dynamic-secret-dal";
import {
DynamicSecretStatus,
TCreateDynamicSecretDTO,
TDeleteDynamicSecretDTO,
TDetailsDynamicSecretDTO,
TListDynamicSecretsDTO,
TUpdateDynamicSecretDTO
} from "./dynamic-secret-types";
import { DynamicSecretProviders, TDynamicProviderFns } from "./providers/models";
type TDynamicSecretServiceFactoryDep = {
dynamicSecretDAL: TDynamicSecretDALFactory;
dynamicSecretLeaseDAL: Pick<TDynamicSecretLeaseDALFactory, "find">;
dynamicSecretProviders: Record<DynamicSecretProviders, TDynamicProviderFns>;
dynamicSecretQueueService: Pick<
TDynamicSecretLeaseQueueServiceFactory,
"pruneDynamicSecret" | "unsetLeaseRevocation"
>;
licenseService: Pick<TLicenseServiceFactory, "getPlan">;
folderDAL: Pick<TSecretFolderDALFactory, "findBySecretPath">;
projectDAL: Pick<TProjectDALFactory, "findProjectBySlug">;
permissionService: Pick<TPermissionServiceFactory, "getProjectPermission">;
};
export type TDynamicSecretServiceFactory = ReturnType<typeof dynamicSecretServiceFactory>;
export const dynamicSecretServiceFactory = ({
dynamicSecretDAL,
dynamicSecretLeaseDAL,
licenseService,
folderDAL,
dynamicSecretProviders,
permissionService,
dynamicSecretQueueService,
projectDAL
}: TDynamicSecretServiceFactoryDep) => {
const create = async ({
path,
actor,
name,
actorId,
maxTTL,
provider,
environmentSlug,
projectSlug,
actorOrgId,
defaultTTL,
actorAuthMethod
}: TCreateDynamicSecretDTO) => {
const project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId);
if (!project) throw new BadRequestError({ message: "Project not found" });
const projectId = project.id;
const { permission } = await permissionService.getProjectPermission(
actor,
actorId,
projectId,
actorAuthMethod,
actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionActions.Create,
subject(ProjectPermissionSub.Secrets, { environment: environmentSlug, secretPath: path })
);
const plan = await licenseService.getPlan(actorOrgId);
if (!plan?.dynamicSecret) {
throw new BadRequestError({
message: "Failed to create dynamic secret due to plan restriction. Upgrade plan to create dynamic secret."
});
}
const folder = await folderDAL.findBySecretPath(projectId, environmentSlug, path);
if (!folder) throw new BadRequestError({ message: "Folder not found" });
const existingDynamicSecret = await dynamicSecretDAL.findOne({ name, folderId: folder.id });
if (existingDynamicSecret)
throw new BadRequestError({ message: "Provided dynamic secret already exist under the folder" });
const selectedProvider = dynamicSecretProviders[provider.type];
const inputs = await selectedProvider.validateProviderInputs(provider.inputs);
const isConnected = await selectedProvider.validateConnection(provider.inputs);
if (!isConnected) throw new BadRequestError({ message: "Provider connection failed" });
const encryptedInput = infisicalSymmetricEncypt(JSON.stringify(inputs));
const dynamicSecretCfg = await dynamicSecretDAL.create({
type: provider.type,
version: 1,
inputIV: encryptedInput.iv,
inputTag: encryptedInput.tag,
inputCiphertext: encryptedInput.ciphertext,
algorithm: encryptedInput.algorithm,
keyEncoding: encryptedInput.encoding,
maxTTL,
defaultTTL,
folderId: folder.id,
name
});
return dynamicSecretCfg;
};
const updateByName = async ({
name,
maxTTL,
defaultTTL,
inputs,
environmentSlug,
projectSlug,
path,
actor,
actorId,
newName,
actorOrgId,
actorAuthMethod
}: TUpdateDynamicSecretDTO) => {
const project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId);
if (!project) throw new BadRequestError({ message: "Project not found" });
const projectId = project.id;
const { permission } = await permissionService.getProjectPermission(
actor,
actorId,
projectId,
actorAuthMethod,
actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionActions.Edit,
subject(ProjectPermissionSub.Secrets, { environment: environmentSlug, secretPath: path })
);
const plan = await licenseService.getPlan(actorOrgId);
if (!plan?.dynamicSecret) {
throw new BadRequestError({
message: "Failed to update dynamic secret due to plan restriction. Upgrade plan to create dynamic secret."
});
}
const folder = await folderDAL.findBySecretPath(projectId, environmentSlug, path);
if (!folder) throw new BadRequestError({ message: "Folder not found" });
const dynamicSecretCfg = await dynamicSecretDAL.findOne({ name, folderId: folder.id });
if (!dynamicSecretCfg) throw new BadRequestError({ message: "Dynamic secret not found" });
if (newName) {
const existingDynamicSecret = await dynamicSecretDAL.findOne({ name: newName, folderId: folder.id });
if (existingDynamicSecret)
throw new BadRequestError({ message: "Provided dynamic secret already exist under the folder" });
}
const selectedProvider = dynamicSecretProviders[dynamicSecretCfg.type as DynamicSecretProviders];
const decryptedStoredInput = JSON.parse(
infisicalSymmetricDecrypt({
keyEncoding: dynamicSecretCfg.keyEncoding as SecretKeyEncoding,
ciphertext: dynamicSecretCfg.inputCiphertext,
tag: dynamicSecretCfg.inputTag,
iv: dynamicSecretCfg.inputIV
})
) as object;
const newInput = { ...decryptedStoredInput, ...(inputs || {}) };
const updatedInput = await selectedProvider.validateProviderInputs(newInput);
const isConnected = await selectedProvider.validateConnection(newInput);
if (!isConnected) throw new BadRequestError({ message: "Provider connection failed" });
const encryptedInput = infisicalSymmetricEncypt(JSON.stringify(updatedInput));
const updatedDynamicCfg = await dynamicSecretDAL.updateById(dynamicSecretCfg.id, {
inputIV: encryptedInput.iv,
inputTag: encryptedInput.tag,
inputCiphertext: encryptedInput.ciphertext,
algorithm: encryptedInput.algorithm,
keyEncoding: encryptedInput.encoding,
maxTTL,
defaultTTL,
name: newName ?? name,
status: null,
statusDetails: null
});
return updatedDynamicCfg;
};
const deleteByName = async ({
actorAuthMethod,
actorOrgId,
actorId,
actor,
projectSlug,
name,
path,
environmentSlug,
isForced
}: TDeleteDynamicSecretDTO) => {
const project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId);
if (!project) throw new BadRequestError({ message: "Project not found" });
const projectId = project.id;
const { permission } = await permissionService.getProjectPermission(
actor,
actorId,
projectId,
actorAuthMethod,
actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionActions.Edit,
subject(ProjectPermissionSub.Secrets, { environment: environmentSlug, secretPath: path })
);
const folder = await folderDAL.findBySecretPath(projectId, environmentSlug, path);
if (!folder) throw new BadRequestError({ message: "Folder not found" });
const dynamicSecretCfg = await dynamicSecretDAL.findOne({ name, folderId: folder.id });
if (!dynamicSecretCfg) throw new BadRequestError({ message: "Dynamic secret not found" });
const leases = await dynamicSecretLeaseDAL.find({ dynamicSecretId: dynamicSecretCfg.id });
// when not forced, we first ask the external system to revoke the leases before deleting anything here
// the forced flag exists because the external lease may already have been removed out of band (by a human or another system),
// in which case this still lets the user clean the record up from Infisical
if (isForced) {
// clear all queues for lease revocations
await Promise.all(leases.map(({ id: leaseId }) => dynamicSecretQueueService.unsetLeaseRevocation(leaseId)));
const deletedDynamicSecretCfg = await dynamicSecretDAL.deleteById(dynamicSecretCfg.id);
return deletedDynamicSecretCfg;
}
// if leases exist, flag the config as deleting, revoke the leases in the background,
// and then delete the config itself
if (leases.length) {
const updatedDynamicSecretCfg = await dynamicSecretDAL.updateById(dynamicSecretCfg.id, {
status: DynamicSecretStatus.Deleting
});
await dynamicSecretQueueService.pruneDynamicSecret(updatedDynamicSecretCfg.id);
return updatedDynamicSecretCfg;
}
// if no leases just delete the config
const deletedDynamicSecretCfg = await dynamicSecretDAL.deleteById(dynamicSecretCfg.id);
return deletedDynamicSecretCfg;
};
const getDetails = async ({
name,
projectSlug,
path,
environmentSlug,
actorAuthMethod,
actorOrgId,
actorId,
actor
}: TDetailsDynamicSecretDTO) => {
const project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId);
if (!project) throw new BadRequestError({ message: "Project not found" });
const projectId = project.id;
const { permission } = await permissionService.getProjectPermission(
actor,
actorId,
projectId,
actorAuthMethod,
actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionActions.Edit,
subject(ProjectPermissionSub.Secrets, { environment: environmentSlug, secretPath: path })
);
const folder = await folderDAL.findBySecretPath(projectId, environmentSlug, path);
if (!folder) throw new BadRequestError({ message: "Folder not found" });
const dynamicSecretCfg = await dynamicSecretDAL.findOne({ name, folderId: folder.id });
if (!dynamicSecretCfg) throw new BadRequestError({ message: "Dynamic secret not found" });
const decryptedStoredInput = JSON.parse(
infisicalSymmetricDecrypt({
keyEncoding: dynamicSecretCfg.keyEncoding as SecretKeyEncoding,
ciphertext: dynamicSecretCfg.inputCiphertext,
tag: dynamicSecretCfg.inputTag,
iv: dynamicSecretCfg.inputIV
})
) as object;
const selectedProvider = dynamicSecretProviders[dynamicSecretCfg.type as DynamicSecretProviders];
const providerInputs = (await selectedProvider.validateProviderInputs(decryptedStoredInput)) as object;
return { ...dynamicSecretCfg, inputs: providerInputs };
};
const list = async ({
actorAuthMethod,
actorOrgId,
actorId,
actor,
projectSlug,
path,
environmentSlug
}: TListDynamicSecretsDTO) => {
const project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId);
if (!project) throw new BadRequestError({ message: "Project not found" });
const projectId = project.id;
const { permission } = await permissionService.getProjectPermission(
actor,
actorId,
projectId,
actorAuthMethod,
actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionActions.Read,
subject(ProjectPermissionSub.Secrets, { environment: environmentSlug, secretPath: path })
);
const folder = await folderDAL.findBySecretPath(projectId, environmentSlug, path);
if (!folder) throw new BadRequestError({ message: "Folder not found" });
const dynamicSecretCfg = await dynamicSecretDAL.find({ folderId: folder.id });
return dynamicSecretCfg;
};
return {
create,
updateByName,
deleteByName,
getDetails,
list
};
};

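A sketch of the provider-input round trip the service above performs: validated inputs are serialized, encrypted at rest, and decrypted again whenever the provider needs them. The example values are illustrative; the helpers are the ones imported in the file above.

import { infisicalSymmetricDecrypt, infisicalSymmetricEncypt } from "@app/lib/crypto/encryption";
import { SecretKeyEncoding } from "@app/db/schemas";

// Illustrative provider inputs; in the service these come from validateProviderInputs.
const inputs = { host: "db.example.com", port: 5432 };
const encrypted = infisicalSymmetricEncypt(JSON.stringify(inputs));

// Stored on the dynamic secret row as inputIV / inputTag / inputCiphertext / algorithm / keyEncoding,
// then decrypted back into an object whenever a lease needs to talk to the external system.
const decrypted = JSON.parse(
  infisicalSymmetricDecrypt({
    keyEncoding: encrypted.encoding as SecretKeyEncoding,
    ciphertext: encrypted.ciphertext,
    tag: encrypted.tag,
    iv: encrypted.iv
  })
) as object;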
@ -1,54 +0,0 @@
import { z } from "zod";
import { TProjectPermission } from "@app/lib/types";
import { DynamicSecretProviderSchema } from "./providers/models";
// various statuses a dynamic secret can take on while background jobs act on it
export enum DynamicSecretStatus {
Deleting = "Revocation in process",
FailedDeletion = "Failed to delete"
}
type TProvider = z.infer<typeof DynamicSecretProviderSchema>;
export type TCreateDynamicSecretDTO = {
provider: TProvider;
defaultTTL: string;
maxTTL?: string | null;
path: string;
environmentSlug: string;
name: string;
projectSlug: string;
} & Omit<TProjectPermission, "projectId">;
export type TUpdateDynamicSecretDTO = {
name: string;
newName?: string;
defaultTTL?: string;
maxTTL?: string | null;
path: string;
environmentSlug: string;
inputs?: TProvider["inputs"];
projectSlug: string;
} & Omit<TProjectPermission, "projectId">;
export type TDeleteDynamicSecretDTO = {
name: string;
path: string;
environmentSlug: string;
projectSlug: string;
isForced?: boolean;
} & Omit<TProjectPermission, "projectId">;
export type TDetailsDynamicSecretDTO = {
name: string;
path: string;
environmentSlug: string;
projectSlug: string;
} & Omit<TProjectPermission, "projectId">;
export type TListDynamicSecretsDTO = {
path: string;
environmentSlug: string;
projectSlug: string;
} & Omit<TProjectPermission, "projectId">;

@ -1,125 +0,0 @@
import cassandra from "cassandra-driver";
import handlebars from "handlebars";
import { customAlphabet } from "nanoid";
import { z } from "zod";
import { BadRequestError } from "@app/lib/errors";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { DynamicSecretCassandraSchema, TDynamicProviderFns } from "./models";
const generatePassword = (size = 48) => {
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*$#";
return customAlphabet(charset, 48)(size);
};
const generateUsername = () => {
return alphaNumericNanoId(32);
};
export const CassandraProvider = (): TDynamicProviderFns => {
const validateProviderInputs = async (inputs: unknown) => {
const providerInputs = await DynamicSecretCassandraSchema.parseAsync(inputs);
if (providerInputs.host === "localhost" || providerInputs.host === "127.0.0.1") {
throw new BadRequestError({ message: "Invalid db host" });
}
return providerInputs;
};
const getClient = async (providerInputs: z.infer<typeof DynamicSecretCassandraSchema>) => {
const sslOptions = providerInputs.ca ? { rejectUnauthorized: false, ca: providerInputs.ca } : undefined;
const client = new cassandra.Client({
sslOptions,
protocolOptions: {
port: providerInputs.port
},
credentials: {
username: providerInputs.username,
password: providerInputs.password
},
keyspace: providerInputs.keyspace,
localDataCenter: providerInputs?.localDataCenter,
contactPoints: providerInputs.host.split(",").filter(Boolean)
});
return client;
};
const validateConnection = async (inputs: unknown) => {
const providerInputs = await validateProviderInputs(inputs);
const client = await getClient(providerInputs);
const isConnected = await client.execute("SELECT * FROM system_schema.keyspaces").then(() => true);
await client.shutdown();
return isConnected;
};
const create = async (inputs: unknown, expireAt: number) => {
const providerInputs = await validateProviderInputs(inputs);
const client = await getClient(providerInputs);
const username = generateUsername();
const password = generatePassword();
const { keyspace } = providerInputs;
const expiration = new Date(expireAt).toISOString();
const creationStatement = handlebars.compile(providerInputs.creationStatement, { noEscape: true })({
username,
password,
expiration,
keyspace
});
const queries = creationStatement.toString().split(";").filter(Boolean);
for (const query of queries) {
// eslint-disable-next-line
await client.execute(query);
}
await client.shutdown();
return { entityId: username, data: { DB_USERNAME: username, DB_PASSWORD: password } };
};
const revoke = async (inputs: unknown, entityId: string) => {
const providerInputs = await validateProviderInputs(inputs);
const client = await getClient(providerInputs);
const username = entityId;
const { keyspace } = providerInputs;
const revokeStatement = handlebars.compile(providerInputs.revocationStatement)({ username, keyspace });
const queries = revokeStatement.toString().split(";").filter(Boolean);
for (const query of queries) {
// eslint-disable-next-line
await client.execute(query);
}
await client.shutdown();
return { entityId: username };
};
const renew = async (inputs: unknown, entityId: string, expireAt: number) => {
const providerInputs = await validateProviderInputs(inputs);
const client = await getClient(providerInputs);
const username = entityId;
const expiration = new Date(expireAt).toISOString();
const { keyspace } = providerInputs;
// use the dedicated renew statement (not the revocation statement); if none is configured there is nothing to run
if (!providerInputs.renewStatement) {
  await client.shutdown();
  return { entityId: username };
}
const renewStatement = handlebars.compile(providerInputs.renewStatement)({ username, keyspace, expiration });
const queries = renewStatement.toString().split(";").filter(Boolean);
for (const query of queries) {
// eslint-disable-next-line
await client.execute(query);
}
await client.shutdown();
return { entityId: username };
};
return {
validateProviderInputs,
validateConnection,
create,
revoke,
renew
};
};

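The provider above treats creationStatement and revocationStatement as handlebars templates and splits the rendered output on ";". A sketch with an illustrative CQL template; the exact statements are an assumption, only the placeholder names ({{username}}, {{password}}, {{expiration}}, {{keyspace}}) come from the code above.

import handlebars from "handlebars";

// Illustrative creation statement of the kind the Cassandra provider expects.
const creationStatement = [
  "CREATE ROLE {{username}} WITH PASSWORD = '{{password}}' AND LOGIN = true",
  "GRANT SELECT ON KEYSPACE {{keyspace}} TO {{username}}"
].join("; ");

const rendered = handlebars.compile(creationStatement, { noEscape: true })({
  username: "inf_abc123",
  password: "example-password",
  expiration: new Date().toISOString(),
  keyspace: "app_data"
});

// The provider splits on ";" and executes each statement separately.
const queries = rendered.split(";").map((q) => q.trim()).filter(Boolean);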
@ -1,8 +0,0 @@
import { CassandraProvider } from "./cassandra";
import { DynamicSecretProviders } from "./models";
import { SqlDatabaseProvider } from "./sql-database";
export const buildDynamicSecretProviders = () => ({
[DynamicSecretProviders.SqlDatabase]: SqlDatabaseProvider(),
[DynamicSecretProviders.Cassandra]: CassandraProvider()
});

@ -1,51 +0,0 @@
import { z } from "zod";
export enum SqlProviders {
Postgres = "postgres",
MySQL = "mysql2",
Oracle = "oracledb"
}
export const DynamicSecretSqlDBSchema = z.object({
client: z.nativeEnum(SqlProviders),
host: z.string().toLowerCase(),
port: z.number(),
database: z.string(),
username: z.string(),
password: z.string(),
creationStatement: z.string(),
revocationStatement: z.string(),
renewStatement: z.string().optional(),
ca: z.string().optional()
});
export const DynamicSecretCassandraSchema = z.object({
host: z.string().toLowerCase(),
port: z.number(),
localDataCenter: z.string().min(1),
keyspace: z.string().optional(),
username: z.string(),
password: z.string(),
creationStatement: z.string(),
revocationStatement: z.string(),
renewStatement: z.string().optional(),
ca: z.string().optional()
});
export enum DynamicSecretProviders {
SqlDatabase = "sql-database",
Cassandra = "cassandra"
}
export const DynamicSecretProviderSchema = z.discriminatedUnion("type", [
z.object({ type: z.literal(DynamicSecretProviders.SqlDatabase), inputs: DynamicSecretSqlDBSchema }),
z.object({ type: z.literal(DynamicSecretProviders.Cassandra), inputs: DynamicSecretCassandraSchema })
]);
export type TDynamicProviderFns = {
create: (inputs: unknown, expireAt: number) => Promise<{ entityId: string; data: unknown }>;
validateConnection: (inputs: unknown) => Promise<boolean>;
validateProviderInputs: (inputs: object) => Promise<unknown>;
revoke: (inputs: unknown, entityId: string) => Promise<{ entityId: string }>;
renew: (inputs: unknown, entityId: string, expireAt: number) => Promise<{ entityId: string }>;
};
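Because DynamicSecretProviderSchema is a discriminated union on type, a single parse both selects the provider variant and validates its inputs, and TypeScript narrows inputs accordingly. A minimal sketch with a hypothetical payload:

import { DynamicSecretProviders, DynamicSecretProviderSchema } from "./models";

// Hypothetical API payload for this sketch.
const parsed = DynamicSecretProviderSchema.parse({
  type: DynamicSecretProviders.Cassandra,
  inputs: {
    host: "cassandra.internal.example.com",
    port: 9042,
    localDataCenter: "dc1",
    username: "admin",
    password: "example-password",
    creationStatement: `CREATE ROLE "{{username}}" WITH PASSWORD = '{{password}}' AND LOGIN = true;`,
    revocationStatement: `DROP ROLE "{{username}}";`
  }
});

if (parsed.type === DynamicSecretProviders.Cassandra) {
  // Narrowed to the Cassandra input shape; keyspace stays optional.
  console.log(parsed.inputs.localDataCenter);
}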

@@ -1,162 +0,0 @@
import handlebars from "handlebars";
import knex from "knex";
import { customAlphabet } from "nanoid";
import { z } from "zod";
import { getConfig } from "@app/lib/config/env";
import { BadRequestError } from "@app/lib/errors";
import { getDbConnectionHost } from "@app/lib/knex";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { DynamicSecretSqlDBSchema, SqlProviders, TDynamicProviderFns } from "./models";
const EXTERNAL_REQUEST_TIMEOUT = 10 * 1000;
const generatePassword = (provider: SqlProviders) => {
// Oracle enforces a shorter maximum password length, so cap the size at 30 characters for it
const size = provider === SqlProviders.Oracle ? 30 : 48;
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*$#";
return customAlphabet(charset, 48)(size);
};
const generateUsername = (provider: SqlProviders) => {
// For oracle, the client assumes everything is upper case when not using quotes around the password
if (provider === SqlProviders.Oracle) return alphaNumericNanoId(32).toUpperCase();
return alphaNumericNanoId(32);
};
export const SqlDatabaseProvider = (): TDynamicProviderFns => {
const validateProviderInputs = async (inputs: unknown) => {
const appCfg = getConfig();
const isCloud = Boolean(appCfg.LICENSE_SERVER_KEY); // quick and dirty way to check if it's cloud or not
const dbHost = appCfg.DB_HOST || getDbConnectionHost(appCfg.DB_CONNECTION_URI);
const providerInputs = await DynamicSecretSqlDBSchema.parseAsync(inputs);
if (
isCloud &&
// block docker-internal hostnames and private ip ranges on cloud
(providerInputs.host === "host.docker.internal" ||
providerInputs.host.match(/^10\.\d+\.\d+\.\d+/) ||
providerInputs.host.match(/^192\.168\.\d+\.\d+/))
)
throw new BadRequestError({ message: "Invalid db host" });
if (
providerInputs.host === "localhost" ||
providerInputs.host === "127.0.0.1" ||
// database infisical uses
dbHost === providerInputs.host
)
throw new BadRequestError({ message: "Invalid db host" });
return providerInputs;
};
const getClient = async (providerInputs: z.infer<typeof DynamicSecretSqlDBSchema>) => {
const ssl = providerInputs.ca ? { rejectUnauthorized: false, ca: providerInputs.ca } : undefined;
const db = knex({
client: providerInputs.client,
connection: {
database: providerInputs.database,
port: providerInputs.port,
host: providerInputs.host,
user: providerInputs.username,
password: providerInputs.password,
ssl,
pool: { min: 0, max: 1 }
},
acquireConnectionTimeout: EXTERNAL_REQUEST_TIMEOUT
});
return db;
};
const validateConnection = async (inputs: unknown) => {
const providerInputs = await validateProviderInputs(inputs);
const db = await getClient(providerInputs);
// oracle needs from keyword
const testStatement = providerInputs.client === SqlProviders.Oracle ? "SELECT 1 FROM DUAL" : "SELECT 1";
const isConnected = await db.raw(testStatement).then(() => true);
await db.destroy();
return isConnected;
};
const create = async (inputs: unknown, expireAt: number) => {
const providerInputs = await validateProviderInputs(inputs);
const db = await getClient(providerInputs);
const username = generateUsername(providerInputs.client);
const password = generatePassword(providerInputs.client);
const { database } = providerInputs;
const expiration = new Date(expireAt).toISOString();
const creationStatement = handlebars.compile(providerInputs.creationStatement, { noEscape: true })({
username,
password,
expiration,
database
});
const queries = creationStatement.toString().split(";").filter(Boolean);
await db.transaction(async (tx) => {
for (const query of queries) {
// eslint-disable-next-line
await tx.raw(query);
}
});
await db.destroy();
return { entityId: username, data: { DB_USERNAME: username, DB_PASSWORD: password } };
};
const revoke = async (inputs: unknown, entityId: string) => {
const providerInputs = await validateProviderInputs(inputs);
const db = await getClient(providerInputs);
const username = entityId;
const { database } = providerInputs;
const revokeStatement = handlebars.compile(providerInputs.revocationStatement)({ username, database });
const queries = revokeStatement.toString().split(";").filter(Boolean);
await db.transaction(async (tx) => {
for (const query of queries) {
// eslint-disable-next-line
await tx.raw(query);
}
});
await db.destroy();
return { entityId: username };
};
const renew = async (inputs: unknown, entityId: string, expireAt: number) => {
const providerInputs = await validateProviderInputs(inputs);
const db = await getClient(providerInputs);
const username = entityId;
const expiration = new Date(expireAt).toISOString();
const { database } = providerInputs;
// renewStatement is optional in the schema, so guard before compiling to avoid handlebars throwing on undefined
const renewStatement = providerInputs.renewStatement
? handlebars.compile(providerInputs.renewStatement)({ username, expiration, database })
: "";
if (renewStatement) {
const queries = renewStatement.toString().split(";").filter(Boolean);
await db.transaction(async (tx) => {
for (const query of queries) {
// eslint-disable-next-line
await tx.raw(query);
}
});
}
await db.destroy();
return { entityId: username };
};
return {
validateProviderInputs,
validateConnection,
create,
revoke,
renew
};
};
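The SQL provider follows the same template-driven lifecycle, rendering username, password, expiration, and database into operator-supplied statements and running each one inside a single transaction. An illustrative Postgres-flavoured pair of templates (assumptions for this sketch, not shipped defaults):

// Illustrative Postgres templates (assumed for this sketch).
const creationStatement =
  `CREATE USER "{{username}}" WITH ENCRYPTED PASSWORD '{{password}}' VALID UNTIL '{{expiration}}';` +
  ` GRANT SELECT ON ALL TABLES IN SCHEMA public TO "{{username}}";`;

const revocationStatement =
  `REVOKE ALL PRIVILEGES ON ALL TABLES IN SCHEMA public FROM "{{username}}"; DROP USER "{{username}}";`;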

@@ -1,130 +0,0 @@
import { Knex } from "knex";
import { TDbClient } from "@app/db";
import { TableName, TGroups } from "@app/db/schemas";
import { DatabaseError } from "@app/lib/errors";
import { buildFindFilter, ormify, selectAllTableCols, TFindFilter, TFindOpt } from "@app/lib/knex";
export type TGroupDALFactory = ReturnType<typeof groupDALFactory>;
export const groupDALFactory = (db: TDbClient) => {
const groupOrm = ormify(db, TableName.Groups);
const findGroups = async (filter: TFindFilter<TGroups>, { offset, limit, sort, tx }: TFindOpt<TGroups> = {}) => {
try {
const query = (tx || db)(TableName.Groups)
// eslint-disable-next-line
.where(buildFindFilter(filter))
.select(selectAllTableCols(TableName.Groups));
if (limit) void query.limit(limit);
if (offset) void query.offset(offset);
if (sort) {
void query.orderBy(sort.map(([column, order, nulls]) => ({ column: column as string, order, nulls })));
}
const res = await query;
return res;
} catch (err) {
throw new DatabaseError({ error: err, name: "Find groups" });
}
};
const findByOrgId = async (orgId: string, tx?: Knex) => {
try {
const docs = await (tx || db)(TableName.Groups)
.where(`${TableName.Groups}.orgId`, orgId)
.leftJoin(TableName.OrgRoles, `${TableName.Groups}.roleId`, `${TableName.OrgRoles}.id`)
.select(selectAllTableCols(TableName.Groups))
// cr stands for custom role
.select(db.ref("id").as("crId").withSchema(TableName.OrgRoles))
.select(db.ref("name").as("crName").withSchema(TableName.OrgRoles))
.select(db.ref("slug").as("crSlug").withSchema(TableName.OrgRoles))
.select(db.ref("description").as("crDescription").withSchema(TableName.OrgRoles))
.select(db.ref("permissions").as("crPermission").withSchema(TableName.OrgRoles));
return docs.map(({ crId, crDescription, crSlug, crPermission, crName, ...el }) => ({
...el,
customRole: el.roleId
? {
id: crId,
name: crName,
slug: crSlug,
permissions: crPermission,
description: crDescription
}
: undefined
}));
} catch (error) {
throw new DatabaseError({ error, name: "FindByOrgId" });
}
};
// special query: list all org members and flag whether each already belongs to the group
const findAllGroupMembers = async ({
orgId,
groupId,
offset = 0,
limit,
username
}: {
orgId: string;
groupId: string;
offset?: number;
limit?: number;
username?: string;
}) => {
try {
let query = db(TableName.OrgMembership)
.where(`${TableName.OrgMembership}.orgId`, orgId)
.join(TableName.Users, `${TableName.OrgMembership}.userId`, `${TableName.Users}.id`)
.leftJoin(TableName.UserGroupMembership, function () {
this.on(`${TableName.UserGroupMembership}.userId`, "=", `${TableName.Users}.id`).andOn(
`${TableName.UserGroupMembership}.groupId`,
"=",
db.raw("?", [groupId])
);
})
.select(
db.ref("id").withSchema(TableName.OrgMembership),
db.ref("groupId").withSchema(TableName.UserGroupMembership),
db.ref("email").withSchema(TableName.Users),
db.ref("username").withSchema(TableName.Users),
db.ref("firstName").withSchema(TableName.Users),
db.ref("lastName").withSchema(TableName.Users),
db.ref("id").withSchema(TableName.Users).as("userId")
)
.where({ isGhost: false })
.offset(offset);
if (limit) {
query = query.limit(limit);
}
if (username) {
query = query.andWhere(`${TableName.Users}.username`, "ilike", `%${username}%`);
}
const members = await query;
return members.map(
({ email, username: memberUsername, firstName, lastName, userId, groupId: memberGroupId }) => ({
id: userId,
email,
username: memberUsername,
firstName,
lastName,
isPartOfGroup: !!memberGroupId
})
);
} catch (error) {
throw new DatabaseError({ error, name: "Find all org members" });
}
};
return {
findGroups,
findByOrgId,
findAllGroupMembers,
...groupOrm
};
};
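A hedged usage sketch of findAllGroupMembers: the left join onto UserGroupMembership is what lets each returned org member carry an isPartOfGroup flag (the module path, shared knex export, and ids below are hypothetical):

import { groupDALFactory } from "./group-dal"; // assumed module path
import { db } from "@app/db"; // assumed shared knex client export

const groupDAL = groupDALFactory(db);

const listCandidates = async () => {
  const members = await groupDAL.findAllGroupMembers({
    orgId: "org_123",     // hypothetical ids
    groupId: "group_456",
    offset: 0,
    limit: 20,
    username: "alice"     // optional case-insensitive filter
  });

  members.forEach((m) => console.log(m.username, m.isPartOfGroup));
};

void listCandidates();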

@@ -1,454 +0,0 @@
import { Knex } from "knex";
import { SecretKeyEncoding, TUsers } from "@app/db/schemas";
import { decryptAsymmetric, encryptAsymmetric, infisicalSymmetricDecrypt } from "@app/lib/crypto/encryption";
import { BadRequestError, ScimRequestError } from "@app/lib/errors";
import {
TAddUsersToGroup,
TAddUsersToGroupByUserIds,
TConvertPendingGroupAdditionsToGroupMemberships,
TRemoveUsersFromGroupByUserIds
} from "./group-types";
const addAcceptedUsersToGroup = async ({
userIds,
group,
userGroupMembershipDAL,
userDAL,
groupProjectDAL,
projectKeyDAL,
projectDAL,
projectBotDAL,
tx
}: TAddUsersToGroup) => {
console.log("addAcceptedUsersToGroup args: ", {
userIds,
group
});
const users = await userDAL.findUserEncKeyByUserIdsBatch(
{
userIds
},
tx
);
await userGroupMembershipDAL.insertMany(
users.map((user) => ({
userId: user.userId,
groupId: group.id,
isPending: false
})),
tx
);
// check which projects the group is part of
const projectIds = Array.from(
new Set(
(
await groupProjectDAL.find(
{
groupId: group.id
},
{ tx }
)
).map((gp) => gp.projectId)
)
);
const keys = await projectKeyDAL.find(
{
$in: {
projectId: projectIds,
receiverId: users.map((u) => u.id)
}
},
{ tx }
);
const userKeysSet = new Set(keys.map((k) => `${k.projectId}-${k.receiverId}`));
for await (const projectId of projectIds) {
const usersToAddProjectKeyFor = users.filter((u) => !userKeysSet.has(`${projectId}-${u.userId}`));
if (usersToAddProjectKeyFor.length) {
// these users still need the project key shared with them;
// handle key sharing per project when adding users in bulk
const ghostUser = await projectDAL.findProjectGhostUser(projectId, tx);
if (!ghostUser) {
throw new BadRequestError({
message: "Failed to find sudo user"
});
}
const ghostUserLatestKey = await projectKeyDAL.findLatestProjectKey(ghostUser.id, projectId, tx);
if (!ghostUserLatestKey) {
throw new BadRequestError({
message: "Failed to find sudo user latest key"
});
}
const bot = await projectBotDAL.findOne({ projectId }, tx);
if (!bot) {
throw new BadRequestError({
message: "Failed to find bot"
});
}
const botPrivateKey = infisicalSymmetricDecrypt({
keyEncoding: bot.keyEncoding as SecretKeyEncoding,
iv: bot.iv,
tag: bot.tag,
ciphertext: bot.encryptedPrivateKey
});
const plaintextProjectKey = decryptAsymmetric({
ciphertext: ghostUserLatestKey.encryptedKey,
nonce: ghostUserLatestKey.nonce,
publicKey: ghostUserLatestKey.sender.publicKey,
privateKey: botPrivateKey
});
const projectKeysToAdd = usersToAddProjectKeyFor.map((user) => {
const { ciphertext: encryptedKey, nonce } = encryptAsymmetric(
plaintextProjectKey,
user.publicKey,
botPrivateKey
);
return {
encryptedKey,
nonce,
senderId: ghostUser.id,
receiverId: user.userId,
projectId
};
});
await projectKeyDAL.insertMany(projectKeysToAdd, tx);
}
}
};
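The project-key sharing above is the heart of addAcceptedUsersToGroup: the project's symmetric key is recovered by decrypting the ghost user's copy with the bot's private key and is then re-encrypted to each incoming member's public key. A minimal sketch of that round trip with the same helpers (the key material is a hypothetical placeholder):

import { decryptAsymmetric, encryptAsymmetric } from "@app/lib/crypto/encryption";

// Hypothetical placeholders; in the real flow these come from the ghost user's
// latest project key record and the symmetrically decrypted bot private key.
const ghostUserLatestKey = { encryptedKey: "...", nonce: "...", sender: { publicKey: "..." } };
const botPrivateKey = "...";
const newMemberPublicKey = "...";

// 1. Recover the plaintext project key that was shared with the ghost user.
const plaintextProjectKey = decryptAsymmetric({
  ciphertext: ghostUserLatestKey.encryptedKey,
  nonce: ghostUserLatestKey.nonce,
  publicKey: ghostUserLatestKey.sender.publicKey,
  privateKey: botPrivateKey
});

// 2. Re-encrypt it to the incoming member so they can read project secrets.
const { ciphertext: encryptedKey, nonce } = encryptAsymmetric(
  plaintextProjectKey,
  newMemberPublicKey,
  botPrivateKey
);

console.log({ encryptedKey, nonce });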
/**
* Add users with user ids [userIds] to group [group].
* - Users may or may not have completed account setup; this function handles both
* adding accepted users to the group directly and recording pending group additions.
* @param {group} group - group to add user(s) to
* @param {string[]} userIds - id(s) of user(s) to add to group
*/
export const addUsersToGroupByUserIds = async ({
group,
userIds,
userDAL,
userGroupMembershipDAL,
orgDAL,
groupProjectDAL,
projectKeyDAL,
projectDAL,
projectBotDAL,
tx: outerTx
}: TAddUsersToGroupByUserIds) => {
const processAddition = async (tx: Knex) => {
const foundMembers = await userDAL.find(
{
$in: {
id: userIds
}
},
{ tx }
);
const foundMembersIdsSet = new Set(foundMembers.map((member) => member.id));
const isCompleteMatch = userIds.every((userId) => foundMembersIdsSet.has(userId));
if (!isCompleteMatch) {
throw new ScimRequestError({
detail: "Members not found",
status: 404
});
}
// check if user(s) group membership(s) already exists
const existingUserGroupMemberships = await userGroupMembershipDAL.find(
{
groupId: group.id,
$in: {
userId: userIds
}
},
{ tx }
);
if (existingUserGroupMemberships.length) {
throw new BadRequestError({
message: `User(s) are already part of the group ${group.slug}`
});
}
// check if all user(s) are part of the organization
const existingUserOrgMemberships = await orgDAL.findMembership(
{
orgId: group.orgId,
$in: {
userId: userIds
}
},
{ tx }
);
const existingUserOrgMembershipsUserIdsSet = new Set(existingUserOrgMemberships.map((u) => u.userId));
userIds.forEach((userId) => {
if (!existingUserOrgMembershipsUserIdsSet.has(userId))
throw new BadRequestError({
message: `User with id ${userId} is not part of the organization`
});
});
const membersToAddToGroupNonPending: TUsers[] = [];
const membersToAddToGroupPending: TUsers[] = [];
foundMembers.forEach((member) => {
if (member.isAccepted) {
// add accepted member to group
membersToAddToGroupNonPending.push(member);
} else {
// add incomplete member to pending group addition
membersToAddToGroupPending.push(member);
}
});
if (membersToAddToGroupNonPending.length) {
await addAcceptedUsersToGroup({
userIds: membersToAddToGroupNonPending.map((member) => member.id),
group,
userDAL,
userGroupMembershipDAL,
groupProjectDAL,
projectKeyDAL,
projectDAL,
projectBotDAL,
tx
});
}
if (membersToAddToGroupPending.length) {
await userGroupMembershipDAL.insertMany(
membersToAddToGroupPending.map((member) => ({
userId: member.id,
groupId: group.id,
isPending: true
})),
tx
);
}
return membersToAddToGroupNonPending.concat(membersToAddToGroupPending);
};
if (outerTx) {
return processAddition(outerTx);
}
return userDAL.transaction(async (tx) => {
return processAddition(tx);
});
};
/**
* Remove users with user ids [userIds] from group [group].
* - Users may belong to the group as accepted members or via pending additions;
* this function handles both cases.
* @param {group} group - group to remove user(s) from
* @param {string[]} userIds - id(s) of user(s) to remove from group
*/
export const removeUsersFromGroupByUserIds = async ({
group,
userIds,
userDAL,
userGroupMembershipDAL,
groupProjectDAL,
projectKeyDAL,
tx: outerTx
}: TRemoveUsersFromGroupByUserIds) => {
const processRemoval = async (tx: Knex) => {
const foundMembers = await userDAL.find({
$in: {
id: userIds
}
});
const foundMembersIdsSet = new Set(foundMembers.map((member) => member.id));
const isCompleteMatch = userIds.every((userId) => foundMembersIdsSet.has(userId));
if (!isCompleteMatch) {
throw new ScimRequestError({
detail: "Members not found",
status: 404
});
}
// check if user group membership already exists
const existingUserGroupMemberships = await userGroupMembershipDAL.find(
{
groupId: group.id,
$in: {
userId: userIds
}
},
{ tx }
);
const existingUserGroupMembershipsUserIdsSet = new Set(existingUserGroupMemberships.map((u) => u.userId));
userIds.forEach((userId) => {
if (!existingUserGroupMembershipsUserIdsSet.has(userId))
throw new BadRequestError({
message: `User(s) are not part of the group ${group.slug}`
});
});
const membersToRemoveFromGroupNonPending: TUsers[] = [];
const membersToRemoveFromGroupPending: TUsers[] = [];
foundMembers.forEach((member) => {
if (member.isAccepted) {
// remove accepted member from group
membersToRemoveFromGroupNonPending.push(member);
} else {
// remove incomplete member from pending group addition
membersToRemoveFromGroupPending.push(member);
}
});
if (membersToRemoveFromGroupNonPending.length) {
// check which projects the group is part of
const projectIds = Array.from(
new Set(
(
await groupProjectDAL.find(
{
groupId: group.id
},
{ tx }
)
).map((gp) => gp.projectId)
)
);
// TODO: this part can be optimized
for await (const userId of userIds) {
const t = await userGroupMembershipDAL.filterProjectsByUserMembership(userId, group.id, projectIds, tx);
const projectsToDeleteKeyFor = projectIds.filter((p) => !t.has(p));
if (projectsToDeleteKeyFor.length) {
await projectKeyDAL.delete(
{
receiverId: userId,
$in: {
projectId: projectsToDeleteKeyFor
}
},
tx
);
}
await userGroupMembershipDAL.delete(
{
groupId: group.id,
userId
},
tx
);
}
}
if (membersToRemoveFromGroupPending.length) {
await userGroupMembershipDAL.delete({
groupId: group.id,
$in: {
userId: membersToRemoveFromGroupPending.map((member) => member.id)
}
});
}
return membersToRemoveFromGroupNonPending.concat(membersToRemoveFromGroupPending);
};
if (outerTx) {
return processRemoval(outerTx);
}
return userDAL.transaction(async (tx) => {
return processRemoval(tx);
});
};
/**
* Convert pending group additions for users with ids [userIds] to group memberships.
* @param {string[]} userIds - id(s) of user(s) to try to convert pending group additions to group memberships
*/
export const convertPendingGroupAdditionsToGroupMemberships = async ({
userIds,
userDAL,
userGroupMembershipDAL,
groupProjectDAL,
projectKeyDAL,
projectDAL,
projectBotDAL,
tx: outerTx
}: TConvertPendingGroupAdditionsToGroupMemberships) => {
const processConversion = async (tx: Knex) => {
const users = await userDAL.find(
{
$in: {
id: userIds
}
},
{ tx }
);
const usersUserIdsSet = new Set(users.map((u) => u.id));
userIds.forEach((userId) => {
if (!usersUserIdsSet.has(userId)) {
throw new BadRequestError({
message: `Failed to find user with id ${userId}`
});
}
});
users.forEach((user) => {
if (!user.isAccepted) {
throw new BadRequestError({
message: `Failed to convert pending group additions to group memberships for user ${user.username} because they have not confirmed their account`
});
}
});
const pendingGroupAdditions = await userGroupMembershipDAL.deletePendingUserGroupMembershipsByUserIds(userIds, tx);
for await (const pendingGroupAddition of pendingGroupAdditions) {
await addAcceptedUsersToGroup({
userIds: [pendingGroupAddition.user.id],
group: pendingGroupAddition.group,
userDAL,
userGroupMembershipDAL,
groupProjectDAL,
projectKeyDAL,
projectDAL,
projectBotDAL,
tx
});
}
};
if (outerTx) {
return processConversion(outerTx);
}
return userDAL.transaction(async (tx) => {
await processConversion(tx);
});
};

Some files were not shown because too many files have changed in this diff.