Compare commits


1 commit

be810ba475 Scaffolding for OIDC (2024-02-29 09:38:50 -08:00)

1321 changed files with 13620 additions and 59250 deletions

@@ -3,6 +3,9 @@
# THIS IS A SAMPLE ENCRYPTION KEY AND SHOULD NEVER BE USED FOR PRODUCTION
ENCRYPTION_KEY=6c1fe4e407b8911c104518103505b218
# Required
DB_CONNECTION_URI=postgres://infisical:infisical@db:5432/infisical
# JWT
# Required secrets to sign JWT tokens
# THIS IS A SAMPLE AUTH_SECRET KEY AND SHOULD NEVER BE USED FOR PRODUCTION
@@ -13,9 +16,6 @@ POSTGRES_PASSWORD=infisical
POSTGRES_USER=infisical
POSTGRES_DB=infisical
# Required
DB_CONNECTION_URI=postgres://${POSTGRES_USER}:${POSTGRES_PASSWORD}@db:5432/${POSTGRES_DB}
# Redis
REDIS_URL=redis://redis:6379

.github/resources/changelog-generator.py (deleted)

@@ -1,190 +0,0 @@
# inspired by https://www.photoroom.com/inside-photoroom/how-we-automated-our-changelog-thanks-to-chatgpt
import os
import requests
import re
from openai import OpenAI
import subprocess
from datetime import datetime
import uuid
# Constants
REPO_OWNER = "infisical"
REPO_NAME = "infisical"
TOKEN = os.environ["GITHUB_TOKEN"]
SLACK_WEBHOOK_URL = os.environ["SLACK_WEBHOOK_URL"]
OPENAI_API_KEY = os.environ["OPENAI_API_KEY"]
SLACK_MSG_COLOR = "#36a64f"
headers = {
    "Authorization": f"Bearer {TOKEN}",
    "Accept": "application/vnd.github+json",
    "X-GitHub-Api-Version": "2022-11-28",
}
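# GitHub Actions multiline outputs are written to the GITHUB_OUTPUT file using
# heredoc-style delimiters (name<<DELIM ... DELIM); a UUID delimiter keeps
# values that contain newlines from terminating the block early.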
def set_multiline_output(name, value):
    with open(os.environ['GITHUB_OUTPUT'], 'a') as fh:
        delimiter = uuid.uuid1()
        print(f'{name}<<{delimiter}', file=fh)
        print(value, file=fh)
        print(delimiter, file=fh)
def post_changelog_to_slack(changelog, tag):
    slack_payload = {
        "text": "Hey team, it's changelog time! :wave:",
        "attachments": [
            {
                "color": SLACK_MSG_COLOR,
                "title": f"🗓Infisical Changelog - {tag}",
                "text": changelog,
            }
        ],
    }
    response = requests.post(SLACK_WEBHOOK_URL, json=slack_payload)
    if response.status_code != 200:
        raise Exception("Failed to post changelog to Slack.")
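# Walk back through the tag history (git describe on <tag>^) until a tag from
# the "infisical/" release line is found, skipping tags from other lines.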
def find_previous_release_tag(release_tag: str):
    previous_tag = subprocess.check_output(["git", "describe", "--tags", "--abbrev=0", f"{release_tag}^"]).decode("utf-8").strip()
    while not previous_tag.startswith("infisical/"):
        previous_tag = subprocess.check_output(["git", "describe", "--tags", "--abbrev=0", f"{previous_tag}^"]).decode("utf-8").strip()
    return previous_tag
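# Note: this assumes lightweight tags, i.e. the tag ref points directly at a
# commit SHA that can be passed straight to the commits endpoint.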
def get_tag_creation_date(tag_name):
    url = f"https://api.github.com/repos/{REPO_OWNER}/{REPO_NAME}/git/refs/tags/{tag_name}"
    response = requests.get(url, headers=headers)
    response.raise_for_status()
    commit_sha = response.json()['object']['sha']

    commit_url = f"https://api.github.com/repos/{REPO_OWNER}/{REPO_NAME}/commits/{commit_sha}"
    commit_response = requests.get(commit_url, headers=headers)
    commit_response.raise_for_status()
    creation_date = commit_response.json()['commit']['author']['date']

    return datetime.strptime(creation_date, '%Y-%m-%dT%H:%M:%SZ')
def fetch_prs_between_tags(previous_tag_date: datetime, release_tag_date: datetime):
    # Use GitHub API to fetch PRs merged between the commits
    url = f"https://api.github.com/repos/{REPO_OWNER}/{REPO_NAME}/pulls?state=closed&merged=true&per_page=100"
    response = requests.get(url, headers=headers)
    if response.status_code != 200:
        raise Exception("Error fetching PRs from GitHub API!")

    prs = []
    for pr in response.json():
        # tags are cut frequently, so the most recent 100 closed PRs are enough;
        # filter them down to the window between the two tag creation dates
        if pr["merged_at"] and previous_tag_date < datetime.strptime(pr["merged_at"], '%Y-%m-%dT%H:%M:%SZ') < release_tag_date:
            prs.append(pr)

    return prs
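# Branch names such as "web-1234-fix-rotation" embed Linear issue refs; pull
# them out so each changelog entry can link back to its issue(s).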
def extract_commit_details_from_prs(prs):
    commit_details = []
    for pr in prs:
        commit_message = pr["title"]
        commit_url = pr["html_url"]
        pr_number = pr["number"]
        branch_name = pr["head"]["ref"]
        issue_numbers = re.findall(r"(www-\d+|web-\d+)", branch_name)

        # If no issue numbers are found, add the PR details without issue numbers and URLs
        if not issue_numbers:
            commit_details.append(
                {
                    "message": commit_message,
                    "pr_number": pr_number,
                    "pr_url": commit_url,
                    "issue_number": None,
                    "issue_url": None,
                }
            )
            continue

        for issue in issue_numbers:
            commit_details.append(
                {
                    "message": commit_message,
                    "pr_number": pr_number,
                    "pr_url": commit_url,
                    "issue_number": issue,
                }
            )

    return commit_details
# Function to generate changelog using OpenAI
def generate_changelog_with_openai(commit_details):
    commit_messages = []
    for details in commit_details:
        base_message = f"{details['pr_url']} - {details['message']}"
        # Add the issue URL if available
        # if details["issue_url"]:
        #     base_message += f" (Linear Issue: {details['issue_url']})"
        commit_messages.append(base_message)
    commit_list = "\n".join(commit_messages)

    prompt = """
Generate a changelog for Infisical, an open-source SecretOps platform.
The changelog should:
1. Be Informative: Using the provided list of GitHub commits, break them down into categories such as Features, Fixes & Improvements, and Technical Updates. Summarize each commit concisely, ensuring the key points are highlighted.
2. Have a Professional yet Friendly tone: The tone should be balanced, not too corporate or too informal.
3. Celebratory Introduction and Conclusion: Start the changelog with a celebratory note acknowledging the team's hard work and progress. End with a shoutout to the team and wishes for a pleasant weekend.
4. Formatting: you cannot use Markdown formatting, and you can only use emojis for the introductory paragraph or the conclusion paragraph, nowhere else.
5. Links: the syntax to create links is the following: `<http://www.example.com|This message is a link>`.
6. Linear Links: note that the Linear link is optional, include it only if provided.
7. Do not wrap your answer in a codeblock. Just output the text, nothing else.
Here's a good example to follow, please try to match the formatting as closely as possible, only changing the content of the changelog and have some liberty with the introduction. Notice the importance of the formatting of a changelog item:
- <https://github.com/facebook/react/pull/27304/%7C#27304>: We optimize our ci to strip comments and minify production builds. (<https://linear.app/example/issue/WEB-1234/%7CWEB-1234>)
And here's an example of the full changelog:
*Features*
• <https://github.com/facebook/react/pull/27304/%7C#27304>: We optimize our ci to strip comments and minify production builds. (<https://linear.app/example/issue/WEB-1234/%7CWEB-1234>)
*Fixes & Improvements*
• <https://github.com/facebook/react/pull/27304/%7C#27304>: We optimize our ci to strip comments and minify production builds. (<https://linear.app/example/issue/WEB-1234/%7CWEB-1234>)
*Technical Updates*
• <https://github.com/facebook/react/pull/27304/%7C#27304>: We optimize our ci to strip comments and minify production builds. (<https://linear.app/example/issue/WEB-1234/%7CWEB-1234>)
Stay tuned for more exciting updates coming soon!
And here are the commits:
{}
""".format(commit_list)

    client = OpenAI(api_key=OPENAI_API_KEY)
    messages = [{"role": "user", "content": prompt}]
    response = client.chat.completions.create(model="gpt-3.5-turbo", messages=messages)

    # with the v1 OpenAI client, API failures raise exceptions, so only guard
    # against an empty completion here
    if response.choices[0].message.content is None:
        raise Exception("Error generating changelog with OpenAI!")

    return response.choices[0].message.content.strip()
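# Entrypoint: resolve the two most recent "infisical/" release tags, collect
# the PRs merged between them, and post the generated changelog to Slack.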
if __name__ == "__main__":
    try:
        # Get the latest and previous release tags
        latest_tag = subprocess.check_output(["git", "describe", "--tags", "--abbrev=0"]).decode("utf-8").strip()
        previous_tag = find_previous_release_tag(latest_tag)

        latest_tag_date = get_tag_creation_date(latest_tag)
        previous_tag_date = get_tag_creation_date(previous_tag)

        prs = fetch_prs_between_tags(previous_tag_date, latest_tag_date)
        pr_details = extract_commit_details_from_prs(prs)

        # Generate changelog
        changelog = generate_changelog_with_openai(pr_details)

        post_changelog_to_slack(changelog, latest_tag)
        # Print or post changelog to Slack
        # set_multiline_output("changelog", changelog)
    except Exception as e:
        print(str(e))

.github/resources/rename_migration_files.py (deleted)

@@ -1,26 +0,0 @@
import os
from datetime import datetime, timedelta, timezone


def rename_migrations():
    migration_folder = "./backend/src/db/migrations"
    with open("added_files.txt", "r") as file:
        changed_files = file.readlines()

    # Start from the current UTC time and bump each new file by one second so
    # the renamed migrations keep a unique, strictly ordered timestamp prefix.
    latest_timestamp = datetime.now(timezone.utc)
    for file_path in changed_files:
        file_path = file_path.strip()
        latest_timestamp = latest_timestamp + timedelta(seconds=1)
        # keep everything after the first underscore (the migration name)
        new_filename = os.path.join(migration_folder, latest_timestamp.strftime("%Y%m%d%H%M%S") + f"_{file_path.split('_', 1)[1]}")
        old_filename = os.path.join(migration_folder, file_path)
        os.rename(old_filename, new_filename)
        print(f"Renamed {old_filename} to {new_filename}")

    if len(changed_files) == 0:
        print("No new files added to migration folder")


if __name__ == "__main__":
    rename_migrations()

.github/values.yaml (15 changes)

@@ -13,10 +13,11 @@ fullnameOverride: ""
##
infisical:
autoDatabaseSchemaMigration: false
## @param backend.enabled Enable backend
##
enabled: false
## @param backend.name Backend name
##
name: infisical
replicaCount: 3
image:
@@ -27,7 +28,7 @@ infisical:
deploymentAnnotations:
secrets.infisical.com/auto-reload: "true"
kubeSecretRef: "managed-secret"
kubeSecretRef: "infisical-gamma-secrets"
ingress:
## @param ingress.enabled Enable ingress
@@ -49,9 +50,3 @@ ingress:
- secretName: letsencrypt-prod
hosts:
- gamma.infisical.com
postgresql:
enabled: false
redis:
enabled: false

@@ -41,7 +41,6 @@ jobs:
load: true
context: backend
tags: infisical/infisical:test
platforms: linux/amd64,linux/arm64
- name: ⏻ Spawn backend container and dependencies
run: |
docker compose -f .github/resources/docker-compose.be-test.yml up --wait --quiet-pull
@@ -93,7 +92,6 @@ jobs:
project: 64mmf0n610
context: frontend
tags: infisical/frontend:test
platforms: linux/amd64,linux/arm64
build-args: |
POSTHOG_API_KEY=${{ secrets.PUBLIC_POSTHOG_API_KEY }}
NEXT_INFISICAL_PLATFORM_VERSION=${{ steps.extract_version.outputs.version }}

@@ -1,38 +0,0 @@
name: Build patroni
on: [workflow_dispatch]
jobs:
patroni-image:
name: Build patroni
runs-on: ubuntu-latest
steps:
- name: ☁️ Checkout source
uses: actions/checkout@v3
with:
repository: 'zalando/patroni'
- name: Save commit hashes for tag
id: commit
uses: pr-mpt/actions-commit-hash@v2
- name: 🔧 Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- name: 🐋 Login to Docker Hub
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Set up Depot CLI
uses: depot/setup-action@v1
- name: 🏗️ Build backend and push to docker hub
uses: depot/build-push-action@v1
with:
project: 64mmf0n610
token: ${{ secrets.DEPOT_PROJECT_TOKEN }}
push: true
context: .
file: Dockerfile
tags: |
infisical/patroni:${{ steps.commit.outputs.short }}
infisical/patroni:latest
platforms: linux/amd64,linux/arm64

@@ -1,140 +0,0 @@
name: Deployment pipeline
on: [workflow_dispatch]
permissions:
id-token: write
contents: read
jobs:
infisical-image:
name: Build backend image
runs-on: ubuntu-latest
steps:
- name: ☁️ Checkout source
uses: actions/checkout@v3
- name: 📦 Install dependencies to test all dependencies
run: npm ci --only-production
working-directory: backend
- name: Save commit hashes for tag
id: commit
uses: pr-mpt/actions-commit-hash@v2
- name: 🔧 Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- name: 🐋 Login to Docker Hub
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Set up Depot CLI
uses: depot/setup-action@v1
- name: 🏗️ Build backend and push to docker hub
uses: depot/build-push-action@v1
with:
project: 64mmf0n610
token: ${{ secrets.DEPOT_PROJECT_TOKEN }}
push: true
context: .
file: Dockerfile.standalone-infisical
tags: |
infisical/staging_infisical:${{ steps.commit.outputs.short }}
infisical/staging_infisical:latest
platforms: linux/amd64,linux/arm64
build-args: |
POSTHOG_API_KEY=${{ secrets.PUBLIC_POSTHOG_API_KEY }}
INFISICAL_PLATFORM_VERSION=${{ steps.commit.outputs.short }}
gamma-deployment:
name: Deploy to gamma
runs-on: ubuntu-latest
needs: [infisical-image]
environment:
name: Gamma
steps:
- name: Checkout code
uses: actions/checkout@v2
- name: Setup Node.js environment
uses: actions/setup-node@v2
with:
node-version: "20"
- name: Change directory to backend and install dependencies
env:
DB_CONNECTION_URI: ${{ secrets.DB_CONNECTION_URI }}
run: |
cd backend
npm install
npm run migration:latest
- name: Configure AWS Credentials
uses: aws-actions/configure-aws-credentials@v4
with:
audience: sts.amazonaws.com
aws-region: us-east-1
role-to-assume: arn:aws:iam::905418227878:role/deploy-new-ecs-img
- name: Save commit hashes for tag
id: commit
uses: pr-mpt/actions-commit-hash@v2
- name: Download task definition
run: |
aws ecs describe-task-definition --task-definition infisical-prod-platform --query taskDefinition > task-definition.json
- name: Render Amazon ECS task definition
id: render-web-container
uses: aws-actions/amazon-ecs-render-task-definition@v1
with:
task-definition: task-definition.json
container-name: infisical-prod-platform
image: infisical/staging_infisical:${{ steps.commit.outputs.short }}
environment-variables: "LOG_LEVEL=info"
- name: Deploy to Amazon ECS service
uses: aws-actions/amazon-ecs-deploy-task-definition@v1
with:
task-definition: ${{ steps.render-web-container.outputs.task-definition }}
service: infisical-prod-platform
cluster: infisical-prod-platform
wait-for-service-stability: true
production-postgres-deployment:
name: Deploy to production
runs-on: ubuntu-latest
needs: [gamma-deployment]
environment:
name: Production
steps:
- name: Checkout code
uses: actions/checkout@v2
- name: Setup Node.js environment
uses: actions/setup-node@v2
with:
node-version: "20"
- name: Change directory to backend and install dependencies
env:
DB_CONNECTION_URI: ${{ secrets.DB_CONNECTION_URI }}
run: |
cd backend
npm install
npm run migration:latest
- name: Configure AWS Credentials
uses: aws-actions/configure-aws-credentials@v4
with:
audience: sts.amazonaws.com
aws-region: us-east-1
role-to-assume: arn:aws:iam::381492033652:role/gha-make-prod-deployment
- name: Save commit hashes for tag
id: commit
uses: pr-mpt/actions-commit-hash@v2
- name: Download task definition
run: |
aws ecs describe-task-definition --task-definition infisical-prod-platform --query taskDefinition > task-definition.json
- name: Render Amazon ECS task definition
id: render-web-container
uses: aws-actions/amazon-ecs-render-task-definition@v1
with:
task-definition: task-definition.json
container-name: infisical-prod-platform
image: infisical/staging_infisical:${{ steps.commit.outputs.short }}
environment-variables: "LOG_LEVEL=info"
- name: Deploy to Amazon ECS service
uses: aws-actions/amazon-ecs-deploy-task-definition@v1
with:
task-definition: ${{ steps.render-web-container.outputs.task-definition }}
service: infisical-prod-platform
cluster: infisical-prod-platform
wait-for-service-stability: true

@@ -0,0 +1,120 @@
name: Build, Publish and Deploy to Gamma
on: [workflow_dispatch]
jobs:
infisical-image:
name: Build backend image
runs-on: ubuntu-latest
steps:
- name: ☁️ Checkout source
uses: actions/checkout@v3
- name: 📦 Install dependencies to test all dependencies
run: npm ci --only-production
working-directory: backend
# - name: 🧪 Run tests
# run: npm run test:ci
# working-directory: backend
- name: Save commit hashes for tag
id: commit
uses: pr-mpt/actions-commit-hash@v2
- name: 🔧 Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- name: 🐋 Login to Docker Hub
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Set up Depot CLI
uses: depot/setup-action@v1
- name: 📦 Build backend and export to Docker
uses: depot/build-push-action@v1
with:
project: 64mmf0n610
token: ${{ secrets.DEPOT_PROJECT_TOKEN }}
load: true
context: .
file: Dockerfile.standalone-infisical
tags: infisical/infisical:test
# - name: ⏻ Spawn backend container and dependencies
# run: |
# docker compose -f .github/resources/docker-compose.be-test.yml up --wait --quiet-pull
# - name: 🧪 Test backend image
# run: |
# ./.github/resources/healthcheck.sh infisical-backend-test
# - name: ⏻ Shut down backend container and dependencies
# run: |
# docker compose -f .github/resources/docker-compose.be-test.yml down
- name: 🏗️ Build backend and push
uses: depot/build-push-action@v1
with:
project: 64mmf0n610
token: ${{ secrets.DEPOT_PROJECT_TOKEN }}
push: true
context: .
file: Dockerfile.standalone-infisical
tags: |
infisical/staging_infisical:${{ steps.commit.outputs.short }}
infisical/staging_infisical:latest
platforms: linux/amd64,linux/arm64
build-args: |
POSTHOG_API_KEY=${{ secrets.PUBLIC_POSTHOG_API_KEY }}
INFISICAL_PLATFORM_VERSION=${{ steps.extract_version.outputs.version }}
postgres-migration:
name: Run latest migration files
runs-on: ubuntu-latest
needs: [infisical-image]
steps:
- name: Checkout code
uses: actions/checkout@v2
- name: Setup Node.js environment
uses: actions/setup-node@v2
with:
node-version: "20"
- name: Change directory to backend and install dependencies
env:
DB_CONNECTION_URI: ${{ secrets.DB_CONNECTION_URI }}
run: |
cd backend
npm install
npm run migration:latest
# - name: Run postgres DB migration files
# env:
# DB_CONNECTION_URI: ${{ secrets.DB_CONNECTION_URI }}
# run: npm run migration:latest
gamma-deployment:
name: Deploy to gamma
runs-on: ubuntu-latest
needs: [postgres-migration]
steps:
- name: ☁️ Checkout source
uses: actions/checkout@v3
- name: Install Helm
uses: azure/setup-helm@v3
with:
version: v3.10.0
- name: Install infisical helm chart
run: |
helm repo add infisical-helm-charts 'https://dl.cloudsmith.io/public/infisical/helm-charts/helm/charts/'
helm repo update
- name: Install kubectl
uses: azure/setup-kubectl@v3
- name: Install doctl
uses: digitalocean/action-doctl@v2
with:
token: ${{ secrets.DIGITALOCEAN_ACCESS_TOKEN }}
- name: Save DigitalOcean kubeconfig with short-lived credentials
run: doctl kubernetes cluster kubeconfig save --expiry-seconds 600 infisical-gamma-postgres
- name: switch to gamma namespace
run: kubectl config set-context --current --namespace=gamma
- name: test kubectl
run: kubectl get ingress
- name: Download helm values to file and upgrade gamma deploy
run: |
wget https://raw.githubusercontent.com/Infisical/infisical/main/.github/values.yaml
helm upgrade infisical infisical-helm-charts/infisical-standalone --values values.yaml --wait --install
if [[ $(helm status infisical) == *"FAILED"* ]]; then
echo "Helm upgrade failed"
exit 1
else
echo "Helm upgrade was successful"
fi

@@ -5,7 +5,6 @@ on:
types: [opened, synchronize]
paths:
- "backend/src/server/routes/**"
- "backend/src/ee/routes/**"
jobs:
check-be-api-changes:

@@ -1,34 +0,0 @@
name: Generate Changelog
permissions:
contents: write
on:
workflow_dispatch:
push:
tags:
- "infisical/v*.*.*-postgres"
jobs:
generate_changelog:
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v4
with:
fetch-tags: true
fetch-depth: 0
- name: Set up Python
uses: actions/setup-python@v5
with:
python-version: "3.12.0"
- name: Install dependencies
run: |
python -m pip install --upgrade pip
pip install requests openai
- name: Generate Changelog and Post to Slack
id: gen-changelog
run: python .github/resources/changelog-generator.py
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}

@@ -1,72 +1,58 @@
name: Build and release CLI
on:
workflow_dispatch:
push:
# run only against tags
tags:
- "infisical-cli/v*.*.*"
push:
# run only against tags
tags:
- "infisical-cli/v*.*.*"
permissions:
contents: write
# packages: write
# issues: write
jobs:
cli-integration-tests:
name: Run tests before deployment
uses: ./.github/workflows/run-cli-tests.yml
secrets:
CLI_TESTS_UA_CLIENT_ID: ${{ secrets.CLI_TESTS_UA_CLIENT_ID }}
CLI_TESTS_UA_CLIENT_SECRET: ${{ secrets.CLI_TESTS_UA_CLIENT_SECRET }}
CLI_TESTS_SERVICE_TOKEN: ${{ secrets.CLI_TESTS_SERVICE_TOKEN }}
CLI_TESTS_PROJECT_ID: ${{ secrets.CLI_TESTS_PROJECT_ID }}
CLI_TESTS_ENV_SLUG: ${{ secrets.CLI_TESTS_ENV_SLUG }}
contents: write
# packages: write
# issues: write
goreleaser:
runs-on: ubuntu-20.04
needs: [cli-integration-tests]
steps:
- uses: actions/checkout@v3
with:
fetch-depth: 0
- name: 🐋 Login to Docker Hub
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: 🔧 Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- run: git fetch --force --tags
- run: echo "Ref name ${{github.ref_name}}"
- uses: actions/setup-go@v3
with:
go-version: ">=1.19.3"
cache: true
cache-dependency-path: cli/go.sum
- name: libssl1.1 => libssl1.0-dev for OSXCross
run: |
echo 'deb http://security.ubuntu.com/ubuntu bionic-security main' | sudo tee -a /etc/apt/sources.list
sudo apt update && apt-cache policy libssl1.0-dev
sudo apt-get install libssl1.0-dev
- name: OSXCross for CGO Support
run: |
mkdir ../../osxcross
git clone https://github.com/plentico/osxcross-target.git ../../osxcross/target
- uses: goreleaser/goreleaser-action@v4
with:
distribution: goreleaser-pro
version: latest
args: release --clean
env:
GITHUB_TOKEN: ${{ secrets.GO_RELEASER_GITHUB_TOKEN }}
POSTHOG_API_KEY_FOR_CLI: ${{ secrets.POSTHOG_API_KEY_FOR_CLI }}
FURY_TOKEN: ${{ secrets.FURYPUSHTOKEN }}
AUR_KEY: ${{ secrets.AUR_KEY }}
GORELEASER_KEY: ${{ secrets.GORELEASER_KEY }}
- uses: actions/setup-python@v4
- run: pip install --upgrade cloudsmith-cli
- name: Publish to CloudSmith
run: sh cli/upload_to_cloudsmith.sh
env:
CLOUDSMITH_API_KEY: ${{ secrets.CLOUDSMITH_API_KEY }}
jobs:
goreleaser:
runs-on: ubuntu-20.04
steps:
- uses: actions/checkout@v3
with:
fetch-depth: 0
- name: 🐋 Login to Docker Hub
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- run: git fetch --force --tags
- run: echo "Ref name ${{github.ref_name}}"
- uses: actions/setup-go@v3
with:
go-version: ">=1.19.3"
cache: true
cache-dependency-path: cli/go.sum
- name: libssl1.1 => libssl1.0-dev for OSXCross
run: |
echo 'deb http://security.ubuntu.com/ubuntu bionic-security main' | sudo tee -a /etc/apt/sources.list
sudo apt update && apt-cache policy libssl1.0-dev
sudo apt-get install libssl1.0-dev
- name: OSXCross for CGO Support
run: |
mkdir ../../osxcross
git clone https://github.com/plentico/osxcross-target.git ../../osxcross/target
- uses: goreleaser/goreleaser-action@v4
with:
distribution: goreleaser-pro
version: latest
args: release --clean
env:
GITHUB_TOKEN: ${{ secrets.GO_RELEASER_GITHUB_TOKEN }}
POSTHOG_API_KEY_FOR_CLI: ${{ secrets.POSTHOG_API_KEY_FOR_CLI }}
FURY_TOKEN: ${{ secrets.FURYPUSHTOKEN }}
AUR_KEY: ${{ secrets.AUR_KEY }}
GORELEASER_KEY: ${{ secrets.GORELEASER_KEY }}
- uses: actions/setup-python@v4
- run: pip install --upgrade cloudsmith-cli
- name: Publish to CloudSmith
run: sh cli/upload_to_cloudsmith.sh
env:
CLOUDSMITH_API_KEY: ${{ secrets.CLOUDSMITH_API_KEY }}

@@ -1,47 +0,0 @@
name: Go CLI Tests
on:
pull_request:
types: [opened, synchronize]
paths:
- "cli/**"
workflow_dispatch:
workflow_call:
secrets:
CLI_TESTS_UA_CLIENT_ID:
required: true
CLI_TESTS_UA_CLIENT_SECRET:
required: true
CLI_TESTS_SERVICE_TOKEN:
required: true
CLI_TESTS_PROJECT_ID:
required: true
CLI_TESTS_ENV_SLUG:
required: true
jobs:
test:
defaults:
run:
working-directory: ./cli
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Setup Go
uses: actions/setup-go@v4
with:
go-version: "1.21.x"
- name: Install dependencies
run: go get .
- name: Test with the Go CLI
env:
CLI_TESTS_UA_CLIENT_ID: ${{ secrets.CLI_TESTS_UA_CLIENT_ID }}
CLI_TESTS_UA_CLIENT_SECRET: ${{ secrets.CLI_TESTS_UA_CLIENT_SECRET }}
CLI_TESTS_SERVICE_TOKEN: ${{ secrets.CLI_TESTS_SERVICE_TOKEN }}
CLI_TESTS_PROJECT_ID: ${{ secrets.CLI_TESTS_PROJECT_ID }}
CLI_TESTS_ENV_SLUG: ${{ secrets.CLI_TESTS_ENV_SLUG }}
run: go test -v -count=1 ./test

@@ -1,59 +0,0 @@
name: Rename Migrations
on:
pull_request:
types: [closed]
paths:
- 'backend/src/db/migrations/**'
jobs:
rename:
runs-on: ubuntu-latest
if: github.event.pull_request.merged == true
steps:
- name: Check out repository
uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Get list of newly added files in migration folder
run: |
git diff --name-status HEAD^ HEAD backend/src/db/migrations | grep '^A' | cut -f2 | xargs -n1 basename > added_files.txt
if [ ! -s added_files.txt ]; then
echo "No new files added. Skipping"
echo "SKIP_RENAME=true" >> $GITHUB_ENV
fi
- name: Script to rename migrations
if: env.SKIP_RENAME != 'true'
run: python .github/resources/rename_migration_files.py
- name: Commit and push changes
if: env.SKIP_RENAME != 'true'
run: |
git config user.name github-actions
git config user.email github-actions@github.com
git add ./backend/src/db/migrations
rm added_files.txt
git commit -m "chore: renamed new migration files to latest timestamp (gh-action)"
- name: Get PR details
id: pr_details
run: |
PR_NUMBER=${{ github.event.pull_request.number }}
PR_MERGER=$(curl -s "https://api.github.com/repos/${{ github.repository }}/pulls/$PR_NUMBER" | jq -r '.merged_by.login')
echo "PR Number: $PR_NUMBER"
echo "PR Merger: $PR_MERGER"
echo "pr_merger=$PR_MERGER" >> $GITHUB_OUTPUT
- name: Create Pull Request
if: env.SKIP_RENAME != 'true'
uses: peter-evans/create-pull-request@v6
with:
token: ${{ secrets.GITHUB_TOKEN }}
commit-message: 'chore: renamed new migration files to latest UTC (gh-action)'
title: 'GH Action: rename new migration file timestamp'
branch-suffix: timestamp
reviewers: ${{ steps.pr_details.outputs.pr_merger }}

.gitignore (4 changes)

@@ -59,13 +59,9 @@ yarn-error.log*
# Infisical init
.infisical.json
.infisicalignore
# Editor specific
.vscode/*
frontend-build
*.tgz
cli/infisical-merge
cli/test/infisical-merge

@@ -190,34 +190,10 @@ dockers:
- dockerfile: docker/alpine
goos: linux
goarch: amd64
use: buildx
ids:
- all-other-builds
image_templates:
- "infisical/cli:{{ .Major }}.{{ .Minor }}.{{ .Patch }}-amd64"
- "infisical/cli:latest-amd64"
build_flag_templates:
- "--pull"
- "--platform=linux/amd64"
- dockerfile: docker/alpine
goos: linux
goarch: amd64
use: buildx
ids:
- all-other-builds
image_templates:
- "infisical/cli:{{ .Major }}.{{ .Minor }}.{{ .Patch }}-arm64"
- "infisical/cli:latest-arm64"
build_flag_templates:
- "--pull"
- "--platform=linux/arm64"
docker_manifests:
- name_template: "infisical/cli:{{ .Major }}.{{ .Minor }}.{{ .Patch }}"
image_templates:
- "infisical/cli:{{ .Major }}.{{ .Minor }}.{{ .Patch }}-amd64"
- "infisical/cli:{{ .Major }}.{{ .Minor }}.{{ .Patch }}-arm64"
- name_template: "infisical/cli:latest"
image_templates:
- "infisical/cli:latest-amd64"
- "infisical/cli:latest-arm64"
- "infisical/cli:{{ .Version }}"
- "infisical/cli:{{ .Major }}.{{ .Minor }}"
- "infisical/cli:{{ .Major }}"
- "infisical/cli:latest"

@@ -1,7 +1 @@
.github/resources/docker-compose.be-test.yml:generic-api-key:16
frontend/src/views/Project/MembersPage/components/IdentityTab/components/IdentityRoleForm/IdentityRbacSection.tsx:generic-api-key:206
frontend/src/views/Project/MembersPage/components/IdentityTab/components/IdentityRoleForm/SpecificPrivilegeSection.tsx:generic-api-key:304
frontend/src/views/Project/MembersPage/components/MemberListTab/MemberRoleForm/MemberRbacSection.tsx:generic-api-key:206
frontend/src/views/Project/MembersPage/components/MemberListTab/MemberRoleForm/SpecificPrivilegeSection.tsx:generic-api-key:292
docs/self-hosting/configuration/envars.mdx:generic-api-key:106
frontend/src/views/Project/MembersPage/components/MemberListTab/MemberRoleForm/SpecificPrivilegeSection.tsx:generic-api-key:451

@@ -1,7 +1,6 @@
ARG POSTHOG_HOST=https://app.posthog.com
ARG POSTHOG_API_KEY=posthog-api-key
ARG INTERCOM_ID=intercom-id
ARG SAML_ORG_SLUG=saml-org-slug-default
FROM node:20-alpine AS base
@@ -36,8 +35,6 @@ ARG INTERCOM_ID
ENV NEXT_PUBLIC_INTERCOM_ID $INTERCOM_ID
ARG INFISICAL_PLATFORM_VERSION
ENV NEXT_PUBLIC_INFISICAL_PLATFORM_VERSION $INFISICAL_PLATFORM_VERSION
ARG SAML_ORG_SLUG
ENV NEXT_PUBLIC_SAML_ORG_SLUG=$SAML_ORG_SLUG
# Build
RUN npm run build
@@ -103,9 +100,6 @@ ENV NEXT_PUBLIC_POSTHOG_API_KEY=$POSTHOG_API_KEY \
ARG INTERCOM_ID=intercom-id
ENV NEXT_PUBLIC_INTERCOM_ID=$INTERCOM_ID \
BAKED_NEXT_PUBLIC_INTERCOM_ID=$INTERCOM_ID
ARG SAML_ORG_SLUG
ENV NEXT_PUBLIC_SAML_ORG_SLUG=$SAML_ORG_SLUG \
BAKED_NEXT_PUBLIC_SAML_ORG_SLUG=$SAML_ORG_SLUG
WORKDIR /
@@ -124,6 +118,9 @@ WORKDIR /backend
ENV TELEMETRY_ENABLED true
HEALTHCHECK --interval=10s --timeout=3s --start-period=10s \
CMD node healthcheck.js
EXPOSE 8080
EXPOSE 443

@@ -7,11 +7,8 @@ push:
up-dev:
docker compose -f docker-compose.dev.yml up --build
up-dev-ldap:
docker compose -f docker-compose.dev.yml --profile ldap up --build
up-prod:
docker-compose -f docker-compose.prod.yml up --build
down:
docker compose -f docker-compose.dev.yml down
docker-compose down

@@ -10,8 +10,7 @@
<a href="https://infisical.com/">Infisical Cloud</a> |
<a href="https://infisical.com/docs/self-hosting/overview">Self-Hosting</a> |
<a href="https://infisical.com/docs/documentation/getting-started/introduction">Docs</a> |
<a href="https://www.infisical.com">Website</a> |
<a href="https://infisical.com/careers">Hiring (Remote/SF)</a>
<a href="https://www.infisical.com">Website</a>
</h4>
<p align="center">
@@ -76,7 +75,7 @@ Check out the [Quickstart Guides](https://infisical.com/docs/getting-started/int
| Use Infisical Cloud | Deploy Infisical on premise |
| ------------------------------------------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
| The fastest and most reliable way to <br> get started with Infisical is signing up <br> for free to [Infisical Cloud](https://app.infisical.com/login). | <br> View all [deployment options](https://infisical.com/docs/self-hosting/overview) |
| The fastest and most reliable way to <br> get started with Infisical is signing up <br> for free to [Infisical Cloud](https://app.infisical.com/login). | <a href="https://infisical.com/docs/self-hosting/deployment-options/aws-ec2"><img src=".github/images/deploy-to-aws.png" width="150" width="300" /></a> <a href="https://infisical.com/docs/self-hosting/deployment-options/digital-ocean-marketplace" alt="Deploy to DigitalOcean"> <img width="217" alt="Deploy to DO" src="https://www.deploytodo.com/do-btn-blue.svg"/> </a> <br> View all [deployment options](https://infisical.com/docs/self-hosting/overview) |
### Run Infisical locally

@@ -23,17 +23,16 @@ module.exports = {
root: true,
overrides: [
{
files: ["./e2e-test/**/*", "./src/db/migrations/**/*"],
files: ["./e2e-test/**/*"],
rules: {
"@typescript-eslint/no-unsafe-member-access": "off",
"@typescript-eslint/no-unsafe-assignment": "off",
"@typescript-eslint/no-unsafe-argument": "off",
"@typescript-eslint/no-unsafe-return": "off",
"@typescript-eslint/no-unsafe-call": "off"
"@typescript-eslint/no-unsafe-call": "off",
}
}
],
rules: {
"@typescript-eslint/no-empty-function": "off",
"@typescript-eslint/no-unsafe-enum-comparison": "off",

@@ -1,30 +0,0 @@
import { TKeyStoreFactory } from "@app/keystore/keystore";
export const mockKeyStore = (): TKeyStoreFactory => {
const store: Record<string, string | number | Buffer> = {};
return {
setItem: async (key, value) => {
store[key] = value;
return "OK";
},
setItemWithExpiry: async (key, value) => {
store[key] = value;
return "OK";
},
deleteItem: async (key) => {
delete store[key];
return 1;
},
getItem: async (key) => {
const value = store[key];
if (typeof value === "string") {
return value;
}
return null;
},
incrementBy: async () => {
return 1;
}
};
};
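The deleted mock above backs the key-store interface with a plain in-memory record. As a rough sketch only (the import path is assumed from the deleted file, and the surrounding test harness is not shown), it could be exercised like this:

import { mockKeyStore } from "./mocks/keystore"; // path assumed from the deleted file above

async function demoKeyStore() {
  const keyStore = mockKeyStore();
  // values round-trip through the in-memory record
  await keyStore.setItem("signup-token:user-1", "abc123");
  console.assert((await keyStore.getItem("signup-token:user-1")) === "abc123");
  // deleteItem reports one removed entry, matching the mock's fixed return value
  console.assert((await keyStore.deleteItem("signup-token:user-1")) === 1);
}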

@@ -46,7 +46,7 @@ const deleteSecretImport = async (id: string) => {
describe("Secret Import Router", async () => {
test.each([
{ importEnv: "prod", importPath: "/" }, // one in root
{ importEnv: "dev", importPath: "/" }, // one in root
{ importEnv: "staging", importPath: "/" } // then create a deep one creating intermediate ones
])("Create secret import $importEnv with path $importPath", async ({ importPath, importEnv }) => {
// check for default environments
@@ -66,7 +66,7 @@ describe("Secret Import Router", async () => {
});
test("Get secret imports", async () => {
const createdImport1 = await createSecretImport("/", "prod");
const createdImport1 = await createSecretImport("/", "dev");
const createdImport2 = await createSecretImport("/", "staging");
const res = await testServer.inject({
method: "GET",
@@ -103,10 +103,10 @@ });
});
test("Update secret import position", async () => {
const prodImportDetails = { path: "/", envSlug: "prod" };
const devImportDetails = { path: "/", envSlug: "dev" };
const stagingImportDetails = { path: "/", envSlug: "staging" };
const createdImport1 = await createSecretImport(prodImportDetails.path, prodImportDetails.envSlug);
const createdImport1 = await createSecretImport(devImportDetails.path, devImportDetails.envSlug);
const createdImport2 = await createSecretImport(stagingImportDetails.path, stagingImportDetails.envSlug);
const updateImportRes = await testServer.inject({
@@ -136,7 +136,7 @@ describe("Secret Import Router", async () => {
position: 2,
importEnv: expect.objectContaining({
name: expect.any(String),
slug: expect.stringMatching(prodImportDetails.envSlug),
slug: expect.stringMatching(devImportDetails.envSlug),
id: expect.any(String)
})
})
@@ -166,7 +166,7 @@ });
});
test("Delete secret import position", async () => {
const createdImport1 = await createSecretImport("/", "prod");
const createdImport1 = await createSecretImport("/", "dev");
const createdImport2 = await createSecretImport("/", "staging");
const deletedImport = await deleteSecretImport(createdImport1.id);
// check for default environments

@@ -942,113 +942,6 @@ describe.each([{ auth: AuthMode.JWT }, { auth: AuthMode.IDENTITY_ACCESS_TOKEN }]
const secrets = await getSecrets(seedData1.environment.slug, path);
expect(secrets).toEqual([]);
});
test.each(testRawSecrets)("Bulk create secret raw in path $path", async ({ path, secret }) => {
const createSecretReqBody = {
projectSlug: seedData1.project.slug,
environment: seedData1.environment.slug,
secretPath: path,
secrets: [
{
secretKey: secret.key,
secretValue: secret.value,
secretComment: secret.comment
}
]
};
const createSecRes = await testServer.inject({
method: "POST",
url: `/api/v3/secrets/batch/raw`,
headers: {
authorization: `Bearer ${authToken}`
},
body: createSecretReqBody
});
expect(createSecRes.statusCode).toBe(200);
const createdSecretPayload = JSON.parse(createSecRes.payload);
expect(createdSecretPayload).toHaveProperty("secrets");
// fetch secrets
const secrets = await getSecrets(seedData1.environment.slug, path);
expect(secrets).toEqual(
expect.arrayContaining([
expect.objectContaining({
key: secret.key,
value: secret.value,
type: SecretType.Shared
})
])
);
await deleteRawSecret({ path, key: secret.key });
});
test.each(testRawSecrets)("Bulk update secret raw in path $path", async ({ secret, path }) => {
await createRawSecret({ path, ...secret });
const updateSecretReqBody = {
projectSlug: seedData1.project.slug,
environment: seedData1.environment.slug,
secretPath: path,
secrets: [
{
secretValue: "new-value",
secretKey: secret.key
}
]
};
const updateSecRes = await testServer.inject({
method: "PATCH",
url: `/api/v3/secrets/batch/raw`,
headers: {
authorization: `Bearer ${authToken}`
},
body: updateSecretReqBody
});
expect(updateSecRes.statusCode).toBe(200);
const updatedSecretPayload = JSON.parse(updateSecRes.payload);
expect(updatedSecretPayload).toHaveProperty("secrets");
// fetch secrets
const secrets = await getSecrets(seedData1.environment.slug, path);
expect(secrets).toEqual(
expect.arrayContaining([
expect.objectContaining({
key: secret.key,
value: "new-value",
version: 2,
type: SecretType.Shared
})
])
);
await deleteRawSecret({ path, key: secret.key });
});
test.each(testRawSecrets)("Bulk delete secret raw in path $path", async ({ path, secret }) => {
await createRawSecret({ path, ...secret });
const deletedSecretReqBody = {
projectSlug: seedData1.project.slug,
environment: seedData1.environment.slug,
secretPath: path,
secrets: [{ secretKey: secret.key }]
};
const deletedSecRes = await testServer.inject({
method: "DELETE",
url: `/api/v3/secrets/batch/raw`,
headers: {
authorization: `Bearer ${authToken}`
},
body: deletedSecretReqBody
});
expect(deletedSecRes.statusCode).toBe(200);
const deletedSecretPayload = JSON.parse(deletedSecRes.payload);
expect(deletedSecretPayload).toHaveProperty("secrets");
// fetch secrets
const secrets = await getSecrets(seedData1.environment.slug, path);
expect(secrets).toEqual([]);
});
}
);

@@ -10,11 +10,10 @@ import { seedData1 } from "@app/db/seed-data";
import { initEnvConfig } from "@app/lib/config/env";
import { initLogger } from "@app/lib/logger";
import { main } from "@app/server/app";
import { AuthMethod, AuthTokenType } from "@app/services/auth/auth-type";
import { AuthTokenType } from "@app/services/auth/auth-type";
import { mockQueue } from "./mocks/queue";
import { mockSmtpServer } from "./mocks/smtp";
import { mockKeyStore } from "./mocks/keystore";
dotenv.config({ path: path.join(__dirname, "../../.env.test"), debug: true });
export default {
@@ -42,8 +41,7 @@ export default {
await db.seed.run();
const smtp = mockSmtpServer();
const queue = mockQueue();
const keyStore = mockKeyStore();
const server = await main({ db, smtp, logger, queue, keyStore });
const server = await main({ db, smtp, logger, queue });
// @ts-expect-error type
globalThis.testServer = server;
// @ts-expect-error type
@@ -52,8 +50,6 @@ export default {
authTokenType: AuthTokenType.ACCESS_TOKEN,
userId: seedData1.id,
tokenVersionId: seedData1.token.id,
authMethod: AuthMethod.EMAIL,
organizationId: seedData1.organization.id,
accessVersion: 1
},
cfg.AUTH_SECRET,

backend/package-lock.json (generated; 2011 changes, diff suppressed because it is too large)

@@ -41,6 +41,7 @@
"@types/nodemailer": "^6.4.14",
"@types/passport-github": "^1.1.12",
"@types/passport-google-oauth20": "^2.0.14",
"@types/passport-openidconnect": "^0.1.3",
"@types/pg": "^8.10.9",
"@types/picomatch": "^2.3.3",
"@types/prompt-sync": "^4.2.3",
@@ -70,10 +71,9 @@
"vitest": "^1.2.2"
},
"dependencies": {
"@aws-sdk/client-iam": "^3.525.0",
"@aws-sdk/client-secrets-manager": "^3.504.0",
"@casl/ability": "^6.5.0",
"@fastify/cookie": "^9.3.1",
"@fastify/cookie": "^9.2.0",
"@fastify/cors": "^8.5.0",
"@fastify/etag": "^5.1.0",
"@fastify/formbody": "^7.4.0",
@@ -91,12 +91,11 @@
"@ucast/mongo2js": "^1.3.4",
"ajv": "^8.12.0",
"argon2": "^0.31.2",
"aws-sdk": "^2.1553.0",
"aws-sdk": "^2.1549.0",
"axios": "^1.6.7",
"axios-retry": "^4.0.0",
"bcrypt": "^5.1.1",
"bullmq": "^5.3.3",
"cassandra-driver": "^4.7.2",
"dotenv": "^16.4.1",
"fastify": "^4.26.0",
"fastify-plugin": "^4.5.1",
@@ -106,24 +105,21 @@
"jsonwebtoken": "^9.0.2",
"jsrp": "^0.2.4",
"knex": "^3.0.1",
"ldapjs": "^3.0.7",
"libsodium-wrappers": "^0.7.13",
"lodash.isequal": "^4.5.0",
"ms": "^2.1.3",
"mysql2": "^3.9.7",
"mysql2": "^3.9.1",
"nanoid": "^5.0.4",
"node-cache": "^5.1.2",
"nodemailer": "^6.9.9",
"ora": "^7.0.1",
"oracledb": "^6.4.0",
"passport-github": "^1.1.0",
"passport-gitlab2": "^5.0.0",
"passport-google-oauth20": "^2.0.0",
"passport-ldapauth": "^3.0.1",
"passport-openidconnect": "^0.1.2",
"pg": "^8.11.3",
"pg-query-stream": "^4.5.3",
"picomatch": "^3.0.1",
"pino": "^8.16.2",
"posthog-node": "^3.6.2",
"posthog-node": "^3.6.0",
"probot": "^13.0.0",
"smee-client": "^2.0.0",
"tweetnacl": "^1.0.3",

@@ -103,15 +103,11 @@ export const ${dalName} = (db: TDbClient) => {
`import { z } from "zod";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
import { readLimit } from "@app/server/config/rateLimiter";
export const register${pascalCase}Router = async (server: FastifyZodProvider) => {
server.route({
method: "GET",
url: "/",
config: {
rateLimit: readLimit
},
method: "GET",
schema: {
params: z.object({}),
response: {

@@ -7,10 +7,10 @@ const prompt = promptSync({ sigint: true });
const migrationName = prompt("Enter name for migration: ");
// Remove spaces from migration name and replace with hyphens
const formattedMigrationName = migrationName.replace(/\s+/g, "-");
execSync(
`npx knex migrate:make --knexfile ${path.join(__dirname, "../src/db/knexfile.ts")} -x ts ${formattedMigrationName}`,
`npx knex migrate:make --knexfile ${path.join(
__dirname,
"../src/db/knexfile.ts"
)} -x ts ${migrationName}`,
{ stdio: "inherit" }
);

@@ -44,7 +44,7 @@ const getZodDefaultValue = (type: unknown, value: string | number | boolean | Ob
if (!value || value === "null") return;
switch (type) {
case "uuid":
return `.default("00000000-0000-0000-0000-000000000000")`;
return;
case "character varying": {
if (value === "gen_random_uuid()") return;
if (typeof value === "string" && value.includes("::")) {
@@ -100,8 +98,7 @@ const main = async () => {
const columnName = columnNames[colNum];
const colInfo = columns[columnName];
let ztype = getZodPrimitiveType(colInfo.type);
// don't put optional on id
if (colInfo.defaultValue && columnName !== "id") {
if (colInfo.defaultValue) {
const { defaultValue } = colInfo;
const zSchema = getZodDefaultValue(colInfo.type, defaultValue);
if (zSchema) {
@@ -121,7 +120,6 @@ const main = async () => {
.split("_")
.reduce((prev, curr) => prev + `${curr.at(0)?.toUpperCase()}${curr.slice(1).toLowerCase()}`, "");
// the insert and update are changed to zod input type to use default cases
writeFileSync(
path.join(__dirname, "../src/db/schemas", `${dashcase}.ts`),
`// Code generated by automation script, DO NOT EDIT.
@@ -136,8 +134,8 @@ import { TImmutableDBKeys } from "./models";
export const ${pascalCase}Schema = z.object({${schema}});
export type T${pascalCase} = z.infer<typeof ${pascalCase}Schema>;
export type T${pascalCase}Insert = Omit<z.input<typeof ${pascalCase}Schema>, TImmutableDBKeys>;
export type T${pascalCase}Update = Partial<Omit<z.input<typeof ${pascalCase}Schema>, TImmutableDBKeys>>;
export type T${pascalCase}Insert = Omit<T${pascalCase}, TImmutableDBKeys>;
export type T${pascalCase}Update = Partial<Omit<T${pascalCase}, TImmutableDBKeys>>;
`
);
}
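The last change in this generator swaps the Insert/Update helper types from z.input of the schema to the plain inferred type. A minimal sketch of the difference, using a hypothetical UsersSchema rather than anything from this diff: with z.input, a column backed by a default stays optional on insert; with the inferred output type, it becomes required.

import { z } from "zod";

const UsersSchema = z.object({
  id: z.string().uuid(),
  createdAt: z.date().default(() => new Date())
});

type TUsers = z.infer<typeof UsersSchema>; // { id: string; createdAt: Date }
// insert type derived from z.input: createdAt may be omitted, the default fills it in
type TUsersInsertFromInput = Omit<z.input<typeof UsersSchema>, "id">;
// insert type derived from the inferred type: createdAt must always be supplied
type TUsersInsertFromInfer = Omit<TUsers, "id">;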

@@ -1,19 +1,10 @@
import "fastify";
import { TUsers } from "@app/db/schemas";
import { TAccessApprovalPolicyServiceFactory } from "@app/ee/services/access-approval-policy/access-approval-policy-service";
import { TAccessApprovalRequestServiceFactory } from "@app/ee/services/access-approval-request/access-approval-request-service";
import { TAuditLogServiceFactory } from "@app/ee/services/audit-log/audit-log-service";
import { TCreateAuditLogDTO } from "@app/ee/services/audit-log/audit-log-types";
import { TAuditLogStreamServiceFactory } from "@app/ee/services/audit-log-stream/audit-log-stream-service";
import { TDynamicSecretServiceFactory } from "@app/ee/services/dynamic-secret/dynamic-secret-service";
import { TDynamicSecretLeaseServiceFactory } from "@app/ee/services/dynamic-secret-lease/dynamic-secret-lease-service";
import { TGroupServiceFactory } from "@app/ee/services/group/group-service";
import { TIdentityProjectAdditionalPrivilegeServiceFactory } from "@app/ee/services/identity-project-additional-privilege/identity-project-additional-privilege-service";
import { TLdapConfigServiceFactory } from "@app/ee/services/ldap-config/ldap-config-service";
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
import { TProjectUserAdditionalPrivilegeServiceFactory } from "@app/ee/services/project-user-additional-privilege/project-user-additional-privilege-service";
import { TSamlConfigServiceFactory } from "@app/ee/services/saml-config/saml-config-service";
import { TScimServiceFactory } from "@app/ee/services/scim/scim-service";
import { TSecretApprovalPolicyServiceFactory } from "@app/ee/services/secret-approval-policy/secret-approval-policy-service";
@@ -27,12 +18,10 @@ import { TApiKeyServiceFactory } from "@app/services/api-key/api-key-service";
import { TAuthLoginFactory } from "@app/services/auth/auth-login-service";
import { TAuthPasswordFactory } from "@app/services/auth/auth-password-service";
import { TAuthSignupFactory } from "@app/services/auth/auth-signup-service";
import { ActorAuthMethod, ActorType } from "@app/services/auth/auth-type";
import { ActorType } from "@app/services/auth/auth-type";
import { TAuthTokenServiceFactory } from "@app/services/auth-token/auth-token-service";
import { TGroupProjectServiceFactory } from "@app/services/group-project/group-project-service";
import { TIdentityServiceFactory } from "@app/services/identity/identity-service";
import { TIdentityAccessTokenServiceFactory } from "@app/services/identity-access-token/identity-access-token-service";
import { TIdentityAwsIamAuthServiceFactory } from "@app/services/identity-aws-iam-auth/identity-aws-iam-auth-service";
import { TIdentityProjectServiceFactory } from "@app/services/identity-project/identity-project-service";
import { TIdentityUaServiceFactory } from "@app/services/identity-ua/identity-ua-service";
import { TIntegrationServiceFactory } from "@app/services/integration/integration-service";
@@ -69,10 +58,9 @@ declare module "fastify" {
// identity injection: depending on which kind of token is used, the information is filled in under auth
auth: TAuthMode;
permission: {
authMethod: ActorAuthMethod;
type: ActorType;
id: string;
orgId: string;
orgId?: string;
};
// passport data
passportUser: {
@@ -81,7 +69,6 @@ declare module "fastify" {
};
auditLogInfo: Pick<TCreateAuditLogDTO, "userAgent" | "userAgentType" | "ipAddress" | "actor">;
ssoConfig: Awaited<ReturnType<TSamlConfigServiceFactory["getSaml"]>>;
ldapConfig: Awaited<ReturnType<TLdapConfigServiceFactory["getLdapCfg"]>>;
}
interface FastifyInstance {
@@ -95,8 +82,6 @@ declare module "fastify" {
orgRole: TOrgRoleServiceFactory;
superAdmin: TSuperAdminServiceFactory;
user: TUserServiceFactory;
group: TGroupServiceFactory;
groupProject: TGroupProjectServiceFactory;
apiKey: TApiKeyServiceFactory;
project: TProjectServiceFactory;
projectMembership: TProjectMembershipServiceFactory;
@@ -116,27 +101,18 @@
identityAccessToken: TIdentityAccessTokenServiceFactory;
identityProject: TIdentityProjectServiceFactory;
identityUa: TIdentityUaServiceFactory;
identityAwsIamAuth: TIdentityAwsIamAuthServiceFactory;
accessApprovalPolicy: TAccessApprovalPolicyServiceFactory;
accessApprovalRequest: TAccessApprovalRequestServiceFactory;
secretApprovalPolicy: TSecretApprovalPolicyServiceFactory;
secretApprovalRequest: TSecretApprovalRequestServiceFactory;
secretRotation: TSecretRotationServiceFactory;
snapshot: TSecretSnapshotServiceFactory;
saml: TSamlConfigServiceFactory;
scim: TScimServiceFactory;
ldap: TLdapConfigServiceFactory;
auditLog: TAuditLogServiceFactory;
auditLogStream: TAuditLogStreamServiceFactory;
secretScanning: TSecretScanningServiceFactory;
license: TLicenseServiceFactory;
trustedIp: TTrustedIpServiceFactory;
secretBlindIndex: TSecretBlindIndexServiceFactory;
telemetry: TTelemetryServiceFactory;
dynamicSecret: TDynamicSecretServiceFactory;
dynamicSecretLease: TDynamicSecretLeaseServiceFactory;
projectUserAdditionalPrivilege: TProjectUserAdditionalPrivilegeServiceFactory;
identityProjectAdditionalPrivilege: TIdentityProjectAdditionalPrivilegeServiceFactory;
};
// this is exclusive use for middlewares in which we need to inject data
// everywhere else access using service layer

@@ -2,26 +2,11 @@ import { Knex } from "knex";
import {
TableName,
TAccessApprovalPolicies,
TAccessApprovalPoliciesApprovers,
TAccessApprovalPoliciesApproversInsert,
TAccessApprovalPoliciesApproversUpdate,
TAccessApprovalPoliciesInsert,
TAccessApprovalPoliciesUpdate,
TAccessApprovalRequests,
TAccessApprovalRequestsInsert,
TAccessApprovalRequestsReviewers,
TAccessApprovalRequestsReviewersInsert,
TAccessApprovalRequestsReviewersUpdate,
TAccessApprovalRequestsUpdate,
TApiKeys,
TApiKeysInsert,
TApiKeysUpdate,
TAuditLogs,
TAuditLogsInsert,
TAuditLogStreams,
TAuditLogStreamsInsert,
TAuditLogStreamsUpdate,
TAuditLogsUpdate,
TAuthTokens,
TAuthTokenSessions,
@@ -32,45 +17,21 @@ import {
TBackupPrivateKey,
TBackupPrivateKeyInsert,
TBackupPrivateKeyUpdate,
TDynamicSecretLeases,
TDynamicSecretLeasesInsert,
TDynamicSecretLeasesUpdate,
TDynamicSecrets,
TDynamicSecretsInsert,
TDynamicSecretsUpdate,
TGitAppInstallSessions,
TGitAppInstallSessionsInsert,
TGitAppInstallSessionsUpdate,
TGitAppOrg,
TGitAppOrgInsert,
TGitAppOrgUpdate,
TGroupProjectMembershipRoles,
TGroupProjectMembershipRolesInsert,
TGroupProjectMembershipRolesUpdate,
TGroupProjectMemberships,
TGroupProjectMembershipsInsert,
TGroupProjectMembershipsUpdate,
TGroups,
TGroupsInsert,
TGroupsUpdate,
TIdentities,
TIdentitiesInsert,
TIdentitiesUpdate,
TIdentityAccessTokens,
TIdentityAccessTokensInsert,
TIdentityAccessTokensUpdate,
TIdentityAwsIamAuths,
TIdentityAwsIamAuthsInsert,
TIdentityAwsIamAuthsUpdate,
TIdentityOrgMemberships,
TIdentityOrgMembershipsInsert,
TIdentityOrgMembershipsUpdate,
TIdentityProjectAdditionalPrivilege,
TIdentityProjectAdditionalPrivilegeInsert,
TIdentityProjectAdditionalPrivilegeUpdate,
TIdentityProjectMembershipRole,
TIdentityProjectMembershipRoleInsert,
TIdentityProjectMembershipRoleUpdate,
TIdentityProjectMemberships,
TIdentityProjectMembershipsInsert,
TIdentityProjectMembershipsUpdate,
@@ -89,12 +50,6 @@ import {
TIntegrations,
TIntegrationsInsert,
TIntegrationsUpdate,
TLdapConfigs,
TLdapConfigsInsert,
TLdapConfigsUpdate,
TLdapGroupMaps,
TLdapGroupMapsInsert,
TLdapGroupMapsUpdate,
TOrganizations,
TOrganizationsInsert,
TOrganizationsUpdate,
@@ -125,12 +80,6 @@ import {
TProjects,
TProjectsInsert,
TProjectsUpdate,
TProjectUserAdditionalPrivilege,
TProjectUserAdditionalPrivilegeInsert,
TProjectUserAdditionalPrivilegeUpdate,
TProjectUserMembershipRoles,
TProjectUserMembershipRolesInsert,
TProjectUserMembershipRolesUpdate,
TSamlConfigs,
TSamlConfigsInsert,
TSamlConfigsUpdate,
@@ -212,15 +161,9 @@ import {
TUserActions,
TUserActionsInsert,
TUserActionsUpdate,
TUserAliases,
TUserAliasesInsert,
TUserAliasesUpdate,
TUserEncryptionKeys,
TUserEncryptionKeysInsert,
TUserEncryptionKeysUpdate,
TUserGroupMembership,
TUserGroupMembershipInsert,
TUserGroupMembershipUpdate,
TUsers,
TUsersInsert,
TUsersUpdate,
@@ -232,23 +175,6 @@ import {
declare module "knex/types/tables" {
interface Tables {
[TableName.Users]: Knex.CompositeTableType<TUsers, TUsersInsert, TUsersUpdate>;
[TableName.Groups]: Knex.CompositeTableType<TGroups, TGroupsInsert, TGroupsUpdate>;
[TableName.UserGroupMembership]: Knex.CompositeTableType<
TUserGroupMembership,
TUserGroupMembershipInsert,
TUserGroupMembershipUpdate
>;
[TableName.GroupProjectMembership]: Knex.CompositeTableType<
TGroupProjectMemberships,
TGroupProjectMembershipsInsert,
TGroupProjectMembershipsUpdate
>;
[TableName.GroupProjectMembershipRole]: Knex.CompositeTableType<
TGroupProjectMembershipRoles,
TGroupProjectMembershipRolesInsert,
TGroupProjectMembershipRolesUpdate
>;
[TableName.UserAliases]: Knex.CompositeTableType<TUserAliases, TUserAliasesInsert, TUserAliasesUpdate>;
[TableName.UserEncryptionKey]: Knex.CompositeTableType<
TUserEncryptionKeys,
TUserEncryptionKeysInsert,
@@ -288,17 +214,7 @@ declare module "knex/types/tables" {
TProjectEnvironmentsUpdate
>;
[TableName.ProjectBot]: Knex.CompositeTableType<TProjectBots, TProjectBotsInsert, TProjectBotsUpdate>;
[TableName.ProjectUserMembershipRole]: Knex.CompositeTableType<
TProjectUserMembershipRoles,
TProjectUserMembershipRolesInsert,
TProjectUserMembershipRolesUpdate
>;
[TableName.ProjectRoles]: Knex.CompositeTableType<TProjectRoles, TProjectRolesInsert, TProjectRolesUpdate>;
[TableName.ProjectUserAdditionalPrivilege]: Knex.CompositeTableType<
TProjectUserAdditionalPrivilege,
TProjectUserAdditionalPrivilegeInsert,
TProjectUserAdditionalPrivilegeUpdate
>;
[TableName.ProjectKeys]: Knex.CompositeTableType<TProjectKeys, TProjectKeysInsert, TProjectKeysUpdate>;
[TableName.Secret]: Knex.CompositeTableType<TSecrets, TSecretsInsert, TSecretsUpdate>;
[TableName.SecretBlindIndex]: Knex.CompositeTableType<
@@ -329,11 +245,6 @@ declare module "knex/types/tables" {
TIdentityUniversalAuthsInsert,
TIdentityUniversalAuthsUpdate
>;
[TableName.IdentityAwsIamAuth]: Knex.CompositeTableType<
TIdentityAwsIamAuths,
TIdentityAwsIamAuthsInsert,
TIdentityAwsIamAuthsUpdate
>;
[TableName.IdentityUaClientSecret]: Knex.CompositeTableType<
TIdentityUaClientSecrets,
TIdentityUaClientSecretsInsert,
@@ -354,41 +265,6 @@ declare module "knex/types/tables" {
TIdentityProjectMembershipsInsert,
TIdentityProjectMembershipsUpdate
>;
[TableName.IdentityProjectMembershipRole]: Knex.CompositeTableType<
TIdentityProjectMembershipRole,
TIdentityProjectMembershipRoleInsert,
TIdentityProjectMembershipRoleUpdate
>;
[TableName.IdentityProjectAdditionalPrivilege]: Knex.CompositeTableType<
TIdentityProjectAdditionalPrivilege,
TIdentityProjectAdditionalPrivilegeInsert,
TIdentityProjectAdditionalPrivilegeUpdate
>;
[TableName.AccessApprovalPolicy]: Knex.CompositeTableType<
TAccessApprovalPolicies,
TAccessApprovalPoliciesInsert,
TAccessApprovalPoliciesUpdate
>;
[TableName.AccessApprovalPolicyApprover]: Knex.CompositeTableType<
TAccessApprovalPoliciesApprovers,
TAccessApprovalPoliciesApproversInsert,
TAccessApprovalPoliciesApproversUpdate
>;
[TableName.AccessApprovalRequest]: Knex.CompositeTableType<
TAccessApprovalRequests,
TAccessApprovalRequestsInsert,
TAccessApprovalRequestsUpdate
>;
[TableName.AccessApprovalRequestReviewer]: Knex.CompositeTableType<
TAccessApprovalRequestsReviewers,
TAccessApprovalRequestsReviewersInsert,
TAccessApprovalRequestsReviewersUpdate
>;
[TableName.ScimToken]: Knex.CompositeTableType<TScimTokens, TScimTokensInsert, TScimTokensUpdate>;
[TableName.SecretApprovalPolicy]: Knex.CompositeTableType<
TSecretApprovalPolicies,
@@ -441,22 +317,9 @@ declare module "knex/types/tables" {
TSecretSnapshotFoldersInsert,
TSecretSnapshotFoldersUpdate
>;
[TableName.DynamicSecret]: Knex.CompositeTableType<TDynamicSecrets, TDynamicSecretsInsert, TDynamicSecretsUpdate>;
[TableName.DynamicSecretLease]: Knex.CompositeTableType<
TDynamicSecretLeases,
TDynamicSecretLeasesInsert,
TDynamicSecretLeasesUpdate
>;
[TableName.SamlConfig]: Knex.CompositeTableType<TSamlConfigs, TSamlConfigsInsert, TSamlConfigsUpdate>;
[TableName.LdapConfig]: Knex.CompositeTableType<TLdapConfigs, TLdapConfigsInsert, TLdapConfigsUpdate>;
[TableName.LdapGroupMap]: Knex.CompositeTableType<TLdapGroupMaps, TLdapGroupMapsInsert, TLdapGroupMapsUpdate>;
[TableName.OrgBot]: Knex.CompositeTableType<TOrgBots, TOrgBotsInsert, TOrgBotsUpdate>;
[TableName.AuditLog]: Knex.CompositeTableType<TAuditLogs, TAuditLogsInsert, TAuditLogsUpdate>;
[TableName.AuditLogStream]: Knex.CompositeTableType<
TAuditLogStreams,
TAuditLogStreamsInsert,
TAuditLogStreamsUpdate
>;
[TableName.GitAppInstallSession]: Knex.CompositeTableType<
TGitAppInstallSessions,
TGitAppInstallSessionsInsert,

backend/src/cache/redis.ts (new file, 6 lines)

@@ -0,0 +1,6 @@
import Redis from "ioredis";
export const initRedisConnection = (redisUrl: string) => {
const redis = new Redis(redisUrl);
return redis;
};
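The new helper is a thin wrapper around the ioredis constructor. A minimal usage sketch, with an assumed import path and the REDIS_URL default mirroring the sample env file at the top of this diff:

import { initRedisConnection } from "@app/cache/redis"; // import path assumed

const redis = initRedisConnection(process.env.REDIS_URL ?? "redis://redis:6379");

void (async () => {
  // basic round-trip to confirm the connection works
  await redis.set("boot-check", "ok");
  console.log(await redis.get("boot-check")); // "ok"
})();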

@@ -6,13 +6,6 @@ export const initDbConnection = ({ dbConnectionUri, dbRootCert }: { dbConnection
client: "pg",
connection: {
connectionString: dbConnectionUri,
host: process.env.DB_HOST,
// @ts-expect-error I have no clue why only for the port there is a type error
// eslint-disable-next-line
port: process.env.DB_PORT,
user: process.env.DB_USER,
database: process.env.DB_NAME,
password: process.env.DB_PASSWORD,
ssl: dbRootCert
? {
rejectUnauthorized: true,

@@ -7,29 +7,17 @@ import path from "path";
// Update with your config settings.
dotenv.config({
path: path.join(__dirname, "../../../.env.migration")
path: path.join(__dirname, "../../../.env.migration"),
debug: true
});
dotenv.config({
path: path.join(__dirname, "../../../.env")
path: path.join(__dirname, "../../../.env"),
debug: true
});
export default {
development: {
client: "postgres",
connection: {
connectionString: process.env.DB_CONNECTION_URI,
host: process.env.DB_HOST,
port: process.env.DB_PORT,
user: process.env.DB_USER,
database: process.env.DB_NAME,
password: process.env.DB_PASSWORD,
ssl: process.env.DB_ROOT_CERT
? {
rejectUnauthorized: true,
ca: Buffer.from(process.env.DB_ROOT_CERT, "base64").toString("ascii")
}
: false
},
connection: process.env.DB_CONNECTION_URI,
pool: {
min: 2,
max: 10
@ -43,20 +31,7 @@ export default {
},
production: {
client: "postgres",
connection: {
connectionString: process.env.DB_CONNECTION_URI,
host: process.env.DB_HOST,
port: process.env.DB_PORT,
user: process.env.DB_USER,
database: process.env.DB_NAME,
password: process.env.DB_PASSWORD,
ssl: process.env.DB_ROOT_CERT
? {
rejectUnauthorized: true,
ca: Buffer.from(process.env.DB_ROOT_CERT, "base64").toString("ascii")
}
: false
},
connection: process.env.DB_CONNECTION_URI,
pool: {
min: 2,
max: 10
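With the discrete host/port/user/database/password fields removed, the single DB_CONNECTION_URI string carries the same information, since knex's Postgres client parses all of it out of one URI. A hedged sketch with illustrative values:

import knex from "knex";

// Illustrative URI of the form postgres://user:password@host:port/database
const db = knex({
  client: "postgres",
  connection: "postgres://app_user:app_pass@localhost:5432/app_db",
  pool: { min: 2, max: 10 }
});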

View File

@ -1,25 +0,0 @@
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-nocheck
import { Knex } from "knex";
import { TableName } from "../schemas";
const ADMIN_CONFIG_UUID = "00000000-0000-0000-0000-000000000000";
export async function up(knex: Knex): Promise<void> {
await knex.schema.alterTable(TableName.SuperAdmin, (t) => {
t.uuid("instanceId").notNullable().defaultTo(knex.fn.uuid());
});
const superUserConfigExists = await knex(TableName.SuperAdmin).where("id", ADMIN_CONFIG_UUID).first();
if (!superUserConfigExists) {
// eslint-disable-next-line
await knex(TableName.SuperAdmin).update({ id: ADMIN_CONFIG_UUID }).whereNotNull("id").limit(1);
}
}
export async function down(knex: Knex): Promise<void> {
await knex.schema.alterTable(TableName.SuperAdmin, (t) => {
t.dropColumn("instanceId");
});
}

View File

@ -1,15 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
await knex.schema.alterTable(TableName.Integration, (t) => {
t.datetime("lastUsed");
});
}
export async function down(knex: Knex): Promise<void> {
await knex.schema.alterTable(TableName.Integration, (t) => {
t.dropColumn("lastUsed");
});
}

View File

@ -1,68 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasTable(TableName.LdapConfig))) {
await knex.schema.createTable(TableName.LdapConfig, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.uuid("orgId").notNullable().unique();
t.foreign("orgId").references("id").inTable(TableName.Organization).onDelete("CASCADE");
t.boolean("isActive").notNullable();
t.string("url").notNullable();
t.string("encryptedBindDN").notNullable();
t.string("bindDNIV").notNullable();
t.string("bindDNTag").notNullable();
t.string("encryptedBindPass").notNullable();
t.string("bindPassIV").notNullable();
t.string("bindPassTag").notNullable();
t.string("searchBase").notNullable();
t.text("encryptedCACert").notNullable();
t.string("caCertIV").notNullable();
t.string("caCertTag").notNullable();
t.timestamps(true, true, true);
});
}
await createOnUpdateTrigger(knex, TableName.LdapConfig);
if (!(await knex.schema.hasTable(TableName.UserAliases))) {
await knex.schema.createTable(TableName.UserAliases, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.uuid("userId").notNullable();
t.foreign("userId").references("id").inTable(TableName.Users).onDelete("CASCADE");
t.string("username").notNullable();
t.string("aliasType").notNullable();
t.string("externalId").notNullable();
t.specificType("emails", "text[]");
t.uuid("orgId").nullable();
t.foreign("orgId").references("id").inTable(TableName.Organization).onDelete("CASCADE");
t.timestamps(true, true, true);
});
}
await createOnUpdateTrigger(knex, TableName.UserAliases);
await knex.schema.alterTable(TableName.Users, (t) => {
t.string("username").unique();
t.string("email").nullable().alter();
t.dropUnique(["email"]);
});
await knex(TableName.Users).update("username", knex.ref("email"));
await knex.schema.alterTable(TableName.Users, (t) => {
t.string("username").notNullable().alter();
});
}
export async function down(knex: Knex): Promise<void> {
await knex.schema.dropTableIfExists(TableName.LdapConfig);
await knex.schema.dropTableIfExists(TableName.UserAliases);
await knex.schema.alterTable(TableName.Users, (t) => {
t.dropColumn("username");
// t.string("email").notNullable().alter();
});
await dropOnUpdateTrigger(knex, TableName.LdapConfig);
}
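createOnUpdateTrigger and dropOnUpdateTrigger come from a utils module this diff does not show; a plausible sketch of the create helper, in which the trigger and function names are assumptions, is:

import { Knex } from "knex";

// Hypothetical helper: keep "updatedAt" current on every UPDATE, assuming a
// Postgres function on_update_timestamp() has been created elsewhere.
export const createOnUpdateTrigger = (knex: Knex, tableName: string) =>
  knex.raw(`
    CREATE TRIGGER "${tableName}_updatedAt"
    BEFORE UPDATE ON "${tableName}"
    FOR EACH ROW EXECUTE PROCEDURE on_update_timestamp();
  `);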

View File

@ -1,50 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";
export async function up(knex: Knex): Promise<void> {
const doesTableExist = await knex.schema.hasTable(TableName.ProjectUserMembershipRole);
if (!doesTableExist) {
await knex.schema.createTable(TableName.ProjectUserMembershipRole, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.string("role").notNullable();
t.uuid("projectMembershipId").notNullable();
t.foreign("projectMembershipId").references("id").inTable(TableName.ProjectMembership).onDelete("CASCADE");
// until the role is changed/removed, the custom role should not be deleted
t.uuid("customRoleId");
t.foreign("customRoleId").references("id").inTable(TableName.ProjectRoles);
t.boolean("isTemporary").notNullable().defaultTo(false);
t.string("temporaryMode");
t.string("temporaryRange"); // could be cron or relative time like 1H or 1minute etc
t.datetime("temporaryAccessStartTime");
t.datetime("temporaryAccessEndTime");
t.timestamps(true, true, true);
});
}
await createOnUpdateTrigger(knex, TableName.ProjectUserMembershipRole);
const projectMemberships = await knex(TableName.ProjectMembership).select(
"id",
"role",
"createdAt",
"updatedAt",
knex.ref("roleId").withSchema(TableName.ProjectMembership).as("customRoleId")
);
if (projectMemberships.length)
await knex.batchInsert(
TableName.ProjectUserMembershipRole,
projectMemberships.map((data) => ({ ...data, projectMembershipId: data.id }))
);
// will be dropped later
// await knex.schema.alterTable(TableName.ProjectMembership, (t) => {
// t.dropColumn("roleId");
// t.dropColumn("role");
// });
}
export async function down(knex: Knex): Promise<void> {
await knex.schema.dropTableIfExists(TableName.ProjectUserMembershipRole);
await dropOnUpdateTrigger(knex, TableName.ProjectUserMembershipRole);
}

View File

@ -1,52 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";
export async function up(knex: Knex): Promise<void> {
const doesTableExist = await knex.schema.hasTable(TableName.IdentityProjectMembershipRole);
if (!doesTableExist) {
await knex.schema.createTable(TableName.IdentityProjectMembershipRole, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.string("role").notNullable();
t.uuid("projectMembershipId").notNullable();
t.foreign("projectMembershipId")
.references("id")
.inTable(TableName.IdentityProjectMembership)
.onDelete("CASCADE");
// until the role is changed/removed, the custom role should not be deleted
t.uuid("customRoleId");
t.foreign("customRoleId").references("id").inTable(TableName.ProjectRoles);
t.boolean("isTemporary").notNullable().defaultTo(false);
t.string("temporaryMode");
t.string("temporaryRange"); // could be cron or relative time like 1H or 1minute etc
t.datetime("temporaryAccessStartTime");
t.datetime("temporaryAccessEndTime");
t.timestamps(true, true, true);
});
}
await createOnUpdateTrigger(knex, TableName.IdentityProjectMembershipRole);
const identityMemberships = await knex(TableName.IdentityProjectMembership).select(
"id",
"role",
"createdAt",
"updatedAt",
knex.ref("roleId").withSchema(TableName.IdentityProjectMembership).as("customRoleId")
);
if (identityMemberships.length)
await knex.batchInsert(
TableName.IdentityProjectMembershipRole,
identityMemberships.map((data) => ({ ...data, projectMembershipId: data.id }))
);
// await knex.schema.alterTable(TableName.IdentityProjectMembership, (t) => {
// t.dropColumn("roleId");
// t.dropColumn("role");
// });
}
export async function down(knex: Knex): Promise<void> {
await knex.schema.dropTableIfExists(TableName.IdentityProjectMembershipRole);
await dropOnUpdateTrigger(knex, TableName.IdentityProjectMembershipRole);
}

View File

@ -1,58 +0,0 @@
import { Knex } from "knex";
import { SecretEncryptionAlgo, SecretKeyEncoding, TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";
export async function up(knex: Knex): Promise<void> {
const doesTableExist = await knex.schema.hasTable(TableName.DynamicSecret);
if (!doesTableExist) {
await knex.schema.createTable(TableName.DynamicSecret, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.string("name").notNullable();
t.integer("version").notNullable();
t.string("type").notNullable();
t.string("defaultTTL").notNullable();
t.string("maxTTL");
t.string("inputIV").notNullable();
t.text("inputCiphertext").notNullable();
t.string("inputTag").notNullable();
t.string("algorithm").notNullable().defaultTo(SecretEncryptionAlgo.AES_256_GCM);
t.string("keyEncoding").notNullable().defaultTo(SecretKeyEncoding.UTF8);
t.uuid("folderId").notNullable();
// for background process communication
t.string("status");
t.string("statusDetails");
t.foreign("folderId").references("id").inTable(TableName.SecretFolder).onDelete("CASCADE");
t.unique(["name", "folderId"]);
t.timestamps(true, true, true);
});
}
await createOnUpdateTrigger(knex, TableName.DynamicSecret);
const doesTableDynamicSecretLease = await knex.schema.hasTable(TableName.DynamicSecretLease);
if (!doesTableDynamicSecretLease) {
await knex.schema.createTable(TableName.DynamicSecretLease, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.integer("version").notNullable();
t.string("externalEntityId").notNullable();
t.datetime("expireAt").notNullable();
// for background process communication
t.string("status");
t.string("statusDetails");
t.uuid("dynamicSecretId").notNullable();
t.foreign("dynamicSecretId").references("id").inTable(TableName.DynamicSecret).onDelete("CASCADE");
t.timestamps(true, true, true);
});
}
await createOnUpdateTrigger(knex, TableName.DynamicSecretLease);
}
export async function down(knex: Knex): Promise<void> {
await dropOnUpdateTrigger(knex, TableName.DynamicSecretLease);
await knex.schema.dropTableIfExists(TableName.DynamicSecretLease);
await dropOnUpdateTrigger(knex, TableName.DynamicSecret);
await knex.schema.dropTableIfExists(TableName.DynamicSecret);
}
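Because every lease row records an expireAt timestamp and a dynamicSecretId, a background job can locate expired leases with one range query. A hedged sketch; the literal table name is an assumption, and revocation itself is out of scope here:

import { Knex } from "knex";

// Hypothetical sweep: fetch leases whose expiry has passed so a worker can
// revoke the external credentials they point at.
const findExpiredLeases = (knex: Knex) =>
  knex("dynamic_secret_leases")
    .where("expireAt", "<", new Date())
    .select("id", "externalEntityId", "dynamicSecretId");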

View File

@ -1,29 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasTable(TableName.ProjectUserAdditionalPrivilege))) {
await knex.schema.createTable(TableName.ProjectUserAdditionalPrivilege, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.string("slug", 60).notNullable();
t.uuid("projectMembershipId").notNullable();
t.foreign("projectMembershipId").references("id").inTable(TableName.ProjectMembership).onDelete("CASCADE");
t.boolean("isTemporary").notNullable().defaultTo(false);
t.string("temporaryMode");
t.string("temporaryRange"); // could be cron or relative time like 1H or 1minute etc
t.datetime("temporaryAccessStartTime");
t.datetime("temporaryAccessEndTime");
t.jsonb("permissions").notNullable();
t.timestamps(true, true, true);
});
}
await createOnUpdateTrigger(knex, TableName.ProjectUserAdditionalPrivilege);
}
export async function down(knex: Knex): Promise<void> {
await dropOnUpdateTrigger(knex, TableName.ProjectUserAdditionalPrivilege);
await knex.schema.dropTableIfExists(TableName.ProjectUserAdditionalPrivilege);
}

View File

@ -1,32 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasTable(TableName.IdentityProjectAdditionalPrivilege))) {
await knex.schema.createTable(TableName.IdentityProjectAdditionalPrivilege, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.string("slug", 60).notNullable();
t.uuid("projectMembershipId").notNullable();
t.foreign("projectMembershipId")
.references("id")
.inTable(TableName.IdentityProjectMembership)
.onDelete("CASCADE");
t.boolean("isTemporary").notNullable().defaultTo(false);
t.string("temporaryMode");
t.string("temporaryRange"); // could be cron or relative time like 1H or 1minute etc
t.datetime("temporaryAccessStartTime");
t.datetime("temporaryAccessEndTime");
t.jsonb("permissions").notNullable();
t.timestamps(true, true, true);
});
}
await createOnUpdateTrigger(knex, TableName.IdentityProjectAdditionalPrivilege);
}
export async function down(knex: Knex): Promise<void> {
await dropOnUpdateTrigger(knex, TableName.IdentityProjectAdditionalPrivilege);
await knex.schema.dropTableIfExists(TableName.IdentityProjectAdditionalPrivilege);
}

View File

@ -1,112 +0,0 @@
import { Knex } from "knex";
import { z } from "zod";
import { TableName, TOrgMemberships } from "../schemas";
const validateOrgMembership = (membershipToValidate: TOrgMemberships, firstMembership: TOrgMemberships) => {
const firstOrgId = firstMembership.orgId;
const firstUserId = firstMembership.userId;
if (membershipToValidate.id === firstMembership.id) {
return;
}
if (membershipToValidate.inviteEmail !== firstMembership.inviteEmail) {
throw new Error(`Invite emails are different for the same userId and orgId: ${firstUserId}, ${firstOrgId}`);
}
if (membershipToValidate.orgId !== firstMembership.orgId) {
throw new Error(`OrgIds are different for the same userId and orgId: ${firstUserId}, ${firstOrgId}`);
}
if (membershipToValidate.role !== firstMembership.role) {
throw new Error(`Roles are different for the same userId and orgId: ${firstUserId}, ${firstOrgId}`);
}
if (membershipToValidate.roleId !== firstMembership.roleId) {
throw new Error(`RoleIds are different for the same userId and orgId: ${firstUserId}, ${firstOrgId}`);
}
if (membershipToValidate.status !== firstMembership.status) {
throw new Error(`Statuses are different for the same userId and orgId: ${firstUserId}, ${firstOrgId}`);
}
if (membershipToValidate.userId !== firstMembership.userId) {
throw new Error(`UserIds are different for the same userId and orgId: ${firstUserId}, ${firstOrgId}`);
}
};
export async function up(knex: Knex): Promise<void> {
const RowSchema = z.object({
userId: z.string(),
orgId: z.string(),
cnt: z.string()
});
// Transactionally find and delete duplicate rows
await knex.transaction(async (tx) => {
const duplicateRows = await tx(TableName.OrgMembership)
.select("userId", "orgId") // Select the userId and orgId so we can group by them
.whereNotNull("userId") // Ensure that the userId is not null
.count("* as cnt") // Count the number of rows for each userId and orgId, so we can make sure there are more than 1 row (a duplicate)
.groupBy("userId", "orgId")
.havingRaw("count(*) > ?", [1]); // Using havingRaw for direct SQL expressions
// Parse the rows to ensure they are in the correct format, and for type safety
const parsedRows = RowSchema.array().parse(duplicateRows);
// For each of the duplicate rows, loop through and find the actual memberships to delete
for (const row of parsedRows) {
const count = Number(row.cnt);
// An extra check to ensure that the count is actually a number and is at least 2
if (typeof count !== "number" || count < 2) {
// eslint-disable-next-line no-continue
continue;
}
// Find all the organization memberships that have the same userId and orgId
// eslint-disable-next-line no-await-in-loop
const rowsToDelete = await tx(TableName.OrgMembership).where({
userId: row.userId,
orgId: row.orgId
});
// Ensure that all the rows have exactly the same value, except id, createdAt, updatedAt
for (const rowToDelete of rowsToDelete) {
validateOrgMembership(rowToDelete, rowsToDelete[0]);
}
// Find the row with the earliest createdAt, which we will keep
let earliestCreatedAt: number | null = null;
let earliestCreatedRow: TOrgMemberships | null = null;
for (const rowToDelete of rowsToDelete) {
if (earliestCreatedAt === null || rowToDelete.createdAt.getTime() < earliestCreatedAt) {
earliestCreatedAt = rowToDelete.createdAt.getTime();
earliestCreatedRow = rowToDelete;
}
}
if (!earliestCreatedRow) {
throw new Error("Failed to find the membership row to keep");
}
// Filter the kept row out of the rows to delete
const membershipIdsToDelete = rowsToDelete.map((r) => r.id).filter((id) => id !== earliestCreatedRow!.id);
// eslint-disable-next-line no-await-in-loop
const numberOfRowsDeleted = await tx(TableName.OrgMembership).whereIn("id", membershipIdsToDelete).delete();
// eslint-disable-next-line no-console
console.log(
`Deleted ${numberOfRowsDeleted} duplicate organization memberships for ${row.userId} and ${row.orgId}`
);
}
});
await knex.schema.alterTable(TableName.OrgMembership, (table) => {
table.unique(["userId", "orgId"]);
});
}
export async function down(knex: Knex): Promise<void> {
await knex.schema.alterTable(TableName.OrgMembership, (table) => {
table.dropUnique(["userId", "orgId"]);
});
}
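For reference, the duplicate-detection builder in up() compiles to roughly the following SQL (a sketch; the table name and quoting are assumptions based on the builder):

// SELECT "userId", "orgId", count(*) AS "cnt"
// FROM "org_memberships"
// WHERE "userId" IS NOT NULL
// GROUP BY "userId", "orgId"
// HAVING count(*) > 1;
//
// Every returned (userId, orgId) pair owns at least two membership rows, and
// the unique constraint added afterwards prevents new duplicates.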

View File

@ -1,82 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasTable(TableName.Groups))) {
await knex.schema.createTable(TableName.Groups, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.uuid("orgId").notNullable();
t.foreign("orgId").references("id").inTable(TableName.Organization).onDelete("CASCADE");
t.string("name").notNullable();
t.string("slug").notNullable();
t.unique(["orgId", "slug"]);
t.string("role").notNullable();
t.uuid("roleId");
t.foreign("roleId").references("id").inTable(TableName.OrgRoles);
t.timestamps(true, true, true);
});
}
await createOnUpdateTrigger(knex, TableName.Groups);
if (!(await knex.schema.hasTable(TableName.UserGroupMembership))) {
await knex.schema.createTable(TableName.UserGroupMembership, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid()); // link to user and link to groups cascade on groups
t.uuid("userId").notNullable();
t.foreign("userId").references("id").inTable(TableName.Users).onDelete("CASCADE");
t.uuid("groupId").notNullable();
t.foreign("groupId").references("id").inTable(TableName.Groups).onDelete("CASCADE");
t.timestamps(true, true, true);
});
}
await createOnUpdateTrigger(knex, TableName.UserGroupMembership);
if (!(await knex.schema.hasTable(TableName.GroupProjectMembership))) {
await knex.schema.createTable(TableName.GroupProjectMembership, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.string("projectId").notNullable();
t.foreign("projectId").references("id").inTable(TableName.Project).onDelete("CASCADE");
t.uuid("groupId").notNullable();
t.foreign("groupId").references("id").inTable(TableName.Groups).onDelete("CASCADE");
t.timestamps(true, true, true);
});
}
await createOnUpdateTrigger(knex, TableName.GroupProjectMembership);
if (!(await knex.schema.hasTable(TableName.GroupProjectMembershipRole))) {
await knex.schema.createTable(TableName.GroupProjectMembershipRole, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.string("role").notNullable();
t.uuid("projectMembershipId").notNullable();
t.foreign("projectMembershipId").references("id").inTable(TableName.GroupProjectMembership).onDelete("CASCADE");
// until the role is changed/removed, the custom role should not be deleted
t.uuid("customRoleId");
t.foreign("customRoleId").references("id").inTable(TableName.ProjectRoles);
t.boolean("isTemporary").notNullable().defaultTo(false);
t.string("temporaryMode");
t.string("temporaryRange"); // could be cron or relative time like 1H or 1minute etc
t.datetime("temporaryAccessStartTime");
t.datetime("temporaryAccessEndTime");
t.timestamps(true, true, true);
});
}
await createOnUpdateTrigger(knex, TableName.GroupProjectMembershipRole);
}
export async function down(knex: Knex): Promise<void> {
await knex.schema.dropTableIfExists(TableName.GroupProjectMembershipRole);
await dropOnUpdateTrigger(knex, TableName.GroupProjectMembershipRole);
await knex.schema.dropTableIfExists(TableName.UserGroupMembership);
await dropOnUpdateTrigger(knex, TableName.UserGroupMembership);
await knex.schema.dropTableIfExists(TableName.GroupProjectMembership);
await dropOnUpdateTrigger(knex, TableName.GroupProjectMembership);
await knex.schema.dropTableIfExists(TableName.Groups);
await dropOnUpdateTrigger(knex, TableName.Groups);
}

View File

@ -1,47 +0,0 @@
import { Knex } from "knex";
import { ProjectMembershipRole, TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const doesProjectRoleFieldExist = await knex.schema.hasColumn(TableName.ProjectMembership, "role");
const doesProjectRoleIdFieldExist = await knex.schema.hasColumn(TableName.ProjectMembership, "roleId");
await knex.schema.alterTable(TableName.ProjectMembership, (t) => {
if (doesProjectRoleFieldExist) t.dropColumn("role");
if (doesProjectRoleIdFieldExist) t.dropColumn("roleId");
});
const doesIdentityProjectRoleFieldExist = await knex.schema.hasColumn(TableName.IdentityProjectMembership, "role");
const doesIdentityProjectRoleIdFieldExist = await knex.schema.hasColumn(
TableName.IdentityProjectMembership,
"roleId"
);
await knex.schema.alterTable(TableName.IdentityProjectMembership, (t) => {
if (doesIdentityProjectRoleFieldExist) t.dropColumn("role");
if (doesIdentityProjectRoleIdFieldExist) t.dropColumn("roleId");
});
}
export async function down(knex: Knex): Promise<void> {
const doesProjectRoleFieldExist = await knex.schema.hasColumn(TableName.ProjectMembership, "role");
const doesProjectRoleIdFieldExist = await knex.schema.hasColumn(TableName.ProjectMembership, "roleId");
await knex.schema.alterTable(TableName.ProjectMembership, (t) => {
if (!doesProjectRoleFieldExist) t.string("role").defaultTo(ProjectMembershipRole.Member);
if (!doesProjectRoleIdFieldExist) {
t.uuid("roleId");
t.foreign("roleId").references("id").inTable(TableName.ProjectRoles);
}
});
const doesIdentityProjectRoleFieldExist = await knex.schema.hasColumn(TableName.IdentityProjectMembership, "role");
const doesIdentityProjectRoleIdFieldExist = await knex.schema.hasColumn(
TableName.IdentityProjectMembership,
"roleId"
);
await knex.schema.alterTable(TableName.IdentityProjectMembership, (t) => {
if (!doesIdentityProjectRoleFieldExist) t.string("role").defaultTo(ProjectMembershipRole.Member);
if (!doesIdentityProjectRoleIdFieldExist) {
t.uuid("roleId");
t.foreign("roleId").references("id").inTable(TableName.ProjectRoles);
}
});
}

View File

@ -1,15 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
await knex.schema.alterTable(TableName.UserGroupMembership, (t) => {
t.boolean("isPending").notNullable().defaultTo(false);
});
}
export async function down(knex: Knex): Promise<void> {
await knex.schema.alterTable(TableName.UserGroupMembership, (t) => {
t.dropColumn("isPending");
});
}

View File

@ -1,34 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasTable(TableName.LdapGroupMap))) {
await knex.schema.createTable(TableName.LdapGroupMap, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.uuid("ldapConfigId").notNullable();
t.foreign("ldapConfigId").references("id").inTable(TableName.LdapConfig).onDelete("CASCADE");
t.string("ldapGroupCN").notNullable();
t.uuid("groupId").notNullable();
t.foreign("groupId").references("id").inTable(TableName.Groups).onDelete("CASCADE");
t.unique(["ldapGroupCN", "groupId", "ldapConfigId"]);
});
}
await createOnUpdateTrigger(knex, TableName.LdapGroupMap);
await knex.schema.alterTable(TableName.LdapConfig, (t) => {
t.string("groupSearchBase").notNullable().defaultTo("");
t.string("groupSearchFilter").notNullable().defaultTo("");
});
}
export async function down(knex: Knex): Promise<void> {
await knex.schema.dropTableIfExists(TableName.LdapGroupMap);
await dropOnUpdateTrigger(knex, TableName.LdapGroupMap);
await knex.schema.alterTable(TableName.LdapConfig, (t) => {
t.dropColumn("groupSearchBase");
t.dropColumn("groupSearchFilter");
});
}
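At login time the mapping can be resolved from the CN list an LDAP server returns for a user. A hedged sketch; the literal table name is an assumption:

import { Knex } from "knex";

// Hypothetical lookup: resolve internal group ids for the CNs of the groups
// an authenticated LDAP user belongs to.
const groupsForLdapUser = (knex: Knex, ldapConfigId: string, groupCNs: string[]) =>
  knex("ldap_group_maps")
    .where({ ldapConfigId })
    .whereIn("ldapGroupCN", groupCNs)
    .select("groupId");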

View File

@ -1,15 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
await knex.schema.alterTable(TableName.LdapConfig, (t) => {
t.string("searchFilter").notNullable().defaultTo("");
});
}
export async function down(knex: Knex): Promise<void> {
await knex.schema.alterTable(TableName.LdapConfig, (t) => {
t.dropColumn("searchFilter");
});
}

View File

@ -1,28 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const doesOrgIdExist = await knex.schema.hasColumn(TableName.AuditLog, "orgId");
const doesProjectIdExist = await knex.schema.hasColumn(TableName.AuditLog, "projectId");
const doesCreatedAtExist = await knex.schema.hasColumn(TableName.AuditLog, "createdAt");
if (await knex.schema.hasTable(TableName.AuditLog)) {
await knex.schema.alterTable(TableName.AuditLog, (t) => {
if (doesProjectIdExist && doesCreatedAtExist) t.index(["projectId", "createdAt"]);
if (doesOrgIdExist && doesCreatedAtExist) t.index(["orgId", "createdAt"]);
});
}
}
export async function down(knex: Knex): Promise<void> {
const doesOrgIdExist = await knex.schema.hasColumn(TableName.AuditLog, "orgId");
const doesProjectIdExist = await knex.schema.hasColumn(TableName.AuditLog, "projectId");
const doesCreatedAtExist = await knex.schema.hasColumn(TableName.AuditLog, "createdAt");
if (await knex.schema.hasTable(TableName.AuditLog)) {
await knex.schema.alterTable(TableName.AuditLog, (t) => {
if (doesProjectIdExist && doesCreatedAtExist) t.dropIndex(["projectId", "createdAt"]);
if (doesOrgIdExist && doesCreatedAtExist) t.dropIndex(["orgId", "createdAt"]);
});
}
}
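The two composite indexes match the usual audit-log access pattern: filter by owner, then scan a time range. A sketch of a query they serve; the literal table name is an assumption:

import { Knex } from "knex";

// Hypothetical read path: the ("projectId", "createdAt") index satisfies both
// the equality filter and the ordered time-range scan.
const recentProjectLogs = (knex: Knex, projectId: string, since: Date) =>
  knex("audit_logs")
    .where({ projectId })
    .andWhere("createdAt", ">=", since)
    .orderBy("createdAt", "desc")
    .limit(100);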

View File

@ -1,28 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasTable(TableName.AuditLogStream))) {
await knex.schema.createTable(TableName.AuditLogStream, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.string("url").notNullable();
t.text("encryptedHeadersCiphertext");
t.text("encryptedHeadersIV");
t.text("encryptedHeadersTag");
t.string("encryptedHeadersAlgorithm");
t.string("encryptedHeadersKeyEncoding");
t.uuid("orgId").notNullable();
t.foreign("orgId").references("id").inTable(TableName.Organization).onDelete("CASCADE");
t.timestamps(true, true, true);
});
}
await createOnUpdateTrigger(knex, TableName.AuditLogStream);
}
export async function down(knex: Knex): Promise<void> {
await dropOnUpdateTrigger(knex, TableName.AuditLogStream);
await knex.schema.dropTableIfExists(TableName.AuditLogStream);
}

View File

@ -1,54 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const isUsersTablePresent = await knex.schema.hasTable(TableName.Users);
if (isUsersTablePresent) {
const hasIsEmailVerifiedColumn = await knex.schema.hasColumn(TableName.Users, "isEmailVerified");
if (!hasIsEmailVerifiedColumn) {
await knex.schema.alterTable(TableName.Users, (t) => {
t.boolean("isEmailVerified").defaultTo(false);
});
}
// Backfill isEmailVerified to true where isAccepted is true
await knex(TableName.Users).update({ isEmailVerified: true }).where("isAccepted", true);
}
const isUserAliasTablePresent = await knex.schema.hasTable(TableName.UserAliases);
if (isUserAliasTablePresent) {
await knex.schema.alterTable(TableName.UserAliases, (t) => {
t.string("username").nullable().alter();
});
}
const isSuperAdminTablePresent = await knex.schema.hasTable(TableName.SuperAdmin);
if (isSuperAdminTablePresent) {
await knex.schema.alterTable(TableName.SuperAdmin, (t) => {
t.boolean("trustSamlEmails").defaultTo(false);
t.boolean("trustLdapEmails").defaultTo(false);
});
}
}
export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasColumn(TableName.Users, "isEmailVerified")) {
await knex.schema.alterTable(TableName.Users, (t) => {
t.dropColumn("isEmailVerified");
});
}
if (await knex.schema.hasColumn(TableName.SuperAdmin, "trustSamlEmails")) {
await knex.schema.alterTable(TableName.SuperAdmin, (t) => {
t.dropColumn("trustSamlEmails");
});
}
if (await knex.schema.hasColumn(TableName.SuperAdmin, "trustLdapEmails")) {
await knex.schema.alterTable(TableName.SuperAdmin, (t) => {
t.dropColumn("trustLdapEmails");
});
}
}

View File

@ -1,41 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasTable(TableName.AccessApprovalPolicy))) {
await knex.schema.createTable(TableName.AccessApprovalPolicy, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.string("name").notNullable();
t.integer("approvals").defaultTo(1).notNullable();
t.string("secretPath");
t.uuid("envId").notNullable();
t.foreign("envId").references("id").inTable(TableName.Environment).onDelete("CASCADE");
t.timestamps(true, true, true);
});
await createOnUpdateTrigger(knex, TableName.AccessApprovalPolicy);
}
if (!(await knex.schema.hasTable(TableName.AccessApprovalPolicyApprover))) {
await knex.schema.createTable(TableName.AccessApprovalPolicyApprover, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.uuid("approverId").notNullable();
t.foreign("approverId").references("id").inTable(TableName.ProjectMembership).onDelete("CASCADE");
t.uuid("policyId").notNullable();
t.foreign("policyId").references("id").inTable(TableName.AccessApprovalPolicy).onDelete("CASCADE");
t.timestamps(true, true, true);
});
await createOnUpdateTrigger(knex, TableName.AccessApprovalPolicyApprover);
}
}
export async function down(knex: Knex): Promise<void> {
await knex.schema.dropTableIfExists(TableName.AccessApprovalPolicyApprover);
await knex.schema.dropTableIfExists(TableName.AccessApprovalPolicy);
await dropOnUpdateTrigger(knex, TableName.AccessApprovalPolicyApprover);
await dropOnUpdateTrigger(knex, TableName.AccessApprovalPolicy);
}

View File

@ -1,51 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasTable(TableName.AccessApprovalRequest))) {
await knex.schema.createTable(TableName.AccessApprovalRequest, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.uuid("policyId").notNullable();
t.foreign("policyId").references("id").inTable(TableName.AccessApprovalPolicy).onDelete("CASCADE");
t.uuid("privilegeId").nullable();
t.foreign("privilegeId").references("id").inTable(TableName.ProjectUserAdditionalPrivilege).onDelete("CASCADE");
t.uuid("requestedBy").notNullable();
t.foreign("requestedBy").references("id").inTable(TableName.ProjectMembership).onDelete("CASCADE");
// We use these values to create the actual privilege at a later point in time.
t.boolean("isTemporary").notNullable();
t.string("temporaryRange").nullable();
t.jsonb("permissions").notNullable();
t.timestamps(true, true, true);
});
}
await createOnUpdateTrigger(knex, TableName.AccessApprovalRequest);
if (!(await knex.schema.hasTable(TableName.AccessApprovalRequestReviewer))) {
await knex.schema.createTable(TableName.AccessApprovalRequestReviewer, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.uuid("member").notNullable();
t.foreign("member").references("id").inTable(TableName.ProjectMembership).onDelete("CASCADE");
t.string("status").notNullable();
t.uuid("requestId").notNullable();
t.foreign("requestId").references("id").inTable(TableName.AccessApprovalRequest).onDelete("CASCADE");
t.timestamps(true, true, true);
});
}
await createOnUpdateTrigger(knex, TableName.AccessApprovalRequestReviewer);
}
export async function down(knex: Knex): Promise<void> {
await knex.schema.dropTableIfExists(TableName.AccessApprovalRequestReviewer);
await knex.schema.dropTableIfExists(TableName.AccessApprovalRequest);
await dropOnUpdateTrigger(knex, TableName.AccessApprovalRequestReviewer);
await dropOnUpdateTrigger(knex, TableName.AccessApprovalRequest);
}

View File

@ -1,29 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasTable(TableName.IdentityAwsIamAuth))) {
await knex.schema.createTable(TableName.IdentityAwsIamAuth, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.bigInteger("accessTokenTTL").defaultTo(7200).notNullable();
t.bigInteger("accessTokenMaxTTL").defaultTo(7200).notNullable();
t.bigInteger("accessTokenNumUsesLimit").defaultTo(0).notNullable();
t.jsonb("accessTokenTrustedIps").notNullable();
t.timestamps(true, true, true);
t.uuid("identityId").notNullable().unique();
t.foreign("identityId").references("id").inTable(TableName.Identity).onDelete("CASCADE");
t.string("stsEndpoint").notNullable();
t.string("allowedPrincipalArns").notNullable();
t.string("allowedAccountIds").notNullable();
});
}
await createOnUpdateTrigger(knex, TableName.IdentityAwsIamAuth);
}
export async function down(knex: Knex): Promise<void> {
await knex.schema.dropTableIfExists(TableName.IdentityAwsIamAuth);
await dropOnUpdateTrigger(knex, TableName.IdentityAwsIamAuth);
}

View File

@ -1,25 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update, just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const AccessApprovalPoliciesApproversSchema = z.object({
id: z.string().uuid(),
approverId: z.string().uuid(),
policyId: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date()
});
export type TAccessApprovalPoliciesApprovers = z.infer<typeof AccessApprovalPoliciesApproversSchema>;
export type TAccessApprovalPoliciesApproversInsert = Omit<
z.input<typeof AccessApprovalPoliciesApproversSchema>,
TImmutableDBKeys
>;
export type TAccessApprovalPoliciesApproversUpdate = Partial<
Omit<z.input<typeof AccessApprovalPoliciesApproversSchema>, TImmutableDBKeys>
>;

View File

@ -1,24 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update, just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const AccessApprovalPoliciesSchema = z.object({
id: z.string().uuid(),
name: z.string(),
approvals: z.number().default(1),
envId: z.string().uuid(),
secretPath: z.string().nullable().optional(),
createdAt: z.date(),
updatedAt: z.date()
});
export type TAccessApprovalPolicies = z.infer<typeof AccessApprovalPoliciesSchema>;
export type TAccessApprovalPoliciesInsert = Omit<z.input<typeof AccessApprovalPoliciesSchema>, TImmutableDBKeys>;
export type TAccessApprovalPoliciesUpdate = Partial<
Omit<z.input<typeof AccessApprovalPoliciesSchema>, TImmutableDBKeys>
>;

View File

@ -1,26 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update, just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const AccessApprovalRequestsReviewersSchema = z.object({
id: z.string().uuid(),
member: z.string().uuid(),
status: z.string(),
requestId: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date()
});
export type TAccessApprovalRequestsReviewers = z.infer<typeof AccessApprovalRequestsReviewersSchema>;
export type TAccessApprovalRequestsReviewersInsert = Omit<
z.input<typeof AccessApprovalRequestsReviewersSchema>,
TImmutableDBKeys
>;
export type TAccessApprovalRequestsReviewersUpdate = Partial<
Omit<z.input<typeof AccessApprovalRequestsReviewersSchema>, TImmutableDBKeys>
>;

View File

@ -1,26 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update, just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const AccessApprovalRequestsSchema = z.object({
id: z.string().uuid(),
policyId: z.string().uuid(),
privilegeId: z.string().uuid().nullable().optional(),
requestedBy: z.string().uuid(),
isTemporary: z.boolean(),
temporaryRange: z.string().nullable().optional(),
permissions: z.unknown(),
createdAt: z.date(),
updatedAt: z.date()
});
export type TAccessApprovalRequests = z.infer<typeof AccessApprovalRequestsSchema>;
export type TAccessApprovalRequestsInsert = Omit<z.input<typeof AccessApprovalRequestsSchema>, TImmutableDBKeys>;
export type TAccessApprovalRequestsUpdate = Partial<
Omit<z.input<typeof AccessApprovalRequestsSchema>, TImmutableDBKeys>
>;

View File

@ -19,5 +19,5 @@ export const ApiKeysSchema = z.object({
});
export type TApiKeys = z.infer<typeof ApiKeysSchema>;
export type TApiKeysInsert = Omit<z.input<typeof ApiKeysSchema>, TImmutableDBKeys>;
export type TApiKeysUpdate = Partial<Omit<z.input<typeof ApiKeysSchema>, TImmutableDBKeys>>;
export type TApiKeysInsert = Omit<TApiKeys, TImmutableDBKeys>;
export type TApiKeysUpdate = Partial<Omit<TApiKeys, TImmutableDBKeys>>;
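The change from Omit&lt;z.input&lt;typeof Schema&gt;, ...&gt; to Omit&lt;TApiKeys, ...&gt; is not purely cosmetic: for columns with schema defaults, the input type marks them optional while the inferred output type requires them. A toy sketch of the distinction, with an illustrative schema:

import { z } from "zod";

const ToySchema = z.object({
  id: z.string().uuid(),
  approvals: z.number().default(1)
});

type ToyInput = z.input<typeof ToySchema>; // { id: string; approvals?: number }
type ToyOutput = z.infer<typeof ToySchema>; // { id: string; approvals: number }

// Only the input type lets an insert payload omit defaulted columns:
const insertPayload: ToyInput = { id: "c0ffee00-1234-4abc-8def-1234567890ab" };
const fullRow: ToyOutput = { id: insertPayload.id, approvals: 1 };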

View File

@ -1,25 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update, just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const AuditLogStreamsSchema = z.object({
id: z.string().uuid(),
url: z.string(),
encryptedHeadersCiphertext: z.string().nullable().optional(),
encryptedHeadersIV: z.string().nullable().optional(),
encryptedHeadersTag: z.string().nullable().optional(),
encryptedHeadersAlgorithm: z.string().nullable().optional(),
encryptedHeadersKeyEncoding: z.string().nullable().optional(),
orgId: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date()
});
export type TAuditLogStreams = z.infer<typeof AuditLogStreamsSchema>;
export type TAuditLogStreamsInsert = Omit<z.input<typeof AuditLogStreamsSchema>, TImmutableDBKeys>;
export type TAuditLogStreamsUpdate = Partial<Omit<z.input<typeof AuditLogStreamsSchema>, TImmutableDBKeys>>;

View File

@ -24,5 +24,5 @@ export const AuditLogsSchema = z.object({
});
export type TAuditLogs = z.infer<typeof AuditLogsSchema>;
export type TAuditLogsInsert = Omit<z.input<typeof AuditLogsSchema>, TImmutableDBKeys>;
export type TAuditLogsUpdate = Partial<Omit<z.input<typeof AuditLogsSchema>, TImmutableDBKeys>>;
export type TAuditLogsInsert = Omit<TAuditLogs, TImmutableDBKeys>;
export type TAuditLogsUpdate = Partial<Omit<TAuditLogs, TImmutableDBKeys>>;

View File

@ -20,5 +20,5 @@ export const AuthTokenSessionsSchema = z.object({
});
export type TAuthTokenSessions = z.infer<typeof AuthTokenSessionsSchema>;
export type TAuthTokenSessionsInsert = Omit<z.input<typeof AuthTokenSessionsSchema>, TImmutableDBKeys>;
export type TAuthTokenSessionsUpdate = Partial<Omit<z.input<typeof AuthTokenSessionsSchema>, TImmutableDBKeys>>;
export type TAuthTokenSessionsInsert = Omit<TAuthTokenSessions, TImmutableDBKeys>;
export type TAuthTokenSessionsUpdate = Partial<Omit<TAuthTokenSessions, TImmutableDBKeys>>;

View File

@ -21,5 +21,5 @@ export const AuthTokensSchema = z.object({
});
export type TAuthTokens = z.infer<typeof AuthTokensSchema>;
export type TAuthTokensInsert = Omit<z.input<typeof AuthTokensSchema>, TImmutableDBKeys>;
export type TAuthTokensUpdate = Partial<Omit<z.input<typeof AuthTokensSchema>, TImmutableDBKeys>>;
export type TAuthTokensInsert = Omit<TAuthTokens, TImmutableDBKeys>;
export type TAuthTokensUpdate = Partial<Omit<TAuthTokens, TImmutableDBKeys>>;

View File

@ -22,5 +22,5 @@ export const BackupPrivateKeySchema = z.object({
});
export type TBackupPrivateKey = z.infer<typeof BackupPrivateKeySchema>;
export type TBackupPrivateKeyInsert = Omit<z.input<typeof BackupPrivateKeySchema>, TImmutableDBKeys>;
export type TBackupPrivateKeyUpdate = Partial<Omit<z.input<typeof BackupPrivateKeySchema>, TImmutableDBKeys>>;
export type TBackupPrivateKeyInsert = Omit<TBackupPrivateKey, TImmutableDBKeys>;
export type TBackupPrivateKeyUpdate = Partial<Omit<TBackupPrivateKey, TImmutableDBKeys>>;

View File

@ -1,24 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update, just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const DynamicSecretLeasesSchema = z.object({
id: z.string().uuid(),
version: z.number(),
externalEntityId: z.string(),
expireAt: z.date(),
status: z.string().nullable().optional(),
statusDetails: z.string().nullable().optional(),
dynamicSecretId: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date()
});
export type TDynamicSecretLeases = z.infer<typeof DynamicSecretLeasesSchema>;
export type TDynamicSecretLeasesInsert = Omit<z.input<typeof DynamicSecretLeasesSchema>, TImmutableDBKeys>;
export type TDynamicSecretLeasesUpdate = Partial<Omit<z.input<typeof DynamicSecretLeasesSchema>, TImmutableDBKeys>>;

View File

@ -1,31 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update, just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const DynamicSecretsSchema = z.object({
id: z.string().uuid(),
name: z.string(),
version: z.number(),
type: z.string(),
defaultTTL: z.string(),
maxTTL: z.string().nullable().optional(),
inputIV: z.string(),
inputCiphertext: z.string(),
inputTag: z.string(),
algorithm: z.string().default("aes-256-gcm"),
keyEncoding: z.string().default("utf8"),
folderId: z.string().uuid(),
status: z.string().nullable().optional(),
statusDetails: z.string().nullable().optional(),
createdAt: z.date(),
updatedAt: z.date()
});
export type TDynamicSecrets = z.infer<typeof DynamicSecretsSchema>;
export type TDynamicSecretsInsert = Omit<z.input<typeof DynamicSecretsSchema>, TImmutableDBKeys>;
export type TDynamicSecretsUpdate = Partial<Omit<z.input<typeof DynamicSecretsSchema>, TImmutableDBKeys>>;

View File

@ -17,5 +17,5 @@ export const GitAppInstallSessionsSchema = z.object({
});
export type TGitAppInstallSessions = z.infer<typeof GitAppInstallSessionsSchema>;
export type TGitAppInstallSessionsInsert = Omit<z.input<typeof GitAppInstallSessionsSchema>, TImmutableDBKeys>;
export type TGitAppInstallSessionsUpdate = Partial<Omit<z.input<typeof GitAppInstallSessionsSchema>, TImmutableDBKeys>>;
export type TGitAppInstallSessionsInsert = Omit<TGitAppInstallSessions, TImmutableDBKeys>;
export type TGitAppInstallSessionsUpdate = Partial<Omit<TGitAppInstallSessions, TImmutableDBKeys>>;

View File

@ -17,5 +17,5 @@ export const GitAppOrgSchema = z.object({
});
export type TGitAppOrg = z.infer<typeof GitAppOrgSchema>;
export type TGitAppOrgInsert = Omit<z.input<typeof GitAppOrgSchema>, TImmutableDBKeys>;
export type TGitAppOrgUpdate = Partial<Omit<z.input<typeof GitAppOrgSchema>, TImmutableDBKeys>>;
export type TGitAppOrgInsert = Omit<TGitAppOrg, TImmutableDBKeys>;
export type TGitAppOrgUpdate = Partial<Omit<TGitAppOrg, TImmutableDBKeys>>;

View File

@ -1,31 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update, just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const GroupProjectMembershipRolesSchema = z.object({
id: z.string().uuid(),
role: z.string(),
projectMembershipId: z.string().uuid(),
customRoleId: z.string().uuid().nullable().optional(),
isTemporary: z.boolean().default(false),
temporaryMode: z.string().nullable().optional(),
temporaryRange: z.string().nullable().optional(),
temporaryAccessStartTime: z.date().nullable().optional(),
temporaryAccessEndTime: z.date().nullable().optional(),
createdAt: z.date(),
updatedAt: z.date()
});
export type TGroupProjectMembershipRoles = z.infer<typeof GroupProjectMembershipRolesSchema>;
export type TGroupProjectMembershipRolesInsert = Omit<
z.input<typeof GroupProjectMembershipRolesSchema>,
TImmutableDBKeys
>;
export type TGroupProjectMembershipRolesUpdate = Partial<
Omit<z.input<typeof GroupProjectMembershipRolesSchema>, TImmutableDBKeys>
>;

View File

@ -1,22 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update, just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const GroupProjectMembershipsSchema = z.object({
id: z.string().uuid(),
projectId: z.string(),
groupId: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date()
});
export type TGroupProjectMemberships = z.infer<typeof GroupProjectMembershipsSchema>;
export type TGroupProjectMembershipsInsert = Omit<z.input<typeof GroupProjectMembershipsSchema>, TImmutableDBKeys>;
export type TGroupProjectMembershipsUpdate = Partial<
Omit<z.input<typeof GroupProjectMembershipsSchema>, TImmutableDBKeys>
>;

View File

@ -1,23 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update, just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const GroupsSchema = z.object({
id: z.string().uuid(),
orgId: z.string().uuid(),
name: z.string(),
slug: z.string(),
role: z.string(),
roleId: z.string().uuid().nullable().optional(),
createdAt: z.date(),
updatedAt: z.date()
});
export type TGroups = z.infer<typeof GroupsSchema>;
export type TGroupsInsert = Omit<z.input<typeof GroupsSchema>, TImmutableDBKeys>;
export type TGroupsUpdate = Partial<Omit<z.input<typeof GroupsSchema>, TImmutableDBKeys>>;

View File

@ -16,5 +16,5 @@ export const IdentitiesSchema = z.object({
});
export type TIdentities = z.infer<typeof IdentitiesSchema>;
export type TIdentitiesInsert = Omit<z.input<typeof IdentitiesSchema>, TImmutableDBKeys>;
export type TIdentitiesUpdate = Partial<Omit<z.input<typeof IdentitiesSchema>, TImmutableDBKeys>>;
export type TIdentitiesInsert = Omit<TIdentities, TImmutableDBKeys>;
export type TIdentitiesUpdate = Partial<Omit<TIdentities, TImmutableDBKeys>>;

View File

@ -23,5 +23,5 @@ export const IdentityAccessTokensSchema = z.object({
});
export type TIdentityAccessTokens = z.infer<typeof IdentityAccessTokensSchema>;
export type TIdentityAccessTokensInsert = Omit<z.input<typeof IdentityAccessTokensSchema>, TImmutableDBKeys>;
export type TIdentityAccessTokensUpdate = Partial<Omit<z.input<typeof IdentityAccessTokensSchema>, TImmutableDBKeys>>;
export type TIdentityAccessTokensInsert = Omit<TIdentityAccessTokens, TImmutableDBKeys>;
export type TIdentityAccessTokensUpdate = Partial<Omit<TIdentityAccessTokens, TImmutableDBKeys>>;

View File

@ -1,26 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update, just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const IdentityAwsIamAuthsSchema = z.object({
id: z.string().uuid(),
accessTokenTTL: z.coerce.number().default(7200),
accessTokenMaxTTL: z.coerce.number().default(7200),
accessTokenNumUsesLimit: z.coerce.number().default(0),
accessTokenTrustedIps: z.unknown(),
createdAt: z.date(),
updatedAt: z.date(),
identityId: z.string().uuid(),
stsEndpoint: z.string(),
allowedPrincipalArns: z.string(),
allowedAccountIds: z.string()
});
export type TIdentityAwsIamAuths = z.infer<typeof IdentityAwsIamAuthsSchema>;
export type TIdentityAwsIamAuthsInsert = Omit<z.input<typeof IdentityAwsIamAuthsSchema>, TImmutableDBKeys>;
export type TIdentityAwsIamAuthsUpdate = Partial<Omit<z.input<typeof IdentityAwsIamAuthsSchema>, TImmutableDBKeys>>;
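The TTL columns are declared as bigInteger in the corresponding migration earlier in this diff, and the node-postgres driver returns int8 values as strings by default, which is presumably why z.coerce.number() is used here. A small sketch:

import { z } from "zod";

const ttl = z.coerce.number().default(7200);

console.log(ttl.parse("7200")); // 7200 — string from the driver, coerced
console.log(ttl.parse(undefined)); // 7200 — default applied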

View File

@ -18,7 +18,5 @@ export const IdentityOrgMembershipsSchema = z.object({
});
export type TIdentityOrgMemberships = z.infer<typeof IdentityOrgMembershipsSchema>;
export type TIdentityOrgMembershipsInsert = Omit<z.input<typeof IdentityOrgMembershipsSchema>, TImmutableDBKeys>;
export type TIdentityOrgMembershipsUpdate = Partial<
Omit<z.input<typeof IdentityOrgMembershipsSchema>, TImmutableDBKeys>
>;
export type TIdentityOrgMembershipsInsert = Omit<TIdentityOrgMemberships, TImmutableDBKeys>;
export type TIdentityOrgMembershipsUpdate = Partial<Omit<TIdentityOrgMemberships, TImmutableDBKeys>>;

View File

@ -1,31 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update, just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const IdentityProjectAdditionalPrivilegeSchema = z.object({
id: z.string().uuid(),
slug: z.string(),
projectMembershipId: z.string().uuid(),
isTemporary: z.boolean().default(false),
temporaryMode: z.string().nullable().optional(),
temporaryRange: z.string().nullable().optional(),
temporaryAccessStartTime: z.date().nullable().optional(),
temporaryAccessEndTime: z.date().nullable().optional(),
permissions: z.unknown(),
createdAt: z.date(),
updatedAt: z.date()
});
export type TIdentityProjectAdditionalPrivilege = z.infer<typeof IdentityProjectAdditionalPrivilegeSchema>;
export type TIdentityProjectAdditionalPrivilegeInsert = Omit<
z.input<typeof IdentityProjectAdditionalPrivilegeSchema>,
TImmutableDBKeys
>;
export type TIdentityProjectAdditionalPrivilegeUpdate = Partial<
Omit<z.input<typeof IdentityProjectAdditionalPrivilegeSchema>, TImmutableDBKeys>
>;

View File

@ -1,31 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update, just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const IdentityProjectMembershipRoleSchema = z.object({
id: z.string().uuid(),
role: z.string(),
projectMembershipId: z.string().uuid(),
customRoleId: z.string().uuid().nullable().optional(),
isTemporary: z.boolean().default(false),
temporaryMode: z.string().nullable().optional(),
temporaryRange: z.string().nullable().optional(),
temporaryAccessStartTime: z.date().nullable().optional(),
temporaryAccessEndTime: z.date().nullable().optional(),
createdAt: z.date(),
updatedAt: z.date()
});
export type TIdentityProjectMembershipRole = z.infer<typeof IdentityProjectMembershipRoleSchema>;
export type TIdentityProjectMembershipRoleInsert = Omit<
z.input<typeof IdentityProjectMembershipRoleSchema>,
TImmutableDBKeys
>;
export type TIdentityProjectMembershipRoleUpdate = Partial<
Omit<z.input<typeof IdentityProjectMembershipRoleSchema>, TImmutableDBKeys>
>;

View File

@ -9,6 +9,8 @@ import { TImmutableDBKeys } from "./models";
export const IdentityProjectMembershipsSchema = z.object({
id: z.string().uuid(),
role: z.string(),
roleId: z.string().uuid().nullable().optional(),
projectId: z.string(),
identityId: z.string().uuid(),
createdAt: z.date(),
@ -16,10 +18,5 @@ export const IdentityProjectMembershipsSchema = z.object({
});
export type TIdentityProjectMemberships = z.infer<typeof IdentityProjectMembershipsSchema>;
export type TIdentityProjectMembershipsInsert = Omit<
z.input<typeof IdentityProjectMembershipsSchema>,
TImmutableDBKeys
>;
export type TIdentityProjectMembershipsUpdate = Partial<
Omit<z.input<typeof IdentityProjectMembershipsSchema>, TImmutableDBKeys>
>;
export type TIdentityProjectMembershipsInsert = Omit<TIdentityProjectMemberships, TImmutableDBKeys>;
export type TIdentityProjectMembershipsUpdate = Partial<Omit<TIdentityProjectMemberships, TImmutableDBKeys>>;

View File

@ -23,7 +23,5 @@ export const IdentityUaClientSecretsSchema = z.object({
});
export type TIdentityUaClientSecrets = z.infer<typeof IdentityUaClientSecretsSchema>;
export type TIdentityUaClientSecretsInsert = Omit<z.input<typeof IdentityUaClientSecretsSchema>, TImmutableDBKeys>;
export type TIdentityUaClientSecretsUpdate = Partial<
Omit<z.input<typeof IdentityUaClientSecretsSchema>, TImmutableDBKeys>
>;
export type TIdentityUaClientSecretsInsert = Omit<TIdentityUaClientSecrets, TImmutableDBKeys>;
export type TIdentityUaClientSecretsUpdate = Partial<Omit<TIdentityUaClientSecrets, TImmutableDBKeys>>;

View File

@ -21,7 +21,5 @@ export const IdentityUniversalAuthsSchema = z.object({
});
export type TIdentityUniversalAuths = z.infer<typeof IdentityUniversalAuthsSchema>;
export type TIdentityUniversalAuthsInsert = Omit<z.input<typeof IdentityUniversalAuthsSchema>, TImmutableDBKeys>;
export type TIdentityUniversalAuthsUpdate = Partial<
Omit<z.input<typeof IdentityUniversalAuthsSchema>, TImmutableDBKeys>
>;
export type TIdentityUniversalAuthsInsert = Omit<TIdentityUniversalAuths, TImmutableDBKeys>;
export type TIdentityUniversalAuthsUpdate = Partial<Omit<TIdentityUniversalAuths, TImmutableDBKeys>>;

View File

@ -16,5 +16,5 @@ export const IncidentContactsSchema = z.object({
});
export type TIncidentContacts = z.infer<typeof IncidentContactsSchema>;
export type TIncidentContactsInsert = Omit<z.input<typeof IncidentContactsSchema>, TImmutableDBKeys>;
export type TIncidentContactsUpdate = Partial<Omit<z.input<typeof IncidentContactsSchema>, TImmutableDBKeys>>;
export type TIncidentContactsInsert = Omit<TIncidentContacts, TImmutableDBKeys>;
export type TIncidentContactsUpdate = Partial<Omit<TIncidentContacts, TImmutableDBKeys>>;

View File

@ -1,34 +1,19 @@
export * from "./access-approval-policies";
export * from "./access-approval-policies-approvers";
export * from "./access-approval-requests";
export * from "./access-approval-requests-reviewers";
export * from "./api-keys";
export * from "./audit-log-streams";
export * from "./audit-logs";
export * from "./auth-token-sessions";
export * from "./auth-tokens";
export * from "./backup-private-key";
export * from "./dynamic-secret-leases";
export * from "./dynamic-secrets";
export * from "./git-app-install-sessions";
export * from "./git-app-org";
export * from "./group-project-membership-roles";
export * from "./group-project-memberships";
export * from "./groups";
export * from "./identities";
export * from "./identity-access-tokens";
export * from "./identity-aws-iam-auths";
export * from "./identity-org-memberships";
export * from "./identity-project-additional-privilege";
export * from "./identity-project-membership-role";
export * from "./identity-project-memberships";
export * from "./identity-ua-client-secrets";
export * from "./identity-universal-auths";
export * from "./incident-contacts";
export * from "./integration-auths";
export * from "./integrations";
export * from "./ldap-configs";
export * from "./ldap-group-maps";
export * from "./models";
export * from "./org-bots";
export * from "./org-memberships";
@ -39,8 +24,6 @@ export * from "./project-environments";
export * from "./project-keys";
export * from "./project-memberships";
export * from "./project-roles";
export * from "./project-user-additional-privilege";
export * from "./project-user-membership-roles";
export * from "./projects";
export * from "./saml-configs";
export * from "./scim-tokens";
@ -69,8 +52,6 @@ export * from "./service-tokens";
export * from "./super-admin";
export * from "./trusted-ips";
export * from "./user-actions";
export * from "./user-aliases";
export * from "./user-encryption-keys";
export * from "./user-group-membership";
export * from "./users";
export * from "./webhooks";

View File

@ -33,5 +33,5 @@ export const IntegrationAuthsSchema = z.object({
});
export type TIntegrationAuths = z.infer<typeof IntegrationAuthsSchema>;
export type TIntegrationAuthsInsert = Omit<z.input<typeof IntegrationAuthsSchema>, TImmutableDBKeys>;
export type TIntegrationAuthsUpdate = Partial<Omit<z.input<typeof IntegrationAuthsSchema>, TImmutableDBKeys>>;
export type TIntegrationAuthsInsert = Omit<TIntegrationAuths, TImmutableDBKeys>;
export type TIntegrationAuthsUpdate = Partial<Omit<TIntegrationAuths, TImmutableDBKeys>>;

View File: backend/src/db/schemas/integrations.ts

@ -27,10 +27,9 @@ export const IntegrationsSchema = z.object({
envId: z.string().uuid(),
secretPath: z.string().default("/"),
createdAt: z.date(),
updatedAt: z.date(),
lastUsed: z.date().nullable().optional()
updatedAt: z.date()
});
export type TIntegrations = z.infer<typeof IntegrationsSchema>;
export type TIntegrationsInsert = Omit<z.input<typeof IntegrationsSchema>, TImmutableDBKeys>;
export type TIntegrationsUpdate = Partial<Omit<z.input<typeof IntegrationsSchema>, TImmutableDBKeys>>;
export type TIntegrationsInsert = Omit<TIntegrations, TImmutableDBKeys>;
export type TIntegrationsUpdate = Partial<Omit<TIntegrations, TImmutableDBKeys>>;

View File: backend/src/db/schemas/ldap-configs.ts

@ -1,34 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const LdapConfigsSchema = z.object({
id: z.string().uuid(),
orgId: z.string().uuid(),
isActive: z.boolean(),
url: z.string(),
encryptedBindDN: z.string(),
bindDNIV: z.string(),
bindDNTag: z.string(),
encryptedBindPass: z.string(),
bindPassIV: z.string(),
bindPassTag: z.string(),
searchBase: z.string(),
encryptedCACert: z.string(),
caCertIV: z.string(),
caCertTag: z.string(),
createdAt: z.date(),
updatedAt: z.date(),
groupSearchBase: z.string().default(""),
groupSearchFilter: z.string().default(""),
searchFilter: z.string().default("")
});
export type TLdapConfigs = z.infer<typeof LdapConfigsSchema>;
export type TLdapConfigsInsert = Omit<z.input<typeof LdapConfigsSchema>, TImmutableDBKeys>;
export type TLdapConfigsUpdate = Partial<Omit<z.input<typeof LdapConfigsSchema>, TImmutableDBKeys>>;
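The deleted LdapConfigsSchema stores each LDAP secret as a triplet — encrypted value, IV, and auth tag — which is the shape an AEAD cipher such as AES-256-GCM produces. As an illustration only (the cipher choice is an assumption, and this is not the repository's code), Node's crypto module yields the same three artifacts:

import crypto from "crypto";

// Encrypt one field and return the triplet a row like this could persist.
// `key` must be 32 bytes for aes-256-gcm.
function encryptField(plaintext: string, key: Buffer) {
  const iv = crypto.randomBytes(12); // 96-bit IV, the recommended size for GCM
  const cipher = crypto.createCipheriv("aes-256-gcm", key, iv);
  const ciphertext = Buffer.concat([cipher.update(plaintext, "utf8"), cipher.final()]);
  const tag = cipher.getAuthTag(); // only valid after final()
  return {
    encrypted: ciphertext.toString("base64"),
    iv: iv.toString("base64"),
    tag: tag.toString("base64")
  };
}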

View File: backend/src/db/schemas/ldap-group-maps.ts

@ -1,19 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const LdapGroupMapsSchema = z.object({
id: z.string().uuid(),
ldapConfigId: z.string().uuid(),
ldapGroupCN: z.string(),
groupId: z.string().uuid()
});
export type TLdapGroupMaps = z.infer<typeof LdapGroupMapsSchema>;
export type TLdapGroupMapsInsert = Omit<z.input<typeof LdapGroupMapsSchema>, TImmutableDBKeys>;
export type TLdapGroupMapsUpdate = Partial<Omit<z.input<typeof LdapGroupMapsSchema>, TImmutableDBKeys>>;

View File: backend/src/db/schemas/models.ts

@ -2,11 +2,6 @@ import { z } from "zod";
export enum TableName {
Users = "users",
Groups = "groups",
GroupProjectMembership = "group_project_memberships",
GroupProjectMembershipRole = "group_project_membership_roles",
UserGroupMembership = "user_group_membership",
UserAliases = "user_aliases",
UserEncryptionKey = "user_encryption_keys",
AuthTokens = "auth_tokens",
AuthTokenSession = "auth_token_sessions",
@ -24,8 +19,6 @@ export enum TableName {
Environment = "project_environments",
ProjectMembership = "project_memberships",
ProjectRoles = "project_roles",
ProjectUserAdditionalPrivilege = "project_user_additional_privilege",
ProjectUserMembershipRole = "project_user_membership_roles",
ProjectKeys = "project_keys",
Secret = "secrets",
SecretBlindIndex = "secret_blind_indexes",
@ -45,16 +38,9 @@ export enum TableName {
IdentityAccessToken = "identity_access_tokens",
IdentityUniversalAuth = "identity_universal_auths",
IdentityUaClientSecret = "identity_ua_client_secrets",
IdentityAwsIamAuth = "identity_aws_iam_auths",
IdentityOrgMembership = "identity_org_memberships",
IdentityProjectMembership = "identity_project_memberships",
IdentityProjectMembershipRole = "identity_project_membership_role",
IdentityProjectAdditionalPrivilege = "identity_project_additional_privilege",
ScimToken = "scim_tokens",
AccessApprovalPolicy = "access_approval_policies",
AccessApprovalPolicyApprover = "access_approval_policies_approvers",
AccessApprovalRequest = "access_approval_requests",
AccessApprovalRequestReviewer = "access_approval_requests_reviewers",
SecretApprovalPolicy = "secret_approval_policies",
SecretApprovalPolicyApprover = "secret_approval_policies_approvers",
SecretApprovalRequest = "secret_approval_requests",
@ -64,16 +50,11 @@ export enum TableName {
SecretRotation = "secret_rotations",
SecretRotationOutput = "secret_rotation_outputs",
SamlConfig = "saml_configs",
LdapConfig = "ldap_configs",
LdapGroupMap = "ldap_group_maps",
AuditLog = "audit_logs",
AuditLogStream = "audit_log_streams",
GitAppInstallSession = "git_app_install_sessions",
GitAppOrg = "git_app_org",
SecretScanningGitRisk = "secret_scanning_git_risks",
TrustedIps = "trusted_ips",
DynamicSecret = "dynamic_secrets",
DynamicSecretLease = "dynamic_secret_leases",
// junction tables with tags
JnSecretTag = "secret_tag_junction",
SecretVersionTag = "secret_version_tag_junction"
@ -143,6 +124,5 @@ export enum ProjectUpgradeStatus {
}
export enum IdentityAuthMethod {
Univeral = "universal-auth",
AWS_IAM_AUTH = "aws-iam-auth"
Univeral = "universal-auth"
}
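TableName is the single registry of physical table names, so the entries removed above (LDAP, access approval, dynamic secrets, AWS IAM auth, and the extra membership-role/privilege tables) disappear from every query site at compile time. A hypothetical usage sketch — the backend queries Postgres through Knex, but this exact call is illustrative, not taken from the repo:

import knex from "knex";
import { TableName } from "./models";

const db = knex({ client: "pg", connection: process.env.DB_CONNECTION_URI });

// String-enum members serialize to the physical table name, e.g. "project_roles".
async function listProjectRoles(projectId: string) {
  return db(TableName.ProjectRoles).where({ projectId }).select("*");
}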

View File: backend/src/db/schemas/org-bots.ts

@ -27,5 +27,5 @@ export const OrgBotsSchema = z.object({
});
export type TOrgBots = z.infer<typeof OrgBotsSchema>;
export type TOrgBotsInsert = Omit<z.input<typeof OrgBotsSchema>, TImmutableDBKeys>;
export type TOrgBotsUpdate = Partial<Omit<z.input<typeof OrgBotsSchema>, TImmutableDBKeys>>;
export type TOrgBotsInsert = Omit<TOrgBots, TImmutableDBKeys>;
export type TOrgBotsUpdate = Partial<Omit<TOrgBots, TImmutableDBKeys>>;

View File: backend/src/db/schemas/org-memberships.ts

@ -20,5 +20,5 @@ export const OrgMembershipsSchema = z.object({
});
export type TOrgMemberships = z.infer<typeof OrgMembershipsSchema>;
export type TOrgMembershipsInsert = Omit<z.input<typeof OrgMembershipsSchema>, TImmutableDBKeys>;
export type TOrgMembershipsUpdate = Partial<Omit<z.input<typeof OrgMembershipsSchema>, TImmutableDBKeys>>;
export type TOrgMembershipsInsert = Omit<TOrgMemberships, TImmutableDBKeys>;
export type TOrgMembershipsUpdate = Partial<Omit<TOrgMemberships, TImmutableDBKeys>>;

View File: backend/src/db/schemas/org-roles.ts

@ -19,5 +19,5 @@ export const OrgRolesSchema = z.object({
});
export type TOrgRoles = z.infer<typeof OrgRolesSchema>;
export type TOrgRolesInsert = Omit<z.input<typeof OrgRolesSchema>, TImmutableDBKeys>;
export type TOrgRolesUpdate = Partial<Omit<z.input<typeof OrgRolesSchema>, TImmutableDBKeys>>;
export type TOrgRolesInsert = Omit<TOrgRoles, TImmutableDBKeys>;
export type TOrgRolesUpdate = Partial<Omit<TOrgRoles, TImmutableDBKeys>>;

View File: backend/src/db/schemas/organizations.ts

@ -19,5 +19,5 @@ export const OrganizationsSchema = z.object({
});
export type TOrganizations = z.infer<typeof OrganizationsSchema>;
export type TOrganizationsInsert = Omit<z.input<typeof OrganizationsSchema>, TImmutableDBKeys>;
export type TOrganizationsUpdate = Partial<Omit<z.input<typeof OrganizationsSchema>, TImmutableDBKeys>>;
export type TOrganizationsInsert = Omit<TOrganizations, TImmutableDBKeys>;
export type TOrganizationsUpdate = Partial<Omit<TOrganizations, TImmutableDBKeys>>;

View File: backend/src/db/schemas/project-bots.ts

@ -26,5 +26,5 @@ export const ProjectBotsSchema = z.object({
});
export type TProjectBots = z.infer<typeof ProjectBotsSchema>;
export type TProjectBotsInsert = Omit<z.input<typeof ProjectBotsSchema>, TImmutableDBKeys>;
export type TProjectBotsUpdate = Partial<Omit<z.input<typeof ProjectBotsSchema>, TImmutableDBKeys>>;
export type TProjectBotsInsert = Omit<TProjectBots, TImmutableDBKeys>;
export type TProjectBotsUpdate = Partial<Omit<TProjectBots, TImmutableDBKeys>>;

View File: backend/src/db/schemas/project-environments.ts

@ -18,5 +18,5 @@ export const ProjectEnvironmentsSchema = z.object({
});
export type TProjectEnvironments = z.infer<typeof ProjectEnvironmentsSchema>;
export type TProjectEnvironmentsInsert = Omit<z.input<typeof ProjectEnvironmentsSchema>, TImmutableDBKeys>;
export type TProjectEnvironmentsUpdate = Partial<Omit<z.input<typeof ProjectEnvironmentsSchema>, TImmutableDBKeys>>;
export type TProjectEnvironmentsInsert = Omit<TProjectEnvironments, TImmutableDBKeys>;
export type TProjectEnvironmentsUpdate = Partial<Omit<TProjectEnvironments, TImmutableDBKeys>>;

View File: backend/src/db/schemas/project-keys.ts

@ -19,5 +19,5 @@ export const ProjectKeysSchema = z.object({
});
export type TProjectKeys = z.infer<typeof ProjectKeysSchema>;
export type TProjectKeysInsert = Omit<z.input<typeof ProjectKeysSchema>, TImmutableDBKeys>;
export type TProjectKeysUpdate = Partial<Omit<z.input<typeof ProjectKeysSchema>, TImmutableDBKeys>>;
export type TProjectKeysInsert = Omit<TProjectKeys, TImmutableDBKeys>;
export type TProjectKeysUpdate = Partial<Omit<TProjectKeys, TImmutableDBKeys>>;

View File: backend/src/db/schemas/project-memberships.ts

@ -9,12 +9,14 @@ import { TImmutableDBKeys } from "./models";
export const ProjectMembershipsSchema = z.object({
id: z.string().uuid(),
role: z.string(),
createdAt: z.date(),
updatedAt: z.date(),
userId: z.string().uuid(),
projectId: z.string()
projectId: z.string(),
roleId: z.string().uuid().nullable().optional()
});
export type TProjectMemberships = z.infer<typeof ProjectMembershipsSchema>;
export type TProjectMembershipsInsert = Omit<z.input<typeof ProjectMembershipsSchema>, TImmutableDBKeys>;
export type TProjectMembershipsUpdate = Partial<Omit<z.input<typeof ProjectMembershipsSchema>, TImmutableDBKeys>>;
export type TProjectMembershipsInsert = Omit<TProjectMemberships, TImmutableDBKeys>;
export type TProjectMembershipsUpdate = Partial<Omit<TProjectMemberships, TImmutableDBKeys>>;

View File: backend/src/db/schemas/project-roles.ts

@ -19,5 +19,5 @@ export const ProjectRolesSchema = z.object({
});
export type TProjectRoles = z.infer<typeof ProjectRolesSchema>;
export type TProjectRolesInsert = Omit<z.input<typeof ProjectRolesSchema>, TImmutableDBKeys>;
export type TProjectRolesUpdate = Partial<Omit<z.input<typeof ProjectRolesSchema>, TImmutableDBKeys>>;
export type TProjectRolesInsert = Omit<TProjectRoles, TImmutableDBKeys>;
export type TProjectRolesUpdate = Partial<Omit<TProjectRoles, TImmutableDBKeys>>;

View File: backend/src/db/schemas/project-user-additional-privilege.ts

@ -1,31 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const ProjectUserAdditionalPrivilegeSchema = z.object({
id: z.string().uuid(),
slug: z.string(),
projectMembershipId: z.string().uuid(),
isTemporary: z.boolean().default(false),
temporaryMode: z.string().nullable().optional(),
temporaryRange: z.string().nullable().optional(),
temporaryAccessStartTime: z.date().nullable().optional(),
temporaryAccessEndTime: z.date().nullable().optional(),
permissions: z.unknown(),
createdAt: z.date(),
updatedAt: z.date()
});
export type TProjectUserAdditionalPrivilege = z.infer<typeof ProjectUserAdditionalPrivilegeSchema>;
export type TProjectUserAdditionalPrivilegeInsert = Omit<
z.input<typeof ProjectUserAdditionalPrivilegeSchema>,
TImmutableDBKeys
>;
export type TProjectUserAdditionalPrivilegeUpdate = Partial<
Omit<z.input<typeof ProjectUserAdditionalPrivilegeSchema>, TImmutableDBKeys>
>;

View File: backend/src/db/schemas/project-user-membership-roles.ts

@ -1,31 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const ProjectUserMembershipRolesSchema = z.object({
id: z.string().uuid(),
role: z.string(),
projectMembershipId: z.string().uuid(),
customRoleId: z.string().uuid().nullable().optional(),
isTemporary: z.boolean().default(false),
temporaryMode: z.string().nullable().optional(),
temporaryRange: z.string().nullable().optional(),
temporaryAccessStartTime: z.date().nullable().optional(),
temporaryAccessEndTime: z.date().nullable().optional(),
createdAt: z.date(),
updatedAt: z.date()
});
export type TProjectUserMembershipRoles = z.infer<typeof ProjectUserMembershipRolesSchema>;
export type TProjectUserMembershipRolesInsert = Omit<
z.input<typeof ProjectUserMembershipRolesSchema>,
TImmutableDBKeys
>;
export type TProjectUserMembershipRolesUpdate = Partial<
Omit<z.input<typeof ProjectUserMembershipRolesSchema>, TImmutableDBKeys>
>;

View File: backend/src/db/schemas/projects.ts

@ -20,5 +20,5 @@ export const ProjectsSchema = z.object({
});
export type TProjects = z.infer<typeof ProjectsSchema>;
export type TProjectsInsert = Omit<z.input<typeof ProjectsSchema>, TImmutableDBKeys>;
export type TProjectsUpdate = Partial<Omit<z.input<typeof ProjectsSchema>, TImmutableDBKeys>>;
export type TProjectsInsert = Omit<TProjects, TImmutableDBKeys>;
export type TProjectsUpdate = Partial<Omit<TProjects, TImmutableDBKeys>>;

View File: backend/src/db/schemas/saml-configs.ts

@ -27,5 +27,5 @@ export const SamlConfigsSchema = z.object({
});
export type TSamlConfigs = z.infer<typeof SamlConfigsSchema>;
export type TSamlConfigsInsert = Omit<z.input<typeof SamlConfigsSchema>, TImmutableDBKeys>;
export type TSamlConfigsUpdate = Partial<Omit<z.input<typeof SamlConfigsSchema>, TImmutableDBKeys>>;
export type TSamlConfigsInsert = Omit<TSamlConfigs, TImmutableDBKeys>;
export type TSamlConfigsUpdate = Partial<Omit<TSamlConfigs, TImmutableDBKeys>>;

Some files were not shown because too many files have changed in this diff.