Compare commits


1 Commit

Author SHA1 Message Date
d4a7cf93b0 test 2024-02-05 14:52:58 -05:00
555 changed files with 12481 additions and 21033 deletions

View File

@ -3,22 +3,24 @@
# THIS IS A SAMPLE ENCRYPTION KEY AND SHOULD NEVER BE USED FOR PRODUCTION
ENCRYPTION_KEY=6c1fe4e407b8911c104518103505b218
# Required
DB_CONNECTION_URI=postgres://${POSTGRES_USER}:${POSTGRES_PASSWORD}@db:5432/${POSTGRES_DB}
# JWT
# Required secrets to sign JWT tokens
# THIS IS A SAMPLE AUTH_SECRET KEY AND SHOULD NEVER BE USED FOR PRODUCTION
AUTH_SECRET=5lrMXKKWCVocS/uerPsl7V+TX/aaUaI7iDkgl3tSmLE=
# Postgres creds
POSTGRES_PASSWORD=infisical
POSTGRES_USER=infisical
POSTGRES_DB=infisical
# MongoDB
# The backend will connect to the MongoDB instance at the connection string MONGO_URL, which can
# either reference the MongoDB container instance or a Mongo Cloud deployment
# Required
MONGO_URL=mongodb://root:example@mongo:27017/?authSource=admin
# Redis
REDIS_URL=redis://redis:6379
# Optional credentials for MongoDB container instance and Mongo-Express
MONGO_USERNAME=root
MONGO_PASSWORD=example
# Website URL
# Required
SITE_URL=http://localhost:8080
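The `DB_CONNECTION_URI` above is assembled through docker-compose-style `${VAR}` substitution from the Postgres credentials in the same file. A minimal sketch of how that interpolation resolves, mimicking docker-compose's behavior for illustration only:

```python
import re

# Values taken from the .env example above.
env = {
    "POSTGRES_USER": "infisical",
    "POSTGRES_PASSWORD": "infisical",
    "POSTGRES_DB": "infisical",
}

template = "postgres://${POSTGRES_USER}:${POSTGRES_PASSWORD}@db:5432/${POSTGRES_DB}"

# docker-compose substitutes ${VAR} from the environment / .env file;
# this mimics that for the happy path (no defaults, no escapes).
uri = re.sub(r"\$\{(\w+)\}", lambda m: env[m.group(1)], template)
print(uri)  # postgres://infisical:infisical@db:5432/infisical
```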

View File

@ -1 +0,0 @@
DB_CONNECTION_URI=

View File

@ -1,4 +0,0 @@
REDIS_URL=redis://localhost:6379
DB_CONNECTION_URI=postgres://infisical:infisical@localhost/infisical?sslmode=disable
AUTH_SECRET=4bnfe4e407b8921c104518903515b218
ENCRYPTION_KEY=4bnfe4e407b8921c104518903515b218

View File

@ -1,190 +0,0 @@
# inspired by https://www.photoroom.com/inside-photoroom/how-we-automated-our-changelog-thanks-to-chatgpt
import os
import requests
import re
from openai import OpenAI
import subprocess
from datetime import datetime
import uuid
# Constants
REPO_OWNER = "infisical"
REPO_NAME = "infisical"
TOKEN = os.environ["GITHUB_TOKEN"]
SLACK_WEBHOOK_URL = os.environ["SLACK_WEBHOOK_URL"]
OPENAI_API_KEY = os.environ["OPENAI_API_KEY"]
SLACK_MSG_COLOR = "#36a64f"
headers = {
"Authorization": f"Bearer {TOKEN}",
"Accept": "application/vnd.github+json",
"X-GitHub-Api-Version": "2022-11-28",
}
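# Write a multiline value to GITHUB_OUTPUT using the heredoc-style
# delimiter syntax (name<<DELIMITER ... DELIMITER) that GitHub Actions expects.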
def set_multiline_output(name, value):
with open(os.environ['GITHUB_OUTPUT'], 'a') as fh:
delimiter = uuid.uuid1()
print(f'{name}<<{delimiter}', file=fh)
print(value, file=fh)
print(delimiter, file=fh)
def post_changelog_to_slack(changelog, tag):
slack_payload = {
"text": "Hey team, it's changelog time! :wave:",
"attachments": [
{
"color": SLACK_MSG_COLOR,
"title": f"🗓Infisical Changelog - {tag}",
"text": changelog,
}
],
}
response = requests.post(SLACK_WEBHOOK_URL, json=slack_payload)
if response.status_code != 200:
raise Exception("Failed to post changelog to Slack.")
def find_previous_release_tag(release_tag:str):
previous_tag = subprocess.check_output(["git", "describe", "--tags", "--abbrev=0", f"{release_tag}^"]).decode("utf-8").strip()
while not previous_tag.startswith("infisical/"):
previous_tag = subprocess.check_output(["git", "describe", "--tags", "--abbrev=0", f"{previous_tag}^"]).decode("utf-8").strip()
return previous_tag
def get_tag_creation_date(tag_name):
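# NOTE: assumes a lightweight tag whose ref points directly at a commit;
# an annotated tag ref points at a tag object instead.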
url = f"https://api.github.com/repos/{REPO_OWNER}/{REPO_NAME}/git/refs/tags/{tag_name}"
response = requests.get(url, headers=headers)
response.raise_for_status()
commit_sha = response.json()['object']['sha']
commit_url = f"https://api.github.com/repos/{REPO_OWNER}/{REPO_NAME}/commits/{commit_sha}"
commit_response = requests.get(commit_url, headers=headers)
commit_response.raise_for_status()
creation_date = commit_response.json()['commit']['author']['date']
return datetime.strptime(creation_date, '%Y-%m-%dT%H:%M:%SZ')
def fetch_prs_between_tags(previous_tag_date:datetime, release_tag_date:datetime):
# Use the GitHub API to fetch recently closed PRs; the pulls endpoint has no
# "merged" filter, so fetch the last 100 closed PRs and filter by merge date below.
url = f"https://api.github.com/repos/{REPO_OWNER}/{REPO_NAME}/pulls?state=closed&per_page=100"
response = requests.get(url, headers=headers)
if response.status_code != 200:
raise Exception("Error fetching PRs from GitHub API!")
prs = []
for pr in response.json():
# tags are cut frequently, so the last 100 closed PRs are enough; filter them by the tag creation dates
if pr["merged_at"] and datetime.strptime(pr["merged_at"],'%Y-%m-%dT%H:%M:%SZ') < release_tag_date and datetime.strptime(pr["merged_at"],'%Y-%m-%dT%H:%M:%SZ') > previous_tag_date:
prs.append(pr)
return prs
def extract_commit_details_from_prs(prs):
commit_details = []
for pr in prs:
commit_message = pr["title"]
commit_url = pr["html_url"]
pr_number = pr["number"]
branch_name = pr["head"]["ref"]
issue_numbers = re.findall(r"(www-\d+|web-\d+)", branch_name)
# If no issue numbers are found, add the PR details without issue numbers and URLs
if not issue_numbers:
commit_details.append(
{
"message": commit_message,
"pr_number": pr_number,
"pr_url": commit_url,
"issue_number": None,
"issue_url": None,
}
)
continue
for issue in issue_numbers:
commit_details.append(
{
"message": commit_message,
"pr_number": pr_number,
"pr_url": commit_url,
"issue_number": issue,
}
)
return commit_details
# Function to generate changelog using OpenAI
def generate_changelog_with_openai(commit_details):
commit_messages = []
for details in commit_details:
base_message = f"{details['pr_url']} - {details['message']}"
# Add the issue URL if available
# if details["issue_url"]:
# base_message += f" (Linear Issue: {details['issue_url']})"
commit_messages.append(base_message)
commit_list = "\n".join(commit_messages)
prompt = """
Generate a changelog for Infisical, an open-source SecretOps platform
The changelog should:
1. Be Informative: Using the provided list of GitHub commits, break them down into categories such as Features, Fixes & Improvements, and Technical Updates. Summarize each commit concisely, ensuring the key points are highlighted.
2. Have a Professional yet Friendly tone: The tone should be balanced, not too corporate or too informal.
3. Celebratory Introduction and Conclusion: Start the changelog with a celebratory note acknowledging the team's hard work and progress. End with a shoutout to the team and wishes for a pleasant weekend.
4. Formatting: you cannot use Markdown formatting, and you can only use emojis for the introductory paragraph or the conclusion paragraph, nowhere else.
5. Links: the syntax to create links is the following: `<http://www.example.com|This message is a link>`.
6. Linear Links: note that the Linear link is optional, include it only if provided.
7. Do not wrap your answer in a codeblock. Just output the text, nothing else
Here's a good example to follow, please try to match the formatting as closely as possible, only changing the content of the changelog and have some liberty with the introduction. Notice the importance of the formatting of a changelog item:
- <https://github.com/facebook/react/pull/27304/|#27304>: We optimize our ci to strip comments and minify production builds. (<https://linear.app/example/issue/WEB-1234/|WEB-1234>)
And here's an example of the full changelog:
*Features*
• <https://github.com/facebook/react/pull/27304/|#27304>: We optimize our ci to strip comments and minify production builds. (<https://linear.app/example/issue/WEB-1234/|WEB-1234>)
*Fixes & Improvements*
• <https://github.com/facebook/react/pull/27304/|#27304>: We optimize our ci to strip comments and minify production builds. (<https://linear.app/example/issue/WEB-1234/|WEB-1234>)
*Technical Updates*
• <https://github.com/facebook/react/pull/27304/|#27304>: We optimize our ci to strip comments and minify production builds. (<https://linear.app/example/issue/WEB-1234/|WEB-1234>)
Stay tuned for more exciting updates coming soon!
And here are the commits:
{}
""".format(
commit_list
)
client = OpenAI(api_key=OPENAI_API_KEY)
messages = [{"role": "user", "content": prompt}]
response = client.chat.completions.create(model="gpt-3.5-turbo", messages=messages)
changelog = response.choices[0].message.content
if not changelog:
raise Exception("Error generating changelog with OpenAI!")
return changelog.strip()
if __name__ == "__main__":
try:
# Get the latest and previous release tags
latest_tag = subprocess.check_output(["git", "describe", "--tags", "--abbrev=0"]).decode("utf-8").strip()
previous_tag = find_previous_release_tag(latest_tag)
latest_tag_date = get_tag_creation_date(latest_tag)
previous_tag_date = get_tag_creation_date(previous_tag)
prs = fetch_prs_between_tags(previous_tag_date,latest_tag_date)
pr_details = extract_commit_details_from_prs(prs)
# Generate changelog
changelog = generate_changelog_with_openai(pr_details)
post_changelog_to_slack(changelog,latest_tag)
# Print or post changelog to Slack
# set_multiline_output("changelog", changelog)
except Exception as e:
print(str(e))
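
The branch-name parsing in `extract_commit_details_from_prs` pulls Linear-style issue IDs (`www-*` / `web-*`) out of PR branch names. A quick illustration, using hypothetical branch names:

```python
import re

# The pattern is the same one used in the script above.
for branch in ["feat/web-1234-add-login", "fix/www-99-typo", "chore/update-deps"]:
    print(branch, "->", re.findall(r"(www-\d+|web-\d+)", branch))
# feat/web-1234-add-login -> ['web-1234']
# fix/www-99-typo -> ['www-99']
# chore/update-deps -> []
```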

.github/values.yaml vendored
View File

@ -13,10 +13,11 @@ fullnameOverride: ""
##
infisical:
autoDatabaseSchemaMigration: false
## @param backend.enabled Enable backend
##
enabled: false
## @param backend.name Backend name
##
name: infisical
replicaCount: 3
image:
@ -27,7 +28,7 @@ infisical:
deploymentAnnotations:
secrets.infisical.com/auto-reload: "true"
kubeSecretRef: "managed-secret"
kubeSecretRef: "infisical-gamma-secrets"
ingress:
## @param ingress.enabled Enable ingress
@ -49,9 +50,3 @@ ingress:
- secretName: letsencrypt-prod
hosts:
- gamma.infisical.com
postgresql:
enabled: false
redis:
enabled: false

View File

@ -1,75 +0,0 @@
name: "Check API For Breaking Changes"
on:
pull_request:
types: [opened, synchronize]
paths:
- "backend/src/server/routes/**"
jobs:
check-be-api-changes:
name: Check API Changes
runs-on: ubuntu-latest
timeout-minutes: 15
steps:
- name: Checkout source
uses: actions/checkout@v3
# - name: Setup Node 20
# uses: actions/setup-node@v3
# with:
# node-version: "20"
# uncomment this when testing locally using nektos/act
- uses: KengoTODA/actions-setup-docker-compose@v1
if: ${{ env.ACT }}
name: Install `docker-compose` for local simulations
with:
version: "2.14.2"
- name: 📦 Build the latest image
run: docker build --tag infisical-api .
working-directory: backend
- name: Start postgres and redis
run: touch .env && docker-compose -f docker-compose.dev.yml up -d db redis
- name: Start the server
run: |
echo "SECRET_SCANNING_GIT_APP_ID=793712" >> .env
echo "SECRET_SCANNING_PRIVATE_KEY=some-random" >> .env
echo "SECRET_SCANNING_WEBHOOK_SECRET=some-random" >> .env
docker run --name infisical-api -d -p 4000:4000 -e DB_CONNECTION_URI=$DB_CONNECTION_URI -e REDIS_URL=$REDIS_URL -e JWT_AUTH_SECRET=$JWT_AUTH_SECRET --env-file .env --entrypoint '/bin/sh' infisical-api -c "npm run migration:latest && ls && node dist/main.mjs"
env:
REDIS_URL: redis://172.17.0.1:6379
DB_CONNECTION_URI: postgres://infisical:infisical@172.17.0.1:5432/infisical?sslmode=disable
JWT_AUTH_SECRET: something-random
- uses: actions/setup-go@v5
with:
go-version: '1.21.5'
- name: Wait for container to be stable and check logs
run: |
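# Poll for up to 60 seconds; "healthy" only appears in `docker ps` output
# if the image defines a HEALTHCHECK.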
SECONDS=0
HEALTHY=0
while [ $SECONDS -lt 60 ]; do
if docker ps | grep infisical-api | grep -q healthy; then
echo "Container is healthy."
HEALTHY=1
break
fi
echo "Waiting for container to be healthy... ($SECONDS seconds elapsed)"
docker logs infisical-api
sleep 2
SECONDS=$((SECONDS+2))
done
if [ $HEALTHY -ne 1 ]; then
echo "Container did not become healthy in time"
exit 1
fi
- name: Install openapi-diff
run: go install github.com/tufin/oasdiff@latest
- name: Running OpenAPI Spec diff action
run: oasdiff breaking https://app.infisical.com/api/docs/json http://localhost:4000/api/docs/json --fail-on ERR
- name: cleanup
run: |
docker-compose -f "docker-compose.dev.yml" down
docker stop infisical-api
docker remove infisical-api

View File

@ -0,0 +1,43 @@
name: "Check Backend Pull Request"
on:
pull_request:
types: [opened, synchronize]
paths:
- "backend/**"
- "!backend/README.md"
- "!backend/.*"
- "backend/.eslintrc.js"
jobs:
check-be-pr:
name: Check
runs-on: ubuntu-latest
timeout-minutes: 15
steps:
- name: ☁️ Checkout source
uses: actions/checkout@v3
- name: 🔧 Setup Node 16
uses: actions/setup-node@v3
with:
node-version: "16"
cache: "npm"
cache-dependency-path: backend/package-lock.json
- name: 📦 Install dependencies
run: npm ci --only-production
working-directory: backend
# - name: 🧪 Run tests
# run: npm run test:ci
# working-directory: backend
# - name: 📁 Upload test results
# uses: actions/upload-artifact@v3
# if: always()
# with:
# name: be-test-results
# path: |
# ./backend/reports
# ./backend/coverage
- name: 🏗️ Run build
run: npm run build
working-directory: backend

View File

@ -1,35 +0,0 @@
name: "Check Backend PR types and lint"
on:
pull_request:
types: [opened, synchronize]
paths:
- "backend/**"
- "!backend/README.md"
- "!backend/.*"
- "backend/.eslintrc.js"
jobs:
check-be-pr:
name: Check TS and Lint
runs-on: ubuntu-latest
timeout-minutes: 15
steps:
- name: ☁️ Checkout source
uses: actions/checkout@v3
- name: 🔧 Setup Node 20
uses: actions/setup-node@v3
with:
node-version: "20"
cache: "npm"
cache-dependency-path: backend/package-lock.json
- name: Install dependencies
run: npm install
working-directory: backend
- name: Run type check
run: npm run type:check
working-directory: backend
- name: Run lint check
run: npm run lint
working-directory: backend

View File

@ -1,4 +1,4 @@
name: Check Frontend Type and Lint check
name: Check Frontend Pull Request
on:
pull_request:
@ -10,8 +10,8 @@ on:
- "frontend/.eslintrc.js"
jobs:
check-fe-ts-lint:
name: Check Frontend Type and Lint check
check-fe-pr:
name: Check
runs-on: ubuntu-latest
timeout-minutes: 15
@ -25,11 +25,12 @@ jobs:
cache: "npm"
cache-dependency-path: frontend/package-lock.json
- name: 📦 Install dependencies
run: npm install
run: npm ci --only-production --ignore-scripts
working-directory: frontend
- name: 🏗️ Run Type check
run: npm run type:check
working-directory: frontend
- name: 🏗️ Run Lint check
run: npm run lint:fix
# -
# name: 🧪 Run tests
# run: npm run test:ci
# working-directory: frontend
- name: 🏗️ Run build
run: npm run build
working-directory: frontend

View File

@ -1,34 +0,0 @@
name: Generate Changelog
permissions:
contents: write
on:
workflow_dispatch:
push:
tags:
- "infisical/v*.*.*-postgres"
jobs:
generate_changelog:
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v4
with:
fetch-tags: true
fetch-depth: 0
- name: Set up Python
uses: actions/setup-python@v5
with:
python-version: "3.12.0"
- name: Install dependencies
run: |
python -m pip install --upgrade pip
pip install requests openai
- name: Generate Changelog and Post to Slack
id: gen-changelog
run: python .github/resources/changelog-generator.py
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}

View File

@ -5,14 +5,9 @@ on:
- "infisical/v*.*.*-postgres"
jobs:
infisical-tests:
name: Run tests before deployment
# https://docs.github.com/en/actions/using-workflows/reusing-workflows#overview
uses: ./.github/workflows/run-backend-tests.yml
infisical-standalone:
name: Build infisical standalone image postgres
runs-on: ubuntu-latest
needs: [infisical-tests]
steps:
- name: Extract version from tag
id: extract_version

View File

@ -23,8 +23,6 @@ jobs:
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: 🔧 Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- run: git fetch --force --tags
- run: echo "Ref name ${{github.ref_name}}"
- uses: actions/setup-go@v3

View File

@ -1,47 +0,0 @@
name: "Run backend tests"
on:
pull_request:
types: [opened, synchronize]
paths:
- "backend/**"
- "!backend/README.md"
- "!backend/.*"
- "backend/.eslintrc.js"
workflow_call:
jobs:
check-be-pr:
name: Run integration test
runs-on: ubuntu-latest
timeout-minutes: 15
steps:
- name: ☁️ Checkout source
uses: actions/checkout@v3
- uses: KengoTODA/actions-setup-docker-compose@v1
if: ${{ env.ACT }}
name: Install `docker-compose` for local simulations
with:
version: "2.14.2"
- name: 🔧 Setup Node 20
uses: actions/setup-node@v3
with:
node-version: "20"
cache: "npm"
cache-dependency-path: backend/package-lock.json
- name: Install dependencies
run: npm install
working-directory: backend
- name: Start postgres and redis
run: touch .env && docker-compose -f docker-compose.dev.yml up -d db redis
- name: Start integration test
run: npm run test:e2e
working-directory: backend
env:
REDIS_URL: redis://172.17.0.1:6379
DB_CONNECTION_URI: postgres://infisical:infisical@172.17.0.1:5432/infisical?sslmode=disable
AUTH_SECRET: something-random
ENCRYPTION_KEY: 4bnfe4e407b8921c104518903515b218
- name: cleanup
run: |
docker-compose -f "docker-compose.dev.yml" down

.gitignore vendored
View File

@ -6,7 +6,7 @@ node_modules
.env.gamma
.env.prod
.env.infisical
.env.migration
*~
*.swp
*.swo
@ -63,5 +63,3 @@ yarn-error.log*
.vscode/*
frontend-build
*.tgz

View File

@ -190,34 +190,10 @@ dockers:
- dockerfile: docker/alpine
goos: linux
goarch: amd64
use: buildx
ids:
- all-other-builds
image_templates:
- "infisical/cli:{{ .Major }}.{{ .Minor }}.{{ .Patch }}-amd64"
- "infisical/cli:latest-amd64"
build_flag_templates:
- "--pull"
- "--platform=linux/amd64"
- dockerfile: docker/alpine
goos: linux
goarch: amd64
use: buildx
ids:
- all-other-builds
image_templates:
- "infisical/cli:{{ .Major }}.{{ .Minor }}.{{ .Patch }}-arm64"
- "infisical/cli:latest-arm64"
build_flag_templates:
- "--pull"
- "--platform=linux/arm64"
docker_manifests:
- name_template: "infisical/cli:{{ .Major }}.{{ .Minor }}.{{ .Patch }}"
image_templates:
- "infisical/cli:{{ .Major }}.{{ .Minor }}.{{ .Patch }}-amd64"
- "infisical/cli:{{ .Major }}.{{ .Minor }}.{{ .Patch }}-arm64"
- name_template: "infisical/cli:latest"
image_templates:
- "infisical/cli:latest-amd64"
- "infisical/cli:latest-arm64"
- "infisical/cli:{{ .Version }}"
- "infisical/cli:{{ .Major }}.{{ .Minor }}"
- "infisical/cli:{{ .Major }}"
- "infisical/cli:latest"

View File

@ -2,6 +2,6 @@
Thanks for taking the time to contribute! 😃 🚀
Please refer to our [Contributing Guide](https://infisical.com/docs/contributing/getting-started/overview) for instructions on how to contribute.
Please refer to our [Contributing Guide](https://infisical.com/docs/contributing/overview) for instructions on how to contribute.
We also have some 🔥amazing🔥 merch for our contributors. Please reach out to tony@infisical.com for more info 👀

View File

@ -104,6 +104,7 @@ ENV NEXT_PUBLIC_INTERCOM_ID=$INTERCOM_ID \
WORKDIR /
COPY --from=backend-runner /app /backend
COPY --from=backend-runner /app/dist/services/smtp/templates /backend/dist/templates
COPY --from=frontend-runner /app ./backend/frontend-build

View File

@ -5,13 +5,16 @@ push:
docker-compose -f docker-compose.yml push
up-dev:
docker compose -f docker-compose.dev.yml up --build
docker-compose -f docker-compose.dev.yml up --build
up-dev-ldap:
docker compose -f docker-compose.dev.yml --profile ldap up --build
up-pg-dev:
docker compose -f docker-compose.pg.yml up --build
i-dev:
infisical run -- docker-compose -f docker-compose.dev.yml up --build
up-prod:
docker-compose -f docker-compose.prod.yml up --build
docker-compose -f docker-compose.yml up --build
down:
docker compose -f docker-compose.dev.yml down
docker-compose down

View File

@ -33,7 +33,7 @@
<img src="https://img.shields.io/github/commit-activity/m/infisical/infisical" alt="git commit activity" />
</a>
<a href="https://cloudsmith.io/~infisical/repos/">
<img src="https://img.shields.io/badge/Downloads-6.95M-orange" alt="Cloudsmith downloads" />
<img src="https://img.shields.io/badge/Downloads-2.58M-orange" alt="Cloudsmith downloads" />
</a>
<a href="https://infisical.com/slack">
<img src="https://img.shields.io/badge/chat-on%20Slack-blueviolet" alt="Slack community channel" />
@ -53,19 +53,17 @@ We're on a mission to make secret management more accessible to everyone, not ju
## Features
- **[User-friendly dashboard](https://infisical.com/docs/documentation/platform/project)** to manage secrets across projects and environments (e.g. development, production, etc.).
- **[Client SDKs](https://infisical.com/docs/sdks/overview)** to fetch secrets for your apps and infrastructure on demand.
- **[Infisical CLI](https://infisical.com/docs/cli/overview)** to fetch and inject secrets into any framework in local development and CI/CD.
- **[Infisical API](https://infisical.com/docs/api-reference/overview/introduction)** to perform CRUD operation on secrets, users, projects, and any other resource in Infisical.
- **[Native integrations](https://infisical.com/docs/integrations/overview)** with platforms like [GitHub](https://infisical.com/docs/integrations/cicd/githubactions), [Vercel](https://infisical.com/docs/integrations/cloud/vercel), [AWS](https://infisical.com/docs/integrations/cloud/aws-secret-manager), and tools like [Terraform](https://infisical.com/docs/integrations/frameworks/terraform), [Ansible](https://infisical.com/docs/integrations/platforms/ansible), and more.
- **[Infisical Kubernetes operator](https://infisical.com/docs/documentation/getting-started/kubernetes)** to manage secrets in k8s, automatically reload deployments, and more.
- **[Infisical Agent](https://infisical.com/docs/infisical-agent/overview)** to inject secrets into your applications without modifying any code logic.
- **[Self-hosting and on-prem](https://infisical.com/docs/self-hosting/overview)** to get complete control over your data.
- **[Secret versioning](https://infisical.com/docs/documentation/platform/secret-versioning)** and **[Point-in-Time Recovery](https://infisical.com/docs/documentation/platform/pit-recovery)** to version every secret and project state.
- **[Audit logs](https://infisical.com/docs/documentation/platform/audit-logs)** to record every action taken in a project.
- **[Role-based Access Controls](https://infisical.com/docs/documentation/platform/role-based-access-controls)** to create permission sets on any resource in Infisical and assign those to user or machine identities.
- **[Simple on-premise deployments](https://infisical.com/docs/self-hosting/overview)** to AWS, Digital Ocean, and more.
- **[Secret Scanning and Leak Prevention](https://infisical.com/docs/cli/scanning-overview)** to prevent secrets from leaking to git.
- **[User-friendly dashboard](https://infisical.com/docs/documentation/platform/project)** to manage secrets across projects and environments (e.g. development, production, etc.)
- **[Client SDKs](https://infisical.com/docs/sdks/overview)** to fetch secrets for your apps and infrastructure on demand
- **[Infisical CLI](https://infisical.com/docs/cli/overview)** to fetch and inject secrets into any framework in local development
- **[Native integrations](https://infisical.com/docs/integrations/overview)** with platforms like GitHub, Vercel, Netlify, and more
- [**Automatic Kubernetes deployment secret reloads**](https://infisical.com/docs/documentation/getting-started/kubernetes)
- **[Complete control over your data](https://infisical.com/docs/self-hosting/overview)** - host it yourself on any infrastructure
- **[Secret versioning](https://infisical.com/docs/documentation/platform/secret-versioning)** and **[Point-in-Time Recovery](https://infisical.com/docs/documentation/platform/pit-recovery)** to version every secret and project state
- **[Audit logs](https://infisical.com/docs/documentation/platform/audit-logs)** to record every action taken in a project
- **Role-based Access Controls** per environment
- [**Simple on-premise deployments** to AWS, Digital Ocean, and more](https://infisical.com/docs/self-hosting/overview)
- [**Secret Scanning and Leak Prevention**](https://infisical.com/docs/cli/scanning-overview)
And much more.
@ -84,13 +82,13 @@ To set up and run Infisical locally, make sure you have Git and Docker installed
Linux/macOS:
```console
git clone https://github.com/Infisical/infisical && cd "$(basename $_ .git)" && cp .env.example .env && docker-compose -f docker-compose.prod.yml up
git clone https://github.com/Infisical/infisical && cd "$(basename $_ .git)" && cp .env.example .env && docker-compose -f docker-compose.yml up
```
Windows Command Prompt:
```console
git clone https://github.com/Infisical/infisical && cd infisical && copy .env.example .env && docker-compose -f docker-compose.prod.yml up
git clone https://github.com/Infisical/infisical && cd infisical && copy .env.example .env && docker-compose -f docker-compose.yml up
```
Create an account at `http://localhost:80`
@ -117,9 +115,9 @@ Learn about Infisical's code scanning feature [here](https://infisical.com/docs/c
This repo is available under the [MIT expat license](https://github.com/Infisical/infisical/blob/main/LICENSE), with the exception of the `ee` directory, which will contain premium enterprise features requiring an Infisical license.
If you are interested in managed Infisical Cloud or a self-hosted Enterprise Offering, take a look at [our website](https://infisical.com/) or [book a meeting with us](https://infisical.cal.com/vlad/infisical-demo):
If you are interested in managed Infisical Cloud or a self-hosted Enterprise Offering, take a look at [our website](https://infisical.com/) or [book a meeting with us](https://cal.com/vmatsiiako/infisical-demo):
<a href="[https://infisical.cal.com/vlad/infisical-demo](https://infisical.cal.com/vlad/infisical-demo)"><img alt="Schedule a meeting" src="https://cal.com/book-with-cal-dark.svg" /></a>
<a href="https://cal.com/vmatsiiako/infisical-demo"><img alt="Schedule a meeting" src="https://cal.com/book-with-cal-dark.svg" /></a>
## Security

View File

@ -1,3 +1,2 @@
vitest-environment-infisical.ts
vitest.config.ts
vitest.e2e.config.ts

View File

@ -21,18 +21,6 @@ module.exports = {
tsconfigRootDir: __dirname
},
root: true,
overrides: [
{
files: ["./e2e-test/**/*"],
rules: {
"@typescript-eslint/no-unsafe-member-access": "off",
"@typescript-eslint/no-unsafe-assignment": "off",
"@typescript-eslint/no-unsafe-argument": "off",
"@typescript-eslint/no-unsafe-return": "off",
"@typescript-eslint/no-unsafe-call": "off",
}
}
],
rules: {
"@typescript-eslint/no-empty-function": "off",
"@typescript-eslint/no-unsafe-enum-comparison": "off",

View File

@ -1,30 +0,0 @@
import { TKeyStoreFactory } from "@app/keystore/keystore";
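// Minimal in-memory stand-in for the keystore, used by the integration tests.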
export const mockKeyStore = (): TKeyStoreFactory => {
const store: Record<string, string | number | Buffer> = {};
return {
setItem: async (key, value) => {
store[key] = value;
return "OK";
},
setItemWithExpiry: async (key, value) => {
store[key] = value;
return "OK";
},
deleteItem: async (key) => {
delete store[key];
return 1;
},
getItem: async (key) => {
const value = store[key];
if (typeof value === "string") {
return value;
}
return null;
},
incrementBy: async () => {
return 1;
}
};
};

View File

@ -1,71 +0,0 @@
import { OrgMembershipRole } from "@app/db/schemas";
import { seedData1 } from "@app/db/seed-data";
export const createIdentity = async (name: string, role: string) => {
const createIdentityRes = await testServer.inject({
method: "POST",
url: "/api/v1/identities",
body: {
name,
role,
organizationId: seedData1.organization.id
},
headers: {
authorization: `Bearer ${jwtAuthToken}`
}
});
expect(createIdentityRes.statusCode).toBe(200);
return createIdentityRes.json().identity;
};
export const deleteIdentity = async (id: string) => {
const deleteIdentityRes = await testServer.inject({
method: "DELETE",
url: `/api/v1/identities/${id}`,
headers: {
authorization: `Bearer ${jwtAuthToken}`
}
});
expect(deleteIdentityRes.statusCode).toBe(200);
return deleteIdentityRes.json().identity;
};
describe("Identity v1", async () => {
test("Create identity", async () => {
const newIdentity = await createIdentity("mac1", OrgMembershipRole.Admin);
expect(newIdentity.name).toBe("mac1");
expect(newIdentity.authMethod).toBeNull();
await deleteIdentity(newIdentity.id);
});
test("Update identity", async () => {
const newIdentity = await createIdentity("mac1", OrgMembershipRole.Admin);
expect(newIdentity.name).toBe("mac1");
expect(newIdentity.authMethod).toBeNull();
const updatedIdentity = await testServer.inject({
method: "PATCH",
url: `/api/v1/identities/${newIdentity.id}`,
headers: {
authorization: `Bearer ${jwtAuthToken}`
},
body: {
name: "updated-mac-1",
role: OrgMembershipRole.Member
}
});
expect(updatedIdentity.statusCode).toBe(200);
expect(updatedIdentity.json().identity.name).toBe("updated-mac-1");
await deleteIdentity(newIdentity.id);
});
test("Delete Identity", async () => {
const newIdentity = await createIdentity("mac1", OrgMembershipRole.Admin);
const deletedIdentity = await deleteIdentity(newIdentity.id);
expect(deletedIdentity.name).toBe("mac1");
});
});

View File

@ -1,6 +1,5 @@
import jsrp from "jsrp";
import { seedData1 } from "@app/db/seed-data";
import jsrp from "jsrp";
describe("Login V1 Router", async () => {
// eslint-disable-next-line

View File

@ -1,40 +1,6 @@
import { seedData1 } from "@app/db/seed-data";
import { DEFAULT_PROJECT_ENVS } from "@app/db/seeds/3-project";
const createProjectEnvironment = async (name: string, slug: string) => {
const res = await testServer.inject({
method: "POST",
url: `/api/v1/workspace/${seedData1.project.id}/environments`,
headers: {
authorization: `Bearer ${jwtAuthToken}`
},
body: {
name,
slug
}
});
expect(res.statusCode).toBe(200);
const payload = JSON.parse(res.payload);
expect(payload).toHaveProperty("environment");
return payload.environment;
};
const deleteProjectEnvironment = async (envId: string) => {
const res = await testServer.inject({
method: "DELETE",
url: `/api/v1/workspace/${seedData1.project.id}/environments/${envId}`,
headers: {
authorization: `Bearer ${jwtAuthToken}`
}
});
expect(res.statusCode).toBe(200);
const payload = JSON.parse(res.payload);
expect(payload).toHaveProperty("environment");
return payload.environment;
};
describe("Project Environment Router", async () => {
test("Get default environments", async () => {
const res = await testServer.inject({
@ -65,10 +31,24 @@ describe("Project Environment Router", async () => {
expect(payload.workspace.environments.length).toBe(3);
});
const mockProjectEnv = { name: "temp", slug: "temp" }; // id will be filled in create op
const mockProjectEnv = { name: "temp", slug: "temp", id: "" }; // id will be filled in create op
test("Create environment", async () => {
const newEnvironment = await createProjectEnvironment(mockProjectEnv.name, mockProjectEnv.slug);
expect(newEnvironment).toEqual(
const res = await testServer.inject({
method: "POST",
url: `/api/v1/workspace/${seedData1.project.id}/environments`,
headers: {
authorization: `Bearer ${jwtAuthToken}`
},
body: {
name: mockProjectEnv.name,
slug: mockProjectEnv.slug
}
});
expect(res.statusCode).toBe(200);
const payload = JSON.parse(res.payload);
expect(payload).toHaveProperty("environment");
expect(payload.environment).toEqual(
expect.objectContaining({
id: expect.any(String),
name: mockProjectEnv.name,
@ -79,15 +59,14 @@ describe("Project Environment Router", async () => {
updatedAt: expect.any(String)
})
);
await deleteProjectEnvironment(newEnvironment.id);
mockProjectEnv.id = payload.environment.id;
});
test("Update environment", async () => {
const newEnvironment = await createProjectEnvironment(mockProjectEnv.name, mockProjectEnv.slug);
const updatedName = { name: "temp#2", slug: "temp2" };
const res = await testServer.inject({
method: "PATCH",
url: `/api/v1/workspace/${seedData1.project.id}/environments/${newEnvironment.id}`,
url: `/api/v1/workspace/${seedData1.project.id}/environments/${mockProjectEnv.id}`,
headers: {
authorization: `Bearer ${jwtAuthToken}`
},
@ -103,7 +82,7 @@ describe("Project Environment Router", async () => {
expect(payload).toHaveProperty("environment");
expect(payload.environment).toEqual(
expect.objectContaining({
id: newEnvironment.id,
id: expect.any(String),
name: updatedName.name,
slug: updatedName.slug,
projectId: seedData1.project.id,
@ -112,21 +91,61 @@ describe("Project Environment Router", async () => {
updatedAt: expect.any(String)
})
);
await deleteProjectEnvironment(newEnvironment.id);
mockProjectEnv.name = updatedName.name;
mockProjectEnv.slug = updatedName.slug;
});
test("Delete environment", async () => {
const newEnvironment = await createProjectEnvironment(mockProjectEnv.name, mockProjectEnv.slug);
const deletedProjectEnvironment = await deleteProjectEnvironment(newEnvironment.id);
expect(deletedProjectEnvironment).toEqual(
const res = await testServer.inject({
method: "DELETE",
url: `/api/v1/workspace/${seedData1.project.id}/environments/${mockProjectEnv.id}`,
headers: {
authorization: `Bearer ${jwtAuthToken}`
}
});
expect(res.statusCode).toBe(200);
const payload = JSON.parse(res.payload);
expect(payload).toHaveProperty("environment");
expect(payload.environment).toEqual(
expect.objectContaining({
id: deletedProjectEnvironment.id,
id: expect.any(String),
name: mockProjectEnv.name,
slug: mockProjectEnv.slug,
position: 4,
position: 1,
createdAt: expect.any(String),
updatedAt: expect.any(String)
})
);
});
// after all these operations the list of environments should still be the same
test("Default list of environment", async () => {
const res = await testServer.inject({
method: "GET",
url: `/api/v1/workspace/${seedData1.project.id}`,
headers: {
authorization: `Bearer ${jwtAuthToken}`
}
});
expect(res.statusCode).toBe(200);
const payload = JSON.parse(res.payload);
expect(payload).toHaveProperty("workspace");
// check for default environments
expect(payload).toEqual({
workspace: expect.objectContaining({
name: seedData1.project.name,
id: seedData1.project.id,
slug: seedData1.project.slug,
environments: expect.arrayContaining([
expect.objectContaining(DEFAULT_PROJECT_ENVS[0]),
expect.objectContaining(DEFAULT_PROJECT_ENVS[1]),
expect.objectContaining(DEFAULT_PROJECT_ENVS[2])
])
})
});
// ensure only the three default environments exist
expect(payload.workspace.environments.length).toBe(3);
});
});

View File

@ -1,40 +1,5 @@
import { seedData1 } from "@app/db/seed-data";
const createFolder = async (dto: { path: string; name: string }) => {
const res = await testServer.inject({
method: "POST",
url: `/api/v1/folders`,
headers: {
authorization: `Bearer ${jwtAuthToken}`
},
body: {
workspaceId: seedData1.project.id,
environment: seedData1.environment.slug,
name: dto.name,
path: dto.path
}
});
expect(res.statusCode).toBe(200);
return res.json().folder;
};
const deleteFolder = async (dto: { path: string; id: string }) => {
const res = await testServer.inject({
method: "DELETE",
url: `/api/v1/folders/${dto.id}`,
headers: {
authorization: `Bearer ${jwtAuthToken}`
},
body: {
workspaceId: seedData1.project.id,
environment: seedData1.environment.slug,
path: dto.path
}
});
expect(res.statusCode).toBe(200);
return res.json().folder;
};
describe("Secret Folder Router", async () => {
test.each([
{ name: "folder1", path: "/" }, // one in root
@ -42,15 +7,30 @@ describe("Secret Folder Router", async () => {
{ name: "folder2", path: "/" },
{ name: "folder1", path: "/level1/level2" } // this should not create folder return same thing
])("Create folder $name in $path", async ({ name, path }) => {
const createdFolder = await createFolder({ path, name });
const res = await testServer.inject({
method: "POST",
url: `/api/v1/folders`,
headers: {
authorization: `Bearer ${jwtAuthToken}`
},
body: {
workspaceId: seedData1.project.id,
environment: seedData1.environment.slug,
name,
path
}
});
expect(res.statusCode).toBe(200);
const payload = JSON.parse(res.payload);
expect(payload).toHaveProperty("folder");
// check the shape of the created folder
expect(createdFolder).toEqual(
expect.objectContaining({
expect(payload).toEqual({
folder: expect.objectContaining({
name,
id: expect.any(String)
})
);
await deleteFolder({ path, id: createdFolder.id });
});
});
test.each([
@ -63,8 +43,6 @@ describe("Secret Folder Router", async () => {
},
{ path: "/level1/level2", expected: { folders: [{ name: "folder1" }], length: 1 } }
])("Get folders $path", async ({ path, expected }) => {
const newFolders = await Promise.all(expected.folders.map(({ name }) => createFolder({ name, path })));
const res = await testServer.inject({
method: "GET",
url: `/api/v1/folders`,
@ -81,22 +59,36 @@ describe("Secret Folder Router", async () => {
expect(res.statusCode).toBe(200);
const payload = JSON.parse(res.payload);
expect(payload).toHaveProperty("folders");
expect(payload.folders.length >= expected.folders.length).toBeTruthy();
expect(payload).toEqual({
folders: expect.arrayContaining(expected.folders.map((el) => expect.objectContaining(el)))
});
await Promise.all(newFolders.map(({ id }) => deleteFolder({ path, id })));
expect(payload.folders.length).toBe(expected.length);
expect(payload).toEqual({ folders: expected.folders.map((el) => expect.objectContaining(el)) });
});
let toBeDeleteFolderId = "";
test("Update a deep folder", async () => {
const newFolder = await createFolder({ name: "folder-updated", path: "/level1/level2" });
expect(newFolder).toEqual(
const res = await testServer.inject({
method: "PATCH",
url: `/api/v1/folders/folder1`,
headers: {
authorization: `Bearer ${jwtAuthToken}`
},
body: {
workspaceId: seedData1.project.id,
environment: seedData1.environment.slug,
name: "folder-updated",
path: "/level1/level2"
}
});
expect(res.statusCode).toBe(200);
const payload = JSON.parse(res.payload);
expect(payload).toHaveProperty("folder");
expect(payload.folder).toEqual(
expect.objectContaining({
id: expect.any(String),
name: "folder-updated"
})
);
toBeDeleteFolderId = payload.folder.id;
const resUpdatedFolders = await testServer.inject({
method: "GET",
@ -114,16 +106,14 @@ describe("Secret Folder Router", async () => {
expect(resUpdatedFolders.statusCode).toBe(200);
const updatedFolderList = JSON.parse(resUpdatedFolders.payload);
expect(updatedFolderList).toHaveProperty("folders");
expect(updatedFolderList.folders.length).toEqual(1);
expect(updatedFolderList.folders[0].name).toEqual("folder-updated");
await deleteFolder({ path: "/level1/level2", id: newFolder.id });
});
test("Delete a deep folder", async () => {
const newFolder = await createFolder({ name: "folder-updated", path: "/level1/level2" });
const res = await testServer.inject({
method: "DELETE",
url: `/api/v1/folders/${newFolder.id}`,
url: `/api/v1/folders/${toBeDeleteFolderId}`,
headers: {
authorization: `Bearer ${jwtAuthToken}`
},

View File

@ -1,57 +1,32 @@
import { seedData1 } from "@app/db/seed-data";
const createSecretImport = async (importPath: string, importEnv: string) => {
const res = await testServer.inject({
method: "POST",
url: `/api/v1/secret-imports`,
headers: {
authorization: `Bearer ${jwtAuthToken}`
},
body: {
workspaceId: seedData1.project.id,
environment: seedData1.environment.slug,
path: "/",
import: {
environment: importEnv,
path: importPath
}
}
});
expect(res.statusCode).toBe(200);
const payload = JSON.parse(res.payload);
expect(payload).toHaveProperty("secretImport");
return payload.secretImport;
};
const deleteSecretImport = async (id: string) => {
const res = await testServer.inject({
method: "DELETE",
url: `/api/v1/secret-imports/${id}`,
headers: {
authorization: `Bearer ${jwtAuthToken}`
},
body: {
workspaceId: seedData1.project.id,
environment: seedData1.environment.slug,
path: "/"
}
});
expect(res.statusCode).toBe(200);
const payload = JSON.parse(res.payload);
expect(payload).toHaveProperty("secretImport");
return payload.secretImport;
};
describe("Secret Import Router", async () => {
describe("Secret Folder Router", async () => {
test.each([
{ importEnv: "dev", importPath: "/" }, // one in root
{ importEnv: "staging", importPath: "/" } // then create a deep one creating intermediate ones
])("Create secret import $importEnv with path $importPath", async ({ importPath, importEnv }) => {
const res = await testServer.inject({
method: "POST",
url: `/api/v1/secret-imports`,
headers: {
authorization: `Bearer ${jwtAuthToken}`
},
body: {
workspaceId: seedData1.project.id,
environment: seedData1.environment.slug,
path: "/",
import: {
environment: importEnv,
path: importPath
}
}
});
expect(res.statusCode).toBe(200);
const payload = JSON.parse(res.payload);
expect(payload).toHaveProperty("secretImport");
// check the created secret import
const payload = await createSecretImport(importPath, importEnv);
expect(payload).toEqual(
expect(payload.secretImport).toEqual(
expect.objectContaining({
id: expect.any(String),
importPath: expect.any(String),
@ -62,12 +37,10 @@ describe("Secret Import Router", async () => {
})
})
);
await deleteSecretImport(payload.id);
});
let testSecretImportId = "";
test("Get secret imports", async () => {
const createdImport1 = await createSecretImport("/", "dev");
const createdImport2 = await createSecretImport("/", "staging");
const res = await testServer.inject({
method: "GET",
url: `/api/v1/secret-imports`,
@ -85,6 +58,7 @@ describe("Secret Import Router", async () => {
const payload = JSON.parse(res.payload);
expect(payload).toHaveProperty("secretImports");
expect(payload.secretImports.length).toBe(2);
testSecretImportId = payload.secretImports[0].id;
expect(payload.secretImports).toEqual(
expect.arrayContaining([
expect.objectContaining({
@ -98,20 +72,12 @@ describe("Secret Import Router", async () => {
})
])
);
await deleteSecretImport(createdImport1.id);
await deleteSecretImport(createdImport2.id);
});
test("Update secret import position", async () => {
const devImportDetails = { path: "/", envSlug: "dev" };
const stagingImportDetails = { path: "/", envSlug: "staging" };
const createdImport1 = await createSecretImport(devImportDetails.path, devImportDetails.envSlug);
const createdImport2 = await createSecretImport(stagingImportDetails.path, stagingImportDetails.envSlug);
const updateImportRes = await testServer.inject({
const res = await testServer.inject({
method: "PATCH",
url: `/api/v1/secret-imports/${createdImport1.id}`,
url: `/api/v1/secret-imports/${testSecretImportId}`,
headers: {
authorization: `Bearer ${jwtAuthToken}`
},
@ -125,8 +91,8 @@ describe("Secret Import Router", async () => {
}
});
expect(updateImportRes.statusCode).toBe(200);
const payload = JSON.parse(updateImportRes.payload);
expect(res.statusCode).toBe(200);
const payload = JSON.parse(res.payload);
expect(payload).toHaveProperty("secretImport");
// check the updated secret import
expect(payload.secretImport).toEqual(
@ -136,7 +102,7 @@ describe("Secret Import Router", async () => {
position: 2,
importEnv: expect.objectContaining({
name: expect.any(String),
slug: expect.stringMatching(devImportDetails.envSlug),
slug: expect.any(String),
id: expect.any(String)
})
})
@ -158,19 +124,28 @@ describe("Secret Import Router", async () => {
expect(secretImportsListRes.statusCode).toBe(200);
const secretImportList = JSON.parse(secretImportsListRes.payload);
expect(secretImportList).toHaveProperty("secretImports");
expect(secretImportList.secretImports[1].id).toEqual(createdImport1.id);
expect(secretImportList.secretImports[0].id).toEqual(createdImport2.id);
await deleteSecretImport(createdImport1.id);
await deleteSecretImport(createdImport2.id);
expect(secretImportList.secretImports[1].id).toEqual(testSecretImportId);
});
test("Delete secret import position", async () => {
const createdImport1 = await createSecretImport("/", "dev");
const createdImport2 = await createSecretImport("/", "staging");
const deletedImport = await deleteSecretImport(createdImport1.id);
const res = await testServer.inject({
method: "DELETE",
url: `/api/v1/secret-imports/${testSecretImportId}`,
headers: {
authorization: `Bearer ${jwtAuthToken}`
},
body: {
workspaceId: seedData1.project.id,
environment: seedData1.environment.slug,
path: "/"
}
});
expect(res.statusCode).toBe(200);
const payload = JSON.parse(res.payload);
expect(payload).toHaveProperty("secretImport");
// check the deleted secret import
expect(deletedImport).toEqual(
expect(payload.secretImport).toEqual(
expect.objectContaining({
id: expect.any(String),
importPath: expect.any(String),
@ -200,7 +175,5 @@ describe("Secret Import Router", async () => {
expect(secretImportList).toHaveProperty("secretImports");
expect(secretImportList.secretImports.length).toEqual(1);
expect(secretImportList.secretImports[0].position).toEqual(1);
await deleteSecretImport(createdImport2.id);
});
});

View File

@ -1,579 +0,0 @@
import crypto from "node:crypto";
import { SecretType, TSecrets } from "@app/db/schemas";
import { decryptSecret, encryptSecret, getUserPrivateKey, seedData1 } from "@app/db/seed-data";
import { decryptAsymmetric, decryptSymmetric128BitHexKeyUTF8, encryptSymmetric128BitHexKeyUTF8 } from "@app/lib/crypto";
const createServiceToken = async (
scopes: { environment: string; secretPath: string }[],
permissions: ("read" | "write")[]
) => {
const projectKeyRes = await testServer.inject({
method: "GET",
url: `/api/v2/workspace/${seedData1.project.id}/encrypted-key`,
headers: {
authorization: `Bearer ${jwtAuthToken}`
}
});
const projectKeyEnc = JSON.parse(projectKeyRes.payload);
const userInfoRes = await testServer.inject({
method: "GET",
url: "/api/v2/users/me",
headers: {
authorization: `Bearer ${jwtAuthToken}`
}
});
const { user: userInfo } = JSON.parse(userInfoRes.payload);
const privateKey = await getUserPrivateKey(seedData1.password, userInfo);
const projectKey = decryptAsymmetric({
ciphertext: projectKeyEnc.encryptedKey,
nonce: projectKeyEnc.nonce,
publicKey: projectKeyEnc.sender.publicKey,
privateKey
});
const randomBytes = crypto.randomBytes(16).toString("hex");
const { ciphertext, iv, tag } = encryptSymmetric128BitHexKeyUTF8(projectKey, randomBytes);
const serviceTokenRes = await testServer.inject({
method: "POST",
url: "/api/v2/service-token",
headers: {
authorization: `Bearer ${jwtAuthToken}`
},
body: {
name: "test-token",
workspaceId: seedData1.project.id,
scopes,
encryptedKey: ciphertext,
iv,
tag,
permissions,
expiresIn: null
}
});
expect(serviceTokenRes.statusCode).toBe(200);
const serviceTokenInfo = serviceTokenRes.json();
expect(serviceTokenInfo).toHaveProperty("serviceToken");
expect(serviceTokenInfo).toHaveProperty("serviceTokenData");
return `${serviceTokenInfo.serviceToken}.${randomBytes}`;
};
const deleteServiceToken = async () => {
const serviceTokenListRes = await testServer.inject({
method: "GET",
url: `/api/v1/workspace/${seedData1.project.id}/service-token-data`,
headers: {
authorization: `Bearer ${jwtAuthToken}`
}
});
expect(serviceTokenListRes.statusCode).toBe(200);
const serviceTokens = JSON.parse(serviceTokenListRes.payload).serviceTokenData as { name: string; id: string }[];
expect(serviceTokens.length).toBeGreaterThan(0);
const serviceTokenInfo = serviceTokens.find(({ name }) => name === "test-token");
expect(serviceTokenInfo).toBeDefined();
const deleteTokenRes = await testServer.inject({
method: "DELETE",
url: `/api/v2/service-token/${serviceTokenInfo?.id}`,
headers: {
authorization: `Bearer ${jwtAuthToken}`
}
});
expect(deleteTokenRes.statusCode).toBe(200);
};
const createSecret = async (dto: {
projectKey: string;
path: string;
key: string;
value: string;
comment: string;
type?: SecretType;
token: string;
}) => {
const createSecretReqBody = {
workspaceId: seedData1.project.id,
environment: seedData1.environment.slug,
type: dto.type || SecretType.Shared,
secretPath: dto.path,
...encryptSecret(dto.projectKey, dto.key, dto.value, dto.comment)
};
const createSecRes = await testServer.inject({
method: "POST",
url: `/api/v3/secrets/${dto.key}`,
headers: {
authorization: `Bearer ${dto.token}`
},
body: createSecretReqBody
});
expect(createSecRes.statusCode).toBe(200);
const createdSecretPayload = JSON.parse(createSecRes.payload);
expect(createdSecretPayload).toHaveProperty("secret");
return createdSecretPayload.secret;
};
const deleteSecret = async (dto: { path: string; key: string; token: string }) => {
const deleteSecRes = await testServer.inject({
method: "DELETE",
url: `/api/v3/secrets/${dto.key}`,
headers: {
authorization: `Bearer ${dto.token}`
},
body: {
workspaceId: seedData1.project.id,
environment: seedData1.environment.slug,
secretPath: dto.path
}
});
expect(deleteSecRes.statusCode).toBe(200);
const updatedSecretPayload = JSON.parse(deleteSecRes.payload);
expect(updatedSecretPayload).toHaveProperty("secret");
return updatedSecretPayload.secret;
};
describe("Service token secret ops", async () => {
let serviceToken = "";
let projectKey = "";
let folderId = "";
beforeAll(async () => {
serviceToken = await createServiceToken(
[{ secretPath: "/**", environment: seedData1.environment.slug }],
["read", "write"]
);
// this is to ensure CLI service token decryption works correctly
const serviceTokenInfoRes = await testServer.inject({
method: "GET",
url: "/api/v2/service-token",
headers: {
authorization: `Bearer ${serviceToken}`
}
});
expect(serviceTokenInfoRes.statusCode).toBe(200);
const serviceTokenInfo = serviceTokenInfoRes.json();
const serviceTokenParts = serviceToken.split(".");
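// createServiceToken appended the random project-key material as the final (4th) dot-separated segment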
projectKey = decryptSymmetric128BitHexKeyUTF8({
key: serviceTokenParts[3],
tag: serviceTokenInfo.tag,
ciphertext: serviceTokenInfo.encryptedKey,
iv: serviceTokenInfo.iv
});
// create a deep folder
const folderCreate = await testServer.inject({
method: "POST",
url: `/api/v1/folders`,
headers: {
authorization: `Bearer ${jwtAuthToken}`
},
body: {
workspaceId: seedData1.project.id,
environment: seedData1.environment.slug,
name: "folder",
path: "/nested1/nested2"
}
});
expect(folderCreate.statusCode).toBe(200);
folderId = folderCreate.json().folder.id;
});
afterAll(async () => {
await deleteServiceToken();
// delete the deep folder created above
const deleteFolder = await testServer.inject({
method: "DELETE",
url: `/api/v1/folders/${folderId}`,
headers: {
authorization: `Bearer ${jwtAuthToken}`
},
body: {
workspaceId: seedData1.project.id,
environment: seedData1.environment.slug,
path: "/nested1/nested2"
}
});
expect(deleteFolder.statusCode).toBe(200);
});
const testSecrets = [
{
path: "/",
secret: {
key: "ST-SEC",
value: "something-secret",
comment: "some comment"
}
},
{
path: "/nested1/nested2/folder",
secret: {
key: "NESTED-ST-SEC",
value: "something-secret",
comment: "some comment"
}
}
];
const getSecrets = async (environment: string, secretPath = "/") => {
const res = await testServer.inject({
method: "GET",
url: `/api/v3/secrets`,
headers: {
authorization: `Bearer ${serviceToken}`
},
query: {
secretPath,
environment,
workspaceId: seedData1.project.id
}
});
const secrets: TSecrets[] = JSON.parse(res.payload).secrets || [];
return secrets.map((el) => ({ ...decryptSecret(projectKey, el), type: el.type }));
};
test.each(testSecrets)("Create secret in path $path", async ({ secret, path }) => {
const createdSecret = await createSecret({ projectKey, path, ...secret, token: serviceToken });
const decryptedSecret = decryptSecret(projectKey, createdSecret);
expect(decryptedSecret.key).toEqual(secret.key);
expect(decryptedSecret.value).toEqual(secret.value);
expect(decryptedSecret.comment).toEqual(secret.comment);
expect(decryptedSecret.version).toEqual(1);
const secrets = await getSecrets(seedData1.environment.slug, path);
expect(secrets).toEqual(
expect.arrayContaining([
expect.objectContaining({
key: secret.key,
value: secret.value,
type: SecretType.Shared
})
])
);
await deleteSecret({ path, key: secret.key, token: serviceToken });
});
test.each(testSecrets)("Get secret by name in path $path", async ({ secret, path }) => {
await createSecret({ projectKey, path, ...secret, token: serviceToken });
const getSecByNameRes = await testServer.inject({
method: "GET",
url: `/api/v3/secrets/${secret.key}`,
headers: {
authorization: `Bearer ${serviceToken}`
},
query: {
secretPath: path,
workspaceId: seedData1.project.id,
environment: seedData1.environment.slug
}
});
expect(getSecByNameRes.statusCode).toBe(200);
const getSecretByNamePayload = JSON.parse(getSecByNameRes.payload);
expect(getSecretByNamePayload).toHaveProperty("secret");
const decryptedSecret = decryptSecret(projectKey, getSecretByNamePayload.secret);
expect(decryptedSecret.key).toEqual(secret.key);
expect(decryptedSecret.value).toEqual(secret.value);
expect(decryptedSecret.comment).toEqual(secret.comment);
await deleteSecret({ path, key: secret.key, token: serviceToken });
});
test.each(testSecrets)("Update secret in path $path", async ({ path, secret }) => {
await createSecret({ projectKey, path, ...secret, token: serviceToken });
const updateSecretReqBody = {
workspaceId: seedData1.project.id,
environment: seedData1.environment.slug,
type: SecretType.Shared,
secretPath: path,
...encryptSecret(projectKey, secret.key, "new-value", secret.comment)
};
const updateSecRes = await testServer.inject({
method: "PATCH",
url: `/api/v3/secrets/${secret.key}`,
headers: {
authorization: `Bearer ${serviceToken}`
},
body: updateSecretReqBody
});
expect(updateSecRes.statusCode).toBe(200);
const updatedSecretPayload = JSON.parse(updateSecRes.payload);
expect(updatedSecretPayload).toHaveProperty("secret");
const decryptedSecret = decryptSecret(projectKey, updatedSecretPayload.secret);
expect(decryptedSecret.key).toEqual(secret.key);
expect(decryptedSecret.value).toEqual("new-value");
expect(decryptedSecret.comment).toEqual(secret.comment);
// list secret should have updated value
const secrets = await getSecrets(seedData1.environment.slug, path);
expect(secrets).toEqual(
expect.arrayContaining([
expect.objectContaining({
key: secret.key,
value: "new-value",
type: SecretType.Shared
})
])
);
await deleteSecret({ path, key: secret.key, token: serviceToken });
});
test.each(testSecrets)("Delete secret in path $path", async ({ secret, path }) => {
await createSecret({ projectKey, path, ...secret, token: serviceToken });
const deletedSecret = await deleteSecret({ path, key: secret.key, token: serviceToken });
const decryptedSecret = decryptSecret(projectKey, deletedSecret);
expect(decryptedSecret.key).toEqual(secret.key);
// shared secret deletion should delete personal ones also
const secrets = await getSecrets(seedData1.environment.slug, path);
expect(secrets).toEqual(
expect.not.arrayContaining([
expect.objectContaining({
key: secret.key,
type: SecretType.Shared
})
])
);
});
test.each(testSecrets)("Bulk create secrets in path $path", async ({ secret, path }) => {
const createSharedSecRes = await testServer.inject({
method: "POST",
url: `/api/v3/secrets/batch`,
headers: {
authorization: `Bearer ${serviceToken}`
},
body: {
workspaceId: seedData1.project.id,
environment: seedData1.environment.slug,
secretPath: path,
secrets: Array.from(Array(5)).map((_e, i) => ({
secretName: `BULK-${secret.key}-${i + 1}`,
...encryptSecret(projectKey, `BULK-${secret.key}-${i + 1}`, secret.value, secret.comment)
}))
}
});
expect(createSharedSecRes.statusCode).toBe(200);
const createSharedSecPayload = JSON.parse(createSharedSecRes.payload);
expect(createSharedSecPayload).toHaveProperty("secrets");
// bulk ones should exist
const secrets = await getSecrets(seedData1.environment.slug, path);
expect(secrets).toEqual(
expect.arrayContaining(
Array.from(Array(5)).map((_e, i) =>
expect.objectContaining({
key: `BULK-${secret.key}-${i + 1}`,
type: SecretType.Shared
})
)
)
);
await Promise.all(
Array.from(Array(5)).map((_e, i) =>
deleteSecret({ path, token: serviceToken, key: `BULK-${secret.key}-${i + 1}` })
)
);
});
test.each(testSecrets)("Bulk create fail on existing secret in path $path", async ({ secret, path }) => {
await createSecret({ projectKey, ...secret, key: `BULK-${secret.key}-1`, path, token: serviceToken });
const createSharedSecRes = await testServer.inject({
method: "POST",
url: `/api/v3/secrets/batch`,
headers: {
authorization: `Bearer ${serviceToken}`
},
body: {
workspaceId: seedData1.project.id,
environment: seedData1.environment.slug,
secretPath: path,
secrets: Array.from(Array(5)).map((_e, i) => ({
secretName: `BULK-${secret.key}-${i + 1}`,
...encryptSecret(projectKey, `BULK-${secret.key}-${i + 1}`, secret.value, secret.comment)
}))
}
});
expect(createSharedSecRes.statusCode).toBe(400);
await deleteSecret({ path, key: `BULK-${secret.key}-1`, token: serviceToken });
});
test.each(testSecrets)("Bulk update secrets in path $path", async ({ secret, path }) => {
await Promise.all(
Array.from(Array(5)).map((_e, i) =>
createSecret({ projectKey, token: serviceToken, ...secret, key: `BULK-${secret.key}-${i + 1}`, path })
)
);
const updateSharedSecRes = await testServer.inject({
method: "PATCH",
url: `/api/v3/secrets/batch`,
headers: {
authorization: `Bearer ${serviceToken}`
},
body: {
workspaceId: seedData1.project.id,
environment: seedData1.environment.slug,
secretPath: path,
secrets: Array.from(Array(5)).map((_e, i) => ({
secretName: `BULK-${secret.key}-${i + 1}`,
...encryptSecret(projectKey, `BULK-${secret.key}-${i + 1}`, "update-value", secret.comment)
}))
}
});
expect(updateSharedSecRes.statusCode).toBe(200);
const updateSharedSecPayload = JSON.parse(updateSharedSecRes.payload);
expect(updateSharedSecPayload).toHaveProperty("secrets");
// bulk ones should exist
const secrets = await getSecrets(seedData1.environment.slug, path);
expect(secrets).toEqual(
expect.arrayContaining(
Array.from(Array(5)).map((_e, i) =>
expect.objectContaining({
key: `BULK-${secret.key}-${i + 1}`,
value: "update-value",
type: SecretType.Shared
})
)
)
);
await Promise.all(
Array.from(Array(5)).map((_e, i) =>
deleteSecret({ path, key: `BULK-${secret.key}-${i + 1}`, token: serviceToken })
)
);
});
test.each(testSecrets)("Bulk delete secrets in path $path", async ({ secret, path }) => {
await Promise.all(
Array.from(Array(5)).map((_e, i) =>
createSecret({ projectKey, token: serviceToken, ...secret, key: `BULK-${secret.key}-${i + 1}`, path })
)
);
const deletedSharedSecRes = await testServer.inject({
method: "DELETE",
url: `/api/v3/secrets/batch`,
headers: {
authorization: `Bearer ${serviceToken}`
},
body: {
workspaceId: seedData1.project.id,
environment: seedData1.environment.slug,
secretPath: path,
secrets: Array.from(Array(5)).map((_e, i) => ({
secretName: `BULK-${secret.key}-${i + 1}`
}))
}
});
expect(deletedSharedSecRes.statusCode).toBe(200);
const deletedSecretPayload = JSON.parse(deletedSharedSecRes.payload);
expect(deletedSecretPayload).toHaveProperty("secrets");
// bulk ones should no longer exist
const secrets = await getSecrets(seedData1.environment.slug, path);
expect(secrets).toEqual(
expect.not.arrayContaining(
Array.from(Array(5)).map((_e, i) =>
expect.objectContaining({
key: `BULK-${secret.key}-${i + 1}`,
type: SecretType.Shared
})
)
)
);
});
});
describe("Service token fail cases", async () => {
test("Unauthorized secret path access", async () => {
const serviceToken = await createServiceToken(
[{ secretPath: "/", environment: seedData1.environment.slug }],
["read", "write"]
);
const fetchSecrets = await testServer.inject({
method: "GET",
url: "/api/v3/secrets",
query: {
workspaceId: seedData1.project.id,
environment: seedData1.environment.slug,
secretPath: "/nested/deep"
},
headers: {
authorization: `Bearer ${serviceToken}`
}
});
expect(fetchSecrets.statusCode).toBe(401);
expect(fetchSecrets.json().error).toBe("PermissionDenied");
await deleteServiceToken();
});
test("Unauthorized secret environment access", async () => {
const serviceToken = await createServiceToken(
[{ secretPath: "/", environment: seedData1.environment.slug }],
["read", "write"]
);
const fetchSecrets = await testServer.inject({
method: "GET",
url: "/api/v3/secrets",
query: {
workspaceId: seedData1.project.id,
environment: "prod",
secretPath: "/"
},
headers: {
authorization: `Bearer ${serviceToken}`
}
});
expect(fetchSecrets.statusCode).toBe(401);
expect(fetchSecrets.json().error).toBe("PermissionDenied");
await deleteServiceToken();
});
test("Unauthorized write operation", async () => {
const serviceToken = await createServiceToken(
[{ secretPath: "/", environment: seedData1.environment.slug }],
["read"]
);
const writeSecrets = await testServer.inject({
method: "POST",
url: `/api/v3/secrets/NEW`,
body: {
workspaceId: seedData1.project.id,
environment: seedData1.environment.slug,
type: SecretType.Shared,
secretPath: "/",
// the project key doesn't matter here because the request is rejected for read-only access before decryption
...encryptSecret(crypto.randomBytes(16).toString("hex"), "NEW", "value", "")
},
headers: {
authorization: `Bearer ${serviceToken}`
}
});
expect(writeSecrets.statusCode).toBe(401);
expect(writeSecrets.json().error).toBe("PermissionDenied");
// but read access should still work fine
const fetchSecrets = await testServer.inject({
method: "GET",
url: "/api/v3/secrets",
query: {
workspaceId: seedData1.project.id,
environment: seedData1.environment.slug,
secretPath: "/"
},
headers: {
authorization: `Bearer ${serviceToken}`
}
});
expect(fetchSecrets.statusCode).toBe(200);
await deleteServiceToken();
});
});

File diff suppressed because it is too large.

@ -1,31 +1,26 @@
// eslint-disable-next-line
import "ts-node/register";
// import { main } from "@app/server/app";
import { initEnvConfig } from "@app/lib/config/env";
import dotenv from "dotenv";
import jwt from "jsonwebtoken";
import knex from "knex";
import path from "path";
import { seedData1 } from "@app/db/seed-data";
import { initEnvConfig } from "@app/lib/config/env";
import { initLogger } from "@app/lib/logger";
import { main } from "@app/server/app";
import { AuthTokenType } from "@app/services/auth/auth-type";
import { mockQueue } from "./mocks/queue";
import { mockSmtpServer } from "./mocks/smtp";
import { mockKeyStore } from "./mocks/keystore";
import { initLogger } from "@app/lib/logger";
import jwt from "jsonwebtoken";
dotenv.config({ path: path.join(__dirname, "../../.env.test"), debug: true });
import "ts-node/register";
import { main } from "@app/server/app";
import { mockQueue } from "./mocks/queue";
import { AuthTokenType } from "@app/services/auth/auth-type";
import { seedData1 } from "@app/db/seed-data";
dotenv.config({ path: path.join(__dirname, "../.env.test") });
export default {
name: "knex-env",
transformMode: "ssr",
async setup() {
const logger = await initLogger();
const cfg = initEnvConfig(logger);
const db = knex({
client: "pg",
connection: cfg.DB_CONNECTION_URI,
connection: process.env.DB_CONNECTION_URI,
migrations: {
directory: path.join(__dirname, "../src/db/migrations"),
extension: "ts",
@ -42,8 +37,9 @@ export default {
await db.seed.run();
const smtp = mockSmtpServer();
const queue = mockQueue();
const keyStore = mockKeyStore();
const server = await main({ db, smtp, logger, queue, keyStore });
const logger = await initLogger();
const cfg = initEnvConfig(logger);
const server = await main({ db, smtp, logger, queue });
// @ts-expect-error type
globalThis.testServer = server;
// @ts-expect-error type
@ -58,7 +54,6 @@ export default {
{ expiresIn: cfg.JWT_AUTH_LIFETIME }
);
} catch (error) {
console.log("[TEST] Error setting up environment", error);
await db.destroy();
throw error;
}

backend/package-lock.json (generated, 2761 lines changed)

File diff suppressed because it is too large.

@ -2,12 +2,12 @@
"name": "backend",
"version": "1.0.0",
"description": "",
"main": "./dist/main.mjs",
"main": "index.js",
"scripts": {
"test": "echo \"Error: no test specified\" && exit 1",
"dev": "tsx watch --clear-screen=false ./src/main.ts | pino-pretty --colorize --colorizeObjects --singleLine",
"dev:docker": "nodemon",
"build": "tsup",
"build": "rimraf dist && tsup && cp -R ./src/lib/validator/disposable_emails.txt ./dist && cp -R ./src/services/smtp/templates ./dist",
"start": "node dist/main.mjs",
"type:check": "tsc --noEmit",
"lint:fix": "eslint --fix --ext js,ts ./src",
@ -24,8 +24,8 @@
"migration:latest": "knex --knexfile ./src/db/knexfile.ts --client pg migrate:latest",
"migration:rollback": "knex --knexfile ./src/db/knexfile.ts migrate:rollback",
"seed:new": "tsx ./scripts/create-seed-file.ts",
"seed": "knex --knexfile ./src/db/knexfile.ts --client pg seed:run",
"db:reset": "npm run migration:rollback -- --all && npm run migration:latest"
"seed:run": "knex --knexfile ./src/db/knexfile.ts --client pg seed:run",
"db:reset": "npm run migration:rollback -- --all && npm run migration:latest && npm run seed:run"
},
"keywords": [],
"author": "",
@ -44,13 +44,7 @@
"@types/pg": "^8.10.9",
"@types/picomatch": "^2.3.3",
"@types/prompt-sync": "^4.2.3",
"@types/resolve": "^1.20.6",
"@types/uuid": "^9.0.7",
"@typescript-eslint/eslint-plugin": "^6.20.0",
"@typescript-eslint/parser": "^6.20.0",
"eslint": "^8.56.0",
"eslint-config-airbnb-base": "^15.0.0",
"eslint-config-airbnb-typescript": "^17.1.0",
"eslint-config-prettier": "^9.1.0",
"eslint-import-resolver-typescript": "^3.6.1",
"eslint-plugin-import": "^2.29.1",
@ -61,42 +55,46 @@
"prompt-sync": "^4.2.0",
"rimraf": "^5.0.5",
"ts-node": "^10.9.1",
"tsc-alias": "^1.8.8",
"tsconfig-paths": "^4.2.0",
"tsup": "^8.0.1",
"tsx": "^4.4.0",
"typescript": "^5.3.2",
"vite-tsconfig-paths": "^4.2.2",
"vitest": "^1.2.2"
"vitest": "^1.0.4"
},
"dependencies": {
"@aws-sdk/client-secrets-manager": "^3.504.0",
"@aws-sdk/client-secrets-manager": "^3.485.0",
"@casl/ability": "^6.5.0",
"@fastify/cookie": "^9.3.1",
"@fastify/cors": "^8.5.0",
"@fastify/cookie": "^9.2.0",
"@fastify/cors": "^8.4.1",
"@fastify/etag": "^5.1.0",
"@fastify/formbody": "^7.4.0",
"@fastify/helmet": "^11.1.1",
"@fastify/passport": "^2.4.0",
"@fastify/rate-limit": "^9.0.0",
"@fastify/session": "^10.7.0",
"@fastify/swagger": "^8.14.0",
"@fastify/swagger-ui": "^2.1.0",
"@fastify/swagger": "^8.12.0",
"@fastify/swagger-ui": "^1.10.1",
"@node-saml/passport-saml": "^4.0.4",
"@octokit/rest": "^20.0.2",
"@octokit/webhooks-types": "^7.3.1",
"@serdnam/pino-cloudwatch-transport": "^1.0.4",
"@sindresorhus/slugify": "^2.2.1",
"@typescript-eslint/eslint-plugin": "^6.20.0",
"@typescript-eslint/parser": "^6.20.0",
"@ucast/mongo2js": "^1.3.4",
"ajv": "^8.12.0",
"argon2": "^0.31.2",
"aws-sdk": "^2.1553.0",
"axios": "^1.6.7",
"aws-sdk": "^2.1532.0",
"axios": "^1.6.2",
"axios-retry": "^4.0.0",
"bcrypt": "^5.1.1",
"bullmq": "^5.3.3",
"dotenv": "^16.4.1",
"fastify": "^4.26.0",
"bullmq": "^5.1.1",
"dotenv": "^16.3.1",
"eslint": "^8.56.0",
"eslint-config-airbnb-base": "^15.0.0",
"eslint-config-airbnb-typescript": "^17.1.0",
"fastify": "^4.24.3",
"fastify-plugin": "^4.5.1",
"handlebars": "^4.7.8",
"ioredis": "^5.3.2",
@ -106,24 +104,24 @@
"knex": "^3.0.1",
"libsodium-wrappers": "^0.7.13",
"lodash.isequal": "^4.5.0",
"mysql2": "^3.9.1",
"mysql2": "^3.6.5",
"nanoid": "^5.0.4",
"nodemailer": "^6.9.9",
"node-cache": "^5.1.2",
"nodemailer": "^6.9.7",
"ora": "^7.0.1",
"passport-github": "^1.1.0",
"passport-gitlab2": "^5.0.0",
"passport-google-oauth20": "^2.0.0",
"passport-ldapauth": "^3.0.1",
"pg": "^8.11.3",
"picomatch": "^3.0.1",
"pino": "^8.16.2",
"posthog-node": "^3.6.2",
"probot": "^13.0.0",
"posthog-node": "^3.6.0",
"probot": "^12.3.3",
"smee-client": "^2.0.0",
"tweetnacl": "^1.0.3",
"tweetnacl-util": "^0.15.1",
"uuid": "^9.0.1",
"zod": "^3.22.4",
"zod-to-json-schema": "^3.22.4"
"zod-to-json-schema": "^3.22.0"
}
}

@ -7,10 +7,11 @@ import promptSync from "prompt-sync";
const prompt = promptSync({ sigint: true });
const migrationName = prompt("Enter name for seedfile: ");
const fileCounter = readdirSync(path.join(__dirname, "../src/db/seeds")).length || 1;
const fileCounter = readdirSync(path.join(__dirname, "../src/db/seed")).length || 1;
execSync(
`npx knex seed:make --knexfile ${path.join(__dirname, "../src/db/knexfile.ts")} -x ts ${
fileCounter + 1
}-${migrationName}`,
`npx knex seed:make --knexfile ${path.join(
__dirname,
"../src/db/knexfile.ts"
)} -x ts ${fileCounter}-${migrationName}`,
{ stdio: "inherit" }
);

@ -3,9 +3,13 @@ import dotenv from "dotenv";
import path from "path";
import knex from "knex";
import { writeFileSync } from "fs";
import promptSync from "prompt-sync";
const prompt = promptSync({ sigint: true });
dotenv.config({
path: path.join(__dirname, "../../.env.migration")
path: path.join(__dirname, "../.env"),
debug: true
});
const db = knex({
@ -44,7 +48,7 @@ const getZodDefaultValue = (type: unknown, value: string | number | boolean | Ob
if (!value || value === "null") return;
switch (type) {
case "uuid":
return `.default("00000000-0000-0000-0000-000000000000")`;
return;
case "character varying": {
if (value === "gen_random_uuid()") return;
if (typeof value === "string" && value.includes("::")) {
@ -90,7 +94,17 @@ const main = async () => {
.orderBy("table_name")
).filter((el) => !el.tableName.includes("_migrations"));
console.log("Select a table to generate schema");
console.table(tables);
console.log("all: all tables");
const selectedTables = prompt("Type table numbers comma separated: ");
const tableNumbers =
selectedTables !== "all" ? selectedTables.split(",").map((el) => Number(el)) : [];
for (let i = 0; i < tables.length; i += 1) {
// skip if not desired table
if (selectedTables !== "all" && !tableNumbers.includes(i)) continue;
const { tableName } = tables[i];
const columns = await db(tableName).columnInfo();
const columnNames = Object.keys(columns);
@ -100,8 +114,7 @@ const main = async () => {
const columnName = columnNames[colNum];
const colInfo = columns[columnName];
let ztype = getZodPrimitiveType(colInfo.type);
// don't put optional on id
if (colInfo.defaultValue && columnName !== "id") {
if (colInfo.defaultValue) {
const { defaultValue } = colInfo;
const zSchema = getZodDefaultValue(colInfo.type, defaultValue);
if (zSchema) {
@ -111,17 +124,16 @@ const main = async () => {
if (colInfo.nullable) {
ztype = ztype.concat(".nullable().optional()");
}
schema = schema.concat(
`${!schema ? "\n" : ""} ${columnName}: ${ztype}${colNum === columnNames.length - 1 ? "" : ","}\n`
);
schema = schema.concat(`${!schema ? "\n" : ""} ${columnName}: ${ztype},\n`);
}
const dashcase = tableName.split("_").join("-");
const pascalCase = tableName
.split("_")
.reduce((prev, curr) => prev + `${curr.at(0)?.toUpperCase()}${curr.slice(1).toLowerCase()}`, "");
// the insert and update are changed to zod input type to use default cases
.reduce(
(prev, curr) => prev + `${curr.at(0)?.toUpperCase()}${curr.slice(1).toLowerCase()}`,
""
);
writeFileSync(
path.join(__dirname, "../src/db/schemas", `${dashcase}.ts`),
`// Code generated by automation script, DO NOT EDIT.
@ -136,10 +148,19 @@ import { TImmutableDBKeys } from "./models";
export const ${pascalCase}Schema = z.object({${schema}});
export type T${pascalCase} = z.infer<typeof ${pascalCase}Schema>;
export type T${pascalCase}Insert = Omit<z.input<typeof ${pascalCase}Schema>, TImmutableDBKeys>;
export type T${pascalCase}Update = Partial<Omit<z.input<typeof ${pascalCase}Schema>, TImmutableDBKeys>>;
export type T${pascalCase}Insert = Omit<T${pascalCase}, TImmutableDBKeys>;
export type T${pascalCase}Update = Partial<Omit<T${pascalCase}, TImmutableDBKeys>>;
`
);
// const file = readFileSync(path.join(__dirname, "../src/db/schemas/index.ts"), "utf8");
// if (!file.includes(`export * from "./${dashcase};"`)) {
// appendFileSync(
// path.join(__dirname, "../src/db/schemas/index.ts"),
// `\nexport * from "./${dashcase}";`,
// "utf8"
// );
// }
}
process.exit(0);
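The comment removed above ("the insert and update are changed to zod input type to use default cases") is the rationale for the z.input pattern that recurs in the schema diffs below; a minimal sketch of the difference, using a hypothetical table with one defaulted column:

import { z } from "zod";

// Hypothetical schema shaped like the generator's output; "version" has a default.
const DemoSchema = z.object({
id: z.string().uuid(),
version: z.number().default(1)
});

type TDemo = z.infer<typeof DemoSchema>; // output type: version is a required number
type TDemoInsert = Omit<z.input<typeof DemoSchema>, "id">; // input type: version is optional

const row: TDemoInsert = {}; // valid: version may be omitted and is filled in on parse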

@ -3,11 +3,9 @@ import "fastify";
import { TUsers } from "@app/db/schemas";
import { TAuditLogServiceFactory } from "@app/ee/services/audit-log/audit-log-service";
import { TCreateAuditLogDTO } from "@app/ee/services/audit-log/audit-log-types";
import { TLdapConfigServiceFactory } from "@app/ee/services/ldap-config/ldap-config-service";
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
import { TSamlConfigServiceFactory } from "@app/ee/services/saml-config/saml-config-service";
import { TScimServiceFactory } from "@app/ee/services/scim/scim-service";
import { TSecretApprovalPolicyServiceFactory } from "@app/ee/services/secret-approval-policy/secret-approval-policy-service";
import { TSecretApprovalRequestServiceFactory } from "@app/ee/services/secret-approval-request/secret-approval-request-service";
import { TSecretRotationServiceFactory } from "@app/ee/services/secret-rotation/secret-rotation-service";
@ -53,7 +51,6 @@ declare module "fastify" {
// used for mfa session authentication
mfa: {
userId: string;
orgId?: string;
user: TUsers;
};
// identity injection. depending on which kinda of token the information is filled in auth
@ -61,7 +58,6 @@ declare module "fastify" {
permission: {
type: ActorType;
id: string;
orgId?: string;
};
// passport data
passportUser: {
@ -70,7 +66,6 @@ declare module "fastify" {
};
auditLogInfo: Pick<TCreateAuditLogDTO, "userAgent" | "userAgentType" | "ipAddress" | "actor">;
ssoConfig: Awaited<ReturnType<TSamlConfigServiceFactory["getSaml"]>>;
ldapConfig: Awaited<ReturnType<TLdapConfigServiceFactory["getLdapCfg"]>>;
}
interface FastifyInstance {
@ -108,8 +103,6 @@ declare module "fastify" {
secretRotation: TSecretRotationServiceFactory;
snapshot: TSecretSnapshotServiceFactory;
saml: TSamlConfigServiceFactory;
scim: TScimServiceFactory;
ldap: TLdapConfigServiceFactory;
auditLog: TAuditLogServiceFactory;
secretScanning: TSecretScanningServiceFactory;
license: TLicenseServiceFactory;

@ -50,9 +50,6 @@ import {
TIntegrations,
TIntegrationsInsert,
TIntegrationsUpdate,
TLdapConfigs,
TLdapConfigsInsert,
TLdapConfigsUpdate,
TOrganizations,
TOrganizationsInsert,
TOrganizationsUpdate,
@ -86,9 +83,6 @@ import {
TSamlConfigs,
TSamlConfigsInsert,
TSamlConfigsUpdate,
TScimTokens,
TScimTokensInsert,
TScimTokensUpdate,
TSecretApprovalPolicies,
TSecretApprovalPoliciesApprovers,
TSecretApprovalPoliciesApproversInsert,
@ -164,9 +158,6 @@ import {
TUserActions,
TUserActionsInsert,
TUserActionsUpdate,
TUserAliases,
TUserAliasesInsert,
TUserAliasesUpdate,
TUserEncryptionKeys,
TUserEncryptionKeysInsert,
TUserEncryptionKeysUpdate,
@ -181,7 +172,6 @@ import {
declare module "knex/types/tables" {
interface Tables {
[TableName.Users]: Knex.CompositeTableType<TUsers, TUsersInsert, TUsersUpdate>;
[TableName.UserAliases]: Knex.CompositeTableType<TUserAliases, TUserAliasesInsert, TUserAliasesUpdate>;
[TableName.UserEncryptionKey]: Knex.CompositeTableType<
TUserEncryptionKeys,
TUserEncryptionKeysInsert,
@ -272,7 +262,6 @@ declare module "knex/types/tables" {
TIdentityProjectMembershipsInsert,
TIdentityProjectMembershipsUpdate
>;
[TableName.ScimToken]: Knex.CompositeTableType<TScimTokens, TScimTokensInsert, TScimTokensUpdate>;
[TableName.SecretApprovalPolicy]: Knex.CompositeTableType<
TSecretApprovalPolicies,
TSecretApprovalPoliciesInsert,
@ -325,7 +314,6 @@ declare module "knex/types/tables" {
TSecretSnapshotFoldersUpdate
>;
[TableName.SamlConfig]: Knex.CompositeTableType<TSamlConfigs, TSamlConfigsInsert, TSamlConfigsUpdate>;
[TableName.LdapConfig]: Knex.CompositeTableType<TLdapConfigs, TLdapConfigsInsert, TLdapConfigsUpdate>;
[TableName.OrgBot]: Knex.CompositeTableType<TOrgBots, TOrgBotsInsert, TOrgBotsUpdate>;
[TableName.AuditLog]: Knex.CompositeTableType<TAuditLogs, TAuditLogsInsert, TAuditLogsUpdate>;
[TableName.GitAppInstallSession]: Knex.CompositeTableType<

backend/src/cache/redis.ts (vendored, new file, 6 lines)

@ -0,0 +1,6 @@
import Redis from "ioredis";
export const initRedisConnection = (redisUrl: string) => {
const redis = new Redis(redisUrl);
return redis;
};
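
A usage sketch for the new helper, assuming REDIS_URL is set (e.g. redis://redis:6379 as in the sample env) and that the import path matches the file location above:

import { initRedisConnection } from "./cache/redis";

const redis = initRedisConnection(process.env.REDIS_URL as string);
await redis.ping(); // ioredis round-trip check before first use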

@ -1,19 +1,23 @@
import knex from "knex";
export type TDbClient = ReturnType<typeof initDbConnection>;
export const initDbConnection = ({ dbConnectionUri, dbRootCert }: { dbConnectionUri: string; dbRootCert?: string }) => {
export const initDbConnection = ({
dbConnectionUri,
dbRootCert
}: {
dbConnectionUri: string;
dbRootCert?: string;
}) => {
const db = knex({
client: "pg",
connection: {
connectionString: dbConnectionUri,
ssl: dbRootCert
? {
rejectUnauthorized: true,
ca: Buffer.from(dbRootCert, "base64").toString("ascii")
}
: false
ssl: dbRootCert ? {
rejectUnauthorized: true,
ca: Buffer.from(dbRootCert, 'base64').toString('ascii')
} : false
}
});
return db;
};
};
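
A usage sketch for initDbConnection, assuming DB_CONNECTION_URI and an optional base64-encoded DB_ROOT_CERT in the environment; the import path is a placeholder:

import { initDbConnection } from "./db"; // hypothetical path to the module above

const db = initDbConnection({
dbConnectionUri: process.env.DB_CONNECTION_URI as string,
dbRootCert: process.env.DB_ROOT_CERT // optional; decoded from base64 inside the helper
});
await db.raw("select 1"); // verify connectivity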

@ -5,27 +5,15 @@ import dotenv from "dotenv";
import type { Knex } from "knex";
import path from "path";
// Update with your config settings. .
// Update with your config settings.
dotenv.config({
path: path.join(__dirname, "../../../.env.migration"),
debug: true
});
dotenv.config({
path: path.join(__dirname, "../../../.env"),
path: path.join(__dirname, "../../.env"),
debug: true
});
export default {
development: {
client: "postgres",
connection: {
connectionString: process.env.DB_CONNECTION_URI,
ssl: process.env.DB_ROOT_CERT
? {
rejectUnauthorized: true,
ca: Buffer.from(process.env.DB_ROOT_CERT, "base64").toString("ascii")
}
: false
},
connection: process.env.DB_CONNECTION_URI,
pool: {
min: 2,
max: 10
@ -39,15 +27,7 @@ export default {
},
production: {
client: "postgres",
connection: {
connectionString: process.env.DB_CONNECTION_URI,
ssl: process.env.DB_ROOT_CERT
? {
rejectUnauthorized: true,
ca: Buffer.from(process.env.DB_ROOT_CERT, "base64").toString("ascii")
}
: false
},
connection: process.env.DB_CONNECTION_URI,
pool: {
min: 2,
max: 10

@ -1,25 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
await knex.schema.alterTable(TableName.Organization, (t) => {
t.boolean("authEnforced").defaultTo(false);
t.index("slug");
});
await knex.schema.alterTable(TableName.SamlConfig, (t) => {
t.datetime("lastUsed");
});
}
export async function down(knex: Knex): Promise<void> {
await knex.schema.alterTable(TableName.Organization, (t) => {
t.dropColumn("authEnforced");
t.dropIndex("slug");
});
await knex.schema.alterTable(TableName.SamlConfig, (t) => {
t.dropColumn("lastUsed");
});
}

@ -1,31 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasTable(TableName.ScimToken))) {
await knex.schema.createTable(TableName.ScimToken, (t) => {
t.string("id", 36).primary().defaultTo(knex.fn.uuid());
t.bigInteger("ttlDays").defaultTo(365).notNullable();
t.string("description").notNullable();
t.uuid("orgId").notNullable();
t.foreign("orgId").references("id").inTable(TableName.Organization).onDelete("CASCADE");
t.timestamps(true, true, true);
});
}
await knex.schema.alterTable(TableName.Organization, (t) => {
t.boolean("scimEnabled").defaultTo(false);
});
await createOnUpdateTrigger(knex, TableName.ScimToken);
}
export async function down(knex: Knex): Promise<void> {
await knex.schema.dropTableIfExists(TableName.ScimToken);
await dropOnUpdateTrigger(knex, TableName.ScimToken);
await knex.schema.alterTable(TableName.Organization, (t) => {
t.dropColumn("scimEnabled");
});
}

@ -1,39 +0,0 @@
import { Knex } from "knex";
import { ProjectVersion, TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const hasGhostUserColumn = await knex.schema.hasColumn(TableName.Users, "isGhost");
const hasProjectVersionColumn = await knex.schema.hasColumn(TableName.Project, "version");
if (!hasGhostUserColumn) {
await knex.schema.alterTable(TableName.Users, (t) => {
t.boolean("isGhost").defaultTo(false).notNullable();
});
}
if (!hasProjectVersionColumn) {
await knex.schema.alterTable(TableName.Project, (t) => {
t.integer("version").defaultTo(ProjectVersion.V1).notNullable();
t.string("upgradeStatus").nullable();
});
}
}
export async function down(knex: Knex): Promise<void> {
const hasGhostUserColumn = await knex.schema.hasColumn(TableName.Users, "isGhost");
const hasProjectVersionColumn = await knex.schema.hasColumn(TableName.Project, "version");
if (hasGhostUserColumn) {
await knex.schema.alterTable(TableName.Users, (t) => {
t.dropColumn("isGhost");
});
}
if (hasProjectVersionColumn) {
await knex.schema.alterTable(TableName.Project, (t) => {
t.dropColumn("version");
t.dropColumn("upgradeStatus");
});
}
}

@ -1,20 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const isTablePresent = await knex.schema.hasTable(TableName.SuperAdmin);
if (isTablePresent) {
await knex.schema.alterTable(TableName.SuperAdmin, (t) => {
t.string("allowedSignUpDomain");
});
}
}
export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasColumn(TableName.SuperAdmin, "allowedSignUpDomain")) {
await knex.schema.alterTable(TableName.SuperAdmin, (t) => {
t.dropColumn("allowedSignUpDomain");
});
}
}

@ -1,25 +0,0 @@
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-nocheck
import { Knex } from "knex";
import { TableName } from "../schemas";
const ADMIN_CONFIG_UUID = "00000000-0000-0000-0000-000000000000";
export async function up(knex: Knex): Promise<void> {
await knex.schema.alterTable(TableName.SuperAdmin, (t) => {
t.uuid("instanceId").notNullable().defaultTo(knex.fn.uuid());
});
const superUserConfigExists = await knex(TableName.SuperAdmin).where("id", ADMIN_CONFIG_UUID).first();
if (!superUserConfigExists) {
// eslint-disable-next-line
await knex(TableName.SuperAdmin).update({ id: ADMIN_CONFIG_UUID }).whereNotNull("id").limit(1);
}
}
export async function down(knex: Knex): Promise<void> {
await knex.schema.alterTable(TableName.SuperAdmin, (t) => {
t.dropColumn("instanceId");
});
}

@ -1,15 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
await knex.schema.alterTable(TableName.Integration, (t) => {
t.datetime("lastUsed");
});
}
export async function down(knex: Knex): Promise<void> {
await knex.schema.alterTable(TableName.Integration, (t) => {
t.dropColumn("lastUsed");
});
}

@ -1,68 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasTable(TableName.LdapConfig))) {
await knex.schema.createTable(TableName.LdapConfig, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.uuid("orgId").notNullable().unique();
t.foreign("orgId").references("id").inTable(TableName.Organization).onDelete("CASCADE");
t.boolean("isActive").notNullable();
t.string("url").notNullable();
t.string("encryptedBindDN").notNullable();
t.string("bindDNIV").notNullable();
t.string("bindDNTag").notNullable();
t.string("encryptedBindPass").notNullable();
t.string("bindPassIV").notNullable();
t.string("bindPassTag").notNullable();
t.string("searchBase").notNullable();
t.text("encryptedCACert").notNullable();
t.string("caCertIV").notNullable();
t.string("caCertTag").notNullable();
t.timestamps(true, true, true);
});
}
await createOnUpdateTrigger(knex, TableName.LdapConfig);
if (!(await knex.schema.hasTable(TableName.UserAliases))) {
await knex.schema.createTable(TableName.UserAliases, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.uuid("userId").notNullable();
t.foreign("userId").references("id").inTable(TableName.Users).onDelete("CASCADE");
t.string("username").notNullable();
t.string("aliasType").notNullable();
t.string("externalId").notNullable();
t.specificType("emails", "text[]");
t.uuid("orgId").nullable();
t.foreign("orgId").references("id").inTable(TableName.Organization).onDelete("CASCADE");
t.timestamps(true, true, true);
});
}
await createOnUpdateTrigger(knex, TableName.UserAliases);
await knex.schema.alterTable(TableName.Users, (t) => {
t.string("username").unique();
t.string("email").nullable().alter();
t.dropUnique(["email"]);
});
await knex(TableName.Users).update("username", knex.ref("email"));
await knex.schema.alterTable(TableName.Users, (t) => {
t.string("username").notNullable().alter();
});
}
export async function down(knex: Knex): Promise<void> {
await knex.schema.dropTableIfExists(TableName.LdapConfig);
await knex.schema.dropTableIfExists(TableName.UserAliases);
await knex.schema.alterTable(TableName.Users, (t) => {
t.dropColumn("username");
// t.string("email").notNullable().alter();
});
await dropOnUpdateTrigger(knex, TableName.LdapConfig);
}

@ -19,5 +19,5 @@ export const ApiKeysSchema = z.object({
});
export type TApiKeys = z.infer<typeof ApiKeysSchema>;
export type TApiKeysInsert = Omit<z.input<typeof ApiKeysSchema>, TImmutableDBKeys>;
export type TApiKeysUpdate = Partial<Omit<z.input<typeof ApiKeysSchema>, TImmutableDBKeys>>;
export type TApiKeysInsert = Omit<TApiKeys, TImmutableDBKeys>;
export type TApiKeysUpdate = Partial<Omit<TApiKeys, TImmutableDBKeys>>;

@ -24,5 +24,5 @@ export const AuditLogsSchema = z.object({
});
export type TAuditLogs = z.infer<typeof AuditLogsSchema>;
export type TAuditLogsInsert = Omit<z.input<typeof AuditLogsSchema>, TImmutableDBKeys>;
export type TAuditLogsUpdate = Partial<Omit<z.input<typeof AuditLogsSchema>, TImmutableDBKeys>>;
export type TAuditLogsInsert = Omit<TAuditLogs, TImmutableDBKeys>;
export type TAuditLogsUpdate = Partial<Omit<TAuditLogs, TImmutableDBKeys>>;

@ -20,5 +20,5 @@ export const AuthTokenSessionsSchema = z.object({
});
export type TAuthTokenSessions = z.infer<typeof AuthTokenSessionsSchema>;
export type TAuthTokenSessionsInsert = Omit<z.input<typeof AuthTokenSessionsSchema>, TImmutableDBKeys>;
export type TAuthTokenSessionsUpdate = Partial<Omit<z.input<typeof AuthTokenSessionsSchema>, TImmutableDBKeys>>;
export type TAuthTokenSessionsInsert = Omit<TAuthTokenSessions, TImmutableDBKeys>;
export type TAuthTokenSessionsUpdate = Partial<Omit<TAuthTokenSessions, TImmutableDBKeys>>;

@ -21,5 +21,5 @@ export const AuthTokensSchema = z.object({
});
export type TAuthTokens = z.infer<typeof AuthTokensSchema>;
export type TAuthTokensInsert = Omit<z.input<typeof AuthTokensSchema>, TImmutableDBKeys>;
export type TAuthTokensUpdate = Partial<Omit<z.input<typeof AuthTokensSchema>, TImmutableDBKeys>>;
export type TAuthTokensInsert = Omit<TAuthTokens, TImmutableDBKeys>;
export type TAuthTokensUpdate = Partial<Omit<TAuthTokens, TImmutableDBKeys>>;

@ -22,5 +22,5 @@ export const BackupPrivateKeySchema = z.object({
});
export type TBackupPrivateKey = z.infer<typeof BackupPrivateKeySchema>;
export type TBackupPrivateKeyInsert = Omit<z.input<typeof BackupPrivateKeySchema>, TImmutableDBKeys>;
export type TBackupPrivateKeyUpdate = Partial<Omit<z.input<typeof BackupPrivateKeySchema>, TImmutableDBKeys>>;
export type TBackupPrivateKeyInsert = Omit<TBackupPrivateKey, TImmutableDBKeys>;
export type TBackupPrivateKeyUpdate = Partial<Omit<TBackupPrivateKey, TImmutableDBKeys>>;

@ -17,5 +17,5 @@ export const GitAppInstallSessionsSchema = z.object({
});
export type TGitAppInstallSessions = z.infer<typeof GitAppInstallSessionsSchema>;
export type TGitAppInstallSessionsInsert = Omit<z.input<typeof GitAppInstallSessionsSchema>, TImmutableDBKeys>;
export type TGitAppInstallSessionsUpdate = Partial<Omit<z.input<typeof GitAppInstallSessionsSchema>, TImmutableDBKeys>>;
export type TGitAppInstallSessionsInsert = Omit<TGitAppInstallSessions, TImmutableDBKeys>;
export type TGitAppInstallSessionsUpdate = Partial<Omit<TGitAppInstallSessions, TImmutableDBKeys>>;

@ -17,5 +17,5 @@ export const GitAppOrgSchema = z.object({
});
export type TGitAppOrg = z.infer<typeof GitAppOrgSchema>;
export type TGitAppOrgInsert = Omit<z.input<typeof GitAppOrgSchema>, TImmutableDBKeys>;
export type TGitAppOrgUpdate = Partial<Omit<z.input<typeof GitAppOrgSchema>, TImmutableDBKeys>>;
export type TGitAppOrgInsert = Omit<TGitAppOrg, TImmutableDBKeys>;
export type TGitAppOrgUpdate = Partial<Omit<TGitAppOrg, TImmutableDBKeys>>;

@ -16,5 +16,5 @@ export const IdentitiesSchema = z.object({
});
export type TIdentities = z.infer<typeof IdentitiesSchema>;
export type TIdentitiesInsert = Omit<z.input<typeof IdentitiesSchema>, TImmutableDBKeys>;
export type TIdentitiesUpdate = Partial<Omit<z.input<typeof IdentitiesSchema>, TImmutableDBKeys>>;
export type TIdentitiesInsert = Omit<TIdentities, TImmutableDBKeys>;
export type TIdentitiesUpdate = Partial<Omit<TIdentities, TImmutableDBKeys>>;

@ -23,5 +23,5 @@ export const IdentityAccessTokensSchema = z.object({
});
export type TIdentityAccessTokens = z.infer<typeof IdentityAccessTokensSchema>;
export type TIdentityAccessTokensInsert = Omit<z.input<typeof IdentityAccessTokensSchema>, TImmutableDBKeys>;
export type TIdentityAccessTokensUpdate = Partial<Omit<z.input<typeof IdentityAccessTokensSchema>, TImmutableDBKeys>>;
export type TIdentityAccessTokensInsert = Omit<TIdentityAccessTokens, TImmutableDBKeys>;
export type TIdentityAccessTokensUpdate = Partial<Omit<TIdentityAccessTokens, TImmutableDBKeys>>;

@ -18,7 +18,5 @@ export const IdentityOrgMembershipsSchema = z.object({
});
export type TIdentityOrgMemberships = z.infer<typeof IdentityOrgMembershipsSchema>;
export type TIdentityOrgMembershipsInsert = Omit<z.input<typeof IdentityOrgMembershipsSchema>, TImmutableDBKeys>;
export type TIdentityOrgMembershipsUpdate = Partial<
Omit<z.input<typeof IdentityOrgMembershipsSchema>, TImmutableDBKeys>
>;
export type TIdentityOrgMembershipsInsert = Omit<TIdentityOrgMemberships, TImmutableDBKeys>;
export type TIdentityOrgMembershipsUpdate = Partial<Omit<TIdentityOrgMemberships, TImmutableDBKeys>>;

@ -18,10 +18,5 @@ export const IdentityProjectMembershipsSchema = z.object({
});
export type TIdentityProjectMemberships = z.infer<typeof IdentityProjectMembershipsSchema>;
export type TIdentityProjectMembershipsInsert = Omit<
z.input<typeof IdentityProjectMembershipsSchema>,
TImmutableDBKeys
>;
export type TIdentityProjectMembershipsUpdate = Partial<
Omit<z.input<typeof IdentityProjectMembershipsSchema>, TImmutableDBKeys>
>;
export type TIdentityProjectMembershipsInsert = Omit<TIdentityProjectMemberships, TImmutableDBKeys>;
export type TIdentityProjectMembershipsUpdate = Partial<Omit<TIdentityProjectMemberships, TImmutableDBKeys>>;

@ -23,7 +23,5 @@ export const IdentityUaClientSecretsSchema = z.object({
});
export type TIdentityUaClientSecrets = z.infer<typeof IdentityUaClientSecretsSchema>;
export type TIdentityUaClientSecretsInsert = Omit<z.input<typeof IdentityUaClientSecretsSchema>, TImmutableDBKeys>;
export type TIdentityUaClientSecretsUpdate = Partial<
Omit<z.input<typeof IdentityUaClientSecretsSchema>, TImmutableDBKeys>
>;
export type TIdentityUaClientSecretsInsert = Omit<TIdentityUaClientSecrets, TImmutableDBKeys>;
export type TIdentityUaClientSecretsUpdate = Partial<Omit<TIdentityUaClientSecrets, TImmutableDBKeys>>;

@ -21,7 +21,5 @@ export const IdentityUniversalAuthsSchema = z.object({
});
export type TIdentityUniversalAuths = z.infer<typeof IdentityUniversalAuthsSchema>;
export type TIdentityUniversalAuthsInsert = Omit<z.input<typeof IdentityUniversalAuthsSchema>, TImmutableDBKeys>;
export type TIdentityUniversalAuthsUpdate = Partial<
Omit<z.input<typeof IdentityUniversalAuthsSchema>, TImmutableDBKeys>
>;
export type TIdentityUniversalAuthsInsert = Omit<TIdentityUniversalAuths, TImmutableDBKeys>;
export type TIdentityUniversalAuthsUpdate = Partial<Omit<TIdentityUniversalAuths, TImmutableDBKeys>>;

@ -16,5 +16,5 @@ export const IncidentContactsSchema = z.object({
});
export type TIncidentContacts = z.infer<typeof IncidentContactsSchema>;
export type TIncidentContactsInsert = Omit<z.input<typeof IncidentContactsSchema>, TImmutableDBKeys>;
export type TIncidentContactsUpdate = Partial<Omit<z.input<typeof IncidentContactsSchema>, TImmutableDBKeys>>;
export type TIncidentContactsInsert = Omit<TIncidentContacts, TImmutableDBKeys>;
export type TIncidentContactsUpdate = Partial<Omit<TIncidentContacts, TImmutableDBKeys>>;

@ -14,7 +14,6 @@ export * from "./identity-universal-auths";
export * from "./incident-contacts";
export * from "./integration-auths";
export * from "./integrations";
export * from "./ldap-configs";
export * from "./models";
export * from "./org-bots";
export * from "./org-memberships";
@ -27,7 +26,6 @@ export * from "./project-memberships";
export * from "./project-roles";
export * from "./projects";
export * from "./saml-configs";
export * from "./scim-tokens";
export * from "./secret-approval-policies";
export * from "./secret-approval-policies-approvers";
export * from "./secret-approval-request-secret-tags";
@ -53,7 +51,6 @@ export * from "./service-tokens";
export * from "./super-admin";
export * from "./trusted-ips";
export * from "./user-actions";
export * from "./user-aliases";
export * from "./user-encryption-keys";
export * from "./users";
export * from "./webhooks";

@ -33,5 +33,5 @@ export const IntegrationAuthsSchema = z.object({
});
export type TIntegrationAuths = z.infer<typeof IntegrationAuthsSchema>;
export type TIntegrationAuthsInsert = Omit<z.input<typeof IntegrationAuthsSchema>, TImmutableDBKeys>;
export type TIntegrationAuthsUpdate = Partial<Omit<z.input<typeof IntegrationAuthsSchema>, TImmutableDBKeys>>;
export type TIntegrationAuthsInsert = Omit<TIntegrationAuths, TImmutableDBKeys>;
export type TIntegrationAuthsUpdate = Partial<Omit<TIntegrationAuths, TImmutableDBKeys>>;

@ -27,10 +27,9 @@ export const IntegrationsSchema = z.object({
envId: z.string().uuid(),
secretPath: z.string().default("/"),
createdAt: z.date(),
updatedAt: z.date(),
lastUsed: z.date().nullable().optional()
updatedAt: z.date()
});
export type TIntegrations = z.infer<typeof IntegrationsSchema>;
export type TIntegrationsInsert = Omit<z.input<typeof IntegrationsSchema>, TImmutableDBKeys>;
export type TIntegrationsUpdate = Partial<Omit<z.input<typeof IntegrationsSchema>, TImmutableDBKeys>>;
export type TIntegrationsInsert = Omit<TIntegrations, TImmutableDBKeys>;
export type TIntegrationsUpdate = Partial<Omit<TIntegrations, TImmutableDBKeys>>;

@ -1,31 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const LdapConfigsSchema = z.object({
id: z.string().uuid(),
orgId: z.string().uuid(),
isActive: z.boolean(),
url: z.string(),
encryptedBindDN: z.string(),
bindDNIV: z.string(),
bindDNTag: z.string(),
encryptedBindPass: z.string(),
bindPassIV: z.string(),
bindPassTag: z.string(),
searchBase: z.string(),
encryptedCACert: z.string(),
caCertIV: z.string(),
caCertTag: z.string(),
createdAt: z.date(),
updatedAt: z.date()
});
export type TLdapConfigs = z.infer<typeof LdapConfigsSchema>;
export type TLdapConfigsInsert = Omit<z.input<typeof LdapConfigsSchema>, TImmutableDBKeys>;
export type TLdapConfigsUpdate = Partial<Omit<z.input<typeof LdapConfigsSchema>, TImmutableDBKeys>>;

@ -2,7 +2,6 @@ import { z } from "zod";
export enum TableName {
Users = "users",
UserAliases = "user_aliases",
UserEncryptionKey = "user_encryption_keys",
AuthTokens = "auth_tokens",
AuthTokenSession = "auth_token_sessions",
@ -41,7 +40,6 @@ export enum TableName {
IdentityUaClientSecret = "identity_ua_client_secrets",
IdentityOrgMembership = "identity_org_memberships",
IdentityProjectMembership = "identity_project_memberships",
ScimToken = "scim_tokens",
SecretApprovalPolicy = "secret_approval_policies",
SecretApprovalPolicyApprover = "secret_approval_policies_approvers",
SecretApprovalRequest = "secret_approval_requests",
@ -51,7 +49,6 @@ export enum TableName {
SecretRotation = "secret_rotations",
SecretRotationOutput = "secret_rotation_outputs",
SamlConfig = "saml_configs",
LdapConfig = "ldap_configs",
AuditLog = "audit_logs",
GitAppInstallSession = "git_app_install_sessions",
GitAppOrg = "git_app_org",
@ -114,17 +111,6 @@ export enum SecretType {
Personal = "personal"
}
export enum ProjectVersion {
V1 = 1,
V2 = 2
}
export enum ProjectUpgradeStatus {
InProgress = "IN_PROGRESS",
// Completed -> status will be null once completed, so an explicit completed status is not needed
Failed = "FAILED"
}
export enum IdentityAuthMethod {
Univeral = "universal-auth"
}

@ -27,5 +27,5 @@ export const OrgBotsSchema = z.object({
});
export type TOrgBots = z.infer<typeof OrgBotsSchema>;
export type TOrgBotsInsert = Omit<z.input<typeof OrgBotsSchema>, TImmutableDBKeys>;
export type TOrgBotsUpdate = Partial<Omit<z.input<typeof OrgBotsSchema>, TImmutableDBKeys>>;
export type TOrgBotsInsert = Omit<TOrgBots, TImmutableDBKeys>;
export type TOrgBotsUpdate = Partial<Omit<TOrgBots, TImmutableDBKeys>>;

@ -20,5 +20,5 @@ export const OrgMembershipsSchema = z.object({
});
export type TOrgMemberships = z.infer<typeof OrgMembershipsSchema>;
export type TOrgMembershipsInsert = Omit<z.input<typeof OrgMembershipsSchema>, TImmutableDBKeys>;
export type TOrgMembershipsUpdate = Partial<Omit<z.input<typeof OrgMembershipsSchema>, TImmutableDBKeys>>;
export type TOrgMembershipsInsert = Omit<TOrgMemberships, TImmutableDBKeys>;
export type TOrgMembershipsUpdate = Partial<Omit<TOrgMemberships, TImmutableDBKeys>>;

@ -19,5 +19,5 @@ export const OrgRolesSchema = z.object({
});
export type TOrgRoles = z.infer<typeof OrgRolesSchema>;
export type TOrgRolesInsert = Omit<z.input<typeof OrgRolesSchema>, TImmutableDBKeys>;
export type TOrgRolesUpdate = Partial<Omit<z.input<typeof OrgRolesSchema>, TImmutableDBKeys>>;
export type TOrgRolesInsert = Omit<TOrgRoles, TImmutableDBKeys>;
export type TOrgRolesUpdate = Partial<Omit<TOrgRoles, TImmutableDBKeys>>;

@ -13,11 +13,9 @@ export const OrganizationsSchema = z.object({
customerId: z.string().nullable().optional(),
slug: z.string(),
createdAt: z.date(),
updatedAt: z.date(),
authEnforced: z.boolean().default(false).nullable().optional(),
scimEnabled: z.boolean().default(false).nullable().optional()
updatedAt: z.date()
});
export type TOrganizations = z.infer<typeof OrganizationsSchema>;
export type TOrganizationsInsert = Omit<z.input<typeof OrganizationsSchema>, TImmutableDBKeys>;
export type TOrganizationsUpdate = Partial<Omit<z.input<typeof OrganizationsSchema>, TImmutableDBKeys>>;
export type TOrganizationsInsert = Omit<TOrganizations, TImmutableDBKeys>;
export type TOrganizationsUpdate = Partial<Omit<TOrganizations, TImmutableDBKeys>>;

@ -26,5 +26,5 @@ export const ProjectBotsSchema = z.object({
});
export type TProjectBots = z.infer<typeof ProjectBotsSchema>;
export type TProjectBotsInsert = Omit<z.input<typeof ProjectBotsSchema>, TImmutableDBKeys>;
export type TProjectBotsUpdate = Partial<Omit<z.input<typeof ProjectBotsSchema>, TImmutableDBKeys>>;
export type TProjectBotsInsert = Omit<TProjectBots, TImmutableDBKeys>;
export type TProjectBotsUpdate = Partial<Omit<TProjectBots, TImmutableDBKeys>>;

@ -18,5 +18,5 @@ export const ProjectEnvironmentsSchema = z.object({
});
export type TProjectEnvironments = z.infer<typeof ProjectEnvironmentsSchema>;
export type TProjectEnvironmentsInsert = Omit<z.input<typeof ProjectEnvironmentsSchema>, TImmutableDBKeys>;
export type TProjectEnvironmentsUpdate = Partial<Omit<z.input<typeof ProjectEnvironmentsSchema>, TImmutableDBKeys>>;
export type TProjectEnvironmentsInsert = Omit<TProjectEnvironments, TImmutableDBKeys>;
export type TProjectEnvironmentsUpdate = Partial<Omit<TProjectEnvironments, TImmutableDBKeys>>;

@ -19,5 +19,5 @@ export const ProjectKeysSchema = z.object({
});
export type TProjectKeys = z.infer<typeof ProjectKeysSchema>;
export type TProjectKeysInsert = Omit<z.input<typeof ProjectKeysSchema>, TImmutableDBKeys>;
export type TProjectKeysUpdate = Partial<Omit<z.input<typeof ProjectKeysSchema>, TImmutableDBKeys>>;
export type TProjectKeysInsert = Omit<TProjectKeys, TImmutableDBKeys>;
export type TProjectKeysUpdate = Partial<Omit<TProjectKeys, TImmutableDBKeys>>;

@ -18,5 +18,5 @@ export const ProjectMembershipsSchema = z.object({
});
export type TProjectMemberships = z.infer<typeof ProjectMembershipsSchema>;
export type TProjectMembershipsInsert = Omit<z.input<typeof ProjectMembershipsSchema>, TImmutableDBKeys>;
export type TProjectMembershipsUpdate = Partial<Omit<z.input<typeof ProjectMembershipsSchema>, TImmutableDBKeys>>;
export type TProjectMembershipsInsert = Omit<TProjectMemberships, TImmutableDBKeys>;
export type TProjectMembershipsUpdate = Partial<Omit<TProjectMemberships, TImmutableDBKeys>>;

@ -19,5 +19,5 @@ export const ProjectRolesSchema = z.object({
});
export type TProjectRoles = z.infer<typeof ProjectRolesSchema>;
export type TProjectRolesInsert = Omit<z.input<typeof ProjectRolesSchema>, TImmutableDBKeys>;
export type TProjectRolesUpdate = Partial<Omit<z.input<typeof ProjectRolesSchema>, TImmutableDBKeys>>;
export type TProjectRolesInsert = Omit<TProjectRoles, TImmutableDBKeys>;
export type TProjectRolesUpdate = Partial<Omit<TProjectRoles, TImmutableDBKeys>>;

@ -14,11 +14,9 @@ export const ProjectsSchema = z.object({
autoCapitalization: z.boolean().default(true).nullable().optional(),
orgId: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date(),
version: z.number().default(1),
upgradeStatus: z.string().nullable().optional()
updatedAt: z.date()
});
export type TProjects = z.infer<typeof ProjectsSchema>;
export type TProjectsInsert = Omit<z.input<typeof ProjectsSchema>, TImmutableDBKeys>;
export type TProjectsUpdate = Partial<Omit<z.input<typeof ProjectsSchema>, TImmutableDBKeys>>;
export type TProjectsInsert = Omit<TProjects, TImmutableDBKeys>;
export type TProjectsUpdate = Partial<Omit<TProjects, TImmutableDBKeys>>;

@ -22,10 +22,9 @@ export const SamlConfigsSchema = z.object({
certTag: z.string().nullable().optional(),
createdAt: z.date(),
updatedAt: z.date(),
orgId: z.string().uuid(),
lastUsed: z.date().nullable().optional()
orgId: z.string().uuid()
});
export type TSamlConfigs = z.infer<typeof SamlConfigsSchema>;
export type TSamlConfigsInsert = Omit<z.input<typeof SamlConfigsSchema>, TImmutableDBKeys>;
export type TSamlConfigsUpdate = Partial<Omit<z.input<typeof SamlConfigsSchema>, TImmutableDBKeys>>;
export type TSamlConfigsInsert = Omit<TSamlConfigs, TImmutableDBKeys>;
export type TSamlConfigsUpdate = Partial<Omit<TSamlConfigs, TImmutableDBKeys>>;

@ -1,21 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const ScimTokensSchema = z.object({
id: z.string(),
ttlDays: z.coerce.number().default(365),
description: z.string(),
orgId: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date()
});
export type TScimTokens = z.infer<typeof ScimTokensSchema>;
export type TScimTokensInsert = Omit<z.input<typeof ScimTokensSchema>, TImmutableDBKeys>;
export type TScimTokensUpdate = Partial<Omit<z.input<typeof ScimTokensSchema>, TImmutableDBKeys>>;

@ -16,10 +16,5 @@ export const SecretApprovalPoliciesApproversSchema = z.object({
});
export type TSecretApprovalPoliciesApprovers = z.infer<typeof SecretApprovalPoliciesApproversSchema>;
export type TSecretApprovalPoliciesApproversInsert = Omit<
z.input<typeof SecretApprovalPoliciesApproversSchema>,
TImmutableDBKeys
>;
export type TSecretApprovalPoliciesApproversUpdate = Partial<
Omit<z.input<typeof SecretApprovalPoliciesApproversSchema>, TImmutableDBKeys>
>;
export type TSecretApprovalPoliciesApproversInsert = Omit<TSecretApprovalPoliciesApprovers, TImmutableDBKeys>;
export type TSecretApprovalPoliciesApproversUpdate = Partial<Omit<TSecretApprovalPoliciesApprovers, TImmutableDBKeys>>;

@ -18,7 +18,5 @@ export const SecretApprovalPoliciesSchema = z.object({
});
export type TSecretApprovalPolicies = z.infer<typeof SecretApprovalPoliciesSchema>;
export type TSecretApprovalPoliciesInsert = Omit<z.input<typeof SecretApprovalPoliciesSchema>, TImmutableDBKeys>;
export type TSecretApprovalPoliciesUpdate = Partial<
Omit<z.input<typeof SecretApprovalPoliciesSchema>, TImmutableDBKeys>
>;
export type TSecretApprovalPoliciesInsert = Omit<TSecretApprovalPolicies, TImmutableDBKeys>;
export type TSecretApprovalPoliciesUpdate = Partial<Omit<TSecretApprovalPolicies, TImmutableDBKeys>>;

@ -16,10 +16,5 @@ export const SecretApprovalRequestSecretTagsSchema = z.object({
});
export type TSecretApprovalRequestSecretTags = z.infer<typeof SecretApprovalRequestSecretTagsSchema>;
export type TSecretApprovalRequestSecretTagsInsert = Omit<
z.input<typeof SecretApprovalRequestSecretTagsSchema>,
TImmutableDBKeys
>;
export type TSecretApprovalRequestSecretTagsUpdate = Partial<
Omit<z.input<typeof SecretApprovalRequestSecretTagsSchema>, TImmutableDBKeys>
>;
export type TSecretApprovalRequestSecretTagsInsert = Omit<TSecretApprovalRequestSecretTags, TImmutableDBKeys>;
export type TSecretApprovalRequestSecretTagsUpdate = Partial<Omit<TSecretApprovalRequestSecretTags, TImmutableDBKeys>>;

@ -17,10 +17,5 @@ export const SecretApprovalRequestsReviewersSchema = z.object({
});
export type TSecretApprovalRequestsReviewers = z.infer<typeof SecretApprovalRequestsReviewersSchema>;
export type TSecretApprovalRequestsReviewersInsert = Omit<
z.input<typeof SecretApprovalRequestsReviewersSchema>,
TImmutableDBKeys
>;
export type TSecretApprovalRequestsReviewersUpdate = Partial<
Omit<z.input<typeof SecretApprovalRequestsReviewersSchema>, TImmutableDBKeys>
>;
export type TSecretApprovalRequestsReviewersInsert = Omit<TSecretApprovalRequestsReviewers, TImmutableDBKeys>;
export type TSecretApprovalRequestsReviewersUpdate = Partial<Omit<TSecretApprovalRequestsReviewers, TImmutableDBKeys>>;

@ -35,10 +35,5 @@ export const SecretApprovalRequestsSecretsSchema = z.object({
});
export type TSecretApprovalRequestsSecrets = z.infer<typeof SecretApprovalRequestsSecretsSchema>;
export type TSecretApprovalRequestsSecretsInsert = Omit<
z.input<typeof SecretApprovalRequestsSecretsSchema>,
TImmutableDBKeys
>;
export type TSecretApprovalRequestsSecretsUpdate = Partial<
Omit<z.input<typeof SecretApprovalRequestsSecretsSchema>, TImmutableDBKeys>
>;
export type TSecretApprovalRequestsSecretsInsert = Omit<TSecretApprovalRequestsSecrets, TImmutableDBKeys>;
export type TSecretApprovalRequestsSecretsUpdate = Partial<Omit<TSecretApprovalRequestsSecrets, TImmutableDBKeys>>;

@ -22,7 +22,5 @@ export const SecretApprovalRequestsSchema = z.object({
});
export type TSecretApprovalRequests = z.infer<typeof SecretApprovalRequestsSchema>;
export type TSecretApprovalRequestsInsert = Omit<z.input<typeof SecretApprovalRequestsSchema>, TImmutableDBKeys>;
export type TSecretApprovalRequestsUpdate = Partial<
Omit<z.input<typeof SecretApprovalRequestsSchema>, TImmutableDBKeys>
>;
export type TSecretApprovalRequestsInsert = Omit<TSecretApprovalRequests, TImmutableDBKeys>;
export type TSecretApprovalRequestsUpdate = Partial<Omit<TSecretApprovalRequests, TImmutableDBKeys>>;

@ -20,5 +20,5 @@ export const SecretBlindIndexesSchema = z.object({
});
export type TSecretBlindIndexes = z.infer<typeof SecretBlindIndexesSchema>;
export type TSecretBlindIndexesInsert = Omit<z.input<typeof SecretBlindIndexesSchema>, TImmutableDBKeys>;
export type TSecretBlindIndexesUpdate = Partial<Omit<z.input<typeof SecretBlindIndexesSchema>, TImmutableDBKeys>>;
export type TSecretBlindIndexesInsert = Omit<TSecretBlindIndexes, TImmutableDBKeys>;
export type TSecretBlindIndexesUpdate = Partial<Omit<TSecretBlindIndexes, TImmutableDBKeys>>;

@ -18,5 +18,5 @@ export const SecretFolderVersionsSchema = z.object({
});
export type TSecretFolderVersions = z.infer<typeof SecretFolderVersionsSchema>;
export type TSecretFolderVersionsInsert = Omit<z.input<typeof SecretFolderVersionsSchema>, TImmutableDBKeys>;
export type TSecretFolderVersionsUpdate = Partial<Omit<z.input<typeof SecretFolderVersionsSchema>, TImmutableDBKeys>>;
export type TSecretFolderVersionsInsert = Omit<TSecretFolderVersions, TImmutableDBKeys>;
export type TSecretFolderVersionsUpdate = Partial<Omit<TSecretFolderVersions, TImmutableDBKeys>>;

@ -18,5 +18,5 @@ export const SecretFoldersSchema = z.object({
});
export type TSecretFolders = z.infer<typeof SecretFoldersSchema>;
export type TSecretFoldersInsert = Omit<z.input<typeof SecretFoldersSchema>, TImmutableDBKeys>;
export type TSecretFoldersUpdate = Partial<Omit<z.input<typeof SecretFoldersSchema>, TImmutableDBKeys>>;
export type TSecretFoldersInsert = Omit<TSecretFolders, TImmutableDBKeys>;
export type TSecretFoldersUpdate = Partial<Omit<TSecretFolders, TImmutableDBKeys>>;

@ -19,5 +19,5 @@ export const SecretImportsSchema = z.object({
});
export type TSecretImports = z.infer<typeof SecretImportsSchema>;
export type TSecretImportsInsert = Omit<z.input<typeof SecretImportsSchema>, TImmutableDBKeys>;
export type TSecretImportsUpdate = Partial<Omit<z.input<typeof SecretImportsSchema>, TImmutableDBKeys>>;
export type TSecretImportsInsert = Omit<TSecretImports, TImmutableDBKeys>;
export type TSecretImportsUpdate = Partial<Omit<TSecretImports, TImmutableDBKeys>>;

@ -15,5 +15,5 @@ export const SecretRotationOutputsSchema = z.object({
});
export type TSecretRotationOutputs = z.infer<typeof SecretRotationOutputsSchema>;
export type TSecretRotationOutputsInsert = Omit<z.input<typeof SecretRotationOutputsSchema>, TImmutableDBKeys>;
export type TSecretRotationOutputsUpdate = Partial<Omit<z.input<typeof SecretRotationOutputsSchema>, TImmutableDBKeys>>;
export type TSecretRotationOutputsInsert = Omit<TSecretRotationOutputs, TImmutableDBKeys>;
export type TSecretRotationOutputsUpdate = Partial<Omit<TSecretRotationOutputs, TImmutableDBKeys>>;

@ -26,5 +26,5 @@ export const SecretRotationsSchema = z.object({
});
export type TSecretRotations = z.infer<typeof SecretRotationsSchema>;
export type TSecretRotationsInsert = Omit<z.input<typeof SecretRotationsSchema>, TImmutableDBKeys>;
export type TSecretRotationsUpdate = Partial<Omit<z.input<typeof SecretRotationsSchema>, TImmutableDBKeys>>;
export type TSecretRotationsInsert = Omit<TSecretRotations, TImmutableDBKeys>;
export type TSecretRotationsUpdate = Partial<Omit<TSecretRotations, TImmutableDBKeys>>;

@ -42,7 +42,5 @@ export const SecretScanningGitRisksSchema = z.object({
});
export type TSecretScanningGitRisks = z.infer<typeof SecretScanningGitRisksSchema>;
export type TSecretScanningGitRisksInsert = Omit<z.input<typeof SecretScanningGitRisksSchema>, TImmutableDBKeys>;
export type TSecretScanningGitRisksUpdate = Partial<
Omit<z.input<typeof SecretScanningGitRisksSchema>, TImmutableDBKeys>
>;
export type TSecretScanningGitRisksInsert = Omit<TSecretScanningGitRisks, TImmutableDBKeys>;
export type TSecretScanningGitRisksUpdate = Partial<Omit<TSecretScanningGitRisks, TImmutableDBKeys>>;

@ -17,5 +17,5 @@ export const SecretSnapshotFoldersSchema = z.object({
});
export type TSecretSnapshotFolders = z.infer<typeof SecretSnapshotFoldersSchema>;
export type TSecretSnapshotFoldersInsert = Omit<z.input<typeof SecretSnapshotFoldersSchema>, TImmutableDBKeys>;
export type TSecretSnapshotFoldersUpdate = Partial<Omit<z.input<typeof SecretSnapshotFoldersSchema>, TImmutableDBKeys>>;
export type TSecretSnapshotFoldersInsert = Omit<TSecretSnapshotFolders, TImmutableDBKeys>;
export type TSecretSnapshotFoldersUpdate = Partial<Omit<TSecretSnapshotFolders, TImmutableDBKeys>>;

@ -17,5 +17,5 @@ export const SecretSnapshotSecretsSchema = z.object({
});
export type TSecretSnapshotSecrets = z.infer<typeof SecretSnapshotSecretsSchema>;
export type TSecretSnapshotSecretsInsert = Omit<z.input<typeof SecretSnapshotSecretsSchema>, TImmutableDBKeys>;
export type TSecretSnapshotSecretsUpdate = Partial<Omit<z.input<typeof SecretSnapshotSecretsSchema>, TImmutableDBKeys>>;
export type TSecretSnapshotSecretsInsert = Omit<TSecretSnapshotSecrets, TImmutableDBKeys>;
export type TSecretSnapshotSecretsUpdate = Partial<Omit<TSecretSnapshotSecrets, TImmutableDBKeys>>;

@ -17,5 +17,5 @@ export const SecretSnapshotsSchema = z.object({
});
export type TSecretSnapshots = z.infer<typeof SecretSnapshotsSchema>;
export type TSecretSnapshotsInsert = Omit<z.input<typeof SecretSnapshotsSchema>, TImmutableDBKeys>;
export type TSecretSnapshotsUpdate = Partial<Omit<z.input<typeof SecretSnapshotsSchema>, TImmutableDBKeys>>;
export type TSecretSnapshotsInsert = Omit<TSecretSnapshots, TImmutableDBKeys>;
export type TSecretSnapshotsUpdate = Partial<Omit<TSecretSnapshots, TImmutableDBKeys>>;

@ -14,5 +14,5 @@ export const SecretTagJunctionSchema = z.object({
});
export type TSecretTagJunction = z.infer<typeof SecretTagJunctionSchema>;
export type TSecretTagJunctionInsert = Omit<z.input<typeof SecretTagJunctionSchema>, TImmutableDBKeys>;
export type TSecretTagJunctionUpdate = Partial<Omit<z.input<typeof SecretTagJunctionSchema>, TImmutableDBKeys>>;
export type TSecretTagJunctionInsert = Omit<TSecretTagJunction, TImmutableDBKeys>;
export type TSecretTagJunctionUpdate = Partial<Omit<TSecretTagJunction, TImmutableDBKeys>>;

@ -19,5 +19,5 @@ export const SecretTagsSchema = z.object({
});
export type TSecretTags = z.infer<typeof SecretTagsSchema>;
export type TSecretTagsInsert = Omit<z.input<typeof SecretTagsSchema>, TImmutableDBKeys>;
export type TSecretTagsUpdate = Partial<Omit<z.input<typeof SecretTagsSchema>, TImmutableDBKeys>>;
export type TSecretTagsInsert = Omit<TSecretTags, TImmutableDBKeys>;
export type TSecretTagsUpdate = Partial<Omit<TSecretTags, TImmutableDBKeys>>;

@ -14,7 +14,5 @@ export const SecretVersionTagJunctionSchema = z.object({
});
export type TSecretVersionTagJunction = z.infer<typeof SecretVersionTagJunctionSchema>;
export type TSecretVersionTagJunctionInsert = Omit<z.input<typeof SecretVersionTagJunctionSchema>, TImmutableDBKeys>;
export type TSecretVersionTagJunctionUpdate = Partial<
Omit<z.input<typeof SecretVersionTagJunctionSchema>, TImmutableDBKeys>
>;
export type TSecretVersionTagJunctionInsert = Omit<TSecretVersionTagJunction, TImmutableDBKeys>;
export type TSecretVersionTagJunctionUpdate = Partial<Omit<TSecretVersionTagJunction, TImmutableDBKeys>>;

@ -36,5 +36,5 @@ export const SecretVersionsSchema = z.object({
});
export type TSecretVersions = z.infer<typeof SecretVersionsSchema>;
export type TSecretVersionsInsert = Omit<z.input<typeof SecretVersionsSchema>, TImmutableDBKeys>;
export type TSecretVersionsUpdate = Partial<Omit<z.input<typeof SecretVersionsSchema>, TImmutableDBKeys>>;
export type TSecretVersionsInsert = Omit<TSecretVersions, TImmutableDBKeys>;
export type TSecretVersionsUpdate = Partial<Omit<TSecretVersions, TImmutableDBKeys>>;

@ -34,5 +34,5 @@ export const SecretsSchema = z.object({
});
export type TSecrets = z.infer<typeof SecretsSchema>;
export type TSecretsInsert = Omit<z.input<typeof SecretsSchema>, TImmutableDBKeys>;
export type TSecretsUpdate = Partial<Omit<z.input<typeof SecretsSchema>, TImmutableDBKeys>>;
export type TSecretsInsert = Omit<TSecrets, TImmutableDBKeys>;
export type TSecretsUpdate = Partial<Omit<TSecrets, TImmutableDBKeys>>;

@ -25,5 +25,5 @@ export const ServiceTokensSchema = z.object({
});
export type TServiceTokens = z.infer<typeof ServiceTokensSchema>;
export type TServiceTokensInsert = Omit<z.input<typeof ServiceTokensSchema>, TImmutableDBKeys>;
export type TServiceTokensUpdate = Partial<Omit<z.input<typeof ServiceTokensSchema>, TImmutableDBKeys>>;
export type TServiceTokensInsert = Omit<TServiceTokens, TImmutableDBKeys>;
export type TServiceTokensUpdate = Partial<Omit<TServiceTokens, TImmutableDBKeys>>;

@ -12,11 +12,9 @@ export const SuperAdminSchema = z.object({
initialized: z.boolean().default(false).nullable().optional(),
allowSignUp: z.boolean().default(true).nullable().optional(),
createdAt: z.date(),
updatedAt: z.date(),
allowedSignUpDomain: z.string().nullable().optional(),
instanceId: z.string().uuid().default("00000000-0000-0000-0000-000000000000")
updatedAt: z.date()
});
export type TSuperAdmin = z.infer<typeof SuperAdminSchema>;
export type TSuperAdminInsert = Omit<z.input<typeof SuperAdminSchema>, TImmutableDBKeys>;
export type TSuperAdminUpdate = Partial<Omit<z.input<typeof SuperAdminSchema>, TImmutableDBKeys>>;
export type TSuperAdminInsert = Omit<TSuperAdmin, TImmutableDBKeys>;
export type TSuperAdminUpdate = Partial<Omit<TSuperAdmin, TImmutableDBKeys>>;

Some files were not shown because too many files have changed in this diff.