mirror of https://github.com/Infisical/infisical.git
synced 2025-03-19 17:33:26 +00:00

Compare commits: 11 Commits (cert-mgmt ... daniel/fix)
Commits (SHA1):
eaccb3ddbc
ad482fb24e
94cf3c5ae2
9b781d3af8
f38d64cc90
6c738f7ad1
afb818e41a
c0d679dbd4
7046542ace
cdf90bf9a5
ecfb3ec95b
18  .env.example
@@ -8,17 +8,19 @@ ENCRYPTION_KEY=6c1fe4e407b8911c104518103505b218
# THIS IS A SAMPLE AUTH_SECRET KEY AND SHOULD NEVER BE USED FOR PRODUCTION
AUTH_SECRET=5lrMXKKWCVocS/uerPsl7V+TX/aaUaI7iDkgl3tSmLE=

# Postgres creds
POSTGRES_PASSWORD=infisical
POSTGRES_USER=infisical
POSTGRES_DB=infisical

# MongoDB
# Backend will connect to the MongoDB instance at connection string MONGO_URL which can either be a ref
# to the MongoDB container instance or Mongo Cloud
# Required
DB_CONNECTION_URI=postgres://${POSTGRES_USER}:${POSTGRES_PASSWORD}@db:5432/${POSTGRES_DB}
MONGO_URL=mongodb://root:example@mongo:27017/?authSource=admin

# Redis
REDIS_URL=redis://redis:6379

# Optional credentials for MongoDB container instance and Mongo-Express
MONGO_USERNAME=root
MONGO_PASSWORD=example

# Website URL
# Required
SITE_URL=http://localhost:8080
@@ -63,7 +65,3 @@ CLIENT_SECRET_GITHUB_LOGIN=

CLIENT_ID_GITLAB_LOGIN=
CLIENT_SECRET_GITLAB_LOGIN=

CAPTCHA_SECRET=

NEXT_PUBLIC_CAPTCHA_SITE_KEY=

@@ -1,4 +0,0 @@
REDIS_URL=redis://localhost:6379
DB_CONNECTION_URI=postgres://infisical:infisical@localhost/infisical?sslmode=disable
AUTH_SECRET=4bnfe4e407b8921c104518903515b218
ENCRYPTION_KEY=4bnfe4e407b8921c104518903515b218
190  .github/resources/changelog-generator.py  vendored
@ -1,190 +0,0 @@
|
||||
# inspired by https://www.photoroom.com/inside-photoroom/how-we-automated-our-changelog-thanks-to-chatgpt
|
||||
import os
|
||||
import requests
|
||||
import re
|
||||
from openai import OpenAI
|
||||
import subprocess
|
||||
from datetime import datetime
|
||||
|
||||
import uuid
|
||||
|
||||
# Constants
|
||||
REPO_OWNER = "infisical"
|
||||
REPO_NAME = "infisical"
|
||||
TOKEN = os.environ["GITHUB_TOKEN"]
|
||||
SLACK_WEBHOOK_URL = os.environ["SLACK_WEBHOOK_URL"]
|
||||
OPENAI_API_KEY = os.environ["OPENAI_API_KEY"]
|
||||
SLACK_MSG_COLOR = "#36a64f"
|
||||
|
||||
headers = {
|
||||
"Authorization": f"Bearer {TOKEN}",
|
||||
"Accept": "application/vnd.github+json",
|
||||
"X-GitHub-Api-Version": "2022-11-28",
|
||||
}
|
||||
|
||||
|
||||
def set_multiline_output(name, value):
|
||||
with open(os.environ['GITHUB_OUTPUT'], 'a') as fh:
|
||||
delimiter = uuid.uuid1()
|
||||
print(f'{name}<<{delimiter}', file=fh)
|
||||
print(value, file=fh)
|
||||
print(delimiter, file=fh)
|
||||
|
||||
def post_changelog_to_slack(changelog, tag):
|
||||
slack_payload = {
|
||||
"text": "Hey team, it's changelog time! :wave:",
|
||||
"attachments": [
|
||||
{
|
||||
"color": SLACK_MSG_COLOR,
|
||||
"title": f"🗓️Infisical Changelog - {tag}",
|
||||
"text": changelog,
|
||||
}
|
||||
],
|
||||
}
|
||||
|
||||
response = requests.post(SLACK_WEBHOOK_URL, json=slack_payload)
|
||||
|
||||
if response.status_code != 200:
|
||||
raise Exception("Failed to post changelog to Slack.")
|
||||
|
||||
def find_previous_release_tag(release_tag:str):
|
||||
previous_tag = subprocess.check_output(["git", "describe", "--tags", "--abbrev=0", f"{release_tag}^"]).decode("utf-8").strip()
|
||||
while not(previous_tag.startswith("infisical/")):
|
||||
previous_tag = subprocess.check_output(["git", "describe", "--tags", "--abbrev=0", f"{previous_tag}^"]).decode("utf-8").strip()
|
||||
return previous_tag
|
||||
|
||||
def get_tag_creation_date(tag_name):
|
||||
url = f"https://api.github.com/repos/{REPO_OWNER}/{REPO_NAME}/git/refs/tags/{tag_name}"
|
||||
response = requests.get(url, headers=headers)
|
||||
response.raise_for_status()
|
||||
commit_sha = response.json()['object']['sha']
|
||||
|
||||
commit_url = f"https://api.github.com/repos/{REPO_OWNER}/{REPO_NAME}/commits/{commit_sha}"
|
||||
commit_response = requests.get(commit_url, headers=headers)
|
||||
commit_response.raise_for_status()
|
||||
creation_date = commit_response.json()['commit']['author']['date']
|
||||
|
||||
return datetime.strptime(creation_date, '%Y-%m-%dT%H:%M:%SZ')
|
||||
|
||||
|
||||
def fetch_prs_between_tags(previous_tag_date:datetime, release_tag_date:datetime):
|
||||
# Use GitHub API to fetch PRs merged between the commits
|
||||
url = f"https://api.github.com/repos/{REPO_OWNER}/{REPO_NAME}/pulls?state=closed&merged=true"
|
||||
response = requests.get(url, headers=headers)
|
||||
|
||||
if response.status_code != 200:
|
||||
raise Exception("Error fetching PRs from GitHub API!")
|
||||
|
||||
prs = []
|
||||
for pr in response.json():
|
||||
# the idea is as tags happen recently we get last 100 closed PRs and then filter by tag creation date
|
||||
if pr["merged_at"] and datetime.strptime(pr["merged_at"],'%Y-%m-%dT%H:%M:%SZ') < release_tag_date and datetime.strptime(pr["merged_at"],'%Y-%m-%dT%H:%M:%SZ') > previous_tag_date:
|
||||
prs.append(pr)
|
||||
|
||||
return prs
|
||||
|
||||
|
||||
def extract_commit_details_from_prs(prs):
|
||||
commit_details = []
|
||||
for pr in prs:
|
||||
commit_message = pr["title"]
|
||||
commit_url = pr["html_url"]
|
||||
pr_number = pr["number"]
|
||||
branch_name = pr["head"]["ref"]
|
||||
issue_numbers = re.findall(r"(www-\d+|web-\d+)", branch_name)
|
||||
|
||||
# If no issue numbers are found, add the PR details without issue numbers and URLs
|
||||
if not issue_numbers:
|
||||
commit_details.append(
|
||||
{
|
||||
"message": commit_message,
|
||||
"pr_number": pr_number,
|
||||
"pr_url": commit_url,
|
||||
"issue_number": None,
|
||||
"issue_url": None,
|
||||
}
|
||||
)
|
||||
continue
|
||||
|
||||
for issue in issue_numbers:
|
||||
commit_details.append(
|
||||
{
|
||||
"message": commit_message,
|
||||
"pr_number": pr_number,
|
||||
"pr_url": commit_url,
|
||||
"issue_number": issue,
|
||||
}
|
||||
)
|
||||
|
||||
return commit_details
|
||||
|
||||
# Function to generate changelog using OpenAI
|
||||
def generate_changelog_with_openai(commit_details):
|
||||
commit_messages = []
|
||||
for details in commit_details:
|
||||
base_message = f"{details['pr_url']} - {details['message']}"
|
||||
# Add the issue URL if available
|
||||
# if details["issue_url"]:
|
||||
# base_message += f" (Linear Issue: {details['issue_url']})"
|
||||
commit_messages.append(base_message)
|
||||
|
||||
commit_list = "\n".join(commit_messages)
|
||||
prompt = """
|
||||
Generate a changelog for Infisical, opensource secretops
|
||||
The changelog should:
|
||||
1. Be Informative: Using the provided list of GitHub commits, break them down into categories such as Features, Fixes & Improvements, and Technical Updates. Summarize each commit concisely, ensuring the key points are highlighted.
|
||||
2. Have a Professional yet Friendly tone: The tone should be balanced, not too corporate or too informal.
|
||||
3. Celebratory Introduction and Conclusion: Start the changelog with a celebratory note acknowledging the team's hard work and progress. End with a shoutout to the team and wishes for a pleasant weekend.
|
||||
4. Formatting: you cannot use Markdown formatting, and you can only use emojis for the introductory paragraph or the conclusion paragraph, nowhere else.
|
||||
5. Links: the syntax to create links is the following: `<http://www.example.com|This message is a link>`.
|
||||
6. Linear Links: note that the Linear link is optional, include it only if provided.
|
||||
7. Do not wrap your answer in a codeblock. Just output the text, nothing else
|
||||
Here's a good example to follow, please try to match the formatting as closely as possible, only changing the content of the changelog and have some liberty with the introduction. Notice the importance of the formatting of a changelog item:
|
||||
- <https://github.com/facebook/react/pull/27304/%7C#27304>: We optimize our ci to strip comments and minify production builds. (<https://linear.app/example/issue/WEB-1234/%7CWEB-1234>))
|
||||
And here's an example of the full changelog:
|
||||
|
||||
*Features*
|
||||
• <https://github.com/facebook/react/pull/27304/%7C#27304>: We optimize our ci to strip comments and minify production builds. (<https://linear.app/example/issue/WEB-1234/%7CWEB-1234>)
|
||||
*Fixes & Improvements*
|
||||
• <https://github.com/facebook/react/pull/27304/%7C#27304>: We optimize our ci to strip comments and minify production builds. (<https://linear.app/example/issue/WEB-1234/%7CWEB-1234>)
|
||||
*Technical Updates*
|
||||
• <https://github.com/facebook/react/pull/27304/%7C#27304>: We optimize our ci to strip comments and minify production builds. (<https://linear.app/example/issue/WEB-1234/%7CWEB-1234>)
|
||||
|
||||
Stay tuned for more exciting updates coming soon!
|
||||
And here are the commits:
|
||||
{}
|
||||
""".format(
|
||||
commit_list
|
||||
)
|
||||
|
||||
client = OpenAI(api_key=OPENAI_API_KEY)
|
||||
messages = [{"role": "user", "content": prompt}]
|
||||
response = client.chat.completions.create(model="gpt-3.5-turbo", messages=messages)
|
||||
|
||||
if "error" in response.choices[0].message:
|
||||
raise Exception("Error generating changelog with OpenAI!")
|
||||
|
||||
return response.choices[0].message.content.strip()
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
try:
|
||||
# Get the latest and previous release tags
|
||||
latest_tag = subprocess.check_output(["git", "describe", "--tags", "--abbrev=0"]).decode("utf-8").strip()
|
||||
previous_tag = find_previous_release_tag(latest_tag)
|
||||
|
||||
latest_tag_date = get_tag_creation_date(latest_tag)
|
||||
previous_tag_date = get_tag_creation_date(previous_tag)
|
||||
|
||||
prs = fetch_prs_between_tags(previous_tag_date,latest_tag_date)
|
||||
pr_details = extract_commit_details_from_prs(prs)
|
||||
|
||||
# Generate changelog
|
||||
changelog = generate_changelog_with_openai(pr_details)
|
||||
|
||||
post_changelog_to_slack(changelog,latest_tag)
|
||||
# Print or post changelog to Slack
|
||||
# set_multiline_output("changelog", changelog)
|
||||
|
||||
except Exception as e:
|
||||
print(str(e))
|
26  .github/resources/rename_migration_files.py  vendored
@ -1,26 +0,0 @@
|
||||
import os
|
||||
from datetime import datetime, timedelta
|
||||
|
||||
def rename_migrations():
|
||||
migration_folder = "./backend/src/db/migrations"
|
||||
with open("added_files.txt", "r") as file:
|
||||
changed_files = file.readlines()
|
||||
|
||||
# Find the latest file among the changed files
|
||||
latest_timestamp = datetime.now() # utc time
|
||||
for file_path in changed_files:
|
||||
file_path = file_path.strip()
|
||||
# each new file bump by 1s
|
||||
latest_timestamp = latest_timestamp + timedelta(seconds=1)
|
||||
|
||||
new_filename = os.path.join(migration_folder, latest_timestamp.strftime("%Y%m%d%H%M%S") + f"_{file_path.split('_')[1]}")
|
||||
old_filename = os.path.join(migration_folder, file_path)
|
||||
os.rename(old_filename, new_filename)
|
||||
print(f"Renamed {old_filename} to {new_filename}")
|
||||
|
||||
if len(changed_files) == 0:
|
||||
print("No new files added to migration folder")
|
||||
|
||||
if __name__ == "__main__":
|
||||
rename_migrations()
|
||||
|
15  .github/values.yaml  vendored
@ -13,10 +13,11 @@ fullnameOverride: ""
|
||||
##
|
||||
|
||||
infisical:
|
||||
autoDatabaseSchemaMigration: false
|
||||
|
||||
## @param backend.enabled Enable backend
|
||||
##
|
||||
enabled: false
|
||||
|
||||
## @param backend.name Backend name
|
||||
##
|
||||
name: infisical
|
||||
replicaCount: 3
|
||||
image:
|
||||
@ -27,7 +28,7 @@ infisical:
|
||||
deploymentAnnotations:
|
||||
secrets.infisical.com/auto-reload: "true"
|
||||
|
||||
kubeSecretRef: "managed-secret"
|
||||
kubeSecretRef: "infisical-gamma-secrets"
|
||||
|
||||
ingress:
|
||||
## @param ingress.enabled Enable ingress
|
||||
@ -49,9 +50,3 @@ ingress:
|
||||
- secretName: letsencrypt-prod
|
||||
hosts:
|
||||
- gamma.infisical.com
|
||||
|
||||
postgresql:
|
||||
enabled: false
|
||||
|
||||
redis:
|
||||
enabled: false
|
||||
|
@ -41,7 +41,6 @@ jobs:
|
||||
load: true
|
||||
context: backend
|
||||
tags: infisical/infisical:test
|
||||
platforms: linux/amd64,linux/arm64
|
||||
- name: ⏻ Spawn backend container and dependencies
|
||||
run: |
|
||||
docker compose -f .github/resources/docker-compose.be-test.yml up --wait --quiet-pull
|
||||
@ -93,7 +92,6 @@ jobs:
|
||||
project: 64mmf0n610
|
||||
context: frontend
|
||||
tags: infisical/frontend:test
|
||||
platforms: linux/amd64,linux/arm64
|
||||
build-args: |
|
||||
POSTHOG_API_KEY=${{ secrets.PUBLIC_POSTHOG_API_KEY }}
|
||||
NEXT_INFISICAL_PLATFORM_VERSION=${{ steps.extract_version.outputs.version }}
|
||||
|
38  .github/workflows/build-patroni-docker-img.yml  vendored
@ -1,38 +0,0 @@
|
||||
name: Build patroni
|
||||
on: [workflow_dispatch]
|
||||
|
||||
jobs:
|
||||
patroni-image:
|
||||
name: Build patroni
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: ☁️ Checkout source
|
||||
uses: actions/checkout@v3
|
||||
with:
|
||||
repository: 'zalando/patroni'
|
||||
- name: Save commit hashes for tag
|
||||
id: commit
|
||||
uses: pr-mpt/actions-commit-hash@v2
|
||||
- name: 🔧 Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v2
|
||||
- name: 🐋 Login to Docker Hub
|
||||
uses: docker/login-action@v2
|
||||
with:
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
- name: Set up Depot CLI
|
||||
uses: depot/setup-action@v1
|
||||
- name: 🏗️ Build backend and push to docker hub
|
||||
uses: depot/build-push-action@v1
|
||||
with:
|
||||
project: 64mmf0n610
|
||||
token: ${{ secrets.DEPOT_PROJECT_TOKEN }}
|
||||
push: true
|
||||
context: .
|
||||
file: Dockerfile
|
||||
tags: |
|
||||
infisical/patroni:${{ steps.commit.outputs.short }}
|
||||
infisical/patroni:latest
|
||||
platforms: linux/amd64,linux/arm64
|
||||
|
||||
|
140  .github/workflows/build-staging-and-deploy-aws.yml  vendored
@ -1,140 +0,0 @@
|
||||
name: Deployment pipeline
|
||||
on: [workflow_dispatch]
|
||||
|
||||
permissions:
|
||||
id-token: write
|
||||
contents: read
|
||||
|
||||
jobs:
|
||||
infisical-image:
|
||||
name: Build backend image
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: ☁️ Checkout source
|
||||
uses: actions/checkout@v3
|
||||
- name: 📦 Install dependencies to test all dependencies
|
||||
run: npm ci --only-production
|
||||
working-directory: backend
|
||||
- name: Save commit hashes for tag
|
||||
id: commit
|
||||
uses: pr-mpt/actions-commit-hash@v2
|
||||
- name: 🔧 Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v2
|
||||
- name: 🐋 Login to Docker Hub
|
||||
uses: docker/login-action@v2
|
||||
with:
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
- name: Set up Depot CLI
|
||||
uses: depot/setup-action@v1
|
||||
- name: 🏗️ Build backend and push to docker hub
|
||||
uses: depot/build-push-action@v1
|
||||
with:
|
||||
project: 64mmf0n610
|
||||
token: ${{ secrets.DEPOT_PROJECT_TOKEN }}
|
||||
push: true
|
||||
context: .
|
||||
file: Dockerfile.standalone-infisical
|
||||
tags: |
|
||||
infisical/staging_infisical:${{ steps.commit.outputs.short }}
|
||||
infisical/staging_infisical:latest
|
||||
platforms: linux/amd64,linux/arm64
|
||||
build-args: |
|
||||
POSTHOG_API_KEY=${{ secrets.PUBLIC_POSTHOG_API_KEY }}
|
||||
INFISICAL_PLATFORM_VERSION=${{ steps.commit.outputs.short }}
|
||||
|
||||
gamma-deployment:
|
||||
name: Deploy to gamma
|
||||
runs-on: ubuntu-latest
|
||||
needs: [infisical-image]
|
||||
environment:
|
||||
name: Gamma
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v2
|
||||
- name: Setup Node.js environment
|
||||
uses: actions/setup-node@v2
|
||||
with:
|
||||
node-version: "20"
|
||||
- name: Change directory to backend and install dependencies
|
||||
env:
|
||||
DB_CONNECTION_URI: ${{ secrets.DB_CONNECTION_URI }}
|
||||
run: |
|
||||
cd backend
|
||||
npm install
|
||||
npm run migration:latest
|
||||
- name: Configure AWS Credentials
|
||||
uses: aws-actions/configure-aws-credentials@v4
|
||||
with:
|
||||
audience: sts.amazonaws.com
|
||||
aws-region: us-east-1
|
||||
role-to-assume: arn:aws:iam::905418227878:role/deploy-new-ecs-img
|
||||
- name: Save commit hashes for tag
|
||||
id: commit
|
||||
uses: pr-mpt/actions-commit-hash@v2
|
||||
- name: Download task definition
|
||||
run: |
|
||||
aws ecs describe-task-definition --task-definition infisical-core-platform --query taskDefinition > task-definition.json
|
||||
- name: Render Amazon ECS task definition
|
||||
id: render-web-container
|
||||
uses: aws-actions/amazon-ecs-render-task-definition@v1
|
||||
with:
|
||||
task-definition: task-definition.json
|
||||
container-name: infisical-core-platform
|
||||
image: infisical/staging_infisical:${{ steps.commit.outputs.short }}
|
||||
environment-variables: "LOG_LEVEL=info"
|
||||
- name: Deploy to Amazon ECS service
|
||||
uses: aws-actions/amazon-ecs-deploy-task-definition@v1
|
||||
with:
|
||||
task-definition: ${{ steps.render-web-container.outputs.task-definition }}
|
||||
service: infisical-core-platform
|
||||
cluster: infisical-core-platform
|
||||
wait-for-service-stability: true
|
||||
|
||||
production-postgres-deployment:
|
||||
name: Deploy to production
|
||||
runs-on: ubuntu-latest
|
||||
needs: [gamma-deployment]
|
||||
environment:
|
||||
name: Production
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v2
|
||||
- name: Setup Node.js environment
|
||||
uses: actions/setup-node@v2
|
||||
with:
|
||||
node-version: "20"
|
||||
- name: Change directory to backend and install dependencies
|
||||
env:
|
||||
DB_CONNECTION_URI: ${{ secrets.DB_CONNECTION_URI }}
|
||||
run: |
|
||||
cd backend
|
||||
npm install
|
||||
npm run migration:latest
|
||||
- name: Configure AWS Credentials
|
||||
uses: aws-actions/configure-aws-credentials@v4
|
||||
with:
|
||||
audience: sts.amazonaws.com
|
||||
aws-region: us-east-1
|
||||
role-to-assume: arn:aws:iam::381492033652:role/gha-make-prod-deployment
|
||||
- name: Save commit hashes for tag
|
||||
id: commit
|
||||
uses: pr-mpt/actions-commit-hash@v2
|
||||
- name: Download task definition
|
||||
run: |
|
||||
aws ecs describe-task-definition --task-definition infisical-core-platform --query taskDefinition > task-definition.json
|
||||
- name: Render Amazon ECS task definition
|
||||
id: render-web-container
|
||||
uses: aws-actions/amazon-ecs-render-task-definition@v1
|
||||
with:
|
||||
task-definition: task-definition.json
|
||||
container-name: infisical-core-platform
|
||||
image: infisical/staging_infisical:${{ steps.commit.outputs.short }}
|
||||
environment-variables: "LOG_LEVEL=info"
|
||||
- name: Deploy to Amazon ECS service
|
||||
uses: aws-actions/amazon-ecs-deploy-task-definition@v1
|
||||
with:
|
||||
task-definition: ${{ steps.render-web-container.outputs.task-definition }}
|
||||
service: infisical-core-platform
|
||||
cluster: infisical-core-platform
|
||||
wait-for-service-stability: true
|
120  .github/workflows/build-staging-and-deploy.yml  vendored  Normal file
@ -0,0 +1,120 @@
|
||||
name: Build, Publish and Deploy to Gamma
|
||||
on: [workflow_dispatch]
|
||||
|
||||
jobs:
|
||||
infisical-image:
|
||||
name: Build backend image
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: ☁️ Checkout source
|
||||
uses: actions/checkout@v3
|
||||
- name: 📦 Install dependencies to test all dependencies
|
||||
run: npm ci --only-production
|
||||
working-directory: backend
|
||||
# - name: 🧪 Run tests
|
||||
# run: npm run test:ci
|
||||
# working-directory: backend
|
||||
- name: Save commit hashes for tag
|
||||
id: commit
|
||||
uses: pr-mpt/actions-commit-hash@v2
|
||||
- name: 🔧 Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v2
|
||||
- name: 🐋 Login to Docker Hub
|
||||
uses: docker/login-action@v2
|
||||
with:
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
- name: Set up Depot CLI
|
||||
uses: depot/setup-action@v1
|
||||
- name: 📦 Build backend and export to Docker
|
||||
uses: depot/build-push-action@v1
|
||||
with:
|
||||
project: 64mmf0n610
|
||||
token: ${{ secrets.DEPOT_PROJECT_TOKEN }}
|
||||
load: true
|
||||
context: .
|
||||
file: Dockerfile.standalone-infisical
|
||||
tags: infisical/infisical:test
|
||||
# - name: ⏻ Spawn backend container and dependencies
|
||||
# run: |
|
||||
# docker compose -f .github/resources/docker-compose.be-test.yml up --wait --quiet-pull
|
||||
# - name: 🧪 Test backend image
|
||||
# run: |
|
||||
# ./.github/resources/healthcheck.sh infisical-backend-test
|
||||
# - name: ⏻ Shut down backend container and dependencies
|
||||
# run: |
|
||||
# docker compose -f .github/resources/docker-compose.be-test.yml down
|
||||
- name: 🏗️ Build backend and push
|
||||
uses: depot/build-push-action@v1
|
||||
with:
|
||||
project: 64mmf0n610
|
||||
token: ${{ secrets.DEPOT_PROJECT_TOKEN }}
|
||||
push: true
|
||||
context: .
|
||||
file: Dockerfile.standalone-infisical
|
||||
tags: |
|
||||
infisical/staging_infisical:${{ steps.commit.outputs.short }}
|
||||
infisical/staging_infisical:latest
|
||||
platforms: linux/amd64,linux/arm64
|
||||
build-args: |
|
||||
POSTHOG_API_KEY=${{ secrets.PUBLIC_POSTHOG_API_KEY }}
|
||||
INFISICAL_PLATFORM_VERSION=${{ steps.extract_version.outputs.version }}
|
||||
postgres-migration:
|
||||
name: Run latest migration files
|
||||
runs-on: ubuntu-latest
|
||||
needs: [infisical-image]
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v2
|
||||
- name: Setup Node.js environment
|
||||
uses: actions/setup-node@v2
|
||||
with:
|
||||
node-version: "20"
|
||||
- name: Change directory to backend and install dependencies
|
||||
env:
|
||||
DB_CONNECTION_URI: ${{ secrets.DB_CONNECTION_URI }}
|
||||
run: |
|
||||
cd backend
|
||||
npm install
|
||||
npm run migration:latest
|
||||
# - name: Run postgres DB migration files
|
||||
# env:
|
||||
# DB_CONNECTION_URI: ${{ secrets.DB_CONNECTION_URI }}
|
||||
# run: npm run migration:latest
|
||||
gamma-deployment:
|
||||
name: Deploy to gamma
|
||||
runs-on: ubuntu-latest
|
||||
needs: [postgres-migration]
|
||||
steps:
|
||||
- name: ☁️ Checkout source
|
||||
uses: actions/checkout@v3
|
||||
- name: Install Helm
|
||||
uses: azure/setup-helm@v3
|
||||
with:
|
||||
version: v3.10.0
|
||||
- name: Install infisical helm chart
|
||||
run: |
|
||||
helm repo add infisical-helm-charts 'https://dl.cloudsmith.io/public/infisical/helm-charts/helm/charts/'
|
||||
helm repo update
|
||||
- name: Install kubectl
|
||||
uses: azure/setup-kubectl@v3
|
||||
- name: Install doctl
|
||||
uses: digitalocean/action-doctl@v2
|
||||
with:
|
||||
token: ${{ secrets.DIGITALOCEAN_ACCESS_TOKEN }}
|
||||
- name: Save DigitalOcean kubeconfig with short-lived credentials
|
||||
run: doctl kubernetes cluster kubeconfig save --expiry-seconds 600 infisical-gamma-postgres
|
||||
- name: switch to gamma namespace
|
||||
run: kubectl config set-context --current --namespace=gamma
|
||||
- name: test kubectl
|
||||
run: kubectl get ingress
|
||||
- name: Download helm values to file and upgrade gamma deploy
|
||||
run: |
|
||||
wget https://raw.githubusercontent.com/Infisical/infisical/main/.github/values.yaml
|
||||
helm upgrade infisical infisical-helm-charts/infisical-standalone --values values.yaml --wait --install
|
||||
if [[ $(helm status infisical) == *"FAILED"* ]]; then
|
||||
echo "Helm upgrade failed"
|
||||
exit 1
|
||||
else
|
||||
echo "Helm upgrade was successful"
|
||||
fi
|
@ -5,7 +5,6 @@ on:
|
||||
types: [opened, synchronize]
|
||||
paths:
|
||||
- "backend/src/server/routes/**"
|
||||
- "backend/src/ee/routes/**"
|
||||
|
||||
jobs:
|
||||
check-be-api-changes:
|
||||
@ -29,7 +28,7 @@ jobs:
|
||||
run: docker build --tag infisical-api .
|
||||
working-directory: backend
|
||||
- name: Start postgres and redis
|
||||
run: touch .env && docker-compose -f docker-compose.dev.yml up -d db redis
|
||||
run: touch .env && docker-compose -f docker-compose.pg.yml up -d db redis
|
||||
- name: Start the server
|
||||
run: |
|
||||
echo "SECRET_SCANNING_GIT_APP_ID=793712" >> .env
|
||||
@ -40,38 +39,17 @@ jobs:
|
||||
REDIS_URL: redis://172.17.0.1:6379
|
||||
DB_CONNECTION_URI: postgres://infisical:infisical@172.17.0.1:5432/infisical?sslmode=disable
|
||||
JWT_AUTH_SECRET: something-random
|
||||
ENCRYPTION_KEY: 4bnfe4e407b8921c104518903515b218
|
||||
- uses: actions/setup-go@v5
|
||||
with:
|
||||
go-version: '1.21.5'
|
||||
- name: Wait for container to be stable and check logs
|
||||
run: |
|
||||
SECONDS=0
|
||||
HEALTHY=0
|
||||
while [ $SECONDS -lt 60 ]; do
|
||||
if docker ps | grep infisical-api | grep -q healthy; then
|
||||
echo "Container is healthy."
|
||||
HEALTHY=1
|
||||
break
|
||||
fi
|
||||
echo "Waiting for container to be healthy... ($SECONDS seconds elapsed)"
|
||||
|
||||
docker logs infisical-api
|
||||
|
||||
sleep 2
|
||||
SECONDS=$((SECONDS+2))
|
||||
done
|
||||
|
||||
if [ $HEALTHY -ne 1 ]; then
|
||||
echo "Container did not become healthy in time"
|
||||
exit 1
|
||||
fi
|
||||
- name: Wait for containers to be stable
|
||||
run: timeout 60s sh -c 'until docker ps | grep infisical-api | grep -q healthy; do echo "Waiting for container to be healthy..."; sleep 2; done'
|
||||
- name: Install openapi-diff
|
||||
run: go install github.com/tufin/oasdiff@latest
|
||||
- name: Running OpenAPI Spec diff action
|
||||
run: oasdiff breaking https://app.infisical.com/api/docs/json http://localhost:4000/api/docs/json --fail-on ERR
|
||||
- name: cleanup
|
||||
run: |
|
||||
docker-compose -f "docker-compose.dev.yml" down
|
||||
docker-compose -f "docker-compose.pg.yml" down
|
||||
docker stop infisical-api
|
||||
docker remove infisical-api
|
||||
|
@ -1,4 +1,4 @@
|
||||
name: Check Frontend Type and Lint check
|
||||
name: Check Frontend Pull Request
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
@ -10,8 +10,8 @@ on:
|
||||
- "frontend/.eslintrc.js"
|
||||
|
||||
jobs:
|
||||
check-fe-ts-lint:
|
||||
name: Check Frontend Type and Lint check
|
||||
check-fe-pr:
|
||||
name: Check
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 15
|
||||
|
||||
@ -25,11 +25,12 @@ jobs:
|
||||
cache: "npm"
|
||||
cache-dependency-path: frontend/package-lock.json
|
||||
- name: 📦 Install dependencies
|
||||
run: npm install
|
||||
run: npm ci --only-production --ignore-scripts
|
||||
working-directory: frontend
|
||||
- name: 🏗️ Run Type check
|
||||
run: npm run type:check
|
||||
working-directory: frontend
|
||||
- name: 🏗️ Run Link check
|
||||
run: npm run lint:fix
|
||||
# -
|
||||
# name: 🧪 Run tests
|
||||
# run: npm run test:ci
|
||||
# working-directory: frontend
|
||||
- name: 🏗️ Run build
|
||||
run: npm run build
|
||||
working-directory: frontend
|
34  .github/workflows/generate-release-changelog.yml  vendored
@ -1,34 +0,0 @@
|
||||
name: Generate Changelog
|
||||
permissions:
|
||||
contents: write
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
push:
|
||||
tags:
|
||||
- "infisical/v*.*.*-postgres"
|
||||
|
||||
jobs:
|
||||
generate_changelog:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-tags: true
|
||||
fetch-depth: 0
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: "3.12.0"
|
||||
- name: Install dependencies
|
||||
run: |
|
||||
python -m pip install --upgrade pip
|
||||
pip install requests openai
|
||||
- name: Generate Changelog and Post to Slack
|
||||
id: gen-changelog
|
||||
run: python .github/resources/changelog-generator.py
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
|
||||
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
|
@ -5,14 +5,9 @@ on:
|
||||
- "infisical/v*.*.*-postgres"
|
||||
|
||||
jobs:
|
||||
infisical-tests:
|
||||
name: Run tests before deployment
|
||||
# https://docs.github.com/en/actions/using-workflows/reusing-workflows#overview
|
||||
uses: ./.github/workflows/run-backend-tests.yml
|
||||
infisical-standalone:
|
||||
name: Build infisical standalone image postgres
|
||||
runs-on: ubuntu-latest
|
||||
needs: [infisical-tests]
|
||||
steps:
|
||||
- name: Extract version from tag
|
||||
id: extract_version
|
||||
|
121  .github/workflows/release_build_infisical_cli.yml  vendored
@ -1,75 +1,58 @@
|
||||
name: Build and release CLI
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
|
||||
push:
|
||||
# run only against tags
|
||||
tags:
|
||||
- "infisical-cli/v*.*.*"
|
||||
push:
|
||||
# run only against tags
|
||||
tags:
|
||||
- "infisical-cli/v*.*.*"
|
||||
|
||||
permissions:
|
||||
contents: write
|
||||
# packages: write
|
||||
# issues: write
|
||||
jobs:
|
||||
cli-integration-tests:
|
||||
name: Run tests before deployment
|
||||
uses: ./.github/workflows/run-cli-tests.yml
|
||||
secrets:
|
||||
CLI_TESTS_UA_CLIENT_ID: ${{ secrets.CLI_TESTS_UA_CLIENT_ID }}
|
||||
CLI_TESTS_UA_CLIENT_SECRET: ${{ secrets.CLI_TESTS_UA_CLIENT_SECRET }}
|
||||
CLI_TESTS_SERVICE_TOKEN: ${{ secrets.CLI_TESTS_SERVICE_TOKEN }}
|
||||
CLI_TESTS_PROJECT_ID: ${{ secrets.CLI_TESTS_PROJECT_ID }}
|
||||
CLI_TESTS_ENV_SLUG: ${{ secrets.CLI_TESTS_ENV_SLUG }}
|
||||
CLI_TESTS_USER_EMAIL: ${{ secrets.CLI_TESTS_USER_EMAIL }}
|
||||
CLI_TESTS_USER_PASSWORD: ${{ secrets.CLI_TESTS_USER_PASSWORD }}
|
||||
CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE: ${{ secrets.CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE }}
|
||||
contents: write
|
||||
# packages: write
|
||||
# issues: write
|
||||
|
||||
goreleaser:
|
||||
runs-on: ubuntu-20.04
|
||||
needs: [cli-integration-tests]
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- name: 🐋 Login to Docker Hub
|
||||
uses: docker/login-action@v2
|
||||
with:
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
- name: 🔧 Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v2
|
||||
- run: git fetch --force --tags
|
||||
- run: echo "Ref name ${{github.ref_name}}"
|
||||
- uses: actions/setup-go@v3
|
||||
with:
|
||||
go-version: ">=1.19.3"
|
||||
cache: true
|
||||
cache-dependency-path: cli/go.sum
|
||||
- name: libssl1.1 => libssl1.0-dev for OSXCross
|
||||
run: |
|
||||
echo 'deb http://security.ubuntu.com/ubuntu bionic-security main' | sudo tee -a /etc/apt/sources.list
|
||||
sudo apt update && apt-cache policy libssl1.0-dev
|
||||
sudo apt-get install libssl1.0-dev
|
||||
- name: OSXCross for CGO Support
|
||||
run: |
|
||||
mkdir ../../osxcross
|
||||
git clone https://github.com/plentico/osxcross-target.git ../../osxcross/target
|
||||
- uses: goreleaser/goreleaser-action@v4
|
||||
with:
|
||||
distribution: goreleaser-pro
|
||||
version: v1.26.2-pro
|
||||
args: release --clean
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GO_RELEASER_GITHUB_TOKEN }}
|
||||
POSTHOG_API_KEY_FOR_CLI: ${{ secrets.POSTHOG_API_KEY_FOR_CLI }}
|
||||
FURY_TOKEN: ${{ secrets.FURYPUSHTOKEN }}
|
||||
AUR_KEY: ${{ secrets.AUR_KEY }}
|
||||
GORELEASER_KEY: ${{ secrets.GORELEASER_KEY }}
|
||||
- uses: actions/setup-python@v4
|
||||
- run: pip install --upgrade cloudsmith-cli
|
||||
- name: Publish to CloudSmith
|
||||
run: sh cli/upload_to_cloudsmith.sh
|
||||
env:
|
||||
CLOUDSMITH_API_KEY: ${{ secrets.CLOUDSMITH_API_KEY }}
|
||||
jobs:
|
||||
goreleaser:
|
||||
runs-on: ubuntu-20.04
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- name: 🐋 Login to Docker Hub
|
||||
uses: docker/login-action@v2
|
||||
with:
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
- run: git fetch --force --tags
|
||||
- run: echo "Ref name ${{github.ref_name}}"
|
||||
- uses: actions/setup-go@v3
|
||||
with:
|
||||
go-version: ">=1.19.3"
|
||||
cache: true
|
||||
cache-dependency-path: cli/go.sum
|
||||
- name: libssl1.1 => libssl1.0-dev for OSXCross
|
||||
run: |
|
||||
echo 'deb http://security.ubuntu.com/ubuntu bionic-security main' | sudo tee -a /etc/apt/sources.list
|
||||
sudo apt update && apt-cache policy libssl1.0-dev
|
||||
sudo apt-get install libssl1.0-dev
|
||||
- name: OSXCross for CGO Support
|
||||
run: |
|
||||
mkdir ../../osxcross
|
||||
git clone https://github.com/plentico/osxcross-target.git ../../osxcross/target
|
||||
- uses: goreleaser/goreleaser-action@v4
|
||||
with:
|
||||
distribution: goreleaser-pro
|
||||
version: latest
|
||||
args: release --clean
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GO_RELEASER_GITHUB_TOKEN }}
|
||||
POSTHOG_API_KEY_FOR_CLI: ${{ secrets.POSTHOG_API_KEY_FOR_CLI }}
|
||||
FURY_TOKEN: ${{ secrets.FURYPUSHTOKEN }}
|
||||
AUR_KEY: ${{ secrets.AUR_KEY }}
|
||||
GORELEASER_KEY: ${{ secrets.GORELEASER_KEY }}
|
||||
- uses: actions/setup-python@v4
|
||||
- run: pip install --upgrade cloudsmith-cli
|
||||
- name: Publish to CloudSmith
|
||||
run: sh cli/upload_to_cloudsmith.sh
|
||||
env:
|
||||
CLOUDSMITH_API_KEY: ${{ secrets.CLOUDSMITH_API_KEY }}
|
||||
|
47  .github/workflows/run-backend-tests.yml  vendored
@ -1,47 +0,0 @@
|
||||
name: "Run backend tests"
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
types: [opened, synchronize]
|
||||
paths:
|
||||
- "backend/**"
|
||||
- "!backend/README.md"
|
||||
- "!backend/.*"
|
||||
- "backend/.eslintrc.js"
|
||||
workflow_call:
|
||||
|
||||
jobs:
|
||||
check-be-pr:
|
||||
name: Run integration test
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 15
|
||||
steps:
|
||||
- name: ☁️ Checkout source
|
||||
uses: actions/checkout@v3
|
||||
- uses: KengoTODA/actions-setup-docker-compose@v1
|
||||
if: ${{ env.ACT }}
|
||||
name: Install `docker-compose` for local simulations
|
||||
with:
|
||||
version: "2.14.2"
|
||||
- name: 🔧 Setup Node 20
|
||||
uses: actions/setup-node@v3
|
||||
with:
|
||||
node-version: "20"
|
||||
cache: "npm"
|
||||
cache-dependency-path: backend/package-lock.json
|
||||
- name: Install dependencies
|
||||
run: npm install
|
||||
working-directory: backend
|
||||
- name: Start postgres and redis
|
||||
run: touch .env && docker-compose -f docker-compose.dev.yml up -d db redis
|
||||
- name: Start integration test
|
||||
run: npm run test:e2e
|
||||
working-directory: backend
|
||||
env:
|
||||
REDIS_URL: redis://172.17.0.1:6379
|
||||
DB_CONNECTION_URI: postgres://infisical:infisical@172.17.0.1:5432/infisical?sslmode=disable
|
||||
AUTH_SECRET: something-random
|
||||
ENCRYPTION_KEY: 4bnfe4e407b8921c104518903515b218
|
||||
- name: cleanup
|
||||
run: |
|
||||
docker-compose -f "docker-compose.dev.yml" down
|
55  .github/workflows/run-cli-tests.yml  vendored
@ -1,55 +0,0 @@
|
||||
name: Go CLI Tests
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
types: [opened, synchronize]
|
||||
paths:
|
||||
- "cli/**"
|
||||
|
||||
workflow_dispatch:
|
||||
|
||||
workflow_call:
|
||||
secrets:
|
||||
CLI_TESTS_UA_CLIENT_ID:
|
||||
required: true
|
||||
CLI_TESTS_UA_CLIENT_SECRET:
|
||||
required: true
|
||||
CLI_TESTS_SERVICE_TOKEN:
|
||||
required: true
|
||||
CLI_TESTS_PROJECT_ID:
|
||||
required: true
|
||||
CLI_TESTS_ENV_SLUG:
|
||||
required: true
|
||||
CLI_TESTS_USER_EMAIL:
|
||||
required: true
|
||||
CLI_TESTS_USER_PASSWORD:
|
||||
required: true
|
||||
CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE:
|
||||
required: true
|
||||
jobs:
|
||||
test:
|
||||
defaults:
|
||||
run:
|
||||
working-directory: ./cli
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- name: Setup Go
|
||||
uses: actions/setup-go@v4
|
||||
with:
|
||||
go-version: "1.21.x"
|
||||
- name: Install dependencies
|
||||
run: go get .
|
||||
- name: Test with the Go CLI
|
||||
env:
|
||||
CLI_TESTS_UA_CLIENT_ID: ${{ secrets.CLI_TESTS_UA_CLIENT_ID }}
|
||||
CLI_TESTS_UA_CLIENT_SECRET: ${{ secrets.CLI_TESTS_UA_CLIENT_SECRET }}
|
||||
CLI_TESTS_SERVICE_TOKEN: ${{ secrets.CLI_TESTS_SERVICE_TOKEN }}
|
||||
CLI_TESTS_PROJECT_ID: ${{ secrets.CLI_TESTS_PROJECT_ID }}
|
||||
CLI_TESTS_ENV_SLUG: ${{ secrets.CLI_TESTS_ENV_SLUG }}
|
||||
CLI_TESTS_USER_EMAIL: ${{ secrets.CLI_TESTS_USER_EMAIL }}
|
||||
CLI_TESTS_USER_PASSWORD: ${{ secrets.CLI_TESTS_USER_PASSWORD }}
|
||||
INFISICAL_VAULT_FILE_PASSPHRASE: ${{ secrets.CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE }}
|
||||
|
||||
run: go test -v -count=1 ./test
|
@ -1,59 +0,0 @@
|
||||
name: Rename Migrations
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
types: [closed]
|
||||
paths:
|
||||
- 'backend/src/db/migrations/**'
|
||||
|
||||
jobs:
|
||||
rename:
|
||||
runs-on: ubuntu-latest
|
||||
if: github.event.pull_request.merged == true
|
||||
|
||||
steps:
|
||||
- name: Check out repository
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Get list of newly added files in migration folder
|
||||
run: |
|
||||
git diff --name-status HEAD^ HEAD backend/src/db/migrations | grep '^A' | cut -f2 | xargs -n1 basename > added_files.txt
|
||||
if [ ! -s added_files.txt ]; then
|
||||
echo "No new files added. Skipping"
|
||||
echo "SKIP_RENAME=true" >> $GITHUB_ENV
|
||||
fi
|
||||
|
||||
- name: Script to rename migrations
|
||||
if: env.SKIP_RENAME != 'true'
|
||||
run: python .github/resources/rename_migration_files.py
|
||||
|
||||
- name: Commit and push changes
|
||||
if: env.SKIP_RENAME != 'true'
|
||||
run: |
|
||||
git config user.name github-actions
|
||||
git config user.email github-actions@github.com
|
||||
git add ./backend/src/db/migrations
|
||||
rm added_files.txt
|
||||
git commit -m "chore: renamed new migration files to latest timestamp (gh-action)"
|
||||
|
||||
- name: Get PR details
|
||||
id: pr_details
|
||||
run: |
|
||||
PR_NUMBER=${{ github.event.pull_request.number }}
|
||||
PR_MERGER=$(curl -s "https://api.github.com/repos/${{ github.repository }}/pulls/$PR_NUMBER" | jq -r '.merged_by.login')
|
||||
|
||||
echo "PR Number: $PR_NUMBER"
|
||||
echo "PR Merger: $PR_MERGER"
|
||||
echo "pr_merger=$PR_MERGER" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Create Pull Request
|
||||
if: env.SKIP_RENAME != 'true'
|
||||
uses: peter-evans/create-pull-request@v6
|
||||
with:
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
commit-message: 'chore: renamed new migration files to latest UTC (gh-action)'
|
||||
title: 'GH Action: rename new migration file timestamp'
|
||||
branch-suffix: timestamp
|
||||
reviewers: ${{ steps.pr_details.outputs.pr_merger }}
|
6  .gitignore  vendored
@@ -59,13 +59,7 @@ yarn-error.log*
# Infisical init
.infisical.json

.infisicalignore

# Editor specific
.vscode/*

frontend-build

*.tgz
cli/infisical-merge
cli/test/infisical-merge
@ -190,34 +190,10 @@ dockers:
|
||||
- dockerfile: docker/alpine
|
||||
goos: linux
|
||||
goarch: amd64
|
||||
use: buildx
|
||||
ids:
|
||||
- all-other-builds
|
||||
image_templates:
|
||||
- "infisical/cli:{{ .Major }}.{{ .Minor }}.{{ .Patch }}-amd64"
|
||||
- "infisical/cli:latest-amd64"
|
||||
build_flag_templates:
|
||||
- "--pull"
|
||||
- "--platform=linux/amd64"
|
||||
- dockerfile: docker/alpine
|
||||
goos: linux
|
||||
goarch: amd64
|
||||
use: buildx
|
||||
ids:
|
||||
- all-other-builds
|
||||
image_templates:
|
||||
- "infisical/cli:{{ .Major }}.{{ .Minor }}.{{ .Patch }}-arm64"
|
||||
- "infisical/cli:latest-arm64"
|
||||
build_flag_templates:
|
||||
- "--pull"
|
||||
- "--platform=linux/arm64"
|
||||
|
||||
docker_manifests:
|
||||
- name_template: "infisical/cli:{{ .Major }}.{{ .Minor }}.{{ .Patch }}"
|
||||
image_templates:
|
||||
- "infisical/cli:{{ .Major }}.{{ .Minor }}.{{ .Patch }}-amd64"
|
||||
- "infisical/cli:{{ .Major }}.{{ .Minor }}.{{ .Patch }}-arm64"
|
||||
- name_template: "infisical/cli:latest"
|
||||
image_templates:
|
||||
- "infisical/cli:latest-amd64"
|
||||
- "infisical/cli:latest-arm64"
|
||||
- "infisical/cli:{{ .Version }}"
|
||||
- "infisical/cli:{{ .Major }}.{{ .Minor }}"
|
||||
- "infisical/cli:{{ .Major }}"
|
||||
- "infisical/cli:latest"
|
||||
|
@@ -1,7 +1 @@
.github/resources/docker-compose.be-test.yml:generic-api-key:16
frontend/src/views/Project/MembersPage/components/IdentityTab/components/IdentityRoleForm/IdentityRbacSection.tsx:generic-api-key:206
frontend/src/views/Project/MembersPage/components/IdentityTab/components/IdentityRoleForm/SpecificPrivilegeSection.tsx:generic-api-key:304
frontend/src/views/Project/MembersPage/components/MemberListTab/MemberRoleForm/MemberRbacSection.tsx:generic-api-key:206
frontend/src/views/Project/MembersPage/components/MemberListTab/MemberRoleForm/SpecificPrivilegeSection.tsx:generic-api-key:292
docs/self-hosting/configuration/envars.mdx:generic-api-key:106
frontend/src/views/Project/MembersPage/components/MemberListTab/MemberRoleForm/SpecificPrivilegeSection.tsx:generic-api-key:451
@@ -2,6 +2,6 @@

Thanks for taking the time to contribute! 😃 🚀

Please refer to our [Contributing Guide](https://infisical.com/docs/contributing/getting-started/overview) for instructions on how to contribute.
Please refer to our [Contributing Guide](https://infisical.com/docs/contributing/overview) for instructions on how to contribute.

We also have some 🔥amazing🔥 merch for our contributors. Please reach out to tony@infisical.com for more info 👀
@ -1,7 +1,6 @@
|
||||
ARG POSTHOG_HOST=https://app.posthog.com
|
||||
ARG POSTHOG_API_KEY=posthog-api-key
|
||||
ARG INTERCOM_ID=intercom-id
|
||||
ARG CAPTCHA_SITE_KEY=captcha-site-key
|
||||
|
||||
FROM node:20-alpine AS base
|
||||
|
||||
@ -36,8 +35,6 @@ ARG INTERCOM_ID
|
||||
ENV NEXT_PUBLIC_INTERCOM_ID $INTERCOM_ID
|
||||
ARG INFISICAL_PLATFORM_VERSION
|
||||
ENV NEXT_PUBLIC_INFISICAL_PLATFORM_VERSION $INFISICAL_PLATFORM_VERSION
|
||||
ARG CAPTCHA_SITE_KEY
|
||||
ENV NEXT_PUBLIC_CAPTCHA_SITE_KEY $CAPTCHA_SITE_KEY
|
||||
|
||||
# Build
|
||||
RUN npm run build
|
||||
@ -55,7 +52,6 @@ VOLUME /app/.next/cache/images
|
||||
COPY --chown=non-root-user:nodejs --chmod=555 frontend/scripts ./scripts
|
||||
COPY --from=frontend-builder /app/public ./public
|
||||
RUN chown non-root-user:nodejs ./public/data
|
||||
|
||||
COPY --from=frontend-builder --chown=non-root-user:nodejs /app/.next/standalone ./
|
||||
COPY --from=frontend-builder --chown=non-root-user:nodejs /app/.next/static ./.next/static
|
||||
|
||||
@ -94,18 +90,9 @@ RUN mkdir frontend-build
|
||||
|
||||
# Production stage
|
||||
FROM base AS production
|
||||
RUN apk add --upgrade --no-cache ca-certificates
|
||||
RUN addgroup --system --gid 1001 nodejs \
|
||||
&& adduser --system --uid 1001 non-root-user
|
||||
|
||||
# Give non-root-user permission to update SSL certs
|
||||
RUN chown -R non-root-user /etc/ssl/certs
|
||||
RUN chown non-root-user /etc/ssl/certs/ca-certificates.crt
|
||||
RUN chmod -R u+rwx /etc/ssl/certs
|
||||
RUN chmod u+rw /etc/ssl/certs/ca-certificates.crt
|
||||
RUN chown non-root-user /usr/sbin/update-ca-certificates
|
||||
RUN chmod u+rx /usr/sbin/update-ca-certificates
|
||||
|
||||
## set pre baked keys
|
||||
ARG POSTHOG_API_KEY
|
||||
ENV NEXT_PUBLIC_POSTHOG_API_KEY=$POSTHOG_API_KEY \
|
||||
@ -113,9 +100,6 @@ ENV NEXT_PUBLIC_POSTHOG_API_KEY=$POSTHOG_API_KEY \
|
||||
ARG INTERCOM_ID=intercom-id
|
||||
ENV NEXT_PUBLIC_INTERCOM_ID=$INTERCOM_ID \
|
||||
BAKED_NEXT_PUBLIC_INTERCOM_ID=$INTERCOM_ID
|
||||
ARG CAPTCHA_SITE_KEY
|
||||
ENV NEXT_PUBLIC_CAPTCHA_SITE_KEY=$CAPTCHA_SITE_KEY \
|
||||
BAKED_NEXT_PUBLIC_CAPTCHA_SITE_KEY=$CAPTCHA_SITE_KEY
|
||||
|
||||
WORKDIR /
|
||||
|
||||
@ -134,6 +118,9 @@ WORKDIR /backend
|
||||
|
||||
ENV TELEMETRY_ENABLED true
|
||||
|
||||
HEALTHCHECK --interval=10s --timeout=3s --start-period=10s \
|
||||
CMD node healthcheck.js
|
||||
|
||||
EXPOSE 8080
|
||||
EXPOSE 443
|
||||
|
||||
|
13  Makefile
@@ -5,13 +5,16 @@ push:
docker-compose -f docker-compose.yml push

up-dev:
docker compose -f docker-compose.dev.yml up --build
docker-compose -f docker-compose.dev.yml up --build

up-dev-ldap:
docker compose -f docker-compose.dev.yml --profile ldap up --build
up-pg-dev:
docker compose -f docker-compose.pg.yml up --build

i-dev:
infisical run -- docker-compose -f docker-compose.dev.yml up --build

up-prod:
docker-compose -f docker-compose.prod.yml up --build
docker-compose -f docker-compose.yml up --build

down:
docker compose -f docker-compose.dev.yml down
docker-compose down
@ -10,8 +10,7 @@
|
||||
<a href="https://infisical.com/">Infisical Cloud</a> |
|
||||
<a href="https://infisical.com/docs/self-hosting/overview">Self-Hosting</a> |
|
||||
<a href="https://infisical.com/docs/documentation/getting-started/introduction">Docs</a> |
|
||||
<a href="https://www.infisical.com">Website</a> |
|
||||
<a href="https://infisical.com/careers">Hiring (Remote/SF)</a>
|
||||
<a href="https://www.infisical.com">Website</a>
|
||||
</h4>
|
||||
|
||||
<p align="center">
|
||||
@ -76,7 +75,7 @@ Check out the [Quickstart Guides](https://infisical.com/docs/getting-started/int
|
||||
|
||||
| Use Infisical Cloud | Deploy Infisical on premise |
|
||||
| ------------------------------------------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
|
||||
| The fastest and most reliable way to <br> get started with Infisical is signing up <br> for free to [Infisical Cloud](https://app.infisical.com/login). | <br> View all [deployment options](https://infisical.com/docs/self-hosting/overview) |
|
||||
| The fastest and most reliable way to <br> get started with Infisical is signing up <br> for free to [Infisical Cloud](https://app.infisical.com/login). | <a href="https://infisical.com/docs/self-hosting/deployment-options/aws-ec2"><img src=".github/images/deploy-to-aws.png" width="150" width="300" /></a> <a href="https://infisical.com/docs/self-hosting/deployment-options/digital-ocean-marketplace" alt="Deploy to DigitalOcean"> <img width="217" alt="Deploy to DO" src="https://www.deploytodo.com/do-btn-blue.svg"/> </a> <br> View all [deployment options](https://infisical.com/docs/self-hosting/overview) |
|
||||
|
||||
### Run Infisical locally
|
||||
|
||||
@ -85,13 +84,13 @@ To set up and run Infisical locally, make sure you have Git and Docker installed
|
||||
Linux/macOS:
|
||||
|
||||
```console
|
||||
git clone https://github.com/Infisical/infisical && cd "$(basename $_ .git)" && cp .env.example .env && docker compose -f docker-compose.prod.yml up
|
||||
git clone https://github.com/Infisical/infisical && cd "$(basename $_ .git)" && cp .env.example .env && docker-compose -f docker-compose.yml up
|
||||
```
|
||||
|
||||
Windows Command Prompt:
|
||||
|
||||
```console
|
||||
git clone https://github.com/Infisical/infisical && cd infisical && copy .env.example .env && docker compose -f docker-compose.prod.yml up
|
||||
git clone https://github.com/Infisical/infisical && cd infisical && copy .env.example .env && docker-compose -f docker-compose.yml up
|
||||
```
|
||||
|
||||
Create an account at `http://localhost:80`
|
||||
|
@@ -1,3 +1,2 @@
vitest-environment-infisical.ts
vitest.config.ts
vitest.e2e.config.ts
@ -21,19 +21,6 @@ module.exports = {
|
||||
tsconfigRootDir: __dirname
|
||||
},
|
||||
root: true,
|
||||
overrides: [
|
||||
{
|
||||
files: ["./e2e-test/**/*", "./src/db/migrations/**/*"],
|
||||
rules: {
|
||||
"@typescript-eslint/no-unsafe-member-access": "off",
|
||||
"@typescript-eslint/no-unsafe-assignment": "off",
|
||||
"@typescript-eslint/no-unsafe-argument": "off",
|
||||
"@typescript-eslint/no-unsafe-return": "off",
|
||||
"@typescript-eslint/no-unsafe-call": "off"
|
||||
}
|
||||
}
|
||||
],
|
||||
|
||||
rules: {
|
||||
"@typescript-eslint/no-empty-function": "off",
|
||||
"@typescript-eslint/no-unsafe-enum-comparison": "off",
|
||||
|
@ -1,37 +0,0 @@
|
||||
import { TKeyStoreFactory } from "@app/keystore/keystore";
|
||||
import { Lock } from "@app/lib/red-lock";
|
||||
|
||||
export const mockKeyStore = (): TKeyStoreFactory => {
|
||||
const store: Record<string, string | number | Buffer> = {};
|
||||
|
||||
return {
|
||||
setItem: async (key, value) => {
|
||||
store[key] = value;
|
||||
return "OK";
|
||||
},
|
||||
setItemWithExpiry: async (key, value) => {
|
||||
store[key] = value;
|
||||
return "OK";
|
||||
},
|
||||
deleteItem: async (key) => {
|
||||
delete store[key];
|
||||
return 1;
|
||||
},
|
||||
getItem: async (key) => {
|
||||
const value = store[key];
|
||||
if (typeof value === "string") {
|
||||
return value;
|
||||
}
|
||||
return null;
|
||||
},
|
||||
incrementBy: async () => {
|
||||
return 1;
|
||||
},
|
||||
acquireLock: () => {
|
||||
return Promise.resolve({
|
||||
release: () => {}
|
||||
}) as Promise<Lock>;
|
||||
},
|
||||
waitTillReady: async () => {}
|
||||
};
|
||||
};
|
@ -1,71 +0,0 @@
|
||||
import { OrgMembershipRole } from "@app/db/schemas";
|
||||
import { seedData1 } from "@app/db/seed-data";
|
||||
|
||||
export const createIdentity = async (name: string, role: string) => {
|
||||
const createIdentityRes = await testServer.inject({
|
||||
method: "POST",
|
||||
url: "/api/v1/identities",
|
||||
body: {
|
||||
name,
|
||||
role,
|
||||
organizationId: seedData1.organization.id
|
||||
},
|
||||
headers: {
|
||||
authorization: `Bearer ${jwtAuthToken}`
|
||||
}
|
||||
});
|
||||
expect(createIdentityRes.statusCode).toBe(200);
|
||||
return createIdentityRes.json().identity;
|
||||
};
|
||||
|
||||
export const deleteIdentity = async (id: string) => {
|
||||
const deleteIdentityRes = await testServer.inject({
|
||||
method: "DELETE",
|
||||
url: `/api/v1/identities/${id}`,
|
||||
headers: {
|
||||
authorization: `Bearer ${jwtAuthToken}`
|
||||
}
|
||||
});
|
||||
expect(deleteIdentityRes.statusCode).toBe(200);
|
||||
return deleteIdentityRes.json().identity;
|
||||
};
|
||||
|
||||
describe("Identity v1", async () => {
|
||||
test("Create identity", async () => {
|
||||
const newIdentity = await createIdentity("mac1", OrgMembershipRole.Admin);
|
||||
expect(newIdentity.name).toBe("mac1");
|
||||
expect(newIdentity.authMethod).toBeNull();
|
||||
|
||||
await deleteIdentity(newIdentity.id);
|
||||
});
|
||||
|
||||
test("Update identity", async () => {
|
||||
const newIdentity = await createIdentity("mac1", OrgMembershipRole.Admin);
|
||||
expect(newIdentity.name).toBe("mac1");
|
||||
expect(newIdentity.authMethod).toBeNull();
|
||||
|
||||
const updatedIdentity = await testServer.inject({
|
||||
method: "PATCH",
|
||||
url: `/api/v1/identities/${newIdentity.id}`,
|
||||
headers: {
|
||||
authorization: `Bearer ${jwtAuthToken}`
|
||||
},
|
||||
body: {
|
||||
name: "updated-mac-1",
|
||||
role: OrgMembershipRole.Member
|
||||
}
|
||||
});
|
||||
|
||||
expect(updatedIdentity.statusCode).toBe(200);
|
||||
expect(updatedIdentity.json().identity.name).toBe("updated-mac-1");
|
||||
|
||||
await deleteIdentity(newIdentity.id);
|
||||
});
|
||||
|
||||
test("Delete Identity", async () => {
|
||||
const newIdentity = await createIdentity("mac1", OrgMembershipRole.Admin);
|
||||
|
||||
const deletedIdentity = await deleteIdentity(newIdentity.id);
|
||||
expect(deletedIdentity.name).toBe("mac1");
|
||||
});
|
||||
});
|
@ -1,6 +1,5 @@
|
||||
import jsrp from "jsrp";
|
||||
|
||||
import { seedData1 } from "@app/db/seed-data";
|
||||
import jsrp from "jsrp";
|
||||
|
||||
describe("Login V1 Router", async () => {
|
||||
// eslint-disable-next-line
|
||||
|
@ -1,40 +1,6 @@
|
||||
import { seedData1 } from "@app/db/seed-data";
|
||||
import { DEFAULT_PROJECT_ENVS } from "@app/db/seeds/3-project";
|
||||
|
||||
const createProjectEnvironment = async (name: string, slug: string) => {
|
||||
const res = await testServer.inject({
|
method: "POST",
url: `/api/v1/workspace/${seedData1.project.id}/environments`,
headers: {
authorization: `Bearer ${jwtAuthToken}`
},
body: {
name,
slug
}
});

expect(res.statusCode).toBe(200);
const payload = JSON.parse(res.payload);
expect(payload).toHaveProperty("environment");
return payload.environment;
};

const deleteProjectEnvironment = async (envId: string) => {
const res = await testServer.inject({
method: "DELETE",
url: `/api/v1/workspace/${seedData1.project.id}/environments/${envId}`,
headers: {
authorization: `Bearer ${jwtAuthToken}`
}
});

expect(res.statusCode).toBe(200);
const payload = JSON.parse(res.payload);
expect(payload).toHaveProperty("environment");
return payload.environment;
};

describe("Project Environment Router", async () => {
test("Get default environments", async () => {
const res = await testServer.inject({
@@ -65,10 +31,24 @@ describe("Project Environment Router", async () => {
expect(payload.workspace.environments.length).toBe(3);
});

const mockProjectEnv = { name: "temp", slug: "temp" }; // id will be filled in create op
const mockProjectEnv = { name: "temp", slug: "temp", id: "" }; // id will be filled in create op
test("Create environment", async () => {
const newEnvironment = await createProjectEnvironment(mockProjectEnv.name, mockProjectEnv.slug);
expect(newEnvironment).toEqual(
const res = await testServer.inject({
method: "POST",
url: `/api/v1/workspace/${seedData1.project.id}/environments`,
headers: {
authorization: `Bearer ${jwtAuthToken}`
},
body: {
name: mockProjectEnv.name,
slug: mockProjectEnv.slug
}
});

expect(res.statusCode).toBe(200);
const payload = JSON.parse(res.payload);
expect(payload).toHaveProperty("environment");
expect(payload.environment).toEqual(
expect.objectContaining({
id: expect.any(String),
name: mockProjectEnv.name,
@@ -79,15 +59,14 @@ describe("Project Environment Router", async () => {
updatedAt: expect.any(String)
})
);
await deleteProjectEnvironment(newEnvironment.id);
mockProjectEnv.id = payload.environment.id;
});

test("Update environment", async () => {
const newEnvironment = await createProjectEnvironment(mockProjectEnv.name, mockProjectEnv.slug);
const updatedName = { name: "temp#2", slug: "temp2" };
const res = await testServer.inject({
method: "PATCH",
url: `/api/v1/workspace/${seedData1.project.id}/environments/${newEnvironment.id}`,
url: `/api/v1/workspace/${seedData1.project.id}/environments/${mockProjectEnv.id}`,
headers: {
authorization: `Bearer ${jwtAuthToken}`
},
@@ -103,7 +82,7 @@ describe("Project Environment Router", async () => {
expect(payload).toHaveProperty("environment");
expect(payload.environment).toEqual(
expect.objectContaining({
id: newEnvironment.id,
id: expect.any(String),
name: updatedName.name,
slug: updatedName.slug,
projectId: seedData1.project.id,
@@ -112,21 +91,61 @@ describe("Project Environment Router", async () => {
updatedAt: expect.any(String)
})
);
await deleteProjectEnvironment(newEnvironment.id);
mockProjectEnv.name = updatedName.name;
mockProjectEnv.slug = updatedName.slug;
});

test("Delete environment", async () => {
const newEnvironment = await createProjectEnvironment(mockProjectEnv.name, mockProjectEnv.slug);
const deletedProjectEnvironment = await deleteProjectEnvironment(newEnvironment.id);
expect(deletedProjectEnvironment).toEqual(
const res = await testServer.inject({
method: "DELETE",
url: `/api/v1/workspace/${seedData1.project.id}/environments/${mockProjectEnv.id}`,
headers: {
authorization: `Bearer ${jwtAuthToken}`
}
});

expect(res.statusCode).toBe(200);
const payload = JSON.parse(res.payload);
expect(payload).toHaveProperty("environment");
expect(payload.environment).toEqual(
expect.objectContaining({
id: deletedProjectEnvironment.id,
id: expect.any(String),
name: mockProjectEnv.name,
slug: mockProjectEnv.slug,
position: 4,
position: 1,
createdAt: expect.any(String),
updatedAt: expect.any(String)
})
);
});

// after all these operations the list of environments should still be the same
test("Default list of environment", async () => {
const res = await testServer.inject({
method: "GET",
url: `/api/v1/workspace/${seedData1.project.id}`,
headers: {
authorization: `Bearer ${jwtAuthToken}`
}
});

expect(res.statusCode).toBe(200);
const payload = JSON.parse(res.payload);
expect(payload).toHaveProperty("workspace");
// check for default environments
expect(payload).toEqual({
workspace: expect.objectContaining({
name: seedData1.project.name,
id: seedData1.project.id,
slug: seedData1.project.slug,
environments: expect.arrayContaining([
expect.objectContaining(DEFAULT_PROJECT_ENVS[0]),
expect.objectContaining(DEFAULT_PROJECT_ENVS[1]),
expect.objectContaining(DEFAULT_PROJECT_ENVS[2])
])
})
});
// ensure only the three default environments exist
expect(payload.workspace.environments.length).toBe(3);
});
});

@@ -1,40 +1,5 @@
import { seedData1 } from "@app/db/seed-data";

const createFolder = async (dto: { path: string; name: string }) => {
const res = await testServer.inject({
method: "POST",
url: `/api/v1/folders`,
headers: {
authorization: `Bearer ${jwtAuthToken}`
},
body: {
workspaceId: seedData1.project.id,
environment: seedData1.environment.slug,
name: dto.name,
path: dto.path
}
});
expect(res.statusCode).toBe(200);
return res.json().folder;
};

const deleteFolder = async (dto: { path: string; id: string }) => {
const res = await testServer.inject({
method: "DELETE",
url: `/api/v1/folders/${dto.id}`,
headers: {
authorization: `Bearer ${jwtAuthToken}`
},
body: {
workspaceId: seedData1.project.id,
environment: seedData1.environment.slug,
path: dto.path
}
});
expect(res.statusCode).toBe(200);
return res.json().folder;
};

describe("Secret Folder Router", async () => {
test.each([
{ name: "folder1", path: "/" }, // one in root
@@ -42,15 +7,30 @@ describe("Secret Folder Router", async () => {
{ name: "folder2", path: "/" },
{ name: "folder1", path: "/level1/level2" } // this should not create a folder; it should return the same one
])("Create folder $name in $path", async ({ name, path }) => {
const createdFolder = await createFolder({ path, name });
const res = await testServer.inject({
method: "POST",
url: `/api/v1/folders`,
headers: {
authorization: `Bearer ${jwtAuthToken}`
},
body: {
workspaceId: seedData1.project.id,
environment: seedData1.environment.slug,
name,
path
}
});

expect(res.statusCode).toBe(200);
const payload = JSON.parse(res.payload);
expect(payload).toHaveProperty("folder");
// check for default environments
expect(createdFolder).toEqual(
expect.objectContaining({
expect(payload).toEqual({
folder: expect.objectContaining({
name,
id: expect.any(String)
})
);
await deleteFolder({ path, id: createdFolder.id });
});
});

test.each([
@@ -63,8 +43,6 @@ describe("Secret Folder Router", async () => {
},
{ path: "/level1/level2", expected: { folders: [{ name: "folder1" }], length: 1 } }
])("Get folders $path", async ({ path, expected }) => {
const newFolders = await Promise.all(expected.folders.map(({ name }) => createFolder({ name, path })));

const res = await testServer.inject({
method: "GET",
url: `/api/v1/folders`,
@@ -81,22 +59,36 @@ describe("Secret Folder Router", async () => {
expect(res.statusCode).toBe(200);
const payload = JSON.parse(res.payload);
expect(payload).toHaveProperty("folders");
expect(payload.folders.length >= expected.folders.length).toBeTruthy();
expect(payload).toEqual({
folders: expect.arrayContaining(expected.folders.map((el) => expect.objectContaining(el)))
});

await Promise.all(newFolders.map(({ id }) => deleteFolder({ path, id })));
expect(payload.folders.length).toBe(expected.length);
expect(payload).toEqual({ folders: expected.folders.map((el) => expect.objectContaining(el)) });
});

let toBeDeleteFolderId = "";
test("Update a deep folder", async () => {
const newFolder = await createFolder({ name: "folder-updated", path: "/level1/level2" });
expect(newFolder).toEqual(
const res = await testServer.inject({
method: "PATCH",
url: `/api/v1/folders/folder1`,
headers: {
authorization: `Bearer ${jwtAuthToken}`
},
body: {
workspaceId: seedData1.project.id,
environment: seedData1.environment.slug,
name: "folder-updated",
path: "/level1/level2"
}
});

expect(res.statusCode).toBe(200);
const payload = JSON.parse(res.payload);
expect(payload).toHaveProperty("folder");
expect(payload.folder).toEqual(
expect.objectContaining({
id: expect.any(String),
name: "folder-updated"
})
);
toBeDeleteFolderId = payload.folder.id;

const resUpdatedFolders = await testServer.inject({
method: "GET",
@@ -114,16 +106,14 @@ describe("Secret Folder Router", async () => {
expect(resUpdatedFolders.statusCode).toBe(200);
const updatedFolderList = JSON.parse(resUpdatedFolders.payload);
expect(updatedFolderList).toHaveProperty("folders");
expect(updatedFolderList.folders.length).toEqual(1);
expect(updatedFolderList.folders[0].name).toEqual("folder-updated");

await deleteFolder({ path: "/level1/level2", id: newFolder.id });
});

test("Delete a deep folder", async () => {
const newFolder = await createFolder({ name: "folder-updated", path: "/level1/level2" });
const res = await testServer.inject({
method: "DELETE",
url: `/api/v1/folders/${newFolder.id}`,
url: `/api/v1/folders/${toBeDeleteFolderId}`,
headers: {
authorization: `Bearer ${jwtAuthToken}`
},

@@ -1,57 +1,32 @@
import { seedData1 } from "@app/db/seed-data";

const createSecretImport = async (importPath: string, importEnv: string) => {
const res = await testServer.inject({
method: "POST",
url: `/api/v1/secret-imports`,
headers: {
authorization: `Bearer ${jwtAuthToken}`
},
body: {
workspaceId: seedData1.project.id,
environment: seedData1.environment.slug,
path: "/",
import: {
environment: importEnv,
path: importPath
}
}
});

expect(res.statusCode).toBe(200);
const payload = JSON.parse(res.payload);
expect(payload).toHaveProperty("secretImport");
return payload.secretImport;
};

const deleteSecretImport = async (id: string) => {
const res = await testServer.inject({
method: "DELETE",
url: `/api/v1/secret-imports/${id}`,
headers: {
authorization: `Bearer ${jwtAuthToken}`
},
body: {
workspaceId: seedData1.project.id,
environment: seedData1.environment.slug,
path: "/"
}
});

expect(res.statusCode).toBe(200);
const payload = JSON.parse(res.payload);
expect(payload).toHaveProperty("secretImport");
return payload.secretImport;
};

describe("Secret Import Router", async () => {
describe("Secret Folder Router", async () => {
test.each([
{ importEnv: "prod", importPath: "/" }, // one in root
{ importEnv: "dev", importPath: "/" }, // one in root
{ importEnv: "staging", importPath: "/" } // then create a deep one creating intermediate ones
])("Create secret import $importEnv with path $importPath", async ({ importPath, importEnv }) => {
const res = await testServer.inject({
method: "POST",
url: `/api/v1/secret-imports`,
headers: {
authorization: `Bearer ${jwtAuthToken}`
},
body: {
workspaceId: seedData1.project.id,
environment: seedData1.environment.slug,
path: "/",
import: {
environment: importEnv,
path: importPath
}
}
});

expect(res.statusCode).toBe(200);
const payload = JSON.parse(res.payload);
expect(payload).toHaveProperty("secretImport");
// check for default environments
const payload = await createSecretImport(importPath, importEnv);
expect(payload).toEqual(
expect(payload.secretImport).toEqual(
expect.objectContaining({
id: expect.any(String),
importPath: expect.any(String),
@@ -62,12 +37,10 @@ describe("Secret Import Router", async () => {
})
})
);
await deleteSecretImport(payload.id);
});

let testSecretImportId = "";
test("Get secret imports", async () => {
const createdImport1 = await createSecretImport("/", "prod");
const createdImport2 = await createSecretImport("/", "staging");
const res = await testServer.inject({
method: "GET",
url: `/api/v1/secret-imports`,
@@ -85,6 +58,7 @@ describe("Secret Import Router", async () => {
const payload = JSON.parse(res.payload);
expect(payload).toHaveProperty("secretImports");
expect(payload.secretImports.length).toBe(2);
testSecretImportId = payload.secretImports[0].id;
expect(payload.secretImports).toEqual(
expect.arrayContaining([
expect.objectContaining({
@@ -98,20 +72,12 @@ describe("Secret Import Router", async () => {
})
])
);
await deleteSecretImport(createdImport1.id);
await deleteSecretImport(createdImport2.id);
});

test("Update secret import position", async () => {
const prodImportDetails = { path: "/", envSlug: "prod" };
const stagingImportDetails = { path: "/", envSlug: "staging" };

const createdImport1 = await createSecretImport(prodImportDetails.path, prodImportDetails.envSlug);
const createdImport2 = await createSecretImport(stagingImportDetails.path, stagingImportDetails.envSlug);

const updateImportRes = await testServer.inject({
const res = await testServer.inject({
method: "PATCH",
url: `/api/v1/secret-imports/${createdImport1.id}`,
url: `/api/v1/secret-imports/${testSecretImportId}`,
headers: {
authorization: `Bearer ${jwtAuthToken}`
},
@@ -125,8 +91,8 @@ describe("Secret Import Router", async () => {
}
});

expect(updateImportRes.statusCode).toBe(200);
const payload = JSON.parse(updateImportRes.payload);
expect(res.statusCode).toBe(200);
const payload = JSON.parse(res.payload);
expect(payload).toHaveProperty("secretImport");
// check for default environments
expect(payload.secretImport).toEqual(
@@ -136,7 +102,7 @@ describe("Secret Import Router", async () => {
position: 2,
importEnv: expect.objectContaining({
name: expect.any(String),
slug: expect.stringMatching(prodImportDetails.envSlug),
slug: expect.any(String),
id: expect.any(String)
})
})
@@ -158,19 +124,28 @@ describe("Secret Import Router", async () => {
expect(secretImportsListRes.statusCode).toBe(200);
const secretImportList = JSON.parse(secretImportsListRes.payload);
expect(secretImportList).toHaveProperty("secretImports");
expect(secretImportList.secretImports[1].id).toEqual(createdImport1.id);
expect(secretImportList.secretImports[0].id).toEqual(createdImport2.id);

await deleteSecretImport(createdImport1.id);
await deleteSecretImport(createdImport2.id);
expect(secretImportList.secretImports[1].id).toEqual(testSecretImportId);
});

test("Delete secret import position", async () => {
const createdImport1 = await createSecretImport("/", "prod");
const createdImport2 = await createSecretImport("/", "staging");
const deletedImport = await deleteSecretImport(createdImport1.id);
const res = await testServer.inject({
method: "DELETE",
url: `/api/v1/secret-imports/${testSecretImportId}`,
headers: {
authorization: `Bearer ${jwtAuthToken}`
},
body: {
workspaceId: seedData1.project.id,
environment: seedData1.environment.slug,
path: "/"
}
});

expect(res.statusCode).toBe(200);
const payload = JSON.parse(res.payload);
expect(payload).toHaveProperty("secretImport");
// check for default environments
expect(deletedImport).toEqual(
expect(payload.secretImport).toEqual(
expect.objectContaining({
id: expect.any(String),
importPath: expect.any(String),
@@ -200,7 +175,5 @@ describe("Secret Import Router", async () => {
expect(secretImportList).toHaveProperty("secretImports");
expect(secretImportList.secretImports.length).toEqual(1);
expect(secretImportList.secretImports[0].position).toEqual(1);

await deleteSecretImport(createdImport2.id);
});
});

@ -1,579 +0,0 @@
|
||||
import crypto from "node:crypto";
|
||||
|
||||
import { SecretType, TSecrets } from "@app/db/schemas";
|
||||
import { decryptSecret, encryptSecret, getUserPrivateKey, seedData1 } from "@app/db/seed-data";
|
||||
import { decryptAsymmetric, decryptSymmetric128BitHexKeyUTF8, encryptSymmetric128BitHexKeyUTF8 } from "@app/lib/crypto";
|
||||
|
||||
const createServiceToken = async (
|
||||
scopes: { environment: string; secretPath: string }[],
|
||||
permissions: ("read" | "write")[]
|
||||
) => {
|
||||
const projectKeyRes = await testServer.inject({
|
||||
method: "GET",
|
||||
url: `/api/v2/workspace/${seedData1.project.id}/encrypted-key`,
|
||||
headers: {
|
||||
authorization: `Bearer ${jwtAuthToken}`
|
||||
}
|
||||
});
|
||||
const projectKeyEnc = JSON.parse(projectKeyRes.payload);
|
||||
|
||||
const userInfoRes = await testServer.inject({
|
||||
method: "GET",
|
||||
url: "/api/v2/users/me",
|
||||
headers: {
|
||||
authorization: `Bearer ${jwtAuthToken}`
|
||||
}
|
||||
});
|
||||
const { user: userInfo } = JSON.parse(userInfoRes.payload);
|
||||
const privateKey = await getUserPrivateKey(seedData1.password, userInfo);
|
||||
const projectKey = decryptAsymmetric({
|
||||
ciphertext: projectKeyEnc.encryptedKey,
|
||||
nonce: projectKeyEnc.nonce,
|
||||
publicKey: projectKeyEnc.sender.publicKey,
|
||||
privateKey
|
||||
});
|
||||
|
||||
const randomBytes = crypto.randomBytes(16).toString("hex");
|
||||
const { ciphertext, iv, tag } = encryptSymmetric128BitHexKeyUTF8(projectKey, randomBytes);
|
||||
const serviceTokenRes = await testServer.inject({
|
||||
method: "POST",
|
||||
url: "/api/v2/service-token",
|
||||
headers: {
|
||||
authorization: `Bearer ${jwtAuthToken}`
|
||||
},
|
||||
body: {
|
||||
name: "test-token",
|
||||
workspaceId: seedData1.project.id,
|
||||
scopes,
|
||||
encryptedKey: ciphertext,
|
||||
iv,
|
||||
tag,
|
||||
permissions,
|
||||
expiresIn: null
|
||||
}
|
||||
});
|
||||
expect(serviceTokenRes.statusCode).toBe(200);
|
||||
const serviceTokenInfo = serviceTokenRes.json();
|
||||
expect(serviceTokenInfo).toHaveProperty("serviceToken");
|
||||
expect(serviceTokenInfo).toHaveProperty("serviceTokenData");
|
||||
return `${serviceTokenInfo.serviceToken}.${randomBytes}`;
|
||||
};
|
||||
|
||||
const deleteServiceToken = async () => {
|
||||
const serviceTokenListRes = await testServer.inject({
|
||||
method: "GET",
|
||||
url: `/api/v1/workspace/${seedData1.project.id}/service-token-data`,
|
||||
headers: {
|
||||
authorization: `Bearer ${jwtAuthToken}`
|
||||
}
|
||||
});
|
||||
expect(serviceTokenListRes.statusCode).toBe(200);
|
||||
const serviceTokens = JSON.parse(serviceTokenListRes.payload).serviceTokenData as { name: string; id: string }[];
|
||||
expect(serviceTokens.length).toBeGreaterThan(0);
|
||||
const serviceTokenInfo = serviceTokens.find(({ name }) => name === "test-token");
|
||||
expect(serviceTokenInfo).toBeDefined();
|
||||
|
||||
const deleteTokenRes = await testServer.inject({
|
||||
method: "DELETE",
|
||||
url: `/api/v2/service-token/${serviceTokenInfo?.id}`,
|
||||
headers: {
|
||||
authorization: `Bearer ${jwtAuthToken}`
|
||||
}
|
||||
});
|
||||
expect(deleteTokenRes.statusCode).toBe(200);
|
||||
};
|
||||
|
||||
const createSecret = async (dto: {
|
||||
projectKey: string;
|
||||
path: string;
|
||||
key: string;
|
||||
value: string;
|
||||
comment: string;
|
||||
type?: SecretType;
|
||||
token: string;
|
||||
}) => {
|
||||
const createSecretReqBody = {
|
||||
workspaceId: seedData1.project.id,
|
||||
environment: seedData1.environment.slug,
|
||||
type: dto.type || SecretType.Shared,
|
||||
secretPath: dto.path,
|
||||
...encryptSecret(dto.projectKey, dto.key, dto.value, dto.comment)
|
||||
};
|
||||
const createSecRes = await testServer.inject({
|
||||
method: "POST",
|
||||
url: `/api/v3/secrets/${dto.key}`,
|
||||
headers: {
|
||||
authorization: `Bearer ${dto.token}`
|
||||
},
|
||||
body: createSecretReqBody
|
||||
});
|
||||
expect(createSecRes.statusCode).toBe(200);
|
||||
const createdSecretPayload = JSON.parse(createSecRes.payload);
|
||||
expect(createdSecretPayload).toHaveProperty("secret");
|
||||
return createdSecretPayload.secret;
|
||||
};
|
||||
|
||||
const deleteSecret = async (dto: { path: string; key: string; token: string }) => {
|
||||
const deleteSecRes = await testServer.inject({
|
||||
method: "DELETE",
|
||||
url: `/api/v3/secrets/${dto.key}`,
|
||||
headers: {
|
||||
authorization: `Bearer ${dto.token}`
|
||||
},
|
||||
body: {
|
||||
workspaceId: seedData1.project.id,
|
||||
environment: seedData1.environment.slug,
|
||||
secretPath: dto.path
|
||||
}
|
||||
});
|
||||
expect(deleteSecRes.statusCode).toBe(200);
|
||||
const updatedSecretPayload = JSON.parse(deleteSecRes.payload);
|
||||
expect(updatedSecretPayload).toHaveProperty("secret");
|
||||
return updatedSecretPayload.secret;
|
||||
};
|
||||
|
||||
describe("Service token secret ops", async () => {
|
||||
let serviceToken = "";
|
||||
let projectKey = "";
|
||||
let folderId = "";
|
||||
beforeAll(async () => {
|
||||
serviceToken = await createServiceToken(
|
||||
[{ secretPath: "/**", environment: seedData1.environment.slug }],
|
||||
["read", "write"]
|
||||
);
|
||||
|
||||
// this is ensure cli service token decryptiong working fine
|
||||
const serviceTokenInfoRes = await testServer.inject({
|
||||
method: "GET",
|
||||
url: "/api/v2/service-token",
|
||||
headers: {
|
||||
authorization: `Bearer ${serviceToken}`
|
||||
}
|
||||
});
|
||||
expect(serviceTokenInfoRes.statusCode).toBe(200);
|
||||
const serviceTokenInfo = serviceTokenInfoRes.json();
|
||||
const serviceTokenParts = serviceToken.split(".");
|
||||
projectKey = decryptSymmetric128BitHexKeyUTF8({
|
||||
key: serviceTokenParts[3],
|
||||
tag: serviceTokenInfo.tag,
|
||||
ciphertext: serviceTokenInfo.encryptedKey,
|
||||
iv: serviceTokenInfo.iv
|
||||
});
|
||||
|
||||
// create a deep folder
|
||||
const folderCreate = await testServer.inject({
|
||||
method: "POST",
|
||||
url: `/api/v1/folders`,
|
||||
headers: {
|
||||
authorization: `Bearer ${jwtAuthToken}`
|
||||
},
|
||||
body: {
|
||||
workspaceId: seedData1.project.id,
|
||||
environment: seedData1.environment.slug,
|
||||
name: "folder",
|
||||
path: "/nested1/nested2"
|
||||
}
|
||||
});
|
||||
expect(folderCreate.statusCode).toBe(200);
|
||||
folderId = folderCreate.json().folder.id;
|
||||
});
|
||||
|
||||
afterAll(async () => {
|
||||
await deleteServiceToken();
|
||||
|
||||
// create a deep folder
|
||||
const deleteFolder = await testServer.inject({
|
||||
method: "DELETE",
|
||||
url: `/api/v1/folders/${folderId}`,
|
||||
headers: {
|
||||
authorization: `Bearer ${jwtAuthToken}`
|
||||
},
|
||||
body: {
|
||||
workspaceId: seedData1.project.id,
|
||||
environment: seedData1.environment.slug,
|
||||
path: "/nested1/nested2"
|
||||
}
|
||||
});
|
||||
expect(deleteFolder.statusCode).toBe(200);
|
||||
});
|
||||
|
||||
const testSecrets = [
|
||||
{
|
||||
path: "/",
|
||||
secret: {
|
||||
key: "ST-SEC",
|
||||
value: "something-secret",
|
||||
comment: "some comment"
|
||||
}
|
||||
},
|
||||
{
|
||||
path: "/nested1/nested2/folder",
|
||||
secret: {
|
||||
key: "NESTED-ST-SEC",
|
||||
value: "something-secret",
|
||||
comment: "some comment"
|
||||
}
|
||||
}
|
||||
];
|
||||
|
||||
const getSecrets = async (environment: string, secretPath = "/") => {
|
||||
const res = await testServer.inject({
|
||||
method: "GET",
|
||||
url: `/api/v3/secrets`,
|
||||
headers: {
|
||||
authorization: `Bearer ${serviceToken}`
|
||||
},
|
||||
query: {
|
||||
secretPath,
|
||||
environment,
|
||||
workspaceId: seedData1.project.id
|
||||
}
|
||||
});
|
||||
const secrets: TSecrets[] = JSON.parse(res.payload).secrets || [];
|
||||
return secrets.map((el) => ({ ...decryptSecret(projectKey, el), type: el.type }));
|
||||
};
|
||||
|
||||
test.each(testSecrets)("Create secret in path $path", async ({ secret, path }) => {
|
||||
const createdSecret = await createSecret({ projectKey, path, ...secret, token: serviceToken });
|
||||
const decryptedSecret = decryptSecret(projectKey, createdSecret);
|
||||
expect(decryptedSecret.key).toEqual(secret.key);
|
||||
expect(decryptedSecret.value).toEqual(secret.value);
|
||||
expect(decryptedSecret.comment).toEqual(secret.comment);
|
||||
expect(decryptedSecret.version).toEqual(1);
|
||||
|
||||
const secrets = await getSecrets(seedData1.environment.slug, path);
|
||||
expect(secrets).toEqual(
|
||||
expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
key: secret.key,
|
||||
value: secret.value,
|
||||
type: SecretType.Shared
|
||||
})
|
||||
])
|
||||
);
|
||||
await deleteSecret({ path, key: secret.key, token: serviceToken });
|
||||
});
|
||||
|
||||
test.each(testSecrets)("Get secret by name in path $path", async ({ secret, path }) => {
|
||||
await createSecret({ projectKey, path, ...secret, token: serviceToken });
|
||||
|
||||
const getSecByNameRes = await testServer.inject({
|
||||
method: "GET",
|
||||
url: `/api/v3/secrets/${secret.key}`,
|
||||
headers: {
|
||||
authorization: `Bearer ${serviceToken}`
|
||||
},
|
||||
query: {
|
||||
secretPath: path,
|
||||
workspaceId: seedData1.project.id,
|
||||
environment: seedData1.environment.slug
|
||||
}
|
||||
});
|
||||
expect(getSecByNameRes.statusCode).toBe(200);
|
||||
const getSecretByNamePayload = JSON.parse(getSecByNameRes.payload);
|
||||
expect(getSecretByNamePayload).toHaveProperty("secret");
|
||||
const decryptedSecret = decryptSecret(projectKey, getSecretByNamePayload.secret);
|
||||
expect(decryptedSecret.key).toEqual(secret.key);
|
||||
expect(decryptedSecret.value).toEqual(secret.value);
|
||||
expect(decryptedSecret.comment).toEqual(secret.comment);
|
||||
|
||||
await deleteSecret({ path, key: secret.key, token: serviceToken });
|
||||
});
|
||||
|
||||
test.each(testSecrets)("Update secret in path $path", async ({ path, secret }) => {
|
||||
await createSecret({ projectKey, path, ...secret, token: serviceToken });
|
||||
const updateSecretReqBody = {
|
||||
workspaceId: seedData1.project.id,
|
||||
environment: seedData1.environment.slug,
|
||||
type: SecretType.Shared,
|
||||
secretPath: path,
|
||||
...encryptSecret(projectKey, secret.key, "new-value", secret.comment)
|
||||
};
|
||||
const updateSecRes = await testServer.inject({
|
||||
method: "PATCH",
|
||||
url: `/api/v3/secrets/${secret.key}`,
|
||||
headers: {
|
||||
authorization: `Bearer ${serviceToken}`
|
||||
},
|
||||
body: updateSecretReqBody
|
||||
});
|
||||
expect(updateSecRes.statusCode).toBe(200);
|
||||
const updatedSecretPayload = JSON.parse(updateSecRes.payload);
|
||||
expect(updatedSecretPayload).toHaveProperty("secret");
|
||||
const decryptedSecret = decryptSecret(projectKey, updatedSecretPayload.secret);
|
||||
expect(decryptedSecret.key).toEqual(secret.key);
|
||||
expect(decryptedSecret.value).toEqual("new-value");
|
||||
expect(decryptedSecret.comment).toEqual(secret.comment);
|
||||
|
||||
// list secret should have updated value
|
||||
const secrets = await getSecrets(seedData1.environment.slug, path);
|
||||
expect(secrets).toEqual(
|
||||
expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
key: secret.key,
|
||||
value: "new-value",
|
||||
type: SecretType.Shared
|
||||
})
|
||||
])
|
||||
);
|
||||
|
||||
await deleteSecret({ path, key: secret.key, token: serviceToken });
|
||||
});
|
||||
|
||||
test.each(testSecrets)("Delete secret in path $path", async ({ secret, path }) => {
|
||||
await createSecret({ projectKey, path, ...secret, token: serviceToken });
|
||||
const deletedSecret = await deleteSecret({ path, key: secret.key, token: serviceToken });
|
||||
const decryptedSecret = decryptSecret(projectKey, deletedSecret);
|
||||
expect(decryptedSecret.key).toEqual(secret.key);
|
||||
|
||||
// shared secret deletion should delete personal ones also
|
||||
const secrets = await getSecrets(seedData1.environment.slug, path);
|
||||
expect(secrets).toEqual(
|
||||
expect.not.arrayContaining([
|
||||
expect.objectContaining({
|
||||
key: secret.key,
|
||||
type: SecretType.Shared
|
||||
})
|
||||
])
|
||||
);
|
||||
});
|
||||
|
||||
test.each(testSecrets)("Bulk create secrets in path $path", async ({ secret, path }) => {
|
||||
const createSharedSecRes = await testServer.inject({
|
||||
method: "POST",
|
||||
url: `/api/v3/secrets/batch`,
|
||||
headers: {
|
||||
authorization: `Bearer ${serviceToken}`
|
||||
},
|
||||
body: {
|
||||
workspaceId: seedData1.project.id,
|
||||
environment: seedData1.environment.slug,
|
||||
secretPath: path,
|
||||
secrets: Array.from(Array(5)).map((_e, i) => ({
|
||||
secretName: `BULK-${secret.key}-${i + 1}`,
|
||||
...encryptSecret(projectKey, `BULK-${secret.key}-${i + 1}`, secret.value, secret.comment)
|
||||
}))
|
||||
}
|
||||
});
|
||||
expect(createSharedSecRes.statusCode).toBe(200);
|
||||
const createSharedSecPayload = JSON.parse(createSharedSecRes.payload);
|
||||
expect(createSharedSecPayload).toHaveProperty("secrets");
|
||||
|
||||
// bulk ones should exist
|
||||
const secrets = await getSecrets(seedData1.environment.slug, path);
|
||||
expect(secrets).toEqual(
|
||||
expect.arrayContaining(
|
||||
Array.from(Array(5)).map((_e, i) =>
|
||||
expect.objectContaining({
|
||||
key: `BULK-${secret.key}-${i + 1}`,
|
||||
type: SecretType.Shared
|
||||
})
|
||||
)
|
||||
)
|
||||
);
|
||||
|
||||
await Promise.all(
|
||||
Array.from(Array(5)).map((_e, i) =>
|
||||
deleteSecret({ path, token: serviceToken, key: `BULK-${secret.key}-${i + 1}` })
|
||||
)
|
||||
);
|
||||
});
|
||||
|
||||
test.each(testSecrets)("Bulk create fail on existing secret in path $path", async ({ secret, path }) => {
|
||||
await createSecret({ projectKey, ...secret, key: `BULK-${secret.key}-1`, path, token: serviceToken });
|
||||
|
||||
const createSharedSecRes = await testServer.inject({
|
||||
method: "POST",
|
||||
url: `/api/v3/secrets/batch`,
|
||||
headers: {
|
||||
authorization: `Bearer ${serviceToken}`
|
||||
},
|
||||
body: {
|
||||
workspaceId: seedData1.project.id,
|
||||
environment: seedData1.environment.slug,
|
||||
secretPath: path,
|
||||
secrets: Array.from(Array(5)).map((_e, i) => ({
|
||||
secretName: `BULK-${secret.key}-${i + 1}`,
|
||||
...encryptSecret(projectKey, `BULK-${secret.key}-${i + 1}`, secret.value, secret.comment)
|
||||
}))
|
||||
}
|
||||
});
|
||||
expect(createSharedSecRes.statusCode).toBe(400);
|
||||
|
||||
await deleteSecret({ path, key: `BULK-${secret.key}-1`, token: serviceToken });
|
||||
});
|
||||
|
||||
test.each(testSecrets)("Bulk update secrets in path $path", async ({ secret, path }) => {
|
||||
await Promise.all(
|
||||
Array.from(Array(5)).map((_e, i) =>
|
||||
createSecret({ projectKey, token: serviceToken, ...secret, key: `BULK-${secret.key}-${i + 1}`, path })
|
||||
)
|
||||
);
|
||||
|
||||
const updateSharedSecRes = await testServer.inject({
|
||||
method: "PATCH",
|
||||
url: `/api/v3/secrets/batch`,
|
||||
headers: {
|
||||
authorization: `Bearer ${serviceToken}`
|
||||
},
|
||||
body: {
|
||||
workspaceId: seedData1.project.id,
|
||||
environment: seedData1.environment.slug,
|
||||
secretPath: path,
|
||||
secrets: Array.from(Array(5)).map((_e, i) => ({
|
||||
secretName: `BULK-${secret.key}-${i + 1}`,
|
||||
...encryptSecret(projectKey, `BULK-${secret.key}-${i + 1}`, "update-value", secret.comment)
|
||||
}))
|
||||
}
|
||||
});
|
||||
expect(updateSharedSecRes.statusCode).toBe(200);
|
||||
const updateSharedSecPayload = JSON.parse(updateSharedSecRes.payload);
|
||||
expect(updateSharedSecPayload).toHaveProperty("secrets");
|
||||
|
||||
// bulk ones should exist
|
||||
const secrets = await getSecrets(seedData1.environment.slug, path);
|
||||
expect(secrets).toEqual(
|
||||
expect.arrayContaining(
|
||||
Array.from(Array(5)).map((_e, i) =>
|
||||
expect.objectContaining({
|
||||
key: `BULK-${secret.key}-${i + 1}`,
|
||||
value: "update-value",
|
||||
type: SecretType.Shared
|
||||
})
|
||||
)
|
||||
)
|
||||
);
|
||||
await Promise.all(
|
||||
Array.from(Array(5)).map((_e, i) =>
|
||||
deleteSecret({ path, key: `BULK-${secret.key}-${i + 1}`, token: serviceToken })
|
||||
)
|
||||
);
|
||||
});
|
||||
|
||||
test.each(testSecrets)("Bulk delete secrets in path $path", async ({ secret, path }) => {
|
||||
await Promise.all(
|
||||
Array.from(Array(5)).map((_e, i) =>
|
||||
createSecret({ projectKey, token: serviceToken, ...secret, key: `BULK-${secret.key}-${i + 1}`, path })
|
||||
)
|
||||
);
|
||||
|
||||
const deletedSharedSecRes = await testServer.inject({
|
||||
method: "DELETE",
|
||||
url: `/api/v3/secrets/batch`,
|
||||
headers: {
|
||||
authorization: `Bearer ${serviceToken}`
|
||||
},
|
||||
body: {
|
||||
workspaceId: seedData1.project.id,
|
||||
environment: seedData1.environment.slug,
|
||||
secretPath: path,
|
||||
secrets: Array.from(Array(5)).map((_e, i) => ({
|
||||
secretName: `BULK-${secret.key}-${i + 1}`
|
||||
}))
|
||||
}
|
||||
});
|
||||
|
||||
expect(deletedSharedSecRes.statusCode).toBe(200);
|
||||
const deletedSecretPayload = JSON.parse(deletedSharedSecRes.payload);
|
||||
expect(deletedSecretPayload).toHaveProperty("secrets");
|
||||
|
||||
// bulk ones should exist
|
||||
const secrets = await getSecrets(seedData1.environment.slug, path);
|
||||
expect(secrets).toEqual(
|
||||
expect.not.arrayContaining(
|
||||
Array.from(Array(5)).map((_e, i) =>
|
||||
expect.objectContaining({
|
||||
key: `BULK-${secret.value}-${i + 1}`,
|
||||
type: SecretType.Shared
|
||||
})
|
||||
)
|
||||
)
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe("Service token fail cases", async () => {
|
||||
test("Unauthorized secret path access", async () => {
|
||||
const serviceToken = await createServiceToken(
|
||||
[{ secretPath: "/", environment: seedData1.environment.slug }],
|
||||
["read", "write"]
|
||||
);
|
||||
const fetchSecrets = await testServer.inject({
|
||||
method: "GET",
|
||||
url: "/api/v3/secrets",
|
||||
query: {
|
||||
workspaceId: seedData1.project.id,
|
||||
environment: seedData1.environment.slug,
|
||||
secretPath: "/nested/deep"
|
||||
},
|
||||
headers: {
|
||||
authorization: `Bearer ${serviceToken}`
|
||||
}
|
||||
});
|
||||
expect(fetchSecrets.statusCode).toBe(401);
|
||||
expect(fetchSecrets.json().error).toBe("PermissionDenied");
|
||||
await deleteServiceToken();
|
||||
});
|
||||
|
||||
test("Unauthorized secret environment access", async () => {
|
||||
const serviceToken = await createServiceToken(
|
||||
[{ secretPath: "/", environment: seedData1.environment.slug }],
|
||||
["read", "write"]
|
||||
);
|
||||
const fetchSecrets = await testServer.inject({
|
||||
method: "GET",
|
||||
url: "/api/v3/secrets",
|
||||
query: {
|
||||
workspaceId: seedData1.project.id,
|
||||
environment: "prod",
|
||||
secretPath: "/"
|
||||
},
|
||||
headers: {
|
||||
authorization: `Bearer ${serviceToken}`
|
||||
}
|
||||
});
|
||||
expect(fetchSecrets.statusCode).toBe(401);
|
||||
expect(fetchSecrets.json().error).toBe("PermissionDenied");
|
||||
await deleteServiceToken();
|
||||
});
|
||||
|
||||
test("Unauthorized write operation", async () => {
|
||||
const serviceToken = await createServiceToken(
|
||||
[{ secretPath: "/", environment: seedData1.environment.slug }],
|
||||
["read"]
|
||||
);
|
||||
const writeSecrets = await testServer.inject({
|
||||
method: "POST",
|
||||
url: `/api/v3/secrets/NEW`,
|
||||
body: {
|
||||
workspaceId: seedData1.project.id,
|
||||
environment: seedData1.environment.slug,
|
||||
type: SecretType.Shared,
|
||||
secretPath: "/",
|
||||
// doesn't matter project key because this will fail before that due to read only access
|
||||
...encryptSecret(crypto.randomBytes(16).toString("hex"), "NEW", "value", "")
|
||||
},
|
||||
headers: {
|
||||
authorization: `Bearer ${serviceToken}`
|
||||
}
|
||||
});
|
||||
expect(writeSecrets.statusCode).toBe(401);
|
||||
expect(writeSecrets.json().error).toBe("PermissionDenied");
|
||||
|
||||
// but read access should still work fine
|
||||
const fetchSecrets = await testServer.inject({
|
||||
method: "GET",
|
||||
url: "/api/v3/secrets",
|
||||
query: {
|
||||
workspaceId: seedData1.project.id,
|
||||
environment: seedData1.environment.slug,
|
||||
secretPath: "/"
|
||||
},
|
||||
headers: {
|
||||
authorization: `Bearer ${serviceToken}`
|
||||
}
|
||||
});
|
||||
expect(fetchSecrets.statusCode).toBe(200);
|
||||
await deleteServiceToken();
|
||||
});
|
||||
});
|
@@ -1,31 +1,26 @@
// eslint-disable-next-line
import "ts-node/register";

// import { main } from "@app/server/app";
import { initEnvConfig } from "@app/lib/config/env";
import dotenv from "dotenv";
import jwt from "jsonwebtoken";
import knex from "knex";
import path from "path";

import { seedData1 } from "@app/db/seed-data";
import { initEnvConfig } from "@app/lib/config/env";
import { initLogger } from "@app/lib/logger";
import { main } from "@app/server/app";
import { AuthMethod, AuthTokenType } from "@app/services/auth/auth-type";

import { mockQueue } from "./mocks/queue";
import { mockSmtpServer } from "./mocks/smtp";
import { mockKeyStore } from "./mocks/keystore";
import { initLogger } from "@app/lib/logger";
import jwt from "jsonwebtoken";

dotenv.config({ path: path.join(__dirname, "../../.env.test"), debug: true });
import "ts-node/register";
import { main } from "@app/server/app";
import { mockQueue } from "./mocks/queue";
import { AuthTokenType } from "@app/services/auth/auth-type";
import { seedData1 } from "@app/db/seed-data";

dotenv.config({ path: path.join(__dirname, "../.env.test") });
export default {
name: "knex-env",
transformMode: "ssr",
async setup() {
const logger = await initLogger();
const cfg = initEnvConfig(logger);
const db = knex({
client: "pg",
connection: cfg.DB_CONNECTION_URI,
connection: process.env.DB_CONNECTION_URI,
migrations: {
directory: path.join(__dirname, "../src/db/migrations"),
extension: "ts",
@@ -42,8 +37,9 @@ export default {
await db.seed.run();
const smtp = mockSmtpServer();
const queue = mockQueue();
const keyStore = mockKeyStore();
const server = await main({ db, smtp, logger, queue, keyStore });
const logger = await initLogger();
const cfg = initEnvConfig(logger);
const server = await main({ db, smtp, logger, queue });
// @ts-expect-error type
globalThis.testServer = server;
// @ts-expect-error type
@@ -52,15 +48,12 @@ export default {
authTokenType: AuthTokenType.ACCESS_TOKEN,
userId: seedData1.id,
tokenVersionId: seedData1.token.id,
authMethod: AuthMethod.EMAIL,
organizationId: seedData1.organization.id,
accessVersion: 1
},
cfg.AUTH_SECRET,
{ expiresIn: cfg.JWT_AUTH_LIFETIME }
);
} catch (error) {
console.log("[TEST] Error setting up environment", error);
await db.destroy();
throw error;
}

backend/package-lock.json (generated, 4643 changed lines): diff suppressed because it is too large.
@@ -24,8 +24,8 @@
"migration:latest": "knex --knexfile ./src/db/knexfile.ts --client pg migrate:latest",
"migration:rollback": "knex --knexfile ./src/db/knexfile.ts migrate:rollback",
"seed:new": "tsx ./scripts/create-seed-file.ts",
"seed": "knex --knexfile ./src/db/knexfile.ts --client pg seed:run",
"db:reset": "npm run migration:rollback -- --all && npm run migration:latest"
"seed:run": "knex --knexfile ./src/db/knexfile.ts --client pg seed:run",
"db:reset": "npm run migration:rollback -- --all && npm run migration:latest && npm run seed:run"
},
"keywords": [],
"author": "",
@@ -67,73 +67,62 @@
"tsx": "^4.4.0",
"typescript": "^5.3.2",
"vite-tsconfig-paths": "^4.2.2",
"vitest": "^1.2.2"
"vitest": "^1.0.4"
},
"dependencies": {
"@aws-sdk/client-iam": "^3.525.0",
"@aws-sdk/client-secrets-manager": "^3.504.0",
"@aws-sdk/client-secrets-manager": "^3.485.0",
"@casl/ability": "^6.5.0",
"@fastify/cookie": "^9.3.1",
"@fastify/cors": "^8.5.0",
"@fastify/cookie": "^9.2.0",
"@fastify/cors": "^8.4.1",
"@fastify/etag": "^5.1.0",
"@fastify/formbody": "^7.4.0",
"@fastify/helmet": "^11.1.1",
"@fastify/passport": "^2.4.0",
"@fastify/rate-limit": "^9.0.0",
"@fastify/session": "^10.7.0",
"@fastify/swagger": "^8.14.0",
"@fastify/swagger-ui": "^2.1.0",
"@fastify/swagger": "^8.12.0",
"@fastify/swagger-ui": "^1.10.1",
"@node-saml/passport-saml": "^4.0.4",
"@octokit/rest": "^20.0.2",
"@octokit/webhooks-types": "^7.3.1",
"@peculiar/asn1-schema": "^2.3.8",
"@peculiar/x509": "^1.10.0",
"@serdnam/pino-cloudwatch-transport": "^1.0.4",
"@sindresorhus/slugify": "^2.2.1",
"@ucast/mongo2js": "^1.3.4",
"ajv": "^8.12.0",
"argon2": "^0.31.2",
"aws-sdk": "^2.1553.0",
"axios": "^1.6.7",
"aws-sdk": "^2.1532.0",
"axios": "^1.6.2",
"axios-retry": "^4.0.0",
"bcrypt": "^5.1.1",
"bullmq": "^5.4.2",
"cassandra-driver": "^4.7.2",
"dotenv": "^16.4.1",
"fastify": "^4.26.0",
"bullmq": "^5.1.1",
"dotenv": "^16.3.1",
"fastify": "^4.24.3",
"fastify-plugin": "^4.5.1",
"google-auth-library": "^9.9.0",
"googleapis": "^137.1.0",
"handlebars": "^4.7.8",
"ioredis": "^5.3.2",
"jmespath": "^0.16.0",
"jsonwebtoken": "^9.0.2",
"jsrp": "^0.2.4",
"knex": "^3.0.1",
"ldapjs": "^3.0.7",
"libsodium-wrappers": "^0.7.13",
"lodash.isequal": "^4.5.0",
"ms": "^2.1.3",
"mysql2": "^3.9.8",
"mysql2": "^3.6.5",
"nanoid": "^5.0.4",
"nodemailer": "^6.9.9",
"ora": "^7.0.1",
"oracledb": "^6.4.0",
"node-cache": "^5.1.2",
"nodemailer": "^6.9.7",
"passport-github": "^1.1.0",
"passport-gitlab2": "^5.0.0",
"passport-google-oauth20": "^2.0.0",
"passport-ldapauth": "^3.0.1",
"pg": "^8.11.3",
"pg-query-stream": "^4.5.3",
"picomatch": "^3.0.1",
"pino": "^8.16.2",
"posthog-node": "^3.6.2",
"probot": "^13.0.0",
"posthog-node": "^3.6.0",
"probot": "^12.3.3",
"smee-client": "^2.0.0",
"tweetnacl": "^1.0.3",
"tweetnacl-util": "^0.15.1",
"uuid": "^9.0.1",
"zod": "^3.22.4",
"zod-to-json-schema": "^3.22.4"
"zod-to-json-schema": "^3.22.0"
}
}
}
@@ -103,15 +103,11 @@ export const ${dalName} = (db: TDbClient) => {
`import { z } from "zod";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
import { readLimit } from "@app/server/config/rateLimiter";

export const register${pascalCase}Router = async (server: FastifyZodProvider) => {
server.route({
method: "GET",
url: "/",
config: {
rateLimit: readLimit
},
method: "GET",
schema: {
params: z.object({}),
response: {

@@ -7,10 +7,10 @@ const prompt = promptSync({ sigint: true });

const migrationName = prompt("Enter name for migration: ");

// Remove spaces from migration name and replace with hyphens
const formattedMigrationName = migrationName.replace(/\s+/g, "-");

execSync(
`npx knex migrate:make --knexfile ${path.join(__dirname, "../src/db/knexfile.ts")} -x ts ${formattedMigrationName}`,
`npx knex migrate:make --knexfile ${path.join(
__dirname,
"../src/db/knexfile.ts"
)} -x ts ${migrationName}`,
{ stdio: "inherit" }
);

@@ -7,10 +7,11 @@ import promptSync from "prompt-sync";
const prompt = promptSync({ sigint: true });

const migrationName = prompt("Enter name for seedfile: ");
const fileCounter = readdirSync(path.join(__dirname, "../src/db/seeds")).length || 1;
const fileCounter = readdirSync(path.join(__dirname, "../src/db/seed")).length || 1;
execSync(
`npx knex seed:make --knexfile ${path.join(__dirname, "../src/db/knexfile.ts")} -x ts ${
fileCounter + 1
}-${migrationName}`,
`npx knex seed:make --knexfile ${path.join(
__dirname,
"../src/db/knexfile.ts"
)} -x ts ${fileCounter}-${migrationName}`,
{ stdio: "inherit" }
);

@@ -35,8 +35,6 @@ const getZodPrimitiveType = (type: string) => {
return "z.coerce.number()";
case "text":
return "z.string()";
case "bytea":
return "zodBuffer";
default:
throw new Error(`Invalid type: ${type}`);
}
@@ -46,7 +44,7 @@ const getZodDefaultValue = (type: unknown, value: string | number | boolean | Ob
if (!value || value === "null") return;
switch (type) {
case "uuid":
return `.default("00000000-0000-0000-0000-000000000000")`;
return;
case "character varying": {
if (value === "gen_random_uuid()") return;
if (typeof value === "string" && value.includes("::")) {
@@ -98,17 +96,11 @@ const main = async () => {
const columnNames = Object.keys(columns);

let schema = "";
const zodImportSet = new Set<string>();
for (let colNum = 0; colNum < columnNames.length; colNum++) {
const columnName = columnNames[colNum];
const colInfo = columns[columnName];
let ztype = getZodPrimitiveType(colInfo.type);
if (["zodBuffer"].includes(ztype)) {
zodImportSet.add(ztype);
}

// don't put optional on id
if (colInfo.defaultValue && columnName !== "id") {
if (colInfo.defaultValue) {
const { defaultValue } = colInfo;
const zSchema = getZodDefaultValue(colInfo.type, defaultValue);
if (zSchema) {
@@ -128,9 +120,6 @@ const main = async () => {
.split("_")
.reduce((prev, curr) => prev + `${curr.at(0)?.toUpperCase()}${curr.slice(1).toLowerCase()}`, "");

const zodImports = Array.from(zodImportSet);

// the insert and update are changed to zod input type to use default cases
writeFileSync(
path.join(__dirname, "../src/db/schemas", `${dashcase}.ts`),
`// Code generated by automation script, DO NOT EDIT.
@@ -140,15 +129,13 @@ const main = async () => {

import { z } from "zod";

${zodImports.length ? `import { ${zodImports.join(",")} } from \"@app/lib/zod\";` : ""}

import { TImmutableDBKeys } from "./models";

export const ${pascalCase}Schema = z.object({${schema}});

export type T${pascalCase} = z.infer<typeof ${pascalCase}Schema>;
export type T${pascalCase}Insert = Omit<z.input<typeof ${pascalCase}Schema>, TImmutableDBKeys>;
export type T${pascalCase}Update = Partial<Omit<z.input<typeof ${pascalCase}Schema>, TImmutableDBKeys>>;
export type T${pascalCase}Insert = Omit<T${pascalCase}, TImmutableDBKeys>;
export type T${pascalCase}Update = Partial<Omit<T${pascalCase}, TImmutableDBKeys>>;
`
);
}

backend/src/@types/fastify.d.ts (vendored, 46 changed lines)
@ -1,22 +1,11 @@
|
||||
import "fastify";
|
||||
|
||||
import { TUsers } from "@app/db/schemas";
|
||||
import { TAccessApprovalPolicyServiceFactory } from "@app/ee/services/access-approval-policy/access-approval-policy-service";
|
||||
import { TAccessApprovalRequestServiceFactory } from "@app/ee/services/access-approval-request/access-approval-request-service";
|
||||
import { TAuditLogServiceFactory } from "@app/ee/services/audit-log/audit-log-service";
|
||||
import { TCreateAuditLogDTO } from "@app/ee/services/audit-log/audit-log-types";
|
||||
import { TAuditLogStreamServiceFactory } from "@app/ee/services/audit-log-stream/audit-log-stream-service";
|
||||
import { TCertificateAuthorityCrlServiceFactory } from "@app/ee/services/certificate-authority-crl/certificate-authority-crl-service";
|
||||
import { TDynamicSecretServiceFactory } from "@app/ee/services/dynamic-secret/dynamic-secret-service";
|
||||
import { TDynamicSecretLeaseServiceFactory } from "@app/ee/services/dynamic-secret-lease/dynamic-secret-lease-service";
|
||||
import { TGroupServiceFactory } from "@app/ee/services/group/group-service";
|
||||
import { TIdentityProjectAdditionalPrivilegeServiceFactory } from "@app/ee/services/identity-project-additional-privilege/identity-project-additional-privilege-service";
|
||||
import { TLdapConfigServiceFactory } from "@app/ee/services/ldap-config/ldap-config-service";
|
||||
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
|
||||
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
|
||||
import { TProjectUserAdditionalPrivilegeServiceFactory } from "@app/ee/services/project-user-additional-privilege/project-user-additional-privilege-service";
|
||||
import { TSamlConfigServiceFactory } from "@app/ee/services/saml-config/saml-config-service";
|
||||
import { TScimServiceFactory } from "@app/ee/services/scim/scim-service";
|
||||
import { TSecretApprovalPolicyServiceFactory } from "@app/ee/services/secret-approval-policy/secret-approval-policy-service";
|
||||
import { TSecretApprovalRequestServiceFactory } from "@app/ee/services/secret-approval-request/secret-approval-request-service";
|
||||
import { TSecretRotationServiceFactory } from "@app/ee/services/secret-rotation/secret-rotation-service";
|
||||
@ -28,17 +17,10 @@ import { TApiKeyServiceFactory } from "@app/services/api-key/api-key-service";
|
||||
import { TAuthLoginFactory } from "@app/services/auth/auth-login-service";
|
||||
import { TAuthPasswordFactory } from "@app/services/auth/auth-password-service";
|
||||
import { TAuthSignupFactory } from "@app/services/auth/auth-signup-service";
|
||||
import { ActorAuthMethod, ActorType } from "@app/services/auth/auth-type";
|
||||
import { ActorType } from "@app/services/auth/auth-type";
|
||||
import { TAuthTokenServiceFactory } from "@app/services/auth-token/auth-token-service";
|
||||
import { TCertificateServiceFactory } from "@app/services/certificate/certificate-service";
|
||||
import { TCertificateAuthorityServiceFactory } from "@app/services/certificate-authority/certificate-authority-service";
|
||||
import { TGroupProjectServiceFactory } from "@app/services/group-project/group-project-service";
|
||||
import { TIdentityServiceFactory } from "@app/services/identity/identity-service";
|
||||
import { TIdentityAccessTokenServiceFactory } from "@app/services/identity-access-token/identity-access-token-service";
|
||||
import { TIdentityAwsAuthServiceFactory } from "@app/services/identity-aws-auth/identity-aws-auth-service";
|
||||
import { TIdentityAzureAuthServiceFactory } from "@app/services/identity-azure-auth/identity-azure-auth-service";
|
||||
import { TIdentityGcpAuthServiceFactory } from "@app/services/identity-gcp-auth/identity-gcp-auth-service";
|
||||
import { TIdentityKubernetesAuthServiceFactory } from "@app/services/identity-kubernetes-auth/identity-kubernetes-auth-service";
|
||||
import { TIdentityProjectServiceFactory } from "@app/services/identity-project/identity-project-service";
|
||||
import { TIdentityUaServiceFactory } from "@app/services/identity-ua/identity-ua-service";
|
||||
import { TIntegrationServiceFactory } from "@app/services/integration/integration-service";
|
||||
@ -55,8 +37,6 @@ import { TSecretServiceFactory } from "@app/services/secret/secret-service";
|
||||
import { TSecretBlindIndexServiceFactory } from "@app/services/secret-blind-index/secret-blind-index-service";
|
||||
import { TSecretFolderServiceFactory } from "@app/services/secret-folder/secret-folder-service";
|
||||
import { TSecretImportServiceFactory } from "@app/services/secret-import/secret-import-service";
|
||||
import { TSecretReplicationServiceFactory } from "@app/services/secret-replication/secret-replication-service";
|
||||
import { TSecretSharingServiceFactory } from "@app/services/secret-sharing/secret-sharing-service";
|
||||
import { TSecretTagServiceFactory } from "@app/services/secret-tag/secret-tag-service";
|
||||
import { TServiceTokenServiceFactory } from "@app/services/service-token/service-token-service";
|
||||
import { TSuperAdminServiceFactory } from "@app/services/super-admin/super-admin-service";
|
||||
@ -77,10 +57,9 @@ declare module "fastify" {
|
||||
// identity injection. depending on which kinda of token the information is filled in auth
|
||||
auth: TAuthMode;
|
||||
permission: {
|
||||
authMethod: ActorAuthMethod;
|
||||
type: ActorType;
|
||||
id: string;
|
||||
orgId: string;
|
||||
orgId?: string;
|
||||
};
|
||||
// passport data
|
||||
passportUser: {
|
||||
@ -89,7 +68,6 @@ declare module "fastify" {
|
||||
};
|
||||
auditLogInfo: Pick<TCreateAuditLogDTO, "userAgent" | "userAgentType" | "ipAddress" | "actor">;
|
||||
ssoConfig: Awaited<ReturnType<TSamlConfigServiceFactory["getSaml"]>>;
|
||||
ldapConfig: Awaited<ReturnType<TLdapConfigServiceFactory["getLdapCfg"]>>;
|
||||
}
|
||||
|
||||
interface FastifyInstance {
|
||||
@ -103,8 +81,6 @@ declare module "fastify" {
|
||||
orgRole: TOrgRoleServiceFactory;
|
||||
superAdmin: TSuperAdminServiceFactory;
|
||||
user: TUserServiceFactory;
|
||||
group: TGroupServiceFactory;
|
||||
groupProject: TGroupProjectServiceFactory;
|
||||
apiKey: TApiKeyServiceFactory;
|
||||
project: TProjectServiceFactory;
|
||||
projectMembership: TProjectMembershipServiceFactory;
|
||||
@ -112,7 +88,6 @@ declare module "fastify" {
|
||||
projectKey: TProjectKeyServiceFactory;
|
||||
projectRole: TProjectRoleServiceFactory;
|
||||
secret: TSecretServiceFactory;
|
||||
secretReplication: TSecretReplicationServiceFactory;
|
||||
secretTag: TSecretTagServiceFactory;
|
||||
secretImport: TSecretImportServiceFactory;
|
||||
projectBot: TProjectBotServiceFactory;
|
||||
@ -125,34 +100,17 @@ declare module "fastify" {
|
||||
identityAccessToken: TIdentityAccessTokenServiceFactory;
|
||||
identityProject: TIdentityProjectServiceFactory;
|
||||
identityUa: TIdentityUaServiceFactory;
|
||||
identityKubernetesAuth: TIdentityKubernetesAuthServiceFactory;
|
||||
identityGcpAuth: TIdentityGcpAuthServiceFactory;
|
||||
identityAwsAuth: TIdentityAwsAuthServiceFactory;
|
||||
identityAzureAuth: TIdentityAzureAuthServiceFactory;
|
||||
accessApprovalPolicy: TAccessApprovalPolicyServiceFactory;
|
||||
accessApprovalRequest: TAccessApprovalRequestServiceFactory;
|
||||
secretApprovalPolicy: TSecretApprovalPolicyServiceFactory;
|
||||
secretApprovalRequest: TSecretApprovalRequestServiceFactory;
|
||||
secretRotation: TSecretRotationServiceFactory;
|
||||
snapshot: TSecretSnapshotServiceFactory;
|
||||
saml: TSamlConfigServiceFactory;
|
||||
scim: TScimServiceFactory;
|
||||
ldap: TLdapConfigServiceFactory;
|
||||
auditLog: TAuditLogServiceFactory;
|
||||
auditLogStream: TAuditLogStreamServiceFactory;
|
||||
certificate: TCertificateServiceFactory;
|
||||
certificateAuthority: TCertificateAuthorityServiceFactory;
|
||||
certificateAuthorityCrl: TCertificateAuthorityCrlServiceFactory;
|
||||
secretScanning: TSecretScanningServiceFactory;
|
||||
license: TLicenseServiceFactory;
|
||||
trustedIp: TTrustedIpServiceFactory;
|
||||
secretBlindIndex: TSecretBlindIndexServiceFactory;
|
||||
telemetry: TTelemetryServiceFactory;
|
||||
dynamicSecret: TDynamicSecretServiceFactory;
|
||||
dynamicSecretLease: TDynamicSecretLeaseServiceFactory;
|
||||
projectUserAdditionalPrivilege: TProjectUserAdditionalPrivilegeServiceFactory;
|
||||
identityProjectAdditionalPrivilege: TIdentityProjectAdditionalPrivilegeServiceFactory;
|
||||
secretSharing: TSecretSharingServiceFactory;
|
||||
};
|
||||
// this is exclusive use for middlewares in which we need to inject data
|
||||
// everywhere else access using service layer
|
||||
|
backend/src/@types/knex.d.ts (vendored, 246 changed lines)
@ -2,26 +2,11 @@ import { Knex } from "knex";
|
||||
|
||||
import {
|
||||
TableName,
|
||||
TAccessApprovalPolicies,
|
||||
TAccessApprovalPoliciesApprovers,
|
||||
TAccessApprovalPoliciesApproversInsert,
|
||||
TAccessApprovalPoliciesApproversUpdate,
|
||||
TAccessApprovalPoliciesInsert,
|
||||
TAccessApprovalPoliciesUpdate,
|
||||
TAccessApprovalRequests,
|
||||
TAccessApprovalRequestsInsert,
|
||||
TAccessApprovalRequestsReviewers,
|
||||
TAccessApprovalRequestsReviewersInsert,
|
||||
TAccessApprovalRequestsReviewersUpdate,
|
||||
TAccessApprovalRequestsUpdate,
|
||||
TApiKeys,
|
||||
TApiKeysInsert,
|
||||
TApiKeysUpdate,
|
||||
TAuditLogs,
|
||||
TAuditLogsInsert,
|
||||
TAuditLogStreams,
|
||||
TAuditLogStreamsInsert,
|
||||
TAuditLogStreamsUpdate,
|
||||
TAuditLogsUpdate,
|
||||
TAuthTokens,
|
||||
TAuthTokenSessions,
|
||||
@ -32,75 +17,21 @@ import {
|
||||
TBackupPrivateKey,
|
||||
TBackupPrivateKeyInsert,
|
||||
TBackupPrivateKeyUpdate,
|
||||
TCertificateAuthorities,
|
||||
TCertificateAuthoritiesInsert,
|
||||
TCertificateAuthoritiesUpdate,
|
||||
TCertificateAuthorityCerts,
|
||||
TCertificateAuthorityCertsInsert,
|
||||
TCertificateAuthorityCertsUpdate,
|
||||
TCertificateAuthorityCrl,
|
||||
TCertificateAuthorityCrlInsert,
|
||||
TCertificateAuthorityCrlUpdate,
|
||||
TCertificateAuthoritySecret,
|
||||
TCertificateAuthoritySecretInsert,
|
||||
TCertificateAuthoritySecretUpdate,
|
||||
TCertificateBodies,
|
||||
TCertificateBodiesInsert,
|
||||
TCertificateBodiesUpdate,
|
||||
TCertificates,
|
||||
TCertificateSecrets,
|
||||
TCertificateSecretsInsert,
|
||||
TCertificateSecretsUpdate,
|
||||
TCertificatesInsert,
|
||||
TCertificatesUpdate,
|
||||
TDynamicSecretLeases,
|
||||
TDynamicSecretLeasesInsert,
|
||||
TDynamicSecretLeasesUpdate,
|
||||
TDynamicSecrets,
|
||||
TDynamicSecretsInsert,
|
||||
TDynamicSecretsUpdate,
|
||||
TGitAppInstallSessions,
|
||||
TGitAppInstallSessionsInsert,
|
||||
TGitAppInstallSessionsUpdate,
|
||||
TGitAppOrg,
|
||||
TGitAppOrgInsert,
|
||||
TGitAppOrgUpdate,
|
||||
TGroupProjectMembershipRoles,
|
||||
TGroupProjectMembershipRolesInsert,
|
||||
TGroupProjectMembershipRolesUpdate,
|
||||
TGroupProjectMemberships,
|
||||
TGroupProjectMembershipsInsert,
|
||||
TGroupProjectMembershipsUpdate,
|
||||
TGroups,
|
||||
TGroupsInsert,
|
||||
TGroupsUpdate,
|
||||
TIdentities,
|
||||
TIdentitiesInsert,
|
||||
TIdentitiesUpdate,
|
||||
TIdentityAccessTokens,
|
||||
TIdentityAccessTokensInsert,
|
||||
TIdentityAccessTokensUpdate,
|
||||
TIdentityAwsAuths,
|
||||
TIdentityAwsAuthsInsert,
|
||||
TIdentityAwsAuthsUpdate,
|
||||
TIdentityAzureAuths,
|
||||
TIdentityAzureAuthsInsert,
|
||||
TIdentityAzureAuthsUpdate,
|
||||
TIdentityGcpAuths,
|
||||
TIdentityGcpAuthsInsert,
|
||||
TIdentityGcpAuthsUpdate,
|
||||
TIdentityKubernetesAuths,
|
||||
TIdentityKubernetesAuthsInsert,
|
||||
TIdentityKubernetesAuthsUpdate,
|
||||
TIdentityOrgMemberships,
|
||||
TIdentityOrgMembershipsInsert,
|
||||
TIdentityOrgMembershipsUpdate,
|
||||
TIdentityProjectAdditionalPrivilege,
|
||||
TIdentityProjectAdditionalPrivilegeInsert,
|
||||
TIdentityProjectAdditionalPrivilegeUpdate,
|
||||
TIdentityProjectMembershipRole,
|
||||
TIdentityProjectMembershipRoleInsert,
|
||||
TIdentityProjectMembershipRoleUpdate,
|
||||
TIdentityProjectMemberships,
|
||||
TIdentityProjectMembershipsInsert,
|
||||
TIdentityProjectMembershipsUpdate,
|
||||
@ -119,21 +50,6 @@ import {
|
||||
TIntegrations,
|
||||
TIntegrationsInsert,
|
||||
TIntegrationsUpdate,
|
||||
TKmsKeys,
|
||||
TKmsKeysInsert,
|
||||
TKmsKeysUpdate,
|
||||
TKmsKeyVersions,
|
||||
TKmsKeyVersionsInsert,
|
||||
TKmsKeyVersionsUpdate,
|
||||
TKmsRootConfig,
|
||||
TKmsRootConfigInsert,
|
||||
TKmsRootConfigUpdate,
|
||||
TLdapConfigs,
|
||||
TLdapConfigsInsert,
|
||||
TLdapConfigsUpdate,
|
||||
TLdapGroupMaps,
|
||||
TLdapGroupMapsInsert,
|
||||
TLdapGroupMapsUpdate,
|
||||
TOrganizations,
|
||||
TOrganizationsInsert,
|
||||
TOrganizationsUpdate,
|
||||
@ -164,18 +80,9 @@ import {
|
||||
TProjects,
|
||||
TProjectsInsert,
|
||||
TProjectsUpdate,
|
||||
TProjectUserAdditionalPrivilege,
|
||||
TProjectUserAdditionalPrivilegeInsert,
|
||||
TProjectUserAdditionalPrivilegeUpdate,
|
||||
TProjectUserMembershipRoles,
|
||||
TProjectUserMembershipRolesInsert,
|
||||
TProjectUserMembershipRolesUpdate,
|
||||
TSamlConfigs,
|
||||
TSamlConfigsInsert,
|
||||
TSamlConfigsUpdate,
|
||||
TScimTokens,
|
||||
TScimTokensInsert,
|
||||
TScimTokensUpdate,
|
||||
TSecretApprovalPolicies,
|
||||
TSecretApprovalPoliciesApprovers,
|
||||
TSecretApprovalPoliciesApproversInsert,
|
||||
@ -206,9 +113,6 @@ import {
|
||||
TSecretImports,
|
||||
TSecretImportsInsert,
|
||||
TSecretImportsUpdate,
|
||||
TSecretReferences,
|
||||
TSecretReferencesInsert,
|
||||
TSecretReferencesUpdate,
|
||||
TSecretRotationOutputs,
|
||||
TSecretRotationOutputsInsert,
|
||||
TSecretRotationOutputsUpdate,
|
||||
@ -219,9 +123,6 @@ import {
|
||||
TSecretScanningGitRisks,
|
||||
TSecretScanningGitRisksInsert,
|
||||
TSecretScanningGitRisksUpdate,
|
||||
TSecretSharing,
|
||||
TSecretSharingInsert,
|
||||
TSecretSharingUpdate,
|
||||
TSecretsInsert,
|
||||
TSecretSnapshotFolders,
|
||||
TSecretSnapshotFoldersInsert,
|
||||
@ -257,15 +158,9 @@ import {
|
||||
TUserActions,
|
||||
TUserActionsInsert,
|
||||
TUserActionsUpdate,
|
||||
TUserAliases,
|
||||
TUserAliasesInsert,
|
||||
TUserAliasesUpdate,
|
||||
TUserEncryptionKeys,
|
||||
TUserEncryptionKeysInsert,
|
||||
TUserEncryptionKeysUpdate,
|
||||
TUserGroupMembership,
|
||||
TUserGroupMembershipInsert,
|
||||
TUserGroupMembershipUpdate,
|
||||
TUsers,
|
||||
TUsersInsert,
|
||||
TUsersUpdate,
|
||||
@ -277,54 +172,6 @@ import {
|
||||
declare module "knex/types/tables" {
|
||||
interface Tables {
|
||||
[TableName.Users]: Knex.CompositeTableType<TUsers, TUsersInsert, TUsersUpdate>;
|
||||
[TableName.Groups]: Knex.CompositeTableType<TGroups, TGroupsInsert, TGroupsUpdate>;
|
||||
[TableName.CertificateAuthority]: Knex.CompositeTableType<
|
||||
TCertificateAuthorities,
|
||||
TCertificateAuthoritiesInsert,
|
||||
TCertificateAuthoritiesUpdate
|
||||
>;
|
||||
[TableName.CertificateAuthorityCert]: Knex.CompositeTableType<
|
||||
TCertificateAuthorityCerts,
|
||||
TCertificateAuthorityCertsInsert,
|
||||
TCertificateAuthorityCertsUpdate
|
||||
>;
|
||||
[TableName.CertificateAuthoritySecret]: Knex.CompositeTableType<
|
||||
TCertificateAuthoritySecret,
|
||||
TCertificateAuthoritySecretInsert,
|
||||
TCertificateAuthoritySecretUpdate
|
||||
>;
|
||||
[TableName.CertificateAuthorityCrl]: Knex.CompositeTableType<
|
||||
TCertificateAuthorityCrl,
|
||||
TCertificateAuthorityCrlInsert,
|
||||
TCertificateAuthorityCrlUpdate
|
||||
>;
|
||||
[TableName.Certificate]: Knex.CompositeTableType<TCertificates, TCertificatesInsert, TCertificatesUpdate>;
|
||||
[TableName.CertificateBody]: Knex.CompositeTableType<
|
||||
TCertificateBodies,
|
||||
TCertificateBodiesInsert,
|
||||
TCertificateBodiesUpdate
|
||||
>;
|
||||
[TableName.CertificateSecret]: Knex.CompositeTableType<
|
||||
TCertificateSecrets,
|
||||
TCertificateSecretsInsert,
|
||||
TCertificateSecretsUpdate
|
||||
>;
|
||||
[TableName.UserGroupMembership]: Knex.CompositeTableType<
|
||||
TUserGroupMembership,
|
||||
TUserGroupMembershipInsert,
|
||||
TUserGroupMembershipUpdate
|
||||
>;
|
||||
[TableName.GroupProjectMembership]: Knex.CompositeTableType<
|
||||
TGroupProjectMemberships,
|
||||
TGroupProjectMembershipsInsert,
|
||||
TGroupProjectMembershipsUpdate
|
||||
>;
|
||||
[TableName.GroupProjectMembershipRole]: Knex.CompositeTableType<
|
||||
TGroupProjectMembershipRoles,
|
||||
TGroupProjectMembershipRolesInsert,
|
||||
TGroupProjectMembershipRolesUpdate
|
||||
>;
|
||||
[TableName.UserAliases]: Knex.CompositeTableType<TUserAliases, TUserAliasesInsert, TUserAliasesUpdate>;
|
||||
[TableName.UserEncryptionKey]: Knex.CompositeTableType<
|
||||
TUserEncryptionKeys,
|
||||
TUserEncryptionKeysInsert,
|
||||
@ -364,24 +211,9 @@ declare module "knex/types/tables" {
|
||||
TProjectEnvironmentsUpdate
|
||||
>;
|
||||
[TableName.ProjectBot]: Knex.CompositeTableType<TProjectBots, TProjectBotsInsert, TProjectBotsUpdate>;
|
||||
[TableName.ProjectUserMembershipRole]: Knex.CompositeTableType<
|
||||
TProjectUserMembershipRoles,
|
||||
TProjectUserMembershipRolesInsert,
|
||||
TProjectUserMembershipRolesUpdate
|
||||
>;
|
||||
[TableName.ProjectRoles]: Knex.CompositeTableType<TProjectRoles, TProjectRolesInsert, TProjectRolesUpdate>;
|
||||
[TableName.ProjectUserAdditionalPrivilege]: Knex.CompositeTableType<
|
||||
TProjectUserAdditionalPrivilege,
|
||||
TProjectUserAdditionalPrivilegeInsert,
|
||||
TProjectUserAdditionalPrivilegeUpdate
|
||||
>;
|
||||
[TableName.ProjectKeys]: Knex.CompositeTableType<TProjectKeys, TProjectKeysInsert, TProjectKeysUpdate>;
|
||||
[TableName.Secret]: Knex.CompositeTableType<TSecrets, TSecretsInsert, TSecretsUpdate>;
|
||||
[TableName.SecretReference]: Knex.CompositeTableType<
|
||||
TSecretReferences,
|
||||
TSecretReferencesInsert,
|
||||
TSecretReferencesUpdate
|
||||
>;
|
||||
[TableName.SecretBlindIndex]: Knex.CompositeTableType<
|
||||
TSecretBlindIndexes,
|
||||
TSecretBlindIndexesInsert,
|
||||
@ -394,7 +226,6 @@ declare module "knex/types/tables" {
|
||||
TSecretFolderVersionsInsert,
|
||||
TSecretFolderVersionsUpdate
|
||||
>;
|
||||
[TableName.SecretSharing]: Knex.CompositeTableType<TSecretSharing, TSecretSharingInsert, TSecretSharingUpdate>;
|
||||
[TableName.SecretTag]: Knex.CompositeTableType<TSecretTags, TSecretTagsInsert, TSecretTagsUpdate>;
|
||||
[TableName.SecretImport]: Knex.CompositeTableType<TSecretImports, TSecretImportsInsert, TSecretImportsUpdate>;
|
||||
[TableName.Integration]: Knex.CompositeTableType<TIntegrations, TIntegrationsInsert, TIntegrationsUpdate>;
|
||||
@ -411,26 +242,6 @@ declare module "knex/types/tables" {
|
||||
TIdentityUniversalAuthsInsert,
|
||||
TIdentityUniversalAuthsUpdate
|
||||
>;
|
||||
[TableName.IdentityKubernetesAuth]: Knex.CompositeTableType<
|
||||
TIdentityKubernetesAuths,
|
||||
TIdentityKubernetesAuthsInsert,
|
||||
TIdentityKubernetesAuthsUpdate
|
||||
>;
|
||||
[TableName.IdentityGcpAuth]: Knex.CompositeTableType<
|
||||
TIdentityGcpAuths,
|
||||
TIdentityGcpAuthsInsert,
|
||||
TIdentityGcpAuthsUpdate
|
||||
>;
|
||||
[TableName.IdentityAwsAuth]: Knex.CompositeTableType<
|
||||
TIdentityAwsAuths,
|
||||
TIdentityAwsAuthsInsert,
|
||||
TIdentityAwsAuthsUpdate
|
||||
>;
|
||||
[TableName.IdentityAzureAuth]: Knex.CompositeTableType<
|
||||
TIdentityAzureAuths,
|
||||
TIdentityAzureAuthsInsert,
|
||||
TIdentityAzureAuthsUpdate
|
||||
>;
|
||||
[TableName.IdentityUaClientSecret]: Knex.CompositeTableType<
|
||||
TIdentityUaClientSecrets,
|
||||
TIdentityUaClientSecretsInsert,
|
||||
@ -451,42 +262,6 @@ declare module "knex/types/tables" {
|
||||
TIdentityProjectMembershipsInsert,
|
||||
TIdentityProjectMembershipsUpdate
|
||||
>;
|
||||
[TableName.IdentityProjectMembershipRole]: Knex.CompositeTableType<
|
||||
TIdentityProjectMembershipRole,
|
||||
TIdentityProjectMembershipRoleInsert,
|
||||
TIdentityProjectMembershipRoleUpdate
|
||||
>;
|
||||
[TableName.IdentityProjectAdditionalPrivilege]: Knex.CompositeTableType<
|
||||
TIdentityProjectAdditionalPrivilege,
|
||||
TIdentityProjectAdditionalPrivilegeInsert,
|
||||
TIdentityProjectAdditionalPrivilegeUpdate
|
||||
>;
|
||||
|
||||
[TableName.AccessApprovalPolicy]: Knex.CompositeTableType<
|
||||
TAccessApprovalPolicies,
|
||||
TAccessApprovalPoliciesInsert,
|
||||
TAccessApprovalPoliciesUpdate
|
||||
>;
|
||||
|
||||
[TableName.AccessApprovalPolicyApprover]: Knex.CompositeTableType<
|
||||
TAccessApprovalPoliciesApprovers,
|
||||
TAccessApprovalPoliciesApproversInsert,
|
||||
TAccessApprovalPoliciesApproversUpdate
|
||||
>;
|
||||
|
||||
[TableName.AccessApprovalRequest]: Knex.CompositeTableType<
|
||||
TAccessApprovalRequests,
|
||||
TAccessApprovalRequestsInsert,
|
||||
TAccessApprovalRequestsUpdate
|
||||
>;
|
||||
|
||||
[TableName.AccessApprovalRequestReviewer]: Knex.CompositeTableType<
|
||||
TAccessApprovalRequestsReviewers,
|
||||
TAccessApprovalRequestsReviewersInsert,
|
||||
TAccessApprovalRequestsReviewersUpdate
|
||||
>;
|
||||
|
||||
[TableName.ScimToken]: Knex.CompositeTableType<TScimTokens, TScimTokensInsert, TScimTokensUpdate>;
|
||||
[TableName.SecretApprovalPolicy]: Knex.CompositeTableType<
|
||||
TSecretApprovalPolicies,
|
||||
TSecretApprovalPoliciesInsert,
|
||||
@ -538,22 +313,9 @@ declare module "knex/types/tables" {
|
||||
TSecretSnapshotFoldersInsert,
|
||||
TSecretSnapshotFoldersUpdate
|
||||
>;
|
||||
[TableName.DynamicSecret]: Knex.CompositeTableType<TDynamicSecrets, TDynamicSecretsInsert, TDynamicSecretsUpdate>;
|
||||
[TableName.DynamicSecretLease]: Knex.CompositeTableType<
|
||||
TDynamicSecretLeases,
|
||||
TDynamicSecretLeasesInsert,
|
||||
TDynamicSecretLeasesUpdate
|
||||
>;
|
||||
[TableName.SamlConfig]: Knex.CompositeTableType<TSamlConfigs, TSamlConfigsInsert, TSamlConfigsUpdate>;
|
||||
[TableName.LdapConfig]: Knex.CompositeTableType<TLdapConfigs, TLdapConfigsInsert, TLdapConfigsUpdate>;
|
||||
[TableName.LdapGroupMap]: Knex.CompositeTableType<TLdapGroupMaps, TLdapGroupMapsInsert, TLdapGroupMapsUpdate>;
|
||||
[TableName.OrgBot]: Knex.CompositeTableType<TOrgBots, TOrgBotsInsert, TOrgBotsUpdate>;
|
||||
[TableName.AuditLog]: Knex.CompositeTableType<TAuditLogs, TAuditLogsInsert, TAuditLogsUpdate>;
|
||||
[TableName.AuditLogStream]: Knex.CompositeTableType<
|
||||
TAuditLogStreams,
|
||||
TAuditLogStreamsInsert,
|
||||
TAuditLogStreamsUpdate
|
||||
>;
|
||||
[TableName.GitAppInstallSession]: Knex.CompositeTableType<
|
||||
TGitAppInstallSessions,
|
||||
TGitAppInstallSessionsInsert,
|
||||
@ -577,13 +339,5 @@ declare module "knex/types/tables" {
|
||||
TSecretVersionTagJunctionInsert,
|
||||
TSecretVersionTagJunctionUpdate
|
||||
>;
|
||||
// KMS service
|
||||
[TableName.KmsServerRootConfig]: Knex.CompositeTableType<
|
||||
TKmsRootConfig,
|
||||
TKmsRootConfigInsert,
|
||||
TKmsRootConfigUpdate
|
||||
>;
|
||||
[TableName.KmsKey]: Knex.CompositeTableType<TKmsKeys, TKmsKeysInsert, TKmsKeysUpdate>;
|
||||
[TableName.KmsKeyVersion]: Knex.CompositeTableType<TKmsKeyVersions, TKmsKeyVersionsInsert, TKmsKeyVersionsUpdate>;
|
||||
}
|
||||
}
|
||||
6
backend/src/cache/redis.ts
vendored
Normal file
@ -0,0 +1,6 @@
import Redis from "ioredis";

export const initRedisConnection = (redisUrl: string) => {
  const redis = new Redis(redisUrl);
  return redis;
};
@ -6,13 +6,6 @@ export const initDbConnection = ({ dbConnectionUri, dbRootCert }: { dbConnection
    client: "pg",
    connection: {
      connectionString: dbConnectionUri,
      host: process.env.DB_HOST,
      // @ts-expect-error I have no clue why only for the port there is a type error
      // eslint-disable-next-line
      port: process.env.DB_PORT,
      user: process.env.DB_USER,
      database: process.env.DB_NAME,
      password: process.env.DB_PASSWORD,
      ssl: dbRootCert
        ? {
            rejectUnauthorized: true,
@ -7,29 +7,13 @@ import path from "path";

// Update with your config settings. .
dotenv.config({
  path: path.join(__dirname, "../../../.env.migration")
  path: path.join(__dirname, "../../../.env.migration"),
  debug: true
});
dotenv.config({
  path: path.join(__dirname, "../../../.env")
});

export default {
  development: {
    client: "postgres",
    connection: {
      connectionString: process.env.DB_CONNECTION_URI,
      host: process.env.DB_HOST,
      port: process.env.DB_PORT,
      user: process.env.DB_USER,
      database: process.env.DB_NAME,
      password: process.env.DB_PASSWORD,
      ssl: process.env.DB_ROOT_CERT
        ? {
            rejectUnauthorized: true,
            ca: Buffer.from(process.env.DB_ROOT_CERT, "base64").toString("ascii")
          }
        : false
    },
    connection: process.env.DB_CONNECTION_URI,
    pool: {
      min: 2,
      max: 10
@ -43,20 +27,7 @@ export default {
    },
  production: {
    client: "postgres",
    connection: {
      connectionString: process.env.DB_CONNECTION_URI,
      host: process.env.DB_HOST,
      port: process.env.DB_PORT,
      user: process.env.DB_USER,
      database: process.env.DB_NAME,
      password: process.env.DB_PASSWORD,
      ssl: process.env.DB_ROOT_CERT
        ? {
            rejectUnauthorized: true,
            ca: Buffer.from(process.env.DB_ROOT_CERT, "base64").toString("ascii")
          }
        : false
    },
    connection: process.env.DB_CONNECTION_URI,
    pool: {
      min: 2,
      max: 10
@ -1,31 +0,0 @@
import { Knex } from "knex";

import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";

export async function up(knex: Knex): Promise<void> {
  if (!(await knex.schema.hasTable(TableName.ScimToken))) {
    await knex.schema.createTable(TableName.ScimToken, (t) => {
      t.string("id", 36).primary().defaultTo(knex.fn.uuid());
      t.bigInteger("ttlDays").defaultTo(365).notNullable();
      t.string("description").notNullable();
      t.uuid("orgId").notNullable();
      t.foreign("orgId").references("id").inTable(TableName.Organization).onDelete("CASCADE");
      t.timestamps(true, true, true);
    });
  }

  await knex.schema.alterTable(TableName.Organization, (t) => {
    t.boolean("scimEnabled").defaultTo(false);
  });

  await createOnUpdateTrigger(knex, TableName.ScimToken);
}

export async function down(knex: Knex): Promise<void> {
  await knex.schema.dropTableIfExists(TableName.ScimToken);
  await dropOnUpdateTrigger(knex, TableName.ScimToken);
  await knex.schema.alterTable(TableName.Organization, (t) => {
    t.dropColumn("scimEnabled");
  });
}
@ -1,39 +0,0 @@
import { Knex } from "knex";

import { ProjectVersion, TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  const hasGhostUserColumn = await knex.schema.hasColumn(TableName.Users, "isGhost");
  const hasProjectVersionColumn = await knex.schema.hasColumn(TableName.Project, "version");

  if (!hasGhostUserColumn) {
    await knex.schema.alterTable(TableName.Users, (t) => {
      t.boolean("isGhost").defaultTo(false).notNullable();
    });
  }

  if (!hasProjectVersionColumn) {
    await knex.schema.alterTable(TableName.Project, (t) => {
      t.integer("version").defaultTo(ProjectVersion.V1).notNullable();
      t.string("upgradeStatus").nullable();
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  const hasGhostUserColumn = await knex.schema.hasColumn(TableName.Users, "isGhost");
  const hasProjectVersionColumn = await knex.schema.hasColumn(TableName.Project, "version");

  if (hasGhostUserColumn) {
    await knex.schema.alterTable(TableName.Users, (t) => {
      t.dropColumn("isGhost");
    });
  }

  if (hasProjectVersionColumn) {
    await knex.schema.alterTable(TableName.Project, (t) => {
      t.dropColumn("version");
      t.dropColumn("upgradeStatus");
    });
  }
}
@ -1,20 +0,0 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  const isTablePresent = await knex.schema.hasTable(TableName.SuperAdmin);
  if (isTablePresent) {
    await knex.schema.alterTable(TableName.SuperAdmin, (t) => {
      t.string("allowedSignUpDomain");
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  if (await knex.schema.hasColumn(TableName.SuperAdmin, "allowedSignUpDomain")) {
    await knex.schema.alterTable(TableName.SuperAdmin, (t) => {
      t.dropColumn("allowedSignUpDomain");
    });
  }
}
@ -1,25 +0,0 @@
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-nocheck
import { Knex } from "knex";

import { TableName } from "../schemas";

const ADMIN_CONFIG_UUID = "00000000-0000-0000-0000-000000000000";

export async function up(knex: Knex): Promise<void> {
  await knex.schema.alterTable(TableName.SuperAdmin, (t) => {
    t.uuid("instanceId").notNullable().defaultTo(knex.fn.uuid());
  });

  const superUserConfigExists = await knex(TableName.SuperAdmin).where("id", ADMIN_CONFIG_UUID).first();
  if (!superUserConfigExists) {
    // eslint-disable-next-line
    await knex(TableName.SuperAdmin).update({ id: ADMIN_CONFIG_UUID }).whereNotNull("id").limit(1);
  }
}

export async function down(knex: Knex): Promise<void> {
  await knex.schema.alterTable(TableName.SuperAdmin, (t) => {
    t.dropColumn("instanceId");
  });
}
@ -1,15 +0,0 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  await knex.schema.alterTable(TableName.Integration, (t) => {
    t.datetime("lastUsed");
  });
}

export async function down(knex: Knex): Promise<void> {
  await knex.schema.alterTable(TableName.Integration, (t) => {
    t.dropColumn("lastUsed");
  });
}
@ -1,68 +0,0 @@
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { TableName } from "../schemas";
|
||||
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";
|
||||
|
||||
export async function up(knex: Knex): Promise<void> {
|
||||
if (!(await knex.schema.hasTable(TableName.LdapConfig))) {
|
||||
await knex.schema.createTable(TableName.LdapConfig, (t) => {
|
||||
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
|
||||
t.uuid("orgId").notNullable().unique();
|
||||
t.foreign("orgId").references("id").inTable(TableName.Organization).onDelete("CASCADE");
|
||||
t.boolean("isActive").notNullable();
|
||||
t.string("url").notNullable();
|
||||
t.string("encryptedBindDN").notNullable();
|
||||
t.string("bindDNIV").notNullable();
|
||||
t.string("bindDNTag").notNullable();
|
||||
t.string("encryptedBindPass").notNullable();
|
||||
t.string("bindPassIV").notNullable();
|
||||
t.string("bindPassTag").notNullable();
|
||||
t.string("searchBase").notNullable();
|
||||
t.text("encryptedCACert").notNullable();
|
||||
t.string("caCertIV").notNullable();
|
||||
t.string("caCertTag").notNullable();
|
||||
t.timestamps(true, true, true);
|
||||
});
|
||||
}
|
||||
|
||||
await createOnUpdateTrigger(knex, TableName.LdapConfig);
|
||||
|
||||
if (!(await knex.schema.hasTable(TableName.UserAliases))) {
|
||||
await knex.schema.createTable(TableName.UserAliases, (t) => {
|
||||
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
|
||||
t.uuid("userId").notNullable();
|
||||
t.foreign("userId").references("id").inTable(TableName.Users).onDelete("CASCADE");
|
||||
t.string("username").notNullable();
|
||||
t.string("aliasType").notNullable();
|
||||
t.string("externalId").notNullable();
|
||||
t.specificType("emails", "text[]");
|
||||
t.uuid("orgId").nullable();
|
||||
t.foreign("orgId").references("id").inTable(TableName.Organization).onDelete("CASCADE");
|
||||
t.timestamps(true, true, true);
|
||||
});
|
||||
}
|
||||
|
||||
await createOnUpdateTrigger(knex, TableName.UserAliases);
|
||||
|
||||
await knex.schema.alterTable(TableName.Users, (t) => {
|
||||
t.string("username").unique();
|
||||
t.string("email").nullable().alter();
|
||||
t.dropUnique(["email"]);
|
||||
});
|
||||
|
||||
await knex(TableName.Users).update("username", knex.ref("email"));
|
||||
|
||||
await knex.schema.alterTable(TableName.Users, (t) => {
|
||||
t.string("username").notNullable().alter();
|
||||
});
|
||||
}
|
||||
|
||||
export async function down(knex: Knex): Promise<void> {
|
||||
await knex.schema.dropTableIfExists(TableName.LdapConfig);
|
||||
await knex.schema.dropTableIfExists(TableName.UserAliases);
|
||||
await knex.schema.alterTable(TableName.Users, (t) => {
|
||||
t.dropColumn("username");
|
||||
// t.string("email").notNullable().alter();
|
||||
});
|
||||
await dropOnUpdateTrigger(knex, TableName.LdapConfig);
|
||||
}
|
@ -1,50 +0,0 @@
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { TableName } from "../schemas";
|
||||
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";
|
||||
|
||||
export async function up(knex: Knex): Promise<void> {
|
||||
const doesTableExist = await knex.schema.hasTable(TableName.ProjectUserMembershipRole);
|
||||
if (!doesTableExist) {
|
||||
await knex.schema.createTable(TableName.ProjectUserMembershipRole, (t) => {
|
||||
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
|
||||
t.string("role").notNullable();
|
||||
t.uuid("projectMembershipId").notNullable();
|
||||
t.foreign("projectMembershipId").references("id").inTable(TableName.ProjectMembership).onDelete("CASCADE");
|
||||
// until role is changed/removed the role should not deleted
|
||||
t.uuid("customRoleId");
|
||||
t.foreign("customRoleId").references("id").inTable(TableName.ProjectRoles);
|
||||
t.boolean("isTemporary").notNullable().defaultTo(false);
|
||||
t.string("temporaryMode");
|
||||
t.string("temporaryRange"); // could be cron or relative time like 1H or 1minute etc
|
||||
t.datetime("temporaryAccessStartTime");
|
||||
t.datetime("temporaryAccessEndTime");
|
||||
t.timestamps(true, true, true);
|
||||
});
|
||||
}
|
||||
|
||||
await createOnUpdateTrigger(knex, TableName.ProjectUserMembershipRole);
|
||||
|
||||
const projectMemberships = await knex(TableName.ProjectMembership).select(
|
||||
"id",
|
||||
"role",
|
||||
"createdAt",
|
||||
"updatedAt",
|
||||
knex.ref("roleId").withSchema(TableName.ProjectMembership).as("customRoleId")
|
||||
);
|
||||
if (projectMemberships.length)
|
||||
await knex.batchInsert(
|
||||
TableName.ProjectUserMembershipRole,
|
||||
projectMemberships.map((data) => ({ ...data, projectMembershipId: data.id }))
|
||||
);
|
||||
// will be dropped later
|
||||
// await knex.schema.alterTable(TableName.ProjectMembership, (t) => {
|
||||
// t.dropColumn("roleId");
|
||||
// t.dropColumn("role");
|
||||
// });
|
||||
}
|
||||
|
||||
export async function down(knex: Knex): Promise<void> {
|
||||
await knex.schema.dropTableIfExists(TableName.ProjectUserMembershipRole);
|
||||
await dropOnUpdateTrigger(knex, TableName.ProjectUserMembershipRole);
|
||||
}
|
@ -1,52 +0,0 @@
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { TableName } from "../schemas";
|
||||
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";
|
||||
|
||||
export async function up(knex: Knex): Promise<void> {
|
||||
const doesTableExist = await knex.schema.hasTable(TableName.IdentityProjectMembershipRole);
|
||||
if (!doesTableExist) {
|
||||
await knex.schema.createTable(TableName.IdentityProjectMembershipRole, (t) => {
|
||||
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
|
||||
t.string("role").notNullable();
|
||||
t.uuid("projectMembershipId").notNullable();
|
||||
t.foreign("projectMembershipId")
|
||||
.references("id")
|
||||
.inTable(TableName.IdentityProjectMembership)
|
||||
.onDelete("CASCADE");
|
||||
// until role is changed/removed the role should not deleted
|
||||
t.uuid("customRoleId");
|
||||
t.foreign("customRoleId").references("id").inTable(TableName.ProjectRoles);
|
||||
t.boolean("isTemporary").notNullable().defaultTo(false);
|
||||
t.string("temporaryMode");
|
||||
t.string("temporaryRange"); // could be cron or relative time like 1H or 1minute etc
|
||||
t.datetime("temporaryAccessStartTime");
|
||||
t.datetime("temporaryAccessEndTime");
|
||||
t.timestamps(true, true, true);
|
||||
});
|
||||
}
|
||||
|
||||
await createOnUpdateTrigger(knex, TableName.IdentityProjectMembershipRole);
|
||||
|
||||
const identityMemberships = await knex(TableName.IdentityProjectMembership).select(
|
||||
"id",
|
||||
"role",
|
||||
"createdAt",
|
||||
"updatedAt",
|
||||
knex.ref("roleId").withSchema(TableName.IdentityProjectMembership).as("customRoleId")
|
||||
);
|
||||
if (identityMemberships.length)
|
||||
await knex.batchInsert(
|
||||
TableName.IdentityProjectMembershipRole,
|
||||
identityMemberships.map((data) => ({ ...data, projectMembershipId: data.id }))
|
||||
);
|
||||
// await knex.schema.alterTable(TableName.IdentityProjectMembership, (t) => {
|
||||
// t.dropColumn("roleId");
|
||||
// t.dropColumn("role");
|
||||
// });
|
||||
}
|
||||
|
||||
export async function down(knex: Knex): Promise<void> {
|
||||
await knex.schema.dropTableIfExists(TableName.IdentityProjectMembershipRole);
|
||||
await dropOnUpdateTrigger(knex, TableName.IdentityProjectMembershipRole);
|
||||
}
|
@ -1,58 +0,0 @@
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { SecretEncryptionAlgo, SecretKeyEncoding, TableName } from "../schemas";
|
||||
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";
|
||||
|
||||
export async function up(knex: Knex): Promise<void> {
|
||||
const doesTableExist = await knex.schema.hasTable(TableName.DynamicSecret);
|
||||
if (!doesTableExist) {
|
||||
await knex.schema.createTable(TableName.DynamicSecret, (t) => {
|
||||
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
|
||||
t.string("name").notNullable();
|
||||
t.integer("version").notNullable();
|
||||
t.string("type").notNullable();
|
||||
t.string("defaultTTL").notNullable();
|
||||
t.string("maxTTL");
|
||||
t.string("inputIV").notNullable();
|
||||
t.text("inputCiphertext").notNullable();
|
||||
t.string("inputTag").notNullable();
|
||||
t.string("algorithm").notNullable().defaultTo(SecretEncryptionAlgo.AES_256_GCM);
|
||||
t.string("keyEncoding").notNullable().defaultTo(SecretKeyEncoding.UTF8);
|
||||
t.uuid("folderId").notNullable();
|
||||
// for background process communication
|
||||
t.string("status");
|
||||
t.string("statusDetails");
|
||||
t.foreign("folderId").references("id").inTable(TableName.SecretFolder).onDelete("CASCADE");
|
||||
t.unique(["name", "folderId"]);
|
||||
t.timestamps(true, true, true);
|
||||
});
|
||||
}
|
||||
|
||||
await createOnUpdateTrigger(knex, TableName.DynamicSecret);
|
||||
|
||||
const doesTableDynamicSecretLease = await knex.schema.hasTable(TableName.DynamicSecretLease);
|
||||
if (!doesTableDynamicSecretLease) {
|
||||
await knex.schema.createTable(TableName.DynamicSecretLease, (t) => {
|
||||
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
|
||||
t.integer("version").notNullable();
|
||||
t.string("externalEntityId").notNullable();
|
||||
t.datetime("expireAt").notNullable();
|
||||
// for background process communication
|
||||
t.string("status");
|
||||
t.string("statusDetails");
|
||||
t.uuid("dynamicSecretId").notNullable();
|
||||
t.foreign("dynamicSecretId").references("id").inTable(TableName.DynamicSecret).onDelete("CASCADE");
|
||||
t.timestamps(true, true, true);
|
||||
});
|
||||
}
|
||||
|
||||
await createOnUpdateTrigger(knex, TableName.DynamicSecretLease);
|
||||
}
|
||||
|
||||
export async function down(knex: Knex): Promise<void> {
|
||||
await dropOnUpdateTrigger(knex, TableName.DynamicSecretLease);
|
||||
await knex.schema.dropTableIfExists(TableName.DynamicSecretLease);
|
||||
|
||||
await dropOnUpdateTrigger(knex, TableName.DynamicSecret);
|
||||
await knex.schema.dropTableIfExists(TableName.DynamicSecret);
|
||||
}
|
@ -1,29 +0,0 @@
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { TableName } from "../schemas";
|
||||
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";
|
||||
|
||||
export async function up(knex: Knex): Promise<void> {
|
||||
if (!(await knex.schema.hasTable(TableName.ProjectUserAdditionalPrivilege))) {
|
||||
await knex.schema.createTable(TableName.ProjectUserAdditionalPrivilege, (t) => {
|
||||
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
|
||||
t.string("slug", 60).notNullable();
|
||||
t.uuid("projectMembershipId").notNullable();
|
||||
t.foreign("projectMembershipId").references("id").inTable(TableName.ProjectMembership).onDelete("CASCADE");
|
||||
t.boolean("isTemporary").notNullable().defaultTo(false);
|
||||
t.string("temporaryMode");
|
||||
t.string("temporaryRange"); // could be cron or relative time like 1H or 1minute etc
|
||||
t.datetime("temporaryAccessStartTime");
|
||||
t.datetime("temporaryAccessEndTime");
|
||||
t.jsonb("permissions").notNullable();
|
||||
t.timestamps(true, true, true);
|
||||
});
|
||||
}
|
||||
|
||||
await createOnUpdateTrigger(knex, TableName.ProjectUserAdditionalPrivilege);
|
||||
}
|
||||
|
||||
export async function down(knex: Knex): Promise<void> {
|
||||
await dropOnUpdateTrigger(knex, TableName.ProjectUserAdditionalPrivilege);
|
||||
await knex.schema.dropTableIfExists(TableName.ProjectUserAdditionalPrivilege);
|
||||
}
|
@ -1,32 +0,0 @@
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { TableName } from "../schemas";
|
||||
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";
|
||||
|
||||
export async function up(knex: Knex): Promise<void> {
|
||||
if (!(await knex.schema.hasTable(TableName.IdentityProjectAdditionalPrivilege))) {
|
||||
await knex.schema.createTable(TableName.IdentityProjectAdditionalPrivilege, (t) => {
|
||||
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
|
||||
t.string("slug", 60).notNullable();
|
||||
t.uuid("projectMembershipId").notNullable();
|
||||
t.foreign("projectMembershipId")
|
||||
.references("id")
|
||||
.inTable(TableName.IdentityProjectMembership)
|
||||
.onDelete("CASCADE");
|
||||
t.boolean("isTemporary").notNullable().defaultTo(false);
|
||||
t.string("temporaryMode");
|
||||
t.string("temporaryRange"); // could be cron or relative time like 1H or 1minute etc
|
||||
t.datetime("temporaryAccessStartTime");
|
||||
t.datetime("temporaryAccessEndTime");
|
||||
t.jsonb("permissions").notNullable();
|
||||
t.timestamps(true, true, true);
|
||||
});
|
||||
}
|
||||
|
||||
await createOnUpdateTrigger(knex, TableName.IdentityProjectAdditionalPrivilege);
|
||||
}
|
||||
|
||||
export async function down(knex: Knex): Promise<void> {
|
||||
await dropOnUpdateTrigger(knex, TableName.IdentityProjectAdditionalPrivilege);
|
||||
await knex.schema.dropTableIfExists(TableName.IdentityProjectAdditionalPrivilege);
|
||||
}
|
@ -1,112 +0,0 @@
|
||||
import { Knex } from "knex";
|
||||
import { z } from "zod";
|
||||
|
||||
import { TableName, TOrgMemberships } from "../schemas";
|
||||
|
||||
const validateOrgMembership = (membershipToValidate: TOrgMemberships, firstMembership: TOrgMemberships) => {
|
||||
const firstOrgId = firstMembership.orgId;
|
||||
const firstUserId = firstMembership.userId;
|
||||
|
||||
if (membershipToValidate.id === firstMembership.id) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (membershipToValidate.inviteEmail !== firstMembership.inviteEmail) {
|
||||
throw new Error(`Invite emails are different for the same userId and orgId: ${firstUserId}, ${firstOrgId}`);
|
||||
}
|
||||
if (membershipToValidate.orgId !== firstMembership.orgId) {
|
||||
throw new Error(`OrgIds are different for the same userId and orgId: ${firstUserId}, ${firstOrgId}`);
|
||||
}
|
||||
if (membershipToValidate.role !== firstMembership.role) {
|
||||
throw new Error(`Roles are different for the same userId and orgId: ${firstUserId}, ${firstOrgId}`);
|
||||
}
|
||||
if (membershipToValidate.roleId !== firstMembership.roleId) {
|
||||
throw new Error(`RoleIds are different for the same userId and orgId: ${firstUserId}, ${firstOrgId}`);
|
||||
}
|
||||
if (membershipToValidate.status !== firstMembership.status) {
|
||||
throw new Error(`Statuses are different for the same userId and orgId: ${firstUserId}, ${firstOrgId}`);
|
||||
}
|
||||
if (membershipToValidate.userId !== firstMembership.userId) {
|
||||
throw new Error(`UserIds are different for the same userId and orgId: ${firstUserId}, ${firstOrgId}`);
|
||||
}
|
||||
};
|
||||
|
||||
export async function up(knex: Knex): Promise<void> {
|
||||
const RowSchema = z.object({
|
||||
userId: z.string(),
|
||||
orgId: z.string(),
|
||||
cnt: z.string()
|
||||
});
|
||||
|
||||
// Transactional find and delete duplicate rows
|
||||
await knex.transaction(async (tx) => {
|
||||
const duplicateRows = await tx(TableName.OrgMembership)
|
||||
.select("userId", "orgId") // Select the userId and orgId so we can group by them
|
||||
.whereNotNull("userId") // Ensure that the userId is not null
|
||||
.count("* as cnt") // Count the number of rows for each userId and orgId, so we can make sure there are more than 1 row (a duplicate)
|
||||
.groupBy("userId", "orgId")
|
||||
.havingRaw("count(*) > ?", [1]); // Using havingRaw for direct SQL expressions
|
||||
|
||||
// Parse the rows to ensure they are in the correct format, and for type safety
|
||||
const parsedRows = RowSchema.array().parse(duplicateRows);
|
||||
|
||||
// For each of the duplicate rows, loop through and find the actual memberships to delete
|
||||
for (const row of parsedRows) {
|
||||
const count = Number(row.cnt);
|
||||
|
||||
// An extra check to ensure that the count is actually a number, and the number is greater than 2
|
||||
if (typeof count !== "number" || count < 2) {
|
||||
// eslint-disable-next-line no-continue
|
||||
continue;
|
||||
}
|
||||
|
||||
// Find all the organization memberships that have the same userId and orgId
|
||||
// eslint-disable-next-line no-await-in-loop
|
||||
const rowsToDelete = await tx(TableName.OrgMembership).where({
|
||||
userId: row.userId,
|
||||
orgId: row.orgId
|
||||
});
|
||||
|
||||
// Ensure that all the rows have exactly the same value, except id, createdAt, updatedAt
|
||||
for (const rowToDelete of rowsToDelete) {
|
||||
validateOrgMembership(rowToDelete, rowsToDelete[0]);
|
||||
}
|
||||
|
||||
// Find the row with the latest createdAt, which we will keep
|
||||
|
||||
let lowestCreatedAt: number | null = null;
|
||||
let latestCreatedRow: TOrgMemberships | null = null;
|
||||
|
||||
for (const rowToDelete of rowsToDelete) {
|
||||
if (lowestCreatedAt === null || rowToDelete.createdAt.getTime() < lowestCreatedAt) {
|
||||
lowestCreatedAt = rowToDelete.createdAt.getTime();
|
||||
latestCreatedRow = rowToDelete;
|
||||
}
|
||||
}
|
||||
if (!latestCreatedRow) {
|
||||
throw new Error("Failed to find last created membership");
|
||||
}
|
||||
|
||||
// Filter out the latest row from the rows to delete
|
||||
const membershipIdsToDelete = rowsToDelete.map((r) => r.id).filter((id) => id !== latestCreatedRow!.id);
|
||||
|
||||
// eslint-disable-next-line no-await-in-loop
|
||||
const numberOfRowsDeleted = await tx(TableName.OrgMembership).whereIn("id", membershipIdsToDelete).delete();
|
||||
|
||||
// eslint-disable-next-line no-console
|
||||
console.log(
|
||||
`Deleted ${numberOfRowsDeleted} duplicate organization memberships for ${row.userId} and ${row.orgId}`
|
||||
);
|
||||
}
|
||||
});
|
||||
|
||||
await knex.schema.alterTable(TableName.OrgMembership, (table) => {
|
||||
table.unique(["userId", "orgId"]);
|
||||
});
|
||||
}
|
||||
|
||||
export async function down(knex: Knex): Promise<void> {
|
||||
await knex.schema.alterTable(TableName.OrgMembership, (table) => {
|
||||
table.dropUnique(["userId", "orgId"]);
|
||||
});
|
||||
}
|
@ -1,82 +0,0 @@
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { TableName } from "../schemas";
|
||||
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";
|
||||
|
||||
export async function up(knex: Knex): Promise<void> {
|
||||
if (!(await knex.schema.hasTable(TableName.Groups))) {
|
||||
await knex.schema.createTable(TableName.Groups, (t) => {
|
||||
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
|
||||
t.uuid("orgId").notNullable();
|
||||
t.foreign("orgId").references("id").inTable(TableName.Organization).onDelete("CASCADE");
|
||||
t.string("name").notNullable();
|
||||
t.string("slug").notNullable();
|
||||
t.unique(["orgId", "slug"]);
|
||||
t.string("role").notNullable();
|
||||
t.uuid("roleId");
|
||||
t.foreign("roleId").references("id").inTable(TableName.OrgRoles);
|
||||
t.timestamps(true, true, true);
|
||||
});
|
||||
}
|
||||
|
||||
await createOnUpdateTrigger(knex, TableName.Groups);
|
||||
|
||||
if (!(await knex.schema.hasTable(TableName.UserGroupMembership))) {
|
||||
await knex.schema.createTable(TableName.UserGroupMembership, (t) => {
|
||||
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid()); // link to user and link to groups cascade on groups
|
||||
t.uuid("userId").notNullable();
|
||||
t.foreign("userId").references("id").inTable(TableName.Users).onDelete("CASCADE");
|
||||
t.uuid("groupId").notNullable();
|
||||
t.foreign("groupId").references("id").inTable(TableName.Groups).onDelete("CASCADE");
|
||||
t.timestamps(true, true, true);
|
||||
});
|
||||
}
|
||||
|
||||
await createOnUpdateTrigger(knex, TableName.UserGroupMembership);
|
||||
|
||||
if (!(await knex.schema.hasTable(TableName.GroupProjectMembership))) {
|
||||
await knex.schema.createTable(TableName.GroupProjectMembership, (t) => {
|
||||
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
|
||||
t.string("projectId").notNullable();
|
||||
t.foreign("projectId").references("id").inTable(TableName.Project).onDelete("CASCADE");
|
||||
t.uuid("groupId").notNullable();
|
||||
t.foreign("groupId").references("id").inTable(TableName.Groups).onDelete("CASCADE");
|
||||
t.timestamps(true, true, true);
|
||||
});
|
||||
}
|
||||
await createOnUpdateTrigger(knex, TableName.GroupProjectMembership);
|
||||
|
||||
if (!(await knex.schema.hasTable(TableName.GroupProjectMembershipRole))) {
|
||||
await knex.schema.createTable(TableName.GroupProjectMembershipRole, (t) => {
|
||||
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
|
||||
t.string("role").notNullable();
|
||||
t.uuid("projectMembershipId").notNullable();
|
||||
t.foreign("projectMembershipId").references("id").inTable(TableName.GroupProjectMembership).onDelete("CASCADE");
|
||||
// until role is changed/removed the role should not deleted
|
||||
t.uuid("customRoleId");
|
||||
t.foreign("customRoleId").references("id").inTable(TableName.ProjectRoles);
|
||||
t.boolean("isTemporary").notNullable().defaultTo(false);
|
||||
t.string("temporaryMode");
|
||||
t.string("temporaryRange"); // could be cron or relative time like 1H or 1minute etc
|
||||
t.datetime("temporaryAccessStartTime");
|
||||
t.datetime("temporaryAccessEndTime");
|
||||
t.timestamps(true, true, true);
|
||||
});
|
||||
}
|
||||
|
||||
await createOnUpdateTrigger(knex, TableName.GroupProjectMembershipRole);
|
||||
}
|
||||
|
||||
export async function down(knex: Knex): Promise<void> {
|
||||
await knex.schema.dropTableIfExists(TableName.GroupProjectMembershipRole);
|
||||
await dropOnUpdateTrigger(knex, TableName.GroupProjectMembershipRole);
|
||||
|
||||
await knex.schema.dropTableIfExists(TableName.UserGroupMembership);
|
||||
await dropOnUpdateTrigger(knex, TableName.UserGroupMembership);
|
||||
|
||||
await knex.schema.dropTableIfExists(TableName.GroupProjectMembership);
|
||||
await dropOnUpdateTrigger(knex, TableName.GroupProjectMembership);
|
||||
|
||||
await knex.schema.dropTableIfExists(TableName.Groups);
|
||||
await dropOnUpdateTrigger(knex, TableName.Groups);
|
||||
}
|
@ -1,47 +0,0 @@
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { ProjectMembershipRole, TableName } from "../schemas";
|
||||
|
||||
export async function up(knex: Knex): Promise<void> {
|
||||
const doesProjectRoleFieldExist = await knex.schema.hasColumn(TableName.ProjectMembership, "role");
|
||||
const doesProjectRoleIdFieldExist = await knex.schema.hasColumn(TableName.ProjectMembership, "roleId");
|
||||
await knex.schema.alterTable(TableName.ProjectMembership, (t) => {
|
||||
if (doesProjectRoleFieldExist) t.dropColumn("roleId");
|
||||
if (doesProjectRoleIdFieldExist) t.dropColumn("role");
|
||||
});
|
||||
|
||||
const doesIdentityProjectRoleFieldExist = await knex.schema.hasColumn(TableName.IdentityProjectMembership, "role");
|
||||
const doesIdentityProjectRoleIdFieldExist = await knex.schema.hasColumn(
|
||||
TableName.IdentityProjectMembership,
|
||||
"roleId"
|
||||
);
|
||||
await knex.schema.alterTable(TableName.IdentityProjectMembership, (t) => {
|
||||
if (doesIdentityProjectRoleFieldExist) t.dropColumn("roleId");
|
||||
if (doesIdentityProjectRoleIdFieldExist) t.dropColumn("role");
|
||||
});
|
||||
}
|
||||
|
||||
export async function down(knex: Knex): Promise<void> {
|
||||
const doesProjectRoleFieldExist = await knex.schema.hasColumn(TableName.ProjectMembership, "role");
|
||||
const doesProjectRoleIdFieldExist = await knex.schema.hasColumn(TableName.ProjectMembership, "roleId");
|
||||
await knex.schema.alterTable(TableName.ProjectMembership, (t) => {
|
||||
if (!doesProjectRoleFieldExist) t.string("role").defaultTo(ProjectMembershipRole.Member);
|
||||
if (!doesProjectRoleIdFieldExist) {
|
||||
t.uuid("roleId");
|
||||
t.foreign("roleId").references("id").inTable(TableName.ProjectRoles);
|
||||
}
|
||||
});
|
||||
|
||||
const doesIdentityProjectRoleFieldExist = await knex.schema.hasColumn(TableName.IdentityProjectMembership, "role");
|
||||
const doesIdentityProjectRoleIdFieldExist = await knex.schema.hasColumn(
|
||||
TableName.IdentityProjectMembership,
|
||||
"roleId"
|
||||
);
|
||||
await knex.schema.alterTable(TableName.IdentityProjectMembership, (t) => {
|
||||
if (!doesIdentityProjectRoleFieldExist) t.string("role").defaultTo(ProjectMembershipRole.Member);
|
||||
if (!doesIdentityProjectRoleIdFieldExist) {
|
||||
t.uuid("roleId");
|
||||
t.foreign("roleId").references("id").inTable(TableName.ProjectRoles);
|
||||
}
|
||||
});
|
||||
}
|
@ -1,15 +0,0 @@
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { TableName } from "../schemas";
|
||||
|
||||
export async function up(knex: Knex): Promise<void> {
|
||||
await knex.schema.alterTable(TableName.UserGroupMembership, (t) => {
|
||||
t.boolean("isPending").notNullable().defaultTo(false);
|
||||
});
|
||||
}
|
||||
|
||||
export async function down(knex: Knex): Promise<void> {
|
||||
await knex.schema.alterTable(TableName.UserGroupMembership, (t) => {
|
||||
t.dropColumn("isPending");
|
||||
});
|
||||
}
|
@ -1,34 +0,0 @@
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { TableName } from "../schemas";
|
||||
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";
|
||||
|
||||
export async function up(knex: Knex): Promise<void> {
|
||||
if (!(await knex.schema.hasTable(TableName.LdapGroupMap))) {
|
||||
await knex.schema.createTable(TableName.LdapGroupMap, (t) => {
|
||||
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
|
||||
t.uuid("ldapConfigId").notNullable();
|
||||
t.foreign("ldapConfigId").references("id").inTable(TableName.LdapConfig).onDelete("CASCADE");
|
||||
t.string("ldapGroupCN").notNullable();
|
||||
t.uuid("groupId").notNullable();
|
||||
t.foreign("groupId").references("id").inTable(TableName.Groups).onDelete("CASCADE");
|
||||
t.unique(["ldapGroupCN", "groupId", "ldapConfigId"]);
|
||||
});
|
||||
}
|
||||
|
||||
await createOnUpdateTrigger(knex, TableName.LdapGroupMap);
|
||||
|
||||
await knex.schema.alterTable(TableName.LdapConfig, (t) => {
|
||||
t.string("groupSearchBase").notNullable().defaultTo("");
|
||||
t.string("groupSearchFilter").notNullable().defaultTo("");
|
||||
});
|
||||
}
|
||||
|
||||
export async function down(knex: Knex): Promise<void> {
|
||||
await knex.schema.dropTableIfExists(TableName.LdapGroupMap);
|
||||
await dropOnUpdateTrigger(knex, TableName.LdapGroupMap);
|
||||
await knex.schema.alterTable(TableName.LdapConfig, (t) => {
|
||||
t.dropColumn("groupSearchBase");
|
||||
t.dropColumn("groupSearchFilter");
|
||||
});
|
||||
}
|
@ -1,15 +0,0 @@
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { TableName } from "../schemas";
|
||||
|
||||
export async function up(knex: Knex): Promise<void> {
|
||||
await knex.schema.alterTable(TableName.LdapConfig, (t) => {
|
||||
t.string("searchFilter").notNullable().defaultTo("");
|
||||
});
|
||||
}
|
||||
|
||||
export async function down(knex: Knex): Promise<void> {
|
||||
await knex.schema.alterTable(TableName.LdapConfig, (t) => {
|
||||
t.dropColumn("searchFilter");
|
||||
});
|
||||
}
|
@ -1,28 +0,0 @@
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { TableName } from "../schemas";
|
||||
|
||||
export async function up(knex: Knex): Promise<void> {
|
||||
const doesOrgIdExist = await knex.schema.hasColumn(TableName.AuditLog, "orgId");
|
||||
const doesProjectIdExist = await knex.schema.hasColumn(TableName.AuditLog, "projectId");
|
||||
const doesCreatedAtExist = await knex.schema.hasColumn(TableName.AuditLog, "createdAt");
|
||||
if (await knex.schema.hasTable(TableName.AuditLog)) {
|
||||
await knex.schema.alterTable(TableName.AuditLog, (t) => {
|
||||
if (doesProjectIdExist && doesCreatedAtExist) t.index(["projectId", "createdAt"]);
|
||||
if (doesOrgIdExist && doesCreatedAtExist) t.index(["orgId", "createdAt"]);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
export async function down(knex: Knex): Promise<void> {
|
||||
const doesOrgIdExist = await knex.schema.hasColumn(TableName.AuditLog, "orgId");
|
||||
const doesProjectIdExist = await knex.schema.hasColumn(TableName.AuditLog, "projectId");
|
||||
const doesCreatedAtExist = await knex.schema.hasColumn(TableName.AuditLog, "createdAt");
|
||||
|
||||
if (await knex.schema.hasTable(TableName.AuditLog)) {
|
||||
await knex.schema.alterTable(TableName.AuditLog, (t) => {
|
||||
if (doesProjectIdExist && doesCreatedAtExist) t.dropIndex(["projectId", "createdAt"]);
|
||||
if (doesOrgIdExist && doesCreatedAtExist) t.dropIndex(["orgId", "createdAt"]);
|
||||
});
|
||||
}
|
||||
}
|
@ -1,28 +0,0 @@
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { TableName } from "../schemas";
|
||||
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";
|
||||
|
||||
export async function up(knex: Knex): Promise<void> {
|
||||
if (!(await knex.schema.hasTable(TableName.AuditLogStream))) {
|
||||
await knex.schema.createTable(TableName.AuditLogStream, (t) => {
|
||||
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
|
||||
t.string("url").notNullable();
|
||||
t.text("encryptedHeadersCiphertext");
|
||||
t.text("encryptedHeadersIV");
|
||||
t.text("encryptedHeadersTag");
|
||||
t.string("encryptedHeadersAlgorithm");
|
||||
t.string("encryptedHeadersKeyEncoding");
|
||||
t.uuid("orgId").notNullable();
|
||||
t.foreign("orgId").references("id").inTable(TableName.Organization).onDelete("CASCADE");
|
||||
t.timestamps(true, true, true);
|
||||
});
|
||||
}
|
||||
|
||||
await createOnUpdateTrigger(knex, TableName.AuditLogStream);
|
||||
}
|
||||
|
||||
export async function down(knex: Knex): Promise<void> {
|
||||
await dropOnUpdateTrigger(knex, TableName.AuditLogStream);
|
||||
await knex.schema.dropTableIfExists(TableName.AuditLogStream);
|
||||
}
|
@ -1,54 +0,0 @@
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { TableName } from "../schemas";
|
||||
|
||||
export async function up(knex: Knex): Promise<void> {
|
||||
const isUsersTablePresent = await knex.schema.hasTable(TableName.Users);
|
||||
if (isUsersTablePresent) {
|
||||
const hasIsEmailVerifiedColumn = await knex.schema.hasColumn(TableName.Users, "isEmailVerified");
|
||||
|
||||
if (!hasIsEmailVerifiedColumn) {
|
||||
await knex.schema.alterTable(TableName.Users, (t) => {
|
||||
t.boolean("isEmailVerified").defaultTo(false);
|
||||
});
|
||||
}
|
||||
|
||||
// Backfilling the isEmailVerified to true where isAccepted is true
|
||||
await knex(TableName.Users).update({ isEmailVerified: true }).where("isAccepted", true);
|
||||
}
|
||||
|
||||
const isUserAliasTablePresent = await knex.schema.hasTable(TableName.UserAliases);
|
||||
if (isUserAliasTablePresent) {
|
||||
await knex.schema.alterTable(TableName.UserAliases, (t) => {
|
||||
t.string("username").nullable().alter();
|
||||
});
|
||||
}
|
||||
|
||||
const isSuperAdminTablePresent = await knex.schema.hasTable(TableName.SuperAdmin);
|
||||
if (isSuperAdminTablePresent) {
|
||||
await knex.schema.alterTable(TableName.SuperAdmin, (t) => {
|
||||
t.boolean("trustSamlEmails").defaultTo(false);
|
||||
t.boolean("trustLdapEmails").defaultTo(false);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
export async function down(knex: Knex): Promise<void> {
|
||||
if (await knex.schema.hasColumn(TableName.Users, "isEmailVerified")) {
|
||||
await knex.schema.alterTable(TableName.Users, (t) => {
|
||||
t.dropColumn("isEmailVerified");
|
||||
});
|
||||
}
|
||||
|
||||
if (await knex.schema.hasColumn(TableName.SuperAdmin, "trustSamlEmails")) {
|
||||
await knex.schema.alterTable(TableName.SuperAdmin, (t) => {
|
||||
t.dropColumn("trustSamlEmails");
|
||||
});
|
||||
}
|
||||
|
||||
if (await knex.schema.hasColumn(TableName.SuperAdmin, "trustLdapEmails")) {
|
||||
await knex.schema.alterTable(TableName.SuperAdmin, (t) => {
|
||||
t.dropColumn("trustLdapEmails");
|
||||
});
|
||||
}
|
||||
}
|
@ -1,41 +0,0 @@
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { TableName } from "../schemas";
|
||||
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";
|
||||
|
||||
export async function up(knex: Knex): Promise<void> {
|
||||
if (!(await knex.schema.hasTable(TableName.AccessApprovalPolicy))) {
|
||||
await knex.schema.createTable(TableName.AccessApprovalPolicy, (t) => {
|
||||
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
|
||||
t.string("name").notNullable();
|
||||
t.integer("approvals").defaultTo(1).notNullable();
|
||||
t.string("secretPath");
|
||||
|
||||
t.uuid("envId").notNullable();
|
||||
t.foreign("envId").references("id").inTable(TableName.Environment).onDelete("CASCADE");
|
||||
t.timestamps(true, true, true);
|
||||
});
|
||||
await createOnUpdateTrigger(knex, TableName.AccessApprovalPolicy);
|
||||
}
|
||||
|
||||
if (!(await knex.schema.hasTable(TableName.AccessApprovalPolicyApprover))) {
|
||||
await knex.schema.createTable(TableName.AccessApprovalPolicyApprover, (t) => {
|
||||
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
|
||||
t.uuid("approverId").notNullable();
|
||||
t.foreign("approverId").references("id").inTable(TableName.ProjectMembership).onDelete("CASCADE");
|
||||
|
||||
t.uuid("policyId").notNullable();
|
||||
t.foreign("policyId").references("id").inTable(TableName.AccessApprovalPolicy).onDelete("CASCADE");
|
||||
t.timestamps(true, true, true);
|
||||
});
|
||||
await createOnUpdateTrigger(knex, TableName.AccessApprovalPolicyApprover);
|
||||
}
|
||||
}
|
||||
|
||||
export async function down(knex: Knex): Promise<void> {
|
||||
await knex.schema.dropTableIfExists(TableName.AccessApprovalPolicyApprover);
|
||||
await knex.schema.dropTableIfExists(TableName.AccessApprovalPolicy);
|
||||
|
||||
await dropOnUpdateTrigger(knex, TableName.AccessApprovalPolicyApprover);
|
||||
await dropOnUpdateTrigger(knex, TableName.AccessApprovalPolicy);
|
||||
}
|
@ -1,51 +0,0 @@
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { TableName } from "../schemas";
|
||||
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";
|
||||
|
||||
export async function up(knex: Knex): Promise<void> {
|
||||
if (!(await knex.schema.hasTable(TableName.AccessApprovalRequest))) {
|
||||
await knex.schema.createTable(TableName.AccessApprovalRequest, (t) => {
|
||||
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
|
||||
|
||||
t.uuid("policyId").notNullable();
|
||||
t.foreign("policyId").references("id").inTable(TableName.AccessApprovalPolicy).onDelete("CASCADE");
|
||||
|
||||
t.uuid("privilegeId").nullable();
|
||||
t.foreign("privilegeId").references("id").inTable(TableName.ProjectUserAdditionalPrivilege).onDelete("CASCADE");
|
||||
|
||||
t.uuid("requestedBy").notNullable();
|
||||
t.foreign("requestedBy").references("id").inTable(TableName.ProjectMembership).onDelete("CASCADE");
|
||||
|
||||
// We use these values to create the actual privilege at a later point in time.
|
||||
t.boolean("isTemporary").notNullable();
|
||||
t.string("temporaryRange").nullable();
|
||||
|
||||
t.jsonb("permissions").notNullable();
|
||||
|
||||
t.timestamps(true, true, true);
|
||||
});
|
||||
}
|
||||
await createOnUpdateTrigger(knex, TableName.AccessApprovalRequest);
|
||||
|
||||
if (!(await knex.schema.hasTable(TableName.AccessApprovalRequestReviewer))) {
|
||||
await knex.schema.createTable(TableName.AccessApprovalRequestReviewer, (t) => {
|
||||
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
|
||||
t.uuid("member").notNullable();
|
||||
t.foreign("member").references("id").inTable(TableName.ProjectMembership).onDelete("CASCADE");
|
||||
t.string("status").notNullable();
|
||||
t.uuid("requestId").notNullable();
|
||||
t.foreign("requestId").references("id").inTable(TableName.AccessApprovalRequest).onDelete("CASCADE");
|
||||
t.timestamps(true, true, true);
|
||||
});
|
||||
}
|
||||
await createOnUpdateTrigger(knex, TableName.AccessApprovalRequestReviewer);
|
||||
}
|
||||
|
||||
export async function down(knex: Knex): Promise<void> {
|
||||
await knex.schema.dropTableIfExists(TableName.AccessApprovalRequestReviewer);
|
||||
await knex.schema.dropTableIfExists(TableName.AccessApprovalRequest);
|
||||
|
||||
await dropOnUpdateTrigger(knex, TableName.AccessApprovalRequestReviewer);
|
||||
await dropOnUpdateTrigger(knex, TableName.AccessApprovalRequest);
|
||||
}
|
@ -1,30 +0,0 @@
import { Knex } from "knex";

import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";

export async function up(knex: Knex): Promise<void> {
  if (!(await knex.schema.hasTable(TableName.IdentityAwsAuth))) {
    await knex.schema.createTable(TableName.IdentityAwsAuth, (t) => {
      t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
      t.bigInteger("accessTokenTTL").defaultTo(7200).notNullable();
      t.bigInteger("accessTokenMaxTTL").defaultTo(7200).notNullable();
      t.bigInteger("accessTokenNumUsesLimit").defaultTo(0).notNullable();
      t.jsonb("accessTokenTrustedIps").notNullable();
      t.timestamps(true, true, true);
      t.uuid("identityId").notNullable().unique();
      t.foreign("identityId").references("id").inTable(TableName.Identity).onDelete("CASCADE");
      t.string("type").notNullable();
      t.string("stsEndpoint").notNullable();
      t.string("allowedPrincipalArns").notNullable();
      t.string("allowedAccountIds").notNullable();
    });
  }

  await createOnUpdateTrigger(knex, TableName.IdentityAwsAuth);
}

export async function down(knex: Knex): Promise<void> {
  await knex.schema.dropTableIfExists(TableName.IdentityAwsAuth);
  await dropOnUpdateTrigger(knex, TableName.IdentityAwsAuth);
}
@ -1,30 +0,0 @@
import { Knex } from "knex";

import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";

export async function up(knex: Knex): Promise<void> {
  if (!(await knex.schema.hasTable(TableName.IdentityGcpAuth))) {
    await knex.schema.createTable(TableName.IdentityGcpAuth, (t) => {
      t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
      t.bigInteger("accessTokenTTL").defaultTo(7200).notNullable();
      t.bigInteger("accessTokenMaxTTL").defaultTo(7200).notNullable();
      t.bigInteger("accessTokenNumUsesLimit").defaultTo(0).notNullable();
      t.jsonb("accessTokenTrustedIps").notNullable();
      t.timestamps(true, true, true);
      t.uuid("identityId").notNullable().unique();
      t.foreign("identityId").references("id").inTable(TableName.Identity).onDelete("CASCADE");
      t.string("type").notNullable();
      t.string("allowedServiceAccounts").notNullable();
      t.string("allowedProjects").notNullable();
      t.string("allowedZones").notNullable(); // GCE only (fully qualified zone names)
    });
  }

  await createOnUpdateTrigger(knex, TableName.IdentityGcpAuth);
}

export async function down(knex: Knex): Promise<void> {
  await knex.schema.dropTableIfExists(TableName.IdentityGcpAuth);
  await dropOnUpdateTrigger(knex, TableName.IdentityGcpAuth);
}
@ -1,24 +0,0 @@
import { Knex } from "knex";

import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";

export async function up(knex: Knex): Promise<void> {
  if (!(await knex.schema.hasTable(TableName.SecretReference))) {
    await knex.schema.createTable(TableName.SecretReference, (t) => {
      t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
      t.string("environment").notNullable();
      t.string("secretPath").notNullable();
      t.uuid("secretId").notNullable();
      t.foreign("secretId").references("id").inTable(TableName.Secret).onDelete("CASCADE");
      t.timestamps(true, true, true);
    });

    await createOnUpdateTrigger(knex, TableName.SecretReference);
  }
}

export async function down(knex: Knex): Promise<void> {
  await knex.schema.dropTableIfExists(TableName.SecretReference);
  await dropOnUpdateTrigger(knex, TableName.SecretReference);
}
@ -1,36 +0,0 @@
import { Knex } from "knex";

import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";

export async function up(knex: Knex): Promise<void> {
  if (!(await knex.schema.hasTable(TableName.IdentityKubernetesAuth))) {
    await knex.schema.createTable(TableName.IdentityKubernetesAuth, (t) => {
      t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
      t.bigInteger("accessTokenTTL").defaultTo(7200).notNullable();
      t.bigInteger("accessTokenMaxTTL").defaultTo(7200).notNullable();
      t.bigInteger("accessTokenNumUsesLimit").defaultTo(0).notNullable();
      t.jsonb("accessTokenTrustedIps").notNullable();
      t.timestamps(true, true, true);
      t.uuid("identityId").notNullable().unique();
      t.foreign("identityId").references("id").inTable(TableName.Identity).onDelete("CASCADE");
      t.string("kubernetesHost").notNullable();
      t.text("encryptedCaCert").notNullable();
      t.string("caCertIV").notNullable();
      t.string("caCertTag").notNullable();
      t.text("encryptedTokenReviewerJwt").notNullable();
      t.string("tokenReviewerJwtIV").notNullable();
      t.string("tokenReviewerJwtTag").notNullable();
      t.string("allowedNamespaces").notNullable();
      t.string("allowedNames").notNullable();
      t.string("allowedAudience").notNullable();
    });
  }

  await createOnUpdateTrigger(knex, TableName.IdentityKubernetesAuth);
}

export async function down(knex: Knex): Promise<void> {
  await knex.schema.dropTableIfExists(TableName.IdentityKubernetesAuth);
  await dropOnUpdateTrigger(knex, TableName.IdentityKubernetesAuth);
}
@ -1,43 +0,0 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  const hasIsSyncedColumn = await knex.schema.hasColumn(TableName.Integration, "isSynced");
  const hasSyncMessageColumn = await knex.schema.hasColumn(TableName.Integration, "syncMessage");
  const hasLastSyncJobId = await knex.schema.hasColumn(TableName.Integration, "lastSyncJobId");

  await knex.schema.alterTable(TableName.Integration, (t) => {
    if (!hasIsSyncedColumn) {
      t.boolean("isSynced").nullable();
    }

    if (!hasSyncMessageColumn) {
      t.text("syncMessage").nullable();
    }

    if (!hasLastSyncJobId) {
      t.string("lastSyncJobId").nullable();
    }
  });
}

export async function down(knex: Knex): Promise<void> {
  const hasIsSyncedColumn = await knex.schema.hasColumn(TableName.Integration, "isSynced");
  const hasSyncMessageColumn = await knex.schema.hasColumn(TableName.Integration, "syncMessage");
  const hasLastSyncJobId = await knex.schema.hasColumn(TableName.Integration, "lastSyncJobId");

  await knex.schema.alterTable(TableName.Integration, (t) => {
    if (hasIsSyncedColumn) {
      t.dropColumn("isSynced");
    }

    if (hasSyncMessageColumn) {
      t.dropColumn("syncMessage");
    }

    if (hasLastSyncJobId) {
      t.dropColumn("lastSyncJobId");
    }
  });
}
@ -1,26 +0,0 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  const doesOrgIdExist = await knex.schema.hasColumn(TableName.AuditLog, "orgId");
  const doesProjectIdExist = await knex.schema.hasColumn(TableName.AuditLog, "projectId");
  if (await knex.schema.hasTable(TableName.AuditLog)) {
    await knex.schema.alterTable(TableName.AuditLog, (t) => {
      if (doesProjectIdExist) t.index("projectId");
      if (doesOrgIdExist) t.index("orgId");
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  const doesOrgIdExist = await knex.schema.hasColumn(TableName.AuditLog, "orgId");
  const doesProjectIdExist = await knex.schema.hasColumn(TableName.AuditLog, "projectId");

  if (await knex.schema.hasTable(TableName.AuditLog)) {
    await knex.schema.alterTable(TableName.AuditLog, (t) => {
      if (doesProjectIdExist) t.dropIndex("projectId");
      if (doesOrgIdExist) t.dropIndex("orgId");
    });
  }
}
@ -1,22 +0,0 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  const doesEnvIdExist = await knex.schema.hasColumn(TableName.SnapshotSecret, "envId");
  if (await knex.schema.hasTable(TableName.SnapshotSecret)) {
    await knex.schema.alterTable(TableName.SnapshotSecret, (t) => {
      if (doesEnvIdExist) t.index("envId");
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  const doesEnvIdExist = await knex.schema.hasColumn(TableName.SnapshotSecret, "envId");

  if (await knex.schema.hasTable(TableName.SnapshotSecret)) {
    await knex.schema.alterTable(TableName.SnapshotSecret, (t) => {
      if (doesEnvIdExist) t.dropIndex("envId");
    });
  }
}
@ -1,22 +0,0 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  const doesEnvIdExist = await knex.schema.hasColumn(TableName.SecretVersion, "envId");
  if (await knex.schema.hasTable(TableName.SecretVersion)) {
    await knex.schema.alterTable(TableName.SecretVersion, (t) => {
      if (doesEnvIdExist) t.index("envId");
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  const doesEnvIdExist = await knex.schema.hasColumn(TableName.SecretVersion, "envId");

  if (await knex.schema.hasTable(TableName.SecretVersion)) {
    await knex.schema.alterTable(TableName.SecretVersion, (t) => {
      if (doesEnvIdExist) t.dropIndex("envId");
    });
  }
}
@ -1,21 +0,0 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  const doesSnapshotIdExist = await knex.schema.hasColumn(TableName.SnapshotSecret, "snapshotId");
  if (await knex.schema.hasTable(TableName.SnapshotSecret)) {
    await knex.schema.alterTable(TableName.SnapshotSecret, (t) => {
      if (doesSnapshotIdExist) t.index("snapshotId");
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  const doesSnapshotIdExist = await knex.schema.hasColumn(TableName.SnapshotSecret, "snapshotId");
  if (await knex.schema.hasTable(TableName.SnapshotSecret)) {
    await knex.schema.alterTable(TableName.SnapshotSecret, (t) => {
      if (doesSnapshotIdExist) t.dropIndex("snapshotId");
    });
  }
}
@ -1,21 +0,0 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  const doesSnapshotIdExist = await knex.schema.hasColumn(TableName.SnapshotFolder, "snapshotId");
  if (await knex.schema.hasTable(TableName.SnapshotFolder)) {
    await knex.schema.alterTable(TableName.SnapshotFolder, (t) => {
      if (doesSnapshotIdExist) t.index("snapshotId");
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  const doesSnapshotIdExist = await knex.schema.hasColumn(TableName.SnapshotFolder, "snapshotId");
  if (await knex.schema.hasTable(TableName.SnapshotFolder)) {
    await knex.schema.alterTable(TableName.SnapshotFolder, (t) => {
      if (doesSnapshotIdExist) t.dropIndex("snapshotId");
    });
  }
}
@ -1,24 +0,0 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  const doesFolderIdExist = await knex.schema.hasColumn(TableName.Secret, "folderId");
  const doesUserIdExist = await knex.schema.hasColumn(TableName.Secret, "userId");
  if (await knex.schema.hasTable(TableName.Secret)) {
    await knex.schema.alterTable(TableName.Secret, (t) => {
      if (doesFolderIdExist && doesUserIdExist) t.index(["folderId", "userId"]);
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  const doesFolderIdExist = await knex.schema.hasColumn(TableName.Secret, "folderId");
  const doesUserIdExist = await knex.schema.hasColumn(TableName.Secret, "userId");

  if (await knex.schema.hasTable(TableName.Secret)) {
    await knex.schema.alterTable(TableName.Secret, (t) => {
      if (doesUserIdExist && doesFolderIdExist) t.dropIndex(["folderId", "userId"]);
    });
  }
}
@ -1,22 +0,0 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  const doesExpireAtExist = await knex.schema.hasColumn(TableName.AuditLog, "expiresAt");
  if (await knex.schema.hasTable(TableName.AuditLog)) {
    await knex.schema.alterTable(TableName.AuditLog, (t) => {
      if (doesExpireAtExist) t.index("expiresAt");
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  const doesExpireAtExist = await knex.schema.hasColumn(TableName.AuditLog, "expiresAt");

  if (await knex.schema.hasTable(TableName.AuditLog)) {
    await knex.schema.alterTable(TableName.AuditLog, (t) => {
      if (doesExpireAtExist) t.dropIndex("expiresAt");
    });
  }
}
@ -1,29 +0,0 @@
import { Knex } from "knex";

import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";

export async function up(knex: Knex): Promise<void> {
  if (!(await knex.schema.hasTable(TableName.IdentityAzureAuth))) {
    await knex.schema.createTable(TableName.IdentityAzureAuth, (t) => {
      t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
      t.bigInteger("accessTokenTTL").defaultTo(7200).notNullable();
      t.bigInteger("accessTokenMaxTTL").defaultTo(7200).notNullable();
      t.bigInteger("accessTokenNumUsesLimit").defaultTo(0).notNullable();
      t.jsonb("accessTokenTrustedIps").notNullable();
      t.timestamps(true, true, true);
      t.uuid("identityId").notNullable().unique();
      t.foreign("identityId").references("id").inTable(TableName.Identity).onDelete("CASCADE");
      t.string("tenantId").notNullable();
      t.string("resource").notNullable();
      t.string("allowedServicePrincipalIds").notNullable();
    });
  }

  await createOnUpdateTrigger(knex, TableName.IdentityAzureAuth);
}

export async function down(knex: Knex): Promise<void> {
  await knex.schema.dropTableIfExists(TableName.IdentityAzureAuth);
  await dropOnUpdateTrigger(knex, TableName.IdentityAzureAuth);
}
@ -1,43 +0,0 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  const hasConsecutiveFailedMfaAttempts = await knex.schema.hasColumn(TableName.Users, "consecutiveFailedMfaAttempts");
  const hasIsLocked = await knex.schema.hasColumn(TableName.Users, "isLocked");
  const hasTemporaryLockDateEnd = await knex.schema.hasColumn(TableName.Users, "temporaryLockDateEnd");

  await knex.schema.alterTable(TableName.Users, (t) => {
    if (!hasConsecutiveFailedMfaAttempts) {
      t.integer("consecutiveFailedMfaAttempts").defaultTo(0);
    }

    if (!hasIsLocked) {
      t.boolean("isLocked").defaultTo(false);
    }

    if (!hasTemporaryLockDateEnd) {
      t.dateTime("temporaryLockDateEnd").nullable();
    }
  });
}

export async function down(knex: Knex): Promise<void> {
  const hasConsecutiveFailedMfaAttempts = await knex.schema.hasColumn(TableName.Users, "consecutiveFailedMfaAttempts");
  const hasIsLocked = await knex.schema.hasColumn(TableName.Users, "isLocked");
  const hasTemporaryLockDateEnd = await knex.schema.hasColumn(TableName.Users, "temporaryLockDateEnd");

  await knex.schema.alterTable(TableName.Users, (t) => {
    if (hasConsecutiveFailedMfaAttempts) {
      t.dropColumn("consecutiveFailedMfaAttempts");
    }

    if (hasIsLocked) {
      t.dropColumn("isLocked");
    }

    if (hasTemporaryLockDateEnd) {
      t.dropColumn("temporaryLockDateEnd");
    }
  });
}
@ -1,29 +0,0 @@
import { Knex } from "knex";

import { TableName } from "../schemas";
import { createOnUpdateTrigger } from "../utils";

export async function up(knex: Knex): Promise<void> {
  if (!(await knex.schema.hasTable(TableName.SecretSharing))) {
    await knex.schema.createTable(TableName.SecretSharing, (t) => {
      t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
      t.string("name").notNullable();
      t.text("encryptedValue").notNullable();
      t.text("iv").notNullable();
      t.text("tag").notNullable();
      t.text("hashedHex").notNullable();
      t.timestamp("expiresAt").notNullable();
      t.uuid("userId").notNullable();
      t.uuid("orgId").notNullable();
      t.foreign("userId").references("id").inTable(TableName.Users).onDelete("CASCADE");
      t.foreign("orgId").references("id").inTable(TableName.Organization).onDelete("CASCADE");
      t.timestamps(true, true, true);
    });

    await createOnUpdateTrigger(knex, TableName.SecretSharing);
  }
}

export async function down(knex: Knex): Promise<void> {
  await knex.schema.dropTableIfExists(TableName.SecretSharing);
}
@ -1,21 +0,0 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  const doesSecretVersionIdExist = await knex.schema.hasColumn(TableName.SnapshotSecret, "secretVersionId");
  if (await knex.schema.hasTable(TableName.SnapshotSecret)) {
    await knex.schema.alterTable(TableName.SnapshotSecret, (t) => {
      if (doesSecretVersionIdExist) t.index("secretVersionId");
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  const doesSecretVersionIdExist = await knex.schema.hasColumn(TableName.SnapshotSecret, "secretVersionId");
  if (await knex.schema.hasTable(TableName.SnapshotSecret)) {
    await knex.schema.alterTable(TableName.SnapshotSecret, (t) => {
      if (doesSecretVersionIdExist) t.dropIndex("secretVersionId");
    });
  }
}
@ -1,29 +0,0 @@
import { Knex } from "knex";

import { TableName } from "../schemas";
import { createOnUpdateTrigger } from "../utils";

export async function up(knex: Knex): Promise<void> {
  if (!(await knex.schema.hasTable(TableName.SecretSharing))) {
    await knex.schema.createTable(TableName.SecretSharing, (t) => {
      t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
      t.string("name").notNullable();
      t.text("encryptedValue").notNullable();
      t.text("iv").notNullable();
      t.text("tag").notNullable();
      t.text("hashedHex").notNullable();
      t.timestamp("expiresAt").notNullable();
      t.uuid("userId").notNullable();
      t.uuid("orgId").notNullable();
      t.foreign("userId").references("id").inTable(TableName.Users).onDelete("CASCADE");
      t.foreign("orgId").references("id").inTable(TableName.Organization).onDelete("CASCADE");
      t.timestamps(true, true, true);
    });

    await createOnUpdateTrigger(knex, TableName.SecretSharing);
  }
}

export async function down(knex: Knex): Promise<void> {
  await knex.schema.dropTableIfExists(TableName.SecretSharing);
}
@ -1,33 +0,0 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  const hasExpiresAfterViewsColumn = await knex.schema.hasColumn(TableName.SecretSharing, "expiresAfterViews");
  const hasSecretNameColumn = await knex.schema.hasColumn(TableName.SecretSharing, "name");

  await knex.schema.alterTable(TableName.SecretSharing, (t) => {
    if (!hasExpiresAfterViewsColumn) {
      t.integer("expiresAfterViews");
    }

    if (hasSecretNameColumn) {
      t.dropColumn("name");
    }
  });
}

export async function down(knex: Knex): Promise<void> {
  const hasExpiresAfterViewsColumn = await knex.schema.hasColumn(TableName.SecretSharing, "expiresAfterViews");
  const hasSecretNameColumn = await knex.schema.hasColumn(TableName.SecretSharing, "name");

  await knex.schema.alterTable(TableName.SecretSharing, (t) => {
    if (hasExpiresAfterViewsColumn) {
      t.dropColumn("expiresAfterViews");
    }

    if (!hasSecretNameColumn) {
      t.string("name").notNullable();
    }
  });
}
@ -1,85 +0,0 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  const doesSecretImportIsReplicationExist = await knex.schema.hasColumn(TableName.SecretImport, "isReplication");
  const doesSecretImportIsReplicationSuccessExist = await knex.schema.hasColumn(
    TableName.SecretImport,
    "isReplicationSuccess"
  );
  const doesSecretImportReplicationStatusExist = await knex.schema.hasColumn(
    TableName.SecretImport,
    "replicationStatus"
  );
  const doesSecretImportLastReplicatedExist = await knex.schema.hasColumn(TableName.SecretImport, "lastReplicated");
  const doesSecretImportIsReservedExist = await knex.schema.hasColumn(TableName.SecretImport, "isReserved");

  if (await knex.schema.hasTable(TableName.SecretImport)) {
    await knex.schema.alterTable(TableName.SecretImport, (t) => {
      if (!doesSecretImportIsReplicationExist) t.boolean("isReplication").defaultTo(false);
      if (!doesSecretImportIsReplicationSuccessExist) t.boolean("isReplicationSuccess").nullable();
      if (!doesSecretImportReplicationStatusExist) t.text("replicationStatus").nullable();
      if (!doesSecretImportLastReplicatedExist) t.datetime("lastReplicated").nullable();
      if (!doesSecretImportIsReservedExist) t.boolean("isReserved").defaultTo(false);
    });
  }

  const doesSecretFolderReservedExist = await knex.schema.hasColumn(TableName.SecretFolder, "isReserved");
  if (await knex.schema.hasTable(TableName.SecretFolder)) {
    await knex.schema.alterTable(TableName.SecretFolder, (t) => {
      if (!doesSecretFolderReservedExist) t.boolean("isReserved").defaultTo(false);
    });
  }

  const doesSecretApprovalRequestIsReplicatedExist = await knex.schema.hasColumn(
    TableName.SecretApprovalRequest,
    "isReplicated"
  );
  if (await knex.schema.hasTable(TableName.SecretApprovalRequest)) {
    await knex.schema.alterTable(TableName.SecretApprovalRequest, (t) => {
      if (!doesSecretApprovalRequestIsReplicatedExist) t.boolean("isReplicated");
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  const doesSecretImportIsReplicationExist = await knex.schema.hasColumn(TableName.SecretImport, "isReplication");
  const doesSecretImportIsReplicationSuccessExist = await knex.schema.hasColumn(
    TableName.SecretImport,
    "isReplicationSuccess"
  );
  const doesSecretImportReplicationStatusExist = await knex.schema.hasColumn(
    TableName.SecretImport,
    "replicationStatus"
  );
  const doesSecretImportLastReplicatedExist = await knex.schema.hasColumn(TableName.SecretImport, "lastReplicated");
  const doesSecretImportIsReservedExist = await knex.schema.hasColumn(TableName.SecretImport, "isReserved");

  if (await knex.schema.hasTable(TableName.SecretImport)) {
    await knex.schema.alterTable(TableName.SecretImport, (t) => {
      if (doesSecretImportIsReplicationExist) t.dropColumn("isReplication");
      if (doesSecretImportIsReplicationSuccessExist) t.dropColumn("isReplicationSuccess");
      if (doesSecretImportReplicationStatusExist) t.dropColumn("replicationStatus");
      if (doesSecretImportLastReplicatedExist) t.dropColumn("lastReplicated");
      if (doesSecretImportIsReservedExist) t.dropColumn("isReserved");
    });
  }

  const doesSecretFolderReservedExist = await knex.schema.hasColumn(TableName.SecretFolder, "isReserved");
  if (await knex.schema.hasTable(TableName.SecretFolder)) {
    await knex.schema.alterTable(TableName.SecretFolder, (t) => {
      if (doesSecretFolderReservedExist) t.dropColumn("isReserved");
    });
  }

  const doesSecretApprovalRequestIsReplicatedExist = await knex.schema.hasColumn(
    TableName.SecretApprovalRequest,
    "isReplicated"
  );
  if (await knex.schema.hasTable(TableName.SecretApprovalRequest)) {
    await knex.schema.alterTable(TableName.SecretApprovalRequest, (t) => {
      if (doesSecretApprovalRequestIsReplicatedExist) t.dropColumn("isReplicated");
    });
  }
}
@ -1,56 +0,0 @@
import { Knex } from "knex";

import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";

export async function up(knex: Knex): Promise<void> {
  if (!(await knex.schema.hasTable(TableName.KmsServerRootConfig))) {
    await knex.schema.createTable(TableName.KmsServerRootConfig, (t) => {
      t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
      t.binary("encryptedRootKey").notNullable();
    });
  }

  await createOnUpdateTrigger(knex, TableName.KmsServerRootConfig);

  if (!(await knex.schema.hasTable(TableName.KmsKey))) {
    await knex.schema.createTable(TableName.KmsKey, (t) => {
      t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
      t.binary("encryptedKey").notNullable();
      t.string("encryptionAlgorithm").notNullable();
      t.integer("version").defaultTo(1).notNullable();
      t.string("description");
      t.boolean("isDisabled").defaultTo(false);
      t.boolean("isReserved").defaultTo(true);
      t.string("projectId");
      t.foreign("projectId").references("id").inTable(TableName.Project).onDelete("CASCADE");
      t.uuid("orgId");
      t.foreign("orgId").references("id").inTable(TableName.Organization).onDelete("CASCADE");
    });
  }

  await createOnUpdateTrigger(knex, TableName.KmsKey);

  if (!(await knex.schema.hasTable(TableName.KmsKeyVersion))) {
    await knex.schema.createTable(TableName.KmsKeyVersion, (t) => {
      t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
      t.binary("encryptedKey").notNullable();
      t.integer("version").notNullable();
      t.uuid("kmsKeyId").notNullable();
      t.foreign("kmsKeyId").references("id").inTable(TableName.KmsKey).onDelete("CASCADE");
    });
  }

  await createOnUpdateTrigger(knex, TableName.KmsKeyVersion);
}

export async function down(knex: Knex): Promise<void> {
  await knex.schema.dropTableIfExists(TableName.KmsServerRootConfig);
  await dropOnUpdateTrigger(knex, TableName.KmsServerRootConfig);

  await knex.schema.dropTableIfExists(TableName.KmsKeyVersion);
  await dropOnUpdateTrigger(knex, TableName.KmsKeyVersion);

  await knex.schema.dropTableIfExists(TableName.KmsKey);
  await dropOnUpdateTrigger(knex, TableName.KmsKey);
}
@ -1,29 +0,0 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  const hasConsecutiveFailedPasswordAttempts = await knex.schema.hasColumn(
    TableName.Users,
    "consecutiveFailedPasswordAttempts"
  );

  await knex.schema.alterTable(TableName.Users, (tb) => {
    if (!hasConsecutiveFailedPasswordAttempts) {
      tb.integer("consecutiveFailedPasswordAttempts").defaultTo(0);
    }
  });
}

export async function down(knex: Knex): Promise<void> {
  const hasConsecutiveFailedPasswordAttempts = await knex.schema.hasColumn(
    TableName.Users,
    "consecutiveFailedPasswordAttempts"
  );

  await knex.schema.alterTable(TableName.Users, (tb) => {
    if (hasConsecutiveFailedPasswordAttempts) {
      tb.dropColumn("consecutiveFailedPasswordAttempts");
    }
  });
}
@ -1,21 +0,0 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  const hasPitVersionLimitColumn = await knex.schema.hasColumn(TableName.Project, "pitVersionLimit");
  await knex.schema.alterTable(TableName.Project, (tb) => {
    if (!hasPitVersionLimitColumn) {
      tb.integer("pitVersionLimit").notNullable().defaultTo(10);
    }
  });
}

export async function down(knex: Knex): Promise<void> {
  const hasPitVersionLimitColumn = await knex.schema.hasColumn(TableName.Project, "pitVersionLimit");
  await knex.schema.alterTable(TableName.Project, (tb) => {
    if (hasPitVersionLimitColumn) {
      tb.dropColumn("pitVersionLimit");
    }
  });
}
@ -1,137 +0,0 @@
import { Knex } from "knex";

import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";

export async function up(knex: Knex): Promise<void> {
  if (await knex.schema.hasTable(TableName.Project)) {
    const doesProjectCertificateKeyIdExist = await knex.schema.hasColumn(TableName.Project, "kmsCertificateKeyId");
    await knex.schema.alterTable(TableName.Project, (t) => {
      if (!doesProjectCertificateKeyIdExist) {
        t.uuid("kmsCertificateKeyId").nullable();
        t.foreign("kmsCertificateKeyId").references("id").inTable(TableName.KmsKey);
      }
    });
  }

  if (!(await knex.schema.hasTable(TableName.CertificateAuthority))) {
    await knex.schema.createTable(TableName.CertificateAuthority, (t) => {
      t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
      t.timestamps(true, true, true);
      t.uuid("parentCaId").nullable();
      t.foreign("parentCaId").references("id").inTable(TableName.CertificateAuthority).onDelete("CASCADE");
      t.string("projectId").notNullable();
      t.foreign("projectId").references("id").inTable(TableName.Project).onDelete("CASCADE");
      t.string("type").notNullable(); // root / intermediate
      t.string("status").notNullable(); // active / pending-certificate
      t.string("friendlyName").notNullable();
      t.string("organization").notNullable();
      t.string("ou").notNullable();
      t.string("country").notNullable();
      t.string("province").notNullable();
      t.string("locality").notNullable();
      t.string("commonName").notNullable();
      t.string("dn").notNullable();
      t.string("serialNumber").nullable().unique();
      t.integer("maxPathLength").nullable();
      t.string("keyAlgorithm").notNullable();
      t.datetime("notBefore").nullable();
      t.datetime("notAfter").nullable();
    });
  }

  if (!(await knex.schema.hasTable(TableName.CertificateAuthorityCert))) {
    // table to keep track of certificates belonging to CA
    await knex.schema.createTable(TableName.CertificateAuthorityCert, (t) => {
      t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
      t.timestamps(true, true, true);
      t.uuid("caId").notNullable().unique();
      t.foreign("caId").references("id").inTable(TableName.CertificateAuthority).onDelete("CASCADE");
      t.binary("encryptedCertificate").notNullable();
      t.binary("encryptedCertificateChain").notNullable();
    });
  }

  if (!(await knex.schema.hasTable(TableName.CertificateAuthoritySecret))) {
    await knex.schema.createTable(TableName.CertificateAuthoritySecret, (t) => {
      t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
      t.timestamps(true, true, true);
      t.uuid("caId").notNullable().unique();
      t.foreign("caId").references("id").inTable(TableName.CertificateAuthority).onDelete("CASCADE");
      t.binary("encryptedPrivateKey").notNullable();
    });
  }

  if (!(await knex.schema.hasTable(TableName.CertificateAuthorityCrl))) {
    await knex.schema.createTable(TableName.CertificateAuthorityCrl, (t) => {
      t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
      t.timestamps(true, true, true);
      t.uuid("caId").notNullable().unique();
      t.foreign("caId").references("id").inTable(TableName.CertificateAuthority).onDelete("CASCADE");
      t.binary("encryptedCrl").notNullable();
    });
  }

  if (!(await knex.schema.hasTable(TableName.Certificate))) {
    await knex.schema.createTable(TableName.Certificate, (t) => {
      t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
      t.timestamps(true, true, true);
      t.uuid("caId").notNullable();
      t.foreign("caId").references("id").inTable(TableName.CertificateAuthority).onDelete("CASCADE");
      t.string("status").notNullable(); // active / pending-certificate
      t.string("serialNumber").notNullable().unique();
      t.string("friendlyName").notNullable();
      t.string("commonName").notNullable();
      t.datetime("notBefore").notNullable();
      t.datetime("notAfter").notNullable();
      t.datetime("revokedAt").nullable();
      t.integer("revocationReason").nullable(); // integer based on crl reason in RFC 5280
    });
  }

  if (!(await knex.schema.hasTable(TableName.CertificateBody))) {
    await knex.schema.createTable(TableName.CertificateBody, (t) => {
      t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
      t.timestamps(true, true, true);
      t.uuid("certId").notNullable().unique();
      t.foreign("certId").references("id").inTable(TableName.Certificate).onDelete("CASCADE");
      t.binary("encryptedCertificate").notNullable();
    });
  }

  await createOnUpdateTrigger(knex, TableName.CertificateAuthority);
  await createOnUpdateTrigger(knex, TableName.CertificateAuthorityCert);
  await createOnUpdateTrigger(knex, TableName.CertificateAuthoritySecret);
  await createOnUpdateTrigger(knex, TableName.Certificate);
  await createOnUpdateTrigger(knex, TableName.CertificateBody);
}

export async function down(knex: Knex): Promise<void> {
  // project
  if (await knex.schema.hasTable(TableName.Project)) {
    const doesProjectCertificateKeyIdExist = await knex.schema.hasColumn(TableName.Project, "kmsCertificateKeyId");
    await knex.schema.alterTable(TableName.Project, (t) => {
      if (doesProjectCertificateKeyIdExist) t.dropColumn("kmsCertificateKeyId");
    });
  }

  // certificates
  await knex.schema.dropTableIfExists(TableName.CertificateBody);
  await dropOnUpdateTrigger(knex, TableName.CertificateBody);

  await knex.schema.dropTableIfExists(TableName.Certificate);
  await dropOnUpdateTrigger(knex, TableName.Certificate);

  // certificate authorities
  await knex.schema.dropTableIfExists(TableName.CertificateAuthoritySecret);
  await dropOnUpdateTrigger(knex, TableName.CertificateAuthoritySecret);

  await knex.schema.dropTableIfExists(TableName.CertificateAuthorityCrl);
  await dropOnUpdateTrigger(knex, TableName.CertificateAuthorityCrl);

  await knex.schema.dropTableIfExists(TableName.CertificateAuthorityCert);
  await dropOnUpdateTrigger(knex, TableName.CertificateAuthorityCert);

  await knex.schema.dropTableIfExists(TableName.CertificateAuthority);
  await dropOnUpdateTrigger(knex, TableName.CertificateAuthority);
}
@ -1,25 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.

import { z } from "zod";

import { TImmutableDBKeys } from "./models";

export const AccessApprovalPoliciesApproversSchema = z.object({
  id: z.string().uuid(),
  approverId: z.string().uuid(),
  policyId: z.string().uuid(),
  createdAt: z.date(),
  updatedAt: z.date()
});

export type TAccessApprovalPoliciesApprovers = z.infer<typeof AccessApprovalPoliciesApproversSchema>;
export type TAccessApprovalPoliciesApproversInsert = Omit<
  z.input<typeof AccessApprovalPoliciesApproversSchema>,
  TImmutableDBKeys
>;
export type TAccessApprovalPoliciesApproversUpdate = Partial<
  Omit<z.input<typeof AccessApprovalPoliciesApproversSchema>, TImmutableDBKeys>
>;
@ -1,26 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.

import { z } from "zod";

import { TImmutableDBKeys } from "./models";

export const AccessApprovalRequestsReviewersSchema = z.object({
  id: z.string().uuid(),
  member: z.string().uuid(),
  status: z.string(),
  requestId: z.string().uuid(),
  createdAt: z.date(),
  updatedAt: z.date()
});

export type TAccessApprovalRequestsReviewers = z.infer<typeof AccessApprovalRequestsReviewersSchema>;
export type TAccessApprovalRequestsReviewersInsert = Omit<
  z.input<typeof AccessApprovalRequestsReviewersSchema>,
  TImmutableDBKeys
>;
export type TAccessApprovalRequestsReviewersUpdate = Partial<
  Omit<z.input<typeof AccessApprovalRequestsReviewersSchema>, TImmutableDBKeys>
>;
@ -1,26 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.

import { z } from "zod";

import { TImmutableDBKeys } from "./models";

export const AccessApprovalRequestsSchema = z.object({
  id: z.string().uuid(),
  policyId: z.string().uuid(),
  privilegeId: z.string().uuid().nullable().optional(),
  requestedBy: z.string().uuid(),
  isTemporary: z.boolean(),
  temporaryRange: z.string().nullable().optional(),
  permissions: z.unknown(),
  createdAt: z.date(),
  updatedAt: z.date()
});

export type TAccessApprovalRequests = z.infer<typeof AccessApprovalRequestsSchema>;
export type TAccessApprovalRequestsInsert = Omit<z.input<typeof AccessApprovalRequestsSchema>, TImmutableDBKeys>;
export type TAccessApprovalRequestsUpdate = Partial<
  Omit<z.input<typeof AccessApprovalRequestsSchema>, TImmutableDBKeys>
>;
@ -19,5 +19,5 @@ export const ApiKeysSchema = z.object({
});

export type TApiKeys = z.infer<typeof ApiKeysSchema>;
export type TApiKeysInsert = Omit<z.input<typeof ApiKeysSchema>, TImmutableDBKeys>;
export type TApiKeysUpdate = Partial<Omit<z.input<typeof ApiKeysSchema>, TImmutableDBKeys>>;
export type TApiKeysInsert = Omit<TApiKeys, TImmutableDBKeys>;
export type TApiKeysUpdate = Partial<Omit<TApiKeys, TImmutableDBKeys>>;
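This hunk and the similar ones below change the generated Insert/Update helper types from Omit<z.input<typeof Schema>, ...> to Omit<TSchema, ...>, i.e. from Zod's input type to its inferred output type. As a rough illustration of the difference (not code from this repository; ExampleSchema is invented for the example), a field with a default is optional in z.input but required in z.infer:

import { z } from "zod";

// Illustrative only: "ExampleSchema" is not part of the Infisical codebase.
const ExampleSchema = z.object({
  id: z.string().uuid(),
  version: z.number().default(1)
});

// z.input: what callers may pass in -- defaulted fields are optional.
type ExampleInput = z.input<typeof ExampleSchema>; // { id: string; version?: number }

// z.infer (output type): what parsing returns -- defaulted fields are always present.
type ExampleRow = z.infer<typeof ExampleSchema>; // { id: string; version: number }

// Basing the Insert/Update helpers on the inferred row type (as TApiKeysInsert now does)
// tightens the types for defaulted columns.
const row: ExampleRow = ExampleSchema.parse({ id: "0b0f2f6e-2d1b-4b1e-9a51-0f6f0a2c9d3e" });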
@ -1,25 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.

import { z } from "zod";

import { TImmutableDBKeys } from "./models";

export const AuditLogStreamsSchema = z.object({
  id: z.string().uuid(),
  url: z.string(),
  encryptedHeadersCiphertext: z.string().nullable().optional(),
  encryptedHeadersIV: z.string().nullable().optional(),
  encryptedHeadersTag: z.string().nullable().optional(),
  encryptedHeadersAlgorithm: z.string().nullable().optional(),
  encryptedHeadersKeyEncoding: z.string().nullable().optional(),
  orgId: z.string().uuid(),
  createdAt: z.date(),
  updatedAt: z.date()
});

export type TAuditLogStreams = z.infer<typeof AuditLogStreamsSchema>;
export type TAuditLogStreamsInsert = Omit<z.input<typeof AuditLogStreamsSchema>, TImmutableDBKeys>;
export type TAuditLogStreamsUpdate = Partial<Omit<z.input<typeof AuditLogStreamsSchema>, TImmutableDBKeys>>;
@ -24,5 +24,5 @@ export const AuditLogsSchema = z.object({
});

export type TAuditLogs = z.infer<typeof AuditLogsSchema>;
export type TAuditLogsInsert = Omit<z.input<typeof AuditLogsSchema>, TImmutableDBKeys>;
export type TAuditLogsUpdate = Partial<Omit<z.input<typeof AuditLogsSchema>, TImmutableDBKeys>>;
export type TAuditLogsInsert = Omit<TAuditLogs, TImmutableDBKeys>;
export type TAuditLogsUpdate = Partial<Omit<TAuditLogs, TImmutableDBKeys>>;
@ -20,5 +20,5 @@ export const AuthTokenSessionsSchema = z.object({
});

export type TAuthTokenSessions = z.infer<typeof AuthTokenSessionsSchema>;
export type TAuthTokenSessionsInsert = Omit<z.input<typeof AuthTokenSessionsSchema>, TImmutableDBKeys>;
export type TAuthTokenSessionsUpdate = Partial<Omit<z.input<typeof AuthTokenSessionsSchema>, TImmutableDBKeys>>;
export type TAuthTokenSessionsInsert = Omit<TAuthTokenSessions, TImmutableDBKeys>;
export type TAuthTokenSessionsUpdate = Partial<Omit<TAuthTokenSessions, TImmutableDBKeys>>;
@ -21,5 +21,5 @@ export const AuthTokensSchema = z.object({
});

export type TAuthTokens = z.infer<typeof AuthTokensSchema>;
export type TAuthTokensInsert = Omit<z.input<typeof AuthTokensSchema>, TImmutableDBKeys>;
export type TAuthTokensUpdate = Partial<Omit<z.input<typeof AuthTokensSchema>, TImmutableDBKeys>>;
export type TAuthTokensInsert = Omit<TAuthTokens, TImmutableDBKeys>;
export type TAuthTokensUpdate = Partial<Omit<TAuthTokens, TImmutableDBKeys>>;
@ -22,5 +22,5 @@ export const BackupPrivateKeySchema = z.object({
});

export type TBackupPrivateKey = z.infer<typeof BackupPrivateKeySchema>;
export type TBackupPrivateKeyInsert = Omit<z.input<typeof BackupPrivateKeySchema>, TImmutableDBKeys>;
export type TBackupPrivateKeyUpdate = Partial<Omit<z.input<typeof BackupPrivateKeySchema>, TImmutableDBKeys>>;
export type TBackupPrivateKeyInsert = Omit<TBackupPrivateKey, TImmutableDBKeys>;
export type TBackupPrivateKeyUpdate = Partial<Omit<TBackupPrivateKey, TImmutableDBKeys>>;
Some files were not shown because too many files have changed in this diff.